forecast/0000755000176200001440000000000013617774362012100 5ustar liggesusersforecast/NAMESPACE0000644000176200001440000002005213617635023013304 0ustar liggesusers# Generated by roxygen2: do not edit by hand S3method("[",msts) S3method(accuracy,default) S3method(accuracy,mforecast) S3method(as.Date,timeDate) S3method(as.character,Arima) S3method(as.character,bats) S3method(as.character,ets) S3method(as.character,tbats) S3method(as.data.frame,forecast) S3method(as.data.frame,mforecast) S3method(as.ts,forecast) S3method(autolayer,forecast) S3method(autolayer,mforecast) S3method(autolayer,msts) S3method(autolayer,mts) S3method(autolayer,ts) S3method(autoplot,Arima) S3method(autoplot,StructTS) S3method(autoplot,acf) S3method(autoplot,ar) S3method(autoplot,bats) S3method(autoplot,decomposed.ts) S3method(autoplot,ets) S3method(autoplot,forecast) S3method(autoplot,mforecast) S3method(autoplot,mpacf) S3method(autoplot,mstl) S3method(autoplot,msts) S3method(autoplot,mts) S3method(autoplot,seas) S3method(autoplot,splineforecast) S3method(autoplot,stl) S3method(autoplot,tbats) S3method(autoplot,ts) S3method(coef,ets) S3method(fitted,ARFIMA) S3method(fitted,Arima) S3method(fitted,ar) S3method(fitted,bats) S3method(fitted,ets) S3method(fitted,forecast_ARIMA) S3method(fitted,lagwalk) S3method(fitted,modelAR) S3method(fitted,nnetar) S3method(fitted,tbats) S3method(fitted,tslm) S3method(forecast,Arima) S3method(forecast,HoltWinters) S3method(forecast,StructTS) S3method(forecast,ar) S3method(forecast,baggedModel) S3method(forecast,bats) S3method(forecast,default) S3method(forecast,ets) S3method(forecast,forecast) S3method(forecast,forecast_ARIMA) S3method(forecast,fracdiff) S3method(forecast,lagwalk) S3method(forecast,lm) S3method(forecast,mlm) S3method(forecast,modelAR) S3method(forecast,mstl) S3method(forecast,mts) S3method(forecast,nnetar) S3method(forecast,stl) S3method(forecast,stlm) S3method(forecast,tbats) S3method(forecast,ts) S3method(forecast,varest) S3method(fortify,ts) S3method(getResponse,Arima) S3method(getResponse,ar) S3method(getResponse,baggedModel) S3method(getResponse,bats) S3method(getResponse,default) S3method(getResponse,fracdiff) S3method(getResponse,lm) S3method(getResponse,mforecast) S3method(getResponse,tbats) S3method(head,ts) S3method(logLik,ets) S3method(nobs,ets) S3method(plot,Arima) S3method(plot,ar) S3method(plot,bats) S3method(plot,ets) S3method(plot,forecast) S3method(plot,mforecast) S3method(plot,splineforecast) S3method(plot,tbats) S3method(predict,default) S3method(print,CVar) S3method(print,OCSBtest) S3method(print,baggedModel) S3method(print,bats) S3method(print,ets) S3method(print,forecast) S3method(print,forecast_ARIMA) S3method(print,lagwalk) S3method(print,mforecast) S3method(print,modelAR) S3method(print,msts) S3method(print,nnetar) S3method(print,nnetarmodels) S3method(print,tbats) S3method(residuals,ARFIMA) S3method(residuals,Arima) S3method(residuals,ar) S3method(residuals,bats) S3method(residuals,ets) S3method(residuals,forecast) S3method(residuals,forecast_ARIMA) S3method(residuals,nnetar) S3method(residuals,stlm) S3method(residuals,tbats) S3method(residuals,tslm) S3method(seasadj,decomposed.ts) S3method(seasadj,mstl) S3method(seasadj,seas) S3method(seasadj,stl) S3method(seasadj,tbats) S3method(simulate,Arima) S3method(simulate,ar) S3method(simulate,ets) S3method(simulate,fracdiff) S3method(simulate,lagwalk) S3method(simulate,modelAR) S3method(simulate,nnetar) S3method(subset,msts) S3method(subset,ts) S3method(summary,Arima) S3method(summary,ets) 
S3method(summary,forecast) S3method(summary,mforecast) S3method(tail,ts) S3method(window,msts) export("%>%") export(Acf) export(Arima) export(BoxCox) export(BoxCox.lambda) export(CV) export(CVar) export(Ccf) export(GeomForecast) export(InvBoxCox) export(Pacf) export(StatForecast) export(accuracy) export(arfima) export(arima.errors) export(arimaorder) export(auto.arima) export(autolayer) export(autoplot) export(baggedETS) export(baggedModel) export(bats) export(bizdays) export(bld.mbb.bootstrap) export(checkresiduals) export(croston) export(dm.test) export(dshw) export(easter) export(ets) export(findfrequency) export(forecast) export(forecast.ets) export(fourier) export(fourierf) export(geom_forecast) export(getResponse) export(ggAcf) export(ggCcf) export(ggPacf) export(gghistogram) export(gglagchull) export(gglagplot) export(ggmonthplot) export(ggseasonplot) export(ggsubseriesplot) export(ggtaperedacf) export(ggtaperedpacf) export(ggtsdisplay) export(holt) export(hw) export(is.Arima) export(is.acf) export(is.baggedModel) export(is.bats) export(is.constant) export(is.ets) export(is.forecast) export(is.mforecast) export(is.modelAR) export(is.nnetar) export(is.nnetarmodels) export(is.splineforecast) export(is.stlm) export(ma) export(meanf) export(modelAR) export(monthdays) export(mstl) export(msts) export(na.interp) export(naive) export(ndiffs) export(nnetar) export(nsdiffs) export(ocsb.test) export(remainder) export(rwf) export(seasadj) export(seasonal) export(seasonaldummy) export(seasonaldummyf) export(seasonplot) export(ses) export(sindexf) export(snaive) export(splinef) export(stlf) export(stlm) export(taperedacf) export(taperedpacf) export(tbats) export(tbats.components) export(thetaf) export(trendcycle) export(tsCV) export(tsclean) export(tsdisplay) export(tslm) export(tsoutliers) import(Rcpp) import(parallel) importFrom(colorspace,sequential_hcl) importFrom(fracdiff,diffseries) importFrom(fracdiff,fracdiff) importFrom(fracdiff,fracdiff.sim) importFrom(ggplot2,autoplot) importFrom(ggplot2,fortify) importFrom(grDevices,gray) importFrom(grDevices,heat.colors) importFrom(grDevices,nclass.FD) importFrom(grDevices,palette) importFrom(graphics,abline) importFrom(graphics,axis) importFrom(graphics,grid) importFrom(graphics,hist) importFrom(graphics,layout) importFrom(graphics,lines) importFrom(graphics,mtext) importFrom(graphics,par) importFrom(graphics,plot) importFrom(graphics,points) importFrom(graphics,polygon) importFrom(graphics,text) importFrom(graphics,title) importFrom(lmtest,bgtest) importFrom(magrittr,"%>%") importFrom(nnet,nnet) importFrom(stats,"tsp<-") importFrom(stats,AIC) importFrom(stats,BIC) importFrom(stats,Box.test) importFrom(stats,acf) importFrom(stats,aggregate) importFrom(stats,approx) importFrom(stats,ar) importFrom(stats,arima) importFrom(stats,arima.sim) importFrom(stats,as.formula) importFrom(stats,as.ts) importFrom(stats,complete.cases) importFrom(stats,cycle) importFrom(stats,decompose) importFrom(stats,diffinv) importFrom(stats,end) importFrom(stats,extractAIC) importFrom(stats,filter) importFrom(stats,fitted) importFrom(stats,formula) importFrom(stats,frequency) importFrom(stats,hatvalues) importFrom(stats,is.mts) importFrom(stats,is.ts) importFrom(stats,ksmooth) importFrom(stats,lm) importFrom(stats,loess) importFrom(stats,logLik) importFrom(stats,lsfit) importFrom(stats,median) importFrom(stats,model.frame) importFrom(stats,na.contiguous) importFrom(stats,na.exclude) importFrom(stats,na.omit) importFrom(stats,na.pass) importFrom(stats,napredict) 
importFrom(stats,nobs) importFrom(stats,optim) importFrom(stats,optimize) importFrom(stats,pf) importFrom(stats,plot.ts) importFrom(stats,poly) importFrom(stats,predict) importFrom(stats,pt) importFrom(stats,qnorm) importFrom(stats,qt) importFrom(stats,quantile) importFrom(stats,reformulate) importFrom(stats,residuals) importFrom(stats,rnorm) importFrom(stats,runif) importFrom(stats,sd) importFrom(stats,simulate) importFrom(stats,smooth.spline) importFrom(stats,spec.ar) importFrom(stats,start) importFrom(stats,stl) importFrom(stats,supsmu) importFrom(stats,terms) importFrom(stats,time) importFrom(stats,ts) importFrom(stats,tsdiag) importFrom(stats,tsp) importFrom(stats,var) importFrom(stats,window) importFrom(timeDate,Easter) importFrom(timeDate,as.Date.timeDate) importFrom(timeDate,as.timeDate) importFrom(timeDate,difftimeDate) importFrom(timeDate,isBizday) importFrom(tseries,adf.test) importFrom(tseries,kpss.test) importFrom(tseries,pp.test) importFrom(urca,ur.df) importFrom(urca,ur.kpss) importFrom(urca,ur.pp) importFrom(utils,head) importFrom(utils,head.matrix) importFrom(utils,methods) importFrom(utils,packageVersion) importFrom(utils,tail) importFrom(utils,tail.matrix) importFrom(zoo,as.Date) importFrom(zoo,as.yearqtr) importFrom(zoo,rollmean) useDynLib(forecast, .registration = TRUE) forecast/README.md0000644000176200001440000000544413553673010013351 0ustar liggesusers
forecast
======================

[![Travis-CI Build Status](https://travis-ci.org/robjhyndman/forecast.svg?branch=master)](https://travis-ci.org/robjhyndman/forecast)
[![CRAN_Status_Badge](https://www.r-pkg.org/badges/version/forecast)](https://cran.r-project.org/package=forecast)
[![cran checks](https://cranchecks.info/badges/worst/forecast)](https://cran.r-project.org/web/checks/check_results_forecast.html)
[![Lifecycle: retired](https://img.shields.io/badge/lifecycle-retired-orange.svg)](https://www.tidyverse.org/lifecycle/#retired)
[![Downloads](https://cranlogs.r-pkg.org/badges/forecast)](https://cran.r-project.org/package=forecast)
[![Licence](https://img.shields.io/badge/licence-GPL--3-blue.svg)](https://www.gnu.org/licenses/gpl-3.0.en.html)

The R package *forecast* provides methods and tools for displaying and analysing univariate time series forecasts, including exponential smoothing via state space models and automatic ARIMA modelling.

This package is now retired in favour of the [fable](http://fable.tidyverts.org/) package. The forecast package will remain in its current state and will be maintained with bug fixes only. For the latest features and development, we recommend forecasting with the fable package.

## Installation

You can install the **stable** version from [CRAN](https://cran.r-project.org/package=forecast).
```s
install.packages('forecast', dependencies = TRUE)
```

You can install the **development** version from [Github](https://github.com/robjhyndman/forecast)

```s
# install.packages("remotes")
remotes::install_github("robjhyndman/forecast")
```

## Usage

```s
library(forecast)
library(ggplot2)

# ETS forecasts
USAccDeaths %>% ets() %>% forecast() %>% autoplot()

# Automatic ARIMA forecasts
WWWusage %>% auto.arima() %>% forecast(h=20) %>% autoplot()

# ARFIMA forecasts
library(fracdiff)
x <- fracdiff.sim( 100, ma=-.4, d=.3)$series
arfima(x) %>% forecast(h=30) %>% autoplot()

# Forecasting with STL
USAccDeaths %>% stlm(modelfunction=ar) %>% forecast(h=36) %>% autoplot()
AirPassengers %>% stlf(lambda=0) %>% autoplot()
USAccDeaths %>% stl(s.window='periodic') %>% forecast() %>% autoplot()

# TBATS forecasts
USAccDeaths %>% tbats() %>% forecast() %>% autoplot()
taylor %>% tbats() %>% forecast() %>% autoplot()
```

## For more information

* Get started in forecasting with the online textbook at http://OTexts.org/fpp2/
* Read the Hyndsight blog at https://robjhyndman.com/hyndsight/
* Ask forecasting questions on http://stats.stackexchange.com/tags/forecasting
* Ask R questions on http://stackoverflow.com/tags/forecasting+r
* Join the International Institute of Forecasters: http://forecasters.org/

## License

This package is free and open source software, licensed under GPL-3.

forecast/data/gold.rda [binary .rda payload omitted]
forecast/data/woolyrnq.rda [binary .rda payload omitted]
forecast/data/taylor.rda [binary .rda payload omitted]
forecast/data/gas.rda [binary .rda payload omitted]
forecast/data/wineind.rda [binary .rda payload omitted]
forecast/man/0000755000176200001440000000000013617634745012653 5ustar liggesusersforecast/man/forecast.lm.Rd0000644000176200001440000000662313566112560015353 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/lm.R \name{forecast.lm} \alias{forecast.lm} \title{Forecast a linear model with possible time series components} \usage{ \method{forecast}{lm}( object, newdata, h = 10, level = c(80, 95), fan = FALSE, lambda = object$lambda, biasadj = NULL, ts = TRUE, ...
) } \arguments{ \item{object}{Object of class "lm", usually the result of a call to \code{\link[stats]{lm}} or \code{\link{tslm}}.} \item{newdata}{An optional data frame in which to look for variables with which to predict. If omitted, it is assumed that the only variables are trend and season, and \code{h} forecasts are produced.} \item{h}{Number of periods for forecasting. Ignored if \code{newdata} present.} \item{level}{Confidence level for prediction intervals.} \item{fan}{If \code{TRUE}, level is set to seq(51,99,by=3). This is suitable for fan plots.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{ts}{If \code{TRUE}, the forecasts will be treated as time series provided the original data is a time series; the \code{newdata} will be interpreted as related to the subsequent time periods. If \code{FALSE}, any time series attributes of the original data will be ignored.} \item{...}{Other arguments passed to \code{\link[stats]{predict.lm}()}.} } \value{ An object of class "\code{forecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{forecast.lm}. An object of class \code{"forecast"} is a list containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The historical data for the response variable.} \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} \item{fitted}{Fitted values} } \description{ \code{forecast.lm} is used to predict linear models, especially those involving trend and seasonality components. } \details{ \code{forecast.lm} is largely a wrapper for \code{\link[stats]{predict.lm}()} except that it allows variables "trend" and "season" which are created on the fly from the time series characteristics of the data. Also, the output is reformatted into a \code{forecast} object. } \examples{ y <- ts(rnorm(120,0,3) + 1:120 + 20*sin(2*pi*(1:120)/12), frequency=12) fit <- tslm(y ~ trend + season) plot(forecast(fit, h=20)) } \seealso{ \code{\link{tslm}}, \code{\link[stats]{lm}}. } \author{ Rob J Hyndman } \keyword{stats} forecast/man/ggmonthplot.Rd0000644000176200001440000000222613566112560015473 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ggplot.R \name{ggmonthplot} \alias{ggmonthplot} \alias{ggsubseriesplot} \title{Create a seasonal subseries ggplot} \usage{ ggmonthplot(x, labels = NULL, times = time(x), phase = cycle(x), ...) ggsubseriesplot(x, labels = NULL, times = time(x), phase = cycle(x), ...) 
} \arguments{ \item{x}{a time series object (type \code{ts}).} \item{labels}{A vector of labels to use for each 'season'} \item{times}{A vector of times for each observation} \item{phase}{A vector of seasonal components} \item{\dots}{Not used (for consistency with monthplot)} } \value{ Returns an object of class \code{ggplot}. } \description{ Plots a subseries plot using ggplot. Each season is plotted as a separate mini time series. The blue lines represent the mean of the observations within each season. } \details{ The \code{ggmonthplot} function is simply a wrapper for \code{ggsubseriesplot} as a convenience for users familiar with \code{\link[stats]{monthplot}}. } \examples{ ggsubseriesplot(AirPassengers) ggsubseriesplot(woolyrnq) } \seealso{ \code{\link[stats]{monthplot}} } \author{ Mitchell O'Hara-Wild } forecast/man/bizdays.Rd0000644000176200001440000000150513566112560014575 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/calendar.R \name{bizdays} \alias{bizdays} \title{Number of trading days in each season} \usage{ bizdays(x, FinCenter = c("New York", "London", "NERC", "Tokyo", "Zurich")) } \arguments{ \item{x}{Monthly or quarterly time series} \item{FinCenter}{Major financial center.} } \value{ Time series } \description{ Returns number of trading days in each month or quarter of the observed time period in a major financial center. } \details{ Useful for trading days length adjustments. For more details on how to define "business days", please refer to \code{\link[timeDate]{isBizday}}. } \examples{ x <- ts(rnorm(30), start = c(2013, 2), frequency = 12) bizdays(x, FinCenter = "New York") } \seealso{ \code{\link[forecast]{monthdays}} } \author{ Earo Wang } \keyword{ts} forecast/man/forecast.bats.Rd0000644000176200001440000000527513566112560015676 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/forecastBATS.R, R/forecastTBATS.R \name{forecast.bats} \alias{forecast.bats} \alias{forecast.tbats} \title{Forecasting using BATS and TBATS models} \usage{ \method{forecast}{bats}(object, h, level = c(80, 95), fan = FALSE, biasadj = NULL, ...) \method{forecast}{tbats}(object, h, level = c(80, 95), fan = FALSE, biasadj = NULL, ...) } \arguments{ \item{object}{An object of class "\code{bats}". Usually the result of a call to \code{\link{bats}}.} \item{h}{Number of periods for forecasting. Default value is twice the largest seasonal period (for seasonal data) or ten (for non-seasonal data).} \item{level}{Confidence level for prediction intervals.} \item{fan}{If TRUE, level is set to \code{seq(51,99,by=3)}. This is suitable for fan plots.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If TRUE, point forecasts and fitted values are mean forecasts. Otherwise, these points can be considered the median of the forecast densities.} \item{...}{Other arguments, currently ignored.} } \value{ An object of class "\code{forecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{forecast.bats}.
An object of class \code{"forecast"} is a list containing at least the following elements: \item{model}{A copy of the \code{bats} object} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{residuals}{Residuals from the fitted model.} \item{fitted}{Fitted values (one-step forecasts)} } \description{ Forecasts \code{h} steps ahead with a BATS model. Prediction intervals are also produced. } \examples{ \dontrun{ fit <- bats(USAccDeaths) plot(forecast(fit)) taylor.fit <- bats(taylor) plot(forecast(taylor.fit)) } } \references{ De Livera, A.M., Hyndman, R.J., & Snyder, R. D. (2011), Forecasting time series with complex seasonal patterns using exponential smoothing, \emph{Journal of the American Statistical Association}, \bold{106}(496), 1513-1527. } \seealso{ \code{\link{bats}}, \code{\link{tbats}},\code{\link{forecast.ets}}. } \author{ Slava Razbash and Rob J Hyndman } \keyword{ts} forecast/man/forecast.HoltWinters.Rd0000644000176200001440000000557413566112560017231 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/forecast2.R \name{forecast.HoltWinters} \alias{forecast.HoltWinters} \title{Forecasting using Holt-Winters objects} \usage{ \method{forecast}{HoltWinters}( object, h = ifelse(frequency(object$x) > 1, 2 * frequency(object$x), 10), level = c(80, 95), fan = FALSE, lambda = NULL, biasadj = NULL, ... ) } \arguments{ \item{object}{An object of class "\code{HoltWinters}". Usually the result of a call to \code{\link[stats]{HoltWinters}}.} \item{h}{Number of periods for forecasting} \item{level}{Confidence level for prediction intervals.} \item{fan}{If TRUE, level is set to seq(51,99,by=3). This is suitable for fan plots.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{...}{Other arguments.} } \value{ An object of class "\code{forecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{forecast.HoltWinters}. 
An object of class \code{"forecast"} is a list containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{residuals}{Residuals from the fitted model.} \item{fitted}{Fitted values (one-step forecasts)} } \description{ Returns forecasts and other information for univariate Holt-Winters time series models. } \details{ This function calls \code{\link[stats]{predict.HoltWinters}} and constructs an object of class "\code{forecast}" from the results. It is included for completeness, but the \code{\link{ets}} is recommended for use instead of \code{\link[stats]{HoltWinters}}. } \examples{ fit <- HoltWinters(WWWusage,gamma=FALSE) plot(forecast(fit)) } \seealso{ \code{\link[stats]{predict.HoltWinters}}, \code{\link[stats]{HoltWinters}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/bats.Rd0000644000176200001440000000713113566112560014062 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/bats.R \name{bats} \alias{bats} \alias{as.character.bats} \alias{print.bats} \title{BATS model (Exponential smoothing state space model with Box-Cox transformation, ARMA errors, Trend and Seasonal components)} \usage{ bats( y, use.box.cox = NULL, use.trend = NULL, use.damped.trend = NULL, seasonal.periods = NULL, use.arma.errors = TRUE, use.parallel = length(y) > 1000, num.cores = 2, bc.lower = 0, bc.upper = 1, biasadj = FALSE, model = NULL, ... ) } \arguments{ \item{y}{The time series to be forecast. Can be \code{numeric}, \code{msts} or \code{ts}. Only univariate time series are supported.} \item{use.box.cox}{\code{TRUE/FALSE} indicates whether to use the Box-Cox transformation or not. If \code{NULL} then both are tried and the best fit is selected by AIC.} \item{use.trend}{\code{TRUE/FALSE} indicates whether to include a trend or not. If \code{NULL} then both are tried and the best fit is selected by AIC.} \item{use.damped.trend}{\code{TRUE/FALSE} indicates whether to include a damping parameter in the trend or not. If \code{NULL} then both are tried and the best fit is selected by AIC.} \item{seasonal.periods}{If \code{y} is a numeric then seasonal periods can be specified with this parameter.} \item{use.arma.errors}{\code{TRUE/FALSE} indicates whether to include ARMA errors or not. If \code{TRUE} the best fit is selected by AIC. If \code{FALSE} then the selection algorithm does not consider ARMA errors.} \item{use.parallel}{\code{TRUE/FALSE} indicates whether or not to use parallel processing.} \item{num.cores}{The number of parallel processes to be used if using parallel processing. If \code{NULL} then the number of logical cores is detected and all available cores are used.} \item{bc.lower}{The lower limit (inclusive) for the Box-Cox transformation.} \item{bc.upper}{The upper limit (inclusive) for the Box-Cox transformation.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If TRUE, point forecasts and fitted values are mean forecast. 
Otherwise, these points can be considered the median of the forecast densities.} \item{model}{Output from a previous call to \code{bats}. If model is passed, this same model is fitted to \code{y} without re-estimating any parameters.} \item{...}{Additional arguments to be passed to \code{auto.arima} when choosing an ARMA(p, q) model for the errors. (Note that xreg will be ignored, as will any arguments concerning seasonality and differencing, but arguments controlling the values of p and q will be used.)} } \value{ An object of class "\code{bats}". The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{bats} and associated functions. The fitted model is designated BATS(omega, p,q, phi, m1,...mJ) where omega is the Box-Cox parameter and phi is the damping parameter; the error is modelled as an ARMA(p,q) process and m1,...,mJ list the seasonal periods used in the model. } \description{ Fits a BATS model applied to \code{y}, as described in De Livera, Hyndman & Snyder (2011). Parallel processing is used by default to speed up the computations. } \examples{ \dontrun{ fit <- bats(USAccDeaths) plot(forecast(fit)) taylor.fit <- bats(taylor) plot(forecast(taylor.fit))} } \references{ De Livera, A.M., Hyndman, R.J., & Snyder, R. D. (2011), Forecasting time series with complex seasonal patterns using exponential smoothing, \emph{Journal of the American Statistical Association}, \bold{106}(496), 1513-1527. } \author{ Slava Razbash and Rob J Hyndman } \keyword{ts} forecast/man/easter.Rd0000644000176200001440000000137113553673010014412 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/calendar.R \name{easter} \alias{easter} \title{Easter holidays in each season} \usage{ easter(x, easter.mon = FALSE) } \arguments{ \item{x}{Monthly or quarterly time series} \item{easter.mon}{If TRUE, the length of Easter holidays includes Easter Monday.} } \value{ Time series } \description{ Returns a vector of 0's and 1's or fractional results if Easter spans March and April in the observed time period. Easter is defined as the days from Good Friday to Easter Sunday inclusively, plus optionally Easter Monday if \code{easter.mon=TRUE}. } \details{ Useful for adjusting calendar effects. } \examples{ easter(wineind, easter.mon = TRUE) } \author{ Earo Wang } \keyword{ts} forecast/man/woolyrnq.Rd0000644000176200001440000000072313553673010015021 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/data.R \docType{data} \name{woolyrnq} \alias{woolyrnq} \title{Quarterly production of woollen yarn in Australia} \format{Time series data} \source{ Time Series Data Library. \url{https://pkg.yangzhuoranyang.com/tsdl/} } \usage{ woolyrnq } \description{ Quarterly production of woollen yarn in Australia: tonnes. Mar 1965 -- Sep 1994. } \examples{ tsdisplay(woolyrnq) } \keyword{datasets} forecast/man/plot.bats.Rd0000644000176200001440000000244713553673010015042 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/bats.R, R/ggplot.R, R/tbats.R \name{plot.bats} \alias{plot.bats} \alias{autoplot.tbats} \alias{autoplot.bats} \alias{plot.tbats} \title{Plot components from BATS model} \usage{ \method{plot}{bats}(x, main = "Decomposition by BATS model", ...) \method{autoplot}{tbats}(object, range.bars = FALSE, ...) \method{autoplot}{bats}(object, range.bars = FALSE, ...) \method{plot}{tbats}(x, main = "Decomposition by TBATS model", ...)
} \arguments{ \item{x}{Object of class \dQuote{bats/tbats}.} \item{main}{Main title for plot.} \item{...}{Other plotting parameters passed to \code{\link[graphics]{par}}.} \item{object}{Object of class \dQuote{bats/tbats}.} \item{range.bars}{Logical indicating if each plot should have a bar at its right side representing relative size. If NULL, automatic selection takes place.} } \value{ None. Function produces a plot } \description{ Produces a plot of the level, slope and seasonal components from a BATS or TBATS model. The plotted components are Box-Cox transformed using the estimated transformation parameter. } \examples{ \dontrun{ fit <- tbats(USAccDeaths) plot(fit) autoplot(fit, range.bars = TRUE)} } \seealso{ \code{\link{bats}},\code{\link{tbats}} } \author{ Rob J Hyndman } \keyword{hplot} forecast/man/na.interp.Rd0000644000176200001440000000217513566112560015032 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/clean.R \name{na.interp} \alias{na.interp} \title{Interpolate missing values in a time series} \usage{ na.interp( x, lambda = NULL, linear = (frequency(x) <= 1 | sum(!is.na(x)) <= 2 * frequency(x)) ) } \arguments{ \item{x}{time series} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{linear}{Should a linear interpolation be used.} } \value{ Time series } \description{ By default, uses linear interpolation for non-seasonal series. For seasonal series, a robust STL decomposition is first computed. Then a linear interpolation is applied to the seasonally adjusted data, and the seasonal component is added back. } \details{ A more general and flexible approach is available using \code{na.approx} in the \code{zoo} package. } \examples{ data(gold) plot(na.interp(gold)) } \seealso{ \code{\link[forecast]{tsoutliers}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/is.forecast.Rd0000644000176200001440000000066313553673010015352 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/forecast.R, R/mforecast.R, R/spline.R \name{is.forecast} \alias{is.forecast} \alias{is.mforecast} \alias{is.splineforecast} \title{Is an object a particular forecast type?} \usage{ is.forecast(x) is.mforecast(x) is.splineforecast(x) } \arguments{ \item{x}{object to be tested} } \description{ Returns true if the forecast object is of a particular type } forecast/man/tsoutliers.Rd0000644000176200001440000000201413553673010015342 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/clean.R \name{tsoutliers} \alias{tsoutliers} \title{Identify and replace outliers in a time series} \usage{ tsoutliers(x, iterate = 2, lambda = NULL) } \arguments{ \item{x}{time series} \item{iterate}{the number of iterations (used only for non-seasonal series)} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} } \value{ \item{index}{Indicating the index of outlier(s)} \item{replacement}{Suggested numeric values to replace identified outliers} } \description{ Uses supsmu for non-seasonal series and a periodic stl decomposition for seasonal series to identify outliers and estimate their replacements.
} \examples{ data(gold) tsoutliers(gold) } \seealso{ \code{\link[forecast]{na.interp}}, \code{\link[forecast]{tsclean}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/simulate.ets.Rd0000644000176200001440000000760413566112560015553 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/simulate.R \name{simulate.ets} \alias{simulate.ets} \alias{simulate.Arima} \alias{simulate.ar} \alias{simulate.lagwalk} \alias{simulate.fracdiff} \alias{simulate.nnetar} \alias{simulate.modelAR} \title{Simulation from a time series model} \usage{ \method{simulate}{ets}( object, nsim = length(object$x), seed = NULL, future = TRUE, bootstrap = FALSE, innov = NULL, ... ) \method{simulate}{Arima}( object, nsim = length(object$x), seed = NULL, xreg = NULL, future = TRUE, bootstrap = FALSE, innov = NULL, lambda = object$lambda, ... ) \method{simulate}{ar}( object, nsim = object$n.used, seed = NULL, future = TRUE, bootstrap = FALSE, innov = NULL, ... ) \method{simulate}{lagwalk}( object, nsim = length(object$x), seed = NULL, future = TRUE, bootstrap = FALSE, innov = NULL, lambda = object$lambda, ... ) \method{simulate}{fracdiff}( object, nsim = object$n, seed = NULL, future = TRUE, bootstrap = FALSE, innov = NULL, ... ) \method{simulate}{nnetar}( object, nsim = length(object$x), seed = NULL, xreg = NULL, future = TRUE, bootstrap = FALSE, innov = NULL, lambda = object$lambda, ... ) \method{simulate}{modelAR}( object, nsim = length(object$x), seed = NULL, xreg = NULL, future = TRUE, bootstrap = FALSE, innov = NULL, lambda = object$lambda, ... ) } \arguments{ \item{object}{An object of class "\code{ets}", "\code{Arima}", "\code{ar}" or "\code{nnetar}".} \item{nsim}{Number of periods for the simulated series. Ignored if either \code{xreg} or \code{innov} is not \code{NULL}.} \item{seed}{Either \code{NULL} or an integer that will be used in a call to \code{\link[base]{set.seed}} before simulating the time series. The default, \code{NULL}, will not change the random generator state.} \item{future}{Produce sample paths that are future to and conditional on the data in \code{object}. Otherwise simulate unconditionally.} \item{bootstrap}{Do simulation using resampled errors rather than normally distributed errors or errors provided as \code{innov}.} \item{innov}{A vector of innovations to use as the error series. Ignored if \code{bootstrap==TRUE}. If not \code{NULL}, the value of \code{nsim} is set to the length of \code{innov}.} \item{...}{Other arguments, not currently used.} \item{xreg}{New values of \code{xreg} to be used for forecasting. The value of \code{nsim} is set to the number of rows of \code{xreg} if it is not \code{NULL}.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} } \value{ An object of class "\code{ts}". } \description{ Returns a time series based on the model object \code{object}. } \details{ With \code{simulate.Arima()}, the \code{object} should be produced by \code{\link{Arima}} or \code{\link{auto.arima}}, rather than \code{\link[stats]{arima}}. By default, the error series is assumed normally distributed and generated using \code{\link[stats]{rnorm}}. If \code{innov} is present, it is used instead. If \code{bootstrap=TRUE} and \code{innov=NULL}, the residuals are resampled instead. When \code{future=TRUE}, the sample paths are conditional on the data.
When \code{future=FALSE} and the model is stationary, the sample paths do not depend on the data at all. When \code{future=FALSE} and the model is non-stationary, the location of the sample paths is arbitrary, so they all start at the value of the first observation. } \examples{ fit <- ets(USAccDeaths) plot(USAccDeaths, xlim=c(1973,1982)) lines(simulate(fit, 36), col="red") } \seealso{ \code{\link{ets}}, \code{\link{Arima}}, \code{\link{auto.arima}}, \code{\link{ar}}, \code{\link{arfima}}, \code{\link{nnetar}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/wineind.Rd0000644000176200001440000000070013553673010014557 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/data.R \docType{data} \name{wineind} \alias{wineind} \title{Australian total wine sales} \format{Time series data} \source{ Time Series Data Library. \url{https://pkg.yangzhuoranyang.com/tsdl/} } \usage{ wineind } \description{ Australian total wine sales by wine makers in bottles <= 1 litre. Jan 1980 -- Aug 1994. } \examples{ tsdisplay(wineind) } \keyword{datasets} forecast/man/autoplot.ts.Rd0000644000176200001440000000422013566112560015421 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ggplot.R \name{autolayer.mts} \alias{autolayer.mts} \alias{autolayer.msts} \alias{autolayer.ts} \alias{autoplot.ts} \alias{autoplot.mts} \alias{autoplot.msts} \alias{fortify.ts} \title{Automatically create a ggplot for time series objects} \usage{ \method{autolayer}{mts}(object, colour = TRUE, series = NULL, ...) \method{autolayer}{msts}(object, series = NULL, ...) \method{autolayer}{ts}(object, colour = TRUE, series = NULL, ...) \method{autoplot}{ts}( object, series = NULL, xlab = "Time", ylab = deparse(substitute(object)), main = NULL, ... ) \method{autoplot}{mts}( object, colour = TRUE, facets = FALSE, xlab = "Time", ylab = deparse(substitute(object)), main = NULL, ... ) \method{autoplot}{msts}(object, ...) \method{fortify}{ts}(model, data, ...) } \arguments{ \item{object}{Object of class \dQuote{\code{ts}} or \dQuote{\code{mts}}.} \item{colour}{If TRUE, the time series will be assigned a colour aesthetic} \item{series}{Identifies the timeseries with a colour, which integrates well with the functionality of \link{geom_forecast}.} \item{...}{Other plotting parameters to affect the plot.} \item{xlab}{X-axis label.} \item{ylab}{Y-axis label.} \item{main}{Main title.} \item{facets}{If TRUE, multiple time series will be faceted (and unless specified, colour is set to FALSE). If FALSE, each series will be assigned a colour.} \item{model}{Object of class \dQuote{\code{ts}} to be converted to \dQuote{\code{data.frame}}.} \item{data}{Not used (required for \link{fortify} method)} } \value{ None. Function produces a ggplot graph. } \description{ \code{autoplot} takes an object of type \code{ts} or \code{mts} and creates a ggplot object suitable for usage with \code{stat_forecast}. } \details{ \code{fortify.ts} takes a \code{ts} object and converts it into a data frame (for usage with ggplot2). 
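For example, a minimal sketch (assuming both \code{forecast} and \code{ggplot2} are loaded, so that \code{fortify} dispatches to the \code{fortify.ts} method registered by this package): \preformatted{
library(forecast)
library(ggplot2)
# Convert a "ts" object to a data frame and inspect the first rows
head(fortify(USAccDeaths))
}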
} \examples{ library(ggplot2) autoplot(USAccDeaths) lungDeaths <- cbind(mdeaths, fdeaths) autoplot(lungDeaths) autoplot(lungDeaths, facets=TRUE) } \seealso{ \code{\link[stats]{plot.ts}}, \code{\link[ggplot2]{fortify}} } \author{ Mitchell O'Hara-Wild } forecast/man/plot.mforecast.Rd0000644000176200001440000000423413566112560016072 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ggplot.R, R/mforecast.R \name{autoplot.mforecast} \alias{autoplot.mforecast} \alias{autolayer.mforecast} \alias{plot.mforecast} \title{Multivariate forecast plot} \usage{ \method{autoplot}{mforecast}(object, PI = TRUE, facets = TRUE, colour = FALSE, ...) \method{autolayer}{mforecast}(object, series = NULL, PI = TRUE, ...) \method{plot}{mforecast}(x, main = paste("Forecasts from", unique(x$method)), xlab = "time", ...) } \arguments{ \item{object}{Multivariate forecast object of class \code{mforecast}. Used for ggplot graphics (S3 method consistency).} \item{PI}{If \code{FALSE}, confidence intervals will not be plotted, giving only the forecast line.} \item{facets}{If TRUE, multiple time series will be faceted. If FALSE, each series will be assigned a colour.} \item{colour}{If TRUE, the time series will be assigned a colour aesthetic} \item{\dots}{additional arguments to each individual \code{plot}.} \item{series}{Matches an unidentified forecast layer with a coloured object on the plot.} \item{x}{Multivariate forecast object of class \code{mforecast}.} \item{main}{Main title. Default is the forecast method. For autoplot, specify a vector of titles for each plot.} \item{xlab}{X-axis label. For autoplot, specify a vector of labels for each plot.} } \description{ Plots historical data with multivariate forecasts and prediction intervals. } \details{ \code{autoplot} will produce an equivalent plot as a ggplot object. } \examples{ library(ggplot2) lungDeaths <- cbind(mdeaths, fdeaths) fit <- tslm(lungDeaths ~ trend + season) fcast <- forecast(fit, h=10) plot(fcast) autoplot(fcast) carPower <- as.matrix(mtcars[,c("qsec","hp")]) carmpg <- mtcars[,"mpg"] fit <- lm(carPower ~ carmpg) fcast <- forecast(fit, newdata=data.frame(carmpg=30)) plot(fcast, xlab="Year") autoplot(fcast, xlab=rep("Year",2)) } \references{ Hyndman and Athanasopoulos (2018) \emph{Forecasting: principles and practice}, 2nd edition, OTexts: Melbourne, Australia. \url{https://OTexts.org/fpp2/} } \seealso{ \code{\link[forecast]{plot.forecast}}, \code{\link[stats]{plot.ts}} } \author{ Mitchell O'Hara-Wild } \keyword{ts} forecast/man/splinef.Rd0000644000176200001440000000672413566112560014600 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spline.R \name{splinef} \alias{splinef} \title{Cubic Spline Forecast} \usage{ splinef( y, h = 10, level = c(80, 95), fan = FALSE, lambda = NULL, biasadj = FALSE, method = c("gcv", "mle"), x = y ) } \arguments{ \item{y}{a numeric vector or time series of class \code{ts}} \item{h}{Number of periods for forecasting} \item{level}{Confidence level for prediction intervals.} \item{fan}{If TRUE, level is set to seq(51,99,by=3). This is suitable for fan plots.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. 
If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{method}{Method for selecting the smoothing parameter. If \code{method="gcv"}, the generalized cross-validation method from \code{\link[stats]{smooth.spline}} is used. If \code{method="mle"}, the maximum likelihood method from Hyndman et al. (2005) is used.} \item{x}{Deprecated. Included for backwards compatibility.} } \value{ An object of class "\code{forecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{splinef}. An object of class \code{"forecast"} containing the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{onestepf}{One-step forecasts from the fitted model.} \item{fitted}{Smooth estimates of the fitted trend using all data.} \item{residuals}{Residuals from the fitted model. That is x minus one-step forecasts.} } \description{ Returns local linear forecasts and prediction intervals using cubic smoothing splines. } \details{ The cubic smoothing spline model is equivalent to an ARIMA(0,2,2) model but with a restricted parameter space. The advantage of the spline model over the full ARIMA model is that it provides a smooth historical trend as well as a linear forecast function. Hyndman, King, Pitrun, and Billah (2005) show that the forecast performance of the method is hardly affected by the restricted parameter space. } \examples{ fcast <- splinef(uspop,h=5) plot(fcast) summary(fcast) } \references{ Hyndman, King, Pitrun and Billah (2005) Local linear forecasts using cubic smoothing splines. \emph{Australian and New Zealand Journal of Statistics}, \bold{47}(1), 87-99. \url{https://robjhyndman.com/publications/splinefcast/}. } \seealso{ \code{\link[stats]{smooth.spline}}, \code{\link[stats]{arima}}, \code{\link{holt}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/forecast.StructTS.Rd0000644000176200001440000000531513566112560016473 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/forecast2.R \name{forecast.StructTS} \alias{forecast.StructTS} \title{Forecasting using Structural Time Series models} \usage{ \method{forecast}{StructTS}( object, h = ifelse(object$coef["epsilon"] > 1e-10, 2 * object$xtsp[3], 10), level = c(80, 95), fan = FALSE, lambda = NULL, biasadj = NULL, ... ) } \arguments{ \item{object}{An object of class "\code{StructTS}". Usually the result of a call to \code{\link[stats]{StructTS}}.} \item{h}{Number of periods for forecasting} \item{level}{Confidence level for prediction intervals.} \item{fan}{If TRUE, level is set to seq(51,99,by=3). This is suitable for fan plots.} \item{lambda}{Box-Cox transformation parameter.
If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{...}{Other arguments.} } \value{ An object of class "\code{forecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{forecast.StructTS}. An object of class \code{"forecast"} is a list containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} \item{fitted}{Fitted values (one-step forecasts)} } \description{ Returns forecasts and other information for univariate structural time series models. } \details{ This function calls \code{predict.StructTS} and constructs an object of class "\code{forecast}" from the results. } \examples{ fit <- StructTS(WWWusage,"level") plot(forecast(fit)) } \seealso{ \code{\link[stats]{StructTS}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/tsCV.Rd0000644000176200001440000000435513566112560014015 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/tscv.R \name{tsCV} \alias{tsCV} \title{Time series cross-validation} \usage{ tsCV(y, forecastfunction, h = 1, window = NULL, xreg = NULL, initial = 0, ...) } \arguments{ \item{y}{Univariate time series} \item{forecastfunction}{Function to return an object of class \code{forecast}. Its first argument must be a univariate time series, and it must have an argument \code{h} for the forecast horizon.} \item{h}{Forecast horizon} \item{window}{Length of the rolling window, if NULL, a rolling window will not be used.} \item{xreg}{Exogeneous predictor variables passed to the forecast function if required.} \item{initial}{Initial period of the time series where no cross-validation is performed.} \item{...}{Other arguments are passed to \code{forecastfunction}.} } \value{ Numerical time series object containing the forecast errors as a vector (if h=1) and a matrix otherwise. The time index corresponds to the last period of the training data. The columns correspond to the forecast horizons. } \description{ \code{tsCV} computes the forecast errors obtained by applying \code{forecastfunction} to subsets of the time series \code{y} using a rolling forecast origin. } \details{ Let \code{y} contain the time series \eqn{y_1,\dots,y_T}{y[1:T]}. 
Then \code{forecastfunction} is applied successively to the time series \eqn{y_1,\dots,y_t}{y[1:t]}, for \eqn{t=1,\dots,T-h}, making predictions \eqn{\hat{y}_{t+h|t}}{f[t+h]}. The errors are given by \eqn{e_{t+h} = y_{t+h}-\hat{y}_{t+h|t}}{e[t+h] = y[t+h]-f[t+h]}. If h=1, these are returned as a vector, \eqn{e_1,\dots,e_T}{e[1:T]}. For h>1, they are returned as a matrix with the hth column containing errors for forecast horizon h. The first few errors may be missing as it may not be possible to apply \code{forecastfunction} to very short time series. } \examples{ #Fit an AR(2) model to each rolling origin subset far2 <- function(x, h){forecast(Arima(x, order=c(2,0,0)), h=h)} e <- tsCV(lynx, far2, h=1) #Fit the same model with a rolling window of length 30 e <- tsCV(lynx, far2, h=1, window=30) } \seealso{ \link{CV}, \link{CVar}, \link{residuals.Arima}, \url{https://robjhyndman.com/hyndsight/tscv/}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/plot.Arima.Rd0000644000176200001440000000316013566112560015135 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/armaroots.R, R/ggplot.R \name{plot.Arima} \alias{plot.Arima} \alias{plot.ar} \alias{autoplot.Arima} \alias{autoplot.ar} \title{Plot characteristic roots from ARIMA model} \usage{ \method{plot}{Arima}( x, type = c("both", "ar", "ma"), main, xlab = "Real", ylab = "Imaginary", ... ) \method{plot}{ar}(x, main, xlab = "Real", ylab = "Imaginary", ...) \method{autoplot}{Arima}(object, type = c("both", "ar", "ma"), ...) \method{autoplot}{ar}(object, ...) } \arguments{ \item{x}{Object of class \dQuote{Arima} or \dQuote{ar}.} \item{type}{Determines if both AR and MA roots are plotted, or if just one set is plotted.} \item{main}{Main title. Default is "Inverse AR roots" or "Inverse MA roots".} \item{xlab}{X-axis label.} \item{ylab}{Y-axis label.} \item{...}{Other plotting parameters passed to \code{\link[graphics]{par}}.} \item{object}{Object of class \dQuote{Arima} or \dQuote{ar}. Used for ggplot graphics (S3 method consistency).} } \value{ None. Function produces a plot } \description{ Produces a plot of the inverse AR and MA roots of an ARIMA model. Inverse roots outside the unit circle are shown in red. } \details{ \code{autoplot} will produce an equivalent plot as a ggplot object. } \examples{ library(ggplot2) fit <- Arima(WWWusage, order=c(3,1,0)) plot(fit) autoplot(fit) fit <- Arima(woolyrnq,order=c(2,0,0),seasonal=c(2,1,1)) plot(fit) autoplot(fit) plot(ar.ols(gold[1:61])) autoplot(ar.ols(gold[1:61])) } \seealso{ \code{\link{Arima}}, \code{\link[stats]{ar}} } \author{ Rob J Hyndman & Mitchell O'Hara-Wild } \keyword{hplot} forecast/man/fourier.Rd0000644000176200001440000000442313553673010014603 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/season.R \name{fourier} \alias{fourier} \alias{fourierf} \title{Fourier terms for modelling seasonality} \usage{ fourier(x, K, h = NULL) fourierf(x, K, h) } \arguments{ \item{x}{Seasonal time series: a \code{ts} or a \code{msts} object} \item{K}{Maximum order(s) of Fourier terms} \item{h}{Number of periods ahead to forecast (optional)} } \value{ Numerical matrix. } \description{ \code{fourier} returns a matrix containing terms from a Fourier series, up to order \code{K}, suitable for use in \code{\link{Arima}}, \code{\link{auto.arima}}, or \code{\link{tslm}}. } \details{ \code{fourierf} is deprecated; instead use the \code{h} argument in \code{fourier}.
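As a minimal sketch, the two calls below are intended to be equivalent, with the deprecated form replaced by \code{fourier} and its \code{h} argument: \preformatted{
# Deprecated form
fourierf(USAccDeaths, K = 2, h = 12)
# Preferred form, using the h argument of fourier()
fourier(USAccDeaths, K = 2, h = 12)
}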
The period of the Fourier terms is determined from the time series characteristics of \code{x}. When \code{h} is missing, the length of \code{x} also determines the number of rows for the matrix returned by \code{fourier}. Otherwise, the value of \code{h} determines the number of rows for the matrix returned by \code{fourier}, typically used for forecasting. The values within \code{x} are not used. Typical use would omit \code{h} when generating Fourier terms for training a model and include \code{h} when generating Fourier terms for forecasting. When \code{x} is a \code{ts} object, the value of \code{K} should be an integer and specifies the number of sine and cosine terms to return. Thus, the matrix returned has \code{2*K} columns. When \code{x} is a \code{msts} object, then \code{K} should be a vector of integers specifying the number of sine and cosine terms for each of the seasonal periods. Then the matrix returned will have \code{2*sum(K)} columns. } \examples{ library(ggplot2) # Using Fourier series for a "ts" object # K is chosen to minimize the AICc deaths.model <- auto.arima(USAccDeaths, xreg=fourier(USAccDeaths,K=5), seasonal=FALSE) deaths.fcast <- forecast(deaths.model, xreg=fourier(USAccDeaths, K=5, h=36)) autoplot(deaths.fcast) + xlab("Year") # Using Fourier series for a "msts" object taylor.lm <- tslm(taylor ~ fourier(taylor, K = c(3, 3))) taylor.fcast <- forecast(taylor.lm, data.frame(fourier(taylor, K = c(3, 3), h = 270))) autoplot(taylor.fcast) } \seealso{ \code{\link{seasonaldummy}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/getResponse.Rd0000644000176200001440000000261013553673010015422 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/getResponse.R \name{getResponse} \alias{getResponse} \alias{getResponse.default} \alias{getResponse.lm} \alias{getResponse.Arima} \alias{getResponse.fracdiff} \alias{getResponse.ar} \alias{getResponse.tbats} \alias{getResponse.bats} \alias{getResponse.mforecast} \alias{getResponse.baggedModel} \title{Get response variable from time series model.} \usage{ getResponse(object, ...) \method{getResponse}{default}(object, ...) \method{getResponse}{lm}(object, ...) \method{getResponse}{Arima}(object, ...) \method{getResponse}{fracdiff}(object, ...) \method{getResponse}{ar}(object, ...) \method{getResponse}{tbats}(object, ...) \method{getResponse}{bats}(object, ...) \method{getResponse}{mforecast}(object, ...) \method{getResponse}{baggedModel}(object, ...) } \arguments{ \item{object}{a time series model or forecast object.} \item{...}{Additional arguments that are ignored.} } \value{ A numerical vector or a time series object of class \code{ts}. } \description{ \code{getResponse} is a generic function for extracting the historical data from a time series model (including \code{Arima}, \code{ets}, \code{ar}, \code{fracdiff}), a linear model of class \code{lm}, or a forecast object. The function invokes particular \emph{methods} which depend on the class of the first argument. 
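A minimal usage sketch (illustrative only):
\preformatted{
fit <- auto.arima(WWWusage)
getResponse(fit)  # recovers the WWWusage series used to fit the model
}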
} \author{ Rob J Hyndman } \keyword{ts} forecast/man/ocsb.test.Rd0000644000176200001440000000342613553673010015036 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/unitRoot.R \name{ocsb.test} \alias{ocsb.test} \alias{print.OCSBtest} \title{Osborn, Chui, Smith, and Birchenhall Test for Seasonal Unit Roots} \usage{ ocsb.test(x, lag.method = c("fixed", "AIC", "BIC", "AICc"), maxlag = 0) } \arguments{ \item{x}{a univariate seasonal time series.} \item{lag.method}{a character specifying the lag order selection method.} \item{maxlag}{the maximum lag order to be considered by \code{lag.method}.} } \value{ ocsb.test returns a list of class "OCSBtest" with the following components: * statistics: the value of the test statistic. * pvalues: the p-values for each test statistic. * method: a character string describing the type of test. * data.name: a character string giving the name of the data. * fitted.model: the fitted regression model. } \description{ An implementation of the Osborn, Chui, Smith, and Birchenhall (OCSB) test. } \details{ The regression equation may include lags of the dependent variable. When lag.method = "fixed", the lag order is fixed to maxlag; otherwise, maxlag is the maximum number of lags considered in a lag selection procedure that minimises the lag.method criterion, which can be AIC, BIC, or corrected AIC (AICc), obtained as AIC + 2k(k+1)/(n-k-1), where k is the number of parameters and n is the number of available observations in the model. Critical values for the test are based on simulations, which have been smoothed to produce critical values for all seasonal periods. } \examples{ ocsb.test(AirPassengers) } \references{ Osborn DR, Chui APL, Smith J, and Birchenhall CR (1988) "Seasonality and the order of integration for consumption", \emph{Oxford Bulletin of Economics and Statistics} \bold{50}(4):361-377. } \seealso{ \code{\link{nsdiffs}} } forecast/man/dm.test.Rd0000644000176200001440000000513713566112560014513 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/DM2.R \name{dm.test} \alias{dm.test} \title{Diebold-Mariano test for predictive accuracy} \usage{ dm.test( e1, e2, alternative = c("two.sided", "less", "greater"), h = 1, power = 2 ) } \arguments{ \item{e1}{Forecast errors from method 1.} \item{e2}{Forecast errors from method 2.} \item{alternative}{a character string specifying the alternative hypothesis, must be one of \code{"two.sided"} (default), \code{"greater"} or \code{"less"}. You can specify just the initial letter.} \item{h}{The forecast horizon used in calculating \code{e1} and \code{e2}.} \item{power}{The power used in the loss function. Usually 1 or 2.} } \value{ A list with class \code{"htest"} containing the following components: \item{statistic}{the value of the DM-statistic.} \item{parameter}{the forecast horizon and loss function power used in the test.} \item{alternative}{a character string describing the alternative hypothesis.} \item{p.value}{the p-value for the test.} \item{method}{a character string with the value "Diebold-Mariano Test".} \item{data.name}{a character vector giving the names of the two error series.} } \description{ The Diebold-Mariano test compares the forecast accuracy of two forecast methods. } \details{ This function implements the modified test proposed by Harvey, Leybourne and Newbold (1997). The null hypothesis is that the two methods have the same forecast accuracy.
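As a rough sketch of the quantity being tested (illustrative only, not the package's internal code), the test statistic is based on the sample mean of the loss differential between the two error series:
\preformatted{
# loss differential for power = 2:
d <- e1^2 - e2^2
# dm.test() tests whether mean(d) is zero, using a long-run variance
# estimate of d that allows for the autocorrelation arising at forecast
# horizons h > 1, with the small-sample correction of Harvey,
# Leybourne and Newbold (1997).
}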
For \code{alternative="less"}, the alternative hypothesis is that method 2 is less accurate than method 1. For \code{alternative="greater"}, the alternative hypothesis is that method 2 is more accurate than method 1. For \code{alternative="two.sided"}, the alternative hypothesis is that method 1 and method 2 have different levels of accuracy. } \examples{ # Test on in-sample one-step forecasts f1 <- ets(WWWusage) f2 <- auto.arima(WWWusage) accuracy(f1) accuracy(f2) dm.test(residuals(f1),residuals(f2),h=1) # Test on out-of-sample one-step forecasts f1 <- ets(WWWusage[1:80]) f2 <- auto.arima(WWWusage[1:80]) f1.out <- ets(WWWusage[81:100],model=f1) f2.out <- Arima(WWWusage[81:100],model=f2) accuracy(f1.out) accuracy(f2.out) dm.test(residuals(f1.out),residuals(f2.out),h=1) } \references{ Diebold, F.X. and Mariano, R.S. (1995) Comparing predictive accuracy. \emph{Journal of Business and Economic Statistics}, \bold{13}, 253-263. Harvey, D., Leybourne, S., & Newbold, P. (1997). Testing the equality of prediction mean squared errors. \emph{International Journal of forecasting}, \bold{13}(2), 281-291. } \author{ George Athanasopoulos } \keyword{htest} \keyword{ts} forecast/man/ets.Rd0000644000176200001440000001333713566112560013731 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ets.R \name{ets} \alias{ets} \alias{print.ets} \alias{summary.ets} \alias{as.character.ets} \alias{coef.ets} \alias{tsdiag.ets} \title{Exponential smoothing state space model} \usage{ ets( y, model = "ZZZ", damped = NULL, alpha = NULL, beta = NULL, gamma = NULL, phi = NULL, additive.only = FALSE, lambda = NULL, biasadj = FALSE, lower = c(rep(1e-04, 3), 0.8), upper = c(rep(0.9999, 3), 0.98), opt.crit = c("lik", "amse", "mse", "sigma", "mae"), nmse = 3, bounds = c("both", "usual", "admissible"), ic = c("aicc", "aic", "bic"), restrict = TRUE, allow.multiplicative.trend = FALSE, use.initial.values = FALSE, na.action = c("na.contiguous", "na.interp", "na.fail"), ... ) } \arguments{ \item{y}{a numeric vector or time series of class \code{ts}} \item{model}{Usually a three-character string identifying method using the framework terminology of Hyndman et al. (2002) and Hyndman et al. (2008). The first letter denotes the error type ("A", "M" or "Z"); the second letter denotes the trend type ("N","A","M" or "Z"); and the third letter denotes the season type ("N","A","M" or "Z"). In all cases, "N"=none, "A"=additive, "M"=multiplicative and "Z"=automatically selected. So, for example, "ANN" is simple exponential smoothing with additive errors, "MAM" is multiplicative Holt-Winters' method with multiplicative errors, and so on. It is also possible for the model to be of class \code{"ets"}, and equal to the output from a previous call to \code{ets}. In this case, the same model is fitted to \code{y} without re-estimating any smoothing parameters. See also the \code{use.initial.values} argument.} \item{damped}{If TRUE, use a damped trend (either additive or multiplicative). If NULL, both damped and non-damped trends will be tried and the best model (according to the information criterion \code{ic}) returned.} \item{alpha}{Value of alpha. If NULL, it is estimated.} \item{beta}{Value of beta. If NULL, it is estimated.} \item{gamma}{Value of gamma. If NULL, it is estimated.} \item{phi}{Value of phi. If NULL, it is estimated.} \item{additive.only}{If TRUE, will only consider additive models. Default is FALSE.} \item{lambda}{Box-Cox transformation parameter. 
If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated. When \code{lambda} is specified, \code{additive.only} is set to \code{TRUE}.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{lower}{Lower bounds for the parameters (alpha, beta, gamma, phi)} \item{upper}{Upper bounds for the parameters (alpha, beta, gamma, phi)} \item{opt.crit}{Optimization criterion. One of "mse" (Mean Square Error), "amse" (Average MSE over first \code{nmse} forecast horizons), "sigma" (Standard deviation of residuals), "mae" (Mean of absolute residuals), or "lik" (Log-likelihood, the default).} \item{nmse}{Number of steps for average multistep MSE (1<=\code{nmse}<=30).} \item{bounds}{Type of parameter space to impose: \code{"usual" } indicates all parameters must lie between specified lower and upper bounds; \code{"admissible"} indicates parameters must lie in the admissible space; \code{"both"} (default) takes the intersection of these regions.} \item{ic}{Information criterion to be used in model selection.} \item{restrict}{If \code{TRUE} (default), the models with infinite variance will not be allowed.} \item{allow.multiplicative.trend}{If \code{TRUE}, models with multiplicative trend are allowed when searching for a model. Otherwise, the model space excludes them. This argument is ignored if a multiplicative trend model is explicitly requested (e.g., using \code{model="MMN"}).} \item{use.initial.values}{If \code{TRUE} and \code{model} is of class \code{"ets"}, then the initial values in the model are also not re-estimated.} \item{na.action}{A function which indicates what should happen when the data contains NA values. By default, the largest contiguous portion of the time-series will be used.} \item{...}{Other undocumented arguments.} } \value{ An object of class "\code{ets}". The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{ets} and associated functions. } \description{ Returns ets model applied to \code{y}. } \details{ Based on the classification of methods as described in Hyndman et al (2008). The methodology is fully automatic. The only required argument for ets is the time series. The model is chosen automatically if not specified. This methodology performed extremely well on the M3-competition data. (See Hyndman, et al, 2002, below.) } \examples{ fit <- ets(USAccDeaths) plot(forecast(fit)) } \references{ Hyndman, R.J., Koehler, A.B., Snyder, R.D., and Grose, S. (2002) "A state space framework for automatic forecasting using exponential smoothing methods", \emph{International J. Forecasting}, \bold{18}(3), 439--454. Hyndman, R.J., Akram, Md., and Archibald, B. (2008) "The admissible parameter space for exponential smoothing models". \emph{Annals of Statistical Mathematics}, \bold{60}(2), 407--426. Hyndman, R.J., Koehler, A.B., Ord, J.K., and Snyder, R.D. (2008) \emph{Forecasting with exponential smoothing: the state space approach}, Springer-Verlag. \url{http://www.exponentialsmoothing.net}. } \seealso{ \code{\link[stats]{HoltWinters}}, \code{\link{rwf}}, \code{\link{Arima}}. 
} \author{ Rob J Hyndman } \keyword{ts} forecast/man/forecast.Rd0000644000176200001440000001135413566112560014741 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/forecast.R \name{forecast} \alias{forecast} \alias{print.forecast} \alias{summary.forecast} \alias{as.data.frame.forecast} \alias{as.ts.forecast} \alias{forecast.default} \alias{forecast.ts} \title{Forecasting time series} \usage{ forecast(object, ...) \method{forecast}{default}(object, ...) \method{forecast}{ts}( object, h = ifelse(frequency(object) > 1, 2 * frequency(object), 10), level = c(80, 95), fan = FALSE, robust = FALSE, lambda = NULL, biasadj = FALSE, find.frequency = FALSE, allow.multiplicative.trend = FALSE, model = NULL, ... ) } \arguments{ \item{object}{a time series or time series model for which forecasts are required} \item{...}{Additional arguments affecting the forecasts produced. If \code{model=NULL}, \code{forecast.ts} passes these to \code{\link{ets}} or \code{\link{stlf}} depending on the frequency of the time series. If \code{model} is not \code{NULL}, the arguments are passed to the relevant modelling function.} \item{h}{Number of periods for forecasting} \item{level}{Confidence level for prediction intervals.} \item{fan}{If TRUE, \code{level} is set to \code{seq(51,99,by=3)}. This is suitable for fan plots.} \item{robust}{If TRUE, the function is robust to missing values and outliers in \code{object}. This argument is only valid when \code{object} is of class \code{ts}.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, the data are transformed before the model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{find.frequency}{If TRUE, the function determines the appropriate period, if the data is of unknown period.} \item{allow.multiplicative.trend}{If TRUE, then ETS models with multiplicative trends are allowed. Otherwise, only additive or no trend ETS models are permitted.} \item{model}{An object describing a time series model; e.g., one of class \code{ets}, \code{Arima}, \code{bats}, \code{tbats}, or \code{nnetar}.} } \value{ An object of class "\code{forecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract various useful features of the value returned by \code{forecast$model}. An object of class \code{"forecast"} is a list usually containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{residuals}{Residuals from the fitted model.
For models with additive errors, the residuals will be x minus the fitted values.} \item{fitted}{Fitted values (one-step forecasts)} } \description{ \code{forecast} is a generic function for forecasting from time series or time series models. The function invokes particular \emph{methods} which depend on the class of the first argument. } \details{ For example, the function \code{\link{forecast.Arima}} makes forecasts based on the results produced by \code{\link[stats]{arima}}. If \code{model=NULL}, the function \code{\link{forecast.ts}} makes forecasts using \code{\link{ets}} models (if the data are non-seasonal or the seasonal period is 12 or less) or \code{\link{stlf}} (if the seasonal period is 13 or more). If \code{model} is not \code{NULL}, \code{forecast.ts} will apply the \code{model} to the \code{object} time series, and then generate forecasts accordingly. } \examples{ WWWusage \%>\% forecast \%>\% plot fit <- ets(window(WWWusage, end=60)) fc <- forecast(WWWusage, model=fit) } \seealso{ Other functions which return objects of class \code{"forecast"} are \code{\link{forecast.ets}}, \code{\link{forecast.Arima}}, \code{\link{forecast.HoltWinters}}, \code{\link{forecast.StructTS}}, \code{\link{meanf}}, \code{\link{rwf}}, \code{\link{splinef}}, \code{\link{thetaf}}, \code{\link{croston}}, \code{\link{ses}}, \code{\link{holt}}, \code{\link{hw}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/msts.Rd0000644000176200001440000000274613566112560014126 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/msts.R \name{msts} \alias{msts} \alias{print.msts} \alias{window.msts} \alias{`[.msts`} \title{Multi-Seasonal Time Series} \usage{ msts(data, seasonal.periods, ts.frequency = floor(max(seasonal.periods)), ...) } \arguments{ \item{data}{A numeric vector, ts object, matrix or data frame. It is intended that the time series data is univariate; otherwise it is treated the same as ts().} \item{seasonal.periods}{A vector of the seasonal periods of the msts.} \item{ts.frequency}{The seasonal period that should be used as frequency of the underlying ts object. The default value is \code{max(seasonal.periods)}.} \item{...}{Arguments to be passed to the underlying call to \code{ts()}. For example \code{start=c(1987,5)}.} } \value{ An object of class \code{c("msts", "ts")}. If there is only one seasonal period (i.e., \code{length(seasonal.periods)==1}), then the object is of class \code{"ts"}. } \description{ msts is an S3 class for multi-seasonal time series objects, intended to be used for models that support multiple seasonal periods. The msts class inherits from the ts class and has an additional "msts" attribute which contains the vector of seasonal periods. All methods that work on a ts class should also work on an msts class. } \examples{ x <- msts(taylor, seasonal.periods=c(48,336), start=2000+22/52) y <- msts(USAccDeaths, seasonal.periods=12, start=1949) } \author{ Slava Razbash and Rob J Hyndman } \keyword{ts} forecast/man/baggedModel.Rd0000644000176200001440000000445613566112560015332 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/baggedModel.R \name{baggedModel} \alias{baggedModel} \alias{print.baggedModel} \alias{baggedETS} \title{Forecasting using a bagged model} \usage{ baggedModel(y, bootstrapped_series = bld.mbb.bootstrap(y, 100), fn = ets, ...) baggedETS(y, bootstrapped_series = bld.mbb.bootstrap(y, 100), ...)
} \arguments{ \item{y}{A numeric vector or time series of class \code{ts}.} \item{bootstrapped_series}{bootstrapped versions of \code{y}.} \item{fn}{the forecast function to use. Default is \code{\link{ets}}.} \item{\dots}{Other arguments passed to the forecast function.} } \value{ Returns an object of class "\code{baggedModel}". The function \code{print} is used to obtain and print a summary of the results. \item{models}{A list containing the fitted ensemble models.} \item{method}{The function for producing a forecastable model.} \item{y}{The original time series.} \item{bootstrapped_series}{The bootstrapped series.} \item{modelargs}{The arguments passed through to \code{fn}.} \item{fitted}{Fitted values (one-step forecasts). The mean of the fitted values is calculated over the ensemble.} \item{residuals}{Original values minus fitted values.} } \description{ The bagged model forecasting method. } \details{ This function implements the bagged model forecasting method described in Bergmeir et al. (2016). By default, the \code{\link{ets}} function is applied to all bootstrapped series. Base models other than \code{\link{ets}} can be given by the parameter \code{fn}. Using the default parameters, the function \code{\link{bld.mbb.bootstrap}} is used to calculate the bootstrapped series with the Box-Cox and Loess-based decomposition (BLD) bootstrap. The function \code{\link{forecast.baggedModel}} can then be used to calculate forecasts. \code{baggedETS} is a wrapper for \code{baggedModel}, setting \code{fn} to "ets". This function is included for backwards compatibility only, and may be deprecated in the future. } \examples{ fit <- baggedModel(WWWusage) fcast <- forecast(fit) plot(fcast) } \references{ Bergmeir, C., R. J. Hyndman, and J. M. Benitez (2016). Bagging Exponential Smoothing Methods using STL Decomposition and Box-Cox Transformation. International Journal of Forecasting 32, 303-312. } \author{ Christoph Bergmeir, Fotios Petropoulos } \keyword{ts} forecast/man/forecast.nnetar.Rd0000644000176200001440000001023413566112560016223 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/nnetar.R \name{forecast.nnetar} \alias{forecast.nnetar} \title{Forecasting using neural network models} \usage{ \method{forecast}{nnetar}( object, h = ifelse(object$m > 1, 2 * object$m, 10), PI = FALSE, level = c(80, 95), fan = FALSE, xreg = NULL, lambda = object$lambda, bootstrap = FALSE, npaths = 1000, innov = NULL, ... ) } \arguments{ \item{object}{An object of class "\code{nnetar}" resulting from a call to \code{\link{nnetar}}.} \item{h}{Number of periods for forecasting. If \code{xreg} is used, \code{h} is ignored and the number of forecast periods is set to the number of rows of \code{xreg}.} \item{PI}{If TRUE, prediction intervals are produced, otherwise only point forecasts are calculated. If \code{PI} is FALSE, then \code{level}, \code{fan}, \code{bootstrap} and \code{npaths} are all ignored.} \item{level}{Confidence level for prediction intervals.} \item{fan}{If \code{TRUE}, level is set to \code{seq(51,99,by=3)}. This is suitable for fan plots.} \item{xreg}{Future values of external regressor variables.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL.
Otherwise, the data are transformed before the model is estimated.} \item{bootstrap}{If \code{TRUE}, then prediction intervals are computed using simulations with resampled residuals rather than normally distributed errors. Ignored if \code{innov} is not \code{NULL}.} \item{npaths}{Number of sample paths used in computing simulated prediction intervals.} \item{innov}{Values to use as innovations for prediction intervals. Must be a matrix with \code{h} rows and \code{npaths} columns (vectors are coerced into a matrix). If present, \code{bootstrap} is ignored.} \item{...}{Additional arguments passed to \code{\link{simulate.nnetar}}} } \value{ An object of class "\code{forecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{forecast.nnetar}. An object of class "\code{forecast}" is a list containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{xreg}{The external regressors used in fitting (if given).} \item{residuals}{Residuals from the fitted model. That is, x minus the fitted values.} \item{fitted}{Fitted values (one-step forecasts)} \item{...}{Other arguments} } \description{ Returns forecasts and other information for univariate neural network models. } \details{ Prediction intervals are calculated through simulations and can be slow. Note that if the network is too complex and overfits the data, the residuals can be arbitrarily small; if used for prediction interval calculations, they could lead to misleadingly small values. It is possible to use out-of-sample residuals to ameliorate this; see the examples. } \examples{ ## Fit & forecast model fit <- nnetar(USAccDeaths, size=2) fcast <- forecast(fit, h=20) plot(fcast) \dontrun{ ## Include prediction intervals in forecast fcast2 <- forecast(fit, h=20, PI=TRUE, npaths=100) plot(fcast2) ## Set up out-of-sample innovations using cross-validation fit_cv <- CVar(USAccDeaths, size=2) res_sd <- sd(fit_cv$residuals, na.rm=TRUE) myinnovs <- rnorm(20*100, mean=0, sd=res_sd) ## Forecast using new innovations fcast3 <- forecast(fit, h=20, PI=TRUE, npaths=100, innov=myinnovs) plot(fcast3) } } \seealso{ \code{\link{nnetar}}. } \author{ Rob J Hyndman and Gabriel Caceres } \keyword{ts} forecast/man/seasonplot.Rd0000644000176200001440000000411513566112560015317 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ggplot.R, R/graph.R \name{ggseasonplot} \alias{ggseasonplot} \alias{seasonplot} \title{Seasonal plot} \usage{ ggseasonplot( x, season.labels = NULL, year.labels = FALSE, year.labels.left = FALSE, type = NULL, col = NULL, continuous = FALSE, polar = FALSE, labelgap = 0.04, ... ) seasonplot( x, s, season.labels = NULL, year.labels = FALSE, year.labels.left = FALSE, type = "o", main, xlab = NULL, ylab = "", col = 1, labelgap = 0.1, ...
) } \arguments{ \item{x}{a numeric vector or time series of class \code{ts}.} \item{season.labels}{Labels for each season in the "year".} \item{year.labels}{Logical flag indicating whether labels for each year of data should be plotted on the right.} \item{year.labels.left}{Logical flag indicating whether labels for each year of data should be plotted on the left.} \item{type}{plot type (as for \code{\link[graphics]{plot}}). Not yet supported for ggseasonplot.} \item{col}{Colour} \item{continuous}{Should the colour scheme for years be continuous or discrete?} \item{polar}{Plot the graph on seasonal coordinates} \item{labelgap}{Distance between year labels and plotted lines} \item{\dots}{additional arguments to \code{\link[graphics]{plot}}.} \item{s}{seasonal frequency of x} \item{main}{Main title.} \item{xlab}{X-axis label.} \item{ylab}{Y-axis label.} } \value{ None. } \description{ Plots a seasonal plot as described in Hyndman and Athanasopoulos (2018, chapter 2). This is like a time plot except that the data are plotted against the seasons in separate years. } \examples{ ggseasonplot(AirPassengers, col=rainbow(12), year.labels=TRUE) ggseasonplot(AirPassengers, year.labels=TRUE, continuous=TRUE) seasonplot(AirPassengers, col=rainbow(12), year.labels=TRUE) } \references{ Hyndman and Athanasopoulos (2018) \emph{Forecasting: principles and practice}, 2nd edition, OTexts: Melbourne, Australia. \url{https://OTexts.org/fpp2/} } \seealso{ \code{\link[stats]{monthplot}} } \author{ Rob J Hyndman & Mitchell O'Hara-Wild } \keyword{ts} forecast/man/ma.Rd0000644000176200001440000000262013553673010013522 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/season.R \name{ma} \alias{ma} \title{Moving-average smoothing} \usage{ ma(x, order, centre = TRUE) } \arguments{ \item{x}{Univariate time series} \item{order}{Order of moving average smoother} \item{centre}{If TRUE, then the moving average is centred for even orders.} } \value{ Numerical time series object containing the simple moving average smoothed values. } \description{ \code{ma} computes a simple moving average smoother of a given time series. } \details{ The moving average smoother averages the nearest \code{order} periods of each observation. As neighbouring observations of a time series are likely to be similar in value, averaging eliminates some of the randomness in the data, leaving a smooth trend-cycle component. \deqn{\hat{T}_{t} = \frac{1}{m} \sum_{j=-k}^k y_{t+j}}{T[t]=1/m(y[t-k]+y[t-k+1]+\ldots+y[t]+\ldots+y[t+k-1]+y[t+k])} where \eqn{k=\frac{m-1}{2}}{k=(m-1)/2}. When an even \code{order} is specified, the observations averaged will include one more observation from the future than the past (k is rounded up). If \code{centre} is TRUE, the values from two moving averages (where k is rounded up and down, respectively) are averaged, centring the moving average. } \examples{ plot(wineind) sm <- ma(wineind,order=12) lines(sm,col="red") } \seealso{ \code{\link[stats]{decompose}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/gglagplot.Rd0000644000176200001440000000370513566112560015114 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ggplot.R \name{gglagplot} \alias{gglagplot} \alias{gglagchull} \title{Time series lag ggplots} \usage{ gglagplot( x, lags = ifelse(frequency(x) > 9, 16, 9), set.lags = 1:lags, diag = TRUE, diag.col = "gray", do.lines = TRUE, colour = TRUE, continuous = frequency(x) > 12, labels = FALSE, seasonal = TRUE, ...
) gglagchull( x, lags = ifelse(frequency(x) > 1, min(12, frequency(x)), 4), set.lags = 1:lags, diag = TRUE, diag.col = "gray", ... ) } \arguments{ \item{x}{a time series object (type \code{ts}).} \item{lags}{number of lag plots desired; see the \code{set.lags} argument.} \item{set.lags}{vector of positive integers specifying which lags to use.} \item{diag}{logical indicating if the x=y diagonal should be drawn.} \item{diag.col}{colour to be used for the diagonal, if \code{diag=TRUE}.} \item{do.lines}{if TRUE, lines will be drawn, otherwise points will be drawn.} \item{colour}{logical indicating if lines should be coloured.} \item{continuous}{Should the colour scheme for years be continuous or discrete?} \item{labels}{logical indicating if labels should be used.} \item{seasonal}{Should the line colour be based on seasonal characteristics (TRUE), or sequential (FALSE).} \item{\dots}{Not used (for consistency with lag.plot)} } \value{ None. } \description{ Plots a lag plot using ggplot. } \details{ \dQuote{gglagplot} will plot time series against lagged versions of themselves. This helps visualise 'auto-dependence' even when auto-correlations vanish. \dQuote{gglagchull} will layer convex hulls of the lags, layered on a single plot. This helps visualise the change in 'auto-dependence' as lags increase. } \examples{ gglagplot(woolyrnq) gglagplot(woolyrnq,seasonal=FALSE) lungDeaths <- cbind(mdeaths, fdeaths) gglagplot(lungDeaths, lags=2) gglagchull(lungDeaths, lags=6) gglagchull(woolyrnq) } \seealso{ \code{\link[stats]{lag.plot}} } \author{ Mitchell O'Hara-Wild } forecast/man/Acf.Rd0000644000176200001440000000747113566112560013631 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/acf.R \name{Acf} \alias{Acf} \alias{Pacf} \alias{Ccf} \alias{taperedacf} \alias{taperedpacf} \title{(Partial) Autocorrelation and Cross-Correlation Function Estimation} \usage{ Acf( x, lag.max = NULL, type = c("correlation", "covariance", "partial"), plot = TRUE, na.action = na.contiguous, demean = TRUE, ... ) Pacf( x, lag.max = NULL, plot = TRUE, na.action = na.contiguous, demean = TRUE, ... ) Ccf( x, y, lag.max = NULL, type = c("correlation", "covariance"), plot = TRUE, na.action = na.contiguous, ... ) taperedacf( x, lag.max = NULL, type = c("correlation", "partial"), plot = TRUE, calc.ci = TRUE, level = 95, nsim = 100, ... ) taperedpacf(x, ...) } \arguments{ \item{x}{a univariate or multivariate (not Ccf) numeric time series object or a numeric vector or matrix.} \item{lag.max}{maximum lag at which to calculate the acf. Default is \eqn{10\log_{10}(N/m)}{10*log10(N/m)} where \eqn{N}{N} is the number of observations and \eqn{m}{m} the number of series. Will be automatically limited to one less than the number of observations in the series.} \item{type}{character string giving the type of acf to be computed. Allowed values are \dQuote{\code{correlation}} (the default), \dQuote{\code{covariance}} or \dQuote{\code{partial}}.} \item{plot}{logical. If \code{TRUE} (the default) the resulting acf, pacf or ccf is plotted.} \item{na.action}{function to handle missing values. Default is \code{\link[stats]{na.contiguous}}.
Useful alternatives are \code{\link[stats]{na.pass}} and \code{\link{na.interp}}.} \item{demean}{Should covariances be about the sample means?} \item{...}{Additional arguments passed to the plotting function.} \item{y}{a univariate numeric time series object or a numeric vector.} \item{calc.ci}{If \code{TRUE}, confidence intervals for the ACF/PACF estimates are calculated.} \item{level}{Percentage level used for the confidence intervals.} \item{nsim}{The number of bootstrap samples used in estimating the confidence intervals.} } \value{ The \code{Acf}, \code{Pacf} and \code{Ccf} functions return objects of class "acf" as described in \code{\link[stats]{acf}} from the stats package. The \code{taperedacf} and \code{taperedpacf} functions return objects of class "mpacf". } \description{ The function \code{Acf} computes (and by default plots) an estimate of the autocorrelation function of a (possibly multivariate) time series. Function \code{Pacf} computes (and by default plots) an estimate of the partial autocorrelation function of a (possibly multivariate) time series. Function \code{Ccf} computes the cross-correlation or cross-covariance of two univariate series. } \details{ The functions improve the \code{\link[stats]{acf}}, \code{\link[stats]{pacf}} and \code{\link[stats]{ccf}} functions. The main differences are that \code{Acf} does not plot a spike at lag 0 when \code{type=="correlation"} (which is redundant) and the horizontal axes show lags in time units rather than seasonal units. The tapered versions implement the ACF and PACF estimates and plots described in Hyndman (2015), based on the banded and tapered estimates of autocovariance proposed by McMurry and Politis (2010). } \examples{ Acf(wineind) Pacf(wineind) \dontrun{ taperedacf(wineind, nsim=50) taperedpacf(wineind, nsim=50) } } \references{ Hyndman, R.J. (2015). Discussion of ``High-dimensional autocovariance matrices and optimal linear prediction''. \emph{Electronic Journal of Statistics}, 9, 792-796. McMurry, T. L., & Politis, D. N. (2010). Banded and tapered estimates for autocovariance matrices and the linear process bootstrap. \emph{Journal of Time Series Analysis}, 31(6), 471-482. } \seealso{ \code{\link[stats]{acf}}, \code{\link[stats]{pacf}}, \code{\link[stats]{ccf}}, \code{\link{tsdisplay}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/seasonal.Rd0000644000176200001440000000205413553673010014733 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/components.R \name{seasonal} \alias{seasonal} \alias{trendcycle} \alias{remainder} \title{Extract components from a time series decomposition} \usage{ seasonal(object) trendcycle(object) remainder(object) } \arguments{ \item{object}{Object created by \code{\link[stats]{decompose}}, \code{\link[stats]{stl}} or \code{\link{tbats}}.} } \value{ Univariate time series. } \description{ Returns a univariate time series equal to either a seasonal component, trend-cycle component or remainder component from a time series decomposition. } \examples{ plot(USAccDeaths) fit <- stl(USAccDeaths, s.window="periodic") lines(trendcycle(fit),col="red") library(ggplot2) autoplot(cbind( Data=USAccDeaths, Seasonal=seasonal(fit), Trend=trendcycle(fit), Remainder=remainder(fit)), facets=TRUE) + ylab("") + xlab("Year") } \seealso{ \code{\link[stats]{stl}}, \code{\link[stats]{decompose}}, \code{\link{tbats}}, \code{\link{seasadj}}. 
} \author{ Rob J Hyndman } \keyword{ts} forecast/man/CVar.Rd0000644000176200001440000000404213566112560013762 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/tscv.R \name{CVar} \alias{CVar} \alias{print.CVar} \title{k-fold Cross-Validation applied to an autoregressive model} \usage{ CVar( y, k = 10, FUN = nnetar, cvtrace = FALSE, blocked = FALSE, LBlags = 24, ... ) } \arguments{ \item{y}{Univariate time series} \item{k}{Number of folds to use for cross-validation.} \item{FUN}{Function to fit an autoregressive model. Currently, it only works with the \code{\link{nnetar}} function.} \item{cvtrace}{Provide progress information.} \item{blocked}{Choose folds randomly or as blocks?} \item{LBlags}{Number of lags used for the Ljung-Box test; defaults to 24. For yearly series it can be set to 20.} \item{...}{Other arguments are passed to \code{FUN}.} } \value{ A list containing information about the model and accuracy for each fold, plus other summary information computed across folds. } \description{ \code{CVar} computes the errors obtained by applying an autoregressive modelling function to subsets of the time series \code{y} using k-fold cross-validation as described in Bergmeir, Hyndman and Koo (2018). It also applies a Ljung-Box test to the residuals. If this test is significant (see returned pvalue), there is serial correlation in the residuals and the model can be considered to be underfitting the data. In this case, the cross-validated errors can underestimate the generalization error and should not be used. } \examples{ modelcv <- CVar(lynx, k=5, lambda=0.15) print(modelcv) print(modelcv$fold1) library(ggplot2) autoplot(lynx, series="Data") + autolayer(modelcv$testfit, series="Fits") + autolayer(modelcv$residuals, series="Residuals") ggAcf(modelcv$residuals) } \references{ Bergmeir, C., Hyndman, R.J., Koo, B. (2018) A note on the validity of cross-validation for evaluating time series prediction. \emph{Computational Statistics & Data Analysis}, \bold{120}, 70-83. \url{https://robjhyndman.com/publications/cv-time-series/}. } \seealso{ \link{CV}, \link{tsCV}. } \author{ Gabriel Caceres and Rob J Hyndman } \keyword{ts} forecast/man/plot.ets.Rd0000644000176200001440000000207513553673010014701 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ets.R, R/ggplot.R \name{plot.ets} \alias{plot.ets} \alias{autoplot.ets} \title{Plot components from ETS model} \usage{ \method{plot}{ets}(x, ...) \method{autoplot}{ets}(object, range.bars = NULL, ...) } \arguments{ \item{x}{Object of class \dQuote{ets}.} \item{...}{Other plotting parameters to affect the plot.} \item{object}{Object of class \dQuote{ets}. Used for ggplot graphics (S3 method consistency).} \item{range.bars}{Logical indicating if each plot should have a bar at its right side representing relative size. If NULL, automatic selection takes place.} } \value{ None. Function produces a plot } \description{ Produces a plot of the level, slope and seasonal components from an ETS model. } \details{ \code{autoplot} will produce an equivalent plot as a ggplot object.
} \examples{ fit <- ets(USAccDeaths) plot(fit) plot(fit,plot.type="single",ylab="",col=1:3) library(ggplot2) autoplot(fit) } \seealso{ \code{\link{ets}} } \author{ Rob J Hyndman & Mitchell O'Hara-Wild } \keyword{hplot} forecast/man/arima.errors.Rd0000644000176200001440000000153113553673010015531 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/arima.R \name{arima.errors} \alias{arima.errors} \title{Errors from a regression model with ARIMA errors} \usage{ arima.errors(object) } \arguments{ \item{object}{An object containing a time series model of class \code{Arima}.} } \value{ A \code{ts} object } \description{ Returns time series of the regression residuals from a fitted ARIMA model. } \details{ This is a deprecated function which is identical to \code{\link{residuals.Arima}(object, type="regression")}. Regression residuals are equal to the original data minus the effect of any regression variables. If there are no regression variables, the errors will be identical to the original series (possibly adjusted to have zero mean). } \seealso{ \code{\link{residuals.Arima}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/gghistogram.Rd0000644000176200001440000000171513617634745015461 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ggplot.R \name{gghistogram} \alias{gghistogram} \title{Histogram with optional normal and kernel density functions} \usage{ gghistogram( x, add.normal = FALSE, add.kde = FALSE, add.rug = TRUE, bins, boundary = 0 ) } \arguments{ \item{x}{a numerical vector.} \item{add.normal}{Add a normal density function for comparison} \item{add.kde}{Add a kernel density estimate for comparison} \item{add.rug}{Add a rug plot on the horizontal axis} \item{bins}{The number of bins to use for the histogram. Selected by default using the Friedman-Diaconis rule given by \code{\link[grDevices]{nclass.FD}}.} \item{boundary}{A boundary between two bins.} } \value{ None. } \description{ Plots a histogram and density estimates using ggplot. } \examples{ gghistogram(lynx, add.kde=TRUE) } \seealso{ \code{\link[graphics]{hist}}, \code{\link[ggplot2]{geom_histogram}} } \author{ Rob J Hyndman } forecast/man/arimaorder.Rd0000644000176200001440000000211113553673010015250 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/arima.R \name{arimaorder} \alias{arimaorder} \title{Return the order of an ARIMA or ARFIMA model} \usage{ arimaorder(object) } \arguments{ \item{object}{An object of class \dQuote{\code{Arima}}, \dQuote{\code{ar}} or \dQuote{\code{fracdiff}}. Usually the result of a call to \code{\link[stats]{arima}}, \code{\link{Arima}}, \code{\link{auto.arima}}, \code{\link[stats]{ar}}, \code{\link{arfima}} or \code{\link[fracdiff]{fracdiff}}.} } \value{ A numerical vector giving the values \eqn{p}, \eqn{d} and \eqn{q} of the ARIMA or ARFIMA model. For a seasonal ARIMA model, the returned vector contains the values \eqn{p}, \eqn{d}, \eqn{q}, \eqn{P}, \eqn{D}, \eqn{Q} and \eqn{m}, where \eqn{m} is the period of seasonality. } \description{ Returns the order of a univariate ARIMA or ARFIMA model. } \examples{ WWWusage \%>\% auto.arima \%>\% arimaorder } \seealso{ \code{\link[stats]{ar}}, \code{\link{auto.arima}}, \code{\link{Arima}}, \code{\link[stats]{arima}}, \code{\link{arfima}}.
} \author{ Rob J Hyndman } \keyword{ts} forecast/man/tsclean.Rd0000644000176200001440000000206613553673010014562 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/clean.R \name{tsclean} \alias{tsclean} \title{Identify and replace outliers and missing values in a time series} \usage{ tsclean(x, replace.missing = TRUE, lambda = NULL) } \arguments{ \item{x}{time series} \item{replace.missing}{If TRUE, it not only replaces outliers, but also interpolates missing values.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, the data are transformed before the model is estimated.} } \value{ Time series } \description{ Uses supsmu for non-seasonal series and a robust STL decomposition for seasonal series. To estimate missing values and outlier replacements, linear interpolation is used on the (possibly seasonally adjusted) series. } \examples{ cleangold <- tsclean(gold) } \seealso{ \code{\link[forecast]{na.interp}}, \code{\link[forecast]{tsoutliers}}, \code{\link[stats]{supsmu}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/autolayer.Rd0000644000176200001440000000134413571300652015133 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ggplot.R \name{autolayer} \alias{autolayer} \title{Create a ggplot layer appropriate to a particular data type} \usage{ autolayer(object, ...) } \arguments{ \item{object}{an object, whose class will determine the behaviour of autolayer} \item{...}{other arguments passed to specific methods} } \value{ a ggplot layer } \description{ \code{autolayer} uses ggplot2 to draw a particular layer for an object of a particular class in a single command. This defines the S3 generic that other classes and packages can extend. } \seealso{ \code{\link[ggplot2:autoplot]{autoplot()}}, \code{\link[ggplot2:ggplot]{ggplot()}} and \code{\link[ggplot2:fortify]{fortify()}} } forecast/man/croston.Rd0000644000176200001440000000511113553673010014612 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/forecast2.R \name{croston} \alias{croston} \title{Forecasts for intermittent demand using Croston's method} \usage{ croston(y, h = 10, alpha = 0.1, x = y) } \arguments{ \item{y}{a numeric vector or time series of class \code{ts}} \item{h}{Number of periods for forecasting.} \item{alpha}{Value of alpha. Default value is 0.1.} \item{x}{Deprecated. Included for backwards compatibility.} } \value{ An object of class \code{"forecast"} is a list containing at least the following elements: \item{model}{A list containing information about the fitted model. The first element gives the model used for non-zero demands. The second element gives the model used for times between non-zero demands. Both elements are of class \code{forecast}.} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{residuals}{Residuals from the fitted model. That is, y minus the fitted values.} \item{fitted}{Fitted values (one-step forecasts)} The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts.
The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{croston} and associated functions. } \description{ Returns forecasts and other information for Croston's forecasts applied to y. } \details{ Based on Croston's (1972) method for intermittent demand forecasting, also described in Shenstone and Hyndman (2005). Croston's method involves using simple exponential smoothing (SES) on the non-zero elements of the time series and a separate application of SES to the times between non-zero elements of the time series. The smoothing parameters of the two applications of SES are assumed to be equal and are denoted by \code{alpha}. Note that prediction intervals are not computed as Croston's method has no underlying stochastic model. } \examples{ y <- rpois(20,lambda=.3) fcast <- croston(y) plot(fcast) } \references{ Croston, J. (1972) "Forecasting and stock control for intermittent demands", \emph{Operational Research Quarterly}, \bold{23}(3), 289-303. Shenstone, L., and Hyndman, R.J. (2005) "Stochastic models underlying Croston's method for intermittent demand forecasting". \emph{Journal of Forecasting}, \bold{24}, 389-402. } \seealso{ \code{\link{ses}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/is.ets.Rd0000644000176200001440000000120613553673010014331 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/acf.R, R/arima.R, R/baggedModel.R, R/bats.R, % R/ets.R, R/modelAR.R, R/mstl.R, R/nnetar.R \name{is.acf} \alias{is.acf} \alias{is.Arima} \alias{is.baggedModel} \alias{is.bats} \alias{is.ets} \alias{is.modelAR} \alias{is.stlm} \alias{is.nnetar} \alias{is.nnetarmodels} \title{Is an object a particular model type?} \usage{ is.acf(x) is.Arima(x) is.baggedModel(x) is.bats(x) is.ets(x) is.modelAR(x) is.stlm(x) is.nnetar(x) is.nnetarmodels(x) } \arguments{ \item{x}{object to be tested} } \description{ Returns true if the model object is of a particular type } forecast/man/ndiffs.Rd0000644000176200001440000000523113566112560014401 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/unitRoot.R \name{ndiffs} \alias{ndiffs} \title{Number of differences required for a stationary series} \usage{ ndiffs( x, alpha = 0.05, test = c("kpss", "adf", "pp"), type = c("level", "trend"), max.d = 2, ... ) } \arguments{ \item{x}{A univariate time series} \item{alpha}{Level of the test, possible values range from 0.01 to 0.1.} \item{test}{Type of unit root test to use} \item{type}{Specification of the deterministic component in the regression} \item{max.d}{Maximum number of non-seasonal differences allowed} \item{...}{Additional arguments to be passed on to the unit root test} } \value{ An integer indicating the number of differences required for stationarity. } \description{ Functions to estimate the number of differences required to make a given time series stationary. \code{ndiffs} estimates the number of first differences necessary. } \details{ \code{ndiffs} uses a unit root test to determine the number of differences required for time series \code{x} to be made stationary. If \code{test="kpss"}, the KPSS test is used with the null hypothesis that \code{x} has a stationary root against a unit-root alternative. Then the test returns the least number of differences required to pass the test at the level \code{alpha}. If \code{test="adf"}, the Augmented Dickey-Fuller test is used and if \code{test="pp"} the Phillips-Perron test is used. 
In both of these cases, the null hypothesis is that \code{x} has a unit root against a stationary root alternative. Then the test returns the least number of differences required to fail the test at the level \code{alpha}. } \examples{ ndiffs(WWWusage) ndiffs(diff(log(AirPassengers),12)) } \references{ Dickey DA and Fuller WA (1979), "Distribution of the Estimators for Autoregressive Time Series with a Unit Root", \emph{Journal of the American Statistical Association} \bold{74}:427-431. Kwiatkowski D, Phillips PCB, Schmidt P and Shin Y (1992) "Testing the Null Hypothesis of Stationarity against the Alternative of a Unit Root", \emph{Journal of Econometrics} \bold{54}:159-178. Osborn, D.R. (1990) "A survey of seasonality in UK macroeconomic variables", \emph{International Journal of Forecasting}, \bold{6}:327-336. Phillips, P.C.B. and Perron, P. (1988) "Testing for a unit root in time series regression", \emph{Biometrika}, \bold{75}(2), 335-346. Said E and Dickey DA (1984), "Testing for Unit Roots in Autoregressive Moving Average Models of Unknown Order", \emph{Biometrika} \bold{71}:599-607. } \seealso{ \code{\link{auto.arima}} and \code{\link{nsdiffs}} } \author{ Rob J Hyndman, Slava Razbash & Mitchell O'Hara-Wild } \keyword{ts} forecast/man/tsdisplay.Rd0000644000176200001440000000465113566112560015151 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ggplot.R, R/graph.R \name{ggtsdisplay} \alias{ggtsdisplay} \alias{tsdisplay} \title{Time series display} \usage{ ggtsdisplay( x, plot.type = c("partial", "histogram", "scatter", "spectrum"), points = TRUE, smooth = FALSE, lag.max, na.action = na.contiguous, theme = NULL, ... ) tsdisplay( x, plot.type = c("partial", "histogram", "scatter", "spectrum"), points = TRUE, ci.type = c("white", "ma"), lag.max, na.action = na.contiguous, main = NULL, xlab = "", ylab = "", pch = 1, cex = 0.5, ... ) } \arguments{ \item{x}{a numeric vector or time series of class \code{ts}.} \item{plot.type}{type of plot to include in lower right corner.} \item{points}{logical flag indicating whether to show the individual points or not in the time plot.} \item{smooth}{logical flag indicating whether to show a smooth loess curve superimposed on the time plot.} \item{lag.max}{the maximum lag to plot for the acf and pacf. A suitable value is selected by default if the argument is missing.} \item{na.action}{function to handle missing values in acf, pacf and spectrum calculations. The default is \code{\link[stats]{na.contiguous}}. Useful alternatives are \code{\link[stats]{na.pass}} and \code{\link{na.interp}}.} \item{theme}{Adds a ggplot element to each plot, typically a theme.} \item{\dots}{additional arguments to \code{\link[stats]{acf}}.} \item{ci.type}{type of confidence limits for ACF that is passed to \code{\link[stats]{acf}}. Should the confidence limits assume a white noise input or for lag \eqn{k} an MA(\eqn{k-1}) input?} \item{main}{Main title.} \item{xlab}{X-axis label.} \item{ylab}{Y-axis label.} \item{pch}{Plotting character.} \item{cex}{Character size.} } \value{ None. } \description{ Plots a time series along with its acf and either its pacf, lagged scatterplot or spectrum. } \details{ \code{ggtsdisplay} will produce the equivalent plot using ggplot graphics.
} \examples{ library(ggplot2) ggtsdisplay(USAccDeaths, plot.type="scatter", theme=theme_bw()) tsdisplay(diff(WWWusage)) ggtsdisplay(USAccDeaths, plot.type="scatter") } \references{ Hyndman and Athanasopoulos (2018) \emph{Forecasting: principles and practice}, 2nd edition, OTexts: Melbourne, Australia. \url{https://OTexts.org/fpp2/} } \seealso{ \code{\link[stats]{plot.ts}}, \code{\link{Acf}}, \code{\link[stats]{spec.ar}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/ses.Rd0000644000176200001440000001120013566112560013715 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/HoltWintersNew.R \name{ses} \alias{ses} \alias{holt} \alias{hw} \title{Exponential smoothing forecasts} \usage{ ses( y, h = 10, level = c(80, 95), fan = FALSE, initial = c("optimal", "simple"), alpha = NULL, lambda = NULL, biasadj = FALSE, x = y, ... ) holt( y, h = 10, damped = FALSE, level = c(80, 95), fan = FALSE, initial = c("optimal", "simple"), exponential = FALSE, alpha = NULL, beta = NULL, phi = NULL, lambda = NULL, biasadj = FALSE, x = y, ... ) hw( y, h = 2 * frequency(x), seasonal = c("additive", "multiplicative"), damped = FALSE, level = c(80, 95), fan = FALSE, initial = c("optimal", "simple"), exponential = FALSE, alpha = NULL, beta = NULL, gamma = NULL, phi = NULL, lambda = NULL, biasadj = FALSE, x = y, ... ) } \arguments{ \item{y}{a numeric vector or time series of class \code{ts}} \item{h}{Number of periods for forecasting.} \item{level}{Confidence level for prediction intervals.} \item{fan}{If TRUE, level is set to seq(51,99,by=3). This is suitable for fan plots.} \item{initial}{Method used for selecting initial state values. If \code{optimal}, the initial values are optimized along with the smoothing parameters using \code{\link{ets}}. If \code{simple}, the initial values are set to values obtained using simple calculations on the first few observations. See Hyndman & Athanasopoulos (2018) for details.} \item{alpha}{Value of smoothing parameter for the level. If \code{NULL}, it will be estimated.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, the data are transformed before the model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{x}{Deprecated. Included for backwards compatibility.} \item{...}{Other arguments passed to \code{forecast.ets}.} \item{damped}{If TRUE, use a damped trend.} \item{exponential}{If TRUE, an exponential trend is fitted. Otherwise, the trend is (locally) linear.} \item{beta}{Value of smoothing parameter for the trend. If \code{NULL}, it will be estimated.} \item{phi}{Value of damping parameter if \code{damped=TRUE}. If \code{NULL}, it will be estimated.} \item{seasonal}{Type of seasonality in \code{hw} model. "additive" or "multiplicative"} \item{gamma}{Value of smoothing parameter for the seasonal component. If \code{NULL}, it will be estimated.} } \value{ An object of class "\code{forecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals.
The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{ets} and associated functions. An object of class \code{"forecast"} is a list containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{residuals}{Residuals from the fitted model.} \item{fitted}{Fitted values (one-step forecasts)} } \description{ Returns forecasts and other information for exponential smoothing forecasts applied to \code{y}. } \details{ ses, holt and hw are simply convenient wrapper functions for \code{forecast(ets(...))}. } \examples{ fcast <- holt(airmiles) plot(fcast) deaths.fcast <- hw(USAccDeaths,h=48) plot(deaths.fcast) } \references{ Hyndman, R.J., Koehler, A.B., Ord, J.K., Snyder, R.D. (2008) \emph{Forecasting with exponential smoothing: the state space approach}, Springer-Verlag: New York. \url{http://www.exponentialsmoothing.net}. Hyndman and Athanasopoulos (2018) \emph{Forecasting: principles and practice}, 2nd edition, OTexts: Melbourne, Australia. \url{https://OTexts.org/fpp2/} } \seealso{ \code{\link{ets}}, \code{\link[stats]{HoltWinters}}, \code{\link{rwf}}, \code{\link[stats]{arima}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/forecast.mts.Rd0000644000176200001440000000711313566112560015541 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/mforecast.R \name{forecast.mts} \alias{forecast.mts} \alias{mforecast} \alias{print.mforecast} \alias{summary.mforecast} \alias{as.data.frame.mforecast} \title{Forecasting time series} \usage{ \method{forecast}{mts}( object, h = ifelse(frequency(object) > 1, 2 * frequency(object), 10), level = c(80, 95), fan = FALSE, robust = FALSE, lambda = NULL, biasadj = FALSE, find.frequency = FALSE, allow.multiplicative.trend = FALSE, ... ) } \arguments{ \item{object}{a multivariate time series or multivariate time series model for which forecasts are required} \item{h}{Number of periods for forecasting} \item{level}{Confidence level for prediction intervals.} \item{fan}{If TRUE, \code{level} is set to \code{seq(51,99,by=3)}. This is suitable for fan plots.} \item{robust}{If TRUE, the function is robust to missing values and outliers in \code{object}. This argument is only valid when \code{object} is of class \code{mts}.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{find.frequency}{If TRUE, the function determines the appropriate period, if the data is of unknown period.} \item{allow.multiplicative.trend}{If TRUE, then ETS models with multiplicative trends are allowed. 
Otherwise, only additive or no trend ETS models are permitted.} \item{...}{Additional arguments affecting the forecasts produced.} } \value{ An object of class "\code{mforecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the multivariate forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract various useful features of the value returned by \code{forecast$model}. An object of class \code{"mforecast"} is a list usually containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{residuals}{Residuals from the fitted model. For models with additive errors, the residuals will be x minus the fitted values.} \item{fitted}{Fitted values (one-step forecasts)} } \description{ \code{mforecast} is a class of objects for forecasting from multivariate time series or multivariate time series models. The function invokes particular \emph{methods} which depend on the class of the first argument. } \details{ For example, the function \code{\link{forecast.mlm}} makes multivariate forecasts based on the results produced by \code{\link{tslm}}. } \seealso{ Other functions which return objects of class \code{"mforecast"} are \code{\link{forecast.mlm}}, \code{forecast.varest}. } \author{ Rob J Hyndman & Mitchell O'Hara-Wild } forecast/man/modelAR.Rd0000644000176200001440000000733213566112560014457 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/modelAR.R \name{modelAR} \alias{modelAR} \alias{print.modelAR} \title{Time Series Forecasts with a user-defined model} \usage{ modelAR( y, p, P = 1, FUN, predict.FUN, xreg = NULL, lambda = NULL, model = NULL, subset = NULL, scale.inputs = FALSE, x = y, ... ) } \arguments{ \item{y}{A numeric vector or time series of class \code{ts}.} \item{p}{Embedding dimension for non-seasonal time series. Number of non-seasonal lags used as inputs. For non-seasonal time series, the default is the optimal number of lags (according to the AIC) for a linear AR(p) model. For seasonal time series, the same method is used but applied to seasonally adjusted data (from an stl decomposition).} \item{P}{Number of seasonal lags used as inputs.} \item{FUN}{Function used for model fitting. Must accept arguments \code{x} and \code{y} for the predictors and response, respectively (\code{formula} object not currently supported).} \item{predict.FUN}{Prediction function used to apply \code{FUN} to new data. Must accept the fitted model object returned by \code{FUN} as its first argument, and a data frame or matrix of new data for its second argument. Additionally, it should return fitted values when new data is omitted.} \item{xreg}{Optionally, a vector or matrix of external regressors, which must have the same number of rows as \code{y}. Must be numeric.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL.
Otherwise, data transformed before model is estimated.} \item{model}{Output from a previous call to \code{modelAR}. If model is passed, this same model is fitted to \code{y} without re-estimating any parameters.} \item{subset}{Optional vector specifying a subset of observations to be used in the fit. Can be an integer index vector or a logical vector the same length as \code{y}. All observations are used by default.} \item{scale.inputs}{If TRUE, inputs are scaled by subtracting the column means and dividing by their respective standard deviations. If \code{lambda} is not \code{NULL}, scaling is applied after Box-Cox transformation.} \item{x}{Deprecated. Included for backwards compatibility.} \item{\dots}{Other arguments passed to \code{FUN} for \code{modelAR}.} } \value{ Returns an object of class "\code{modelAR}". The function \code{summary} is used to obtain and print a summary of the results. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{modelAR}. \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{x}{The original time series.} \item{xreg}{The external regressors used in fitting (if given).} \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} \item{fitted}{Fitted values (one-step forecasts)} \item{...}{Other arguments} } \description{ Experimental function to forecast univariate time series with a user-defined model } \details{ This is an experimental function and only recommended for advanced users. The selected model is fitted with lagged values of \code{y} as inputs. The inputs are for lags 1 to \code{p}, and lags \code{m} to \code{mP} where \code{m=frequency(y)}. If \code{xreg} is provided, its columns are also used as inputs. If there are missing values in \code{y} or \code{xreg}, the corresponding rows (and any others which depend on them as lags) are omitted from the fit. The model is trained for one-step forecasting. Multi-step forecasts are computed recursively. } \author{ Rob J Hyndman and Gabriel Caceres } \keyword{ts} forecast/man/taylor.Rd0000644000176200001440000000124713553673010014443 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/data.R \docType{data} \name{taylor} \alias{taylor} \title{Half-hourly electricity demand} \format{Time series data} \source{ James W Taylor } \usage{ taylor } \description{ Half-hourly electricity demand in England and Wales from Monday 5 June 2000 to Sunday 27 August 2000. Discussed in Taylor (2003), and kindly provided by James W Taylor. Units: Megawatts } \examples{ plot(taylor) } \references{ Taylor, J.W. (2003) Short-term electricity demand forecasting using double seasonal exponential smoothing. \emph{Journal of the Operational Research Society}, \bold{54}, 799-805. } \keyword{datasets} forecast/man/seasonaldummy.Rd0000644000176200001440000000313713553673010016012 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/season.R \name{seasonaldummy} \alias{seasonaldummy} \alias{seasonaldummyf} \title{Seasonal dummy variables} \usage{ seasonaldummy(x, h = NULL) seasonaldummyf(x, h) } \arguments{ \item{x}{Seasonal time series: a \code{ts} or a \code{msts} object} \item{h}{Number of periods ahead to forecast (optional)} } \value{ Numerical matrix.
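For monthly data, for example, the matrix has eleven columns, one per month with the last season (December) omitted; a quick sketch:
\preformatted{
head(seasonaldummy(ldeaths))        # 11 dummy columns for monthly data
dim(seasonaldummy(ldeaths, h = 24)) # 24 future rows, for forecasting
}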
} \description{ \code{seasonaldummy} returns a matrix of dummy variables suitable for use in \code{\link{Arima}}, \code{\link{auto.arima}} or \code{\link{tslm}}. The last season is omitted and used as the control. } \details{ \code{seasonaldummyf} is deprecated, instead use the \code{h} argument in \code{seasonaldummy}. The number of dummy variables is determined from the time series characteristics of \code{x}. When \code{h} is missing, the length of \code{x} also determines the number of rows for the matrix returned by \code{seasonaldummy}. Otherwise, the value of \code{h} determines the number of rows for the matrix returned by \code{seasonaldummy}, typically used for forecasting. The values within \code{x} are not used. } \examples{ plot(ldeaths) # Using seasonal dummy variables month <- seasonaldummy(ldeaths) deaths.lm <- tslm(ldeaths ~ month) tsdisplay(residuals(deaths.lm)) ldeaths.fcast <- forecast(deaths.lm, data.frame(month=I(seasonaldummy(ldeaths,36)))) plot(ldeaths.fcast) # A simpler approach to seasonal dummy variables deaths.lm <- tslm(ldeaths ~ season) ldeaths.fcast <- forecast(deaths.lm, h=36) plot(ldeaths.fcast) } \seealso{ \code{\link{fourier}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/forecast.modelAR.Rd0000644000176200001440000000705213566112560016263 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/modelAR.R \name{forecast.modelAR} \alias{forecast.modelAR} \title{Forecasting using user-defined model} \usage{ \method{forecast}{modelAR}( object, h = ifelse(object$m > 1, 2 * object$m, 10), PI = FALSE, level = c(80, 95), fan = FALSE, xreg = NULL, lambda = object$lambda, bootstrap = FALSE, npaths = 1000, innov = NULL, ... ) } \arguments{ \item{object}{An object of class "\code{modelAR}" resulting from a call to \code{\link{modelAR}}.} \item{h}{Number of periods for forecasting. If \code{xreg} is used, \code{h} is ignored and the number of forecast periods is set to the number of rows of \code{xreg}.} \item{PI}{If TRUE, prediction intervals are produced, otherwise only point forecasts are calculated. If \code{PI} is FALSE, then \code{level}, \code{fan}, \code{bootstrap} and \code{npaths} are all ignored.} \item{level}{Confidence level for prediction intervals.} \item{fan}{If \code{TRUE}, level is set to \code{seq(51,99,by=3)}. This is suitable for fan plots.} \item{xreg}{Future values of external regressor variables.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{bootstrap}{If \code{TRUE}, then prediction intervals are computed using simulations with resampled residuals rather than normally distributed errors. Ignored if \code{innov} is not \code{NULL}.} \item{npaths}{Number of sample paths used in computing simulated prediction intervals.} \item{innov}{Values to use as innovations for prediction intervals. Must be a matrix with \code{h} rows and \code{npaths} columns (vectors are coerced into a matrix). If present, \code{bootstrap} is ignored.} \item{...}{Additional arguments passed to \code{\link{simulate.nnetar}}} } \value{ An object of class "\code{forecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals.
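Because both the fitting and prediction functions are user-supplied, a complete example may help. The following minimal, untested sketch illustrates the \code{FUN}/\code{predict.FUN} contract described in \code{\link{modelAR}}; the helpers \code{lin_fit} and \code{lin_predict} are hypothetical names, not part of the package:
\preformatted{
# a simple linear AR model expressed through the modelAR interface
lin_fit <- function(x, y, ...) {
  lm(y ~ ., data = as.data.frame(x))   # x: matrix of lagged inputs
}
lin_predict <- function(object, newdata, ...) {
  if (missing(newdata)) return(fitted(object))   # fitted values when no new data
  newdata <- matrix(newdata, ncol = length(coef(object)) - 1)
  drop(cbind(1, newdata) \%*\% coef(object))       # intercept + lag coefficients
}
fit <- modelAR(lynx, p = 2, P = 0, FUN = lin_fit, predict.FUN = lin_predict)
plot(forecast(fit, h = 10))
}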
The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{forecast.modelAR}. An object of class "\code{forecast}" is a list containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{xreg}{The external regressors used in fitting (if given).} \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} \item{fitted}{Fitted values (one-step forecasts)} \item{...}{Other arguments} } \description{ Returns forecasts and other information for user-defined models. } \details{ Prediction intervals are calculated through simulations and can be slow. Note that if the model is too complex and overfits the data, the residuals can be arbitrarily small; if used for prediction interval calculations, they could lead to misleadingly small values. } \seealso{ \code{\link{nnetar}}. } \author{ Rob J Hyndman and Gabriel Caceres } \keyword{ts} forecast/man/accuracy.Rd0000644000176200001440000000633613617634745014740 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/errors.R \name{accuracy} \alias{accuracy} \alias{accuracy.default} \title{Accuracy measures for a forecast model} \usage{ accuracy(f, ...) \method{accuracy}{default}(f, x, test = NULL, d = NULL, D = NULL, ...) } \arguments{ \item{f}{An object of class \dQuote{\code{forecast}}, or a numerical vector containing forecasts. It will also work with \code{Arima}, \code{ets} and \code{lm} objects if \code{x} is omitted -- in which case training set accuracy measures are returned.} \item{...}{Additional arguments depending on the specific method.} \item{x}{An optional numerical vector containing actual values of the same length as object, or a time series overlapping with the times of \code{f}.} \item{test}{Indicator of which elements of \code{x} and \code{f} to test. If \code{test} is \code{NULL}, all elements are used. Otherwise test is a numeric vector containing the indices of the elements to use in the test.} \item{d}{An integer indicating the number of lag-1 differences to be used for the denominator in MASE calculation. Default value is 1 for non-seasonal series and 0 for seasonal series.} \item{D}{An integer indicating the number of seasonal differences to be used for the denominator in MASE calculation. Default value is 0 for non-seasonal series and 1 for seasonal series.} } \value{ Matrix giving forecast accuracy measures. } \description{ Returns a range of summary measures of the forecast accuracy. If \code{x} is provided, the function measures test set forecast accuracy based on \code{x-f}. If \code{x} is not provided, the function only produces training set accuracy measures of the forecasts based on \code{f["x"]-fitted(f)}. All measures are defined and discussed in Hyndman and Koehler (2006).
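For example, a small sketch of both usages (consistent with the behaviour described above, but not taken from the package examples):
\preformatted{
train <- window(AirPassengers, end = c(1958, 12))
test  <- window(AirPassengers, start = c(1959, 1))
fc <- snaive(train, h = length(test))
accuracy(fc, test)                   # training and test set measures
accuracy(as.numeric(fc$mean), test)  # numeric forecasts: no MASE (see Details)
}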
} \details{ The measures calculated are: \itemize{ \item ME: Mean Error \item RMSE: Root Mean Squared Error \item MAE: Mean Absolute Error \item MPE: Mean Percentage Error \item MAPE: Mean Absolute Percentage Error \item MASE: Mean Absolute Scaled Error \item ACF1: Autocorrelation of errors at lag 1. } By default, the MASE calculation is scaled using MAE of training set naive forecasts for non-seasonal time series, training set seasonal naive forecasts for seasonal time series and training set mean forecasts for non-time series data. If \code{f} is a numerical vector rather than a \code{forecast} object, the MASE will not be returned as the training data will not be available. See Hyndman and Koehler (2006) and Hyndman and Athanasopoulos (2018, Section 3.4) for further details. } \examples{ fit1 <- rwf(EuStockMarkets[1:200,1],h=100) fit2 <- meanf(EuStockMarkets[1:200,1],h=100) accuracy(fit1) accuracy(fit2) accuracy(fit1,EuStockMarkets[201:300,1]) accuracy(fit2,EuStockMarkets[201:300,1]) plot(fit1) lines(EuStockMarkets[1:300,1]) } \references{ Hyndman, R.J. and Koehler, A.B. (2006) "Another look at measures of forecast accuracy". \emph{International Journal of Forecasting}, \bold{22}(4), 679-688. Hyndman, R.J. and Athanasopoulos, G. (2018) "Forecasting: principles and practice", 2nd ed., OTexts, Melbourne, Australia. Section 3.4 "Evaluating forecast accuracy". \url{https://otexts.org/fpp2/accuracy.html}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/figures/0000755000176200001440000000000013553673010014302 5ustar liggesusersforecast/man/figures/logo.png0000644000176200001440000001365413553673010015761 0ustar liggesusers[binary PNG image data (package logo) omitted]
forecast/man/forecast.ets.Rd0000644000176200001440000000652413566112560015536 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/etsforecast.R \name{forecast.ets} \alias{forecast.ets} \title{Forecasting using ETS models} \usage{ \method{forecast}{ets}( object, h = ifelse(object$m > 1, 2 * object$m, 10), level = c(80, 95), fan = FALSE, simulate = FALSE, bootstrap = FALSE, npaths = 5000, PI = TRUE, lambda = object$lambda, biasadj = NULL, ... ) } \arguments{ \item{object}{An object of class "\code{ets}". Usually the result of a call to \code{\link{ets}}.} \item{h}{Number of periods for forecasting} \item{level}{Confidence level for prediction intervals.} \item{fan}{If TRUE, level is set to seq(51,99,by=3). This is suitable for fan plots.} \item{simulate}{If TRUE, prediction intervals are produced by simulation rather than using analytic formulae. Errors are assumed to be normally distributed.} \item{bootstrap}{If TRUE, then prediction intervals are produced by simulation using resampled errors (rather than normally distributed errors).} \item{npaths}{Number of sample paths used in computing simulated prediction intervals.} \item{PI}{If TRUE, prediction intervals are produced, otherwise only point forecasts are calculated. If \code{PI} is FALSE, then \code{level}, \code{fan}, \code{simulate}, \code{bootstrap} and \code{npaths} are all ignored.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts.
If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{...}{Other arguments.} } \value{ An object of class "\code{forecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{forecast.ets}. An object of class \code{"forecast"} is a list containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{residuals}{Residuals from the fitted model. For models with additive errors, the residuals are x - fitted values. For models with multiplicative errors, the residuals are equal to x /(fitted values) - 1.} \item{fitted}{Fitted values (one-step forecasts)} } \description{ Returns forecasts and other information for univariate ETS models. } \examples{ fit <- ets(USAccDeaths) plot(forecast(fit,h=48)) } \seealso{ \code{\link{ets}}, \code{\link{ses}}, \code{\link{holt}}, \code{\link{hw}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/CV.Rd0000644000176200001440000000135513553673010013441 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/lm.R \name{CV} \alias{CV} \title{Cross-validation statistic} \usage{ CV(obj) } \arguments{ \item{obj}{output from \code{\link[stats]{lm}} or \code{\link{tslm}}} } \value{ Numerical vector containing CV, AIC, AICc, BIC and AdjR2 values. } \description{ Computes the leave-one-out cross-validation statistic (also known as PRESS -- prediction residual sum of squares), AIC, corrected AIC, BIC and adjusted R^2 values for a linear model. } \examples{ y <- ts(rnorm(120,0,3) + 20*sin(2*pi*(1:120)/12), frequency=12) fit1 <- tslm(y ~ trend + season) fit2 <- tslm(y ~ season) CV(fit1) CV(fit2) } \seealso{ \code{\link[stats]{AIC}} } \author{ Rob J Hyndman } \keyword{models} forecast/man/forecast.baggedModel.Rd0000644000176200001440000000456613566112560017141 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/baggedModel.R \name{forecast.baggedModel} \alias{forecast.baggedModel} \title{Forecasting using a bagged model} \usage{ \method{forecast}{baggedModel}( object, h = ifelse(frequency(object$y) > 1, 2 * frequency(object$y), 10), ... ) } \arguments{ \item{object}{An object of class "\code{baggedModel}" resulting from a call to \code{\link{baggedModel}}.} \item{h}{Number of periods for forecasting.} \item{...}{Other arguments, passed on to the \code{\link{forecast}} function of the original method} } \value{ An object of class "\code{forecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. 
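As explained in the Details section below, the interval bounds for bagged models are ensemble minima and maxima rather than true prediction intervals; a quick sketch:
\preformatted{
fit <- baggedModel(WWWusage)  # bagged ETS models on bootstrapped series
fc  <- forecast(fit, h = 5)
fc$lower  # ensemble minima, not genuine prediction-interval bounds
fc$upper  # ensemble maxima
}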
An object of class "\code{forecast}" is a list containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{xreg}{The external regressors used in fitting (if given).} \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} \item{fitted}{Fitted values (one-step forecasts)} } \description{ Returns forecasts and other information for bagged models. } \details{ Intervals are calculated as min and max values over the point forecasts from the models in the ensemble. I.e., the intervals are not prediction intervals, but give an indication of how different the forecasts within the ensemble are. } \examples{ fit <- baggedModel(WWWusage) fcast <- forecast(fit) plot(fcast) \dontrun{ fit2 <- baggedModel(WWWusage, fn="auto.arima") fcast2 <- forecast(fit2) plot(fcast2) accuracy(fcast2)} } \references{ Bergmeir, C., R. J. Hyndman, and J. M. Benitez (2016). Bagging Exponential Smoothing Methods using STL Decomposition and Box-Cox Transformation. International Journal of Forecasting 32, 303-312. } \seealso{ \code{\link{baggedModel}}. } \author{ Christoph Bergmeir, Fotios Petropoulos } \keyword{ts} forecast/man/reexports.Rd0000644000176200001440000000074513553673010015166 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/forecast-package.R, R/ggplot.R \docType{import} \name{reexports} \alias{reexports} \alias{\%>\%} \alias{autoplot} \title{Objects exported from other packages} \keyword{internal} \description{ These objects are imported from other packages. Follow the links below to see their documentation. \describe{ \item{ggplot2}{\code{\link[ggplot2]{autoplot}}} \item{magrittr}{\code{\link[magrittr]{\%>\%}}} }} forecast/man/findfrequency.Rd0000644000176200001440000000224513553673010015772 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/forecast.R \name{findfrequency} \alias{findfrequency} \title{Find dominant frequency of a time series} \usage{ findfrequency(x) } \arguments{ \item{x}{a numeric vector or time series of class \code{ts}} } \value{ an integer value } \description{ \code{findfrequency} returns the period of the dominant frequency of a time series. For seasonal data, it will return the seasonal period. For cyclic data, it will return the average cycle length. } \details{ The dominant frequency is determined from a spectral analysis of the time series. First, a linear trend is removed, then the spectral density function is estimated from the best fitting autoregressive model (based on the AIC). If there is a large (possibly local) maximum in the spectral density function at frequency \eqn{f}, then the function will return the period \eqn{1/f} (rounded to the nearest integer). If no such dominant frequency can be found, the function will return 1. 
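The following sketch illustrates the idea (a simplified illustration, not the package's exact implementation):
\preformatted{
x <- as.numeric(USAccDeaths)
detrended <- residuals(lm(x ~ seq_along(x)))  # remove a linear trend
sp <- stats::spec.ar(detrended, plot = FALSE) # AR-based spectral density
round(1 / sp$freq[which.max(sp$spec)])        # dominant period, about 12
}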
} \examples{ findfrequency(USAccDeaths) # Monthly data findfrequency(taylor) # Half-hourly data findfrequency(lynx) # Annual data } \author{ Rob J Hyndman } \keyword{ts} forecast/man/nnetar.Rd0000644000176200001440000001073513566112560014424 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/nnetar.R \name{nnetar} \alias{nnetar} \alias{print.nnetar} \alias{print.nnetarmodels} \title{Neural Network Time Series Forecasts} \usage{ nnetar( y, p, P = 1, size, repeats = 20, xreg = NULL, lambda = NULL, model = NULL, subset = NULL, scale.inputs = TRUE, x = y, ... ) } \arguments{ \item{y}{A numeric vector or time series of class \code{ts}.} \item{p}{Embedding dimension for non-seasonal time series. Number of non-seasonal lags used as inputs. For non-seasonal time series, the default is the optimal number of lags (according to the AIC) for a linear AR(p) model. For seasonal time series, the same method is used but applied to seasonally adjusted data (from an stl decomposition).} \item{P}{Number of seasonal lags used as inputs.} \item{size}{Number of nodes in the hidden layer. Default is half of the number of input nodes (including external regressors, if given) plus 1.} \item{repeats}{Number of networks to fit with different random starting weights. These are then averaged when producing forecasts.} \item{xreg}{Optionally, a vector or matrix of external regressors, which must have the same number of rows as \code{y}. Must be numeric.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{model}{Output from a previous call to \code{nnetar}. If model is passed, this same model is fitted to \code{y} without re-estimating any parameters.} \item{subset}{Optional vector specifying a subset of observations to be used in the fit. Can be an integer index vector or a logical vector the same length as \code{y}. All observations are used by default.} \item{scale.inputs}{If TRUE, inputs are scaled by subtracting the column means and dividing by their respective standard deviations. If \code{lambda} is not \code{NULL}, scaling is applied after Box-Cox transformation.} \item{x}{Deprecated. Included for backwards compatibility.} \item{\dots}{Other arguments passed to \code{\link[nnet]{nnet}} for \code{nnetar}.} } \value{ Returns an object of class "\code{nnetar}". The function \code{summary} is used to obtain and print a summary of the results. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{nnetar}. \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{x}{The original time series.} \item{xreg}{The external regressors used in fitting (if given).} \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} \item{fitted}{Fitted values (one-step forecasts)} \item{...}{Other arguments} } \description{ Feed-forward neural networks with a single hidden layer and lagged inputs for forecasting univariate time series. } \details{ A feed-forward neural network is fitted with lagged values of \code{y} as inputs and a single hidden layer with \code{size} nodes. The inputs are for lags 1 to \code{p}, and lags \code{m} to \code{mP} where \code{m=frequency(y)}. 
If \code{xreg} is provided, its columns are also used as inputs. If there are missing values in \code{y} or \code{xreg}, the corresponding rows (and any others which depend on them as lags) are omitted from the fit. A total of \code{repeats} networks are fitted, each with random starting weights. These are then averaged when computing forecasts. The network is trained for one-step forecasting. Multi-step forecasts are computed recursively. For non-seasonal data, the fitted model is denoted as an NNAR(p,k) model, where k is the number of hidden nodes. This is analogous to an AR(p) model but with nonlinear functions. For seasonal data, the fitted model is called an NNAR(p,P,k)[m] model, which is analogous to an ARIMA(p,0,0)(P,0,0)[m] model but with nonlinear functions. } \examples{ fit <- nnetar(lynx) fcast <- forecast(fit) plot(fcast) ## Arguments can be passed to nnet() fit <- nnetar(lynx, decay=0.5, maxit=150) plot(forecast(fit)) lines(lynx) ## Fit model to first 100 years of lynx data fit <- nnetar(window(lynx,end=1920), decay=0.5, maxit=150) plot(forecast(fit,h=14)) lines(lynx) ## Apply fitted model to later data, including all optional arguments fit2 <- nnetar(window(lynx,start=1921), model=fit) } \author{ Rob J Hyndman and Gabriel Caceres } \keyword{ts} forecast/man/autoplot.acf.Rd0000644000176200001440000000553613571300652015534 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ggplot.R \name{autoplot.acf} \alias{autoplot.acf} \alias{ggAcf} \alias{ggPacf} \alias{ggCcf} \alias{autoplot.mpacf} \alias{ggtaperedacf} \alias{ggtaperedpacf} \title{ggplot (Partial) Autocorrelation and Cross-Correlation Function Estimation and Plotting} \usage{ \method{autoplot}{acf}(object, ci = 0.95, ...) ggAcf( x, lag.max = NULL, type = c("correlation", "covariance", "partial"), plot = TRUE, na.action = na.contiguous, demean = TRUE, ... ) ggPacf( x, lag.max = NULL, plot = TRUE, na.action = na.contiguous, demean = TRUE, ... ) ggCcf( x, y, lag.max = NULL, type = c("correlation", "covariance"), plot = TRUE, na.action = na.contiguous, ... ) \method{autoplot}{mpacf}(object, ...) ggtaperedacf( x, lag.max = NULL, type = c("correlation", "partial"), plot = TRUE, calc.ci = TRUE, level = 95, nsim = 100, ... ) ggtaperedpacf(x, ...) } \arguments{ \item{object}{Object of class \dQuote{\code{acf}}.} \item{ci}{coverage probability for confidence interval. Plotting of the confidence interval is suppressed if ci is zero or negative.} \item{...}{Other plotting parameters to affect the plot.} \item{x}{a univariate or multivariate (not Ccf) numeric time series object or a numeric vector or matrix.} \item{lag.max}{maximum lag at which to calculate the acf.} \item{type}{character string giving the type of acf to be computed. Allowed values are "\code{correlation}" (the default), \dQuote{\code{covariance}} or \dQuote{\code{partial}}.} \item{plot}{logical. If \code{TRUE} (the default) the resulting ACF, PACF or CCF is plotted.} \item{na.action}{function to handle missing values. Default is \code{\link[stats]{na.contiguous}}. 
Useful alternatives are \code{\link[stats]{na.pass}} and \code{\link{na.interp}}.} \item{demean}{Should covariances be about the sample means?} \item{y}{a univariate numeric time series object or a numeric vector.} \item{calc.ci}{If \code{TRUE}, confidence intervals for the ACF/PACF estimates are calculated.} \item{level}{Percentage level used for the confidence intervals.} \item{nsim}{The number of bootstrap samples used in estimating the confidence intervals.} } \value{ A ggplot object. } \description{ Produces a ggplot object equivalent to the output of the \code{Acf}, \code{Pacf}, \code{Ccf}, \code{taperedacf} and \code{taperedpacf} functions. } \details{ If \code{autoplot} is given an \code{acf} or \code{mpacf} object, then an appropriate ggplot object will be created. } \examples{ library(ggplot2) ggAcf(wineind) wineind \%>\% Acf(plot=FALSE) \%>\% autoplot \dontrun{ wineind \%>\% taperedacf(plot=FALSE) \%>\% autoplot ggtaperedacf(wineind) ggtaperedpacf(wineind)} ggCcf(mdeaths, fdeaths) } \seealso{ \code{\link[stats]{plot.acf}}, \code{\link{Acf}}, \code{\link[stats]{acf}}, \code{\link{taperedacf}} } \author{ Mitchell O'Hara-Wild } forecast/man/is.constant.Rd0000644000176200001440000000046013553673010015370 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/newarima2.R \name{is.constant} \alias{is.constant} \title{Is an object constant?} \usage{ is.constant(x) } \arguments{ \item{x}{object to be tested} } \description{ Returns true if the object's numerical values do not vary. } forecast/man/naive.Rd0000644000176200001440000000777213566112560014242 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/naive.R \name{rwf} \alias{rwf} \alias{naive} \alias{print.naive} \alias{snaive} \title{Naive and Random Walk Forecasts} \usage{ rwf( y, h = 10, drift = FALSE, level = c(80, 95), fan = FALSE, lambda = NULL, biasadj = FALSE, ..., x = y ) naive( y, h = 10, level = c(80, 95), fan = FALSE, lambda = NULL, biasadj = FALSE, ..., x = y ) snaive( y, h = 2 * frequency(x), level = c(80, 95), fan = FALSE, lambda = NULL, biasadj = FALSE, ..., x = y ) } \arguments{ \item{y}{a numeric vector or time series of class \code{ts}} \item{h}{Number of periods for forecasting} \item{drift}{Logical flag. If TRUE, fits a random walk with drift model.} \item{level}{Confidence levels for prediction intervals.} \item{fan}{If TRUE, level is set to seq(51,99,by=3). This is suitable for fan plots.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{...}{Additional arguments affecting the forecasts produced. If \code{model=NULL}, \code{forecast.ts} passes these to \code{\link{ets}} or \code{\link{stlf}} depending on the frequency of the time series. If \code{model} is not \code{NULL}, the arguments are passed to the relevant modelling function.} \item{x}{Deprecated. Included for backwards compatibility.} } \value{ An object of class "\code{forecast}".
The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{naive} or \code{snaive}. An object of class \code{"forecast"} is a list containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} \item{fitted}{Fitted values (one-step forecasts)} } \description{ \code{rwf()} returns forecasts and prediction intervals for a random walk with drift model applied to \code{y}. This is equivalent to an ARIMA(0,1,0) model with an optional drift coefficient. \code{naive()} is simply a wrapper to \code{rwf()} for simplicity. \code{snaive()} returns forecasts and prediction intervals from an ARIMA(0,0,0)(0,1,0)m model where m is the seasonal period. } \details{ The random walk with drift model is \deqn{Y_t=c + Y_{t-1} + Z_t}{Y[t]=c + Y[t-1] + Z[t]} where \eqn{Z_t}{Z[t]} is a normal iid error. Forecasts are given by \deqn{Y_n(h)=ch+Y_n}{Y[n+h]=ch+Y[n]}. If there is no drift (as in \code{naive}), the drift parameter c=0. Forecast standard errors allow for uncertainty in estimating the drift parameter (unlike the corresponding forecasts obtained by fitting an ARIMA model directly). The seasonal naive model is \deqn{Y_t= Y_{t-m} + Z_t}{Y[t]=Y[t-m] + Z[t]} where \eqn{Z_t}{Z[t]} is a normal iid error. } \examples{ gold.fcast <- rwf(gold[1:60], h=50) plot(gold.fcast) plot(naive(gold,h=50),include=200) plot(snaive(wineind)) } \seealso{ \code{\link{Arima}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/monthdays.Rd0000644000176200001440000000134613553673010015137 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/season.R \name{monthdays} \alias{monthdays} \title{Number of days in each season} \usage{ monthdays(x) } \arguments{ \item{x}{time series} } \value{ Time series } \description{ Returns number of days in each month or quarter of the observed time period. } \details{ Useful for month length adjustments } \examples{ par(mfrow=c(2,1)) plot(ldeaths,xlab="Year",ylab="pounds", main="Monthly deaths from lung disease (UK)") ldeaths.adj <- ldeaths/monthdays(ldeaths)*365.25/12 plot(ldeaths.adj,xlab="Year",ylab="pounds", main="Adjusted monthly deaths from lung disease (UK)") } \seealso{ \code{\link[forecast]{bizdays}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/gas.Rd0000644000176200001440000000060213553673010013675 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/data.R \docType{data} \name{gas} \alias{gas} \title{Australian monthly gas production} \format{Time series data} \source{ Australian Bureau of Statistics. } \usage{ gas } \description{ Australian monthly gas production: 1956--1995. 
} \examples{ plot(gas) seasonplot(gas) tsdisplay(gas) } \keyword{datasets} forecast/man/tslm.Rd0000644000176200001440000000433113553673010014105 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/lm.R \name{tslm} \alias{tslm} \title{Fit a linear model with time series components} \usage{ tslm(formula, data, subset, lambda = NULL, biasadj = FALSE, ...) } \arguments{ \item{formula}{an object of class "formula" (or one that can be coerced to that class): a symbolic description of the model to be fitted.} \item{data}{an optional data frame, list or environment (or object coercible by as.data.frame to a data frame) containing the variables in the model. If not found in data, the variables are taken from environment(formula), typically the environment from which lm is called.} \item{subset}{an optional subset containing rows of data to keep. For best results, pass a logical vector of rows to keep. Also supports \code{\link[base]{subset}()} functions.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{...}{Other arguments passed to \code{\link[stats]{lm}()}} } \value{ Returns an object of class "lm". } \description{ \code{tslm} is used to fit linear models to time series including trend and seasonality components. } \details{ \code{tslm} is largely a wrapper for \code{\link[stats]{lm}()} except that it allows variables "trend" and "season" which are created on the fly from the time series characteristics of the data. The variable "trend" is a simple time trend and "season" is a factor indicating the season (e.g., the month or the quarter depending on the frequency of the data). } \examples{ y <- ts(rnorm(120,0,3) + 1:120 + 20*sin(2*pi*(1:120)/12), frequency=12) fit <- tslm(y ~ trend + season) plot(forecast(fit, h=20)) } \seealso{ \code{\link{forecast.lm}}, \code{\link[stats]{lm}}. } \author{ Mitchell O'Hara-Wild and Rob J Hyndman } \keyword{stats} forecast/man/forecast.mlm.Rd0000644000176200001440000000710013566112560015517 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/mforecast.R \name{forecast.mlm} \alias{forecast.mlm} \title{Forecast a multiple linear model with possible time series components} \usage{ \method{forecast}{mlm}( object, newdata, h = 10, level = c(80, 95), fan = FALSE, lambda = object$lambda, biasadj = NULL, ts = TRUE, ... ) } \arguments{ \item{object}{Object of class "mlm", usually the result of a call to \code{\link[stats]{lm}} or \code{\link{tslm}}.} \item{newdata}{An optional data frame in which to look for variables with which to predict. If omitted, it is assumed that the only variables are trend and season, and \code{h} forecasts are produced.} \item{h}{Number of periods for forecasting. Ignored if \code{newdata} present.} \item{level}{Confidence level for prediction intervals.} \item{fan}{If \code{TRUE}, level is set to seq(51,99,by=3). This is suitable for fan plots.} \item{lambda}{Box-Cox transformation parameter. 
If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{ts}{If \code{TRUE}, the forecasts will be treated as time series provided the original data is a time series; the \code{newdata} will be interpreted as related to the subsequent time periods. If \code{FALSE}, any time series attributes of the original data will be ignored.} \item{...}{Other arguments passed to \code{\link[forecast]{forecast.lm}()}.} } \value{ An object of class "\code{mforecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{forecast.lm}. An object of class \code{"mforecast"} is a list containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a multivariate time series} \item{lower}{Lower limits for prediction intervals of each series} \item{upper}{Upper limits for prediction intervals of each series} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The historical data for the response variable.} \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} \item{fitted}{Fitted values} } \description{ \code{forecast.mlm} is used to predict multiple linear models, especially those involving trend and seasonality components. } \details{ \code{forecast.mlm} is largely a wrapper for \code{\link[forecast]{forecast.lm}()} except that it allows forecasts to be generated on multiple series. Also, the output is reformatted into a \code{mforecast} object. } \examples{ lungDeaths <- cbind(mdeaths, fdeaths) fit <- tslm(lungDeaths ~ trend + season) fcast <- forecast(fit, h=10) carPower <- as.matrix(mtcars[,c("qsec","hp")]) carmpg <- mtcars[,"mpg"] fit <- lm(carPower ~ carmpg) fcast <- forecast(fit, newdata=data.frame(carmpg=30)) } \seealso{ \code{\link{tslm}}, \code{\link{forecast.lm}}, \code{\link[stats]{lm}}. } \author{ Mitchell O'Hara-Wild } forecast/man/bld.mbb.bootstrap.Rd0000644000176200001440000000234513553673010016445 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/bootstrap.R \name{bld.mbb.bootstrap} \alias{bld.mbb.bootstrap} \title{Box-Cox and Loess-based decomposition bootstrap.} \usage{ bld.mbb.bootstrap(x, num, block_size = NULL) } \arguments{ \item{x}{Original time series.} \item{num}{Number of bootstrapped versions to generate.} \item{block_size}{Block size for the moving block bootstrap.} } \value{ A list with bootstrapped versions of the series. The first series in the list is the original series. } \description{ Generates bootstrapped versions of a time series using the Box-Cox and Loess-based decomposition bootstrap. } \details{ The procedure is described in Bergmeir et al. 
Box-Cox decomposition is applied, together with STL or Loess (for non-seasonal time series), and the remainder is bootstrapped using a moving block bootstrap. } \examples{ bootstrapped_series <- bld.mbb.bootstrap(WWWusage, 100) } \references{ Bergmeir, C., R. J. Hyndman, and J. M. Benitez (2016). Bagging Exponential Smoothing Methods using STL Decomposition and Box-Cox Transformation. International Journal of Forecasting 32, 303-312. } \seealso{ \code{\link{baggedETS}}. } \author{ Christoph Bergmeir, Fotios Petropoulos } \keyword{ts} forecast/man/autoplot.seas.Rd0000644000176200001440000000346613571300652015736 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ggplot.R, R/mstl.R \name{autoplot.decomposed.ts} \alias{autoplot.decomposed.ts} \alias{autoplot.stl} \alias{autoplot.StructTS} \alias{autoplot.seas} \alias{autoplot.mstl} \title{Plot time series decomposition components using ggplot} \usage{ \method{autoplot}{decomposed.ts}(object, labels = NULL, range.bars = NULL, ...) \method{autoplot}{stl}(object, labels = NULL, range.bars = TRUE, ...) \method{autoplot}{StructTS}(object, labels = NULL, range.bars = TRUE, ...) \method{autoplot}{seas}(object, labels = NULL, range.bars = NULL, ...) \method{autoplot}{mstl}(object, ...) } \arguments{ \item{object}{Object of class \dQuote{\code{seas}}, \dQuote{\code{stl}}, or \dQuote{\code{decomposed.ts}}.} \item{labels}{Labels to replace \dQuote{seasonal}, \dQuote{trend}, and \dQuote{remainder}.} \item{range.bars}{Logical indicating if each plot should have a bar at its right side representing relative size. If \code{NULL}, automatic selection takes place.} \item{...}{Other plotting parameters to affect the plot.} } \value{ Returns an object of class \code{ggplot}. } \description{ Produces a ggplot object of seasonally decomposed time series for objects of class \dQuote{\code{stl}} (created with \code{\link[stats]{stl}}), class \dQuote{\code{seas}} (created with \code{\link[seasonal]{seas}}), or class \dQuote{\code{decomposed.ts}} (created with \code{\link[stats]{decompose}}). } \examples{ library(ggplot2) co2 \%>\% decompose \%>\% autoplot nottem \%>\% stl(s.window='periodic') \%>\% autoplot \dontrun{ library(seasonal) seas(USAccDeaths) \%>\% autoplot } } \seealso{ \code{\link[seasonal]{seas}}, \code{\link[stats]{stl}}, \code{\link[stats]{decompose}}, \code{\link[stats]{StructTS}}, \code{\link[stats]{plot.stl}}. } \author{ Mitchell O'Hara-Wild } forecast/man/dshw.Rd0000644000176200001440000001004213566112560014071 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/dshw.r \name{dshw} \alias{dshw} \title{Double-Seasonal Holt-Winters Forecasting} \usage{ dshw( y, period1 = NULL, period2 = NULL, h = 2 * max(period1, period2), alpha = NULL, beta = NULL, gamma = NULL, omega = NULL, phi = NULL, lambda = NULL, biasadj = FALSE, armethod = TRUE, model = NULL ) } \arguments{ \item{y}{Either an \code{\link{msts}} object with two seasonal periods or a numeric vector.} \item{period1}{Period of the shorter seasonal period. Only used if \code{y} is not an \code{\link{msts}} object.} \item{period2}{Period of the longer seasonal period. Only used if \code{y} is not an \code{\link{msts}} object.} \item{h}{Number of periods for forecasting.} \item{alpha}{Smoothing parameter for the level. If \code{NULL}, the parameter is estimated using least squares.} \item{beta}{Smoothing parameter for the slope. 
If \code{NULL}, the parameter is estimated using least squares.} \item{gamma}{Smoothing parameter for the first seasonal period. If \code{NULL}, the parameter is estimated using least squares.} \item{omega}{Smoothing parameter for the second seasonal period. If \code{NULL}, the parameter is estimated using least squares.} \item{phi}{Autoregressive parameter. If \code{NULL}, the parameter is estimated using least squares.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{armethod}{If TRUE, the forecasts are adjusted using an AR(1) model for the errors.} \item{model}{If it's specified, an existing model is applied to a new data set.} } \value{ An object of class "\code{forecast}" which is a list that includes the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{x}{The original time series.} \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} \item{fitted}{Fitted values (one-step forecasts)} The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{dshw}. } \description{ Returns forecasts using Taylor's (2003) Double-Seasonal Holt-Winters method. } \details{ Taylor's (2003) double-seasonal Holt-Winters method uses additive trend and multiplicative seasonality, where there are two seasonal components which are multiplied together. For example, with a series of half-hourly data, one would set \code{period1=48} for the daily period and \code{period2=336} for the weekly period. The smoothing parameter notation used here is different from that in Taylor (2003); instead it matches that used in Hyndman et al (2008) and that used for the \code{\link{ets}} function. } \examples{ \dontrun{ fcast <- dshw(taylor) plot(fcast) t <- seq(0,5,by=1/20) x <- exp(sin(2*pi*t) + cos(2*pi*t*4) + rnorm(length(t),0,.1)) fit <- dshw(x,20,5) plot(fit) } } \references{ Taylor, J.W. (2003) Short-term electricity demand forecasting using double seasonal exponential smoothing. \emph{Journal of the Operational Research Society}, \bold{54}, 799-805. Hyndman, R.J., Koehler, A.B., Ord, J.K., and Snyder, R.D. (2008) \emph{Forecasting with exponential smoothing: the state space approach}, Springer-Verlag. \url{http://www.exponentialsmoothing.net}. } \seealso{ \code{\link[stats]{HoltWinters}}, \code{\link{ets}}. 
} \author{ Rob J Hyndman } \keyword{ts} forecast/man/Arima.Rd0000644000176200001440000001067213566112560014166 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/arima.R \name{Arima} \alias{Arima} \alias{print.ARIMA} \alias{summary.Arima} \alias{as.character.Arima} \title{Fit ARIMA model to univariate time series} \usage{ Arima( y, order = c(0, 0, 0), seasonal = c(0, 0, 0), xreg = NULL, include.mean = TRUE, include.drift = FALSE, include.constant, lambda = model$lambda, biasadj = FALSE, method = c("CSS-ML", "ML", "CSS"), model = NULL, x = y, ... ) } \arguments{ \item{y}{a univariate time series of class \code{ts}.} \item{order}{A specification of the non-seasonal part of the ARIMA model: the three components (p, d, q) are the AR order, the degree of differencing, and the MA order.} \item{seasonal}{A specification of the seasonal part of the ARIMA model, plus the period (which defaults to frequency(y)). This should be a list with components order and period, but a specification of just a numeric vector of length 3 will be turned into a suitable list with the specification as the order.} \item{xreg}{Optionally, a numerical vector or matrix of external regressors, which must have the same number of rows as y. It should not be a data frame.} \item{include.mean}{Should the ARIMA model include a mean term? The default is \code{TRUE} for undifferenced series, \code{FALSE} for differenced ones (where a mean would not affect the fit nor predictions).} \item{include.drift}{Should the ARIMA model include a linear drift term? (i.e., a linear regression with ARIMA errors is fitted.) The default is \code{FALSE}.} \item{include.constant}{If \code{TRUE}, then \code{include.mean} is set to be \code{TRUE} for undifferenced series and \code{include.drift} is set to be \code{TRUE} for differenced series. Note that if there is more than one difference taken, no constant is included regardless of the value of this argument. This is deliberate as otherwise quadratic and higher order polynomial trends would be induced.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{method}{Fitting method: maximum likelihood or minimize conditional sum-of-squares. The default (unless there are missing values) is to use conditional-sum-of-squares to find starting values, then maximum likelihood.} \item{model}{Output from a previous call to \code{Arima}. If model is passed, this same model is fitted to \code{y} without re-estimating any parameters.} \item{x}{Deprecated. Included for backwards compatibility.} \item{...}{Additional arguments to be passed to \code{\link[stats]{arima}}.} } \value{ See the \code{\link[stats]{arima}} function in the stats package. The additional objects returned are \item{x}{The time series data} \item{xreg}{The regressors used in fitting (when relevant).} \item{sigma2}{The bias adjusted MLE of the innovations variance.} } \description{ Largely a wrapper for the \code{\link[stats]{arima}} function in the stats package. 
The main difference is that this function allows a drift term. It is also possible to take an ARIMA model from a previous call to \code{Arima} and re-apply it to the data \code{y}. } \details{ See the \code{\link[stats]{arima}} function in the stats package. } \examples{ library(ggplot2) WWWusage \%>\% Arima(order=c(3,1,0)) \%>\% forecast(h=20) \%>\% autoplot # Fit model to first few years of AirPassengers data air.model <- Arima(window(AirPassengers,end=1956+11/12),order=c(0,1,1), seasonal=list(order=c(0,1,1),period=12),lambda=0) plot(forecast(air.model,h=48)) lines(AirPassengers) # Apply fitted model to later data air.model2 <- Arima(window(AirPassengers,start=1957),model=air.model) # Forecast accuracy measures on the log scale. # in-sample one-step forecasts. accuracy(air.model) # out-of-sample one-step forecasts. accuracy(air.model2) # out-of-sample multi-step forecasts accuracy(forecast(air.model,h=48,lambda=NULL), log(window(AirPassengers,start=1957))) } \seealso{ \code{\link{auto.arima}}, \code{\link{forecast.Arima}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/plot.forecast.Rd0000644000176200001440000000704013566112560015713 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/forecast.R, R/ggplot.R, R/spline.R \name{plot.forecast} \alias{plot.forecast} \alias{autoplot.forecast} \alias{autoplot.splineforecast} \alias{autolayer.forecast} \alias{plot.splineforecast} \title{Forecast plot} \usage{ \method{plot}{forecast}( x, include, PI = TRUE, showgap = TRUE, shaded = TRUE, shadebars = (length(x$mean) < 5), shadecols = NULL, col = 1, fcol = 4, pi.col = 1, pi.lty = 2, ylim = NULL, main = NULL, xlab = "", ylab = "", type = "l", flty = 1, flwd = 2, ... ) \method{autoplot}{forecast}( object, include, PI = TRUE, shadecols = c("#596DD5", "#D5DBFF"), fcol = "#0000AA", flwd = 0.5, ... ) \method{autoplot}{splineforecast}(object, PI = TRUE, ...) \method{autolayer}{forecast}(object, series = NULL, PI = TRUE, showgap = TRUE, ...) \method{plot}{splineforecast}(x, fitcol = 2, type = "o", pch = 19, ...) } \arguments{ \item{x}{Forecast object produced by \code{\link{forecast}}.} \item{include}{number of values from time series to include in plot. Default is all values.} \item{PI}{Logical flag indicating whether to plot prediction intervals.} \item{showgap}{If \code{showgap=FALSE}, the gap between the historical observations and the forecasts is removed.} \item{shaded}{Logical flag indicating whether prediction intervals should be shaded (\code{TRUE}) or lines (\code{FALSE})} \item{shadebars}{Logical flag indicating if prediction intervals should be plotted as shaded bars (if \code{TRUE}) or a shaded polygon (if \code{FALSE}). Ignored if \code{shaded=FALSE}. Bars are plotted by default if there are fewer than five forecast horizons.} \item{shadecols}{Colors for shaded prediction intervals. To get default colors used prior to v3.26, set \code{shadecols="oldstyle"}.} \item{col}{Colour for the data line.} \item{fcol}{Colour for the forecast line.} \item{pi.col}{If \code{shaded=FALSE} and \code{PI=TRUE}, the prediction intervals are plotted in this colour.} \item{pi.lty}{If \code{shaded=FALSE} and \code{PI=TRUE}, the prediction intervals are plotted using this line type.} \item{ylim}{Limits on y-axis.} \item{main}{Main title.} \item{xlab}{X-axis label.} \item{ylab}{Y-axis label.} \item{type}{1-character string giving the type of plot desired. 
As for \code{\link[graphics]{plot.default}}.} \item{flty}{Line type for the forecast line.} \item{flwd}{Line width for the forecast line.} \item{...}{Other plotting parameters to affect the plot.} \item{object}{Forecast object produced by \code{\link{forecast}}. Used for ggplot graphics (S3 method consistency).} \item{series}{Matches an unidentified forecast layer with a coloured object on the plot.} \item{fitcol}{Line colour for fitted values.} \item{pch}{Plotting character (if \code{type=="p"} or \code{type=="o"}).} } \value{ None. } \description{ Plots historical data with forecasts and prediction intervals. } \details{ \code{autoplot} will produce a ggplot object. } \examples{ library(ggplot2) wine.fit <- hw(wineind,h=48) plot(wine.fit) autoplot(wine.fit) fit <- tslm(wineind ~ fourier(wineind,4)) fcast <- forecast(fit, newdata=data.frame(fourier(wineind,4,20))) autoplot(fcast) fcast <- splinef(airmiles,h=5) plot(fcast) autoplot(fcast) } \references{ Hyndman and Athanasopoulos (2018) \emph{Forecasting: principles and practice}, 2nd edition, OTexts: Melbourne, Australia. \url{https://OTexts.org/fpp2/} } \seealso{ \code{\link[stats]{plot.ts}} } \author{ Rob J Hyndman & Mitchell O'Hara-Wild } \keyword{ts} forecast/man/arfima.Rd0000644000176200001440000000636213566112560014375 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/arfima.R \name{arfima} \alias{arfima} \title{Fit a fractionally differenced ARFIMA model} \usage{ arfima( y, drange = c(0, 0.5), estim = c("mle", "ls"), model = NULL, lambda = NULL, biasadj = FALSE, x = y, ... ) } \arguments{ \item{y}{a univariate time series (numeric vector).} \item{drange}{Allowable values of d to be considered. Default of \code{c(0,0.5)} ensures a stationary model is returned.} \item{estim}{If \code{estim=="ls"}, then the ARMA parameters are calculated using the Haslett-Raftery algorithm. If \code{estim=="mle"}, then the ARMA parameters are calculated using full MLE via the \code{\link[stats]{arima}} function.} \item{model}{Output from a previous call to \code{arfima}. If model is passed, this same model is fitted to y without re-estimating any parameters.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{x}{Deprecated. Included for backwards compatibility.} \item{\dots}{Other arguments passed to \code{\link{auto.arima}} when selecting p and q.} } \value{ A list object of S3 class \code{"fracdiff"}, which is described in the \code{\link[fracdiff]{fracdiff}} documentation. A few additional objects are added to the list including \code{x} (the original time series), and the \code{residuals} and \code{fitted} values. } \description{ An ARFIMA(p,d,q) model is selected and estimated automatically using the Hyndman-Khandakar (2008) algorithm to select p and q and the Haslett and Raftery (1989) algorithm to estimate the parameters including d.
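The selection procedure described below can be sketched conceptually as follows (an illustrative sketch only, not the package's internal code; it assumes a numeric series \code{x}):
\preformatted{
## Illustrative sketch of the selection steps described in Details
## (an assumption about the workflow, not the actual internals).
library(forecast)
library(fracdiff)
x <- fracdiff.sim(100, ma = -0.4, d = 0.3)$series
fit.d <- fracdiff(x, nar = 2)      # estimate d assuming an ARFIMA(2,d,0)
adj <- diffseries(x, fit.d$d)      # fractionally difference by the estimated d
arma <- auto.arima(adj, d = 0, seasonal = FALSE)   # select p and q
fit <- fracdiff(x, nar = arma$arma[1], nma = arma$arma[2])  # re-estimate full model
}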
} \details{ This function combines \code{\link[fracdiff]{fracdiff}} and \code{\link{auto.arima}} to automatically select and estimate an ARFIMA model. The fractional differencing parameter is chosen first assuming an ARFIMA(2,d,0) model. Then the data are fractionally differenced using the estimated d and an ARMA model is selected for the resulting time series using \code{\link{auto.arima}}. Finally, the full ARFIMA(p,d,q) model is re-estimated using \code{\link[fracdiff]{fracdiff}}. If \code{estim=="mle"}, the ARMA coefficients are refined using \code{\link[stats]{arima}}. } \examples{ library(fracdiff) x <- fracdiff.sim( 100, ma=-.4, d=.3)$series fit <- arfima(x) tsdisplay(residuals(fit)) } \references{ J. Haslett and A. E. Raftery (1989) Space-time Modelling with Long-memory Dependence: Assessing Ireland's Wind Power Resource (with discussion); \emph{Applied Statistics} \bold{38}, 1-50. Hyndman, R.J. and Khandakar, Y. (2008) "Automatic time series forecasting: The forecast package for R", \emph{Journal of Statistical Software}, \bold{26}(3). } \seealso{ \code{\link[fracdiff]{fracdiff}}, \code{\link{auto.arima}}, \code{\link{forecast.fracdiff}}. } \author{ Rob J Hyndman and Farah Yasmeen } \keyword{ts} forecast/man/forecast-package.Rd0000644000176200001440000000113113553673010016320 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/forecast.R \docType{package} \name{forecast-package} \alias{forecast-package} \title{Forecasting Functions for Time Series and Linear Models} \description{ Methods and tools for displaying and analysing univariate time series forecasts including exponential smoothing via state space models and automatic ARIMA modelling. } \details{ \tabular{ll}{ Package: \tab forecast\cr Type: \tab Package\cr License: \tab GPL3\cr LazyLoad: \tab yes\cr } } \author{ Rob J Hyndman Maintainer: Rob.Hyndman@monash.edu } \keyword{package} forecast/man/meanf.Rd0000644000176200001440000000552113566112560014220 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/forecast2.R \name{meanf} \alias{meanf} \title{Mean Forecast} \usage{ meanf( y, h = 10, level = c(80, 95), fan = FALSE, lambda = NULL, biasadj = FALSE, bootstrap = FALSE, npaths = 5000, x = y ) } \arguments{ \item{y}{a numeric vector or time series of class \code{ts}} \item{h}{Number of periods for forecasting} \item{level}{Confidence levels for prediction intervals.} \item{fan}{If TRUE, level is set to seq(51,99,by=3). This is suitable for fan plots.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{bootstrap}{If TRUE, use a bootstrap method to compute prediction intervals. Otherwise, assume a normal distribution.} \item{npaths}{Number of bootstrapped sample paths to use if \code{bootstrap==TRUE}.} \item{x}{Deprecated. Included for backwards compatibility.} } \value{ An object of class "\code{forecast}". 
The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{meanf}. An object of class \code{"forecast"} is a list containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} \item{fitted}{Fitted values (one-step forecasts)} } \description{ Returns forecasts and prediction intervals for an iid model applied to y. } \details{ The iid model is \deqn{Y_t=\mu + Z_t}{Y[t]=mu + Z[t]} where \eqn{Z_t}{Z[t]} is a normal iid error. Forecasts are given by \deqn{Y_n(h)=\mu}{Y[n+h]=mu} where \eqn{\mu}{mu} is estimated by the sample mean. } \examples{ nile.fcast <- meanf(Nile, h=10) plot(nile.fcast) } \seealso{ \code{\link{rwf}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/nsdiffs.Rd0000644000176200001440000000527613566112560014575 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/unitRoot.R \name{nsdiffs} \alias{nsdiffs} \title{Number of differences required for a seasonally stationary series} \usage{ nsdiffs( x, alpha = 0.05, m = frequency(x), test = c("seas", "ocsb", "hegy", "ch"), max.D = 1, ... ) } \arguments{ \item{x}{A univariate time series} \item{alpha}{Level of the test, possible values range from 0.01 to 0.1.} \item{m}{Deprecated. Length of seasonal period} \item{test}{Type of unit root test to use} \item{max.D}{Maximum number of seasonal differences allowed} \item{...}{Additional arguments to be passed on to the unit root test} } \value{ An integer indicating the number of differences required for stationarity. } \description{ Functions to estimate the number of differences required to make a given time series stationary. \code{nsdiffs} estimates the number of seasonal differences necessary. } \details{ \code{nsdiffs} uses seasonal unit root tests to determine the number of seasonal differences required for time series \code{x} to be made stationary (possibly with some lag-one differencing as well). Several different tests are available: \itemize{ \item If \code{test="seas"} (default), a measure of seasonal strength is used, where differencing is selected if the seasonal strength (Wang, Smith & Hyndman, 2006) exceeds 0.64 (based on minimizing MASE when forecasting using auto.arima on M3 and M4 data). \item If \code{test="ch"}, the Canova-Hansen (1995) test is used (with null hypothesis of deterministic seasonality) \item If \code{test="hegy"}, the Hylleberg, Engle, Granger & Yoo (1990) test is used. \item If \code{test="ocsb"}, the Osborn-Chui-Smith-Birchenhall (1988) test is used (with null hypothesis that a seasonal unit root exists). } } \examples{ nsdiffs(AirPassengers) } \references{ Wang, X, Smith, KA, Hyndman, RJ (2006) "Characteristic-based clustering for time series data", \emph{Data Mining and Knowledge Discovery}, \bold{13}(3), 335-364. 
Osborn DR, Chui APL, Smith J, and Birchenhall CR (1988) "Seasonality and the order of integration for consumption", \emph{Oxford Bulletin of Economics and Statistics} \bold{50}(4):361-377. Canova F and Hansen BE (1995) "Are Seasonal Patterns Constant over Time? A Test for Seasonal Stability", \emph{Journal of Business and Economic Statistics} \bold{13}(3):237-252. Hylleberg S, Engle R, Granger C and Yoo B (1990) "Seasonal integration and cointegration.", \emph{Journal of Econometrics} \bold{44}(1), pp. 215-238. } \seealso{ \code{\link{auto.arima}}, \code{\link{ndiffs}}, \code{\link{ocsb.test}}, \code{\link[uroot]{hegy.test}}, and \code{\link[uroot]{ch.test}} } \author{ Rob J Hyndman, Slava Razbash and Mitchell O'Hara-Wild } forecast/man/BoxCox.Rd0000644000176200001440000000335313553673010014333 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/forecast2.R \name{BoxCox} \alias{BoxCox} \alias{InvBoxCox} \title{Box Cox Transformation} \usage{ BoxCox(x, lambda) InvBoxCox(x, lambda, biasadj = FALSE, fvar = NULL) } \arguments{ \item{x}{a numeric vector or time series of class \code{ts}.} \item{lambda}{transformation parameter. If \code{lambda = "auto"}, then the transformation parameter lambda is chosen using BoxCox.lambda.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{fvar}{Optional parameter required if biasadj=TRUE. Can either be the forecast variance, or a list containing the interval \code{level}, and the corresponding \code{upper} and \code{lower} intervals.} } \value{ a numeric vector of the same length as x. } \description{ BoxCox() returns a transformation of the input variable using a Box-Cox transformation. InvBoxCox() reverses the transformation. } \details{ The Box-Cox transformation is given by \deqn{f_\lambda(x) =\frac{x^\lambda - 1}{\lambda}}{f(x;lambda)=(x^lambda - 1)/lambda} if \eqn{\lambda\ne0}{lambda is not equal to 0}. For \eqn{\lambda=0}{lambda=0}, \deqn{f_0(x)=\log(x)}{f(x;0)=log(x)}. } \examples{ lambda <- BoxCox.lambda(lynx) lynx.fit <- ar(BoxCox(lynx,lambda)) plot(forecast(lynx.fit,h=20,lambda=lambda)) } \references{ Box, G. E. P. and Cox, D. R. (1964) An analysis of transformations. \emph{JRSS B} \bold{26} 211--246. } \seealso{ \code{\link{BoxCox.lambda}} } \author{ Rob J Hyndman & Mitchell O'Hara-Wild } \keyword{ts} forecast/man/tbats.Rd0000644000176200001440000000740513566112560014252 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/tbats.R \name{tbats} \alias{tbats} \alias{as.character.tbats} \alias{print.tbats} \title{TBATS model (Exponential smoothing state space model with Box-Cox transformation, ARMA errors, Trend and Seasonal components)} \usage{ tbats( y, use.box.cox = NULL, use.trend = NULL, use.damped.trend = NULL, seasonal.periods = NULL, use.arma.errors = TRUE, use.parallel = length(y) > 1000, num.cores = 2, bc.lower = 0, bc.upper = 1, biasadj = FALSE, model = NULL, ... ) } \arguments{ \item{y}{The time series to be forecast. Can be \code{numeric}, \code{msts} or \code{ts}. Only univariate time series are supported.} \item{use.box.cox}{\code{TRUE/FALSE} indicates whether to use the Box-Cox transformation or not. 
If \code{NULL} then both are tried and the best fit is selected by AIC.} \item{use.trend}{\code{TRUE/FALSE} indicates whether to include a trend or not. If \code{NULL} then both are tried and the best fit is selected by AIC.} \item{use.damped.trend}{\code{TRUE/FALSE} indicates whether to include a damping parameter in the trend or not. If \code{NULL} then both are tried and the best fit is selected by AIC.} \item{seasonal.periods}{If \code{y} is \code{numeric} then seasonal periods can be specified with this parameter.} \item{use.arma.errors}{\code{TRUE/FALSE} indicates whether to include ARMA errors or not. If \code{TRUE} the best fit is selected by AIC. If \code{FALSE} then the selection algorithm does not consider ARMA errors.} \item{use.parallel}{\code{TRUE/FALSE} indicates whether or not to use parallel processing.} \item{num.cores}{The number of parallel processes to be used if using parallel processing. If \code{NULL} then the number of logical cores is detected and all available cores are used.} \item{bc.lower}{The lower limit (inclusive) for the Box-Cox transformation.} \item{bc.upper}{The upper limit (inclusive) for the Box-Cox transformation.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If TRUE, point forecasts and fitted values are mean forecast. Otherwise, these points can be considered the median of the forecast densities.} \item{model}{Output from a previous call to \code{tbats}. If model is passed, this same model is fitted to \code{y} without re-estimating any parameters.} \item{...}{Additional arguments to be passed to \code{auto.arima} when choosing an ARMA(p, q) model for the errors. (Note that xreg will be ignored, as will any arguments concerning seasonality and differencing, but arguments controlling the values of p and q will be used.)} } \value{ An object with class \code{c("tbats", "bats")}. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{tbats} and associated functions. The fitted model is designated TBATS(omega, p,q, phi, <m1,k1>,...,<mJ,kJ>), where omega is the Box-Cox parameter and phi is the damping parameter; the error is modelled as an ARMA(p,q) process, m1,...,mJ list the seasonal periods used in the model, and k1,...,kJ are the corresponding numbers of Fourier terms used for each seasonality. } \description{ Fits a TBATS model applied to \code{y}, as described in De Livera, Hyndman & Snyder (2011). Parallel processing is used by default to speed up the computations. } \examples{ \dontrun{ fit <- tbats(USAccDeaths) plot(forecast(fit)) taylor.fit <- tbats(taylor) plot(forecast(taylor.fit))} } \references{ De Livera, A.M., Hyndman, R.J., & Snyder, R. D. (2011), Forecasting time series with complex seasonal patterns using exponential smoothing, \emph{Journal of the American Statistical Association}, \bold{106}(496), 1513-1527. } \seealso{ \code{\link{tbats.components}}. } \author{ Slava Razbash and Rob J Hyndman } \keyword{ts} forecast/man/sindexf.Rd0000644000176200001440000000164613553673010014574 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/season.R \name{sindexf} \alias{sindexf} \title{Forecast seasonal index} \usage{ sindexf(object, h) } \arguments{ \item{object}{Output from \code{\link[stats]{decompose}} or \link[stats]{stl}.} \item{h}{Number of periods ahead to forecast} } \value{ Time series } \description{ Returns a vector containing the seasonal index for \code{h} future periods.
If the seasonal index is non-periodic, it uses the last values of the index. } \examples{ uk.stl <- stl(UKDriverDeaths,"periodic") uk.sa <- seasadj(uk.stl) uk.fcast <- holt(uk.sa,36) seasf <- sindexf(uk.stl,36) uk.fcast$mean <- uk.fcast$mean + seasf uk.fcast$lower <- uk.fcast$lower + cbind(seasf,seasf) uk.fcast$upper <- uk.fcast$upper + cbind(seasf,seasf) uk.fcast$x <- UKDriverDeaths plot(uk.fcast,main="Forecasts from Holt's method with seasonal adjustment") } \author{ Rob J Hyndman } \keyword{ts} forecast/man/forecast.stl.Rd0000644000176200001440000001633513617634745015561 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/mstl.R \name{forecast.stl} \alias{forecast.stl} \alias{stlm} \alias{forecast.stlm} \alias{stlf} \title{Forecasting using stl objects} \usage{ \method{forecast}{stl}( object, method = c("ets", "arima", "naive", "rwdrift"), etsmodel = "ZZN", forecastfunction = NULL, h = frequency(object$time.series) * 2, level = c(80, 95), fan = FALSE, lambda = NULL, biasadj = NULL, xreg = NULL, newxreg = NULL, allow.multiplicative.trend = FALSE, ... ) stlm( y, s.window = 13, robust = FALSE, method = c("ets", "arima"), modelfunction = NULL, model = NULL, etsmodel = "ZZN", lambda = NULL, biasadj = FALSE, xreg = NULL, allow.multiplicative.trend = FALSE, x = y, ... ) \method{forecast}{stlm}( object, h = 2 * object$m, level = c(80, 95), fan = FALSE, lambda = object$lambda, biasadj = NULL, newxreg = NULL, allow.multiplicative.trend = FALSE, ... ) stlf( y, h = frequency(x) * 2, s.window = 13, t.window = NULL, robust = FALSE, lambda = NULL, biasadj = FALSE, x = y, ... ) } \arguments{ \item{object}{An object of class \code{stl} or \code{stlm}. Usually the result of a call to \code{\link[stats]{stl}} or \code{stlm}.} \item{method}{Method to use for forecasting the seasonally adjusted series.} \item{etsmodel}{The ets model specification passed to \code{\link[forecast]{ets}}. By default it allows any non-seasonal model. If \code{method!="ets"}, this argument is ignored.} \item{forecastfunction}{An alternative way of specifying the function for forecasting the seasonally adjusted series. If \code{forecastfunction} is not \code{NULL}, then \code{method} is ignored. Otherwise \code{method} is used to specify the forecasting method to be used.} \item{h}{Number of periods for forecasting.} \item{level}{Confidence level for prediction intervals.} \item{fan}{If \code{TRUE}, level is set to seq(51,99,by=3). This is suitable for fan plots.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{xreg}{Historical regressors to be used in \code{\link[forecast]{auto.arima}()} when \code{method=="arima"}.} \item{newxreg}{Future regressors to be used in \code{\link[forecast]{forecast.Arima}()}.} \item{allow.multiplicative.trend}{If TRUE, then ETS models with multiplicative trends are allowed. 
Otherwise, only additive or no trend ETS models are permitted.} \item{...}{Other arguments passed to \code{forecast.stl}, \code{modelfunction} or \code{forecastfunction}.} \item{y}{A univariate numeric time series of class \code{ts}} \item{s.window}{Either the character string ``periodic'' or the span (in lags) of the loess window for seasonal extraction.} \item{robust}{If \code{TRUE}, robust fitting will used in the loess procedure within \code{\link[stats]{stl}}.} \item{modelfunction}{An alternative way of specifying the function for modelling the seasonally adjusted series. If \code{modelfunction} is not \code{NULL}, then \code{method} is ignored. Otherwise \code{method} is used to specify the time series model to be used.} \item{model}{Output from a previous call to \code{stlm}. If a \code{stlm} model is passed, this same model is fitted to y without re-estimating any parameters.} \item{x}{Deprecated. Included for backwards compatibility.} \item{t.window}{A number to control the smoothness of the trend. See \code{\link[stats]{stl}} for details.} } \value{ \code{stlm} returns an object of class \code{stlm}. The other functions return objects of class \code{forecast}. There are many methods for working with \code{\link{forecast}} objects including \code{summary} to obtain and print a summary of the results, while \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features. } \description{ Forecasts of STL objects are obtained by applying a non-seasonal forecasting method to the seasonally adjusted data and re-seasonalizing using the last year of the seasonal component. } \details{ \code{stlm} takes a time series \code{y}, applies an STL decomposition, and models the seasonally adjusted data using the model passed as \code{modelfunction} or specified using \code{method}. It returns an object that includes the original STL decomposition and a time series model fitted to the seasonally adjusted data. This object can be passed to the \code{forecast.stlm} for forecasting. \code{forecast.stlm} forecasts the seasonally adjusted data, then re-seasonalizes the results by adding back the last year of the estimated seasonal component. \code{stlf} combines \code{stlm} and \code{forecast.stlm}. It takes a \code{ts} argument, applies an STL decomposition, models the seasonally adjusted data, reseasonalizes, and returns the forecasts. However, it allows more general forecasting methods to be specified via \code{forecastfunction}. \code{forecast.stl} is similar to \code{stlf} except that it takes the STL decomposition as the first argument, instead of the time series. Note that the prediction intervals ignore the uncertainty associated with the seasonal component. They are computed using the prediction intervals from the seasonally adjusted series, which are then reseasonalized using the last year of the seasonal component. The uncertainty in the seasonal component is ignored. The time series model for the seasonally adjusted data can be specified in \code{stlm} using either \code{method} or \code{modelfunction}. The \code{method} argument provides a shorthand way of specifying \code{modelfunction} for a few special cases. More generally, \code{modelfunction} can be any function with first argument a \code{ts} object, that returns an object that can be passed to \code{\link{forecast}}. 
For example, \code{modelfunction=ar} uses the \code{\link{ar}} function for modelling the seasonally adjusted series. The forecasting method for the seasonally adjusted data can be specified in \code{stlf} and \code{forecast.stl} using either \code{method} or \code{forecastfunction}. The \code{method} argument provides a shorthand way of specifying \code{forecastfunction} for a few special cases. More generally, \code{forecastfunction} can be any function with first argument a \code{ts} object, and other arguments \code{h} and \code{level}, which returns an object of class \code{\link{forecast}}. For example, \code{forecastfunction=thetaf} uses the \code{\link{thetaf}} function for forecasting the seasonally adjusted series. } \examples{ tsmod <- stlm(USAccDeaths, modelfunction = ar) plot(forecast(tsmod, h = 36)) decomp <- stl(USAccDeaths, s.window = "periodic") plot(forecast(decomp)) plot(stlf(AirPassengers, lambda = 0)) } \seealso{ \code{\link[stats]{stl}}, \code{\link{forecast.ets}}, \code{\link{forecast.Arima}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/checkresiduals.Rd0000644000176200001440000000355613553673010016127 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/checkresiduals.R \name{checkresiduals} \alias{checkresiduals} \title{Check that residuals from a time series model look like white noise} \usage{ checkresiduals(object, lag, df = NULL, test, plot = TRUE, ...) } \arguments{ \item{object}{Either a time series model, a forecast object, or a time series (assumed to be residuals).} \item{lag}{Number of lags to use in the Ljung-Box or Breusch-Godfrey test. If missing, it is set to \code{min(10,n/5)} for non-seasonal data, and \code{min(2m, n/5)} for seasonal data, where \code{n} is the length of the series, and \code{m} is the seasonal period of the data. It is further constrained to be at least \code{df+3} where \code{df} is the degrees of freedom of the model. This ensures there are at least 3 degrees of freedom used in the chi-squared test.} \item{df}{Number of degrees of freedom for fitted model, required for the Ljung-Box or Breusch-Godfrey test. Ignored if the degrees of freedom can be extracted from \code{object}.} \item{test}{Test to use for serial correlation. By default, if \code{object} is of class \code{lm}, then \code{test="BG"}. Otherwise, \code{test="LB"}. Setting \code{test=FALSE} will prevent the test results being printed.} \item{plot}{Logical. If \code{TRUE}, will produce the plot.} \item{...}{Other arguments are passed to \code{\link{ggtsdisplay}}.} } \value{ None } \description{ If \code{plot=TRUE}, produces a time plot of the residuals, the corresponding ACF, and a histogram. If the degrees of freedom for the model can be determined and \code{test} is not \code{FALSE}, the output from either a Ljung-Box test or Breusch-Godfrey test is printed.
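The default choice of \code{lag} described above amounts, roughly, to the following rule (an illustrative sketch of the documented defaults, not the package's internal code; here \code{n} is the series length, \code{m} the seasonal period, and \code{df} the model degrees of freedom):
\preformatted{
## Illustrative sketch of the default lag rule (assumed from the
## argument description above, not the actual internals)
lag <- if (m > 1) min(2 * m, n / 5) else min(10, n / 5)
lag <- max(lag, df + 3)  # keep at least 3 degrees of freedom in the test
}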
} \examples{ fit <- ets(WWWusage) checkresiduals(fit) } \seealso{ \code{\link{ggtsdisplay}}, \code{\link[stats]{Box.test}}, \code{\link[lmtest]{bgtest}} } \author{ Rob J Hyndman } forecast/man/thetaf.Rd0000644000176200001440000000546513566112560014414 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/theta.R \name{thetaf} \alias{thetaf} \title{Theta method forecast} \usage{ thetaf( y, h = ifelse(frequency(y) > 1, 2 * frequency(y), 10), level = c(80, 95), fan = FALSE, x = y ) } \arguments{ \item{y}{a numeric vector or time series of class \code{ts}} \item{h}{Number of periods for forecasting} \item{level}{Confidence levels for prediction intervals.} \item{fan}{If TRUE, level is set to seq(51,99,by=3). This is suitable for fan plots.} \item{x}{Deprecated. Included for backwards compatibility.} } \value{ An object of class "\code{forecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{thetaf}. An object of class \code{"forecast"} is a list containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} \item{fitted}{Fitted values (one-step forecasts)} } \description{ Returns forecasts and prediction intervals for a theta method forecast. } \details{ The theta method of Assimakopoulos and Nikolopoulos (2000) is equivalent to simple exponential smoothing with drift. This is demonstrated in Hyndman and Billah (2003). The series is tested for seasonality using the test outlined in A&N. If deemed seasonal, the series is seasonally adjusted using a classical multiplicative decomposition before applying the theta method. The resulting forecasts are then reseasonalized. Prediction intervals are computed using the underlying state space model. More general theta methods are available in the \code{\link[forecTheta]{forecTheta}} package. } \examples{ nile.fcast <- thetaf(Nile) plot(nile.fcast) } \references{ Assimakopoulos, V. and Nikolopoulos, K. (2000). The theta model: a decomposition approach to forecasting. \emph{International Journal of Forecasting} \bold{16}, 521-530. Hyndman, R.J., and Billah, B. (2003) Unmasking the Theta method. \emph{International J. Forecasting}, \bold{19}, 287-290.
} \seealso{ \code{\link[stats]{arima}}, \code{\link{meanf}}, \code{\link{rwf}}, \code{\link{ses}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/fitted.Arima.Rd0000644000176200001440000000325513617634745015456 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/arfima.R, R/arima.R, R/bats.R, R/ets.R, % R/modelAR.R, R/nnetar.R, R/tbats.R \name{fitted.ARFIMA} \alias{fitted.ARFIMA} \alias{fitted.Arima} \alias{fitted.forecast_ARIMA} \alias{fitted.ar} \alias{fitted.bats} \alias{fitted.ets} \alias{fitted.modelAR} \alias{fitted.nnetar} \alias{fitted.tbats} \title{h-step in-sample forecasts for time series models.} \usage{ \method{fitted}{ARFIMA}(object, h = 1, ...) \method{fitted}{Arima}(object, h = 1, ...) \method{fitted}{ar}(object, ...) \method{fitted}{bats}(object, h = 1, ...) \method{fitted}{ets}(object, h = 1, ...) \method{fitted}{modelAR}(object, h = 1, ...) \method{fitted}{nnetar}(object, h = 1, ...) \method{fitted}{tbats}(object, h = 1, ...) } \arguments{ \item{object}{An object of class "\code{Arima}", "\code{bats}", "\code{tbats}", "\code{ets}" or "\code{nnetar}".} \item{h}{The number of steps to forecast ahead.} \item{...}{Other arguments.} } \value{ A time series of the h-step forecasts. } \description{ Returns h-step forecasts for the data used in fitting the model. } \examples{ fit <- ets(WWWusage) plot(WWWusage) lines(fitted(fit), col='red') lines(fitted(fit, h=2), col='green') lines(fitted(fit, h=3), col='blue') legend("topleft", legend=paste("h =",1:3), col=2:4, lty=1) } \seealso{ \code{\link{forecast.Arima}}, \code{\link{forecast.bats}}, \code{\link{forecast.tbats}}, \code{\link{forecast.ets}}, \code{\link{forecast.nnetar}}, \code{\link{residuals.Arima}}, \code{\link{residuals.bats}}, \code{\link{residuals.tbats}}, \code{\link{residuals.ets}}, \code{\link{residuals.nnetar}}. } \author{ Rob J Hyndman & Mitchell O'Hara-Wild } \keyword{ts} forecast/man/auto.arima.Rd0000644000176200001440000001462213566112560015174 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/newarima2.R \name{auto.arima} \alias{auto.arima} \title{Fit best ARIMA model to univariate time series} \usage{ auto.arima( y, d = NA, D = NA, max.p = 5, max.q = 5, max.P = 2, max.Q = 2, max.order = 5, max.d = 2, max.D = 1, start.p = 2, start.q = 2, start.P = 1, start.Q = 1, stationary = FALSE, seasonal = TRUE, ic = c("aicc", "aic", "bic"), stepwise = TRUE, nmodels = 94, trace = FALSE, approximation = (length(x) > 150 | frequency(x) > 12), method = NULL, truncate = NULL, xreg = NULL, test = c("kpss", "adf", "pp"), test.args = list(), seasonal.test = c("seas", "ocsb", "hegy", "ch"), seasonal.test.args = list(), allowdrift = TRUE, allowmean = TRUE, lambda = NULL, biasadj = FALSE, parallel = FALSE, num.cores = 2, x = y, ... ) } \arguments{ \item{y}{a univariate time series} \item{d}{Order of first-differencing. If missing, will choose a value based on \code{test}.} \item{D}{Order of seasonal-differencing. 
If missing, will choose a value based on \code{seasonal.test}.} \item{max.p}{Maximum value of p} \item{max.q}{Maximum value of q} \item{max.P}{Maximum value of P} \item{max.Q}{Maximum value of Q} \item{max.order}{Maximum value of p+q+P+Q if model selection is not stepwise.} \item{max.d}{Maximum number of non-seasonal differences} \item{max.D}{Maximum number of seasonal differences} \item{start.p}{Starting value of p in stepwise procedure.} \item{start.q}{Starting value of q in stepwise procedure.} \item{start.P}{Starting value of P in stepwise procedure.} \item{start.Q}{Starting value of Q in stepwise procedure.} \item{stationary}{If \code{TRUE}, restricts search to stationary models.} \item{seasonal}{If \code{FALSE}, restricts search to non-seasonal models.} \item{ic}{Information criterion to be used in model selection.} \item{stepwise}{If \code{TRUE}, will do stepwise selection (faster). Otherwise, it searches over all models. Non-stepwise selection can be very slow, especially for seasonal models.} \item{nmodels}{Maximum number of models considered in the stepwise search.} \item{trace}{If \code{TRUE}, the list of ARIMA models considered will be reported.} \item{approximation}{If \code{TRUE}, estimation is via conditional sums of squares and the information criteria used for model selection are approximated. The final model is still computed using maximum likelihood estimation. Approximation should be used for long time series or a high seasonal period to avoid excessive computation times.} \item{method}{Fitting method: maximum likelihood or minimize conditional sum-of-squares. The default (unless there are missing values) is to use conditional-sum-of-squares to find starting values, then maximum likelihood. Can be abbreviated.} \item{truncate}{An integer value indicating how many observations to use in model selection. The last \code{truncate} values of the series are used to select a model when \code{truncate} is not \code{NULL} and \code{approximation=TRUE}. All observations are used if either \code{truncate=NULL} or \code{approximation=FALSE}.} \item{xreg}{Optionally, a numerical vector or matrix of external regressors, which must have the same number of rows as \code{y}. (It should not be a data frame.)} \item{test}{Type of unit root test to use. See \code{\link{ndiffs}} for details.} \item{test.args}{Additional arguments to be passed to the unit root test.} \item{seasonal.test}{This determines which method is used to select the number of seasonal differences. The default method is to use a measure of seasonal strength computed from an STL decomposition. Other possibilities involve seasonal unit root tests.} \item{seasonal.test.args}{Additional arguments to be passed to the seasonal unit root test. See \code{\link{nsdiffs}} for details.} \item{allowdrift}{If \code{TRUE}, models with drift terms are considered.} \item{allowmean}{If \code{TRUE}, models with a non-zero mean are considered.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts.
If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{parallel}{If \code{TRUE} and \code{stepwise = FALSE}, then the specification search is done in parallel. This can give a significant speedup on multicore machines.} \item{num.cores}{Allows the user to specify the number of parallel processes to be used if \code{parallel = TRUE} and \code{stepwise = FALSE}. If \code{NULL}, then the number of logical cores is automatically detected and all available cores are used.} \item{x}{Deprecated. Included for backwards compatibility.} \item{...}{Additional arguments to be passed to \code{\link[stats]{arima}}.} } \value{ Same as for \code{\link{Arima}} } \description{ Returns best ARIMA model according to either AIC, AICc or BIC value. The function conducts a search over possible models within the order constraints provided. } \details{ The default arguments are designed for rapid estimation of models for many time series. If you are analysing just one time series, and can afford to take some more time, it is recommended that you set \code{stepwise=FALSE} and \code{approximation=FALSE}. Non-stepwise selection can be slow, especially for seasonal data. The stepwise algorithm outlined in Hyndman & Khandakar (2008) is used except that the default method for selecting seasonal differences is now based on an estimate of seasonal strength (Wang, Smith & Hyndman, 2006) rather than the Canova-Hansen test. There are also some other minor variations to the algorithm described in Hyndman and Khandakar (2008). } \examples{ fit <- auto.arima(WWWusage) plot(forecast(fit,h=20)) } \references{ Hyndman, RJ and Khandakar, Y (2008) "Automatic time series forecasting: The forecast package for R", \emph{Journal of Statistical Software}, \bold{26}(3). Wang, X, Smith, KA, Hyndman, RJ (2006) "Characteristic-based clustering for time series data", \emph{Data Mining and Knowledge Discovery}, \bold{13}(3), 335-364. } \seealso{ \code{\link{Arima}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/geom_forecast.Rd0000644000176200001440000001037413566112560015751 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ggplot.R \docType{data} \name{StatForecast} \alias{StatForecast} \alias{GeomForecast} \alias{geom_forecast} \title{Forecast plot} \format{An object of class \code{StatForecast} (inherits from \code{Stat}, \code{ggproto}, \code{gg}) of length 3.} \usage{ StatForecast GeomForecast geom_forecast( mapping = NULL, data = NULL, stat = "forecast", position = "identity", na.rm = FALSE, show.legend = NA, inherit.aes = TRUE, PI = TRUE, showgap = TRUE, series = NULL, ... ) } \arguments{ \item{mapping}{Set of aesthetic mappings created by \code{\link{aes}} or \code{\link{aes_}}. If specified and \code{inherit.aes = TRUE} (the default), it is combined with the default mapping at the top level of the plot. You must supply \code{mapping} if there is no plot mapping.} \item{data}{The data to be displayed in this layer. There are three options: If \code{NULL}, the default, the data is inherited from the plot data as specified in the call to \code{\link{ggplot}}. A \code{data.frame}, or other object, will override the plot data. All objects will be fortified to produce a data frame. See \code{\link{fortify}} for which variables will be created. A \code{function} will be called with a single argument, the plot data.
The return value must be a \code{data.frame}, and will be used as the layer data.} \item{stat}{The stat object used to calculate the data.} \item{position}{Position adjustment, either as a string, or the result of a call to a position adjustment function.} \item{na.rm}{If \code{FALSE} (the default), removes missing values with a warning. If \code{TRUE} silently removes missing values.} \item{show.legend}{logical. Should this layer be included in the legends? \code{NA}, the default, includes if any aesthetics are mapped. \code{FALSE} never includes, and \code{TRUE} always includes.} \item{inherit.aes}{If \code{FALSE}, overrides the default aesthetics, rather than combining with them. This is most useful for helper functions that define both data and aesthetics and shouldn't inherit behaviour from the default plot specification, e.g. \code{\link{borders}}.} \item{PI}{If \code{FALSE}, confidence intervals will not be plotted, giving only the forecast line.} \item{showgap}{If \code{showgap=FALSE}, the gap between the historical observations and the forecasts is removed.} \item{series}{Matches an unidentified forecast layer with a coloured object on the plot.} \item{...}{Additional arguments for \code{\link{forecast.ts}}, other arguments are passed on to \code{\link{layer}}. These are often aesthetics, used to set an aesthetic to a fixed value, like \code{color = "red"} or \code{alpha = .5}. They may also be parameters to the paired geom/stat.} } \value{ A layer for a ggplot graph. } \description{ Generates forecasts from \code{forecast.ts} and adds them to the plot. Forecasts can be modified by passing forecast-specific arguments, as described above. } \details{ Multivariate forecasting is supported by having each time series on a different group. You can also pass \code{geom_forecast} a \code{forecast} object to add it to the plot. The aesthetics required for the forecasting to work include the forecast observations on the y axis, and the \code{time} of the observations on the x axis. Refer to the examples below. To automatically set up aesthetics, use \code{autoplot}.
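Forecast-specific arguments such as \code{h} and \code{level} are forwarded to \code{\link{forecast.ts}}; a minimal sketch (assuming \pkg{ggplot2} is attached and the default forecasting behaviour of \code{forecast.ts}):
\preformatted{
## Minimal sketch: h and level are passed through to forecast.ts()
library(ggplot2)
autoplot(AirPassengers) + geom_forecast(h = 24, level = c(80, 95))
}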
} \examples{ \dontrun{ library(ggplot2) autoplot(USAccDeaths) + geom_forecast() lungDeaths <- cbind(mdeaths, fdeaths) autoplot(lungDeaths) + geom_forecast() # Using fortify.ts p <- ggplot(aes(x=x, y=y), data=USAccDeaths) p <- p + geom_line() p + geom_forecast() # Without fortify.ts data <- data.frame(USAccDeaths=as.numeric(USAccDeaths), time=as.numeric(time(USAccDeaths))) p <- ggplot(aes(x=time, y=USAccDeaths), data=data) p <- p + geom_line() p + geom_forecast() p + geom_forecast(h=60) p <- ggplot(aes(x=time, y=USAccDeaths), data=data) p + geom_forecast(level=c(70,98)) p + geom_forecast(level=c(70,98),colour="lightblue") #Add forecasts to multivariate series with colour groups lungDeaths <- cbind(mdeaths, fdeaths) autoplot(lungDeaths) + geom_forecast(forecast(mdeaths), series="mdeaths") } } \seealso{ \code{\link{forecast}}, \code{\link[ggplot2]{ggproto}} } \author{ Mitchell O'Hara-Wild } \keyword{datasets} forecast/man/residuals.forecast.Rd0000644000176200001440000000610713617634745016746 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/residuals.R \name{residuals.forecast} \alias{residuals.forecast} \alias{residuals.ar} \alias{residuals.Arima} \alias{residuals.forecast_ARIMA} \alias{residuals.bats} \alias{residuals.tbats} \alias{residuals.ets} \alias{residuals.ARFIMA} \alias{residuals.nnetar} \alias{residuals.stlm} \alias{residuals.tslm} \title{Residuals for various time series models} \usage{ \method{residuals}{forecast}(object, type = c("innovation", "response"), ...) \method{residuals}{ar}(object, type = c("innovation", "response"), ...) \method{residuals}{Arima}(object, type = c("innovation", "response", "regression"), h = 1, ...) \method{residuals}{bats}(object, type = c("innovation", "response"), h = 1, ...) \method{residuals}{tbats}(object, type = c("innovation", "response"), h = 1, ...) \method{residuals}{ets}(object, type = c("innovation", "response"), h = 1, ...) \method{residuals}{ARFIMA}(object, type = c("innovation", "response"), ...) \method{residuals}{nnetar}(object, type = c("innovation", "response"), h = 1, ...) \method{residuals}{stlm}(object, type = c("innovation", "response"), ...) \method{residuals}{tslm}(object, type = c("innovation", "response", "deviance"), ...) } \arguments{ \item{object}{An object containing a time series model of class \code{ar}, \code{Arima}, \code{bats}, \code{ets}, \code{arfima}, \code{nnetar} or \code{stlm}. If \code{object} is of class \code{forecast}, then the function will return \code{object$residuals} if it exists, otherwise it returns the differences between the observations and their fitted values.} \item{type}{Type of residual.} \item{...}{Other arguments not used.} \item{h}{If \code{type='response'}, then the fitted values are computed for \code{h}-step forecasts.} } \value{ A \code{ts} object. } \description{ Returns time series of residuals from a fitted model. } \details{ Innovation residuals correspond to the white noise process that drives the evolution of the time series model. Response residuals are the difference between the observations and the fitted values (equivalent to \code{h}-step forecasts). For functions with no \code{h} argument, \code{h=1}. For homoscedastic models, the innovation residuals and the response residuals for \code{h=1} are identical. Regression residuals are available for regression models with ARIMA errors, and are equal to the original data minus the effect of the regression variables. 
If there are no regression variables, the errors will be identical to the original series (possibly adjusted to have zero mean). \code{arima.errors} is a deprecated function which is identical to \code{residuals.Arima(object, type="regression")}. For \code{nnetar} objects, when \code{type="innovation"} and \code{lambda} is used, a matrix of time-series consisting of the residuals from each of the fitted neural networks is returned. } \examples{ fit <- Arima(lynx,order=c(4,0,0), lambda=0.5) plot(residuals(fit)) plot(residuals(fit, type='response')) } \seealso{ \code{\link{fitted.Arima}}, \code{\link{checkresiduals}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/forecast.Arima.Rd0000644000176200001440000001064413566112560015772 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/arfima.R, R/arima.R \name{forecast.fracdiff} \alias{forecast.fracdiff} \alias{forecast.Arima} \alias{forecast.forecast_ARIMA} \alias{forecast.ar} \title{Forecasting using ARIMA or ARFIMA models} \usage{ \method{forecast}{fracdiff}( object, h = 10, level = c(80, 95), fan = FALSE, lambda = object$lambda, biasadj = NULL, ... ) \method{forecast}{Arima}( object, h = ifelse(object$arma[5] > 1, 2 * object$arma[5], 10), level = c(80, 95), fan = FALSE, xreg = NULL, lambda = object$lambda, bootstrap = FALSE, npaths = 5000, biasadj = NULL, ... ) \method{forecast}{ar}( object, h = 10, level = c(80, 95), fan = FALSE, lambda = NULL, bootstrap = FALSE, npaths = 5000, biasadj = FALSE, ... ) } \arguments{ \item{object}{An object of class "\code{Arima}", "\code{ar}" or "\code{fracdiff}". Usually the result of a call to \code{\link[stats]{arima}}, \code{\link{auto.arima}}, \code{\link[stats]{ar}}, \code{\link{arfima}} or \code{\link[fracdiff]{fracdiff}}.} \item{h}{Number of periods for forecasting. If \code{xreg} is used, \code{h} is ignored and the number of forecast periods is set to the number of rows of \code{xreg}.} \item{level}{Confidence level for prediction intervals.} \item{fan}{If \code{TRUE}, level is set to \code{seq(51,99,by=3)}. This is suitable for fan plots.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{...}{Other arguments.} \item{xreg}{Future values of the regression variables (for class \code{Arima} objects only). A numerical vector or matrix of external regressors; it should not be a data frame.} \item{bootstrap}{If \code{TRUE}, then prediction intervals are computed using simulation with resampled errors.} \item{npaths}{Number of sample paths used in computing simulated prediction intervals when \code{bootstrap=TRUE}.} } \value{ An object of class "\code{forecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{forecast.Arima}.
An object of class "\code{forecast}" is a list containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} \item{fitted}{Fitted values (one-step forecasts)} } \description{ Returns forecasts and other information for univariate ARIMA models. } \details{ For \code{Arima} or \code{ar} objects, the function calls \code{\link[stats]{predict.Arima}} or \code{\link[stats]{predict.ar}} and constructs an object of class "\code{forecast}" from the results. For \code{fracdiff} objects, the calculations are all done within \code{\link{forecast.fracdiff}} using the equations given by Peiris and Perera (1988). } \examples{ fit <- Arima(WWWusage,c(3,1,0)) plot(forecast(fit)) library(fracdiff) x <- fracdiff.sim( 100, ma=-.4, d=.3)$series fit <- arfima(x) plot(forecast(fit,h=30)) } \references{ Peiris, M. & Perera, B. (1988), On prediction with fractionally differenced ARIMA models, \emph{Journal of Time Series Analysis}, \bold{9}(3), 215-220. } \seealso{ \code{\link[stats]{predict.Arima}}, \code{\link[stats]{predict.ar}}, \code{\link{auto.arima}}, \code{\link{Arima}}, \code{\link[stats]{arima}}, \code{\link[stats]{ar}}, \code{\link{arfima}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/tbats.components.Rd0000644000176200001440000000236213553673010016431 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/tbats.R \name{tbats.components} \alias{tbats.components} \title{Extract components of a TBATS model} \usage{ tbats.components(x) } \arguments{ \item{x}{A tbats object created by \code{\link{tbats}}.} } \value{ A multiple time series (\code{mts}) object. The first series is the observed time series. The second series is the trend component of the fitted model. Series three onwards are the seasonal components of the fitted model with one time series for each of the seasonal components. All components are transformed using estimated Box-Cox parameter. } \description{ Extract the level, slope and seasonal components of a TBATS model. The extracted components are Box-Cox transformed using the estimated transformation parameter. } \examples{ \dontrun{ fit <- tbats(USAccDeaths, use.parallel=FALSE) components <- tbats.components(fit) plot(components)} } \references{ De Livera, A.M., Hyndman, R.J., & Snyder, R. D. (2011), Forecasting time series with complex seasonal patterns using exponential smoothing, \emph{Journal of the American Statistical Association}, \bold{106}(496), 1513-1527. } \seealso{ \code{\link{tbats}}. } \author{ Slava Razbash and Rob J Hyndman } \keyword{ts} forecast/man/gold.Rd0000644000176200001440000000052313553673010014052 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/data.R \docType{data} \name{gold} \alias{gold} \title{Daily morning gold prices} \format{Time series data} \usage{ gold } \description{ Daily morning gold prices in US dollars. 1 January 1985 -- 31 March 1989. 
} \examples{ tsdisplay(gold) } \keyword{datasets} forecast/man/subset.ts.Rd0000644000176200001440000000404513566112560015064 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/subset.R \name{subset.ts} \alias{subset.ts} \alias{subset.msts} \title{Subsetting a time series} \usage{ \method{subset}{ts}( x, subset = NULL, month = NULL, quarter = NULL, season = NULL, start = NULL, end = NULL, ... ) \method{subset}{msts}(x, subset = NULL, start = NULL, end = NULL, ...) } \arguments{ \item{x}{a univariate time series to be subsetted} \item{subset}{optional logical expression indicating elements to keep; missing values are taken as false. \code{subset} must be the same length as \code{x}.} \item{month}{Numeric or character vector of months to retain. Partial matching on month names is used.} \item{quarter}{Numeric or character vector of quarters to retain.} \item{season}{Numeric vector of seasons to retain.} \item{start}{Index of start of contiguous subset.} \item{end}{Index of end of contiguous subset.} \item{...}{Other arguments, unused.} } \value{ If \code{subset} is used, a numeric vector is returned with no ts attributes. If \code{start} and/or \code{end} are used, a ts object is returned consisting of \code{x[start:end]}, with the appropriate time series attributes retained. Otherwise, a ts object is returned with frequency equal to the length of \code{month}, \code{quarter} or \code{season}. } \description{ Various types of subsetting of a time series. Allows subsetting by index values (unlike \code{\link[stats]{window}}). Also allows extraction of the values of a specific season or subset of seasons in each year; for example, all values for the month of May can be extracted from a time series. } \details{ If character values for months are used, either upper or lower case may be used, and partial unambiguous names are acceptable. Possible character values for quarters are \code{"Q1"}, \code{"Q2"}, \code{"Q3"}, and \code{"Q4"}. } \examples{ plot(subset(gas,month="November")) subset(woolyrnq,quarter=3) subset(USAccDeaths, start=49) } \seealso{ \code{\link[base]{subset}}, \code{\link[stats]{window}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/mstl.Rd0000644000176200001440000000313713617634745014125 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/mstl.R \name{mstl} \alias{mstl} \title{Multiple seasonal decomposition} \usage{ mstl(x, lambda = NULL, iterate = 2, s.window = 13, ...) } \arguments{ \item{x}{Univariate time series of class \code{msts} or \code{ts}.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, the data are transformed before the model is estimated.} \item{iterate}{Number of iterations to use to refine the seasonal component.} \item{s.window}{Seasonal windows to be used in the decompositions. If scalar, the same value is used for all seasonal components. Otherwise, it should be a vector of the same length as the number of seasonal components.} \item{...}{Other arguments are passed to \code{\link[stats]{stl}}.} } \description{ Decompose a time series into seasonal, trend and remainder components. Seasonal components are estimated iteratively using STL. Multiple seasonal periods are allowed. The trend component is computed for the last iteration of STL. Non-seasonal time series are decomposed into trend and remainder only.
In this case, \code{\link[stats]{supsmu}} is used to estimate the trend. Optionally, the time series may be Box-Cox transformed before decomposition. Unlike \code{\link[stats]{stl}}, \code{mstl} is completely automated. } \examples{ library(ggplot2) mstl(taylor) \%>\% autoplot() mstl(AirPassengers, lambda = "auto") \%>\% autoplot() } \seealso{ \code{\link[stats]{stl}}, \code{\link[stats]{supsmu}} } forecast/man/BoxCox.lambda.Rd0000644000176200001440000000305613566112560015554 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/guerrero.R \name{BoxCox.lambda} \alias{BoxCox.lambda} \title{Automatic selection of Box Cox transformation parameter} \usage{ BoxCox.lambda(x, method = c("guerrero", "loglik"), lower = -1, upper = 2) } \arguments{ \item{x}{a numeric vector or time series of class \code{ts}} \item{method}{Choose method to be used in calculating lambda.} \item{lower}{Lower limit for possible lambda values.} \item{upper}{Upper limit for possible lambda values.} } \value{ a number indicating the Box-Cox transformation parameter. } \description{ If \code{method=="guerrero"}, Guerrero's (1993) method is used, where lambda minimizes the coefficient of variation for subseries of \code{x}. } \details{ If \code{method=="loglik"}, the value of lambda is chosen to maximize the profile log likelihood of a linear model fitted to \code{x}. For non-seasonal data, a linear time trend is fitted while for seasonal data, a linear time trend with seasonal dummy variables is used. } \examples{ lambda <- BoxCox.lambda(AirPassengers,lower=0) air.fit <- Arima(AirPassengers, order=c(0,1,1), seasonal=list(order=c(0,1,1),period=12), lambda=lambda) plot(forecast(air.fit)) } \references{ Box, G. E. P. and Cox, D. R. (1964) An analysis of transformations. \emph{JRSS B} \bold{26} 211--246. Guerrero, V.M. (1993) Time-series analysis supported by power transformations. \emph{Journal of Forecasting}, \bold{12}, 37--48. } \seealso{ \code{\link{BoxCox}} } \author{ Leanne Chhay and Rob J Hyndman } \keyword{ts} forecast/man/seasadj.Rd0000644000176200001440000000177513553673010014551 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/seasadj.R \name{seasadj} \alias{seasadj} \alias{seasadj.stl} \alias{seasadj.mstl} \alias{seasadj.decomposed.ts} \alias{seasadj.tbats} \alias{seasadj.seas} \title{Seasonal adjustment} \usage{ seasadj(object, ...) \method{seasadj}{stl}(object, ...) \method{seasadj}{mstl}(object, ...) \method{seasadj}{decomposed.ts}(object, ...) \method{seasadj}{tbats}(object, ...) \method{seasadj}{seas}(object, ...) } \arguments{ \item{object}{Object created by \code{\link[stats]{decompose}}, \code{\link[stats]{stl}} or \code{\link{tbats}}.} \item{...}{Other arguments not currently used.} } \value{ Univariate time series. } \description{ Returns seasonally adjusted data constructed by removing the seasonal component. } \examples{ plot(AirPassengers) lines(seasadj(decompose(AirPassengers,"multiplicative")),col=4) } \seealso{ \code{\link[stats]{stl}}, \code{\link[stats]{decompose}}, \code{\link{tbats}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/DESCRIPTION0000644000176200001440000000521313617774362013607 0ustar liggesusersPackage: forecast Version: 8.11 Title: Forecasting Functions for Time Series and Linear Models Description: Methods and tools for displaying and analysing univariate time series forecasts including exponential smoothing via state space models and automatic ARIMA modelling. 
Depends: R (>= 3.0.2), Imports: colorspace, fracdiff, ggplot2 (>= 2.2.1), graphics, lmtest, magrittr, nnet, parallel, Rcpp (>= 0.11.0), stats, timeDate, tseries, urca, zoo Suggests: uroot, knitr, rmarkdown, rticles, testthat, methods LinkingTo: Rcpp (>= 0.11.0), RcppArmadillo (>= 0.2.35) LazyData: yes ByteCompile: TRUE Authors@R: c( person("Rob", "Hyndman", email="Rob.Hyndman@monash.edu", role=c("aut", "cre", "cph"), comment = c(ORCID = "0000-0002-2140-5352")), person("George", "Athanasopoulos", role='aut'), person("Christoph", "Bergmeir", role="aut", comment = c(ORCID = "0000-0002-3665-9021")), person("Gabriel", "Caceres", role="aut"), person("Leanne", "Chhay", role='aut'), person("Mitchell", "O'Hara-Wild", role="aut", comment=c(ORCID = "0000-0001-6729-7695")), person("Fotios", "Petropoulos", role='aut', comment=c(ORCID = "0000-0003-3039-4955")), person("Slava", "Razbash", role="aut"), person("Earo", "Wang", role="aut"), person("Farah", "Yasmeen", role='aut', comment=c(ORCID="0000-0002-1479-5401")), person("R Core Team", role=c('ctb','cph')), person("Ross", "Ihaka", role=c('ctb','cph')), person("Daniel", "Reid", role='ctb'), person("David", "Shaub", role='ctb'), person("Yuan", "Tang", role='ctb', comment = c(ORCID = "0000-0001-5243-233X")), person("Zhenyu","Zhou", role='ctb') ) BugReports: https://github.com/robjhyndman/forecast/issues License: GPL-3 URL: http://pkg.robjhyndman.com/forecast, https://github.com/robjhyndman/forecast VignetteBuilder: knitr Encoding: UTF-8 RoxygenNote: 7.0.2 NeedsCompilation: yes Packaged: 2020-02-09 06:22:31 UTC; robjhyndman Author: Rob Hyndman [aut, cre, cph] (), George Athanasopoulos [aut], Christoph Bergmeir [aut] (), Gabriel Caceres [aut], Leanne Chhay [aut], Mitchell O'Hara-Wild [aut] (), Fotios Petropoulos [aut] (), Slava Razbash [aut], Earo Wang [aut], Farah Yasmeen [aut] (), R Core Team [ctb, cph], Ross Ihaka [ctb, cph], Daniel Reid [ctb], David Shaub [ctb], Yuan Tang [ctb] (), Zhenyu Zhou [ctb] Maintainer: Rob Hyndman Repository: CRAN Date/Publication: 2020-02-09 12:20:02 UTC forecast/build/0000755000176200001440000000000013617722446013173 5ustar liggesusersforecast/build/vignette.rds0000644000176200001440000000041513617722446015532 0ustar liggesusersforecast/tests/0000755000176200001440000000000013553673010013225 5ustar liggesusersforecast/tests/testthat/0000755000176200001440000000000013617774362015102 5ustar liggesusersforecast/tests/testthat/test-tslm.R0000644000176200001440000001203213553673010017152 0ustar liggesusers# A unit test for tslm function if (require(testthat)) { context("Tests on building model in tslm") mv_y <- ts(cbind(rnorm(120, 0, 3) + 1:120 + 20 * sin(2 * pi * (1:120) / 12), rnorm(120, 3, 7) + 1:120 + 16 * sin(2 * pi * (1:120 + 6) / 12)), frequency = 12) mv_x <- ts(cbind(rnorm(120, 0, 8) + (1:120) / 2 + 42 * sin(2 * pi * (1:120) / 12), rnorm(120, 3, 7) + (1:120) * -1 + 20 * sin(2 * pi * (1:120 + 6) / 12)), frequency = 12) v_y <- ts(rnorm(120, 0, 8) + (1:120) / 2 + 12 * sin(2 * pi * (1:120) / 12), frequency = 12) v_x <- ts(rnorm(120, 0, 1) + (1:120) * (-1) + 28 * sin(2 * pi * (1:120) / 12), frequency = 12) data <- datamat(mv_y, mv_x, v_y, v_x, fourier(v_y, 3)) test_that("tests on model building with univariate time series", { fit1 <- tslm(v_y ~ trend + season, data = data) fit2 <- tslm(v_y ~ trend + season, data = data, lambda = 2, biasadj = FALSE) fit3 <- tslm(v_y ~ trend + season, data = data, lambda = 2, biasadj =
TRUE) expect_false(identical(fit2$fitted.values, fit3$fitted.values)) fit2 <- tslm(v_y ~ trend + season, data = data.frame(trend = rnorm(120))) expect_false(identical(fit1$model, fit2$model)) fit2 <- tslm(v_y ~ trend + season) expect_that(names(fit1), equals(names(fit2))) expect_that(fit1$model, equals(fit2$model)) expect_that(fit1$coefficients, equals(fit2$coefficients)) fit1 <- tslm(USAccDeaths ~ trend + season, data = USAccDeaths) fit2 <- tslm(USAccDeaths ~ trend + season) expect_that(names(fit1), equals(names(fit2))) expect_that(fit1$model, equals(fit2$model)) expect_that(fit1$coefficients, equals(fit2$coefficients)) expect_warning(fit3 <- tslm( USAccDeaths ~ trend + season, data = USAccDeaths, subset = time(USAccDeaths) %% 1 < 0.1 )) fit <- tslm(USAccDeaths ~ trend + season + trend * season, data = USAccDeaths) expect_true("trend:season" %in% attr(fit$terms, "term.labels")) }) test_that("tslm parity with lm", { fit1 <- tslm(v_y ~ v_x + fourier(v_y, 3), data = data.frame(v_y = v_y)) fit2 <- lm(v_y ~ v_x + fourier(v_y, 3), data = data.frame(v_y = v_y)) expect_equal(fit1$coefficients, fit2$coefficients) expect_equal(fit1$model, fit2$model) }) test_that("tests on subsetting data", { a <- mv_y[, 1] expect_warning(fit1 <- tslm(mv_y ~ trend, subset = a < 20), "Subset has been assumed contiguous") expect_error(fit2 <- tslm(mv_y ~ trend, subset = subset(mv_y, mv_y[, 1] < 20))) expect_warning(tslm(v_y ~ trend + season + trend * season, subset = v_y < 100), "Subset has been assumed contiguous") }) test_that("tests on model building with multivariate time series", { fit1 <- tslm(mv_y ~ trend + season) fit2 <- tslm(mv_y ~ trend + season, lambda = 0.5) expect_false(identical(fit1$coefficients, fit2$coefficients)) fit3 <- tslm(mv_y ~ trend + season, lambda = 0.5, biasadj = TRUE) expect_false(identical(fit2$fitted.values, fit3$fitted.values)) fit2 <- tslm(mv_y ~ trend + season, data = data) expect_that(names(fit1), equals(names(fit2))) expect_that(fit1$model, equals(fit2$model)) expect_that(fit1$coefficients, equals(fit2$coefficients)) expect_warning(fit3 <- tslm(mv_y ~ trend + season, subset = mv_y[, 1] < 1), "Subset has been assumed contiguous") expect_warning(fit4 <- tslm(mv_y ~ trend + season, data = data, subset = mv_y[, 1] < 1), "Subset has been assumed contiguous") expect_that(names(fit3), equals(names(fit4))) expect_that(fit3$model, equals(fit4$model)) expect_that(fit3$coefficients, equals(fit4$coefficients)) }) test_that("tests with bad input", { expect_error(tslm(mpg ~ cyl, data = mtcars), "Not time series data") expect_error(tslm(tmp2 ~ trend + season + trend * season, subset = subset(tmp2, month = "January")), "Non-seasonal data cannot be modelled using a seasonal factor") }) test_that("forecast.lm", { fit1 <- tslm(v_y ~ trend + season, lambda = 2, biasadj = FALSE) fit2 <- tslm(v_y ~ trend + season, lambda = 2, biasadj = TRUE) fcast1 <- forecast(fit1, h = 60, biasadj = FALSE) fcast2 <- forecast(fit2, h = 60, biasadj = TRUE) expect_false(identical(fcast1$mean, fcast2$mean)) fred <- tslm(ldeaths ~ trend + season, lambda = 0) fc <- forecast(fred) }) test_that("Unusual usage", { fit1 <- tslm(v_y ~ trend + v_x + I(v_x ^ 2) + fourier(v_x, 3)) # forecast(fit1, newdata=data.frame(v_x=ts(1:2,freq=12))) # tslm(v_y ~ trend + I(v_x) + I(v_x^2) + fourier(v_x, 3), data=data) # tslm(v_y ~ trend + season + I(v_x) + I(v_x^2) + fourier(ts(season, freq=12), 3)) # fit2 <- tslm(v_y ~ trend + season + I(v_x)*fourier(v_x,3)) # forecast(fit2, newdata=data.frame(v_x=ts(1:2,freq=12))) # tslm(v_y ~ trend + season +
I(v_x)*fourier(v_x,3),data=data) }) test_that("Missing values", { USMissingDeaths <- USAccDeaths USMissingDeaths[c(1,44, 72)] <- NA timetrend <- 1:72 fit <- tslm(USMissingDeaths ~ season + timetrend) expect_equal(sum(is.na(residuals(fit))), 3) fc <- forecast(fit, newdata = data.frame(timetrend = 73)) expect_length(fc$mean, 1) }) } forecast/tests/testthat/test-nnetar.R0000644000176200001440000001317113553673010017457 0ustar liggesusers# A unit test for nnetar.R if (require(testthat)) { context("Testing nnetar") test_that("Tests for nnetar", { oilnnet <- nnetar(airmiles, lambda = 0.15) woolyrnqnnet <- nnetar(woolyrnq, repeats = 10) expect_output(print(woolyrnqnnet), regexp = "Series: woolyrnq") expect_true(length(forecast(oilnnet)$mean) == 10) expect_true(length(forecast(woolyrnqnnet)$mean) == 2 * frequency(woolyrnq)) # # Test with single-column xreg (which might be a vector) uscnnet <- nnetar(woolyrnq, xreg = 1:length(woolyrnq)) expect_true(all(dim(uscnnet$xreg) == c(119, 1))) expect_true(length(forecast(uscnnet, xreg = 120:130)$mean) == 11) # Test default size with and without xreg uscnnet <- nnetar(woolyrnq, p = 2, P = 2) expect_output( print(uscnnet), regexp = "NNAR(2,2,2)", fixed = TRUE ) expect_output( print(uscnnet), regexp = "4-2-1 network", fixed = TRUE ) expect_true(uscnnet$size == 2) uscnnet <- nnetar(woolyrnq, p = 2, P = 2, xreg = 1:119, repeats = 10) expect_output( print(uscnnet), regexp = "NNAR(2,2,3)", fixed = TRUE ) expect_output( print(uscnnet), regexp = "5-3-1 network", fixed = TRUE ) expect_true(uscnnet$size == 3) # Test P=0 when m>1 uscnnet <- nnetar(woolyrnq, p = 4, P = 0) expect_true(uscnnet$size == 2) expect_output(print(uscnnet), regexp = "NNAR(4,2)", fixed = TRUE) # Test overlapping p & P uscnnet <- nnetar(woolyrnq, p = 4, P = 2) expect_true(uscnnet$size == 3) expect_output( print(uscnnet), regexp = "NNAR(4,2,3)", fixed = TRUE ) expect_output( print(uscnnet), regexp = "5-3-1 network", fixed = TRUE ) # Test with multiple-column xreg creditnnet <- nnetar( wineind, xreg = cbind(bizdays(wineind), fourier(wineind, 1)) ) expect_warning(forecast(creditnnet, h = 2, xreg = matrix(2, 2, 3))$mean, "different column names") %>% expect_length(2L) # Test if h doesn't match xreg expect_warning(forecast(creditnnet, h = 5, xreg = matrix(2, 2, 3))$mean, "different column names") %>% expect_length(2L) # Test that P is ignored if m=1 expect_warning(creditnnet <- nnetar(WWWusage, p = 2, P = 4, xreg = 1:length(WWWusage))) expect_output( print(creditnnet), regexp = "NNAR(2,2)", fixed = TRUE ) # Test fixed size creditnnet <- nnetar(WWWusage, p = 1, P = 1, xreg = 1:length(WWWusage), size = 12) expect_true(creditnnet$size == 12) expect_output(print(creditnnet), regexp = "NNAR(1,12)", fixed = TRUE) # Test passing arguments to nnet expect_warning(creditnnet <- nnetar( WWWusage, p = 2, P = 4, xreg = 1:length(WWWusage), decay = 0.1 )) expect_output( print(creditnnet), regexp = "decay=0.1", fixed = TRUE ) ## Test output format correct oilnnet <- nnetar(airmiles, p = 1, size = 0, skip = TRUE, Wts = c(0, 1), maxit = 0, repeats = 10) expect_true(all.equal(oilnnet$fitted[-1], airmiles[-length(airmiles)])) ## Test output format correct when NAs present oilna <- airmiles oilna[12] <- NA suppressWarnings(oilnnet <- nnetar(oilna, p = 1, size = 0, skip = TRUE, Wts = c(0, 1), maxit = 0)) expect_true(all.equal(oilnnet$fitted[-c(1, 12, 13)], oilna[-c(11, 12, length(oilna))])) ## Test model argument fit1 <- nnetar( WWWusage, xreg = 1:length(WWWusage), lambda = 2, decay = 0.5, maxit = 25, repeats = 7 ) fit2 <-
nnetar(WWWusage, xreg = 1:length(WWWusage), model = fit1) # Check some model parameters expect_true(identical(fit1$p, fit2$p)) expect_true(identical(fit1$lambda, fit2$lambda)) expect_true(identical(fit1$nnetargs, fit2$nnetargs)) # Check fitted values are all the same expect_true(identical(fitted(fit1), fitted(fit2))) # Check residuals all the same expect_true(identical(residuals(fit1), residuals(fit2))) # Check number of neural nets expect_true(identical(length(fit1$model), length(fit2$model))) # Check neural network weights all the same expect_true(identical(fit1$model[[1]]$wts, fit2$model[[1]]$wts)) expect_true(identical(fit1$model[[7]]$wts, fit2$model[[7]]$wts)) # Check subset argument oilnnet <- nnetar(airmiles, subset = 11:20) expect_true(identical(which(!is.na(fitted(oilnnet))), 11:20)) oilnnet <- nnetar(airmiles, subset = c(rep(F, 10), rep(T, 10), rep(F, length(airmiles) - 20))) expect_true(identical(which(!is.na(fitted(oilnnet))), 11:20)) ## Check short and constant data expect_warning(nnetfit <- nnetar(rep(1, 10), p=2, P=0, size=1, repeats=1, lambda = 0.1), "Constant data") expect_true(nnetfit$p == 1) expect_true(is.null(nnetfit$lambda)) expect_true(is.null(nnetfit$scalex)) expect_error(nnetfit <- nnetar(rnorm(2), p=1, P=0, size=1, repeats=1), "Not enough data") expect_silent(nnetfit <- nnetar(rnorm(3), p=1, P=0, size=1, repeats=1)) expect_true(nnetfit$p == 1) expect_silent(nnetfit <- nnetar(rnorm(3), p=2, P=0, size=1, repeats=1)) expect_true(nnetfit$p == 2) expect_warning(nnetfit <- nnetar(rnorm(3), p=3, P=0, size=1, repeats=1), "short series") expect_true(nnetfit$p == 2) expect_warning(nnetfit <- nnetar(rnorm(3), p=4, P=0, size=1, repeats=1), "short series") expect_true(nnetfit$p == 2) expect_warning(nnetfit <- nnetar(rnorm(10), xreg=rep(1, 10), p=2, P=0, size=1, repeats=1, lambda = 0.1), "Constant xreg") expect_true(is.null(nnetfit$scalexreg)) expect_warning(nnetfit <- nnetar(rnorm(3), xreg=matrix(c(1, 2, 3, 1, 1, 1), ncol=2), p=1, P=0, size=1, repeats=1, lambda = 0.1), "Constant xreg") expect_true(is.null(nnetfit$scalexreg)) }) } forecast/tests/testthat/test-ets.R0000644000176200001440000000425713553673010016770 0ustar liggesusers# A unit test for ets function if (require(testthat)) { context("Tests on input") test_that("tests for some arguments in ets", { fit <- ets(wineind, model = "ZZM") comp <- paste0(fit$components[1:3], collapse = "") expect_that(comp, equals("MAM")) }) test_that("tests for some arguments in ets", { fit <- ets(wineind, model = "MAM", alpha = 0.1611) expect_that(as.numeric(fit$par["alpha"]), equals(0.1611)) }) test_that("refit ets model to new data", { fit <- ets(wineind, model = "MAM", alpha = 0.1611) parnames <- c("alpha", "beta", "gamma") par <- fit$par[parnames] expect_identical(ets(wineind, model = fit, alpha = 0.1611)$par[parnames], par) expect_identical(ets(wineind, model = fit, alpha = 0.1611, beta = NA)$par[parnames], par) expect_identical(ets(wineind, model = fit, alpha = 0.1611, gamma = NA)$par[parnames], par) expect_identical(ets(wineind, model = fit, alpha = 0.1611, phi = NA)$par[parnames], par) expect_identical(ets(wineind, model = fit, alpha = 0.1611, use.initial.values = TRUE)$par, fit$par) }) test_that("class methods for ets work", { fit <- ets(wineind, model = "MAM", alpha = 0.1611) expect_output(summary(fit), "Smoothing parameters") expect_equal(length(coef(fit)), 16L) expect_lt(abs(logLik(fit) + 1802.9586023), 1e-5) plot(fit) }) test_that("test ets() for errors", { expect_warning(ets(taylor)) fit1 <- ets(airmiles, lambda = 0.15, 
biasadj = FALSE) expect_gt(fit1$par["alpha"], 0.95) fit2 <- ets(airmiles, lambda = 0.15, biasadj = TRUE) expect_lt(fit2$par["beta"], 1e-3) expect_false(identical(fit1$fitted, fit2$fitted)) expect_error(ets(taylor, model = "ZZA")) }) test_that("forecast.ets()", { fit <- ets(airmiles, lambda = 0.15, biasadj = TRUE) fcast1 <- forecast(fit, PI = FALSE) expect_true(is.null(fcast1$upper) & is.null(fcast1$lower)) fcast1 <- forecast(fit, biasadj = FALSE) fcast2 <- forecast(fit, biasadj = TRUE) expect_false(identical(fcast1$mean, fcast2$mean)) fcast <- forecast(fit, simulate = TRUE) expect_true(!is.null(fcast$upper) & !is.null(fcast$lower)) expect_true(all(fcast$upper > fcast$lower)) }) } forecast/tests/testthat/test-calendar.R0000644000176200001440000000134513553673010017741 0ustar liggesusers# A unit test for calendar.R if (require(testthat)) { context("Testing calendar functions") test_that("Tests for bizdays()", { expect_error(bizdays(1:20)) b1 <- bizdays(woolyrnq, FinCenter = "New York") b2 <- bizdays(woolyrnq, FinCenter = "London") b3 <- bizdays(woolyrnq, FinCenter = "Zurich") expect_equal(sum(abs(b1 - b2)), 109L) expect_equal(sum(abs(b1 - b3)), 144L) expect_equal(sum(abs(b2 - b3)), 117L) b1 <- bizdays(gas, FinCenter = "NERC") b2 <- bizdays(gas, FinCenter = "Tokyo") expect_equal(sum(abs(b1 - b2)), 244L) }) test_that("Tests for easter()", { expect_true(length(easter(woolyrnq)) == length(woolyrnq)) expect_true(length(easter(wineind)) == length(wineind)) }) } forecast/tests/testthat/test-spline.R0000644000176200001440000000115613553673010017462 0ustar liggesusers# A unit test for spline.R if (require(testthat)) { context("Testing splinef()") test_that("Tests for splinef()", { plot.splineforecast(splinef(airmiles)) fit1 <- splinef(woolyrnq, lambda = 0.2, biasadj = FALSE) fit2 <- splinef(woolyrnq, lambda = 0.2, biasadj = TRUE) expect_false(identical(fit1$mean, fit2$mean)) splinef(woolyrnq, method = "mle") splinef(WWWusage, method = "mle") expect_true(all(as.numeric(splinef(WWWusage, fan = TRUE)$mean) == as.numeric(splinef(WWWusage)$mean))) expect_error(splinef(woolyrnq, level = 110)) expect_error(splinef(woolyrnq, level = -10)) }) } forecast/tests/testthat/test-forecast.R0000644000176200001440000000305713553673010020000 0ustar liggesusers# A unit test for forecast.R if (require(testthat)) { context("Test forecast.R") test_that("tests for findfrequency()", { expect_true(frequency(airmiles) == findfrequency(as.numeric(airmiles))) expect_false(frequency(wineind) == findfrequency(as.numeric(wineind))) expect_true(frequency(woolyrnq) == findfrequency(as.numeric(woolyrnq))) expect_true(frequency(gas) == findfrequency(as.numeric(gas))) }) test_that("tests forecast.ts()", { fc1 <- as.numeric(forecast(as.numeric(airmiles), find.frequency = TRUE)$mean) fc2 <- as.numeric(forecast(airmiles)$mean) expect_true(all(fc1 == fc2)) }) test_that("tests summary.forecast() and forecast.forecast()", { WWWusageforecast <- forecast(WWWusage) expect_output(summary(WWWusageforecast), regexp = "Forecast method:") expect_true(all(predict(WWWusageforecast)$mean == forecast(WWWusageforecast)$mean)) }) # test_that("tests plot.forecast()", { # # Fit several types of models for plotting # batsmod <- bats(woolyrnq) # nnetmod <- nnetar(woolyrnq) # tslmmod <- tslm(woolyrnq ~ trend + season) # nnetfc<- forecast(nnetmod) # batsfc <- forecast(batsmod) # tslmfc <- forecast(tslmmod) # skip_on_travis() # # Plot the forecasts # expect_that(plot(nnetfc), not(throws_error())) # expect_that(plot(batsfc), not(throws_error())) # 
expect_that(plot(batsfc, shaded = FALSE), not(throws_error())) # expect_that(plot(tslmfc, PI = FALSE), not(throws_error())) # expect_that(plot(forecast(tslmmod, h = 0)), not(throws_error())) # }) } forecast/tests/testthat/test-season.R0000644000176200001440000000710713553673010017462 0ustar liggesusers# A unit test for na.interp() and tsclean() if (require(testthat)) { test_that("tests for monthdays", { expect_error(monthdays(rnorm(10))) expect_true(all(monthdays(ts(rep(100, 12), f = 12)) == c(31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31))) expect_true(all(monthdays(ts(rep(1, 4), f = 4)) == c(90, 91, 92, 92))) # Test leapyears expect_true(monthdays(ts(rep(1, 48), f = 12))[38] == 29) expect_true(monthdays(ts(rep(1, 16), f = 4))[13] == 91) }) test_that("tests for seasonaldummy", { expect_error(seasonaldummy(1)) testseries <- ts(rep(1:7, 5), f = 7) dummymat <- seasonaldummy(testseries) expect_true(length(testseries) == nrow(dummymat)) expect_true(ncol(dummymat) == 6) expect_true(all(seasonaldummy(wineind)[1:11, ] == diag(11))) }) test_that("tests for seasonaldummyf", { expect_error(seasonaldummy(1)) expect_warning(dummymat <- seasonaldummyf(wineind, 4), "deprecated") expect_true(nrow(dummymat) == 4) expect_true(ncol(dummymat) == 11) }) test_that("tests for fourier", { expect_error(fourier(1)) testseries <- ts(rep(1:7, 5), f = 7) fouriermat <- fourier(testseries, 3) expect_true(length(testseries) == nrow(fouriermat)) expect_true(ncol(fouriermat) == 6) expect_true(all(grep("-7", colnames(fouriermat)))) }) test_that("tests for fourierf", { expect_warning(fouriermat <- fourierf(wineind, 4, 10), "deprecated") expect_true(nrow(fouriermat) == 10) expect_true(ncol(fouriermat) == 8) }) test_that("tests for stlm", { expect_warning(stlm(ts(rep(5, 24), f = 4), etsmodel = "ZZZ")) }) test_that("tests for forecast.stlm", { expect_error(forecast.stlm(stlm(wineind), newxreg = matrix(rep(1, 24), ncol = 2))) stlmfit1 <- stlm(woolyrnq, method = "ets") stlmfit2 <- stlm(woolyrnq, method = "arima", approximation = FALSE) fcfit1 <- forecast(stlmfit1) fcfit2 <- forecast(stlmfit1, fan = TRUE) expect_true(all(fcfit2$level == seq(from = 51, to = 99, by = 3))) fcstlmfit3 <- forecast(stlmfit2) expect_true(all(round(forecast(stlm(ts(rep(100, 120), f = 12)))$mean, 10) == 100)) expect_true(all(round(forecast(stlm(ts(rep(100, 120), f = 12), lambda = 1))$mean, 10) == 100)) }) test_that("tests for stlf", { expect_true(all(forecast.stlm(stlm(wineind))$mean == stlf(wineind)$mean)) expect_true(all(forecast.stlm(stlm(wineind, lambda = .5))$mean == stlf(wineind, lambda = .5)$mean)) fit1 <- stlf(wineind, lambda = .2, biasadj = FALSE) fit2 <- stlf(wineind, lambda = .2, biasadj = TRUE) expect_false(identical(fit1$mean, fit2$mean)) # Constant series should not error series <- ts(rep(950, 20), f = 4) constantForecast <- expect_error(stlf(series), NA) # Small eps expect_true(all(abs(constantForecast$mean - mean(series)) < 10 ^ -8)) y <- ts(rep(1:7, 3), frequency = 7) expect_equal(c(stlf(y)$mean), rep(1:7, 2)) }) test_that("tests for ma", { testseries <- ts(1:20, f = 4) expect_true(frequency(ma(testseries, order = 4)) == frequency(testseries)) maseries <- ma(testseries, order = 3) expect_true(identical(which(is.na(maseries)), c(1L, 20L))) expect_true(all(abs(maseries[2:19] - 2:19) < 1e-14)) maseries <- ma(testseries, order = 2, centre = FALSE) expect_true(identical(which(is.na(maseries)), 20L)) expect_true(all(abs(maseries[1:19] - 1:19 - 0.5) < 1e-14)) maseries <- ma(testseries, order = 2, centre =
TRUE) expect_true(identical(which(is.na(maseries)), c(1L, 20L))) expect_true(all(abs(maseries[2:19] - 2:19) < 1e-14)) }) } forecast/tests/testthat/test-ggplot.R0000644000176200001440000000533013553673010017462 0ustar liggesusers# A unit test for ggplot support if (require(testthat)) { context("forecast ggplot tests") test_that("tests for autoplot/gg functions", { library(ggplot2) lungDeaths <- cbind(mdeaths, fdeaths) ggAcf(wineind) autoplot(Acf(wineind)) expect_identical(ggAcf(wineind, plot = FALSE)$acf, acf(wineind, plot = FALSE, lag.max = 24)$acf) ggPacf(wineind) autoplot(Pacf(wineind)) expect_identical(ggPacf(wineind, plot = FALSE)$acf, acf(wineind, plot = FALSE, type = "partial", lag.max = 24)$acf) ggCcf(mdeaths, fdeaths) autoplot(Ccf(mdeaths, fdeaths)) expect_identical(ggCcf(mdeaths, fdeaths, plot = FALSE)$acf, ccf(mdeaths, fdeaths, plot = FALSE, type = "correlation", lag.max = 24)$acf) arimafit <- Arima(USAccDeaths, order = c(1, 1, 1), seasonal = c(1, 1, 1)) autoplot(arimafit) autoplot(arimafit, type = "ma") autoplot(arimafit, type = "ar") arfit <- ar(USAccDeaths) autoplot(arfit) decomposefit <- decompose(USAccDeaths) autoplot(decomposefit) etsfit <- ets(USAccDeaths, model = "ANA") autoplot(etsfit) structfit <- StructTS(USAccDeaths) autoplot(structfit) stlfit <- stl(USAccDeaths, s.window = "periodic") autoplot(stlfit) # seasfit <- seasonal::seas(USAccDeaths) # autoplot(seasfit) etsfcast <- forecast(etsfit) autoplot(etsfcast) autoplot(etsfcast, PI = FALSE) lmfit <- lm(mpg ~ disp, data = mtcars) lmfcast <- forecast(lmfit, newdata = data.frame(disp = 214)) autoplot(lmfcast) mfcast <- forecast(lungDeaths) autoplot(mfcast) ggtsdisplay(USAccDeaths, plot.type = "spectrum") ggtsdisplay(USAccDeaths, plot.type = "partial") ggtsdisplay(USAccDeaths, plot.type = "histogram") ggtsdisplay(USAccDeaths, plot.type = "scatter", theme = ggplot2::theme_bw()) gglagplot(woolyrnq, lags = 2) gglagplot(lungDeaths, lags = 2) gglagplot(WWWusage, do.lines = FALSE, colour = FALSE, labels = TRUE) gglagchull(woolyrnq, lags = 4) ggmonthplot(woolyrnq) ggseasonplot(woolyrnq, year.labels = TRUE, year.labels.left = TRUE) ggseasonplot(USAccDeaths, polar = TRUE, col = 1:5, continuous = TRUE) splinefit <- splinef(airmiles, h = 5) autoplot(splinefit) autoplot(USAccDeaths) autoplot(lungDeaths) autoplot(lungDeaths, facet = TRUE) autoplot(USAccDeaths) + geom_forecast() autoplot(USAccDeaths) + autolayer(etsfcast, series = "ETS") autoplot(lungDeaths) + geom_forecast() autoplot(lungDeaths) + autolayer(mfcast, series = c("mdeaths", "fdeaths")) autoplot(lungDeaths) + autolayer(mfcast) autoplot(lungDeaths) + autolayer(mfcast, series = TRUE) autoplot(lungDeaths, facet = TRUE) + geom_forecast() gghistogram(USAccDeaths, add.kde = TRUE) }) } forecast/tests/testthat/test-refit.R0000644000176200001440000001222113553673010017274 0ustar liggesusers# A unit test for re-fitting models if (require(testthat)) { context("Re-fitting models") test_that("tests for re-fitting models", { # arima fit <- Arima(mdeaths, c(1, 0, 0), c(2, 0, 0), include.mean = FALSE, include.drift = TRUE) refit <- Arima(fdeaths, model = fit) expect_true(identical(fit$coef, refit$coef)) expect_false(identical(fit$x, refit$x)) expect_false(identical(fit$fitted, refit$fitted)) expect_false(identical(fit$residuals, refit$residuals)) refit_same <- Arima(mdeaths, model = fit) expect_true(identical(fit$coef, refit_same$coef)) expect_true(identical(fit$x, refit_same$x)) expect_true(all.equal(fit$fitted, refit_same$fitted)) expect_true(all.equal(fit$residuals, 
refit_same$residuals)) # arfima fit <- arfima(mdeaths) refit <- arfima(fdeaths, model = fit) expect_true(identical(fit$ar, refit$ar)) expect_true(identical(fit$ma, refit$ma)) expect_false(identical(fit$x, refit$x)) expect_false(identical(fit$fitted, refit$fitted)) expect_false(identical(fit$residuals, refit$residuals)) refit_same <- arfima(mdeaths, model = fit) expect_true(identical(fit$ar, refit_same$ar)) expect_true(identical(fit$ma, refit_same$ma)) expect_true(identical(fit$x, refit_same$x)) expect_true(identical(fit$fitted, refit_same$fitted)) expect_true(identical(fit$residuals, refit_same$residuals)) # dshw fit <- dshw(mdeaths, period1 = 4, period2 = 12) refit <- dshw(fdeaths, model = fit) expect_false(identical(fit$x, refit$x)) expect_false(identical(fit$fitted, refit$fitted)) expect_false(identical(fit$residuals, refit$residuals)) refit_same <- dshw(mdeaths, model = fit) expect_true(identical(fit$model, refit_same$model)) expect_true(identical(fit$x, refit_same$x)) expect_true(identical(fit$fitted, refit_same$fitted)) expect_true(identical(fit$residuals, refit_same$residuals)) # ets fit <- ets(mdeaths) refit <- ets(fdeaths, model = fit, use.initial.values = TRUE) expect_true(identical(fit$fit, refit$fit)) expect_false(identical(fit$x, refit$x)) expect_false(identical(fit$fitted, refit$fitted)) expect_false(identical(fit$residuals, refit$residuals)) refit_same <- ets(mdeaths, model = fit, use.initial.values = TRUE) expect_true(identical(fit$x, refit_same$x)) expect_true(identical(fit$fitted, refit_same$fitted)) expect_true(identical(residuals(fit), residuals(refit_same))) # stlm fit <- stlm(mdeaths) refit <- stlm(fdeaths, model = fit) expect_true(identical(fit$model$par, refit$model$par)) expect_false(identical(fit$x, refit$x)) expect_false(identical(fit$fitted, refit$fitted)) expect_false(identical(fit$residuals, refit$residuals)) refit_same <- stlm(mdeaths, model = fit) expect_true(identical(fit$model$par, refit_same$model$par)) expect_true(identical(fit$x, refit_same$x)) expect_true(identical(fit$fitted, refit_same$fitted)) expect_true(identical(fit$residuals, refit_same$residuals)) # bats fit <- bats(mdeaths) refit <- bats(fdeaths, model = fit) expect_true(identical(fit$parameters, refit$parameters)) expect_false(identical(fit$x, refit$x)) expect_false(identical(fit$fitted.values, refit$fitted.values)) expect_false(identical(residuals(fit), residuals(refit))) refit_same <- bats(mdeaths, model = fit) expect_true(identical(fit$model$par, refit_same$model$par)) expect_true(identical(fit$x, refit_same$x)) expect_true(identical(fit$fitted, refit_same$fitted)) expect_true(identical(residuals(fit), residuals(refit_same))) # tbats fit <- tbats(mdeaths) refit <- tbats(fdeaths, model = fit) expect_true(identical(fit$parameters, refit$parameters)) expect_false(identical(fit$x, refit$x)) expect_false(identical(fit$fitted.values, refit$fitted.values)) expect_false(identical(residuals(fit), residuals(refit))) refit_same <- tbats(mdeaths, model = fit) expect_true(identical(fit$model$par, refit_same$model$par)) expect_true(identical(fit$x, refit_same$x)) expect_true(identical(fit$fitted, refit_same$fitted)) expect_true(identical(residuals(fit), residuals(refit_same))) # nnetar fit <- nnetar(mdeaths) refit <- nnetar(fdeaths, model = fit) expect_false(identical(fit$x, refit$x)) expect_false(identical(fit$fitted, refit$fitted)) expect_false(identical(fit$residuals, refit$residuals)) refit_same <- nnetar(mdeaths, model = fit) expect_true(identical(fit$x, refit_same$x)) 
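# Refitting with 'model = fit' reuses the previously estimated parameters
# without re-estimation, so applying the old model to the same series should
# reproduce the fitted values and residuals exactly, as checked below.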
expect_true(identical(fit$fitted, refit_same$fitted)) expect_true(identical(residuals(fit), residuals(refit_same))) # forecast.ts fit <- forecast(mdeaths) refit <- forecast(fdeaths, model = fit, use.initial.values = TRUE) expect_false(identical(fit$x, refit$x)) expect_false(identical(fit$fitted, refit$fitted)) expect_false(identical(fit$residuals, refit$residuals)) refit_same <- forecast(mdeaths, model = fit, use.initial.values = TRUE) expect_true(identical(fit$x, refit_same$x)) expect_true(identical(fit$fitted, refit_same$fitted)) expect_true(identical(residuals(fit), residuals(refit_same))) }) } forecast/tests/testthat/test-newarima2.R0000644000176200001440000000377713553673010020070 0ustar liggesusers# A unit test functions in newarima2.R if (require(testthat)) { test_that("test auto.arima() and associated methods", { expect_warning(auto.arima(rep(1, 100), stepwise = TRUE, parallel = TRUE)) set.seed(345) testseries1 <- ts(rnorm(100) + 1:100, f = 0.1) xregmat <- matrix(runif(300), ncol = 3) expect_true(frequency(forecast(auto.arima(testseries1))) == 1) fit1 <- auto.arima(testseries1, xreg = xregmat, allowdrift = FALSE) expect_true(all(xregmat == fit1$xreg)) testseries2 <- ts(rep(100, 120), f = 12) xregmat <- matrix(runif(240), ncol = 2) expect_output(print(auto.arima(testseries2, xreg = xregmat)), regexp = "Series: testseries2") expect_output(summary(auto.arima(testseries2, xreg = xregmat, approximation = TRUE, stepwise = FALSE)), regexp = "Series: testseries2") expect_output(print(auto.arima(ts(testseries2, f = 4), approximation = TRUE, trace = TRUE)), regexp = "ARIMA") fit1 <- auto.arima(testseries1, stepwise = FALSE, lambda = 2, biasadj = FALSE) fit2 <- auto.arima(testseries1, stepwise = FALSE, lambda = 2, biasadj = TRUE) expect_false(identical(fit1$fitted, fit2$fitted)) }) test_that("test parallel = TRUE and stepwise = FALSE for auto.arima()", { skip_on_travis() expect_equal(auto.arima(WWWusage, parallel = TRUE, stepwise = FALSE)$arma, c(3L, 0L, 0L, 0L, 1L, 1L, 0L)) }) test_that("tests for ndiffs()", { expect_true(ndiffs(AirPassengers, test = "kpss") == 1) expect_true(ndiffs(AirPassengers, test = "adf") == 1) expect_true(ndiffs(AirPassengers, test = "pp") == 1) }) test_that("tests for nsdiffs()", { expect_true(nsdiffs(AirPassengers, test = "seas") == 1) expect_true(nsdiffs(AirPassengers, test = "ocsb") == 1) expect_error(nsdiffs(airmiles)) expect_true(nsdiffs(rep(1, 100)) == 0) expect_warning(nsdiffs(ts(rnorm(10), f = 0.1))) skip_if_not_installed("uroot") expect_true(nsdiffs(AirPassengers, test = "hegy") == 1) expect_true(nsdiffs(AirPassengers, test = "ch") == 0) }) } forecast/tests/testthat/test-wrangle.R0000644000176200001440000000233513553673010017627 0ustar liggesusers# A unit test for wrangling functions if (require(testthat)) { context("Tests joining data.frames") mv_y <- ts(cbind(rnorm(120, 0, 3) + 1:120 + 20 * sin(2 * pi * (1:120) / 12), rnorm(120, 3, 7) + 1:120 + 16 * sin(2 * pi * (1:120 + 6) / 12)), frequency = 12) mv_x <- ts(cbind(rnorm(120, 0, 8) + (1:120) / 2 + 42 * sin(2 * pi * (1:120) / 12), rnorm(120, 3, 7) + (1:120) * -1 + 20 * sin(2 * pi * (1:120 + 6) / 12)), frequency = 12) v_y <- ts(rnorm(120, 0, 8) + (1:120) / 2 + 12 * sin(2 * pi * (1:120) / 12), frequency = 12) v_x <- ts(rnorm(120, 0, 1) + (1:120) * (-1) + 28 * sin(2 * pi * (1:120) / 12), frequency = 12) test_that("tests on retaining matrix attributes", { data <- datamat(mv_y, mv_x, v_y, v_x) expect_true(is.ts(data[, 1])) expect_true(identical(tsp(data[, 1]), tsp(data[, 2]))) expect_true(NCOL(data) == 8) 
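# Matrix columns should survive extraction: data[, 1] is the bivariate mv_y,
# so it should still be a two-column matrix carrying its ts attributes.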
expect_true(NCOL(data[, 1]) == 2) expect_true("matrix" %in% class(data[, 1])) expect_true(class(data) == "data.frame") }) test_that("flatten data.frames", { mvdata <- datamat(mv_y, mv_x) vdata <- datamat(v_y, v_x) data <- datamat(mvdata, vdata, flatten = TRUE) expect_true(class(data) == "data.frame") expect_true(!"data.frame" %in% class(data[, 1])) }) } forecast/tests/testthat/test-acf.R0000644000176200001440000000075513553673010016725 0ustar liggesusers# A unit test for Acf() function if (require(testthat)) { test_that("tests for acf", { out <- Acf(wineind, lag.max = 10, type = "partial", plot = FALSE) expect_that(length(out$lag), equals(10)) expect_that(out$acf, equals(Pacf(wineind, lag.max = 10, plot = FALSE)$acf)) expect_equal(dim(Acf(wineind, lag.max = 10, type = "correlation", plot = FALSE)$acf), c(11L, 1L, 1L)) expect_equal(Acf(wineind, lag.max = 10, type = "correlation", plot = TRUE)$acf[1, 1, 1], 1) }) } forecast/tests/testthat/test-modelAR.R0000644000176200001440000002354413553673010017520 0ustar liggesusers# A unit test for modelAR.R if (require(testthat)) { context("Testing modelAR") test_that("Tests for modelAR", { ## Set up functions to match 'nnetar' behavior avnnet2 <- function(x, y, repeats = repeats, linout=TRUE, trace=FALSE, ...){ mods <- list() for (i in 1:repeats) mods[[i]] <- nnet::nnet(x, y, linout = linout, trace = trace, ...) return(structure(mods, class = "nnetarmodels")) } ## predict.avnnet2 <- function(model, newdata=NULL){ if (is.null(newdata)){ if (length(predict(model[[1]])) > 1) { rowMeans(sapply(model, predict)) } else { mean(sapply(model, predict)) } } else { if (NCOL(newdata) >= 2 & NROW(newdata) >= 2){ rowMeans(sapply(model, predict, newdata=newdata)) } else { mean(sapply(model, predict, newdata=newdata)) } } } ## compare residuals to 'nnetar' expect_silent({ set.seed(123) nnetar_model <- nnetar(lynx[1:100], p=2, P=1, size=3, repeats=20) set.seed(123) modelAR_model <- modelAR(lynx[1:100], FUN = avnnet2, predict.FUN = predict.avnnet2, p=2, P=1, scale.inputs = TRUE, size=3, repeats=20) res1 <- residuals(nnetar_model) res2 <- residuals(modelAR_model) }) expect_true(identical(res1, res2)) ## check re-fitting old model and compare to 'nnetar' expect_silent({ nnetar_model2 <- nnetar(lynx[101:114], model=nnetar_model) modelAR_model2 <- modelAR(lynx[101:114], FUN = avnnet2, predict.FUN= predict.avnnet2, model=modelAR_model) res1 <- residuals(nnetar_model2) res2 <- residuals(modelAR_model2) }) expect_true(identical(res1, res2)) ## compare forecasts with 'nnetar' expect_silent({ f1 <- forecast(nnetar_model)$mean f2 <- forecast(modelAR_model)$mean }) expect_true(identical(f1, f2)) ## test lambda and compare to 'nnetar' expect_silent({ set.seed(123) oilnnet_nnetar <- nnetar(airmiles, lambda = 0.15, size = 1, repeats=20) set.seed(123) oilnnet_modelAR <- modelAR(airmiles, FUN = avnnet2, predict.FUN = predict.avnnet2, scale.inputs = TRUE, lambda = 0.15, size = 1, repeats=20) }) expect_true(identical(residuals(oilnnet_nnetar, type = "response"), residuals(oilnnet_modelAR, type = "response"))) expect_true(length(forecast(oilnnet_modelAR)$mean) == 10) ## check print input name expect_silent(woolyrnqnnet <- modelAR(woolyrnq, FUN = avnnet2, predict.FUN = predict.avnnet2, scale.inputs = TRUE, p = 1, P = 0, size = 8, repeats = 10)) expect_output(print(woolyrnqnnet), regexp = "Series: woolyrnq") ## check default forecast length expect_true(length(forecast(woolyrnqnnet)$mean) == 2 * frequency(woolyrnq)) # # Test with single-column xreg (which might be a vector) expect_silent({ 
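# Use the same RNG seed for the modelAR() and nnetar() fits below so the
# random network weights match and the two implementations can be compared
# exactly.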
set.seed(123) woolyrnqnnet <- modelAR(woolyrnq, FUN = avnnet2, predict.FUN = predict.avnnet2, scale.inputs = TRUE, xreg = 1:length(woolyrnq), p = 2, P = 2, size = 4, repeats = 10) set.seed(123) woolyrnqnnet2 <- nnetar(woolyrnq, xreg = 1:length(woolyrnq), p = 2, P = 2, size = 4, repeats = 10) }) expect_true(all(dim(woolyrnqnnet$xreg) == c(119, 1))) expect_true(length(forecast(woolyrnqnnet, xreg = 120:130)$mean) == 11) expect_true(identical(forecast(woolyrnqnnet, xreg = 120:130)$mean, forecast(woolyrnqnnet2, xreg = 120:130)$mean)) ## Test with multiple-column xreg expect_silent({ set.seed(123) winennet <- modelAR(wineind, FUN = avnnet2, predict.FUN = predict.avnnet2, scale.inputs = TRUE, xreg = cbind(bizdays(wineind), fourier(wineind, 1)), p = 2, P = 1, size = 4, repeats = 10) set.seed(123) winennet2 <- nnetar( wineind, xreg = cbind(bizdays(wineind), fourier(wineind, 1)), p = 2, P=1, size = 4, repeats = 10 ) }) expect_true(length(forecast(winennet, h = 2, xreg = matrix(2, 2, 3))$mean) == 2L) ## Test if h matches xreg expect_true(length(forecast(winennet, h = 5, xreg = matrix(2, 2, 3))$mean) == 2L) expect_warning(forecast(winennet2, xreg = matrix(2, 2, 3))$mean, "different column names") %>% expect_identical(forecast(winennet, xreg = matrix(2, 2, 3))$mean) ## Test that P is ignored if m=1 expect_warning(wwwnnet <- modelAR(WWWusage, FUN = avnnet2, predict.FUN = predict.avnnet2, scale.inputs = TRUE, xreg = 1:length(WWWusage), p = 2, P = 4, size = 3, repeats = 10)) ## Test passing arguments to nnet expect_silent({ set.seed(123) wwwnnet <- modelAR(WWWusage, FUN = avnnet2, predict.FUN = predict.avnnet2, scale.inputs = TRUE, xreg = 1:length(WWWusage), p = 2, P = 0, size = 3, decay = 0.1, repeats = 10) set.seed(123) wwwnnet2 <- nnetar(WWWusage, size = 3, p = 2, P = 0, xreg = 1:length(WWWusage), decay = 0.1, repeats = 10) }) expect_true(identical(forecast(wwwnnet, h=2, xreg = (length(WWWusage)+1):(length(WWWusage)+5))$mean, forecast(wwwnnet2, h=2, xreg = (length(WWWusage)+1):(length(WWWusage)+5))$mean )) ## Test output format correct when NAs present airna <- airmiles airna[12] <- NA expect_warning(airnnet <- modelAR(airna, FUN = avnnet2, predict.FUN = predict.avnnet2, scale.inputs = TRUE, p = 1, size = 0, skip = TRUE, Wts = c(0, 1), maxit = 0, repeats = 5)) expect_equal(airnnet$fitted[-c(1, 12, 13)], airna[-c(11, 12, length(airna))]) ## Test model argument expect_silent({ set.seed(123) fit1 <- modelAR( WWWusage, FUN = avnnet2, predict.FUN = predict.avnnet2, scale.inputs = TRUE, xreg = 1:length(WWWusage), p = 3, size=2, lambda = 2, decay = 0.5, maxit = 25, repeats = 7 ) fit2 <- modelAR(WWWusage, xreg = 1:length(WWWusage), model = fit1) set.seed(123) fit3 <- nnetar(WWWusage, xreg = 1:length(WWWusage), p = 3, size=2, lambda = 2, decay = 0.5, maxit = 25, repeats = 7) }) # Check some model parameters expect_true(identical(fit1$p, fit2$p)) expect_true(identical(fit1$lambda, fit2$lambda)) expect_true(identical(fit1$modelargs, fit2$modelargs)) # Check fitted values are all the same expect_true(identical(fitted(fit1), fitted(fit2))) expect_true(identical(fitted(fit1, h=2), fitted(fit2, h=2))) # Check residuals all the same expect_true(identical(residuals(fit1), residuals(fit2))) # Check number of neural nets expect_true(identical(length(fit1$model), length(fit2$model))) # Check neural network weights all the same expect_true(identical(fit1$model[[1]]$wts, fit2$model[[1]]$wts)) expect_true(identical(fit1$model[[7]]$wts, fit2$model[[7]]$wts)) ## compare results with 'nnetar' expect_true(identical(fitted(fit1), 
fitted(fit3))) expect_true(identical(fitted(fit1, h=3), fitted(fit3, h=3))) expect_true(identical(residuals(fit1, type="response"), residuals(fit3, type="response"))) ## Check subset argument using indices expect_silent({ set.seed(123) airnnet <- modelAR(airmiles, , FUN = avnnet2, predict.FUN = predict.avnnet2, scale.inputs = TRUE, subset = 11:20, p=1, size=1, repeats=10) set.seed(123) airnnet2 <- nnetar(airmiles, , subset = 11:20, p=1, size=1, repeats=10) }) expect_true(identical(which(!is.na(fitted(airnnet))), 11:20)) expect_true(identical(fitted(airnnet), fitted(airnnet2))) expect_true(identical(forecast(airnnet, h=5)$mean, forecast(airnnet2, h=5)$mean)) ## Check subset argument using logical vector expect_silent({ set.seed(123) airnnet <- modelAR(airmiles, FUN = avnnet2, predict.FUN = predict.avnnet2, scale.inputs = TRUE, subset = c(rep(F, 10), rep(T, 10), rep(F, length(airmiles) - 20)), p=1, size=1, repeats=10) set.seed(123) airnnet2 <- nnetar(airmiles, , subset = c(rep(F, 10), rep(T, 10), rep(F, length(airmiles) - 20)), p=1, size=1, repeats=10) }) expect_true(identical(which(!is.na(fitted(airnnet))), 11:20)) expect_true(identical(fitted(airnnet), fitted(airnnet2))) expect_true(identical(forecast(airnnet, h=5)$mean, forecast(airnnet2, h=5)$mean)) ## compare prediction intervals with 'nnetar' expect_silent({ set.seed(456) f1 <- forecast(airnnet, h=5, PI=TRUE, npaths=100) set.seed(456) f2 <- forecast(airnnet2, h=5, PI=TRUE, npaths=100) }) expect_true(identical(f1$upper, f2$upper)) expect_true(identical(f1$lower, f2$lower)) ## Check short and constant data expect_warning(nnetfit <- modelAR(rep(1, 10), FUN = avnnet2, predict.FUN = predict.avnnet2, p=2, P=0, size=1, repeats=1, lambda = 0.1), "Constant data") expect_true(nnetfit$p == 1) expect_true(is.null(nnetfit$lambda)) expect_true(is.null(nnetfit$scalex)) expect_error(nnetfit <- modelAR(rnorm(2), FUN = avnnet2, predict.FUN = predict.avnnet2, p=1, P=0, size=1, repeats=1), "Not enough data") expect_silent(nnetfit <- modelAR(rnorm(3), FUN = avnnet2, predict.FUN = predict.avnnet2, p=1, P=0, size=1, repeats=1)) expect_true(nnetfit$p == 1) expect_silent(nnetfit <- modelAR(rnorm(3), FUN = avnnet2, predict.FUN = predict.avnnet2, p=2, P=0, size=1, repeats=1)) expect_true(nnetfit$p == 2) expect_warning(nnetfit <- modelAR(rnorm(3), FUN = avnnet2, predict.FUN = predict.avnnet2, p=3, P=0, size=1, repeats=1), "short series") expect_true(nnetfit$p == 2) expect_warning(nnetfit <- modelAR(rnorm(3), FUN = avnnet2, predict.FUN = predict.avnnet2, p=4, P=0, size=1, repeats=1), "short series") expect_true(nnetfit$p == 2) expect_warning(nnetfit <- modelAR(rnorm(10), FUN = avnnet2, predict.FUN = predict.avnnet2, xreg=rep(1, 10), p=2, P=0, size=1, repeats=1, lambda = 0.1), "Constant xreg") expect_true(is.null(nnetfit$scalexreg)) expect_warning(nnetfit <- modelAR(rnorm(3), FUN = avnnet2, predict.FUN = predict.avnnet2, xreg=matrix(c(1, 2, 3, 1, 1, 1), ncol=2), p=1, P=0, size=1, repeats=1, lambda = 0.1), "Constant xreg") expect_true(is.null(nnetfit$scalexreg)) }) } forecast/tests/testthat/test-hfitted.R0000644000176200001440000000252713553673010017622 0ustar liggesusers# A unit test for h-step fits if (require(testthat)) { context("Tests for h-step fits with hfitted") test_that("variance test on h-step fits", { mod1 <- ets(WWWusage, model = "AAN", damped = TRUE) h1 <- fitted(mod1, h = 1) h2 <- fitted(mod1, h = 2) j <- !is.na(h1) & !is.na(h2) expect_lt(var(diff(h1[j])), var(diff(h2[j]))) # hfitted automatic function selection h2_1 <- hfitted(mod1, h = 2) 
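# fitted(object, h) should dispatch to hfitted() for h > 1, so the two
# 2-step fitted series computed above must be identical.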
expect_true(identical(h2, h2_1)) mod2 <- Arima(WWWusage, order = c(1, 1, 1)) h1 <- fitted(mod2, h = 1) h2 <- fitted(mod2, h = 2) j <- !is.na(h1) & !is.na(h2) expect_lt(var(diff(h1[j])), var(diff(h2[j]))) mod3 <- arfima(WWWusage) h1 <- fitted(mod3, h = 1) h2 <- fitted(mod3, h = 2) j <- !is.na(h1) & !is.na(h2) expect_lt(var(diff(h1[j])), var(diff(h2[j]))) # mod3 <- tbats(WWWusage) # h1 <- fitted(mod3, h=1) # h2 <- fitted(mod3, h=2) # j <- !is.na(h1) & !is.na(h2) # expect_lt(var(diff(h1[j])), var(diff(h2[j]))) # # mod4 <- bats(WWWusage) # h1 <- fitted(mod4, h=1) # h2 <- fitted(mod4, h=2) # j <- !is.na(h1) & !is.na(h2) # expect_lt(var(diff(h1[j])), var(diff(h2[j]))) mod5 <- nnetar(WWWusage) h1 <- fitted(mod5, h = 1) h2 <- fitted(mod5, h = 2) j <- !is.na(h1) & !is.na(h2) expect_lt(var(diff(h1[j])), var(diff(h2[j]))) }) } forecast/tests/testthat/test-msts.R0000644000176200001440000000045013553673010017152 0ustar liggesusers# A unit test for msts.R if (require(testthat)) { context("Test msts.R") test_that("tests for msts() and print.msts()", { x <- msts(taylor, seasonal.periods = c(48, 336), ts.frequency = 48, start = 2000 + 22 / 52) expect_output(print(x), regexp = "Multi-Seasonal Time Series") }) } forecast/tests/testthat/test-accuracy.R0000644000176200001440000000315013553673010017756 0ustar liggesusers# A unit test for accuracy() function if (require(testthat)) { context("Tests on input") test_that("tests for a non-forecast object", { expect_that(accuracy(USAccDeaths), throws_error()) }) context("Tests on output") test_that("tests for dimension", { train <- window(USAccDeaths, start = c(1973, 1), end = c(1976, 12)) test <- window(USAccDeaths, start = c(1977, 1)) fcasts <- forecast(train, h = 6) expect_that(dim(accuracy(fcasts)), equals(c(1, 7))) expect_that(dim(accuracy(fcasts, test)), equals(c(2, 8))) expect_false( all(dim(accuracy(fcasts, test, test = 1:2)) == dim(accuracy(fcasts, test))) ) expect_that(accuracy(fcasts, test = 1:length(train)), equals(accuracy(fcasts))) }) test_that("tests for accuracy", { # Test arima fitarima <- Arima(USAccDeaths, order = c(0, 1, 1), seasonal = c(0, 1, 1)) accuracyarima <- accuracy(fitarima)[1, "RMSE"] accuracyarimasim <- accuracy(Arima(simulate(fitarima, seed = 123), order = c(0, 1, 0), seasonal = c(0, 0, 1)))[1, "RMSE"] expect_lt(accuracyarima, accuracyarimasim) # Test ets fitets <- ets(AirPassengers, model = "MAM", damped = TRUE) accuracyets <- accuracy(fitets)[1, "RMSE"] accuracyetssim <- accuracy(ets(simulate(fitets, seed = 123), model = "MAM", damped = TRUE))[1, "RMSE"] expect_lt(accuracyets, accuracyetssim) # Test lm month <- factor(rep(1:12, 14)) fitlm <- lm(wineind[1:168] ~ month) accuracylm <- accuracy(fitlm)[1, "RMSE"] accuracylmsim <- accuracy(lm(simulate(fitlm, seed = 123)[, 1] ~ month))[1, "RMSE"] expect_gt(accuracylm, accuracylmsim) }) } forecast/tests/testthat/test-arfima.R0000644000176200001440000000210513553673010017422 0ustar liggesusers# A unit test for arfima.R if (require(testthat)) { arfima1 <- arfima(WWWusage, estim = "mle") arfima2 <- arfima(WWWusage, estim = "ls") arfimabc <- arfima(WWWusage, estim = "mle", lambda = 0.75, biasadj = FALSE) arfimabc2 <- arfima(WWWusage, estim = "mle", lambda = 0.75, biasadj = TRUE) test_that("test fitted() and residuals().", { expect_true(all(arimaorder(arfima1) == arimaorder(arfima2))) fitarfima <- fitted(arfima1) residarfima <- residuals(arfima2) expect_true(length(fitarfima) == length(residarfima)) expect_true(all(getResponse(arfima1) == WWWusage)) expect_false(identical(arfimabc$fitted, 
arfimabc2$fitted)) }) test_that("test forecast.fracdiff()", { expect_true(all(forecast(arfima1, fan = TRUE)$mean == forecast(arfima1, fan = FALSE)$mean)) expect_error(forecast(arfimabc, level = -10)) expect_error(forecast(arfimabc, level = 110)) expect_false(identical(forecast(arfimabc, biasadj = FALSE), forecast(arfimabc, biasadj = TRUE))) expect_output(summary(forecast(arfimabc)), regexp = "Forecast method: ARFIMA") }) } forecast/tests/testthat/test-boxcox.R0000644000176200001440000000417013553673010017471 0ustar liggesusers# A unit test for boxcox transformations if (require(testthat)) { context("Tests for BoxCox") test_that("tests for biasadj automatically set based on model fit", { # lm fit <- tslm(USAccDeaths ~ trend, lambda = 0.5, biasadj = TRUE) expect_true(all.equal(forecast(fit), forecast(fit, biasadj = TRUE))) # HoltWintersZZ fit <- ses(USAccDeaths, initial = "simple", lambda = 0.5, biasadj = TRUE) expect_true(all.equal(forecast(fit), forecast(fit, biasadj = TRUE))) # arfima x <- fracdiff::fracdiff.sim(100, ma = -.4, d = .3)$series fit <- arfima(x) expect_true(all.equal(forecast(fit), forecast(fit, biasadj=TRUE))) #arima fit1 <- Arima(USAccDeaths, order = c(0,1,1), seasonal = c(0,1,1), lambda = 0.5, biasadj = TRUE) fit2 <- auto.arima(USAccDeaths, max.p=0, max.d=1, max.q=1, max.P=0, max.D=1, max.Q=1, lambda = 0.5, biasadj = TRUE) expect_true(all.equal(forecast(fit1), forecast(fit1, biasadj=TRUE))) expect_true(all.equal(forecast(fit2), forecast(fit2, biasadj=TRUE))) expect_true(all.equal(forecast(fit1)$mean, forecast(fit2)$mean)) # ets fit <- ets(USAccDeaths, model = "ANA", lambda = 0.5, biasadj = TRUE) expect_true(all.equal(forecast(fit), forecast(fit, biasadj = TRUE))) # bats # fit <- bats(USAccDeaths, use.box.cox = TRUE, biasadj = TRUE) # expect_true(all.equal(forecast(fit), forecast(fit, biasadj=TRUE))) # tbats # fit <- tbats(USAccDeaths, use.box.cox = TRUE, biasadj = TRUE) # expect_true(all.equal(forecast(fit), forecast(fit, biasadj=TRUE))) }) test_that("tests for automatic lambda selection in BoxCox transformation", { lambda_auto <- BoxCox.lambda(USAccDeaths) # lm fit <- tslm(USAccDeaths ~ trend, lambda = "auto", biasadj = TRUE) expect_equal(as.numeric(fit$lambda), lambda_auto) # ets fit <- ets(USAccDeaths, model = "ANA", lambda = "auto", biasadj = TRUE) expect_equal(as.numeric(fit$lambda), lambda_auto) # arima fit <- Arima(USAccDeaths, order = c(0,1,1), seasonal = c(0,1,1), lambda = "auto", biasadj = TRUE) expect_equal(as.numeric(fit$lambda), lambda_auto) }) } forecast/tests/testthat/test-arima.R0000644000176200001440000001173613553673010017266 0ustar liggesusers# A unit test for Arima() function if (require(testthat)) { context("Tests on input") test_that("tests for a non-ts object", { set.seed(123) abc <- rnorm(50, 5, 1) fit <- Arima(abc, order = c(2, 0, 1)) expect_that(fit$arma, equals(c(2, 1, 0, 0, 1, 0, 0))) }) test_that("tests for a ts with the seasonal component", { fit <- Arima(wineind, order = c(1, 1, 1), seasonal = c(0, 1, 1)) expect_that(fit$arma, equals(c(1, 1, 0, 1, 12, 1, 1))) }) test_that("tests for ARIMA errors", { fit <- Arima(wineind, order = c(1, 1, 1), seasonal = c(0, 1, 1)) expect_that(residuals(fit, type = "regression"), equals(wineind)) }) test_that("tests for arimaorder", { for (ar in 1:5) { for (i in 0:1) { for (ma in 1:5) { fitarima <- Arima(lynx, order = c(ar, i, ma), method = "ML", include.constant = TRUE, lambda = 0.5) arextracted <- fitarima$arma[1] iextracted <- fitarima$arma[6] maextracted <- fitarima$arma[2] 
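# For an Arima fit, object$arma is c(p, q, P, Q, m, d, D): the non-seasonal
# AR and MA orders, the seasonal AR and MA orders, the period, and the
# numbers of non-seasonal and seasonal differences -- hence elements 1, 6
# and 2 give p, d and q here.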
expect_true(all(arimaorder(fitarima) == c(arextracted, iextracted, maextracted))) expect_true(all(names(arimaorder(fitarima)) == c("p", "d", "q"))) expect_true(arimaorder(fitarima)["p"] == ar) expect_true(arimaorder(fitarima)["d"] == i) expect_true(arimaorder(fitarima)["q"] == ma) } } } # Test ar arMod <- ar(lynx, order.max = 2) expect_true(arimaorder(arMod)["p"] == 2) expect_true(arimaorder(arMod)["d"] == 0) expect_true(arimaorder(arMod)["q"] == 0) expect_true(all(names(arimaorder(arMod)) == c("p", "d", "q"))) # Test SARIMA sarimaMod <- Arima(wineind, order = c(1, 1, 2), seasonal=c(0, 1,1)) expect_true(all(names(arimaorder(sarimaMod)) == c("p", "d", "q", "P", "D", "Q", "Frequency"))) expect_true(arimaorder(sarimaMod)["p"] == 1) expect_true(arimaorder(sarimaMod)["d"] == 1) expect_true(arimaorder(sarimaMod)["q"] == 2) expect_true(arimaorder(sarimaMod)["P"] == 0) expect_true(arimaorder(sarimaMod)["D"] == 1) expect_true(arimaorder(sarimaMod)["Q"] == 1) expect_true(arimaorder(sarimaMod)["Frequency"] == frequency(wineind)) # Test fracdiff set.seed(4) fracdiffMod <- fracdiff::fracdiff(lynx, nar = 2, nma = 2) expect_true(all(names(arimaorder(fracdiffMod)) == c("p", "d", "q"))) expect_true(arimaorder(fracdiffMod)["p"] == 2) expect_true(arimaorder(fracdiffMod)["d"] >= 0) expect_true(arimaorder(fracdiffMod)["d"] <= 1) expect_true(arimaorder(fracdiffMod)["q"] == 2) }) test_that("tests for forecast.Arima", { fit1 <- Arima(wineind, order = c(1, 1, 2), seasonal = c(0, 1, 1), method = "CSS") expect_warning(forecast.Arima(fit1, xreg = 1:10), "xreg not required") expect_warning(forecast.Arima(fit1, include.drift = TRUE)) expect_true(all.equal(forecast.Arima(fit1, bootstrap = TRUE, npaths = 100)$mean, forecast.Arima(fit1)$mean)) fit2 <- Arima(wineind, order = c(1, 0, 1), seasonal = c(0, 0, 0), include.drift = TRUE) expect_warning(Arima(wineind, order = c(1, 2, 1), include.drift = TRUE)) expect_true("drift" %in% names(coef(fit2))) expect_true(length(forecast.Arima(fit2)$mean) == 2 * frequency(wineind)) fit3 <- Arima(wineind, order = c(1, 1, 2), seasonal = c(0, 1, 1), include.mean = FALSE) expect_false("intercept" %in% names(coef(fit3))) expect_true(frequency(forecast.Arima(fit3)$mean) == frequency(wineind)) fit4 <- Arima(wineind, order = c(1, 1, 2), seasonal = c(0, 1, 1), xreg = rnorm(length(wineind))) expect_error(forecast.Arima(fit4)) expect_error(forecast.Arima(fit4, xreg = matrix(rnorm(40), ncol = 2))) forecast.Arima(fit4, xreg = rnorm(20))$mean %>% length %>% expect_equal(20) fit5 <- Arima(wineind[1:150], order = c(1, 1, 2), seasonal = c(0, 1, 1), method = "ML") expect_true(accuracy(fit5)[1, "MAPE"] < accuracy(Arima(wineind, model = fit5))[1, "MAPE"]) fit6 <- Arima(wineind, order = c(1, 1, 2), seasonal = c(0, 1, 1), method = "CSS", lambda = 5) expect_false(identical(fit1$coef, fit6$coef)) }) test_that("tests for search.arima", { set.seed(444) arimasim <- arima.sim(n = 300, model = list(ar = runif(8, -.1, 0.1), ma = runif(8, -0.1, 0.1), sd = 0.1)) expect_true(AIC(auto.arima(arimasim)) >= AIC(auto.arima(arimasim, stepwise = FALSE))) }) test_that("tests for forecast.ar()", { fitar <- ar(taylor) arfc <- forecast.ar(fitar)$mean expect_true(all(arfc == forecast.ar(fitar, bootstrap = TRUE, npaths = 100)$mean)) expect_true(all(arfc == forecast.ar(fitar, fan = TRUE)$mean)) expect_error(forecast.ar(fitar, level = -10)) expect_error(forecast.ar(fitar, level = 110)) expect_true(all(arfc + 1 == forecast.ar(fitar, lambda = 1)$mean)) arfcbc <- forecast.ar(fitar, lambda = 2) arfcabc <- forecast.ar(fitar, lambda = 2,
biasadj = TRUE) expect_false(identical(arfcbc$mean, arfcabc$mean)) }) test_that("tests for as.character.Arima()", { expect_match(as.character(auto.arima(woolyrnq)), regexp = "ARIMA") }) } forecast/tests/testthat/test-thetaf.R0000644000176200001440000000074613553673010017447 0ustar liggesusers# A unit test for thetaf.R if (require(testthat)) { test_that("test thetaf()", { thetafc <- thetaf(WWWusage)$mean expect_true(all(thetafc == thetaf(WWWusage, fan = TRUE)$mean)) expect_error(thetaf(WWWusage, level = -10)) expect_error(thetaf(WWWusage, level = 110)) # Constant series should not error series <- ts(rep(950, 20), f = 4) constantForecast <- expect_error(thetaf(series), NA) expect_true(is.constant(round(constantForecast$mean,12))) }) } forecast/tests/testthat/test-clean.R0000644000176200001440000000253513553673010017254 0ustar liggesusers# A unit test for na.interp() and tsclean() if (require(testthat)) { test_that("tests for na.interp", { # Test nonseasonal interpolation expect_true(all(na.interp(c(1, 2, 3, NA, 5, 6, 7)) == 1:7)) # Test for identical on series without NAs expect_true(all(na.interp(wineind) == wineind)) # Test seasonal interpolation testseries <- ts(rep(1:7, 5), f = 7) testseries[c(1, 3, 11, 17)] <- NA expect_true(sum(abs(na.interp(testseries) - rep(1:7, 5))) < 1e-14) # Test length of output expect_true(length(testseries) == length(na.interp(testseries))) }) test_that("tests for tsclean", { # Test for no NAs expect_false(any(is.na(tsclean(gold)))) # Test for removing outliers in seasonal series testseries <- ts(rep(1:7, 5), f = 7) testseries[c(2, 4, 14)] <- 0 expect_true(sum(abs(tsclean(testseries) - rep(1:7, 5))) < 1e-14) # Test for NAs left with replace.missing = FALSE argument testseries[c(2, 4, 14)] <- NA expect_true(any(is.na(tsclean(testseries, replace.missing = FALSE)))) # Test for outliers in a series expect_true(sum(wineind != tsclean(wineind)) == 3L) # Test for identical on series without NAs or outliers expect_true(identical(USAccDeaths, tsclean(USAccDeaths))) # Test length of output expect_true(length(tsclean(testseries)) == length(testseries)) }) } forecast/tests/testthat/test-bats.R0000644000176200001440000000134613553673010017122 0ustar liggesusers# A unit test for bats function if (require(testthat)) { context("Tests on input and output") test_that("tests for a non-ts object", { set.seed(123) abc <- rnorm(50, 5, 1) fit <- bats(abc, use.box.cox = TRUE, use.parallel = FALSE) expect_false(fit$lambda == 0) expect_output(print(fit), "Seed States") expect_equal(length(residuals(fit)), 50L) plot(fit) expect_equal(bats(1, use.box.cox = TRUE, use.parallel = FALSE)$AIC, -Inf) expect_equal(bats(-1, use.box.cox = TRUE, use.parallel = FALSE)$AIC, -Inf) }) test_that("Test parallel of bats", { abc <- rnorm(50, 5, 1) skip_on_cran() skip_on_travis() expect_gt(bats(abc, use.box.cox = TRUE, use.parallel = TRUE)$lambda, 0.999) }) } forecast/tests/testthat/test-graph.R0000644000176200001440000000106313553673010017266 0ustar liggesusers# A unit test for graph.R if (require(testthat)) { context("Testing graph") test_that("Tests for seasonplot()", { expect_error(seasonplot(airmiles)) seasonplot(ts(gold, f = 7)) seasonplot(woolyrnq) seasonplot(wineind) seasonplot(wineind, year.labels = TRUE) seasonplot(wineind, year.labels.left = TRUE) # seasonplot(taylor) }) test_that("Tests for tsdisplay()", { tsdisplay(airmiles, ci.type = "ma") tsdisplay(1:20) tsdisplay(airmiles, plot.type = "scatter") tsdisplay(airmiles, plot.type = "spectrum") }) } 
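# A possible tightening of the smoke tests above (a sketch, not part of the
# original suite): expect_error(..., NA), already used elsewhere in these
# tests, makes the "runs without error" intent of the bare plot calls
# explicit instead of relying on an uncaught error to abort the test.
if (require(testthat)) {
  test_that("Plot functions run without error", {
    expect_error(seasonplot(wineind, year.labels = TRUE), NA)
    expect_error(tsdisplay(airmiles, ci.type = "ma"), NA)
  })
}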
forecast/tests/testthat/test-dshw.R0000644000176200001440000000207513553673010017136 0ustar liggesusers# A unit test for dshw function if (require(testthat)) { context("Tests on dshw()") test_that("Test dshw()", { # Test negative values and period1 and period2 not specified set.seed(345) expect_error(dshw(-10:10)) expect_error(dshw(abs(rnorm(100)))) # Test fits with period1 and period2 swapped set.seed(5555) t <- seq(0, 1, by = 0.1) x <- exp(sin(2 * pi * t) + cos(2 * pi * t * 4) + rnorm(length(t), 0, 0.1)) fit1 <- dshw(x, period1 = 4, period2 = 2)$mean fit2 <- dshw(x, period1 = 2, period2 = 4)$mean expect_true(all(fit1 == fit2)) # Test fits with lambda specified and armethod = FALSE y <- x + 1 fit3 <- dshw(y, period1 = 2, period2 = 4, lambda = 2, biasadj = FALSE) fit4 <- dshw(y, period1 = 2, period2 = 4, lambda = 2, biasadj = TRUE) expect_false(identical(fit3$mean, fit4$mean)) fit5 <- dshw(x, period1 = 2, period2 = 4, armethod = FALSE) # Test fits with inappropriate periods specified expect_error(dshw(x, period1 = 2, period2 = 2)) expect_error(dshw(x, period1 = 2, period2 = 4.1)) }) } forecast/tests/testthat/test-tbats.R0000644000176200001440000000500313553673010017300 0ustar liggesusers# A unit test for tbats function if (require(testthat)) { context("Tests on tbats() functions") test_that("Test simple cases for tbats", { expect_error(tbats(data.frame(x1 = 1, x2 = 2), use.parallel = FALSE)) expect_warning(tbats(c(1:5, NA, 7:9), use.parallel = FALSE)) expect_true(all(forecast(tbats(rep(1, 100), use.parallel = FALSE))$mean == 1)) }) test_that("Test tbats() and forecasts", { # Fit tbats models tbatsfit1 <- tbats(subset(wineind, end = 50), use.parallel = FALSE) tbatsfit2 <- tbats(WWWusage, use.parallel = FALSE) tbatsfit3 <- tbats(as.numeric(woolyrnq), seasonal.periods = frequency(woolyrnq), use.parallel = FALSE) tbatsfit4 <- tbats(airmiles, use.box.cox = FALSE, use.parallel = FALSE) # Test tbats.components tbats.components(tbatsfit1) tbats.components(tbatsfit2) tbats.components(tbatsfit3) tbats.components(tbatsfit4) # Test accuracy.tbats() function expect_output(print(accuracy(tbatsfit1)), regexp = "ME") expect_output(print(accuracy(tbatsfit2)), regexp = "ME") expect_output(print(accuracy(tbatsfit3)), regexp = "ME") expect_output(print(accuracy(tbatsfit4)), regexp = "ME") # Test summary.tbats() expect_output(print(summary(tbatsfit1)), regexp = "Length") expect_output(print(summary(tbatsfit2)), regexp = "Length") expect_output(print(summary(tbatsfit3)), regexp = "Length") expect_output(print(summary(tbatsfit4)), regexp = "Length") # Test fitted length expect_true(length(fitted(tbatsfit1)) == 50) expect_true(length(fitted(tbatsfit2)) == length(WWWusage)) expect_true(length(fitted(tbatsfit3)) == length(woolyrnq)) expect_true(length(fitted(tbatsfit4)) == length(airmiles)) # Test length of forecast expect_true(length(forecast(tbatsfit1)$mean) == 2 * frequency(wineind)) expect_true(length(forecast(tbatsfit2)$mean) == 10) # expect_true(length(forecast(tbatsfit3)$mean) == 2 * frequency(woolyrnq)) expect_true(length(forecast(tbatsfit4)$mean) == 10) # Test inappropriate levels expect_error(forecast(tbatsfit1, level = -10)) expect_error(forecast(tbatsfit1, level = 110)) # Test forecasts with fan = TRUE expect_true(all(forecast(tbatsfit1, fan = TRUE)$mean == forecast(tbatsfit1)$mean)) }) test_that("Test tbats() with parallel", { # Tests will not run on Travis in parallel # expect_output(print(tbats(woolyrnq, num.cores = 1)), regexp = "TBATS") # expect_output(print(tbats(elecsales, num.cores = 1, 
    #                                    use.trend = FALSE)), regexp = "BATS")
  })
}
forecast/tests/testthat/test-forecast2.R0000644000176200001440000000734313553673010020064 0ustar liggesusers
# A unit test for forecast2.R
if (require(testthat)) {
  context("Test forecast2.R")
  test_that("test meanf()", {
    meanfc <- mean(wineind)
    expect_true(all(meanf(wineind)$mean == meanfc))
    bcforecast <- meanf(wineind, lambda = -0.5)$mean
    expect_true(max(bcforecast) == min(bcforecast))
    expect_true(all(meanf(wineind, fan = TRUE)$mean == meanfc))
    expect_error(meanf(wineind, level = -10))
    expect_error(meanf(wineind, level = 110))
    # Constant series should not error
    series <- ts(rep(950, 20), f = 4)
    constantForecast <- expect_error(meanf(series), NA)
    expect_true(is.constant(constantForecast$mean))
  })
  test_that("test rwf()", {
    rwfc <- rwf(airmiles)$mean
    expect_true(all(rwfc == naive(airmiles)$mean))
    expect_true(all(rwfc < rwf(airmiles, drift = TRUE)$mean))
    expect_true(all(rwf(airmiles, fan = TRUE)$mean == rwfc))
    expect_true(length(rwf(airmiles, lambda = 0.15)$mean) == 10)
    expect_false(identical(rwf(airmiles, lambda = 0.15, biasadj = FALSE)$mean, rwf(airmiles, lambda = 0.15, biasadj = TRUE)$mean))
    # Constant series should not error
    series <- ts(rep(950, 20), f = 4)
    constantForecast <- expect_error(rwf(series), NA)
    expect_true(is.constant(constantForecast$mean))
  })
  test_that("test forecast.HoltWinters()", {
    hwmod <- stats::HoltWinters(UKgas)
    expect_true(all(forecast(hwmod, fan = TRUE)$mean == forecast(hwmod)$mean))
    expect_error(forecast(hwmod, level = -10))
    expect_error(forecast(hwmod, level = 110))
    # Forecasts transformed manually with Box-Cox should match
    # forecasts when lambda is passed as an argument
    hwmodbc <- stats::HoltWinters(BoxCox(UKgas, lambda = 0.25))
    hwfc <- forecast(hwmodbc, lambda = 0.25, biasadj = FALSE)$mean
    hwfc2 <- forecast(hwmodbc, lambda = 0.25, biasadj = TRUE)$mean
    hwbcfc <- InvBoxCox(forecast(hwmodbc)$mean, lambda = 0.25)
    expect_true(all(hwfc == hwbcfc))
    expect_false(identical(hwfc, hwfc2))
  })
  test_that("test forecast.StructTS()", {
    structtsmod <- stats::StructTS(wineind)
    fc1 <- forecast(structtsmod)$mean
    expect_true(all(fc1 == forecast(structtsmod, fan = TRUE)$mean))
    expect_error(forecast(structtsmod, level = -10))
    expect_error(forecast(structtsmod, level = 110))
    # Forecasts transformed manually with Box-Cox should match
    # forecasts when lambda is passed as an argument
    bcseries <- BoxCox(woolyrnq, lambda = 0.19)
    fc2 <- InvBoxCox(forecast(stats::StructTS(bcseries))$mean, lambda = 0.19)
    fc3 <- forecast(stats::StructTS(bcseries), lambda = 0.19, biasadj = FALSE)$mean
    fc4 <- forecast(stats::StructTS(bcseries), lambda = 0.19, biasadj = TRUE)$mean
    expect_true(all(fc2 == fc3))
    expect_false(identical(fc3, fc4))
  })
  test_that("test croston()", {
    set.seed(1234)
    expect_error(croston(rnorm(100)))
    expect_true(all(croston(rep(0, 100))$mean == 0))
  })
  test_that("test holt()", {
    expect_output(summary(holt(wineind)), regexp = "Forecast method: Holt's method")
    expect_output(summary(holt(wineind, damped = TRUE)), regexp = "Forecast method: Damped Holt's method")
  })
  test_that("test hw()", {
    expect_output(summary(hw(wineind)), regexp = "Forecast method: Holt-Winters' additive method")
  })
  test_that("test naive() and snaive()", {
    # WWWusage has frequency = 1, so naive and snaive should match
    expect_true(all(snaive(WWWusage, h = 10)$mean == naive(WWWusage)$mean))
    expect_true(all(snaive(WWWusage, h = 10)$upper == naive(WWWusage)$upper))
    expect_true(all(snaive(WWWusage, h = 10)$lower == naive(WWWusage)$lower))
    # Constant series should not error
    series <-
ts(rep(950, 20), f = 4) constantForecast <- expect_error(snaive(series), NA) expect_true(is.constant(constantForecast$mean)) }) } forecast/tests/testthat/test-armaroots.R0000644000176200001440000000051013553673010020170 0ustar liggesusers# A unit test for armaroots.R if (require(testthat)) { context("Testing armaroots") test_that("Tests for plot.Arima()", { arimafit <- Arima(lynx, c(2, 0, 2), include.mean = FALSE) plot(arimafit) plot(arimafit, type = "ma") plot(arimafit, type = "ar") expect_warning(plot(Arima(lynx, c(0, 1, 0)))) }) } forecast/tests/testthat/test-subset.R0000644000176200001440000000407313553673010017476 0ustar liggesusers# A unit test for subset function if (require(testthat)) { context("Tests on input") mtsobj <- ts(matrix(rnorm(200), ncol = 2), freq = 4) test_that("tests specifying correct argument", { sub <- subset(wineind, month = "September") expect_that(length(sub), equals(tsp(sub)[2] - tsp(sub)[1] + 1)) expect_that(round(sum(sub)), equals(338985)) sub2 <- subset(wineind, month = "SEPT") expect_identical(sub, sub2) sub2 <- subset(wineind, month = 9) expect_identical(sub, sub2) sub2 <- subset(wineind, season = 9) expect_identical(sub, sub2) sub <- subset(woolyrnq, quarter = 1) expect_that(length(sub), equals(tsp(sub)[2] - tsp(sub)[1] + 1)) expect_that(sum(sub), equals(153142)) sub2 <- subset(woolyrnq, season = 1) expect_identical(sub, sub2) sub <- subset(wineind, subset = wineind < 25000) expect_that(round(sum(sub)), equals(1948985)) expect_that(length(sub), equals(91)) sub <- subset(mtsobj, c(1, 1, rep(0, 98)) == 1) expect_that(ncol(sub), equals(2)) expect_that(nrow(sub), equals(2)) sub <- subset(mtsobj, quarter = 1) expect_that(ncol(sub), equals(2)) expect_that(nrow(sub), equals(25)) }) test_that("tests specifying wrong argument", { expect_error(subset(wineind, quarter = 1), "Data is not quarterly") expect_error(subset(woolyrnq, month = "January"), "Data is not monthly") }) test_that("test for bad input", { expect_error(subset.ts(mtcars, quarter = 1), "Data must be seasonal") expect_error(subset(wineind, subset = c(1, 2)), "subset must be the same length as x") expect_error(subset(mtsobj, mtsobj < .5), "subset must be a vector of rows to keep") expect_error(subset(wineind, month = "Jaan"), "No recognizable months") expect_error(subset(wineind, season = 1:14), "Seasons must be between 1 and 12") expect_error(subset(wineind, month = 1:14), "Months must be between 1 and 12") expect_error(subset(woolyrnq, quarter = "qq1"), "No recognizable quarters") expect_error(subset(woolyrnq, quarter = 1:6), "Quarters must be between 1 and 4") }) } forecast/tests/testthat/test-mforecast.R0000644000176200001440000000465413553673010020161 0ustar liggesusers# A unit test for forecast.R if (require(testthat)) { context("Test mforecast.R") mv_y <- ts(cbind(rnorm(120, 0, 3) + 1:120 + 20 * sin(2 * pi * (1:120) / 12), rnorm(120, 3, 7) + 1:120 + 16 * sin(2 * pi * (1:120 + 6) / 12)), frequency = 12) mv_x <- ts(cbind(rnorm(120, 0, 8) + (1:120) / 2 + 42 * sin(2 * pi * (1:120) / 12), rnorm(120, 3, 7) + (1:120) * -1 + 20 * sin(2 * pi * (1:120 + 6) / 12)), frequency = 12) v_y <- ts(rnorm(120, 0, 8) + (1:120) / 2 + 12 * sin(2 * pi * (1:120) / 12), frequency = 12) v_x <- ts(rnorm(120, 0, 1) + (1:120) * (-1) + 28 * sin(2 * pi * (1:120) / 12), frequency = 12) test_that("tests for is.mforecast()", { fit <- lm(mv_y ~ v_x) fcast <- forecast(fit, newdata = data.frame(v_x = 30)) expect_true(is.mforecast(fcast)) fit <- lm(v_y ~ v_x) fcast <- forecast(fit, newdata = data.frame(v_x = 30)) 
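    # With a univariate response, forecast.lm() returns a plain "forecast"
    # object rather than an "mforecast", so the next check expects FALSE.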
expect_false(is.mforecast(fcast)) }) test_that("tests for mlmsplit()", { fit <- lm(mv_y ~ v_x) fit1 <- mlmsplit(fit, index = 1) fit2 <- mlmsplit(fit, index = 2) fit3 <- lm(mv_y[, 1] ~ v_x) fit4 <- lm(mv_y[, 2] ~ v_x) expect_identical(fit1$coefficients, fit3$coefficients) expect_identical(fit2$coefficients, fit4$coefficients) expect_identical(fit1$rank, fit3$rank) expect_identical(fit2$rank, fit4$rank) expect_equal(fit1$fitted.values, fit3$fitted.values) expect_equal(fit2$fitted.values, fit4$fitted.values) expect_error(mlmsplit(fit), "Must select lm") }) test_that("tests for forecast.mlm()", { fit <- lm(mv_y ~ v_x) fcast <- forecast(fit, newdata = data.frame(v_x = 30)) fit2 <- lm(mv_y[, 1] ~ v_x) fcast2 <- forecast(fit2, newdata = data.frame(v_x = 30)) expect_equal(fcast$forecast[[1]]$residuals, fcast2$residuals) }) test_that("tests for forecast.mts()", { lungDeaths <- cbind(mdeaths, fdeaths) fcast_b <- forecast(lungDeaths) fcast_m <- forecast(mdeaths) fcast_f <- forecast(fdeaths) expect_true(all.equal(fcast_b$forecast[[1]]$mean, fcast_m$mean)) expect_true(all.equal(fcast_b$forecast[[2]]$mean, fcast_f$mean)) }) test_that("tests for print.mforecast()", { fit <- lm(mv_y ~ v_x) fcast <- forecast(fit, newdata = data.frame(v_x = 30)) expect_output(print(fcast), "Series 1") expect_output(print(fcast), "Series 2") }) test_that("tests for plot.mforecast()", { fit <- lm(mv_y ~ v_x) fcast <- forecast(fit, newdata = data.frame(v_x = 30)) plot(fcast) }) } forecast/tests/testthat.R0000644000176200001440000000011713553673010015207 0ustar liggesusersSys.setenv("R_TESTS" = "") if (require(testthat)) { test_check("forecast") } forecast/src/0000755000176200001440000000000013617722446012663 5ustar liggesusersforecast/src/etscalc.c0000644000176200001440000001710313553673010014436 0ustar liggesusers#include #define NONE 0 #define ADD 1 #define MULT 2 #define DAMPED 1 #define TOL 1.0e-10 #define HUGEN 1.0e10 #define NA -99999.0 // Functions called by R void etscalc(double *, int *, double *, int *, int *, int *, int *, double *, double *, double *, double *, double *, double *, double *, int*); void etssimulate(double *, int *, int *, int *, int *, double *, double *, double *, double *, int *, double *, double *); void etsforecast(double *, int *, int *, int *, double *, int *, double *); // Internal functions void forecast(double, double, double *, int, int, int, double, double *, int); void update(double *, double *, double *, double *, double *, double *, int, int, int, double, double, double, double, double); // ****************************************************************** void etscalc(double *y, int *n, double *x, int *m, int *error, int *trend, int *season, double *alpha, double *beta, double *gamma, double *phi, double *e, double *lik, double *amse, int *nmse) { int i, j, nstates; double oldl, l, oldb, b, olds[24], s[24], f[30], lik2, tmp, denom[30]; if((*m > 24) & (*season > NONE)) return; else if(*m < 1) *m = 1; if(*nmse > 30) *nmse = 30; nstates = (*m)*(*season>NONE) + 1 + (*trend>NONE); // Copy initial state components l = x[0]; if(*trend > NONE) b = x[1]; if(*season > NONE) { for(j=0; j<(*m); j++) s[j] = x[(*trend>NONE)+j+1]; } *lik = 0.0; lik2 = 0.0; for(j=0; j<(*nmse); j++) { amse[j] = 0.0; denom[j] = 0.0; } for (i=0; i<(*n); i++) { // COPY PREVIOUS STATE oldl = l; if(*trend > NONE) oldb = b; if(*season > NONE) { for(j=0; j<(*m); j++) olds[j] = s[j]; } // ONE STEP FORECAST forecast(oldl, oldb, olds, *m, *trend, *season, *phi, f, *nmse); if(fabs(f[0]-NA) < TOL) { *lik = NA; return; } if(*error 
== ADD) e[i] = y[i] - f[0]; else e[i] = (y[i] - f[0])/f[0]; for(j=0; j<(*nmse); j++) { if(i+j<(*n)) { denom[j] += 1.0; tmp = y[i+j]-f[j]; amse[j] = (amse[j] * (denom[j]-1.0) + (tmp*tmp)) / denom[j]; } } // UPDATE STATE update(&oldl, &l, &oldb, &b, olds, s, *m, *trend, *season, *alpha, *beta, *gamma, *phi, y[i]); // STORE NEW STATE x[nstates*(i+1)] = l; if(*trend > NONE) x[nstates*(i+1)+1] = b; if(*season > NONE) { for(j=0; j<(*m); j++) x[(*trend>NONE)+nstates*(i+1)+j+1] = s[j]; } *lik = *lik + e[i]*e[i]; lik2 += log(fabs(f[0])); } *lik = (*n) * log(*lik); if(*error == MULT) *lik += 2*lik2; } // ********************************************************************************* void etssimulate(double *x, int *m, int *error, int *trend, int *season, double *alpha, double *beta, double *gamma, double *phi, int *h, double *y, double *e) { int i, j, nstates; double oldl, l, oldb, b, olds[24], s[24], f[10]; if((*m > 24) & (*season > NONE)) return; else if(*m < 1) *m = 1; nstates = (*m)*(*season>NONE) + 1 + (*trend>NONE); // Copy initial state components l = x[0]; if(*trend > NONE) b = x[1]; if(*season > NONE) { for(j=0; j<(*m); j++) s[j] = x[(*trend>NONE)+j+1]; } for (i=0; i<(*h); i++) { // COPY PREVIOUS STATE oldl = l; if(*trend > NONE) oldb = b; if(*season > NONE) { for(j=0; j<(*m); j++) olds[j] = s[j]; } // ONE STEP FORECAST forecast(oldl, oldb, olds, *m, *trend, *season, *phi, f, 1); if(fabs(f[0]-NA) < TOL) { y[0]=NA; return; } if(*error == ADD) y[i] = f[0] + e[i]; else y[i] = f[0]*(1.0+e[i]); // UPDATE STATE update(&oldl, &l, &oldb, &b, olds, s, *m, *trend, *season, *alpha, *beta, *gamma, *phi, y[i]); } } // ********************************************************************************* void etsforecast(double *x, int *m, int *trend, int *season, double *phi, int *h, double *f) { int j; double l, b, s[24]; if((*m > 24) & (*season > NONE)) return; else if(*m < 1) *m = 1; // Copy initial state components l = x[0]; b = 0.0; if(*trend > NONE) b = x[1]; if(*season > NONE) { for(j=0; j<(*m); j++) s[j] = x[(*trend>NONE)+j+1]; } // Compute forecasts forecast(l, b, s, *m, *trend, *season, *phi, f, *h); } // ***************************************************************** void forecast(double l, double b, double *s, int m, int trend, int season, double phi, double *f, int h) { int i,j; double phistar; phistar = phi; // FORECASTS for(i=0; i NONE) { if(trend==ADD) r = (*l) - (*oldl); // l[t]-l[t-1] else //if(trend==MULT) { if(fabs(*oldl) < TOL) r = HUGEN; else r = (*l)/(*oldl); // l[t]/l[t-1] } *b = phib + (beta/alpha)*(r - phib); // b[t] = phi*b[t-1] + beta*(r - phi*b[t-1]) // b[t] = b[t-1]^phi + beta*(r - b[t-1]^phi) } // NEW SEASON if(season > NONE) { if(season==ADD) t = y - q; else //if(season==MULT) { if(fabs(q) < TOL) t = HUGEN; else t = y / q; } s[0] = olds[m-1] + gamma*(t - olds[m-1]); // s[t] = s[t-m] + gamma*(t - s[t-m]) for(j=1; j #include #include //For R's Nelder-Mead solver #include #include #include "etsTargetFunction.h" // This function initializes all the parameters, constructs an // object of type EtsTargetFunction and adds an external pointer // to this object with name "ets.xptr" // to the environment submitted as p_rho // RcppExport SEXP etsTargetFunctionInit(SEXP p_y, SEXP p_nstate, SEXP p_errortype, SEXP p_trendtype, SEXP p_seasontype, SEXP p_damped, SEXP p_lower, SEXP p_upper, SEXP p_opt_crit, SEXP p_nmse, SEXP p_bounds, SEXP p_m, SEXP p_optAlpha, SEXP p_optBeta, SEXP p_optGamma, SEXP p_optPhi, SEXP p_givenAlpha, SEXP p_givenBeta, SEXP p_givenGamma, SEXP p_givenPhi, SEXP 
p_alpha, SEXP p_beta, SEXP p_gamma, SEXP p_phi, SEXP p_rho) { BEGIN_RCPP; EtsTargetFunction* sp = new EtsTargetFunction(); std::vector y = Rcpp::as< std::vector >(p_y); int nstate = Rcpp::as(p_nstate); int errortype = Rcpp::as(p_errortype); int trendtype = Rcpp::as(p_trendtype); int seasontype = Rcpp::as(p_seasontype); bool damped = Rcpp::as(p_damped); std::vector lower = Rcpp::as< std::vector >(p_lower); std::vector upper = Rcpp::as< std::vector >(p_upper); std::string opt_crit = Rcpp::as(p_opt_crit); int nmse = Rcpp::as(p_nmse); std::string bounds = Rcpp::as< std::string >(p_bounds); int m = Rcpp::as(p_m); bool optAlpha = Rcpp::as(p_optAlpha); bool optBeta = Rcpp::as(p_optBeta); bool optGamma = Rcpp::as(p_optGamma); bool optPhi = Rcpp::as(p_optPhi); bool givenAlpha = Rcpp::as(p_givenAlpha); bool givenBeta = Rcpp::as(p_givenBeta); bool givenGamma = Rcpp::as(p_givenGamma); bool givenPhi = Rcpp::as(p_givenPhi); double alpha = Rcpp::as(p_alpha); double beta = Rcpp::as(p_beta); double gamma = Rcpp::as(p_gamma); double phi = Rcpp::as(p_phi); sp->init(y, nstate, errortype, trendtype, seasontype, damped, lower, upper, opt_crit, nmse, bounds, m, optAlpha, optBeta, optGamma, optPhi, givenAlpha, givenBeta, givenGamma, givenPhi, alpha, beta, gamma, phi); Rcpp::Environment e(p_rho); e["ets.xptr"] = Rcpp::XPtr( sp, true ); return Rcpp::wrap(e); END_RCPP; } // RcppExport double targetFunctionRmalschains(SEXP p_par, SEXP p_env) // { // Rcpp::NumericVector par(p_par); // Rcpp::Environment e(p_env); // Rcpp::XPtr sp(e.get("ets.xptr")); // sp->eval(par.begin(), par.size()); // //return Rcpp::wrap(sp->getObjVal()); // return sp->getObjVal(); // } // RcppExport SEXP etsGetTargetFunctionRmalschainsPtr() { // typedef double (*funcPtr)(SEXP, SEXP); // return (Rcpp::XPtr(new funcPtr(&targetFunctionRmalschains))); // } /* RcppExport SEXP targetFunctionRdonlp2(SEXP p_var, SEXP p_env) { Rcpp::Environment e(p_env); Rcpp::XPtr sp(e.get("ets.xptr")); Rcpp::NumericVector var(p_var); int mode = var[0]; int fun_id = var[1]; sp->eval(var.begin()+2, var.size()-2); if(mode == 0) { if(fun_id == 0) { return Rcpp::wrap(sp->getObjVal()); } else { return Rcpp::wrap(0); //return Rcpp::wrap(sp->restrictions[fun_id-1]); } } else if(mode==1) { // error("Gradients are not implemented, exiting."); }; return R_NilValue; } RcppExport SEXP etsGetTargetFunctionRdonlp2Ptr() { typedef SEXP (*funcPtr)(SEXP, SEXP); return (Rcpp::XPtr(new funcPtr(&targetFunctionRdonlp2))); } */ double targetFunctionEtsNelderMead(int n, double *par, void *ex) { EtsTargetFunction* sp = (EtsTargetFunction*) ex; sp->eval(par, n); return sp->getObjVal(); } RcppExport SEXP etsNelderMead(SEXP p_var, SEXP p_env, SEXP p_abstol, SEXP p_intol, SEXP p_alpha, SEXP p_beta, SEXP p_gamma, SEXP p_trace, SEXP p_maxit) { double abstol = Rcpp::as(p_abstol); double intol = Rcpp::as(p_intol); double alpha = Rcpp::as(p_alpha); double beta= Rcpp::as(p_beta); double gamma= Rcpp::as(p_gamma); int trace = Rcpp::as(p_trace); int maxit = Rcpp::as(p_maxit); int fncount = 0, fail=0; double Fmin = 0.0; Rcpp::NumericVector dpar(p_var); Rcpp::NumericVector opar(dpar.size()); Rcpp::Environment e(p_env); Rcpp::XPtr sp(e.get("ets.xptr")); double (*funcPtr)(int n, double *par, void *ex) = targetFunctionEtsNelderMead; nmmin(dpar.size(), dpar.begin(), opar.begin(), &Fmin, funcPtr, &fail, abstol, intol, sp, alpha, beta, gamma, trace, &fncount, maxit); return Rcpp::List::create(Rcpp::Named("value") = Fmin, Rcpp::Named("par") = opar, Rcpp::Named("fail") = fail, Rcpp::Named("fncount") = fncount); } 
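// Usage sketch (an assumption about the R-side calling sequence, not taken
// verbatim from the package's R code): optimisation is driven in two steps.
// etsTargetFunctionInit() stores an external pointer to the freshly built
// EtsTargetFunction in the supplied environment under the name "ets.xptr";
// etsNelderMead() then retrieves that pointer from the same environment and
// hands the objective to R's internal nmmin() Nelder-Mead solver, roughly:
//
//   env <- new.env()
//   .Call("etsTargetFunctionInit", y, nstate, errortype, ..., env)
//   opt <- .Call("etsNelderMead", par, env, -Inf,
//                sqrt(.Machine$double.eps), 1.0, 0.5, 2.0, 0L, 2000L)
//
// where the last five arguments are the simplex alpha/beta/gamma constants,
// the trace flag and the iteration limit expected by etsNelderMead() above.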
forecast/src/makeTBATSMatrices.cpp0000644000176200001440000000563413553673010016571 0ustar liggesusers#include "calcBATS.h" using namespace Rcpp ; SEXP makeTBATSWMatrix(SEXP smallPhi_s, SEXP kVector_s, SEXP arCoefs_s, SEXP maCoefs_s, SEXP tau_s) { BEGIN_RCPP double *smallPhi, *arCoefs, *maCoefs; int *kVector, *tau; int adjustPhi = 0; R_len_t numSeasonal = 0, numCols = 1, p = 0, q = 0; if(!Rf_isNull(smallPhi_s)) { smallPhi = REAL(smallPhi_s); adjustPhi = 1; numCols = numCols + 1; } if(!Rf_isNull(kVector_s)) { tau = &INTEGER(tau_s)[0]; kVector = INTEGER(kVector_s); numSeasonal = LENGTH(kVector_s); numCols = numCols + *tau; } if(!Rf_isNull(arCoefs_s)) { arCoefs = REAL(arCoefs_s); p = LENGTH(arCoefs_s); numCols = numCols + p; } if(!Rf_isNull(maCoefs_s)) { maCoefs = REAL(maCoefs_s); q = LENGTH(maCoefs_s); numCols = numCols + q; } NumericMatrix wTranspose_r(1, numCols); arma::mat wTranspose(wTranspose_r.begin(), wTranspose_r.nrow(), wTranspose_r.ncol(), false); if(!Rf_isNull(kVector_s)) { wTranspose.zeros(); int position = adjustPhi; for(R_len_t s = 0; s < numSeasonal; s++) { //wTranspose.submat(0,(position+1), 0, (position + kVector[s])) = arma::ones(1, kVector[s]); for(int j = (position+1); j <= (position + kVector[s]); j++) { wTranspose(0,j) = 1; } position = position + (2 * kVector[s]); } } wTranspose(0,0) = 1; if(adjustPhi == 1) { wTranspose(0,1) = *smallPhi; } if(!Rf_isNull(arCoefs_s)) { for(R_len_t i = 1; i <= p; i++) { wTranspose(0,(adjustPhi + *tau +i)) = arCoefs[(i-1)]; } } if(!Rf_isNull(maCoefs_s)) { for(R_len_t i = 1; i <= q; i++) { wTranspose(0,(adjustPhi + *tau + p + i)) = maCoefs[(i-1)]; } } arma::mat w = arma::trans(wTranspose); smallPhi = 0; arCoefs = 0; maCoefs = 0; kVector = 0; return List::create( Named("w") = w, Named("w.transpose") = wTranspose ); END_RCPP } SEXP makeCIMatrix(SEXP k_s, SEXP m_s) { BEGIN_RCPP double pi = arma::datum::pi; double lambda, *m; int *k; k = &INTEGER(k_s)[0]; m = &REAL(m_s)[0]; NumericMatrix C(*k, *k); for(int j = 1; j<=*k; j++) { lambda = (2 * pi * j) / *m; C((j-1),(j-1)) = std::cos(lambda); } return wrap(C); END_RCPP } SEXP makeSIMatrix(SEXP k_s, SEXP m_s) { BEGIN_RCPP double pi = arma::datum::pi; double lambda, *m; int *k; k = &INTEGER(k_s)[0]; m = &REAL(m_s)[0]; NumericMatrix S(*k, *k); for(int j = 1; j<=*k; j++) { lambda = (2 * pi * j) / *m; S((j-1),(j-1)) = std::sin(lambda); } return wrap(S); END_RCPP } SEXP makeAIMatrix(SEXP C_s, SEXP S_s, SEXP k_s) { int *k; k = &INTEGER(k_s)[0]; NumericMatrix C_r(C_s); NumericMatrix S_r(S_s); arma::mat C(C_r.begin(), C_r.nrow(), C_r.ncol(), false); arma::mat S(S_r.begin(), S_r.nrow(), S_r.ncol(), false); arma::mat A((*k * 2), (*k * 2)); A.submat(0,0, (*k -1), (*k -1)) = C; A.submat(0,*k, (*k -1), ((*k *2) -1)) = S; A.submat(*k,0, ((*k *2) -1), (*k -1)) = (-1 * S); A.submat(*k,*k, ((*k *2) -1), ((*k *2) -1)) = C; return wrap(A); } forecast/src/calcBATS.cpp0000644000176200001440000002171413553673010014737 0ustar liggesusers#include "calcBATS.h" using namespace Rcpp ; SEXP calcBATS(SEXP ys, SEXP yHats, SEXP wTransposes, SEXP Fs, SEXP xs, SEXP gs, SEXP es ){ BEGIN_RCPP NumericMatrix yr(ys); NumericMatrix yHatr(yHats); NumericMatrix wTransposer(wTransposes); NumericMatrix Fr(Fs); NumericMatrix xr(xs); NumericMatrix gr(gs); NumericMatrix er(es); int t; arma::mat y(yr.begin(), yr.nrow(), yr.ncol(), false); arma::mat yHat(yHatr.begin(), yHatr.nrow(), yHatr.ncol(), false); arma::mat wTranspose(wTransposer.begin(), wTransposer.nrow(), wTransposer.ncol(), false); arma::mat F(Fr.begin(), Fr.nrow(), Fr.ncol(), false); 
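  // Note: the trailing 'false' in these arma::mat constructors is the
  // copy_aux_mem flag, so each matrix is a view over the R-allocated memory
  // rather than a copy; the filtering loop below therefore writes its
  // results (y.hat, e, x) straight back into the R objects passed in.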
arma::mat x(xr.begin(), xr.nrow(), xr.ncol(), false); arma::mat g(gr.begin(), gr.nrow(), gr.ncol(), false); arma::mat e(er.begin(), er.nrow(), er.ncol(), false); for(t = 1; t < yr.ncol(); t++) { yHat.col(t) = wTranspose * x.col((t-1)); e(0,t) = y(0, t) - yHat(0, t); x.col(t) = F * x.col((t-1)) + g * e(0,t); } return List::create( Named("y.hat") = yHat, Named("e") = e, Named("x") = x ); END_RCPP } SEXP calcBATSFaster(SEXP ys, SEXP yHats, SEXP wTransposes, SEXP Fs, SEXP xs, SEXP gs, SEXP es, SEXP xNought_s, SEXP sPeriods_s, SEXP betaV, SEXP tau_s, SEXP p_s, SEXP q_s ) { BEGIN_RCPP NumericMatrix yr(ys); NumericMatrix yHatr(yHats); NumericMatrix wTransposer(wTransposes); NumericMatrix Fr(Fs); NumericMatrix xr(xs); NumericMatrix gr(gs); NumericMatrix er(es); NumericMatrix xNought_r(xNought_s); //IntegerVector sPeriodsR(sPeriods); int adjBeta, previousS, lengthArma, *tau, *p, *q, *sPeriods; R_len_t lengthSeasonal; tau = &INTEGER(tau_s)[0]; p = &INTEGER(p_s)[0]; q = &INTEGER(q_s)[0]; lengthArma = *p + *q; if(!Rf_isNull(sPeriods_s)) { sPeriods = INTEGER(sPeriods_s); lengthSeasonal = LENGTH(sPeriods_s); } if(!Rf_isNull(betaV)) { adjBeta = 1; } else { adjBeta = 0; } arma::mat y(yr.begin(), yr.nrow(), yr.ncol(), false); arma::mat yHat(yHatr.begin(), yHatr.nrow(), yHatr.ncol(), false); arma::mat wTranspose(wTransposer.begin(), wTransposer.nrow(), wTransposer.ncol(), false); arma::mat F(Fr.begin(), Fr.nrow(), Fr.ncol(), false); arma::mat x(xr.begin(), xr.nrow(), xr.ncol(), false); arma::mat g(gr.begin(), gr.nrow(), gr.ncol(), false); arma::mat e(er.begin(), er.nrow(), er.ncol(), false); arma::mat xNought(xNought_r.begin(), xNought_r.nrow(), xNought_r.ncol(), false); if(!Rf_isNull(sPeriods_s)) { //One //Rprintf("one-1\n"); yHat.col(0) = wTranspose.cols(0, adjBeta) * xNought.rows(0, adjBeta); //Rprintf("one-2\n"); previousS = 0; for(R_len_t i = 0; i < lengthSeasonal; i++) { //Rprintf("one-3\n"); yHat(0,0) = yHat(0,0) + xNought( (previousS + sPeriods[i] + adjBeta), 0); previousS += sPeriods[i]; } if(lengthArma > 0) { //Rprintf("bg-1"); yHat.col(0) = yHat(0,0) + wTranspose.cols((*tau + adjBeta + 1), (xNought.n_rows-1)) * xNought.rows((*tau + adjBeta + 1), (xNought.n_rows-1)); } //Two e(0,0) = y(0, 0) - yHat(0, 0); //Three //Rprintf("three-5\n"); x.submat(0, 0, adjBeta, 0) = F.submat(0,0,adjBeta,adjBeta) * xNought.rows(0,adjBeta); if(lengthArma > 0) { //Rprintf("bg-2"); x.submat(0, 0, adjBeta, 0) += F.submat(0,(adjBeta+ *tau + 1),adjBeta,(F.n_cols - 1)) * xNought.rows((adjBeta+ *tau + 1),(F.n_cols - 1)); } previousS = 0; for(R_len_t i = 0; i < lengthSeasonal; i++) { //Rprintf("three-7\n"); x((adjBeta+previousS+1),0) = xNought((adjBeta+previousS+sPeriods[i]),0); if(lengthArma > 0) { //Rprintf("bg-3"); x.submat((adjBeta+previousS+1),0, (adjBeta+previousS+1),0) = x.submat((adjBeta+previousS+1),0, (adjBeta+previousS+1),0) + F.submat((adjBeta + previousS + 1), (adjBeta+*tau+1), (adjBeta + previousS + 1), (F.n_cols-1)) * xNought.rows((adjBeta + *tau +1), (F.n_cols-1)); } //Rprintf("three-9\n"); x.submat((adjBeta + previousS + 2), 0, (adjBeta + previousS + sPeriods[i]), 0) = xNought.rows((adjBeta + previousS + 1), (adjBeta + previousS + sPeriods[i] -1)); previousS += sPeriods[i]; } if(*p > 0) { //Rprintf("bg-4"); x.submat((adjBeta+ *tau + 1),0,(adjBeta+ *tau + 1),0) = F.submat((adjBeta + *tau +1), (adjBeta + *tau +1), (adjBeta + *tau + 1), (F.n_cols-1)) * xNought.rows((adjBeta+*tau+1), (F.n_cols-1)); //Rprintf("bg-5"); ////error is HERE!!! 
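      // Layout of the state vector x implied by the offset arithmetic in
      // this routine: [ level, trend (when adjBeta == 1), tau seasonal
      // states, p AR lags, q MA lags ]. So (adjBeta + *tau + 1) indexes the
      // first ARMA state, and the submat copies below shift the AR lag
      // register down by one position each step.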
if(*p > 1) { x.submat((adjBeta + *tau + 2),0,(adjBeta + *tau + *p),0) = xNought.rows((adjBeta + *tau + 1),(adjBeta + *tau + *p-1)); } } if(*q > 0) { //Rprintf("three-12\n"); x((adjBeta+ *tau + *p + 1),0) = 0; if(*q > 1) { //Rprintf("three-13\n"); x.submat((adjBeta+ *tau + *p + 2), 0, (adjBeta + *tau + *p + *q) , 0) = xNought.rows((adjBeta + *tau + *p + 1),(adjBeta + *tau + *p + *q - 1)); } } ///Temporary fix! //x.col(0) += g * e(0,0); //End /////////// x(0,0) += g(0,0) * e(0,0); if(adjBeta == 1) { x(1,0) += g(1,0) * e(0,0); } previousS = 0; for(R_len_t i = 0; i < lengthSeasonal; i++) { x((adjBeta+previousS+1),0) += g((adjBeta+previousS+1),0) * e(0,0); previousS += sPeriods[i]; } if(*p > 0) { x((adjBeta + *tau + 1),0) += e(0,0); if(*q > 0) { x((adjBeta + *tau + *p + 1),0) += e(0,0); } } else if(*q > 0) { x((adjBeta + *tau + 1),0) += e(0,0); } ///////////////////////////////// for(int t = 1; t < yr.ncol(); t++) { //Rprintf("point-x\n"); //One yHat.col(t) = wTranspose.cols(0, adjBeta) * x.submat(0, (t-1), adjBeta, (t-1)); previousS = 0; for(R_len_t i = 0; i < lengthSeasonal; i++) { //mod here //Rprintf("point-xx\n"); yHat(0,t) += x((previousS + sPeriods[i] + adjBeta), (t-1)); previousS += sPeriods[i]; } if(lengthArma > 0) { //Rprintf("bg-6"); yHat.col(t) += wTranspose.cols((*tau + adjBeta + 1), (xNought.n_rows-1)) * x.submat((*tau + adjBeta + 1), (t-1), (x.n_rows-1), (t-1)); } //Two //Rprintf("point-x4\n"); e(0,t) = y(0, t) - yHat(0, t); //Three //Rprintf("point-x5\n"); x.submat(0, t, adjBeta, t) = F.submat(0,0,adjBeta,adjBeta) * x.submat(0, (t-1), adjBeta, (t-1)); if(lengthArma > 0) { //Rprintf("bg-7"); x.submat(0, t, adjBeta, t) += F.submat(0,(adjBeta+ *tau + 1),adjBeta,(F.n_cols - 1)) * x.submat((adjBeta+ *tau + 1), (t-1), (F.n_cols - 1), (t-1)); } previousS = 0; for(R_len_t i = 0; i < lengthSeasonal; i++) { //Rprintf("point-x7\n"); x((adjBeta+previousS+1),t) = x((adjBeta+previousS+sPeriods[i]),(t-1)); if(lengthArma > 0) { //Rprintf("bg-8"); x.submat((adjBeta+previousS+1),t, (adjBeta+previousS+1),t) += F.submat((adjBeta + previousS + 1), (adjBeta+*tau+1), (adjBeta + previousS + 1), (F.n_cols-1)) * x.submat((adjBeta + *tau +1), (t-1), (F.n_cols-1), (t-1)); } //Rprintf("Three-L-9\n"); x.submat((adjBeta + previousS + 2), t, (adjBeta + previousS + sPeriods[i]), t) = x.submat((adjBeta + previousS + 1), (t-1), (adjBeta + previousS + sPeriods[i] -1), (t-1)); previousS += sPeriods[i]; } /* if(lengthArma > 0) { x.submat((adjBeta+ *tau + 1),t, (x.n_rows-1),t) = F.submat((adjBeta+ *tau + 1), (adjBeta+ *tau + 1), (F.n_rows - 1), (F.n_rows - 1)) * x.submat((adjBeta+ *tau + 1),(t-1), (x.n_rows-1),(t-1)); } */ if(*p > 0) { //Rprintf("bg-9"); x.submat((adjBeta+ *tau + 1),t, (adjBeta+ *tau + 1),t) = F.submat((adjBeta + *tau +1), (adjBeta + *tau +1), (adjBeta + *tau + 1), (F.n_cols-1)) * x.submat((adjBeta+*tau+1), (t-1), (F.n_cols-1), (t-1)); if(*p > 1) { x.submat((adjBeta + *tau + 2),t,(adjBeta + *tau + *p),t) = x.submat((adjBeta + *tau + 1), (t-1), (adjBeta + *tau + *p -1), (t-1)); } } if(*q > 0) { x((adjBeta+ *tau + *p + 1),t) = 0; if(*q > 1) { x.submat((adjBeta+ *tau + *p + 2), t, (adjBeta + *tau + *p + *q) , t) = x.submat((adjBeta + *tau + *p + 1), (t-1), (adjBeta + *tau + *p + *q - 1), (t-1)); } } ///Temporary fix! 
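    // The commented-out rank-one update below is replaced by the explicit
    // per-component updates that follow it: the level, trend and the lead
    // state of each seasonal block are nudged by g * e(0,t), while the first
    // AR and MA states receive the raw error e(0,t) directly (their gain is
    // fixed at 1 in makeBATSGMatrix).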
//x.col(t) += g * e(0,t); //End /////////// x(0,t) += g(0,0) * e(0,t); if(adjBeta == 1) { x(1,t) += g(1,0) * e(0,t); } previousS = 0; for(R_len_t i = 0; i < lengthSeasonal; i++) { x((adjBeta+previousS+1),t) += g((adjBeta+previousS+1),0) * e(0,t); previousS += sPeriods[i]; } if(*p > 0) { x((adjBeta + *tau + 1),t) += e(0,t); if(*q > 0) { x((adjBeta + *tau + *p + 1),t) += e(0,t); } } else if(*q > 0) { x((adjBeta + *tau + 1),t) += e(0,t); } ///////////////////////////////// } } else { yHat.col(0) = wTranspose * xNought; e(0,0) = y(0, 0) - yHat(0, 0); x.col(0) = F * xNought + g * e(0,0); for(int t = 1; t < yr.ncol(); t++) { yHat.col(t) = wTranspose * x.col((t-1)); e(0,t) = y(0, t) - yHat(0, t); x.col(t) = F * x.col((t-1)) + g * e(0,t); } } return R_NilValue; END_RCPP } SEXP calcWTilda(SEXP wTildaTransposes, SEXP Ds) { BEGIN_RCPP NumericMatrix wTildaTransposer(wTildaTransposes); NumericMatrix Dr(Ds); int t; arma::mat wTildaTranspose(wTildaTransposer.begin(), wTildaTransposer.nrow(), wTildaTransposer.ncol(), false); arma::mat D(Dr.begin(), Dr.nrow(), Dr.ncol(), false); for(t = 1; t < wTildaTransposer.nrow(); t++) { wTildaTranspose.row(t) = wTildaTranspose.row((t-1)) * D; } return wTildaTransposer; END_RCPP } forecast/src/calcBATS.h0000644000176200001440000000464013553673010014403 0ustar liggesusers#ifndef _forecast_CALCBATS #define _forecast_CALCBATS ///////////////////////////////////// // if unable to compile, please comment these lines // #define __GXX_EXPERIMENTAL_CXX0X__ 1 // #ifndef HAVE_ERRNO_T // typedef int errno_t; // #endif // #if __WORDSIZE == 64 // # ifndef __intptr_t_defined // typedef long int intptr_t; // # define __intptr_t_defined // # endif // typedef unsigned long int uintptr_t; // #else // # ifndef __intptr_t_defined // typedef int intptr_t; // # define __intptr_t_defined // # endif // typedef unsigned int uintptr_t; // #endif // #include // #include // #include // #include // #include // if unable to compile, please comment these lines ///////////////////////////////////// #include #include RcppExport SEXP calcBATS(SEXP ys, SEXP yHats, SEXP wTransposes, SEXP Fs, SEXP xs, SEXP gs, SEXP es ) ; RcppExport SEXP calcBATSFaster(SEXP ys, SEXP yHats, SEXP wTransposes, SEXP Fs, SEXP xs, SEXP gs, SEXP es, SEXP xNought_s, SEXP sPeriods_s, SEXP betaV, SEXP tau_s, SEXP p_s, SEXP q_s ) ; RcppExport SEXP calcWTilda(SEXP wTildaTransposes, SEXP Ds) ; RcppExport SEXP makeBATSWMatrix(SEXP smallPhi_s, SEXP sPeriods_s, SEXP arCoefs_s, SEXP maCoefs_s) ; RcppExport SEXP makeBATSGMatrix(SEXP alpha_s, SEXP beta_s, SEXP gammaVector_s, SEXP seasonalPeriods_s, SEXP p_s, SEXP q_s) ; RcppExport SEXP updateFMatrix(SEXP F_s, SEXP smallPhi_s, SEXP alpha_s, SEXP beta_s, SEXP gammaBold_s, SEXP ar_s, SEXP ma_s, SEXP tau_s) ; RcppExport SEXP updateWtransposeMatrix(SEXP wTranspose_s, SEXP smallPhi_s, SEXP tau_s, SEXP arCoefs_s, SEXP maCoefs_s, SEXP p_s, SEXP q_s) ; RcppExport SEXP updateGMatrix(SEXP g_s, SEXP gammaBold_s, SEXP alpha_s, SEXP beta_s, SEXP gammaVector_s, SEXP seasonalPeriods_s) ; //TBATS Functions RcppExport SEXP makeTBATSWMatrix(SEXP smallPhi_s, SEXP kVector_s, SEXP arCoefs_s, SEXP maCoefs_s, SEXP tau_s) ; RcppExport SEXP makeCIMatrix(SEXP k_s, SEXP m_s) ; RcppExport SEXP makeSIMatrix(SEXP k_s, SEXP m_s) ; RcppExport SEXP makeAIMatrix(SEXP C_s, SEXP S_s, SEXP k_s) ; RcppExport SEXP updateTBATSGammaBold(SEXP gammaBold_s, SEXP kVector_s, SEXP gammaOne_s, SEXP gammaTwo_s) ; RcppExport SEXP updateTBATSGMatrix(SEXP g_s, SEXP gammaBold_s, SEXP alpha_s, SEXP beta_s) ; RcppExport SEXP calcTBATSFaster(SEXP 
ys, SEXP yHats, SEXP wTransposes, SEXP Fs, SEXP xs, SEXP gs, SEXP es, SEXP xNought_s) ; #endif forecast/src/Makevars0000644000176200001440000000017313553673010014347 0ustar liggesusers## Use the R_HOME indirection to support installations of multiple R version PKG_LIBS=$(LAPACK_LIBS) $(BLAS_LIBS) $(FLIBS) forecast/src/Makevars.win0000644000176200001440000000035313553673010015143 0ustar liggesusers ## This assume that we can call Rscript to ask Rcpp about its locations ## Use the R_HOME indirection to support installations of multiple R version PKG_LIBS=$(LAPACK_LIBS) $(BLAS_LIBS) $(FLIBS) ## Use C++11 compiler CXX_STD = CXX11 forecast/src/updateTBATSMatrices.cpp0000644000176200001440000000252613553673010017133 0ustar liggesusers#include "calcBATS.h" using namespace Rcpp ; SEXP updateTBATSGammaBold(SEXP gammaBold_s, SEXP kVector_s, SEXP gammaOne_s, SEXP gammaTwo_s) { BEGIN_RCPP NumericMatrix gammaBold(gammaBold_s); IntegerVector kVector(kVector_s); NumericVector gammaOne(gammaOne_s); NumericVector gammaTwo(gammaTwo_s); int endPos = 0; int numK = kVector.size(); for(int i =0; i < numK; i++) { for(int j = endPos; j < (kVector(i) + endPos); j++) { gammaBold(0,j)=gammaOne(i); } for(int j = (kVector(i) + endPos); j < ((2*kVector(i)) + endPos); j++) { gammaBold(0,j)=gammaTwo(i); } endPos += 2 * kVector(i); } return R_NilValue; END_RCPP } SEXP updateTBATSGMatrix(SEXP g_s, SEXP gammaBold_s, SEXP alpha_s, SEXP beta_s) { BEGIN_RCPP int adjBeta = 0; double *gammaVector; NumericMatrix g_r(g_s); //Rprintf("one\n"); g_r(0,0) = REAL(alpha_s)[0]; //Rprintf("two\n"); if(!Rf_isNull(beta_s)) { //Rprintf("three\n"); g_r(1,0) = REAL(beta_s)[0]; adjBeta = 1; } //Rprintf("four\n"); if(!Rf_isNull(gammaBold_s)) { NumericMatrix gammaBold_r(gammaBold_s); arma::mat gammaBold(gammaBold_r.begin(), gammaBold_r.nrow(), gammaBold_r.ncol(), false); arma::mat g(g_r.begin(), g_r.nrow(), g_r.ncol(), false); g.submat((adjBeta+1), 0,(adjBeta+gammaBold.n_cols), 0) = trans(gammaBold); } //Rprintf("five\n"); return R_NilValue; END_RCPP } forecast/src/registerDynamicSymbol.c0000644000176200001440000000034513553673010017337 0ustar liggesusers// RegisteringDynamic Symbols #include #include #include void R_init_markovchain(DllInfo* info) { R_registerRoutines(info, NULL, NULL, NULL, NULL); R_useDynamicSymbols(info, TRUE); } forecast/src/etspolyroot.c0000644000176200001440000003533313553673010015430 0ustar liggesusers/* Formerly src/appl/cpoly.c: * * Copyright (C) 1997-1998 Ross Ihaka * Copyright (C) 1999-2001 R Core Team * * cpoly finds the zeros of a complex polynomial. * * On Entry * * opr, opi - double precision vectors of real and * imaginary parts of the coefficients in * order of decreasing powers. * * degree - int degree of polynomial. * * * On Return * * zeror, zeroi - output double precision vectors of * real and imaginary parts of the zeros. * * fail - output int parameter, true only if * leading coefficient is zero or if cpoly * has found fewer than degree zeros. * * The program has been written to reduce the chance of overflow * occurring. If it does occur, there is still a possibility that * the zerofinder will work provided the overflowed quantity is * replaced by a large number. * * This is a C translation of the following. * * TOMS Algorithm 419 * Jenkins and Traub. * Comm. ACM 15 (1972) 97-99. 
* * Ross Ihaka * February 1997 */ #include /* for declaration of hypot */ #include /* for declaration of R_alloc */ #include /* for FLT_RADIX */ #include /* for R_pow_di */ static void calct(Rboolean *); static Rboolean fxshft(int, double *, double *); static Rboolean vrshft(int, double *, double *); static void nexth(Rboolean); static void noshft(int); static void polyev(int, double, double, double *, double *, double *, double *, double *, double *); static double errev(int, double *, double *, double, double, double, double); static double cpoly_cauchy(int, double *, double *); static double cpoly_scale(int, double *, double, double, double, double); static void cdivid(double, double, double, double, double *, double *); /* Global Variables (too many!) */ static int nn; static double *pr, *pi, *hr, *hi, *qpr, *qpi, *qhr, *qhi, *shr, *shi; static double sr, si; static double tr, ti; static double pvr, pvi; static const double eta = DBL_EPSILON; static const double are = /* eta = */DBL_EPSILON; static const double mre = 2. * M_SQRT2 * /* eta, i.e. */DBL_EPSILON; static const double infin = DBL_MAX; void cpolyroot(double *opr, double *opi, int *degree, double *zeror, double *zeroi, Rboolean *fail) { static const double smalno = DBL_MIN; static const double base = (double)FLT_RADIX; static int d_n, i, i1, i2; static double zi, zr, xx, yy; static double bnd, xxx; Rboolean conv; int d1; double *tmp; static const double cosr =/* cos 94 */ -0.06975647374412529990; static const double sinr =/* sin 94 */ 0.99756405025982424767; xx = M_SQRT1_2;/* 1/sqrt(2) = 0.707.... */ yy = -xx; *fail = FALSE; nn = *degree; d1 = nn - 1; /* algorithm fails if the leading coefficient is zero. */ if (opr[0] == 0. && opi[0] == 0.) { *fail = TRUE; return; } /* remove the zeros at the origin if any. */ while (opr[nn] == 0. && opi[nn] == 0.) { d_n = d1-nn+1; zeror[d_n] = 0.; zeroi[d_n] = 0.; nn--; } nn++; /*-- Now, global var. nn := #{coefficients} = (relevant degree)+1 */ if (nn == 1) return; /* Use a single allocation as these as small */ tmp = (double *) R_alloc((size_t) (10*nn), sizeof(double)); pr = tmp; pi = tmp + nn; hr = tmp + 2*nn; hi = tmp + 3*nn; qpr = tmp + 4*nn; qpi = tmp + 5*nn; qhr = tmp + 6*nn; qhi = tmp + 7*nn; shr = tmp + 8*nn; shi = tmp + 9*nn; /* make a copy of the coefficients and shr[] = | p[] | */ for (i = 0; i < nn; i++) { pr[i] = opr[i]; pi[i] = opi[i]; shr[i] = hypot(pr[i], pi[i]); } /* scale the polynomial with factor 'bnd'. */ bnd = cpoly_scale(nn, shr, eta, infin, smalno, base); if (bnd != 1.) { for (i=0; i < nn; i++) { pr[i] *= bnd; pi[i] *= bnd; } } /* start the algorithm for one zero */ while (nn > 2) { /* calculate bnd, a lower bound on the modulus of the zeros. */ for (i=0 ; i < nn ; i++) shr[i] = hypot(pr[i], pi[i]); bnd = cpoly_cauchy(nn, shr, shi); /* outer loop to control 2 major passes */ /* with different sequences of shifts */ for (i1 = 1; i1 <= 2; i1++) { /* first stage calculation, no shift */ noshft(5); /* inner loop to select a shift */ for (i2 = 1; i2 <= 9; i2++) { /* shift is chosen with modulus bnd */ /* and amplitude rotated by 94 degrees */ /* from the previous shift */ xxx= cosr * xx - sinr * yy; yy = sinr * xx + cosr * yy; xx = xxx; sr = bnd * xx; si = bnd * yy; /* second stage calculation, fixed shift */ conv = fxshft(i2 * 10, &zr, &zi); if (conv) goto L10; } } /* the zerofinder has failed on two major passes */ /* return empty handed */ *fail = TRUE; return; /* the second stage jumps directly to the third stage iteration. 
* if successful, the zero is stored and the polynomial deflated. */ L10: d_n = d1+2 - nn; zeror[d_n] = zr; zeroi[d_n] = zi; --nn; for (i=0; i < nn ; i++) { pr[i] = qpr[i]; pi[i] = qpi[i]; } }/*while*/ /* calculate the final zero and return */ cdivid(-pr[1], -pi[1], pr[0], pi[0], &zeror[d1], &zeroi[d1]); return; } /* Computes the derivative polynomial as the initial * polynomial and computes l1 no-shift h polynomials. */ static void noshft(int l1) { int i, j, jj, n = nn - 1, nm1 = n - 1; double t1, t2, xni; for (i=0; i < n; i++) { xni = (double)(nn - i - 1); hr[i] = xni * pr[i] / n; hi[i] = xni * pi[i] / n; } for (jj = 1; jj <= l1; jj++) { if (hypot(hr[n-1], hi[n-1]) <= eta * 10.0 * hypot(pr[n-1], pi[n-1])) { /* If the constant term is essentially zero, */ /* shift h coefficients. */ for (i = 1; i <= nm1; i++) { j = nn - i; hr[j-1] = hr[j-2]; hi[j-1] = hi[j-2]; } hr[0] = 0.; hi[0] = 0.; } else { cdivid(-pr[nn-1], -pi[nn-1], hr[n-1], hi[n-1], &tr, &ti); for (i = 1; i <= nm1; i++) { j = nn - i; t1 = hr[j-2]; t2 = hi[j-2]; hr[j-1] = tr * t1 - ti * t2 + pr[j-1]; hi[j-1] = tr * t2 + ti * t1 + pi[j-1]; } hr[0] = pr[0]; hi[0] = pi[0]; } } } /* Computes l2 fixed-shift h polynomials and tests for convergence. * initiates a variable-shift iteration and returns with the * approximate zero if successful. */ static Rboolean fxshft(int l2, double *zr, double *zi) { /* l2 - limit of fixed shift steps * zr,zi - approximate zero if convergence (result TRUE) * * Return value indicates convergence of stage 3 iteration * * Uses global (sr,si), nn, pr[], pi[], .. (all args of polyev() !) */ Rboolean pasd, bool, test; static double svsi, svsr; static int i, j, n; static double oti, otr; n = nn - 1; /* evaluate p at s. */ polyev(nn, sr, si, pr, pi, qpr, qpi, &pvr, &pvi); test = TRUE; pasd = FALSE; /* calculate first t = -p(s)/h(s). */ calct(&bool); /* main loop for one second stage step. */ for (j=1; j<=l2; j++) { otr = tr; oti = ti; /* compute next h polynomial and new t. */ nexth(bool); calct(&bool); *zr = sr + tr; *zi = si + ti; /* test for convergence unless stage 3 has */ /* failed once or this is the last h polynomial. */ if (!bool && test && j != l2) { if (hypot(tr - otr, ti - oti) >= hypot(*zr, *zi) * 0.5) { pasd = FALSE; } else if (! pasd) { pasd = TRUE; } else { /* the weak convergence test has been */ /* passed twice, start the third stage */ /* iteration, after saving the current */ /* h polynomial and shift. */ for (i = 0; i < n; i++) { shr[i] = hr[i]; shi[i] = hi[i]; } svsr = sr; svsi = si; if (vrshft(10, zr, zi)) { return TRUE; } /* the iteration failed to converge. */ /* turn off testing and restore */ /* h, s, pv and t. */ test = FALSE; for (i=1 ; i<=n ; i++) { hr[i-1] = shr[i-1]; hi[i-1] = shi[i-1]; } sr = svsr; si = svsi; polyev(nn, sr, si, pr, pi, qpr, qpi, &pvr, &pvi); calct(&bool); } } } /* attempt an iteration with final h polynomial */ /* from second stage. */ return(vrshft(10, zr, zi)); } /* carries out the third stage iteration. */ static Rboolean vrshft(int l3, double *zr, double *zi) { /* l3 - limit of steps in stage 3. * zr,zi - on entry contains the initial iterate; * if the iteration converges it contains * the final iterate on exit. * Returns TRUE if iteration converges * * Assign and uses GLOBAL sr, si */ Rboolean bool, b; static int i, j; static double r1, r2, mp, ms, tp, relstp; static double omp; b = FALSE; sr = *zr; si = *zi; /* main loop for stage three */ for (i = 1; i <= l3; i++) { /* evaluate p at s and test for convergence. 
*/ polyev(nn, sr, si, pr, pi, qpr, qpi, &pvr, &pvi); mp = hypot(pvr, pvi); ms = hypot(sr, si); if (mp <= 20. * errev(nn, qpr, qpi, ms, mp, /*are=*/eta, mre)) { goto L_conv; } /* polynomial value is smaller in value than */ /* a bound on the error in evaluating p, */ /* terminate the iteration. */ if (i != 1) { if (!b && mp >= omp && relstp < .05) { /* iteration has stalled. probably a */ /* cluster of zeros. do 5 fixed shift */ /* steps into the cluster to force */ /* one zero to dominate. */ tp = relstp; b = TRUE; if (relstp < eta) tp = eta; r1 = sqrt(tp); r2 = sr * (r1 + 1.) - si * r1; si = sr * r1 + si * (r1 + 1.); sr = r2; polyev(nn, sr, si, pr, pi, qpr, qpi, &pvr, &pvi); for (j = 1; j <= 5; ++j) { calct(&bool); nexth(bool); } omp = infin; goto L10; } else { /* exit if polynomial value */ /* increases significantly. */ if (mp * .1 > omp) return FALSE; } } omp = mp; /* calculate next iterate. */ L10: calct(&bool); nexth(bool); calct(&bool); if (!bool) { relstp = hypot(tr, ti) / hypot(sr, si); sr += tr; si += ti; } } return FALSE; L_conv: *zr = sr; *zi = si; return TRUE; } static void calct(Rboolean *bool) { /* computes t = -p(s)/h(s). * bool - logical, set true if h(s) is essentially zero. */ int n = nn - 1; double hvi, hvr; /* evaluate h(s). */ polyev(n, sr, si, hr, hi, qhr, qhi, &hvr, &hvi); *bool = hypot(hvr, hvi) <= are * 10. * hypot(hr[n-1], hi[n-1]); if (!*bool) { cdivid(-pvr, -pvi, hvr, hvi, &tr, &ti); } else { tr = 0.; ti = 0.; } } static void nexth(Rboolean bool) { /* calculates the next shifted h polynomial. * bool : if TRUE h(s) is essentially zero */ int j, n = nn - 1; double t1, t2; if (!bool) { for (j=1; j < n; j++) { t1 = qhr[j - 1]; t2 = qhi[j - 1]; hr[j] = tr * t1 - ti * t2 + qpr[j]; hi[j] = tr * t2 + ti * t1 + qpi[j]; } hr[0] = qpr[0]; hi[0] = qpi[0]; } else { /* if h(s) is zero replace h with qh. */ for (j=1; j < n; j++) { hr[j] = qhr[j-1]; hi[j] = qhi[j-1]; } hr[0] = 0.; hi[0] = 0.; } } /*--------------------- Independent Complex Polynomial Utilities ----------*/ static void polyev(int n, double s_r, double s_i, double *p_r, double *p_i, double *q_r, double *q_i, double *v_r, double *v_i) { /* evaluates a polynomial p at s by the horner recurrence * placing the partial sums in q and the computed value in v_. */ int i; double t; q_r[0] = p_r[0]; q_i[0] = p_i[0]; *v_r = q_r[0]; *v_i = q_i[0]; for (i = 1; i < n; i++) { t = *v_r * s_r - *v_i * s_i + p_r[i]; q_i[i] = *v_i = *v_r * s_i + *v_i * s_r + p_i[i]; q_r[i] = *v_r = t; } } static double errev(int n, double *qr, double *qi, double ms, double mp, double a_re, double m_re) { /* bounds the error in evaluating the polynomial by the horner * recurrence. * * qr,qi - the partial sum vectors * ms - modulus of the point * mp - modulus of polynomial value * a_re,m_re - error bounds on complex addition and multiplication */ double e; int i; e = hypot(qr[0], qi[0]) * m_re / (a_re + m_re); for (i=0; i < n; i++) e = e*ms + hypot(qr[i], qi[i]); return e * (a_re + m_re) - mp * m_re; } static double cpoly_cauchy(int n, double *pot, double *q) { /* Computes a lower bound on the moduli of the zeros of a polynomial * pot[1:nn] is the modulus of the coefficients. */ double f, x, delf, dx, xm; int i, n1 = n - 1; pot[n1] = -pot[n1]; /* compute upper estimate of bound. */ x = exp((log(-pot[n1]) - log(pot[0])) / (double) n1); /* if newton step at the origin is better, use it. */ if (pot[n1-1] != 0.) { xm = -pot[n1] / pot[n1-1]; if (xm < x) x = xm; } /* chop the interval (0,x) unitl f le 0. 
*/ for(;;) { xm = x * 0.1; f = pot[0]; for (i = 1; i < n; i++) f = f * xm + pot[i]; if (f <= 0.0) { break; } x = xm; } dx = x; /* do Newton iteration until x converges to two decimal places. */ while (fabs(dx / x) > 0.005) { q[0] = pot[0]; for(i = 1; i < n; i++) q[i] = q[i-1] * x + pot[i]; f = q[n1]; delf = q[0]; for(i = 1; i < n1; i++) delf = delf * x + q[i]; dx = f / delf; x -= dx; } return x; } static double cpoly_scale(int n, double *pot, double eps, double BIG, double small, double base) { /* Returns a scale factor to multiply the coefficients of the polynomial. * The scaling is done to avoid overflow and to avoid * undetected underflow interfering with the convergence criterion. * The factor is a power of the base. * pot[1:n] : modulus of coefficients of p * eps,BIG, * small,base - constants describing the floating point arithmetic. */ int i, ell; double x, high, sc, lo, min_, max_; /* find largest and smallest moduli of coefficients. */ high = sqrt(BIG); lo = small / eps; max_ = 0.; min_ = BIG; for (i = 0; i < n; i++) { x = pot[i]; if (x > max_) max_ = x; if (x != 0. && x < min_) min_ = x; } /* scale only if there are very large or very small components. */ if (min_ < lo || max_ > high) { x = lo / min_; if (x <= 1.) sc = 1. / (sqrt(max_) * sqrt(min_)); else { sc = x; if (BIG / sc > max_) sc = 1.0; } ell = (int) (log(sc) / log(base) + 0.5); return R_pow_di(base, ell); } else return 1.0; } static void cdivid(double ar, double ai, double br, double bi, double *cr, double *ci) { /* complex division c = a/b, i.e., (cr +i*ci) = (ar +i*ai) / (br +i*bi), avoiding overflow. */ double d, r; if (br == 0. && bi == 0.) { /* division by zero, c = infinity. */ *cr = *ci = R_PosInf; } else if (fabs(br) >= fabs(bi)) { r = bi / br; d = br + r * bi; *cr = (ar + ai * r) / d; *ci = (ai - ar * r) / d; } else { r = br / bi; d = bi + r * br; *cr = (ar * r + ai) / d; *ci = (ai * r - ar) / d; } } /* static double cpoly_cmod(double *r, double *i) * --> replaced by hypot() everywhere */ forecast/src/calcTBATS.cpp0000644000176200001440000000231113553673010015053 0ustar liggesusers#include "calcBATS.h" using namespace Rcpp ; SEXP calcTBATSFaster(SEXP ys, SEXP yHats, SEXP wTransposes, SEXP Fs, SEXP xs, SEXP gs, SEXP es, SEXP xNought_s) { BEGIN_RCPP NumericMatrix yr(ys); NumericMatrix yHatr(yHats); NumericMatrix wTransposer(wTransposes); NumericMatrix Fr(Fs); NumericMatrix xr(xs); NumericMatrix gr(gs); NumericMatrix er(es); NumericMatrix xNought_r(xNought_s); arma::mat y(yr.begin(), yr.nrow(), yr.ncol(), false); arma::mat yHat(yHatr.begin(), yHatr.nrow(), yHatr.ncol(), false); arma::mat wTranspose(wTransposer.begin(), wTransposer.nrow(), wTransposer.ncol(), false); arma::mat F(Fr.begin(), Fr.nrow(), Fr.ncol(), false); arma::mat x(xr.begin(), xr.nrow(), xr.ncol(), false); arma::mat g(gr.begin(), gr.nrow(), gr.ncol(), false); arma::mat e(er.begin(), er.nrow(), er.ncol(), false); arma::mat xNought(xNought_r.begin(), xNought_r.nrow(), xNought_r.ncol(), false); yHat.col(0) = wTranspose * xNought; e(0,0) = y(0, 0) - yHat(0, 0); x.col(0) = F * xNought + g * e(0,0); for(int t = 1; t < yr.ncol(); t++) { yHat.col(t) = wTranspose * x.col((t-1)); e(0,t) = y(0, t) - yHat(0, t); x.col(t) = F * x.col((t-1)) + g * e(0,t); } return R_NilValue; END_RCPP } forecast/src/etsTargetFunction.h0000644000176200001440000000267113553673010016501 0ustar liggesusers#include #include extern "C" { void etscalc(double *, int *, double *, int *, int *, int *, int *, double *, double *, double *, double *, double *, double *, double *, int *); void 
cpolyroot(double *opr, double *opi, int *degree, double *zeror, double *zeroi, Rboolean *fail); } class EtsTargetFunction { public: void eval(const double* p_var, int p_var_length); void init(std::vector & p_y, int p_nstate, int p_errortype, int p_trendtype, int p_seasontype, bool p_damped, std::vector & p_lower, std::vector & p_upper, std::string p_opt_crit, int p_nmse, std::string p_bounds, int p_m, bool p_optAlpha, bool p_optBeta, bool p_optGamma, bool p_optPhi, bool p_givenAlpha, bool p_givenBeta, bool p_givenGamma, bool p_givenPhi, double alpha, double beta, double gamma, double phi); double getObjVal() { return(objval); }; private: bool check_params(); bool admissible(); std::vector par; std::vector y; int nstate; int errortype; int trendtype; int seasontype; bool damped; std::vector par_noopt; std::vector lower; std::vector upper; std::string opt_crit; int nmse; std::string bounds; int m; int n; std::vector state; double alpha, beta, gamma, phi; std::vector e; std::vector amse; double lik, objval; bool optAlpha, optBeta, optGamma, optPhi, givenAlpha, givenBeta, givenGamma, givenPhi; }; forecast/src/makeBATSMatrices.cpp0000644000176200001440000001165213553673010016442 0ustar liggesusers#include "calcBATS.h" using namespace Rcpp ; SEXP makeBATSWMatrix(SEXP smallPhi_s, SEXP sPeriods_s, SEXP arCoefs_s, SEXP maCoefs_s) { BEGIN_RCPP double *smallPhi, *arCoefs, *maCoefs; int *seasonalPeriods; int adjustPhi = 0; R_len_t numCols = 1, numSeasonal = 0, p = 0, q = 0; int lengthSeasonal = 0; if(!Rf_isNull(smallPhi_s)) { smallPhi = REAL(smallPhi_s); adjustPhi = 1; numCols = numCols + 1; } if(!Rf_isNull(sPeriods_s)) { seasonalPeriods = INTEGER(sPeriods_s); numSeasonal = LENGTH(sPeriods_s); for(R_len_t s = 0; s < numSeasonal; s++) { lengthSeasonal = lengthSeasonal + seasonalPeriods[s]; } numCols = numCols + lengthSeasonal; } else { lengthSeasonal = 0; } if(!Rf_isNull(arCoefs_s)) { arCoefs = REAL(arCoefs_s); p = LENGTH(arCoefs_s); numCols = numCols + p; } if(!Rf_isNull(maCoefs_s)) { maCoefs = REAL(maCoefs_s); q = LENGTH(maCoefs_s); numCols = numCols + q; } NumericMatrix wTranspose_r(1, numCols); arma::mat wTranspose(wTranspose_r.begin(), wTranspose_r.nrow(), wTranspose_r.ncol(), false); if(!Rf_isNull(sPeriods_s)) { wTranspose.zeros(); int position = adjustPhi; for(R_len_t s = 0; s < numSeasonal; s++) { position = position + seasonalPeriods[s]; wTranspose(0,position) = 1; } } wTranspose(0,0) = 1; if(adjustPhi == 1) { wTranspose(0,1) = *smallPhi; } if(!Rf_isNull(arCoefs_s)) { for(R_len_t i = 1; i <= p; i++) { wTranspose(0,(adjustPhi + lengthSeasonal +i)) = arCoefs[(i-1)]; } } if(!Rf_isNull(maCoefs_s)) { for(R_len_t i = 1; i <= q; i++) { wTranspose(0,(adjustPhi + lengthSeasonal + p + i)) = maCoefs[(i-1)]; } } arma::mat w = arma::trans(wTranspose); smallPhi = 0; arCoefs = 0; maCoefs = 0; seasonalPeriods = 0; return List::create( Named("w") = w, Named("w.transpose") = wTranspose ); END_RCPP } SEXP makeBATSGMatrix(SEXP alpha_s, SEXP beta_s, SEXP gammaVector_s, SEXP seasonalPeriods_s, SEXP p_s, SEXP q_s) { BEGIN_RCPP double *gammaVector; int *seasonalPeriods, *p, *q; int numCols, gammaLength = 0; int adjustBeta = 0; p = INTEGER(p_s); q = INTEGER(q_s); numCols = 1 + *p + *q; if(!Rf_isNull(beta_s)) { numCols = numCols + 1; adjustBeta = 1; } //Find the length of the gamma/seasonal bit if((!Rf_isNull(gammaVector_s))&&(!Rf_isNull(seasonalPeriods_s))) { gammaVector = REAL(gammaVector_s); seasonalPeriods = INTEGER(seasonalPeriods_s); for(R_len_t i = 0; i < LENGTH(seasonalPeriods_s); i++) { gammaLength = 
gammaLength + seasonalPeriods[i]; } numCols = numCols + gammaLength; } else { gammaLength = 0; } NumericMatrix gTranspose_r(1, numCols); arma::mat gTranspose(gTranspose_r.begin(), gTranspose_r.nrow(), gTranspose_r.ncol(), false); gTranspose.zeros(); gTranspose(0,0) = REAL(alpha_s)[0]; if(!Rf_isNull(beta_s)) { gTranspose(0,1) = REAL(beta_s)[0]; } //Copy the gamma/seasonal bits if((!Rf_isNull(gammaVector_s))&&(!Rf_isNull(seasonalPeriods_s))) { int position = adjustBeta + 1; gTranspose(0, position) = gammaVector[0]; if(LENGTH(gammaVector_s) > 1) { for(R_len_t s = 0; s < (LENGTH(seasonalPeriods_s)-1); s++) { position = position + seasonalPeriods[s]; gTranspose(0, position) = gammaVector[(s+1)]; } } } if(*p != 0) { gTranspose(0, (adjustBeta+gammaLength+1)) = 1; } if(*q != 0) { gTranspose(0, (adjustBeta+gammaLength+ *p +1)) = 1; } arma::mat g(arma::trans(gTranspose)); seasonalPeriods = 0; p = 0; q = 0; gammaVector = 0; if((!Rf_isNull(gammaVector_s))&&(!Rf_isNull(seasonalPeriods_s))) { arma::mat gammaBold = gTranspose.cols((1+adjustBeta), (adjustBeta+gammaLength)); return List::create( Named("g") = g, Named("g.transpose") = gTranspose, Named("gamma.bold.matrix") = gammaBold ); } else { return List::create( Named("g") = g, Named("g.transpose") = gTranspose, Named("gamma.bold.matrix") = R_NilValue ); } END_RCPP } /* SEXP makeFMatrix(SEXP alpha_s, SEXP beta_s, SEXP smallPhi_s, SEXP seasonalPeriods_s, SEXP gammaBoldMatrix_s, SEXP arCoefs_s, SEXP maCoefs_s) { BEGIN_RCPP NumericMatrix alpha_r(alpha_s); if(!Rf_isNull(beta_s)) { NumericMatrix beta_r(beta_s); bool indBeta = true; } else { bool indBeta = false; } if(!Rf_isNull(smallPhi_s)) { NumericMatrix smallPhi_r(smallPhi_s); bool indSmallPhi = true; } else { bool indSmallPhi = false; } if(!Rf_isNull(seasonalPeriods_s)) { NumericMatrix seasonalPeriods_r(seasonalPeriods_s); bool indSeasonalPeriods = true; } else { bool indSeasonalPeriods = false; } if(!Rf_isNull(gammaBoldMatrix_s)) { NumericMatrix gammaBoldMatrix_r(gammaBoldMatrix_s); bool indGammaBoldMatrix = true; } else { bool indGammaBoldMatrix = false; } if(!Rf_isNull(arCoefs_s)) { NumericMatrix arCoefs_r(arCoefs_s); bool indArCoefs = true; } else { bool indArCoefs = false; } if(!Rf_isNull(maCoefs_s)) { NumericMatrix maCoefs_r(maCoefs_s); bool indMaCoefs = true; } else { bool indMaCoefs = false; } arma::mat END_RCPP } */ forecast/src/updateMatrices.cpp0000644000176200001440000001127113553673010016332 0ustar liggesusers/* * updateMatrices.cpp * * Created on: 03/11/2011 * Author: srazbash */ #include "calcBATS.h" using namespace Rcpp ; SEXP updateFMatrix(SEXP F_s, SEXP smallPhi_s, SEXP alpha_s, SEXP beta_s, SEXP gammaBold_s, SEXP ar_s, SEXP ma_s, SEXP tau_s) { BEGIN_RCPP NumericMatrix F_r(F_s); arma::mat F(F_r.begin(), F_r.nrow(), F_r.ncol(), false); double *beta, *alpha = &REAL(alpha_s)[0]; int *tau, p, q, betaAdjust; int zero = 0; if(!Rf_isNull(gammaBold_s)) { tau = &INTEGER(tau_s)[0]; } else { tau = &zero; } if(!Rf_isNull(beta_s)) { beta = &REAL(beta_s)[0]; double *smallPhi = &REAL(smallPhi_s)[0]; F(0,1) = *smallPhi; F(1,1) = *smallPhi; betaAdjust = 1; } else { betaAdjust = 0; } if(!Rf_isNull(ar_s)) { //Rprintf("before arma::mat ar\n"); NumericMatrix ar_r(ar_s); arma::mat ar(ar_r.begin(), ar_r.nrow(), ar_r.ncol(), false); //Rprintf("after arma::mat ar\n"); p = ar.n_cols; //Rprintf("line-a-before\n"); F.submat(0,(betaAdjust+ *tau+1),0,(betaAdjust+ *tau+p)) = *alpha * ar; //Rprintf("line-a-after\n"); if(betaAdjust == 1) { //Rprintf("line-b-before\n"); F.submat(1,(betaAdjust+ *tau+1),1,(betaAdjust+ 
*tau+p)) = *beta * ar;
			//Rprintf("line-b-after\n");
		}
		if(*tau > 0) {
			//Rprintf("la\n");
			NumericMatrix gammaBold_r(gammaBold_s);
			//Rprintf("la-2\n");
			arma::mat gammaBold(gammaBold_r.begin(), gammaBold_r.nrow(), gammaBold_r.ncol(), false);
			//Rprintf("la-3\n");
			//arma::mat gammaBold = as<arma::mat>(gammaBold_s);
			arma::mat B = trans(gammaBold) * ar;
			//Rprintf("line-c-before\n");
			F.submat((1+betaAdjust),(betaAdjust+ *tau+1), (betaAdjust+ *tau), (betaAdjust+ *tau+p)) = B;
			//Rprintf("line-c-after\n");
		}
		//Rprintf("line-d-before\n");
		F.submat((betaAdjust+ *tau+1),(betaAdjust+ *tau+1),(betaAdjust+ *tau+1),(betaAdjust+ *tau+p)) = ar;
		//Rprintf("line-d-after\n");
	} else {
		p = 0;
	}

	if(!Rf_isNull(ma_s)) {
		NumericMatrix ma_r(ma_s);
		arma::mat ma(ma_r.begin(), ma_r.nrow(), ma_r.ncol(), false);
		q = ma.n_cols;
		//Rprintf("one-before\n");
		F.submat(0,(betaAdjust+ *tau+p+1),0,(betaAdjust+ *tau+p+q)) = *alpha * ma;
		//Rprintf("one-after\n");
		if(betaAdjust == 1) {
			//Rprintf("two-before\n");
			F.submat(1,(betaAdjust+ *tau+p+1),1,(betaAdjust+ *tau+p+q)) = *beta * ma;
			///Rprintf("two-after\n");
		}
		if(*tau > 0) {
			//arma::mat gammaBold = as<arma::mat>(gammaBold_s);
			NumericMatrix gammaBold_r(gammaBold_s);
			arma::mat gammaBold(gammaBold_r.begin(), gammaBold_r.nrow(), gammaBold_r.ncol(), false);
			arma::mat C = trans(gammaBold) * ma;
			//Rprintf("three-before\n");
			F.submat((1+betaAdjust),(betaAdjust+ *tau+p+1), (betaAdjust+ *tau), (betaAdjust+ *tau+p+q)) = C;
			//Rprintf("three-after\n");
		}
		if(!Rf_isNull(ar_s)) {
			//Rprintf("four-before\n");
			F.submat((betaAdjust+ *tau+1), (betaAdjust+ *tau+p+1), (betaAdjust+ *tau+1), (betaAdjust+ *tau+p+q)) = ma;
			//Rprintf("four-after\n");
		}
	} else {
		q = 0;
	}

	return R_NilValue;

	END_RCPP
}

SEXP updateWtransposeMatrix(SEXP wTranspose_s, SEXP smallPhi_s, SEXP tau_s, SEXP arCoefs_s, SEXP maCoefs_s, SEXP p_s, SEXP q_s) {
	BEGIN_RCPP

	NumericMatrix wTranspose(wTranspose_s);
	double *arCoefs, *maCoefs;
	int *p, *q, *tau, adjBeta = 0;

	p = &INTEGER(p_s)[0];
	q = &INTEGER(q_s)[0];
	tau = &INTEGER(tau_s)[0];

	if(!Rf_isNull(smallPhi_s)) {
		adjBeta = 1;
		wTranspose(0,1) = REAL(smallPhi_s)[0];
	}

	if(*p > 0) {
		arCoefs = REAL(arCoefs_s);
		for(int i = 1; i <= *p; i++) {
			wTranspose(0,(adjBeta + *tau + i)) = arCoefs[(i - 1)];
		}
		if(*q > 0) {
			maCoefs = REAL(maCoefs_s);
			for(int i = 1; i <= *q; i++) {
				wTranspose(0,(adjBeta + *tau + *p + i)) = maCoefs[(i - 1)];
			}
		}
	} else if(*q > 0) {
		maCoefs = REAL(maCoefs_s);
		for(int i = 1; i <= *q; i++) {
			wTranspose(0,(adjBeta + *tau + i)) = maCoefs[(i - 1)];
		}
	}

	return R_NilValue;

	END_RCPP
}

SEXP updateGMatrix(SEXP g_s, SEXP gammaBold_s, SEXP alpha_s, SEXP beta_s, SEXP gammaVector_s, SEXP seasonalPeriods_s) {
	BEGIN_RCPP

	int adjBeta = 0, *seasonalPeriods;
	double *gammaVector;
	NumericMatrix g(g_s);

	g(0,0) = REAL(alpha_s)[0];

	if(!Rf_isNull(beta_s)) {
		g(1,0) = REAL(beta_s)[0];
		adjBeta = 1;
	}

	if((!Rf_isNull(gammaVector_s))&&(!Rf_isNull(seasonalPeriods_s))) {
		NumericMatrix gammaBold(gammaBold_s);
		seasonalPeriods = INTEGER(seasonalPeriods_s);
		gammaVector = REAL(gammaVector_s);
		int position = adjBeta + 1;
		int bPos = 0;
		gammaBold(0,bPos) = gammaVector[0];
		g(position, 0) = gammaVector[0];
		if(LENGTH(gammaVector_s) > 1) {
			for(R_len_t s = 0; s < (LENGTH(seasonalPeriods_s)-1); s++) {
				position = position + seasonalPeriods[s];
				bPos = bPos + seasonalPeriods[s];
				g(position, 0) = gammaVector[(s+1)];
			}
		}
	}

	return R_NilValue;

	END_RCPP
}
forecast/src/etsTargetFunction.cpp0000644000176200001440000001550013553673010017027 0ustar liggesusers#include <vector>
#include <cmath>	//for isnan, math.h is needed
//#include <math.h>
#include "etsTargetFunction.h"
#include <R_ext/Print.h>

void EtsTargetFunction::init(std::vector<double> & p_y, int p_nstate,
		int p_errortype, int p_trendtype, int p_seasontype, bool p_damped,
		std::vector<double> & p_lower, std::vector<double> & p_upper,
		std::string p_opt_crit, int p_nmse, std::string p_bounds, int p_m,
		bool p_optAlpha, bool p_optBeta, bool p_optGamma, bool p_optPhi,
		bool p_givenAlpha, bool p_givenBeta, bool p_givenGamma, bool p_givenPhi,
		double alpha, double beta, double gamma, double phi) {

	this->y = p_y;
	this->n = this->y.size();
	this->nstate = p_nstate;
	this->errortype = p_errortype;
	this->trendtype = p_trendtype;
	this->seasontype = p_seasontype;
	this->damped = p_damped;
	this->lower = p_lower;
	this->upper = p_upper;
	this->opt_crit = p_opt_crit;
	this->nmse = p_nmse;
	this->bounds = p_bounds;
	this->m = p_m;

	this->optAlpha = p_optAlpha;
	this->optBeta = p_optBeta;
	this->optGamma = p_optGamma;
	this->optPhi = p_optPhi;

	this->givenAlpha = p_givenAlpha;
	this->givenBeta = p_givenBeta;
	this->givenGamma = p_givenGamma;
	this->givenPhi = p_givenPhi;

	/*
	Rprintf("optAlpha: %d\n", optAlpha);
	Rprintf("optBeta: %d\n", optBeta);
	Rprintf("optGamma: %d\n", optGamma);
	Rprintf("optPhi: %d\n", optPhi);
	Rprintf("givenAlpha: %d\n", givenAlpha);
	Rprintf("givenBeta: %d\n", givenBeta);
	Rprintf("givenGamma: %d\n", givenGamma);
	Rprintf("givenPhi: %d\n", givenPhi);
	*/

	this->alpha = alpha;
	this->beta = beta;
	this->gamma = gamma;
	this->phi = phi;

	this->lik = 0;
	this->objval = 0;

	// for(int i=0; i < 10; i++) this->amse.push_back(0);
	// for(int i=0; i < n; i++) this->e.push_back(0);

	this->amse.resize(30, 0);
	this->e.resize(n, 0);
}

void EtsTargetFunction::eval(const double* p_par, int p_par_length) {

	bool equal=true;

	// ---------show params----------
	//	Rprintf("par: ");
	//	for(int j=0;j < p_par_length;j++) {
	//		Rprintf("%f ", p_par[j]);
	//	}
	//	Rprintf(" objval: %f\n", this->objval);
	//Rprintf("\n");
	// ---------show params----------

	// Check if the parameter configuration has changed, if not, just return.
	if(p_par_length != this->par.size()) {
		equal=false;
	} else {
		for(int j=0;j < p_par_length;j++) {
			if(p_par[j] != this->par[j]) {
				equal=false;
				break;
			}
		}
	}

	if(equal) return;

	this->par.clear();

	for(int j=0;j < p_par_length;j++) {
		this->par.push_back(p_par[j]);
	}

	int j=0;
	if(optAlpha) this->alpha = par[j++];
	if(optBeta) this->beta = par[j++];
	if(optGamma) this->gamma = par[j++];
	if(optPhi) this->phi = par[j++];

	if(!this->check_params()) {
		this->objval = R_PosInf;
		return;
	}

	this->state.clear();
	for(int i=par.size()-nstate; i < par.size(); i++) {
		this->state.push_back(par[i]);
	}

	// Add extra state
	if(seasontype!=0) {//"N"=0, "M"=2

		//init.state <- c(init.state, m*(seasontype==2) - sum(init.state[(2+(trendtype!=0)):nstate]))
		double sum=0;
		for(int i=(1+((trendtype!=0) ? 1 : 0)); i < nstate; i++) {
			sum += state[i];
		}
		double new_state = m*((seasontype==2) ? 1 : 0) - sum;
		state.push_back(new_state);

		// Check states
		if(seasontype==2 && state[state.size()-1] < 0) {
			this->objval = R_PosInf;
			return;
		}
		// seas.states <- init.state[-(1:(1+(trendtype!=0)))]
		//if(min(seas.states) < 0)
		//	return(1e8)
	};

	int p = state.size();
	for(int i=0; i <= p*this->y.size(); i++) state.push_back(0);

	etscalc(&this->y[0], &this->n, &this->state[0], &this->m,
			&this->errortype, &this->trendtype, &this->seasontype,
			&this->alpha, &this->beta, &this->gamma, &this->phi,
			&this->e[0], &this->lik, &this->amse[0], &this->nmse);

	// Avoid perfect fits
	if (this->lik < -1e10) this->lik = -1e10;

	// isnan() is a C99 function
	//if (isnan(this->lik)) this->lik = 1e8;
	if (ISNAN(this->lik)) this->lik = R_PosInf;

	if(fabs(this->lik+99999) < 1e-7) this->lik = R_PosInf;

	if(this->opt_crit=="lik") this->objval = this->lik;
	else if(this->opt_crit=="mse") this->objval = this->amse[0];
	else if(this->opt_crit=="amse") {
		//return(mean(e$amse[1:nmse]))
		double mean=0;
		for(int i=0;i < this->nmse;i++) {
			mean+=amse[i]/this->nmse;
		}
		this->objval=mean;
	}
	else if(this->opt_crit=="sigma") {
		//return(mean(e$e^2))
		double mean=0;
		int ne=e.size();
		for(int i=0;i < ne;i++) mean += e[i]*e[i]/ne;
		this->objval=mean;
	}
	else if(this->opt_crit=="mae") {
		//return(mean(abs(e$e)))
		double mean=0;
		int ne=e.size();
		for(int i=0;i < ne;i++) mean += fabs(e[i])/ne;
		this->objval=mean;
	}
}

bool EtsTargetFunction::check_params() {

	if(bounds != "admissible") {
		if(optAlpha) {
			if(alpha < lower[0] || alpha > upper[0])
				return(false);
		}
		if(optBeta) {
			if(beta < lower[1] || beta > alpha || beta > upper[1])
				return(false);
		}
		if(optPhi) {
			if(phi < lower[3] || phi > upper[3])
				return(false);
		}
		if(optGamma) {
			if(gamma < lower[2] || gamma > 1-alpha || gamma > upper[2])
				return(false);
		}
	}
	if(bounds != "usual") {
		if(!admissible()) return(false);
	}
	return(TRUE);
}

bool EtsTargetFunction::admissible() {

	if(phi < 0 || phi > 1+1e-8) return(false);

	//If gamma was set by the user or it is optimized, the bounds need to be enforced
	if(!optGamma && !givenGamma) {
		if(alpha < 1-1/phi || alpha > 1+1/phi) return(false);

		if(optBeta || givenBeta) {
			if(beta < alpha * (phi-1) || beta > (1+phi)*(2-alpha)) return(false);
		}
	}
	else if(m > 1) //Seasonal model
	{
		if(!optBeta && !givenBeta) beta = 0;

		//max(1-1/phi-alpha,0)
		double d = 1-1/phi-alpha;
		if(gamma < ((d > 0) ? d : 0) || gamma > 1+1/phi-alpha) return(false);
		if(alpha < 1-1/phi-gamma*(1-m+phi+phi*m)/(2*phi*m)) return(false);
		if(beta < -(1-phi)*(gamma/m+alpha)) return(false);

		// End of easy tests. Now use characteristic equation
		std::vector<double> opr;
		opr.push_back(1);
		opr.push_back(alpha+beta-phi);
		for(int i=0;i < m-2;i++) opr.push_back(alpha+beta-alpha*phi);
		opr.push_back(alpha+beta-alpha*phi+gamma-1);
		opr.push_back(phi*(1-alpha-gamma));

		int degree = opr.size()-1;

		std::vector<double> opi;
		opi.resize(opr.size(),0);
		std::vector<double> zeror(degree);
		std::vector<double> zeroi(degree);
		Rboolean fail;
		cpolyroot(&opr[0], &opi[0], &degree, &zeror[0], &zeroi[0], &fail);
		double max = 0;
		for(int i=0;i < degree;i++) {
			double abs_val = sqrt(zeror[i]*zeror[i] + zeroi[i]*zeroi[i]);
			if(abs_val > max) max = abs_val;
		}
		//Rprintf("maxpolyroot: %f\n", max);
		if(max > 1+1e-10) return(false);

		// P <- c(phi*(1-alpha-gamma),alpha+beta-alpha*phi+gamma-1,rep(alpha+beta-alpha*phi,m-2),(alpha+beta-phi),1)
		// roots <- polyroot(P)
		// if(max(abs(roots)) > 1+1e-10) return(false);
	}

	//Passed all tests
	return(true);
}
forecast/vignettes/0000755000176200001440000000000013617722446014104 5ustar liggesusersforecast/vignettes/jsslogo.jpg0000644000176200001440000005221313553673010016260 0ustar liggesusers[binary JPEG image data (JSS logo) omitted]
forecast/vignettes/JSS-paper.bib0000644000176200001440000005324013553673010016321 0ustar liggesusers@STRING{advap = {Advances in Applied Probability}}
@STRING{amath = {Annals of Mathematics}}
@STRING{ams = {The Annals of Mathematical Statistics}}
@STRING{amstat = {The American Statistician}}
@STRING{annalap = {The Annals of Applied Probability}}
@STRING{annalp = {The Annals of Probability}}
@STRING{annals = {The Annals of Statistics}}
@STRING{anneug = {Annals of Eugenics}}
@STRING{anzjs = {Australian \& New Zealand Journal of Statistics}}
@STRING{appstat = {Applied Statistics}}
@STRING{ausjstat = {Australian Journal of Statistics}}
@STRING{bioc = {Biometrics}}
@STRING{bioj = {Biometrical Journal}}
@STRING{biok = {Biometrika}}
@STRING{chance = {Chance}}
@STRING{cjs = {The Canadian Journal of Statistics}}
@STRING{comms = {Communications in Statistics}}
@STRING{commscs = {Communications in Statistics: Computation \& Simulation}}
@STRING{commstm = {Communications in Statistics: Theory \& Methods}}
@STRING{compstat = {Computational Statistics}}
@STRING{csda = {Computational Statistics \& Data Analysis}}
@STRING{debs = {Department of Econometrics \& Business Statistics, Monash University}}
@STRING{ejor = {European Journal of Operational Research}}
@STRING{ijf = {International Journal of Forecasting}}
@STRING{isr = {International Statistical Review}}
@STRING{jap = {Journal of Applied Probability}}
@STRING{jas = {Journal of Applied Statistics}}
@STRING{jasa = {Journal of the American Statistical Association}}
@STRING{jcgs = {Journal of Computational \& Graphical Statistics}}
@STRING{je = {Journal of Econometrics}}
@STRING{jes = {Journal of Educational Statistics}}
@STRING{jf = {Journal of Forecasting}}
@STRING{jma = {Journal of Multivariate Analysis}}
@STRING{jors = {Journal of the Operational Research Society}}
@STRING{jos = {Journal of Official Statistics}}
@STRING{jrssa = {Journal of the Royal Statistical Society A}}
@STRING{jrssb = {Journal of the
Royal Statistical Society B}} @STRING{jscs = {Journal of Statistical Computation \& Simulation}} @STRING{jspi = {Journal of Statistical Planning \& Inference}} @STRING{jtp = {Journal of Theoretical Probability}} @STRING{jtsa = {Journal of Time Series Analysis}} @STRING{mansci = {Management Science}} @STRING{psyka = {Psychometrika}} @STRING{ptrf = {Probability Theory \& Related Fields}} @STRING{sankhya = {Sankhy\={a}}} @STRING{sasj = {South African Statistical Journal}} @STRING{scandjs = {Scandinavian Journal of Statistics: Theory \& Applications}} @STRING{siamjssc = {SIAM Journal of Scientific \& Statistical Computing}} @STRING{jss = {Journal of Statistical Software}} @STRING{spl = {Statistics \& Probability Letters}} @STRING{statmed = {Statistics in Medicine}} @STRING{statsci = {Statistical Science}} @STRING{statsin = {Statistica Sinica}} @STRING{survmeth = {Survey Methodology}} @STRING{tech = {Technometrics}} @STRING{toap = {to appear}} @STRING{tpaa = {Theory of Probability \& its Applications}} @STRING{tstat = {The Statistician}} @BOOK{AM79, title = {Optimal Filtering}, publisher = {Prentice-Hall}, year = {1979}, author = {B. D. O. Anderson and J. B. Moore}, address = {Englewood Cliffs}, } @BOOK{Aoki87, title = {State Space Modeling of Time Series}, publisher = {Springer-Verlag}, year = {1987}, author = {Masanao Aoki}, address = {Berlin}, } @ARTICLE{Archibald90, author = {Blyth C. Archibald}, title = {Parameter Space of the {H}olt-{W}inters' Model}, journal = ijf, year = {1990}, volume = {6}, pages = {199--209}, fileno = {1151}, keywords = {Exponential smoothing; seasonal; coefficient choice; stability; evaluation}, pdf = {Archibald90.pdf}, } @ARTICLE{AN00, author = {V. Assimakopoulos and K. Nikolopoulos}, title = {The Theta Model: A Decomposition Approach to Forecasting}, journal = ijf, year = {2000}, volume = {16}, pages = {521-530}, fileno = {1047}, keywords = {M3-Competition; Time series; Univariate forecasting method}, } @BOOK{BOK05, title = {Forecasting, Time Series and Regression: An Applied Approach}, publisher = {Thomson Brooks/Cole}, year = {2005}, author = {B. L. Bowerman and R. T. O'Connell and Anne B. Koehler}, address = {Belmont CA}, } @BOOK{BDbook91, title = {Time Series: Theory and Methods}, publisher = {Springer-Verlag}, year = {1991}, author = {P. J. Brockwell and R. A Davis}, address = {New York}, edition = {2nd}, } @BOOK{BDbook91a, title = {Introduction to Time Series and Forecasting}, publisher = {John Wiley \& Sons}, year = {2002}, edition = {2nd}, author = {P.J. Brockwell and R.A. Davis}, } @ARTICLE{CH95, author = {F. Canova and B. E. Hansen}, title = {Are Seasonal Patterns Constant Over Time? {A} Test for Seasonal Stability}, journal = {Journal of Business and Economic Statistics}, year = {1995}, volume = {13}, pages = {237-252}, file = {CH95.pdf:CH95.pdf:PDF}, pdf = {CH95.pdf}, } @ARTICLE{CY91, author = {Chris Chatfield and Mohammad Yar}, title = {Prediction Intervals for Multiplicative {H}olt-{W}inters}, journal = ijf, year = {1991}, volume = {7}, pages = {31-37}, keywords = {Holt-Winters; Prediction intervals; Exponential smoothing}, } @ARTICLE{Croston72, author = {J. D. Croston}, title = {Forecasting and Stock Control for Intermittent Demands}, journal = {Operational Research Quarterly}, year = {1972}, volume = {23}, pages = {289--304}, number = {3}, pdf = {Croston72.pdf}, } @ARTICLE{DF81, author = {D. A. Dickey and W. A. 
Fuller}, title = {Likelihood Ratio Statistics for Autoregressive Time Series with a Unit Root}, journal = {Econometrica}, year = {1981}, volume = {49}, pages = {1057-1071}, } @BOOK{DKbook01, title = {Time Series Analysis by State Space Methods}, publisher = {Oxford University Press}, year = {2001}, author = {J Durbin and Siem J Koopman}, address = {Oxford}, } @ARTICLE{Gardner85, author = {Gardner, Jr, Everette S.}, title = {Exponential Smoothing: The State of the Art}, journal = jf, year = {1985}, volume = {4}, pages = {1-28}, keywords = {Bibliography; exponential smoothing; comparative methods; ARIMA; exponential smoothing; control charts; CUSUM; evaluation-forecasting monitoring systems; exponential smoothing; adaptive exponential smoothing-adaptive; coefficient choice; higher-order; review; theory seasonality-estimation; harmonics; tracking signal-methodology; use-inventory control}, } @ARTICLE{GM85, author = {Gardner, Jr, Everette S. and Ed McKenzie}, title = {Forecasting Trends in Time Series}, journal = mansci, year = {1985}, volume = {31}, pages = {1237-1246}, number = {10}, keywords = {Forecasting; time series}, } @TECHREPORT{Gomez98, author = {Victor G\'{o}mez}, title = {Automatic Model Identification in the Presence of Missing Observations and Outliers}, institution = {Ministerio de Econom{\'\i}a y Hacienda, Direcci{\'o}n General de An{\'a}lisis y Programaci{\'o}n Presupuestaria}, year = {1998}, type = {Working paper}, number = {D-98009}, pdf = {Gomez98.pdf}, } @TECHREPORT{TRAMOSEATS98, author = {Victor G\'{o}mez and Agust\'{i}n Maravall}, title = {Programs \pkg{TRAMO} and \pkg{SEATS}, Instructions for the Users}, institution = {Ministerio de Econom{\'\i}a y Hacienda, Direcci{\'o}n General de An{\'a}lisis y Programaci{\'o}n Presupuestaria}, year = {1998}, type = {Working paper}, number = {97001}, month = {June}, edition = {Beta version}, } @ARTICLE{ForecastPro00, author = {Robert L Goodrich}, title = {The \pkg{Forecast Pro} Methodology}, journal = ijf, year = {2000}, volume = {16}, pages = {533-535}, number = {4}, pdf = {ForecastPro00.pdf}, } @ARTICLE{HR82, author = {E. J. Hannan and J. Rissanen}, title = {Recursive Estimation of Mixed Autoregressive-Moving Average Order}, journal = biok, year = {1982}, volume = {69}, pages = {81-94}, number = {1}, keywords = {Autoregressive-moving average; best coding; martingale; recursive calculation; strong convergence; vector autoregression}, } @ARTICLE{Hendry97, author = {David F. Hendry}, title = {The Econometrics of Macroeconomic Forecasting}, journal = {The Economic Journal}, year = {1997}, volume = {107}, pages = {1330-1357.}, number = {444}, } @ARTICLE{HEGY90, author = {S. Hylleberg and R. Engle and C. Granger and B. 
Yoo}, title = {Seasonal Integration and Cointegration}, journal = {Journal of Econometrics}, year = {1990}, volume = {44}, pages = {215-238}, } @ARTICLE{Hyndman01, author = {Rob J Hyndman}, title = {It's Time To Move from `What' To `Why'---Comments on the {M3}-Competition}, journal = ijf, year = {2001}, volume = {17}, pages = {567-570}, number = {4}, keywords = {commentaries on the M3-competition}, } @MANUAL{forecast, title = {\pkg{forecast}: Forecasting Functions for Time Series}, author = {Rob J Hyndman}, year = {2008}, note = {\proglang{R}~package version~1.11}, url = {http://CRAN.R-project.org/package=forecasting}, } @MANUAL{fma, title = {\pkg{fma}: Data Sets from ``{F}orecasting: Methods and Applications'' By {M}akridakis, {W}heelwright \& {H}yndman (1998)}, author = {Rob J Hyndman}, year = {2008}, note = {\proglang{R}~package version~1.11}, url = {http://CRAN.R-project.org/package=forecasting}, } @MANUAL{expsmooth, title = {\pkg{expsmooth}: Data Sets from ``{F}orecasting with Exponential Smoothing'' by Hyndman, Koehler, Ord \& Snyder (2008)}, author = {Rob J Hyndman}, year = {2008}, note = {\proglang{R}~package version~1.11}, url = {http://CRAN.R-project.org/package=forecasting}, } @MANUAL{Mcomp, title = {\pkg{Mcomp}: Data from the {M}-Competitions}, author = {Rob J Hyndman}, year = {2008}, note = {\proglang{R}~package version~1.11}, url = { http://CRAN.R-project.org/package=forecasting}, } @ARTICLE{HAA08, author = {Rob J Hyndman and {Md} Akram and Blyth C Archibald}, title = {The Admissible Parameter Space for Exponential Smoothing Models}, journal = {Annals of the Institute of Statistical Mathematics}, year = {2008}, volume = {60}, number = {2}, pages = {407--426} } @ARTICLE{HB03, author = {Rob J Hyndman and Billah, Baki}, title = {Unmasking the {T}heta Method}, journal = ijf, year = {2003}, volume = {19}, pages = {287-290}, number = {2}, keywords = {Exponential smoothing; forecasting competitions; State space models}, } @ARTICLE{HKPB05, author = {Rob J Hyndman and Maxwell L. 
King and Pitrun, Ivet and Billah, Baki},
  title = {Local Linear Forecasts Using Cubic Smoothing Splines},
  journal = anzjs,
  year = {2005},
  volume = {47},
  pages = {87-99},
  number = {1},
  keywords = {ARIMA models; Exponential smoothing; Holt's local linear forecasts; Maximum likelihood estimation; non-parametric regression; smoothing splines; state-space model; stochastic trends},
}

@ARTICLE{HK06,
  author = {Rob J Hyndman and Anne B Koehler},
  title = {Another Look at Measures of Forecast Accuracy},
  journal = ijf,
  year = {2006},
  volume = {22},
  pages = {679-688},
  issue = {4},
}

@ARTICLE{HK2008,
  author = {Rob J Hyndman and Yeasmin Khandakar},
  title = {Automatic Time Series Forecasting: The Forecast Package for R},
  journal = jss,
  year = {2008},
  volume = {27},
  issue = {3},
}

@ARTICLE{HKOS05,
  author = {Rob J Hyndman and Anne B Koehler and J Keith Ord and Ralph D Snyder},
  title = {Prediction Intervals for Exponential Smoothing Using Two New Classes of State Space Models},
  journal = {Journal of Forecasting},
  year = {2005},
  volume = {24},
  pages = {17-37},
}

@BOOK{expsmooth08,
  title = {Forecasting with Exponential Smoothing: The State Space Approach},
  publisher = {Springer-Verlag},
  year = {2008},
  author = {Rob J Hyndman and Anne B Koehler and J Keith Ord and Ralph D Snyder},
  url = {http://www.exponentialsmoothing.net/},
}

@ARTICLE{HKSG02,
  author = {Rob J Hyndman and Anne B Koehler and Ralph D Snyder and Simone Grose},
  title = {A State Space Framework for Automatic Forecasting Using Exponential Smoothing Methods},
  journal = ijf,
  year = {2002},
  volume = {18},
  pages = {439-454},
  number = {3},
  keywords = {Prediction intervals; State space models},
}

@ARTICLE{shortseasonal,
  author = {Rob J Hyndman and Andrey V Kostenko},
  title = {Minimum Sample Size Requirements for Seasonal Forecasting Models},
  journal = {Foresight: The International Journal of Applied Forecasting},
  year = {2007},
  volume = {6},
  pages = {12-15},
}

@ARTICLE{KPSS92,
  author = {Denis Kwiatkowski and Peter C.B. Phillips and Peter Schmidt and Yongcheol Shin},
  title = {Testing the Null Hypothesis of Stationarity Against the Alternative of a Unit Root},
  journal = je,
  year = {1992},
  volume = {54},
  pages = {159-178},
}

@ARTICLE{Liu89,
  author = {L. M. Liu},
  title = {Identification of Seasonal {Arima} Models Using a Filtering Method},
  journal = commstm,
  year = {1989},
  volume = {18},
  pages = {2279-2288},
  keywords = {model identification, seasonal time series, ARIMA models, filtering, intermediary models, calendar variation, intervention, transfer function models},
}

@ARTICLE{Mcomp82,
  author = {S. Makridakis and A. Anderson and R. Carbone and R. Fildes and M. Hibon and R. Lewandowski and J. Newton and E. Parzen and R. Winkler},
  title = {The Accuracy of Extrapolation (Time Series) Methods: Results of a Forecasting Competition},
  journal = jf,
  year = {1982},
  volume = {1},
  pages = {111-153},
  keywords = {Forecasting; Time series; Evaluation; Accuracy; Comparison; Empirical Study},
}

@ARTICLE{Metal82,
  author = {Spyros Makridakis and A. Anderson and R. Carbone and R. Fildes and M. Hibon and R. Lewandowski and J. Newton and E. Parzen and R. Winkler},
  title = {The Accuracy of Extrapolation (Time Series) Methods: Results of a Forecasting Competition},
  journal = jf,
  year = {1982},
  volume = {1},
  pages = {111--153},
}

@ARTICLE{Metal93,
  author = {Spyros Makridakis and Chris Chatfield and Mich\`{e}le Hibon and Michael Lawrence and Terence Mills and J. Keith Ord and LeRoy F.
Simmons}, title = {The {M}2-Competition: A Real-Time Judgmentally Based Forecasting study}, journal = ijf, year = {1993}, volume = {9}, pages = {5--22}, } @ARTICLE{M3comp00, author = {Spyros Makridakis and Michele Hibon}, title = {The {M3}-Competition: Results, Conclusions and Implications}, journal = ijf, year = {2000}, volume = {16}, pages = {451-476}, keywords = {Comparative methods-Time series: Univariate; Forecasting competitions; {M}-competition; Forecasting methods; Forecasting accuracy}, } @BOOK{MWH3, title = {Forecasting: Methods and Applications}, publisher = {John Wiley \& Sons}, year = {1998}, author = {Makridakis, Spyros and Wheelwright, Steven C. and Rob J Hyndman}, pages = {642}, address = {New York}, edition = {3rd}, url = {http://www.robhyndman.info/forecasting/}, } @ARTICLE{MP00a, author = {G. M\'{e}lard and J.-M Pasteels}, title = {Automatic {ARIMA} Modeling Including Intervention, Using Time Series Expert Software}, journal = ijf, year = {2000}, volume = {16}, pages = {497-508}, keywords = {M3-Competition; ARIMA models; Expert systems; Intervention analysis; Outliers}, } @ARTICLE{Meyer:2002, author = {David Meyer}, title = {Naive Time Series Forecasting Methods}, journal = {\proglang{R} News}, year = {2002}, volume = {2}, number = {2}, pages = {7--10}, month = {June}, url = {http://CRAN.R-project.org/doc/Rnews/}, } @ARTICLE{OKS97, author = {J. Keith Ord and Anne B. Koehler and Ralph D. Snyder}, title = {Estimation and Prediction for a Class of Dynamic Nonlinear Statistical Models}, journal = jasa, year = {1997}, volume = {92}, pages = {1621-1629}, keywords = {Forecasting; Holt-Winters; Maximum likelihood estimation; State-space models}, pdf = {OKS97.pdf}, } @ARTICLE{OL96, author = {Keith Ord and Sam Lowe}, title = {Automatic Forecasting}, journal = amstat, year = {1996}, volume = {50}, pages = {88-94}, number = {1}, month = {February}, keywords = {automatic, Forecasting, Autobox, AutocastII, Forecast Pro}, } @ARTICLE{Pegels69, author = {C. Carl Pegels}, title = {Exponential Forecasting: Some New Variations}, journal = mansci, year = {1969}, volume = {15}, pages = {311-315}, number = {5}, } @ARTICLE{Reilly00, author = {Reilly, David}, title = {The \pkg{Autobox} System}, journal = ijf, year = {2000}, volume = {16}, pages = {531-533}, number = {4}, pdf = {Reilly00.pdf}, } @ARTICLE{Ripley:2002, author = {Brian D. Ripley}, title = {Time Series in \proglang{R}~1.5.0}, journal = {\proglang{R} News}, year = {2002}, volume = {2}, number = {2}, pages = {2--7}, month = {June}, url = {http://CRAN.R-project.org/doc/Rnews/}, } @ARTICLE{SH05, author = {Lydia Shenstone and Rob J Hyndman}, title = {Stochastic Models Underlying {C}roston's Method for Intermittent Demand Forecasting}, journal = jf, year = {2005}, volume = {24}, pages = {389-402}, } @ARTICLE{SY94, author = {Jeremy Smith and Sanjay Yadav}, title = {Forecasting Costs Incurred from Unit Differencing Fractionally Integrated Processes}, journal = ijf, year = {1994}, volume = {10}, pages = {507-514}, number = {4}, pdf = {SY94.pdf}, } @ARTICLE{SKHO04, author = {Ralph D Snyder and Anne B Koehler and Rob J Hyndman and J Keith Ord}, title = {Exponential Smoothing Models: Means and Variances for Lead-Time Demand}, journal = ejor, year = {2004}, volume = {158}, pages = {444-455}, number = {2}, } @ARTICLE{Taylor03a, author = {James W. 
Taylor}, title = {Exponential Smoothing with a Damped Multiplicative Trend}, journal = ijf, year = {2003}, volume = {19}, pages = {715-725}, keywords = {Damped trend exponential smoothing, Pegels classification, Multiplicative trend}, } @ARTICLE{Wallis99, author = {Wallis, K. F.}, title = {Asymmetric Density Forecasts of Inflation and the {Bank of England's} Fan Chart}, journal = {National Institute Economic Review}, year = {1999}, volume = {167}, pages = {106-112}, number = {1}, } @Manual{R, title = {\proglang{R}: A Language and Environment for Statistical Computing}, author = {{\proglang{R} Development Core Team}}, organization = {\proglang{R} Foundation for Statistical Computing}, address = {Vienna, Austria}, year = {2008}, note = {{ISBN} 3-900051-07-0}, url = {http://www.R-project.org/}, } forecast/vignettes/JSS2008.Rmd0000644000176200001440000017307413553673010015524 0ustar liggesusers--- author: - name: Rob J Hyndman affiliation: Monash University address: > Department of Econometrics \& Business Statistics Monash University Clayton VIC 3800, Australia email: Rob.Hyndman@monash.edu url: https://robjhyndman.com - name: Yeasmin Khandakar affiliation: Monash University address: > Department of Econometrics \& Business Statistics Monash University Clayton VIC 3800, Australia title: formatted: "Automatic Time Series Forecasting:\\newline the \\pkg{forecast} Package for \\proglang{R}" # If you use tex in the formatted title, also supply version without plain: "Automatic Time Series Forecasting: the forecast Package for R" # For running headers, if needed short: "\\pkg{forecast}: Automatic Time Series Forecasting" abstract: > This vignette to the \proglang{R} package \pkg{forecast} is an updated version of @HK2008, published in the *Journal of Statistical Software*. Automatic forecasts of large numbers of univariate time series are often needed in business and other contexts. We describe two automatic forecasting algorithms that have been implemented in the \pkg{forecast} package for \proglang{R}. The first is based on innovations state space models that underly exponential smoothing methods. The second is a step-wise algorithm for forecasting with ARIMA models. The algorithms are applicable to both seasonal and non-seasonal data, and are compared and illustrated using four real time series. 
We also briefly describe some of the other functionality available in the \pkg{forecast} package.} keywords: # at least one keyword must be supplied formatted: [ARIMA models, automatic forecasting, exponential smoothing, prediction intervals, state space models, time series, "\\proglang{R}"] plain: [ARIMA models, automatic forecasting, exponential smoothing, prediction intervals, state space models, time series, R] preamble: > \usepackage{amsmath,rotating,bm,fancyvrb,paralist,thumbpdf} \Volume{27} \Issue{3} \Month{July} \Year{2008} \Submitdate{2007-05-29} \Acceptdate{2008-03-22} \def\damped{$_{\mbox{\footnotesize d}}$} \let\var=\VAR \def\R{\proglang{R}} \def\dampfactor{\phi_h} \raggedbottom bibliography: JSS-paper.bib vignette: > %\VignetteIndexEntry{Automatic Time Series Forecasting: the forecast Package for R (Hyndman & Khandakar, JSS 2008)} %\VignetteEngine{knitr::rmarkdown} %\VignetteEncoding{UTF-8} documentclass: jss output: if (rmarkdown::pandoc_version() >= 2) rticles::jss_article else rmarkdown::html_vignette fig_width: 7 fig_height: 6 fig_caption: true --- ```{r load_forecast, echo=FALSE, message=FALSE} library('forecast') ``` ```{r load_expsmooth, echo=FALSE, message=FALSE, eval=FALSE} library('expsmooth') ``` ```{r expsmooth_datsets, echo=FALSE, message=FALSE} bonds <- structure(c(5.83, 6.06, 6.58, 7.09, 7.31, 7.23, 7.43, 7.37, 7.6, 7.89, 8.12, 7.96, 7.93, 7.61, 7.33, 7.18, 6.74, 6.27, 6.38, 6.6, 6.3, 6.13, 6.02, 5.79, 5.73, 5.89, 6.37, 6.62, 6.85, 7.03, 6.99, 6.75, 6.95, 6.64, 6.3, 6.4, 6.69, 6.52, 6.8, 7.01, 6.82, 6.6, 6.32, 6.4, 6.11, 5.82, 5.87, 5.89, 5.63, 5.65, 5.73, 5.72, 5.73, 5.58, 5.53, 5.41, 4.87, 4.58, 4.89, 4.69, 4.78, 4.99, 5.23, 5.18, 5.54, 5.9, 5.8, 5.94, 5.91, 6.1, 6.03, 6.26, 6.66, 6.52, 6.26, 6, 6.42, 6.1, 6.04, 5.83, 5.8, 5.74, 5.72, 5.23, 5.14, 5.1, 4.89, 5.13, 5.37, 5.26, 5.23, 4.97, 4.76, 4.55, 4.61, 5.07, 5, 4.9, 5.28, 5.21, 5.15, 4.9, 4.62, 4.24, 3.88, 3.91, 4.04, 4.03, 4.02, 3.9, 3.79, 3.94, 3.56, 3.32, 3.93, 4.44, 4.29, 4.27, 4.29, 4.26, 4.13, 4.06, 3.81, 4.32, 4.7), .Tsp = c(1994, 2004.33333333333, 12), class = "ts") usnetelec <- structure(c(296.1, 334.1, 375.3, 403.8, 447, 476.3, 550.3, 603.9, 634.6, 648.5, 713.4, 759.2, 797.1, 857.9, 920, 987.2, 1058.4, 1147.5, 1217.8, 1332.8, 1445.5, 1535.1, 1615.9, 1753, 1864.1, 1870.3, 1920.8, 2040.9, 2127.4, 2209.4, 2250.7, 2289.6, 2298, 2244.4, 2313.4, 2419.5, 2473, 2490.5, 2575.3, 2707.4, 2967.3, 3038, 3073.8, 3083.9, 3197.2, 3247.5, 3353.5, 3444.2, 3492.2, 3620.3, 3694.8, 3802.1, 3736.6, 3858.5, 3848), .Tsp = c(1949, 2003, 1), class = "ts") ukcars <- structure(c(330.371, 371.051, 270.67, 343.88, 358.491, 362.822, 261.281, 240.355, 325.382, 316.7, 171.153, 257.217, 298.127, 251.464, 181.555, 192.598, 245.652, 245.526, 225.261, 238.211, 257.385, 228.461, 175.371, 226.462, 266.15, 287.251, 225.883, 265.313, 272.759, 234.134, 196.462, 205.551, 291.283, 284.422, 221.571, 250.697, 253.757, 267.016, 220.388, 277.801, 283.233, 302.072, 259.72, 297.658, 306.129, 322.106, 256.723, 341.877, 356.004, 361.54, 270.433, 311.105, 326.688, 327.059, 274.257, 367.606, 346.163, 348.211, 250.008, 292.518, 343.318, 343.429, 275.386, 329.747, 364.521, 378.448, 300.798, 331.757, 362.536, 389.133, 323.322, 391.832, 421.646, 416.823, 311.713, 381.902, 422.982, 427.722, 376.85, 458.58, 436.225, 441.487, 369.566, 450.723, 462.442, 468.232, 403.636, 413.948, 460.496, 448.932, 407.787, 469.408, 494.311, 433.24, 335.106, 378.795, 387.1, 372.395, 335.79, 397.08, 449.755, 402.252, 391.847, 385.89, 424.325, 433.28, 391.213, 408.74, 445.458, 
428.202, 379.048, 394.042, 432.796), .Tsp = c(1977, 2005, 4), class = "ts") visitors <- structure(c(75.7, 75.4, 83.1, 82.9, 77.3, 105.7, 121.9, 150, 98, 118, 129.5, 110.6, 91.7, 94.8, 109.5, 105.1, 95, 130.3, 156.7, 190.1, 139.7, 147.8, 145.2, 132.7, 120.7, 116.5, 142, 140.4, 128, 165.7, 183.1, 222.8, 161.3, 180.4, 185.2, 160.5, 157.1, 163.8, 203.3, 196.9, 179.6, 207.3, 208, 245.8, 168.9, 191.1, 180, 160.1, 136.6, 142.7, 175.4, 161.4, 149.9, 174.1, 192.7, 247.4, 176.2, 192.8, 189.1, 181.1, 149.9, 157.3, 185.3, 178.2, 162.7, 190.6, 198.6, 253.1, 177.4, 190.6, 189.2, 168, 161.4, 172.2, 208.3, 199.3, 197.4, 216, 223.9, 266.8, 196.1, 238.2, 217.8, 203.8, 175.2, 176.9, 219.3, 199.1, 190, 229.3, 255, 302.4, 242.8, 245.5, 257.9, 226.3, 213.4, 204.6, 244.6, 239.9, 224, 267.2, 285.9, 344, 250.5, 304.3, 307.4, 255.1, 214.9, 230.9, 282.5, 265.4, 254, 301.6, 311, 384, 303.8, 319.1, 313.5, 294.2, 244.8, 261.4, 329.7, 304.9, 268.6, 320.7, 342.9, 422.3, 317.2, 392.7, 365.6, 333.2, 261.5, 306.9, 358.2, 329.2, 309.2, 350.4, 375.6, 465.2, 342.9, 408, 390.9, 325.9, 289.1, 308.2, 397.4, 330.4, 330.9, 366.5, 379.5, 448.3, 346.2, 353.6, 338.6, 341.1, 283.4, 304.2, 372.3, 323.7, 323.9, 354.8, 367.9, 457.6, 351, 398.6, 389, 334.1, 298.1, 317.1, 388.5, 355.6, 353.1, 397, 416.7, 460.8, 360.8, 434.6, 411.9, 405.6, 319.3, 347.9, 429, 372.9, 403, 426.5, 459.9, 559.9, 416.6, 429.2, 428.7, 405.4, 330.2, 370, 446.9, 384.6, 366.3, 378.5, 376.2, 523.2, 379.3, 437.2, 446.5, 360.3, 329.9, 339.4, 418.2, 371.9, 358.6, 428.9, 437, 534, 396.6, 427.5, 392.5, 321.5, 260.9, 308.3, 415.5, 362.2, 385.6, 435.3, 473.3, 566.6, 420.2, 454.8, 432.3, 402.8, 341.3, 367.3, 472, 405.8, 395.6, 449.9, 479.9, 593.1, 462.4, 501.6, 504.7, 409.5), .Tsp = c(1985.33333333333, 2005.25, 12), class = "ts") ``` # Introduction Automatic forecasts of large numbers of univariate time series are often needed in business. It is common to have over one thousand product lines that need forecasting at least monthly. Even when a smaller number of forecasts are required, there may be nobody suitably trained in the use of time series models to produce them. In these circumstances, an automatic forecasting algorithm is an essential tool. Automatic forecasting algorithms must determine an appropriate time series model, estimate the parameters and compute the forecasts. They must be robust to unusual time series patterns, and applicable to large numbers of series without user intervention. The most popular automatic forecasting algorithms are based on either exponential smoothing or ARIMA models. In this article, we discuss the implementation of two automatic univariate forecasting methods in the \pkg{forecast} package for \proglang{R}. We also briefly describe some univariate forecasting methods that are part of the \pkg{forecast} package. The \pkg{forecast} package for the \proglang{R} system for statistical computing [@R] is available from the Comprehensive \proglang{R} Archive Network at \url{https://CRAN.R-project.org/package=forecast}. Version `r packageVersion('forecast')` of the package was used for this paper. The \pkg{forecast} package contains functions for univariate forecasting and a few examples of real time series data. For more extensive testing of forecasting methods, the \pkg{fma} package contains the 90 data sets from @MWH3, the \pkg{expsmooth} package contains 24 data sets from @expsmooth08, and the \pkg{Mcomp} package contains the 1001 time series from the M-competition [@Mcomp82] and the 3003 time series from the M3-competition [@M3comp00]. 
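For example, the entire workflow of identifying a model, estimating its parameters and producing forecasts reduces to a few lines of code. The following sketch is not evaluated here; it uses the `USAccDeaths` series from base \proglang{R}, but any univariate `ts` object could be substituted.

```{r quickstart, eval=FALSE}
library("forecast")
fit <- ets(USAccDeaths)       # automatically select and estimate an exponential smoothing model
fc  <- forecast(fit, h = 24)  # point forecasts and prediction intervals, 24 steps ahead
plot(fc)
```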
The \pkg{forecast} package implements automatic forecasting using exponential smoothing, ARIMA models, the Theta method [@AN00], cubic splines [@HKPB05], as well as other common forecasting methods. In this article, we primarily discuss the exponential smoothing approach (in Section \ref{sec:expsmooth}) and the ARIMA modelling approach (in Section \ref{sec:arima}) to automatic forecasting. In Section \ref{sec:package}, we describe the implementation of these methods in the \pkg{forecast} package, along with other features of the package. # Exponential smoothing {#sec:expsmooth} Although exponential smoothing methods have been around since the 1950s, a modelling framework incorporating procedures for model selection was not developed until relatively recently. @OKS97, @HKSG02 and @HKOS05 have shown that all exponential smoothing methods (including non-linear methods) are optimal forecasts from innovations state space models. Exponential smoothing methods were originally classified by Pegels' (1969)\nocite{Pegels69} taxonomy. This was later extended by @Gardner85, modified by @HKSG02, and extended again by @Taylor03a, giving a total of fifteen methods seen in the following table. \begin{table}[!hbt] \begin{center}\vspace{0.2cm} \begin{tabular}{|ll|ccc|} \hline & &\multicolumn{3}{c|}{Seasonal Component} \\ \multicolumn{2}{|c|}{Trend}& N & A & M\\ \multicolumn{2}{|c|}{Component} & (None) & (Additive) & (Multiplicative)\\ \cline{3-5} &&&\\[-0.3cm] N & (None) & N,N & N,A & N,M\\ &&&&\\[-0.3cm] A & (Additive) & A,N & A,A & A,M\\ &&&&\\[-0.3cm] A\damped & (Additive damped) & A\damped,N & A\damped,A & A\damped,M\\ &&&&\\[-0.3cm] M & (Multiplicative) & M,N & M,A & M,M\\ &&&&\\[-0.3cm] M\damped & (Multiplicative damped) & M\damped,N & M\damped,A & M\damped,M\\ \hline \end{tabular}\vspace{0.2cm} \end{center} \caption{The fifteen exponential smoothing methods.} \end{table} Some of these methods are better known under other names. For example, cell (N,N) describes the simple exponential smoothing (or SES) method, cell (A,N) describes Holt's linear method, and cell (A\damped,N) describes the damped trend method. The additive Holt-Winters' method is given by cell (A,A) and the multiplicative Holt-Winters' method is given by cell (A,M). The other cells correspond to less commonly used but analogous methods. ## Point forecasts for all methods We denote the observed time series by $y_1,y_2,\dots,y_n$. A forecast of $y_{t+h}$ based on all of the data up to time $t$ is denoted by $\hat{y}_{t+h|t}$. To illustrate the method, we give the point forecasts and updating equations for method (A,A), the Holt-Winters' additive method: \begin{subequations}\label{eq:AMmethod}\vspace{-15pt} \begin{align} \mbox{Level:}\quad &\ell_t = \alpha (y_t - s_{t-m}) + (1-\alpha)(\ell_{t-1} + b_{t-1})\hspace*{1cm} \label{eq:3-44a}\\ \mbox{Growth:}\quad &b_t = \beta^*(\ell_t - \ell_{t-1}) + (1-\beta^*)b_{t-1} \label{eq:3-45a}\\ \mbox{Seasonal:}\quad &s_t = \gamma(y_t - \ell_{t-1} -b_{t-1}) + (1-\gamma)s_{t-m}\label{eq:3-46a}\\ \mbox{Forecast:}\quad &\hat{y}_{t+h|t} = \ell_t + b_th +s_{t-m+h_m^+}. \label{eq:3-47a} \end{align} \end{subequations} where $m$ is the length of seasonality (e.g., the number of months or quarters in a year), $\ell_t$ represents the level of the series, $b_t$ denotes the growth, $s_t$ is the seasonal component, $\hat{y}_{t+h|t}$ is the forecast for $h$ periods ahead, and $h_m^+ = \big[(h-1) \mbox{ mod } m\big] + 1$. 
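To make the recursions \eqref{eq:3-44a}--\eqref{eq:3-47a} concrete, the following R sketch (a hypothetical helper, not the package's implementation) iterates them over the data and returns the $h$-step point forecasts. The initial states and the smoothing parameters are assumed to be already known; `beta` plays the role of $\beta^*$.

```{r hwrecursions, eval=FALSE}
# Sketch of the (A,A) recursions. l and b hold the initial level and growth,
# s holds the m initial seasonal states (oldest first); all are assumed given.
hw_additive <- function(y, m, l, b, s, alpha, beta, gamma, h) {
  for (t in seq_along(y)) {
    s_new <- gamma * (y[t] - l - b) + (1 - gamma) * s[1]    # seasonal, eq. (1c)
    l_new <- alpha * (y[t] - s[1]) + (1 - alpha) * (l + b)  # level, eq. (1a)
    b     <- beta * (l_new - l) + (1 - beta) * b            # growth, eq. (1b)
    l     <- l_new
    s     <- c(s[-1], s_new)                                # shift the seasonal window
  }
  l + b * (1:h) + s[((1:h) - 1) %% m + 1]                   # forecasts, eq. (1d)
}
```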
To use method \eqref{eq:AMmethod}, we need values for the initial states $\ell_0$, $b_0$ and $s_{1-m},\dots,s_0$, and for the smoothing parameters $\alpha$, $\beta^*$ and $\gamma$. All of these will be estimated from the observed data.

Equation \eqref{eq:3-46a} is slightly different from the usual Holt-Winters equations such as those in @MWH3 or @BOK05. These authors replace \eqref{eq:3-46a} with
$$ s_t = \gamma^*(y_t - \ell_{t}) + (1-\gamma^*)s_{t-m}. $$
If $\ell_t$ is substituted using \eqref{eq:3-44a}, we obtain
$$s_t = \gamma^*(1-\alpha)(y_t - \ell_{t-1}-b_{t-1}) + \{1-\gamma^*(1-\alpha)\}s_{t-m}. $$
Thus, we obtain identical forecasts using this approach by replacing $\gamma$ in \eqref{eq:3-46a} with $\gamma^*(1-\alpha)$. The modification given in \eqref{eq:3-46a} was proposed by @OKS97 to make the state space formulation simpler. It is equivalent to Archibald's (1990)\nocite{Archibald90} variation of the Holt-Winters' method.

\begin{sidewaystable}
\begin{small}
\begin{center}
\begin{tabular}{|c|lll|}
\hline
& \multicolumn{3}{c|}{Seasonal} \\
{Trend} & \multicolumn{1}{c}{N} & \multicolumn{1}{c}{A} & \multicolumn{1}{c|}{M}\\
\cline{2-4}
& $\ell_t = \alpha y_t + (1-\alpha) \ell_{t-1}$ & $\ell_t = \alpha (y_t - s_{t-m}) + (1-\alpha) \ell_{t-1}$ & $\ell_t = \alpha (y_t / s_{t-m}) + (1-\alpha) \ell_{t-1}$\\
{N} & & $s_t = \gamma (y_t - \ell_{t-1}) + (1-\gamma) s_{t-m}$ & $s_t = \gamma (y_t / \ell_{t-1}) + (1-\gamma) s_{t-m}$ \\
& $\hat{y}_{t+h|t} = \ell_t$ & $\hat{y}_{t+h|t} = \ell_t + s_{t-m+h_m^+}$ & $\hat{y}_{t+h|t}= \ell_ts_{t-m+h_m^+}$ \\
\hline
& $\ell_t = \alpha y_t + (1-\alpha) (\ell_{t-1}+b_{t-1})$ & $\ell_t = \alpha (y_t - s_{t-m}) + (1-\alpha) (\ell_{t-1}+b_{t-1})$ & $\ell_t = \alpha (y_t / s_{t-m}) + (1-\alpha) (\ell_{t-1}+b_{t-1})$\\
{A} & $b_t = \beta^* (\ell_t-\ell_{t-1}) + (1-\beta^*) b_{t-1}$ & $b_t = \beta^* (\ell_t-\ell_{t-1}) + (1-\beta^*) b_{t-1}$ & $b_t = \beta^* (\ell_t-\ell_{t-1}) + (1-\beta^*) b_{t-1}$\\
& & $s_t = \gamma (y_t - \ell_{t-1}-b_{t-1}) + (1-\gamma) s_{t-m}$ & $s_t = \gamma (y_t / (\ell_{t-1}+b_{t-1})) + (1-\gamma) s_{t-m}$\\
& $\hat{y}_{t+h|t} = \ell_t+hb_t$ & $\hat{y}_{t+h|t} = \ell_t +hb_t +s_{t-m+h_m^+}$ & $\hat{y}_{t+h|t}= (\ell_t+hb_t)s_{t-m+h_m^+}$ \\
\hline
& $\ell_t = \alpha y_t + (1-\alpha) (\ell_{t-1}+\phi b_{t-1})$ & $\ell_t = \alpha (y_t - s_{t-m}) + (1-\alpha) (\ell_{t-1}+\phi b_{t-1})$ & $\ell_t = \alpha (y_t / s_{t-m}) + (1-\alpha) (\ell_{t-1}+\phi b_{t-1})$\\
{A\damped } & $b_t = \beta^* (\ell_t-\ell_{t-1}) + (1-\beta^*) \phi b_{t-1}$ & $b_t = \beta^* (\ell_t-\ell_{t-1}) + (1-\beta^*) \phi b_{t-1}$ & $b_t = \beta^* (\ell_t-\ell_{t-1}) + (1-\beta^*) \phi b_{t-1}$\\
& & $s_t = \gamma (y_t - \ell_{t-1}-\phi b_{t-1}) + (1-\gamma) s_{t-m}$ & $s_t = \gamma (y_t / (\ell_{t-1}+\phi b_{t-1})) + (1-\gamma) s_{t-m}$\\
& $\hat{y}_{t+h|t} = \ell_t+\dampfactor b_t$ & $\hat{y}_{t+h|t} = \ell_t+\dampfactor b_t+s_{t-m+h_m^+}$ & $\hat{y}_{t+h|t}= (\ell_t+\dampfactor b_t)s_{t-m+h_m^+}$ \\
\hline
& $\ell_t = \alpha y_t + (1-\alpha) \ell_{t-1}b_{t-1}$ & $\ell_t = \alpha (y_t - s_{t-m}) + (1-\alpha) \ell_{t-1}b_{t-1}$ & $\ell_t = \alpha (y_t / s_{t-m}) + (1-\alpha) \ell_{t-1}b_{t-1}$\\
{M} & $b_t = \beta^* (\ell_t/\ell_{t-1}) + (1-\beta^*) b_{t-1}$ & $b_t = \beta^* (\ell_t/\ell_{t-1}) + (1-\beta^*) b_{t-1}$ & $b_t = \beta^* (\ell_t/\ell_{t-1}) + (1-\beta^*) b_{t-1}$\\
& & $s_t = \gamma (y_t - \ell_{t-1}b_{t-1}) + (1-\gamma) s_{t-m}$ & $s_t = \gamma (y_t / (\ell_{t-1}b_{t-1})) + (1-\gamma) s_{t-m}$\\
& $\hat{y}_{t+h|t} = \ell_tb_t^h$ & $\hat{y}_{t+h|t} =
\ell_tb_t^h + s_{t-m+h_m^+}$ & $\hat{y}_{t+h|t}= \ell_tb_t^hs_{t-m+h_m^+}$ \\ \hline & $\ell_t = \alpha y_t + (1-\alpha) \ell_{t-1}b^\phi_{t-1}$ & $\ell_t = \alpha (y_t - s_{t-m}) + (1-\alpha)\ell_{t-1}b^\phi_{t-1}$ & $\ell_t = \alpha (y_t / s_{t-m}) + (1-\alpha)\ell_{t-1}b^\phi_{t-1}$\\ {M\damped } & $b_t = \beta^* (\ell_t/\ell_{t-1}) + (1-\beta^*) b^\phi_{t-1}$ & $b_t = \beta^* (\ell_t/\ell_{t-1}) + (1-\beta^*) b^\phi_{t-1}$ & $b_t = \beta^* (\ell_t/\ell_{t-1}) + (1-\beta^*) b^\phi_{t-1}$\\ & & $s_t = \gamma (y_t - \ell_{t-1}b^\phi_{t-1}) + (1-\gamma) s_{t-m}$ & $s_t = \gamma (y_t / (\ell_{t-1}b^\phi_{t-1})) + (1-\gamma) s_{t-m}$\\ & $\hat{y}_{t+h|t} = \ell_tb_t^{\phi_h}$ & $\hat{y}_{t+h|t} = \ell_tb_t^{\phi_h} + s_{t-m+h_m^+}$ & $\hat{y}_{t+h|t}= \ell_tb_t^{\phi_h}s_{t-m+h_m^+}$ \\ \hline \end{tabular} \end{center} \end{small} \caption{Formulae for recursive calculations and point forecasts. In each case, $\ell_t$ denotes the series level at time $t$, $b_t$ denotes the slope at time $t$, $s_t$ denotes the seasonal component of the series at time $t$, and $m$ denotes the number of seasons in a year; $\alpha$, $\beta^*$, $\gamma$ and $\phi$ are constants, $\phi_h = \phi+\phi^2+\dots+\phi^{h}$ and $h_m^+ = \big[(h-1) \mbox{ mod } m\big] + 1$.}\label{table:pegels} \end{sidewaystable} Table \ref{table:pegels} gives recursive formulae for computing point forecasts $h$ periods ahead for all of the exponential smoothing methods. Some interesting special cases can be obtained by setting the smoothing parameters to extreme values. For example, if $\alpha=0$, the level is constant over time; if $\beta^*=0$, the slope is constant over time; and if $\gamma=0$, the seasonal pattern is constant over time. At the other extreme, naïve forecasts (i.e., $\hat{y}_{t+h|t}=y_t$ for all $h$) are obtained using the (N,N) method with $\alpha=1$. Finally, the additive and multiplicative trend methods are special cases of their damped counterparts obtained by letting $\phi=1$. ## Innovations state space models {#sec:statespace} For each exponential smoothing method in Table \ref{table:pegels}, @expsmooth08 describe two possible innovations state space models, one corresponding to a model with additive errors and the other to a model with multiplicative errors. If the same parameter values are used, these two models give equivalent point forecasts, although different prediction intervals. Thus there are 30 potential models described in this classification. Historically, the nature of the error component has often been ignored, because the distinction between additive and multiplicative errors makes no difference to point forecasts. We are careful to distinguish exponential smoothing \emph{methods} from the underlying state space \emph{models}. An exponential smoothing method is an algorithm for producing point forecasts only. The underlying stochastic state space model gives the same point forecasts, but also provides a framework for computing prediction intervals and other properties. To distinguish the models with additive and multiplicative errors, we add an extra letter to the front of the method notation. The triplet (E,T,S) refers to the three components: error, trend and seasonality. So the model ETS(A,A,N) has additive errors, additive trend and no seasonality---in other words, this is Holt's linear method with additive errors. Similarly, ETS(M,M\damped,M) refers to a model with multiplicative errors, a damped multiplicative trend and multiplicative seasonality. 
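In the \pkg{forecast} package, this taxonomy maps directly onto the `model` argument of the `ets()` function, which takes the (E,T,S) triplet as a three-letter string, with `Z` denoting a component that is to be selected automatically. A usage sketch (the series is one of those defined earlier in this vignette):

```{r etsspec, eval=FALSE}
fit1 <- ets(ukcars, model = "AAA")                 # ETS(A,A,A): additive Holt-Winters, additive errors
fit2 <- ets(ukcars, model = "AAA", damped = TRUE)  # ETS(A,Ad,A): damped additive trend
fit3 <- ets(ukcars, model = "ZZZ")                 # all three components selected automatically
```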
The notation ETS($\cdot$,$\cdot$,$\cdot$) helps in remembering the order in which the components are specified.

Once a model is specified, we can study the probability distribution of future values of the series and find, for example, the conditional mean of a future observation given knowledge of the past. We denote this as $\mu_{t+h|t} = \E(y_{t+h} \mid \bm{x}_t)$, where $\bm{x}_t$ contains the unobserved components such as $\ell_t$, $b_t$ and $s_t$. For $h=1$ we use $\mu_t\equiv\mu_{t+1|t}$ as a shorthand notation. For many models, these conditional means will be identical to the point forecasts given in Table \ref{table:pegels}, so that $\mu_{t+h|t}=\hat{y}_{t+h|t}$. However, for other models (those with multiplicative trend or multiplicative seasonality), the conditional mean and the point forecast will differ slightly for $h\ge 2$.

We illustrate these ideas using the damped trend method of @GM85.

\subsubsection{Additive error model: ETS(A,A$_d$,N)}

Let $\mu_t = \hat{y}_t = \ell_{t-1}+b_{t-1}$ denote the one-step forecast of $y_{t}$ assuming that we know the values of all parameters. Also, let $\varepsilon_t = y_t - \mu_t$ denote the one-step forecast error at time $t$. From the equations in Table \ref{table:pegels}, we find that\vspace*{-15pt}
\begin{align}
\label{ss1} y_t &= \ell_{t-1} + \phi b_{t-1} + \varepsilon_t\\
\ell_t &= \ell_{t-1} + \phi b_{t-1} + \alpha \varepsilon_t \label{ss2}\\
b_t &= \phi b_{t-1} + \beta^*(\ell_t - \ell_{t-1}- \phi b_{t-1}) = \phi b_{t-1} + \alpha\beta^*\varepsilon_t. \label{ss3}
\end{align}
We simplify the last expression by setting $\beta=\alpha\beta^*$. The three equations above constitute a state space model underlying the damped Holt's method. Note that it is an \emph{innovations} state space model [@AM79;@Aoki87] because the same error term appears in each equation. We can write it in standard state space notation by defining the state vector as $\bm{x}_t = (\ell_t,b_t)'$ and expressing \eqref{ss1}--\eqref{ss3} as
\begin{subequations}\vspace*{-15pt}
\begin{align}
y_t &= \left[ 1 ~ \phi \right] \bm{x}_{t-1} + \varepsilon_t\label{obseq}\\
\bm{x}_t &= \left[\begin{array}{ll} 1 & \phi\\ 0 & \phi \end{array}\right]\bm{x}_{t-1} + \left[\begin{array}{l} \alpha\\ \beta \end{array}\right]\varepsilon_t.\label{stateeq}
\end{align}
\end{subequations}
The model is fully specified once we state the distribution of the error term $\varepsilon_t$. Usually we assume that these are independent and identically distributed, following a normal distribution with mean 0 and variance $\sigma^2$, which we write as $\varepsilon_t \sim\mbox{NID}(0, \sigma^2)$.

\subsubsection{Multiplicative error model: ETS(M,A$_d$,N)}

A model with multiplicative error can be derived similarly, by first setting $\varepsilon_t = (y_t-\mu_t)/\mu_t$, so that $\varepsilon_t$ is the relative error. Then, following a similar approach to that for additive errors, we find\vspace*{-15pt}
\begin{align*}
y_t &= (\ell_{t-1} + \phi b_{t-1})(1 + \varepsilon_t)\\
\ell_t &= (\ell_{t-1} + \phi b_{t-1})(1 + \alpha \varepsilon_t)\\
b_t &= \phi b_{t-1} + \beta(\ell_{t-1}+\phi b_{t-1})\varepsilon_t,
\end{align*}
or\vspace*{-15pt}
\begin{align*}
y_t &= \left[ 1 ~ \phi \right] \bm{x}_{t-1}(1 + \varepsilon_t)\\
\bm{x}_t &= \left[\begin{array}{ll} 1 & \phi \\ 0 & \phi \end{array}\right]\bm{x}_{t-1} + \left[ 1 ~ \phi \right] \bm{x}_{t-1} \left[\begin{array}{l} \alpha\\ \beta \end{array}\right]\varepsilon_t.
\end{align*}
Again we assume that $\varepsilon_t \sim \mbox{NID}(0,\sigma^2)$.
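A minimal R sketch shows how this multiplicative-error model generates data; the parameter and initial state values below are arbitrary illustrative choices, not estimates.

```{r simMAdN, eval=FALSE}
alpha <- 0.5; beta <- 0.1; phi <- 0.9; sigma <- 0.05  # arbitrary parameter values
l <- 10; b <- 1                                       # arbitrary initial states
n <- 100
y <- numeric(n)
for (t in 1:n) {
  e    <- rnorm(1, sd = sigma)   # relative error epsilon_t
  mu   <- l + phi * b            # one-step forecast mu_t
  y[t] <- mu * (1 + e)           # observation equation
  l    <- mu * (1 + alpha * e)   # level equation
  b    <- phi * b + beta * mu * e  # growth equation
}
```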
Of course, the model with multiplicative errors is a nonlinear state space model, and nonlinear models are usually considered difficult to handle for estimation and forecasting. However, that is one of the many advantages of the innovations form of state space models --- we can still compute forecasts, the likelihood and prediction intervals for this nonlinear model with no more effort than is required for the additive error model.

## State space models for all exponential smoothing methods {#sec:ssmodels}

There are similar state space models for all 30 exponential smoothing variations. The general model involves a state vector $\bm{x}_t = (\ell_t, b_t, s_t, s_{t-1}, \dots, s_{t-m+1})'$ and state space equations of the form
\begin{subequations}\label{eq:ss}\vspace*{-15pt}
\begin{align}
y_t &= w(\bm{x}_{t-1}) + r(\bm{x}_{t-1})\varepsilon_t \label{eq:ss1}\\
\bm{x}_t &= f(\bm{x}_{t-1}) + g(\bm{x}_{t-1})\varepsilon_t \label{eq:ss2}
\end{align}
\end{subequations}
where $\{\varepsilon_t\}$ is a Gaussian white noise process with mean zero and variance $\sigma^2$, and $\mu_t = w(\bm{x}_{t-1})$. The model with additive errors has $r(\bm{x}_{t-1})=1$, so that $y_t = \mu_{t} + \varepsilon_t$. The model with multiplicative errors has $r(\bm{x}_{t-1})=\mu_t$, so that $y_t = \mu_{t}(1 + \varepsilon_t)$. Thus, $\varepsilon_t = (y_t - \mu_t)/\mu_t$ is the relative error for the multiplicative model. The models are not unique. Clearly, any value of $r(\bm{x}_{t-1})$ will lead to identical point forecasts for $y_t$.

All of the methods in Table \ref{table:pegels} can be written in the form \eqref{eq:ss1} and \eqref{eq:ss2}. The specific form for each model is given in @expsmooth08. Some of the combinations of trend, seasonality and error can occasionally lead to numerical difficulties; specifically, any model equation that requires division by a state component could involve division by zero. This is a problem for models with additive errors and either multiplicative trend or multiplicative seasonality, as well as for the model with multiplicative errors, multiplicative trend and additive seasonality. These models should therefore be used with caution.

The multiplicative error models are useful when the data are strictly positive, but are not numerically stable when the data contain zeros or negative values. So when the time series is not strictly positive, only the six fully additive models may be applied.

The point forecasts given in Table \ref{table:pegels} are easily obtained from these models by iterating equations \eqref{eq:ss1} and \eqref{eq:ss2} for $t=n+1, n+2,\dots,n+h$, setting $\varepsilon_{n+j}=0$ for $j=1,\dots,h$. In most cases (notable exceptions being models with multiplicative seasonality or multiplicative trend for $h\ge2$), the point forecasts can be shown to be equal to $\mu_{t+h|t} = \E(y_{t+h} \mid \bm{x}_t)$, the conditional expectation of the corresponding state space model.

The models also provide a means of obtaining prediction intervals. In the case of the linear models, where the forecast distributions are normal, we can derive the conditional variance $v_{t+h|t} = \var(y_{t+h} \mid \bm{x}_t)$ and obtain prediction intervals accordingly. This approach also works for many of the nonlinear models. Detailed derivations of the results for many models are given in @HKOS05.

A more direct approach that works for all of the models is to simply simulate many future sample paths conditional on the last estimate of the state vector, $\bm{x}_t$. Then prediction intervals can be obtained from the percentiles of the simulated sample paths.
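A sketch of this simulation approach (not evaluated here) uses the \code{simulate()} method for \code{ets} objects; the series, the forecast horizon and the number of sample paths are arbitrary illustrative choices.

```{r ets-simulate-sketch, eval=FALSE, echo=TRUE}
fit <- ets(USAccDeaths)
# Simulate 5000 future sample paths of length 12, each conditional
# on the last estimated state vector
paths <- t(replicate(5000, simulate(fit, nsim = 12)))
# Approximate 95% prediction intervals from the simulated percentiles
apply(paths, 2, quantile, probs = c(0.025, 0.975))
```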
Point forecasts can also be obtained in this way by taking the average of the simulated values at each future time period. An advantage of this approach is that we generate an estimate of the complete predictive distribution, which is especially useful in applications such as inventory planning, where expected costs depend on the whole distribution. ## Estimation {#sec:estimation} In order to use these models for forecasting, we need to know the values of $\bm{x}_0$ and the parameters $\alpha$, $\beta$, $\gamma$ and $\phi$. It is easy to compute the likelihood of the innovations state space model \eqref{eq:ss}, and so obtain maximum likelihood estimates. @OKS97 show that\vspace*{-15pt} \begin{equation}\label{likelihood} L^*(\bm\theta,\bm{x}_0) = n\log\Big(\sum_{t=1}^n \varepsilon^2_t\Big) + 2\sum_{t=1}^n \log|r(\bm{x}_{t-1})| \end{equation} is equal to twice the negative logarithm of the likelihood function (with constant terms eliminated), conditional on the parameters $\bm\theta = (\alpha,\beta,\gamma,\phi)'$ and the initial states $\bm{x}_0 = (\ell_0,b_0,s_0,s_{-1},\dots,s_{-m+1})'$, where $n$ is the number of observations. This is easily computed by simply using the recursive equations in Table \ref{table:pegels}. Unlike state space models with multiple sources of error, we do not need to use the Kalman filter to compute the likelihood. The parameters $\bm\theta$ and the initial states $\bm{x}_0$ can be estimated by minimizing $L^*$. Most implementations of exponential smoothing use an ad hoc heuristic scheme to estimate $\bm{x}_0$. However, with modern computers, there is no reason why we cannot estimate $\bm{x}_0$ along with $\bm\theta$, and the resulting forecasts are often substantially better when we do. We constrain the initial states $\bm{x}_0$ so that the seasonal indices add to zero for additive seasonality, and add to $m$ for multiplicative seasonality. There have been several suggestions for restricting the parameter space for $\alpha$, $\beta$ and $\gamma$. The traditional approach is to ensure that the various equations can be interpreted as weighted averages, thus requiring $\alpha$, $\beta^*=\beta/\alpha$, $\gamma^*=\gamma/(1-\alpha)$ and $\phi$ to all lie within $(0,1)$. This suggests $$0<\alpha<1,\qquad 0<\beta<\alpha,\qquad 0<\gamma < 1-\alpha,\qquad\mbox{and}\qquad 0<\phi<1. $$ However, @HAA08 show that these restrictions are usually stricter than necessary (although in a few cases they are not restrictive enough). ## Model selection Forecast accuracy measures such as mean squared error (MSE) can be used for selecting a model for a given set of data, provided the errors are computed from data in a hold-out set and not from the same data as were used for model estimation. However, there are often too few out-of-sample errors to draw reliable conclusions. Consequently, a penalized method based on the in-sample fit is usually better. One such approach uses a penalized likelihood such as Akaike's Information Criterion: $$\mbox{AIC} = L^*(\hat{\bm\theta},\hat{\bm{x}}_0) + 2q, $$ where $q$ is the number of parameters in $\bm\theta$ plus the number of free states in $\bm{x}_0$, and $\hat{\bm\theta}$ and $\hat{\bm{x}}_0$ denote the estimates of $\bm\theta$ and $\bm{x}_0$. We select the model that minimizes the AIC amongst all of the models that are appropriate for the data. The AIC also provides a method for selecting between the additive and multiplicative error models. 
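For example, the following sketch (not evaluated here) fits two models that differ only in their error component and compares them on the AIC; the series is an arbitrary illustrative choice. Note that the error types give identical point forecasts only when the same parameter values are used; since the parameters here are estimated separately, the fitted models will differ slightly.

```{r ets-aic-sketch, eval=FALSE, echo=TRUE}
fit.A <- ets(USAccDeaths, model = "AAA")  # additive errors
fit.M <- ets(USAccDeaths, model = "MAA")  # multiplicative errors
# The error type giving the smaller AIC is preferred
c(additive = fit.A$aic, multiplicative = fit.M$aic)
```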
The point forecasts from the two models are identical so that standard forecast accuracy measures such as the MSE or mean absolute percentage error (MAPE) are unable to select between the error types. The AIC is able to select between the error types because it is based on likelihood rather than one-step forecasts. Obviously, other model selection criteria (such as the BIC) could also be used in a similar manner. ## Automatic forecasting {#sec:algorithm} We combine the preceding ideas to obtain a robust and widely applicable automatic forecasting algorithm. The steps involved are summarized below. \begin{compactenum} \item For each series, apply all models that are appropriate, optimizing the parameters (both smoothing parameters and the initial state variable) of the model in each case. \item Select the best of the models according to the AIC. \item Produce point forecasts using the best model (with optimized parameters) for as many steps ahead as required. \item Obtain prediction intervals for the best model either using the analytical results of Hyndman, Koehler, et al. (2005), or by simulating future sample paths for $\{y_{n+1},\dots,y_{n+h}\}$ and finding the $\alpha/2$ and $1-\alpha/2$ percentiles of the simulated data at each forecasting horizon. If simulation is used, the sample paths may be generated using the normal distribution for errors (parametric bootstrap) or using the resampled errors (ordinary bootstrap). \end{compactenum} @HKSG02 applied this automatic forecasting strategy to the M-competition data [@Mcomp82] and the IJF-M3 competition data [@M3comp00] using a restricted set of exponential smoothing models, and demonstrated that the methodology is particularly good at short term forecasts (up to about 6 periods ahead), and especially for seasonal short-term series (beating all other methods in the competitions for these series). # ARIMA models {#sec:arima} A common obstacle for many people in using Autoregressive Integrated Moving Average (ARIMA) models for forecasting is that the order selection process is usually considered subjective and difficult to apply. But it does not have to be. There have been several attempts to automate ARIMA modelling in the last 25 years. @HR82 proposed a method to identify the order of an ARMA model for a stationary series. In their method the innovations can be obtained by fitting a long autoregressive model to the data, and then the likelihood of potential models is computed via a series of standard regressions. They established the asymptotic properties of the procedure under very general conditions. @Gomez98 extended the Hannan-Rissanen identification method to include multiplicative seasonal ARIMA model identification. @TRAMOSEATS98 implemented this automatic identification procedure in the software \pkg{TRAMO} and \pkg{SEATS}. For a given series, the algorithm attempts to find the model with the minimum BIC. @Liu89 proposed a method for identification of seasonal ARIMA models using a filtering method and certain heuristic rules; this algorithm is used in the \pkg{SCA-Expert} software. Another approach is described by @MP00a whose algorithm for univariate ARIMA models also allows intervention analysis. It is implemented in the software package ``Time Series Expert'' (\pkg{TSE-AX}). Other algorithms are in use in commercial software, although they are not documented in the public domain literature. 
In particular, \pkg{Forecast Pro} [@ForecastPro00] is well-known for its excellent automatic ARIMA algorithm which was used in the M3-forecasting competition [@M3comp00]. Another proprietary algorithm is implemented in \pkg{Autobox} [@Reilly00]. @OL96 provide an early review of some of the commercial software that implement automatic ARIMA forecasting.

## Choosing the model order using unit root tests and the AIC

A non-seasonal ARIMA($p,d,q$) process is given by
$$
  \phi(B)(1-B)^dy_{t} = c + \theta(B)\varepsilon_t
$$
where $\{\varepsilon_t\}$ is a white noise process with mean zero and variance $\sigma^2$, $B$ is the backshift operator, and $\phi(z)$ and $\theta(z)$ are polynomials of order $p$ and $q$ respectively. To ensure causality and invertibility, it is assumed that $\phi(z)$ and $\theta(z)$ have no roots for $|z|<1$ [@BDbook91]. If $c\ne0$, there is an implied polynomial of order $d$ in the forecast function.

The seasonal ARIMA$(p,d,q)(P,D,Q)_m$ process is given by
$$
  \Phi(B^m)\phi(B)(1-B^{m})^D(1-B)^dy_{t} = c + \Theta(B^m)\theta(B)\varepsilon_t
$$
where $\Phi(z)$ and $\Theta(z)$ are polynomials of orders $P$ and $Q$ respectively, each containing no roots inside the unit circle. If $c\ne0$, there is an implied polynomial of order $d+D$ in the forecast function.

The main task in automatic ARIMA forecasting is selecting an appropriate model order, that is, the values $p$, $q$, $P$, $Q$, $D$, $d$. If $d$ and $D$ are known, we can select the orders $p$, $q$, $P$ and $Q$ via an information criterion such as the AIC:
$$\mbox{AIC} = -2\log(L) + 2(p+q+P+Q+k)$$
where $k=1$ if $c\ne0$ and 0 otherwise, and $L$ is the maximized likelihood of the model fitted to the \emph{differenced} data $(1-B^m)^D(1-B)^dy_t$. The likelihood of the full model for $y_t$ is not actually defined and so the values of the AIC for different levels of differencing are not comparable.

One solution to this difficulty is the ``diffuse prior'' approach which is outlined in @DKbook01 and implemented in the \code{arima()} function [@Ripley:2002] in \R. In this approach, the initial values of the time series (before the observed values) are assumed to have mean zero and a large variance. However, choosing $d$ and $D$ by minimizing the AIC using this approach tends to lead to over-differencing. For forecasting purposes, we believe it is better to make as few differences as possible because over-differencing harms forecasts [@SY94] and widens prediction intervals. [Although, see @Hendry97 for a contrary view.]

Consequently, we need some other approach to choose $d$ and $D$. We prefer unit-root tests. However, most unit-root tests are based on a null hypothesis that a unit root exists, which biases results towards more differences rather than fewer differences. For example, variations on the Dickey-Fuller test [@DF81] all assume there is a unit root at lag 1, and the HEGY test of @HEGY90 is based on a null hypothesis that there is a seasonal unit root. Instead, we prefer unit-root tests based on a null hypothesis of no unit-root.

For non-seasonal data, we consider ARIMA($p,d,q$) models where $d$ is selected based on successive KPSS unit-root tests [@KPSS92]. That is, we test the data for a unit root; if the test result is significant, we test the differenced data for a unit root; and so on. We stop this procedure when we obtain our first insignificant result. For seasonal data, we consider ARIMA$(p,d,q)(P,D,Q)_m$ models where $m$ is the seasonal frequency and $D=0$ or $D=1$ depending on an extended Canova-Hansen test [@CH95].
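Both tests are exposed directly through the \code{ndiffs()} and \code{nsdiffs()} functions in the \pkg{forecast} package. The following sketch (not evaluated here) illustrates the two-stage choice of $D$ and then $d$; the series is an arbitrary example, and the second call assumes that $D=1$ was selected in the first step.

```{r ndiffs-sketch, eval=FALSE, echo=TRUE}
# Stage 1: choose D using the Canova-Hansen test
nsdiffs(log(AirPassengers), test = "ch")
# Stage 2: choose d by successive KPSS tests applied to the
# seasonally differenced data (assuming D = 1 was selected above)
ndiffs(diff(log(AirPassengers), lag = 12), test = "kpss")
```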
Canova and Hansen only provide critical values for $2 \le m \le 12$. Let $C_m$ be the critical value for seasonal period $m$. We plotted $C_m$ against $m$ for values of $m$ up to 365 and noted that they fit the line $C_m = 0.269 m^{0.928}$ almost exactly. So for $m>12$, we use this simple expression to obtain the critical value.

We note in passing that the null hypothesis for the Canova-Hansen test is not an ARIMA model as it includes seasonal dummy terms. It is a test for whether the seasonal pattern changes sufficiently over time to warrant a seasonal unit root, or whether a stable seasonal pattern modelled using fixed dummy variables is more appropriate. Nevertheless, we have found that the test is still useful for choosing $D$ in a strictly ARIMA framework (i.e., without seasonal dummy variables). If a stable seasonal pattern is selected (i.e., the null hypothesis is not rejected), the seasonality is effectively handled by stationary seasonal AR and MA terms.

After $D$ is selected, we choose $d$ by applying successive KPSS unit-root tests to the seasonally differenced data (if $D=1$) or the original data (if $D=0$). Once $d$ (and possibly $D$) are selected, we proceed to select the values of $p$, $q$, $P$ and $Q$ by minimizing the AIC. We allow $c\ne0$ for models where $d+D < 2$.

## A step-wise procedure for traversing the model space

Suppose we have seasonal data and we consider ARIMA$(p,d,q)(P,D,Q)_m$ models where $p$ and $q$ can take values from 0 to 3, and $P$ and $Q$ can take values from 0 to 1. When $c=0$ there is a total of 288 possible models, and when $c\ne 0$ there is a total of 192 possible models, giving 480 models altogether. If the values of $p$, $d$, $q$, $P$, $D$ and $Q$ are allowed to range more widely, the number of possible models increases rapidly. Consequently, it is often not feasible to simply fit every potential model and choose the one with the lowest AIC. Instead, we need a way of traversing the space of models efficiently in order to arrive at the model with the lowest AIC value. We propose a step-wise algorithm as follows.

\begin{description}
\item[Step 1:] We try four possible models to start with.
\begin{itemize}
\item ARIMA($2,d,2$) if $m=1$ and ARIMA($2,d,2)(1,D,1)$ if $m>1$.
\item ARIMA($0,d,0$) if $m=1$ and ARIMA($0,d,0)(0,D,0)$ if $m>1$.
\item ARIMA($1,d,0$) if $m=1$ and ARIMA($1,d,0)(1,D,0)$ if $m>1$.
\item ARIMA($0,d,1$) if $m=1$ and ARIMA($0,d,1)(0,D,1)$ if $m>1$.
\end{itemize}
If $d+D \le 1$, these models are fitted with $c\ne0$. Otherwise, we set $c=0$. Of these four models, we select the one with the smallest AIC value. This is called the ``current'' model and is denoted by ARIMA($p,d,q$) if $m=1$ or ARIMA($p,d,q)(P,D,Q)_m$ if $m>1$.
\item[Step 2:] We consider up to seventeen variations on the current model:
\begin{itemize}
\item where one of $p$, $q$, $P$ and $Q$ is allowed to vary by $\pm1$ from the current model;
\item where $p$ and $q$ both vary by $\pm1$ from the current model;
\item where $P$ and $Q$ both vary by $\pm1$ from the current model;
\item where the constant $c$ is included if the current model has $c=0$ or excluded if the current model has $c\ne0$.
\end{itemize}
Whenever a model with lower AIC is found, it becomes the new ``current'' model and the procedure is repeated. This process finishes when we cannot find a model close to the current model with lower AIC.
\end{description}

There are several constraints on the fitted models to avoid problems with convergence or near unit-roots. The constraints are outlined below.
\begin{compactitem}\itemsep=8pt
\item The values of $p$ and $q$ are not allowed to exceed specified upper bounds (with default values of 5 in each case).
\item The values of $P$ and $Q$ are not allowed to exceed specified upper bounds (with default values of 2 in each case).
\item We reject any model which is ``close'' to non-invertible or non-causal. Specifically, we compute the roots of $\phi(B)\Phi(B)$ and $\theta(B)\Theta(B)$. If either has a root that is smaller than 1.001 in absolute value, the model is rejected.
\item If there are any errors arising in the non-linear optimization routine used for estimation, the model is rejected. The rationale here is that any model that is difficult to fit is probably not a good model for the data.
\end{compactitem}

The algorithm is guaranteed to return a valid model because the model space is finite and at least one of the starting models will be accepted (the model with no AR or MA parameters). The selected model is used to produce forecasts.

## Comparisons with exponential smoothing

There is a widespread myth that ARIMA models are more general than exponential smoothing. This is not true. The two classes of models overlap. The linear exponential smoothing models are all special cases of ARIMA models---the equivalences are discussed in @HAA08. However, the non-linear exponential smoothing models have no equivalent ARIMA counterparts. On the other hand, there are many ARIMA models which have no exponential smoothing counterpart. Thus, the two model classes overlap and are complementary; each has its strengths and weaknesses.

The exponential smoothing state space models are all non-stationary. Models with seasonality or non-damped trend (or both) have two unit roots; all other models---that is, non-seasonal models with either no trend or damped trend---have one unit root. It is possible to define a stationary model with similar characteristics to exponential smoothing, but this is not normally done. The philosophy of exponential smoothing is that the world is non-stationary. So if a stationary model is required, ARIMA models are better.

One advantage of the exponential smoothing models is that they can be non-linear. So time series that exhibit non-linear characteristics including heteroscedasticity may be better modelled using exponential smoothing state space models.

For seasonal data, there are many more ARIMA models than the 30 possible models in the exponential smoothing class of Section \ref{sec:expsmooth}. It may be thought that the larger model class is advantageous. However, the results in @HKSG02 show that the exponential smoothing models performed better than the ARIMA models for the seasonal M3 competition data. (For the annual M3 data, the ARIMA models performed better.) In a discussion of these results, @Hyndman01 speculates that the larger model space of ARIMA models actually harms forecasting performance because it introduces additional uncertainty. The smaller exponential smoothing class is sufficiently rich to capture the dynamics of almost all real business and economic time series.

# The forecast package {#sec:package}

The algorithms and modelling frameworks for automatic univariate time series forecasting are implemented in the \pkg{forecast} package in \R. We illustrate the methods using the following four real time series shown in Figure \ref{fig:etsexamples}.

\begin{compactitem}
\item Figure \ref{fig:etsexamples}(a) shows 125 monthly US government bond yields (percent per annum) from January 1994 to May 2004.
\item Figure \ref{fig:etsexamples}(b) displays 55 observations of annual US net electricity generation (billion kwh) for 1949 through 2003. \item Figure \ref{fig:etsexamples}(c) presents 113 quarterly observations of passenger motor vehicle production in the U.K. (thousands of cars) for the first quarter of 1977 through the first quarter of 2005. \item Figure \ref{fig:etsexamples}(d) shows 240 monthly observations of the number of short term overseas visitors to Australia from May 1985 to April 2005. \end{compactitem} ```{r etsexamples, fig.height=7, fig.width=9, echo=FALSE, fig.cap="Four time series showing point forecasts and 80\\% \\& 95\\% prediction intervals obtained using exponential smoothing state space models."} par(mfrow = c(2,2)) mod1 <- ets(bonds) mod2 <- ets(usnetelec) mod3 <- ets(ukcars) mod4 <- ets(visitors) plot(forecast(mod1), main="(a) US 10-year bonds yield", xlab="Year", ylab="Percentage per annum") plot(forecast(mod2), main="(b) US net electricity generation", xlab="Year", ylab="Billion kwh") plot(forecast(mod3), main="(c) UK passenger motor vehicle production", xlab="Year", ylab="Thousands of cars") plot(forecast(mod4), main="(d) Overseas visitors to Australia", xlab="Year", ylab="Thousands of people") ``` ```{r etsnames, echo=FALSE} etsnames <- c(mod1$method, mod2$method, mod3$method, mod4$method) etsnames <- gsub("Ad","A\\\\damped",etsnames) ``` ## Implementation of the automatic exponential smoothing algorithm The innovations state space modelling framework described in Section \ref{sec:expsmooth} is implemented via the \code{ets()} function in the \pkg{forecast} package. (The default settings of \code{ets()} do not allow models with multiplicative trend, but they can be included using \code{allow.multiplicative.trend=TRUE}.) 
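For example, a call of the following form (a sketch, not evaluated here) allows the multiplicative trend models to be considered in the automatic search.

```{r ets-multtrend-sketch, eval=FALSE, echo=TRUE}
# Allow (damped) multiplicative trend models in the model search
fit <- ets(usnetelec, allow.multiplicative.trend = TRUE)
fit$method  # e.g., "ETS(M,Md,N)" if that model minimizes the AICc
```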
The models chosen via the algorithm for the four data sets were: \begin{compactitem} \item `r etsnames[1]` for monthly US 10-year bonds yield\\ ($\alpha=`r format(coef(mod1)['alpha'], digits=4, nsmall=4)`$, $\beta=`r format(coef(mod1)['beta'], digits=4, nsmall=4)`$, $\phi=`r format(coef(mod1)['phi'], digits=4, nsmall=4)`$, $\ell_0 = `r format(coef(mod1)['l'], digits=4, nsmall=4)`$, $b_0=`r format(coef(mod1)['b'], digits=4, nsmall=4)`$); \item `r etsnames[2]` for annual US net electricity generation\\ ($\alpha=`r format(coef(mod2)['alpha'], digits=4, nsmall=4)`$, $\beta=`r format(coef(mod2)['beta'], digits=4, nsmall=4)`$, $\ell_0 = `r format(coef(mod2)['l'], digits=4, nsmall=4)`$, $b_0=`r format(coef(mod2)['b'], digits=4, nsmall=4)`$); \item `r etsnames[3]` for quarterly UK motor vehicle production\\ ($\alpha=`r format(coef(mod3)['alpha'], digits=4, nsmall=4)`$, $\gamma=`r format(coef(mod3)['gamma'], digits=4, nsmall=4)`$, $\ell_0 = `r format(coef(mod3)['l'], digits=4, nsmall=4)`$, $s_{-3}=`r format(-sum(coef(mod3)[c('s0','s1','s2')]), digits=4, nsmall=4)`$, $s_{-2}=`r format(coef(mod3)['s2'], digits=4, nsmall=4)`$, $s_{-1}=`r format(coef(mod3)['s1'], digits=4, nsmall=4)`$, $s_0=`r format(coef(mod3)['s0'], digits=4, nsmall=4)`$); \item `r etsnames[4]` for monthly Australian overseas visitors\\ ($\alpha=`r format(coef(mod4)['alpha'], digits=4, nsmall=4)`$, $\beta=`r format(coef(mod4)['beta'], digits=2, nsmall=4)`$, $\gamma=`r format(coef(mod4)['gamma'], digits=4, nsmall=4)`$, $\ell_0 = `r format(coef(mod4)['l'], digits=4, nsmall=4)`$, $b_0 = `r format(coef(mod4)['b'], digits=4, nsmall=4)`$, $s_{-11}=`r format(12-sum(tail(coef(mod4),11)), digits=4, nsmall=4)`$, $s_{-10}=`r format(coef(mod4)['s10'], digits=4, nsmall=4)`$, $s_{-9}=`r format(coef(mod4)['s9'], digits=4, nsmall=4)`$, $s_{-8}=`r format(coef(mod4)['s8'], digits=4, nsmall=4)`$, $s_{-7}=`r format(coef(mod4)['s7'], digits=4, nsmall=4)`$, $s_{-6}=`r format(coef(mod4)['s6'], digits=4, nsmall=4)`$, $s_{-5}=`r format(coef(mod4)['s5'], digits=4, nsmall=4)`$, $s_{-4}=`r format(coef(mod4)['s4'], digits=4, nsmall=4)`$, $s_{-3}=`r format(coef(mod4)['s3'], digits=4, nsmall=4)`$, $s_{-2}=`r format(coef(mod4)['s2'], digits=4, nsmall=4)`$, $s_{-1}=`r format(coef(mod4)['s1'], digits=4, nsmall=4)`$, $s_0=`r format(coef(mod4)['s0'], digits=4, nsmall=4)`$). \end{compactitem} Although there is a lot of computation involved, it can be handled remarkably quickly on modern computers. Each of the forecasts shown in Figure \ref{fig:etsexamples} took no more than a few seconds on a standard PC. The US electricity generation series took the longest as there are no analytical prediction intervals available for the ETS(M,M\damped,N) model. Consequently, the prediction intervals for this series were computed using simulation of 5000 future sample paths. To apply the algorithm to the US net electricity generation time series \code{usnetelec}, we use the following command. ```{r ets-usnetelec, echo=TRUE} etsfit <- ets(usnetelec) ``` The object \code{etsfit} is of class ``\code{ets}'' and contains all of the necessary information about the fitted model including model parameters, the value of the state vector $\bm{x}_t$ for all $t$, residuals and so on. Printing the \code{etsfit} object shows the main items of interest. ```{r ets-usnetelec-print,echo=TRUE} etsfit ``` Some goodness-of-fit measures [defined in @HK06] are obtained using \code{accuracy()}. 
```{r ets-usnetelec-accuracy,eval=TRUE,echo=TRUE} accuracy(etsfit) ``` There are also \code{coef()}, \code{plot()}, \code{summary()}, \code{residuals()}, \code{fitted()} and \code{simulate()} methods for objects of class ``\code{ets}''. The \code{plot()} function shows time plots of the original time series along with the extracted components (level, growth and seasonal). The \code{forecast()} function computes the required forecasts which are then plotted as in Figure \ref{fig:etsexamples}(b). ```{r ets-usnetelec-fcast, fig.height=5, fig.width=8, message=FALSE, warning=FALSE, include=FALSE, output=FALSE} fcast <- forecast(etsfit) plot(fcast) ``` Printing the \code{fcast} object gives a table showing the prediction intervals. ```{r ets-usnetelec-fcast-print,eval=TRUE,echo=TRUE} fcast ``` The \code{ets()} function also provides the useful feature of applying a fitted model to a new data set. For example, we could withhold 10 observations from the \code{usnetelec} data set when fitting, then compute the one-step forecast errors for the out-of-sample data. ```{r ets-usnetelec-newdata,eval=FALSE,echo=TRUE} fit <- ets(usnetelec[1:45]) test <- ets(usnetelec[46:55], model = fit) accuracy(test) ``` We can also look at the measures of forecast accuracy where the forecasts are based on only the fitting data. ```{r ets-usnetelec-fcast-accuracy,eval=FALSE,echo=TRUE} accuracy(forecast(fit,10), usnetelec[46:55]) ``` ## The HoltWinters() function There is another implementation of exponential smoothing in \R\ via the \code{HoltWinters()} function [@Meyer:2002] in the \pkg{stats} package. It implements only the (N,N), (A,N), (A,A) and (A,M) methods. The initial states $\bm{x}_0$ are fixed using a heuristic algorithm. Because of the way the initial states are estimated, a full three years of seasonal data are required to implement the seasonal forecasts using \code{HoltWinters()}. (See @shortseasonal for the minimal sample size required.) The smoothing parameters are optimized by minimizing the average squared prediction errors, which is equivalent to minimizing \eqref{likelihood} in the case of additive errors. There is a \code{predict()} method for the resulting object which can produce point forecasts and prediction intervals. Although it is nowhere documented, it appears that the prediction intervals produced by \code{predict()} for an object of class \code{HoltWinters} are based on an equivalent ARIMA model in the case of the (N,N), (A,N) and (A,A) methods, assuming additive errors. These prediction intervals are equivalent to the prediction intervals that arise from the (A,N,N), (A,A,N) and (A,A,A) state space models. For the (A,M) method, the prediction interval provided by \code{predict()} appears to be based on @CY91 which is an approximation to the true prediction interval arising from the (A,A,M) model. Prediction intervals with multiplicative errors are not possible using the \code{HoltWinters()} function. 
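For comparison with \code{ets()}, the following sketch (not evaluated here) fits the additive Holt-Winters method both ways; the series is an arbitrary seasonal example, and the two fits will generally differ because \code{HoltWinters()} fixes the initial states heuristically while \code{ets()} estimates them.

```{r hw-vs-ets-sketch, eval=FALSE, echo=TRUE}
# stats::HoltWinters(): the (A,A) method with heuristic initial states
hwfit <- HoltWinters(USAccDeaths, seasonal = "additive")
predict(hwfit, n.ahead = 12, prediction.interval = TRUE)
# The corresponding ETS(A,A,A) model, with initial states estimated
fit <- ets(USAccDeaths, model = "AAA", damped = FALSE)
forecast(fit, h = 12)
```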
## Implementation of the automatic ARIMA algorithm

```{r arimaexamples, fig.height=7, fig.width=9, echo=FALSE, fig.cap="Four time series showing point forecasts and 80\\% \\& 95\\% prediction intervals obtained using ARIMA models."}
mod1 <- auto.arima(bonds, seasonal=FALSE, approximation=FALSE)
mod2 <- auto.arima(usnetelec)
mod3 <- auto.arima(ukcars)
mod4 <- auto.arima(visitors)
par(mfrow = c(2,2))
plot(forecast(mod1), main="(a) US 10-year bonds yield", xlab="Year", ylab="Percentage per annum")
plot(forecast(mod2), main="(b) US net electricity generation", xlab="Year", ylab="Billion kwh")
plot(forecast(mod3), main="(c) UK passenger motor vehicle production", xlab="Year", ylab="Thousands of cars")
plot(forecast(mod4), main="(d) Overseas visitors to Australia", xlab="Year", ylab="Thousands of people")
```

The algorithm of Section \ref{sec:arima} is applied to the same four time series. Unlike the exponential smoothing algorithm, the ARIMA class of models assumes homoscedasticity, which is not always appropriate. Consequently, transformations are sometimes necessary. For these four time series, we model the raw data for series (a)--(c), but the logged data for series (d). The prediction intervals are back-transformed with the point forecasts to preserve the probability coverage.

To apply this algorithm to the US net electricity generation time series \code{usnetelec}, we use the following commands.

```{r arima-auto-fcast,eval=TRUE,echo=TRUE,fig.show="hide"}
arimafit <- auto.arima(usnetelec)
fcast <- forecast(arimafit)
plot(fcast)
```

```{r arimanames, echo=FALSE}
# Convert character strings to latex
arimanames <- c(
  as.character(mod1),
  as.character(mod2),
  as.character(mod3),
  as.character(mod4))
arimanames <- gsub("\\[([0-9]*)\\]", "$_{\\1}$", arimanames)
```

The function \code{auto.arima()} implements the algorithm of Section \ref{sec:arima} and returns an object of class \code{Arima}. The resulting forecasts are shown in Figure \ref{fig:arimaexamples}. The fitted models are as follows:
\begin{compactitem}
\item `r arimanames[1]` for monthly US 10-year bonds yield\\ ($\theta_1= `r format(coef(mod1)['ma1'], digits=4, nsmall=4)`$);
\item `r arimanames[2]` for annual US net electricity generation\\ ($\phi_1= `r format(coef(mod2)['ar1'], digits=4, nsmall=4)`$; $\phi_2= `r format(coef(mod2)['ar2'], digits=4, nsmall=4)`$; $\theta_1= `r format(coef(mod2)['ma1'], digits=4, nsmall=4)`$; $\theta_2= `r format(coef(mod2)['ma2'], digits=4, nsmall=4)`$; $c= `r format(coef(mod2)['drift'], digits=4, nsmall=4)`$);
\item `r arimanames[3]` for quarterly UK motor vehicle production\\ ($\phi_1= `r format(coef(mod3)['ar1'], digits=4, nsmall=4)`$; $\phi_2= `r format(coef(mod3)['ar2'], digits=4, nsmall=4)`$; $\Phi_1= `r format(coef(mod3)['sar1'], digits=4, nsmall=4)`$; $\Phi_2= `r format(coef(mod3)['sar2'], digits=4, nsmall=4)`$);
\item `r arimanames[4]` for monthly Australian overseas visitors\\ ($\phi_1= `r format(coef(mod4)['ar1'], digits=4, nsmall=4)`$; $\theta_1= `r format(coef(mod4)['ma1'], digits=4, nsmall=4)`$; $\Theta_1= `r format(coef(mod4)['sma1'], digits=4, nsmall=4)`$; $\Theta_2= `r format(coef(mod4)['sma2'], digits=4, nsmall=4)`$; $c= `r format(coef(mod4)['drift'], digits=4, nsmall=4)`$).
\end{compactitem}
Note that the \R\ parameterization has $\theta(B) = (1 + \theta_1B + \dots + \theta_qB^q)$ and $\phi(B) = (1 - \phi_1B - \dots - \phi_pB^p)$, and similarly for the seasonal terms.

A summary of the forecasts is available, part of which is shown below.
``` Forecast method: ARIMA(2,1,2) with drift Series: usnetelec Coefficients: ar1 ar2 ma1 ma2 drift -1.3032 -0.4332 1.5284 0.8340 66.1585 s.e. 0.2122 0.2084 0.1417 0.1185 7.5595 sigma^2 estimated as 2262: log likelihood=-283.34 AIC=578.67 AICc=580.46 BIC=590.61 Error measures: ME RMSE MAE MPE MAPE MASE ACF1 Training set 0.046402 44.894 32.333 -0.61771 2.1012 0.45813 0.022492 Forecasts: Point Forecast Lo 80 Hi 80 Lo 95 Hi 95 2004 3968.957 3908.002 4029.912 3875.734 4062.180 2005 3970.350 3873.950 4066.751 3822.919 4117.782 2006 4097.171 3971.114 4223.228 3904.383 4289.959 2007 4112.332 3969.691 4254.973 3894.182 4330.482 2008 4218.671 4053.751 4383.591 3966.448 4470.894 2009 4254.559 4076.108 4433.010 3981.641 4527.476 2010 4342.760 4147.088 4538.431 4043.505 4642.014 2011 4393.306 4185.211 4601.401 4075.052 4711.560 2012 4470.261 4248.068 4692.455 4130.446 4810.077 2013 4529.113 4295.305 4762.920 4171.535 4886.690 ``` The training set error measures for the two models are very similar. Note that the information criteria are not comparable. The \pkg{forecast} package also contains the function \code{Arima()} which is largely a wrapper to the \code{arima()} function in the \pkg{stats} package. The \code{Arima()} function in the \pkg{forecast} package makes it easier to include a drift term when $d+D=1$. (Setting \code{include.mean=TRUE} in the \code{arima()} function from the \pkg{stats} package will only work when $d+D=0$.) It also provides the facility for fitting an existing ARIMA model to a new data set (as was demonstrated for the \code{ets()} function earlier). One-step forecasts for ARIMA models are now available via a \code{fitted()} function. We also provide a new function \code{arima.errors()} which returns the original time series after adjusting for regression variables. If there are no regression variables in the ARIMA model, then the errors will be identical to the original series. If there are regression variables in the ARIMA model, then the errors will be equal to the original series minus the effect of the regression variables, but leaving in the serial correlation that is modelled with the AR and MA terms. In contrast, \code{residuals()} provides true residuals, removing the AR and MA terms as well. The generic functions \code{summary()}, \code{print()}, \code{fitted()} and \code{forecast()} apply to models obtained from either the \code{Arima()} or \code{arima()} functions. ## The forecast() function The \code{forecast()} function is generic and has S3 methods for a wide range of time series models. It computes point forecasts and prediction intervals from the time series model. Methods exist for models fitted using \code{ets()}, \code{auto.arima()}, \code{Arima()}, \code{arima()}, \code{ar()}, \code{HoltWinters()} and \texttt{StructTS()}. There is also a method for a \code{ts} object. If a time series object is passed as the first argument to \code{forecast()}, the function will produce forecasts based on the exponential smoothing algorithm of Section \ref{sec:expsmooth}. In most cases, there is an existing \code{predict()} function which is intended to do much the same thing. Unfortunately, the resulting objects from the \code{predict()} function contain different information in each case and so it is not possible to build generic functions (such as \code{plot()} and \code{summary()}) for the results. So, instead, \code{forecast()} acts as a wrapper to \code{predict()}, and packages the information obtained in a common format (the \code{forecast} class). 
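As a small sketch of this behaviour (not evaluated here), the same calling pattern works across model classes, and a bare time series is handled by the automatic exponential smoothing algorithm:

```{r forecast-generic-sketch, eval=FALSE, echo=TRUE}
forecast(HoltWinters(USAccDeaths), h = 12)  # wraps predict.HoltWinters()
forecast(StructTS(USAccDeaths, type = "BSM"), h = 12)
forecast(USAccDeaths, h = 12)  # a ts object: automatic ets() is used
```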
We also define a default \code{predict()} method which is used when no existing \code{predict()} function exists, and calls the relevant \code{forecast()} function. Thus, \code{predict()} methods parallel \code{forecast()} methods, but the latter provide consistent output that is more usable.

\subsection[The forecast class]{The \code{forecast} class}

The output from the \code{forecast()} function is an object of class ``\code{forecast}'' and includes at least the following information:
\begin{compactitem}
\item the original series;
\item point forecasts;
\item prediction intervals of specified coverage;
\item the forecasting method used and information about the fitted model;
\item residuals from the fitted model;
\item one-step forecasts from the fitted model for the period of the observed data.
\end{compactitem}

There are \code{print()}, \code{plot()} and \code{summary()} methods for the ``\code{forecast}'' class. Figures \ref{fig:etsexamples} and \ref{fig:arimaexamples} were produced using the \code{plot()} method. The prediction intervals are, by default, computed for 80\% and 95\% coverage, although other values are possible if requested. Fan charts [@Wallis99] are possible using the combination \verb|plot(forecast(model.object, fan = TRUE))|.

## Other functions {#sec:other}

We now briefly describe some of the other features of the \pkg{forecast} package. Each of the following functions produces an object of class ``\code{forecast}''.

\code{croston()}
: implements the method of @Croston72 for intermittent demand forecasting. In this method, the time series is decomposed into two separate sequences: the non-zero values and the time intervals between non-zero values. These are then independently forecast using simple exponential smoothing and the forecasts of the original series are obtained as ratios of the two sets of forecasts. No prediction intervals are provided because there is no underlying stochastic model [@SH05].

\code{thetaf()}
: provides forecasts from the Theta method [@AN00]. @HB03 showed that these were equivalent to a special case of simple exponential smoothing with drift.

\code{splinef()}
: gives cubic-spline forecasts, based on fitting a cubic spline to the historical data and extrapolating it linearly. The details of this method, and the associated prediction intervals, are discussed in @HKPB05.

\code{meanf()}
: returns forecasts based on the historical mean.

\code{rwf()}
: gives ``naïve'' forecasts equal to the most recent observation assuming a random walk model. This function also allows forecasting using a random walk with drift.

In addition, there are some new plotting functions for time series.

\code{tsdisplay()}
: provides a time plot along with an ACF and PACF.

\code{seasonplot()}
: produces a seasonal plot as described in @MWH3.

\newpage

# Bibliography

forecast/R/0000755000176200001440000000000013617634745012301 5ustar liggesusersforecast/R/residuals.R0000644000176200001440000001377113617634745014420 0ustar liggesusers#' Residuals for various time series models
#'
#' Returns time series of residuals from a fitted model.
#'
#' Innovation residuals correspond to the white noise process that drives the
#' evolution of the time series model. Response residuals are the difference
#' between the observations and the fitted values (equivalent to \code{h}-step
#' forecasts). For functions with no \code{h} argument, \code{h=1}. For
#' homoscedastic models, the innovation residuals and the response residuals
#' for \code{h=1} are identical.
Regression residuals are available for #' regression models with ARIMA errors, and are equal to the original data #' minus the effect of the regression variables. If there are no regression #' variables, the errors will be identical to the original series (possibly #' adjusted to have zero mean). \code{arima.errors} is a deprecated function #' which is identical to \code{residuals.Arima(object, type="regression")}. #' For \code{nnetar} objects, when \code{type="innovations"} and \code{lambda} is used, a #' matrix of time-series consisting of the residuals from each of the fitted neural networks is returned. #' #' @param object An object containing a time series model of class \code{ar}, #' \code{Arima}, \code{bats}, \code{ets}, \code{arfima}, \code{nnetar} or #' \code{stlm}. #' If \code{object} is of class \code{forecast}, then the function will return #' \code{object$residuals} if it exists, otherwise it returns the differences between #' the observations and their fitted values. #' @param type Type of residual. #' @param h If \code{type='response'}, then the fitted values are computed for #' \code{h}-step forecasts. #' @param ... Other arguments not used. #' @return A \code{ts} object. #' @author Rob J Hyndman #' @seealso \code{\link{fitted.Arima}}, \code{\link{checkresiduals}}. #' @keywords ts #' #' @export residuals.forecast <- function(object, type=c("innovation", "response"), ...) { type <- match.arg(type) if (type == "innovation") { object$residuals } else { getResponse(object) - fitted(object) } } #' @rdname residuals.forecast #' @export residuals.ar <- function(object, type=c("innovation", "response"), ...) { type <- match.arg(type) # innovation and response residuals are the same for AR models object$resid } #' @rdname residuals.forecast #' #' @aliases residuals.forecast_ARIMA #' @examples #' fit <- Arima(lynx,order=c(4,0,0), lambda=0.5) #' #' plot(residuals(fit)) #' plot(residuals(fit, type='response')) #' @export residuals.Arima <- function(object, type=c("innovation", "response", "regression"), h=1, ...) { type <- match.arg(type) if (type == "innovation") { object$residuals } else if (type == "response") { getResponse(object) - fitted(object, h = h) } else { x <- getResponse(object) if (!is.null(object$lambda)) { x <- BoxCox(x, object$lambda) } xreg <- getxreg(object) # Remove intercept if (is.element("intercept", names(object$coef))) { xreg <- cbind(rep(1, length(x)), xreg) } # Return errors if (is.null(xreg)) { return(x) } else { norder <- sum(object$arma[1:4]) return(ts( c(x - xreg %*% as.matrix(object$coef[(norder + 1):length(object$coef)])), frequency = frequency(x), start = start(x) )) } } } #' @export residuals.forecast_ARIMA <- residuals.Arima #' @rdname residuals.forecast #' @export residuals.bats <- function(object, type=c("innovation", "response"), h=1, ...) { type <- match.arg(type) if (type == "innovation") { object$errors } else { getResponse(object) - fitted(object, h = h) } } #' @rdname residuals.forecast #' @export residuals.tbats <- function(object, type=c("innovation", "response"), h=1, ...) { type <- match.arg(type) if (type == "innovation") { object$errors } else { getResponse(object) - fitted(object, h = h) } } #' @rdname residuals.forecast #' @export residuals.ets <- function(object, type=c("innovation", "response"), h=1, ...) 
{ type <- match.arg(type) if (type == "innovation") { object$residuals } else { getResponse(object) - fitted(object, h = h) } } #' @rdname residuals.forecast #' @export residuals.ARFIMA <- function(object, type=c("innovation", "response"), ...) { type <- match.arg(type) if (type == "innovation") { if (!is.null(object$residuals)) { # Object produced by arfima() return(object$residuals) } else # Object produced by fracdiff() { if (is.element("x", names(object))) { x <- object$x } else { x <- eval.parent(parse(text = as.character(object$call)[2])) } if (!is.null(object$lambda)) { x <- BoxCox(x, object$lambda) } y <- fracdiff::diffseries(x - mean(x), d = object$d) fit <- arima(y, order = c(length(object$ar), 0, length(object$ma)), include.mean = FALSE, fixed = c(object$ar, -object$ma)) return(residuals(fit, type = "innovation")) } } else { getResponse(object) - fitted(object) } } #' @rdname residuals.forecast #' @export residuals.nnetar <- function(object, type=c("innovation", "response"), h=1, ...) { type <- match.arg(type) if (type == "innovation" && !is.null(object$lambda)) { res <- matrix(unlist(lapply(object$model, residuals)), ncol = length(object$model)) if (!is.null(object$scalex$scale)) { res <- res * object$scalex$scale } } else { res <- getResponse(object) - fitted(object, h = h) } tspx <- tsp(getResponse(object)) res <- ts(res, frequency = tspx[3L], end = tspx[2L]) return(res) } #' @rdname residuals.forecast #' @export residuals.stlm <- function(object, type=c("innovation", "response"), ...) { type <- match.arg(type) if (type == "innovation") { object$residuals } else { getResponse(object) - fitted(object) } } #' @rdname residuals.forecast #' @export residuals.tslm <- function(object, type=c("innovation", "response", "deviance"), ...) { type <- match.arg(type) if (type == "innovation" || type == "deviance") { object$residuals } else { getResponse(object) - fitted(object) } } forecast/R/adjustSeasonalSeeds.R0000644000176200001440000001231513553673010016355 0ustar liggesusers############################################################################### # TBATS code cutWTBATS <- function(use.beta, w.tilda.transpose, seasonal.periods, p=0, q=0) { mask.vector <- numeric(length(seasonal.periods)) i <- length(seasonal.periods) while (i > 1) { for (j in 1:(i - 1)) { if ((seasonal.periods[i] %% seasonal.periods[j]) == 0) { mask.vector[j] <- 1 } } i <- i - 1 } w.pos.counter <- 1 w.pos <- 1 if (use.beta) { w.pos <- w.pos + 1 } for (s in seasonal.periods) { if (mask.vector[w.pos.counter] == 1) { w.tilda.transpose <- w.tilda.transpose[, -((w.pos + 1):(w.pos + s))] } else if (mask.vector[w.pos.counter] < 0) { # Cut more than one off w.pos <- w.pos + s w.tilda.transpose <- w.tilda.transpose[, -c((w.pos + mask.vector[w.pos.counter] + 1):w.pos)] w.pos <- w.pos + mask.vector[w.pos.counter] } else { w.pos <- w.pos + s w.tilda.transpose <- w.tilda.transpose[, -w.pos] w.pos <- w.pos - 1 } w.pos.counter <- w.pos.counter + 1 } if ((p != 0) | (q != 0)) { end.cut <- ncol(w.tilda.transpose) start.cut <- end.cut - (p + q) + 1 w.tilda.transpose <- w.tilda.transpose[, -c(start.cut:end.cut)] } return(list(matrix = w.tilda.transpose, mask.vector = mask.vector)) } # BATS code below ######### cutW <- function(use.beta, w.tilda.transpose, seasonal.periods, p=0, q=0) { mask.vector <- numeric(length(seasonal.periods)) i <- length(seasonal.periods) while (i > 1) { for (j in 1:(i - 1)) { if ((seasonal.periods[i] %% seasonal.periods[j]) == 0) { mask.vector[j] <- 1 } } i <- i - 1 } if (length(seasonal.periods) > 1) { 
for (s in length(seasonal.periods):2) { for (j in (s - 1):1) { hcf <- findGCD(seasonal.periods[s], seasonal.periods[j]) if (hcf != 1) { if ((mask.vector[s] != 1) && (mask.vector[j] != 1)) { mask.vector[s] <- hcf * -1 } } } } } w.pos.counter <- 1 w.pos <- 1 if (use.beta) { w.pos <- w.pos + 1 } for (s in seasonal.periods) { if (mask.vector[w.pos.counter] == 1) { w.tilda.transpose <- w.tilda.transpose[, -((w.pos + 1):(w.pos + s))] } else if (mask.vector[w.pos.counter] < 0) { # Cut more than one off w.pos <- w.pos + s w.tilda.transpose <- w.tilda.transpose[, -c((w.pos + mask.vector[w.pos.counter] + 1):w.pos)] w.pos <- w.pos + mask.vector[w.pos.counter] } else { w.pos <- w.pos + s w.tilda.transpose <- w.tilda.transpose[, -w.pos] w.pos <- w.pos - 1 } w.pos.counter <- w.pos.counter + 1 } if ((p != 0) | (q != 0)) { end.cut <- ncol(w.tilda.transpose) start.cut <- end.cut - (p + q) + 1 w.tilda.transpose <- w.tilda.transpose[, -c(start.cut:end.cut)] } return(list(matrix = w.tilda.transpose, mask.vector = mask.vector)) } calcSeasonalSeeds <- function(use.beta, coefs, seasonal.periods, mask.vector, p=0, q=0) { x.pos.counter <- 1 sum.k <- 0 if (use.beta) { x.pos <- 2 new.x.nought <- matrix(coefs[1:2], nrow = 2, ncol = 1) } else { x.pos <- 1 new.x.nought <- matrix(coefs[1], nrow = 1, ncol = 1) } x.pos.counter <- 1 for (s in seasonal.periods) { if (mask.vector[x.pos.counter] == 1) { # Make a vector of zeros season <- matrix(0, nrow = s, ncol = 1) new.x.nought <- rbind(new.x.nought, season) } else if (mask.vector[x.pos.counter] < 0) { extract <- coefs[(x.pos + 1):(x.pos + s + mask.vector[x.pos.counter])] # print("extract:") # print(extract) # Find k k <- sum(extract) # update sum.k sum.k <- sum.k + k / s # create the current.periodicity vector current.periodicity <- extract - k / s current.periodicity <- matrix(current.periodicity, nrow = length(current.periodicity), ncol = 1) additional <- matrix(-k / s, nrow = (-1 * mask.vector[x.pos.counter]), ncol = 1) current.periodicity <- rbind(current.periodicity, additional) new.x.nought <- rbind(new.x.nought, current.periodicity) x.pos <- x.pos + s + mask.vector[x.pos.counter] } else { # Find k k <- sum(coefs[(x.pos + 1):(x.pos + s - 1)]) # update sum.k sum.k <- sum.k + k / s # create the current.periodicity vector current.periodicity <- coefs[(x.pos + 1):(x.pos + s - 1)] - k / s current.periodicity <- c(current.periodicity, -k / s) current.periodicity <- matrix(current.periodicity, nrow = length(current.periodicity), ncol = 1) new.x.nought <- rbind(new.x.nought, current.periodicity) x.pos <- x.pos + s - 1 } # Adjust L(t) x.pos.counter <- x.pos.counter + 1 } # print(new.x.nought) # Lastly, get the arma error seed states, if they exist. if ((p != 0) | (q != 0)) { arma.seed.states <- numeric((p + q)) arma.seed.states <- matrix(arma.seed.states, nrow = length(arma.seed.states), ncol = 1) # Final value of x.nought x.nought <- rbind(new.x.nought, arma.seed.states) } else { x.nought <- new.x.nought } return(x.nought) } findGCD <- function(larger, smaller) { remainder <- larger %% smaller if (remainder != 0) { return(findGCD(smaller, remainder)) } else { return(smaller) } } forecast/R/forecastBATS.R0000644000176200001440000002123713553673010014674 0ustar liggesusers#' Forecasting using BATS and TBATS models #' #' Forecasts \code{h} steps ahead with a BATS model. Prediction intervals are #' also produced. #' #' @param object An object of class "\code{bats}". Usually the result of a call #' to \code{\link{bats}}. #' @param h Number of periods for forecasting. 
Default value is twice the #' largest seasonal period (for seasonal data) or ten (for non-seasonal data). #' @param level Confidence level for prediction intervals. #' @param fan If TRUE, level is set to \code{seq(51,99,by=3)}. This is suitable #' for fan plots. #' @param biasadj Use adjusted back-transformed mean for Box-Cox #' transformations. If TRUE, point forecasts and fitted values are mean #' forecast. Otherwise, these points can be considered the median of the #' forecast densities. #' @param ... Other arguments, currently ignored. #' @return An object of class "\code{forecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{forecast.bats}. #' #' An object of class \code{"forecast"} is a list containing at least the #' following elements: \item{model}{A copy of the \code{bats} object} #' \item{method}{The name of the forecasting method as a character string} #' \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for #' prediction intervals} \item{upper}{Upper limits for prediction intervals} #' \item{level}{The confidence values associated with the prediction intervals} #' \item{x}{The original time series (either \code{object} itself or the time #' series used to create the model stored as \code{object}).} #' \item{residuals}{Residuals from the fitted model.} \item{fitted}{Fitted #' values (one-step forecasts)} #' @author Slava Razbash and Rob J Hyndman #' @seealso \code{\link{bats}}, \code{\link{tbats}},\code{\link{forecast.ets}}. #' @references De Livera, A.M., Hyndman, R.J., & Snyder, R. D. (2011), #' Forecasting time series with complex seasonal patterns using exponential #' smoothing, \emph{Journal of the American Statistical Association}, #' \bold{106}(496), 1513-1527. #' @keywords ts #' @examples #' #' \dontrun{ #' fit <- bats(USAccDeaths) #' plot(forecast(fit)) #' #' taylor.fit <- bats(taylor) #' plot(forecast(taylor.fit)) #' } #' #' @export forecast.bats <- function(object, h, level=c(80, 95), fan=FALSE, biasadj=NULL, ...) 
{ # Set up the variables if (any(class(object$y) == "ts")) { ts.frequency <- frequency(object$y) } else { ts.frequency <- ifelse(!is.null(object$seasonal.periods), max(object$seasonal.periods), 1) } if (missing(h)) { if (is.null(object$seasonal.periods)) { h <- ifelse(ts.frequency == 1, 10, 2 * ts.frequency) } else { h <- 2 * max(object$seasonal.periods) } } else if (h <= 0) { stop("Forecast horizon out of bounds") } if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 && max(level) < 1) { level <- 100 * level } else if (min(level) < 0 || max(level) > 99.99) { stop("Confidence limit out of range") } } # Set up the matrices x <- matrix(0, nrow = nrow(object$x), ncol = h) y.forecast <- numeric(h) # w <- makeWMatrix(small.phi=object$damping.parameter, seasonal.periods=object$seasonal.periods, ar.coefs=object$ar.coefficients, ma.coefs=object$ma.coefficients) w <- .Call("makeBATSWMatrix", smallPhi_s = object$damping.parameter, sPeriods_s = object$seasonal.periods, arCoefs_s = object$ar.coefficients, maCoefs_s = object$ma.coefficients, PACKAGE = "forecast") # g <- makeGMatrix(alpha=object$alpha, beta=object$beta, gamma.vector=object$gamma.values, seasonal.periods=object$seasonal.periods, p=length(object$ar.coefficients), q=length(object$ma.coefficients)) g <- .Call("makeBATSGMatrix", object$alpha, object$beta, object$gamma.values, object$seasonal.periods, length(object$ar.coefficients), length(object$ma.coefficients), PACKAGE = "forecast") F <- makeFMatrix(alpha = object$alpha, beta = object$beta, small.phi = object$damping.parameter, seasonal.periods = object$seasonal.periods, gamma.bold.matrix = g$gamma.bold.matrix, ar.coefs = object$ar.coefficients, ma.coefs = object$ma.coefficients) # Do the forecast y.forecast[1] <- w$w.transpose %*% object$x[, ncol(object$x)] x[, 1] <- F %*% object$x[, ncol(object$x)] # + g$g %*% object$errors[length(object$errors)] if (h > 1) { for (t in 2:h) { x[, t] <- F %*% x[, (t - 1)] y.forecast[t] <- w$w.transpose %*% x[, (t - 1)] } } ## Make prediction intervals here lower.bounds <- upper.bounds <- matrix(NA, ncol = length(level), nrow = h) variance.multiplier <- numeric(h) variance.multiplier[1] <- 1 if (h > 1) { for (j in 1:(h - 1)) { if (j == 1) { f.running <- diag(ncol(F)) } else { f.running <- f.running %*% F } c.j <- w$w.transpose %*% f.running %*% g$g variance.multiplier[(j + 1)] <- variance.multiplier[j] + c.j ^ 2 } } variance <- object$variance * variance.multiplier # print(variance) st.dev <- sqrt(variance) for (i in 1:length(level)) { marg.error <- st.dev * abs(qnorm((100 - level[i]) / 200)) lower.bounds[, i] <- y.forecast - marg.error upper.bounds[, i] <- y.forecast + marg.error } # Inv Box Cox transform if required if (!is.null(object$lambda)) { y.forecast <- InvBoxCox(y.forecast, object$lambda, biasadj, list(level = level, upper = upper.bounds, lower = lower.bounds)) lower.bounds <- InvBoxCox(lower.bounds, object$lambda) if (object$lambda < 1) { lower.bounds <- pmax(lower.bounds, 0) } upper.bounds <- InvBoxCox(upper.bounds, object$lambda) } ## Calc a start time for the forecast start.time <- start(object$y) y <- ts(c(object$y, 0), start = start.time, frequency = ts.frequency) fcast.start.time <- end(y) # Make msts object for x and mean x <- msts(object$y, seasonal.periods = (if (!is.null(object$seasonal.periods)) { object$seasonal.periods } else { ts.frequency }), ts.frequency = ts.frequency, start = start.time) fitted.values <- msts(object$fitted.values, seasonal.periods = (if (!is.null(object$seasonal.periods)) { 
object$seasonal.periods } else { ts.frequency }), ts.frequency = ts.frequency, start = start.time) y.forecast <- msts(y.forecast, seasonal.periods = (if (!is.null(object$seasonal.periods)) { object$seasonal.periods } else { ts.frequency }), ts.frequency = ts.frequency, start = fcast.start.time) upper.bounds <- msts(upper.bounds, seasonal.periods = (if (!is.null(object$seasonal.periods)) { object$seasonal.periods } else { ts.frequency }), ts.frequency = ts.frequency, start = fcast.start.time) lower.bounds <- msts(lower.bounds, seasonal.periods = (if (!is.null(object$seasonal.periods)) { object$seasonal.periods } else { ts.frequency }), ts.frequency = ts.frequency, start = fcast.start.time) colnames(upper.bounds) <- colnames(lower.bounds) <- paste0(level, "%") forecast.object <- list( model = object, mean = y.forecast, level = level, x = x, series = object$series, upper = upper.bounds, lower = lower.bounds, fitted = fitted.values, method = as.character(object), residuals = object$errors ) if (is.null(object$series)) { forecast.object$series <- deparse(object$call$y) } class(forecast.object) <- "forecast" return(forecast.object) } #' @export as.character.bats <- function(x, ...) { name <- "BATS(" if (!is.null(x$lambda)) { name <- paste(name, round(x$lambda, digits = 3), sep = "") } else { name <- paste(name, "1", sep = "") } name <- paste(name, ", {", sep = "") if (!is.null(x$ar.coefficients)) { name <- paste(name, length(x$ar.coefficients), sep = "") } else { name <- paste(name, "0", sep = "") } name <- paste(name, ",", sep = "") if (!is.null(x$ma.coefficients)) { name <- paste(name, length(x$ma.coefficients), sep = "") } else { name <- paste(name, "0", sep = "") } name <- paste(name, "}, ", sep = "") if (!is.null(x$damping.parameter)) { name <- paste(name, round(x$damping.parameter, digits = 3), sep = "") } else { name <- paste(name, "-", sep = "") } name <- paste(name, ", ", sep = "") if (!is.null(x$seasonal.periods)) { name <- paste(name, "{", sep = "") for (i in x$seasonal.periods) { name <- paste(name, i, sep = "") if (i != x$seasonal.periods[length(x$seasonal.periods)]) { name <- paste(name, ",", sep = "") } else { name <- paste(name, "})", sep = "") } } } else { name <- paste(name, "-)", sep = "") } return(name) } forecast/R/seasadj.R0000644000176200001440000000312013553673010014015 0ustar liggesusers## Generic seasadj functions #' Seasonal adjustment #' #' Returns seasonally adjusted data constructed by removing the seasonal #' component. #' #' #' @param object Object created by \code{\link[stats]{decompose}}, #' \code{\link[stats]{stl}} or \code{\link{tbats}}. #' @param ... Other arguments not currently used. #' @return Univariate time series. #' @author Rob J Hyndman #' @seealso \code{\link[stats]{stl}}, \code{\link[stats]{decompose}}, #' \code{\link{tbats}}. #' @keywords ts #' @examples #' plot(AirPassengers) #' lines(seasadj(decompose(AirPassengers,"multiplicative")),col=4) #' #' @export seasadj <- function(object, ...) UseMethod("seasadj") #' @rdname seasadj #' @export seasadj.stl <- function(object, ...) { return(trendcycle(object) + remainder(object)) } #' @rdname seasadj #' @export seasadj.mstl <- function(object, ...) { return(trendcycle(object) + remainder(object)) } #' @rdname seasadj #' @export seasadj.decomposed.ts <- function(object, ...) { if (object$type == "additive") { return(object$x - object$seasonal) } else { return(object$x / object$seasonal) } } #' @rdname seasadj #' @export seasadj.tbats <- function(object, ...) 
{ return(object$y - seasonal(object)) # comp <- tbats.components(object) # scols <- grep("season",colnames(comp)) # sa <- comp[,"observed"] - rowSums(comp[,scols,drop=FALSE]) # # Back transform if necessary # if (!is.null(object$lambda)) # sa <- InvBoxCox(sa, object$lambda) # return(sa) } #' @rdname seasadj #' @export seasadj.seas <- function(object, ...) { return(seasextract_w_na_action(object, "final")) } forecast/R/HoltWintersNew.R0000644000176200001440000004526213553673010015354 0ustar liggesusers# Modelled on the HoltWinters() function but with more conventional # initialization. # Written by Zhenyu Zhou. 21 October 2012 HoltWintersZZ <- function(x, # smoothing parameters alpha = NULL, # level beta = NULL, # trend gamma = NULL, # seasonal component seasonal = c("additive", "multiplicative"), exponential = FALSE, # exponential phi = NULL, # damp lambda = NULL, # box-cox biasadj = FALSE, # adjusted back-transformed mean for box-cox warnings = TRUE # return optimization warnings ) { x <- as.ts(x) seasonal <- match.arg(seasonal) m <- frequency(x) lenx <- length(x) if (!is.null(lambda)) { x <- BoxCox(x, lambda) lambda <- attr(x, "lambda") } if (is.null(phi) || !is.numeric(phi)) { phi <- 1 } if (!is.null(alpha) && !is.numeric(alpha)) { stop("cannot fit models without level ('alpha' must not be 0 or FALSE).") } if (!all(is.null(c(alpha, beta, gamma))) && any(c(alpha, beta, gamma) < 0 | c(alpha, beta, gamma) > 1)) { stop("'alpha', 'beta' and 'gamma' must be within the unit interval.") } if ((is.null(gamma) || gamma > 0)) { if (seasonal == "multiplicative" && any(x <= 0)) { stop("data must be positive for multiplicative Holt-Winters.") } } if (m <= 1) { gamma <- FALSE } ## initialise l0, b0, s0 if (!is.null(gamma) && is.logical(gamma) && !gamma) { seasonal <- "none" l.start <- x[1L] s.start <- 0 if (is.null(beta) || !is.logical(beta) || beta) { if (!exponential) { b.start <- x[2L] - x[1L] } else { b.start <- x[2L] / x[1L] } } } else { ## seasonal Holt-Winters l.start <- mean(x[1:m]) b.start <- (mean(x[m + (1:m)]) - l.start) / m if (seasonal == "additive") { s.start <- x[1:m] - l.start } else { s.start <- x[1:m] / l.start } } # initialise smoothing parameters # lower=c(rep(0.0001,3), 0.8) # upper=c(rep(0.9999,3),0.98) lower <- c(0, 0, 0, 0) upper <- c(1, 1, 1, 1) if (!is.null(beta) && is.logical(beta) && !beta) { trendtype <- "N" } else if (exponential) { trendtype <- "M" } else { trendtype <- "A" } if (seasonal == "none") { seasontype <- "N" } else if (seasonal == "multiplicative") { seasontype <- "M" } else { seasontype <- "A" } ## initialise smoothing parameter optim.start <- initparam( alpha = alpha, beta = beta, gamma = gamma, phi = 1, trendtype = trendtype, seasontype = seasontype, damped = FALSE, lower = lower, upper = upper, m = m ) # if(!is.na(optim.start["alpha"])) # alpha2 <- optim.start["alpha"] # else # alpha2 <- alpha # if(!is.na(optim.start["beta"])) # beta2 <- optim.start["beta"] # else # beta2 <- beta # if(!is.na(optim.start["gamma"])) # gamma2 <- optim.start["gamma"] # else # gamma2 <- gamma # if(!check.param(alpha = alpha2,beta = beta2, gamma = gamma2,phi=1,lower,upper,bounds="haha",m=m)) # { # print(paste("alpha=", alpha2, "beta=",beta2, "gamma=",gamma2)) # stop("Parameters out of range") # } ################################################################################### # optimisation: alpha, beta, gamma, if any of them is null, then optimise them error <- function(p, select) { if (select[1] > 0) { alpha <- p[1L] } if (select[2] > 0) { beta <- p[1L + select[1]] } if 
(select[3] > 0) { gamma <- p[1L + select[1] + select[2]] } zzhw( x, lenx = lenx, alpha = alpha, beta = beta, gamma = gamma, seasonal = seasonal, m = m, dotrend = (!is.logical(beta) || beta), doseasonal = (!is.logical(gamma) || gamma), exponential = exponential, phi = phi, l.start = l.start, b.start = b.start, s.start = s.start )$SSE } select <- as.numeric(c(is.null(alpha), is.null(beta), is.null(gamma))) if (sum(select) > 0) # There are parameters to optimize { sol <- optim(optim.start, error, method = "L-BFGS-B", lower = lower[select], upper = upper[select], select = select) if (sol$convergence || any(sol$par < 0 | sol$par > 1)) { if (sol$convergence > 50) { if (warnings) { warning(gettextf("optimization difficulties: %s", sol$message), domain = NA) } } else { stop("optimization failure") } } if (select[1] > 0) { alpha <- sol$p[1L] } if (select[2] > 0) { beta <- sol$p[1L + select[1]] } if (select[3] > 0) { gamma <- sol$p[1L + select[1] + select[2]] } } final.fit <- zzhw( x, lenx = lenx, alpha = alpha, beta = beta, gamma = gamma, seasonal = seasonal, m = m, dotrend = (!is.logical(beta) || beta), doseasonal = (!is.logical(gamma) || gamma), exponential = exponential, phi = phi, l.start = l.start, b.start = b.start, s.start = s.start ) tspx <- tsp(x) fitted <- ts(final.fit$fitted, frequency = m, start = tspx[1]) res <- ts(final.fit$residuals, frequency = m, start = tspx[1]) if (!is.null(lambda)) { fitted <- InvBoxCox(fitted, lambda, biasadj, var(final.fit$residuals)) attr(lambda, "biasadj") <- biasadj } states <- matrix(final.fit$level, ncol = 1) colnames(states) <- "l" if (trendtype != "N") { states <- cbind(states, b = final.fit$trend) } if (seasontype != "N") { nr <- nrow(states) nc <- ncol(states) for (i in 1:m) states <- cbind(states, final.fit$season[(m - i) + (1:nr)]) colnames(states)[nc + (1:m)] <- paste("s", 1:m, sep = "") } states <- ts(states, frequency = m, start = tspx[1] - 1 / m) # Package output as HoltWinters class # structure(list(fitted = fitted, # x = x, # alpha = alpha, # beta = beta, # gamma = gamma, # coefficients = c(a = final.fit$level[lenx], # b = if (!is.logical(beta) || beta) final.fit$trend[lenx], # s = if (!is.logical(gamma) || gamma) final.fit$season[lenx - m + 1L:m]), # seasonal = seasonal, # exponential = exponential, # SSE = final.fit$SSE, # call = match.call(), # level = final.fit$level, # trend = final.fit$trend, # season = final.fit$season, # phi = phi # ), # class = "HoltWinters" # ) # Package output as ets class damped <- (phi < 1.0) if (seasonal == "additive") { # This should not happen components <- c("A", trendtype, seasontype, damped) } else if (seasonal == "multiplicative") { components <- c("M", trendtype, seasontype, damped) } else if (seasonal == "none" && exponential) { components <- c("M", trendtype, seasontype, damped) } else { # if(seasonal=="none" & !exponential) components <- c("A", trendtype, seasontype, damped) } initstate <- states[1, ] param <- alpha names(param) <- "alpha" if (trendtype != "N") { param <- c(param, beta = beta) names(param)[length(param)] <- "beta" } if (seasontype != "N") { param <- c(param, gamma = gamma) names(param)[length(param)] <- "gamma" } if (damped) { param <- c(param, phi = phi) names(param)[length(param)] <- "phi" } if (components[1] == "A") { sigma2 <- mean(res ^ 2) } else { sigma2 <- mean((res / fitted) ^ 2) } structure( list( fitted = fitted, residuals = res, components = components, x = x, par = c(param, initstate), initstate = initstate, states = states, SSE = final.fit$SSE, sigma2 = sigma2, call = 
match.call(), m = m
    ),
    class = "ets"
  )
}

###################################################################################
# filter function
zzhw <- function(x, lenx, alpha=NULL, beta=NULL, gamma=NULL, seasonal="additive", m, dotrend=FALSE, doseasonal=FALSE, l.start=NULL, exponential = NULL, phi=NULL, b.start=NULL, s.start=NULL) {
  # Check is.null() first so a NULL flag never reaches the comparison
  if (is.null(exponential) || !isTRUE(exponential)) {
    exponential <- FALSE
  }
  if (is.null(phi) || !is.numeric(phi)) {
    phi <- 1
  }
  # initialise array of l, b, s
  level <- trend <- season <- xfit <- residuals <- numeric(lenx)
  SSE <- 0
  if (!dotrend) {
    beta <- 0
    b.start <- 0
  }
  if (!doseasonal) {
    gamma <- 0
    s.start[1:length(s.start)] <- ifelse(seasonal == "additive", 0, 1)
  }
  lastlevel <- level0 <- l.start
  lasttrend <- trend0 <- b.start
  season0 <- s.start
  for (i in 1:lenx) {
    # define l(t-1)
    if (i > 1) {
      lastlevel <- level[i - 1]
    }
    # define b(t-1)
    if (i > 1) {
      lasttrend <- trend[i - 1]
    }
    # define s(t-m)
    if (i > m) {
      lastseason <- season[i - m]
    } else {
      lastseason <- season0[i]
    }
    if (is.na(lastseason)) {
      lastseason <- ifelse(seasonal == "additive", 0, 1)
    }
    # stop((lastlevel + phi*lasttrend)*lastseason)
    # forecast for this period i
    if (seasonal == "additive") {
      if (!exponential) {
        xhat <- lastlevel + phi * lasttrend + lastseason
      } else {
        xhat <- lastlevel * lasttrend ^ phi + lastseason
      }
    } else {
      if (!exponential) {
        xhat <- (lastlevel + phi * lasttrend) * lastseason
      } else {
        xhat <- lastlevel * lasttrend ^ phi * lastseason
      }
    }
    xfit[i] <- xhat
    res <- x[i] - xhat
    residuals[i] <- res
    SSE <- SSE + res * res
    # calculate level[i]
    if (seasonal == "additive") {
      if (!exponential) {
        level[i] <- alpha * (x[i] - lastseason) + (1 - alpha) * (lastlevel + phi * lasttrend)
      } else {
        level[i] <- alpha * (x[i] - lastseason) + (1 - alpha) * (lastlevel * lasttrend ^ phi)
      }
    } else {
      if (!exponential) {
        level[i] <- alpha * (x[i] / lastseason) + (1 - alpha) * (lastlevel + phi * lasttrend)
      } else {
        level[i] <- alpha * (x[i] / lastseason) + (1 - alpha) * (lastlevel * lasttrend ^ phi)
      }
    }
    # calculate trend[i]
    if (!exponential) {
      trend[i] <- beta * (level[i] - lastlevel) + (1 - beta) * phi * lasttrend
    } else {
      trend[i] <- beta * (level[i] / lastlevel) + (1 - beta) * lasttrend ^ phi
    }
    # calculate season[i]
    if (seasonal == "additive") {
      if (!exponential) {
        season[i] <- gamma * (x[i] - lastlevel - phi * lasttrend) + (1 - gamma) * lastseason
      } else {
        season[i] <- gamma * (x[i] - lastlevel * lasttrend ^ phi) + (1 - gamma) * lastseason
      }
    } else {
      if (!exponential) {
        season[i] <- gamma * (x[i] / (lastlevel + phi * lasttrend)) + (1 - gamma) * lastseason
      } else {
        season[i] <- gamma * (x[i] / (lastlevel * lasttrend ^ phi)) + (1 - gamma) * lastseason
      }
    }
  }
  list(
    SSE = SSE,
    fitted = xfit,
    residuals = residuals,
    level = c(level0, level),
    trend = c(trend0, trend),
    season = c(season0, season),
    phi = phi
  )
}

#' Exponential smoothing forecasts
#'
#' Returns forecasts and other information for exponential smoothing forecasts
#' applied to \code{y}.
#'
#' ses, holt and hw are simply convenient wrapper functions for
#' \code{forecast(ets(...))}.
#'
#' @param y a numeric vector or time series of class \code{ts}
#' @param h Number of periods for forecasting.
#' @param damped If TRUE, use a damped trend.
#' @param seasonal Type of seasonality in \code{hw} model. "additive" or
#' "multiplicative"
#' @param level Confidence level for prediction intervals.
#' @param fan If TRUE, level is set to seq(51,99,by=3). This is suitable for
#' fan plots.
#' @param initial Method used for selecting initial state values.
If #' \code{optimal}, the initial values are optimized along with the smoothing #' parameters using \code{\link{ets}}. If \code{simple}, the initial values are #' set to values obtained using simple calculations on the first few #' observations. See Hyndman & Athanasopoulos (2014) for details. #' @param exponential If TRUE, an exponential trend is fitted. Otherwise, the #' trend is (locally) linear. #' @param alpha Value of smoothing parameter for the level. If \code{NULL}, it #' will be estimated. #' @param beta Value of smoothing parameter for the trend. If \code{NULL}, it #' will be estimated. #' @param gamma Value of smoothing parameter for the seasonal component. If #' \code{NULL}, it will be estimated. #' @param phi Value of damping parameter if \code{damped=TRUE}. If \code{NULL}, #' it will be estimated. #' @param x Deprecated. Included for backwards compatibility. #' @param ... Other arguments passed to \code{forecast.ets}. #' @inheritParams forecast #' #' @return An object of class "\code{forecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{ets} and associated #' functions. #' #' An object of class \code{"forecast"} is a list containing at least the #' following elements: \item{model}{A list containing information about the #' fitted model} \item{method}{The name of the forecasting method as a #' character string} \item{mean}{Point forecasts as a time series} #' \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper #' limits for prediction intervals} \item{level}{The confidence values #' associated with the prediction intervals} \item{x}{The original time series #' (either \code{object} itself or the time series used to create the model #' stored as \code{object}).} \item{residuals}{Residuals from the fitted #' model.} \item{fitted}{Fitted values (one-step forecasts)} #' @author Rob J Hyndman #' @seealso \code{\link{ets}}, \code{\link[stats]{HoltWinters}}, #' \code{\link{rwf}}, \code{\link[stats]{arima}}. #' @references Hyndman, R.J., Koehler, A.B., Ord, J.K., Snyder, R.D. (2008) #' \emph{Forecasting with exponential smoothing: the state space approach}, #' Springer-Verlag: New York. \url{http://www.exponentialsmoothing.net}. #' #' @references Hyndman and Athanasopoulos (2018) \emph{Forecasting: principles #' and practice}, 2nd edition, OTexts: Melbourne, Australia. #' \url{https://OTexts.org/fpp2/} #' @keywords ts #' @examples #' #' fcast <- holt(airmiles) #' plot(fcast) #' deaths.fcast <- hw(USAccDeaths,h=48) #' plot(deaths.fcast) #' #' @export ses <- function(y, h = 10, level = c(80, 95), fan = FALSE, initial=c("optimal", "simple"), alpha=NULL, lambda=NULL, biasadj=FALSE, x=y, ...) { initial <- match.arg(initial) if (initial == "optimal") { fcast <- forecast(ets(x, "ANN", alpha = alpha, opt.crit = "mse", lambda = lambda, biasadj = biasadj), h, level = level, fan = fan, ...) } else { fcast <- forecast(HoltWintersZZ(x, alpha = alpha, beta = FALSE, gamma = FALSE, lambda = lambda, biasadj = biasadj), h, level = level, fan = fan, ...) 
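# Illustrative usage (a sketch, not run): both initialisations fit simple
# exponential smoothing; "optimal" estimates the initial level along with
# alpha via ets(), while "simple" sets it to the first observation, e.g.
#   fc1 <- ses(USAccDeaths, h = 12, initial = "optimal")
#   fc2 <- ses(USAccDeaths, h = 12, initial = "simple", alpha = 0.2)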
} fcast$method <- fcast$model$method <- "Simple exponential smoothing" fcast$model$call <- match.call() fcast$series <- deparse(substitute(y)) return(fcast) } #' @rdname ses #' @export holt <- function(y, h = 10, damped = FALSE, level = c(80, 95), fan = FALSE, initial=c("optimal", "simple"), exponential=FALSE, alpha=NULL, beta=NULL, phi=NULL, lambda=NULL, biasadj=FALSE, x=y, ...) { initial <- match.arg(initial) if (length(y) <= 1L) { stop("I need at least two observations to estimate trend.") } if (initial == "optimal" || damped) { if (exponential) { fcast <- forecast(ets(x, "MMN", alpha = alpha, beta = beta, phi = phi, damped = damped, opt.crit = "mse", lambda = lambda, biasadj = biasadj), h, level = level, fan = fan, ...) } else { fcast <- forecast(ets(x, "AAN", alpha = alpha, beta = beta, phi = phi, damped = damped, opt.crit = "mse", lambda = lambda, biasadj = biasadj), h, level = level, fan = fan, ...) } } else { fcast <- forecast( HoltWintersZZ(x, alpha = alpha, beta = beta, gamma = FALSE, phi = phi, exponential = exponential, lambda = lambda, biasadj = biasadj), h, level = level, fan = fan, ... ) } if (damped) { fcast$method <- "Damped Holt's method" if (initial == "simple") { warning("Damped Holt's method requires optimal initialization") } } else { fcast$method <- "Holt's method" } if (exponential) { fcast$method <- paste(fcast$method, "with exponential trend") } fcast$model$method <- fcast$method fcast$model$call <- match.call() fcast$series <- deparse(substitute(y)) return(fcast) } #' @rdname ses #' @export hw <- function(y, h = 2 * frequency(x), seasonal = c("additive", "multiplicative"), damped = FALSE, level = c(80, 95), fan = FALSE, initial=c("optimal", "simple"), exponential=FALSE, alpha=NULL, beta=NULL, gamma=NULL, phi=NULL, lambda=NULL, biasadj=FALSE, x=y, ...) { initial <- match.arg(initial) seasonal <- match.arg(seasonal) m <- frequency(x) if (m <= 1L) { stop("The time series should have frequency greater than 1.") } if (length(y) < m + 3) { stop(paste("I need at least", m + 3, "observations to estimate seasonality.")) } if (initial == "optimal" || damped) { if (seasonal == "additive" && exponential) { stop("Forbidden model combination") } else if (seasonal == "additive" && !exponential) { fcast <- forecast(ets(x, "AAA", alpha = alpha, beta = beta, gamma = gamma, phi = phi, damped = damped, opt.crit = "mse", lambda = lambda, biasadj = biasadj), h, level = level, fan = fan, ...) } else if (seasonal != "additive" && exponential) { fcast <- forecast(ets(x, "MMM", alpha = alpha, beta = beta, gamma = gamma, phi = phi, damped = damped, opt.crit = "mse", lambda = lambda, biasadj = biasadj), h, level = level, fan = fan, ...) } else { # if(seasonal!="additive" & !exponential) fcast <- forecast(ets(x, "MAM", alpha = alpha, beta = beta, gamma = gamma, phi = phi, damped = damped, opt.crit = "mse", lambda = lambda, biasadj = biasadj), h, level = level, fan = fan, ...) } } else { fcast <- forecast( HoltWintersZZ(x, alpha = alpha, beta = beta, gamma = gamma, phi = phi, seasonal = seasonal, exponential = exponential, lambda = lambda, biasadj = biasadj), h, level = level, fan = fan, ... 
) } if (seasonal == "additive") { fcast$method <- "Holt-Winters' additive method" } else { fcast$method <- "Holt-Winters' multiplicative method" } if (exponential) { fcast$method <- paste(fcast$method, "with exponential trend") } if (damped) { fcast$method <- paste("Damped", fcast$method) if (initial == "simple") { warning("Damped methods require optimal initialization") } } fcast$model$method <- fcast$method fcast$model$call <- match.call() fcast$series <- deparse(substitute(y)) return(fcast) } forecast/R/acf.R0000644000176200001440000003435313553673010013150 0ustar liggesusers# Replacement for the acf() function. #' (Partial) Autocorrelation and Cross-Correlation Function Estimation #' #' The function \code{Acf} computes (and by default plots) an estimate of the #' autocorrelation function of a (possibly multivariate) time series. Function #' \code{Pacf} computes (and by default plots) an estimate of the partial #' autocorrelation function of a (possibly multivariate) time series. Function #' \code{Ccf} computes the cross-correlation or cross-covariance of two #' univariate series. #' #' The functions improve the \code{\link[stats]{acf}}, #' \code{\link[stats]{pacf}} and \code{\link[stats]{ccf}} functions. The main #' differences are that \code{Acf} does not plot a spike at lag 0 when #' \code{type=="correlation"} (which is redundant) and the horizontal axes show #' lags in time units rather than seasonal units. #' #' The tapered versions implement the ACF and PACF estimates and plots #' described in Hyndman (2015), based on the banded and tapered estimates of #' autocovariance proposed by McMurry and Politis (2010). #' #' @param x a univariate or multivariate (not Ccf) numeric time series object #' or a numeric vector or matrix. #' @param y a univariate numeric time series object or a numeric vector. #' @param lag.max maximum lag at which to calculate the acf. Default is #' $10*log10(N/m)$ where $N$ is the number of observations and $m$ the number #' of series. Will be automatically limited to one less than the number of #' observations in the series. #' @param type character string giving the type of acf to be computed. Allowed #' values are \dQuote{\code{correlation}} (the default), #' \dQuote{\code{covariance}} or \dQuote{\code{partial}}. #' @param plot logical. If \code{TRUE} (the default) the resulting acf, pacf or #' ccf is plotted. #' @param na.action function to handle missing values. Default is #' \code{\link[stats]{na.contiguous}}. Useful alternatives are #' \code{\link[stats]{na.pass}} and \code{\link{na.interp}}. #' @param demean Should covariances be about the sample means? #' @param calc.ci If \code{TRUE}, confidence intervals for the ACF/PACF #' estimates are calculated. #' @param level Percentage level used for the confidence intervals. #' @param nsim The number of bootstrap samples used in estimating the #' confidence intervals. #' @param ... Additional arguments passed to the plotting function. #' @return The \code{Acf}, \code{Pacf} and \code{Ccf} functions return objects #' of class "acf" as described in \code{\link[stats]{acf}} from the stats #' package. The \code{taperedacf} and \code{taperedpacf} functions return #' objects of class "mpacf". #' @author Rob J Hyndman #' @seealso \code{\link[stats]{acf}}, \code{\link[stats]{pacf}}, #' \code{\link[stats]{ccf}}, \code{\link{tsdisplay}} #' @references Hyndman, R.J. (2015). Discussion of ``High-dimensional #' autocovariance matrices and optimal linear prediction''. \emph{Electronic #' Journal of Statistics}, 9, 792-796. 
#' #' McMurry, T. L., & Politis, D. N. (2010). Banded and tapered estimates for #' autocovariance matrices and the linear process bootstrap. \emph{Journal of #' Time Series Analysis}, 31(6), 471-482. #' @keywords ts #' @examples #' #' Acf(wineind) #' Pacf(wineind) #' \dontrun{ #' taperedacf(wineind, nsim=50) #' taperedpacf(wineind, nsim=50) #' } #' #' @export Acf <- function(x, lag.max = NULL, type = c("correlation", "covariance", "partial"), plot = TRUE, na.action = na.contiguous, demean=TRUE, ...) { type <- match.arg(type) # Set maximum lag nseries <- NCOL(x) if (is.null(lag.max)) { lag.max <- as.integer(max( floor(10 * (log10(NROW(x)) - log10(nseries))), 2 * frequency(x) )) } acf.out <- stats::acf( x, plot = FALSE, lag.max = lag.max, type = type, na.action = na.action, demean = demean ) acf.out$tsp <- tsp(x) acf.out$periods <- attributes(x)$msts acf.out$series <- deparse(substitute(x)) # Make lags in integer units nlags <- dim(acf.out$lag)[1] if (type == "partial") { acf.out$lag[, , ] <- 1:(nlags) } else { acf.out$lag[, , ] <- 0:(nlags - 1) } # Plot if required if (plot) { plot.out <- acf.out # Hide 0 lag if autocorrelations if (type == "correlation") { for (i in 1:NCOL(x)) { plot.out$lag[1, i, i] <- 1 plot.out$acf[1, i, i] <- 0 } } if (nseries > 1) { plot(plot.out, ...) } else { # Check if there is a ylim input input_list <- as.list(substitute(list(...))) ylimarg <- is.element("ylim", names(input_list)) if (ylimarg) { plot(plot.out, xaxt = "n", ...) } else { ylim <- c(-1, 1) * 3 / sqrt(length(x)) ylim <- range(ylim, plot.out$acf) plot(plot.out, ylim = ylim, xaxt = "n", ...) } # Make nice horizontal axis if (is.element("msts", class(x))) { seasonalaxis(attributes(x)$msts, nlags, type = "acf") } else { seasonalaxis(frequency(x), nlags, type = "acf") } if (type == "covariance") { axis(at = 0, side = 1) } } return(invisible(acf.out)) } else { return(acf.out) } } # Make nice horizontal axis with ticks at seasonal lags # Return tick points if breaks=TRUE seasonalaxis <- function(frequency, nlags, type, plot=TRUE) { # List of unlabelled tick points out2 <- NULL # Check for non-seasonal data if (length(frequency) == 1) { # Compute number of seasonal periods np <- trunc(nlags / frequency) evenfreq <- (frequency %% 2L) == 0L # Defaults for labelled tick points if (type == "acf") { out <- pretty(1:nlags) } else { out <- pretty(-nlags:nlags) } if (frequency == 1) { if (type == "acf" && nlags <= 16) { out <- 1:nlags } else if (type == "ccf" && nlags <= 8) { out <- (-nlags:nlags) } else { if (nlags <= 30 && type == "acf") { out2 <- 1:nlags } else if (nlags <= 15 && type == "ccf") { out2 <- (-nlags:nlags) } if (!is.null(out2)) { out <- pretty(out2) } } } else if (frequency > 1 && ((type == "acf" && np >= 2L) || (type == "ccf" && np >= 1L))) { if (type == "acf" && nlags <= 40) { out <- frequency * (1:np) out2 <- 1:nlags # Add half-years if (nlags <= 30 && evenfreq && np <= 3) { out <- c(out, frequency * ((1:np) - 0.5)) } } else if (type == "ccf" && nlags <= 20) { out <- frequency * (-np:np) out2 <- (-nlags:nlags) # Add half-years if (nlags <= 15 && evenfreq && np <= 3) { out <- c(out, frequency * ((-np:np) + 0.5)) } } else if (np < (12 - 4 * (type == "ccf"))) { out <- frequency * (-np:np) } } } else { # Determine which frequency to show np <- trunc(nlags / frequency) frequency <- frequency[which(np <= 16)] if (length(frequency) > 0L) { frequency <- min(frequency) } else { frequency <- 1 } out <- seasonalaxis(frequency, nlags, type, plot = FALSE) } if (plot) { axis(1, at = out) if (!is.null(out2)) { 
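# Draw the unlabelled lags as shorter minor tick marks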
axis(1, at = out2, tcl = -0.2, labels = FALSE) } } else { return(out) } } #' @rdname Acf #' @export Pacf <- function(x, lag.max=NULL, plot = TRUE, na.action = na.contiguous, demean=TRUE, ...) { object <- Acf( x, lag.max = lag.max, type = "partial", na.action = na.action, demean = demean, plot = FALSE ) object$series <- deparse(substitute(x)) # Plot if required if (plot) { nlags <- dim(object$lag)[1] plot.out <- object # Check if there is a ylim input input_list <- as.list(substitute(list(...))) ylimarg <- is.element("ylim", names(input_list)) if (ylimarg) { plot(plot.out, xaxt = "n", ...) } else { ylim <- c(-1, 1) * 3 / sqrt(length(x)) ylim <- range(ylim, plot.out$acf) plot(plot.out, ylim = ylim, xaxt = "n", ...) } # Make nice horizontal axis if (is.element("msts", class(x))) { seasonalaxis(attributes(x)$msts, nlags, type = "acf") } else { seasonalaxis(frequency(x), nlags, type = "acf") } return(invisible(object)) } else { return(object) } } #' @rdname Acf #' @export Ccf <- function(x, y, lag.max=NULL, type=c("correlation", "covariance"), plot=TRUE, na.action=na.contiguous, ...) { type <- match.arg(type) if (is.null(lag.max)) { lag.max <- as.integer(max(floor(10 * log10(NROW(x))), 2 * frequency(x))) } ccf.out <- stats::ccf( x, y, plot = FALSE, type = type, lag.max = lag.max, na.action = na.action ) # Make lags in integer units nlags <- (dim(ccf.out$lag)[1] - 1) / 2 ccf.out$lag[, 1, 1] <- -nlags:nlags # Plot if required if (plot) { vnames <- c(deparse(substitute(x))[1L], deparse(substitute(y))[1L]) ccf.out$snames <- paste(vnames, collapse = " & ") plot(ccf.out, ylab = "CCF", xaxt = "n", ...) seasonalaxis(frequency(x), nlags, type = "ccf") return(invisible(ccf.out)) } else { return(ccf.out) } } kappa <- function(x) { k <- rep(0, length(x)) x <- abs(x) k[x <= 1] <- 1 k[x > 1 & x <= 2] <- 2 - x[x > 1 & x <= 2] return(k) } # McMurray-Politis estimate of ACF wacf <- function(x, lag.max = length(x) - 1) { n <- length(x) lag.max <- min(lag.max, n - 1) if (lag.max < 0) { stop("'lag.max' must be at least 0") } # Standard estimator acfest <- stats::acf( c(x), lag.max = lag.max, plot = FALSE, na.action = na.contiguous ) acfest$series <- deparse(substitute(x)) # Taper estimates s <- 1:length(acfest$acf[, , 1]) upper <- 2 * sqrt(log(n, 10) / n) ac <- abs(acfest$acf[, , 1]) # Find l: ac < upper for 5 consecutive lags j <- (ac < upper) l <- 0 k <- 1 N <- length(j) - 4 while (l < 1 && k <= N) { if (all(j[k:(k + 4)])) { l <- k } else { k <- k + 1 } } acfest$acf[, , 1] <- acfest$acf[, , 1] * kappa(s / l) # End of Tapering # Now do some shrinkage towards white noise using eigenvalues # Construct covariance matrix gamma <- acfest$acf[, , 1] s <- length(gamma) Gamma <- matrix(1, s, s) d <- row(Gamma) - col(Gamma) for (i in 1:(s - 1)) Gamma[d == i | d == (-i)] <- gamma[i + 1] # Compute eigenvalue decomposition ei <- eigen(Gamma) # Shrink eigenvalues d <- pmax(ei$values, 20 / n) # Construct new covariance matrix Gamma2 <- ei$vectors %*% diag(d) %*% t(ei$vectors) Gamma2 <- Gamma2 / mean(d) # Estimate new ACF d <- row(Gamma2) - col(Gamma2) for (i in 2:s) gamma[i] <- mean(Gamma2[d == (i - 1)]) acfest$acf[, , 1] <- gamma ############### end of shrinkage return(acfest) } # Find tapered PACF using LD recursions wpacf <- function(x, lag.max=length(x) - 1) { # Compute pacf as usual, just to set up structure out <- Pacf(x, lag.max = lag.max, plot = FALSE) # Compute acf using tapered estimate acvf <- wacf(x, lag.max = lag.max)$acf[, , 1] # Durbin-Levinson recursions # Modified from 
http://faculty.washington.edu/dbp/s519/R-code/LD-recursions.R p <- length(acvf) - 1 phis <- acvf[2] / acvf[1] pev <- rep(acvf[1], p + 1) pacf <- rep(phis, p) pev[2] <- pev[1] * (1 - phis ^ 2) if (p > 1) { for (k in 2:p) { old.phis <- phis phis <- rep(0, k) ## compute kth order pacf (reflection coefficient) phis[k] <- (acvf[k + 1] - sum(old.phis * acvf[k:2])) / pev[k] phis[1:(k - 1)] <- old.phis - phis[k] * rev(old.phis) pacf[k] <- phis[k] pev[k + 1] <- pev[k] * (1 - phis[k] ^ 2) # if(abs(pacf[k]) > 1) # warning("PACF larger than 1 in absolute value") } } out$acf[, , 1] <- pacf return(out) } # Function to produce new style plot of ACF or PACF with CI # x = time series #' @rdname Acf #' @export taperedacf <- function(x, lag.max=NULL, type=c("correlation", "partial"), plot=TRUE, calc.ci=TRUE, level=95, nsim=100, ...) { type <- match.arg(type) if (is.null(lag.max)) { lag.max <- max(floor(20 * log10(length(x))), 4 * frequency(x)) } lag <- min(lag.max, length(x) - 1) if (type == "correlation") { z <- wacf(x, )$acf[2:(lag + 1), , 1] } else { z <- wpacf(x, )$acf[1:lag, , 1] } out <- list(z = z, lag = lag, type = type, x = x) if (calc.ci) { # Get confidence intervals for plots bootsim <- lpb(x, nsim = nsim) s1 <- matrix(0, nrow = lag, ncol = nsim) if (type == "correlation") { for (i in 1:nsim) s1[, i] <- wacf(bootsim[, i])$acf[2:(lag + 1), , 1] } else { for (i in 1:nsim) s1[, i] <- wpacf(bootsim[, i])$acf[1:lag, , 1] } prob <- (100 - level) / 200 out$upper <- apply(s1, 1, quantile, prob = 1 - prob) out$lower <- apply(s1, 1, quantile, prob = prob) } out <- structure(out, class = "mpacf") if (!plot) { return(out) } else { plot(out, ...) return(invisible(out)) } return(out) } #' @rdname Acf #' @export taperedpacf <- function(x, ...) { taperedacf(x, type = "partial", ...) } plot.mpacf <- function(object, xlim=NULL, ylim=NULL, xlab="Lag", ylab="", ...) { lagx <- 1:object$lag if (is.null(xlim)) { xlim <- c(1, object$lag) } if (is.null(ylim)) { ylim <- range(object$z, object$upper, object$lower) } if (ylab == "") { ylab <- ifelse(object$type == "partial", "PACF", "ACF") } plot( lagx, object$z, type = "n", xlim = xlim, ylim = ylim, xlab = xlab, ylab = ylab, xaxt = "n", ... 
) grid(col = gray(.80), nx = NA, ny = NULL, lty = 1) abline(h = 0, col = gray(.4)) if (frequency(object$x) > 1) { axis(1, at = (0:100) * frequency(object$x)) for (i in 1:100) abline(v = (i - 1) * frequency(object$x), lty = 1, col = gray(0.80)) } else { axis(1) grid(col = gray(.80), ny = NA, lty = 1) } if (!is.null(object$lower)) { for (j in 1:object$lag) { polygon( lagx[j] + c(-0.55, 0.55, 0.55, -0.55), c(rep(object$lower[j], 2), rep(object$upper[j], 2)), col = gray(0.60), border = FALSE ) } # polygon(c(lagx,rev(lagx)),c(object$lower,rev(object$upper)),col=gray(.60),border=FALSE) } lines(lagx, object$z, lwd = 1.5) j <- (object$lower < 0 & object$upper > 0) points(lagx[j], object$z[j], pch = 1, cex = 0.5) points(lagx[!j], object$z[!j], pch = 19) } #' @rdname is.ets #' @export is.acf <- function(x) { inherits(x, "acf") } forecast/R/ggplot.R0000644000176200001440000023514013617634745013725 0ustar liggesusers#' @inherit ggplot2::autolayer #' @export autolayer <- function(object, ...){ UseMethod("autolayer") } #' @importFrom ggplot2 autoplot #' @export ggplot2::autoplot ggAddExtras <- function(xlab=NA, ylab=NA, main=NA) { dots <- eval.parent(quote(list(...))) extras <- list() if ("xlab" %in% names(dots) || is.null(xlab) || any(!is.na(xlab))) { if ("xlab" %in% names(dots)) { extras[[length(extras) + 1]] <- ggplot2::xlab(dots$xlab) } else { extras[[length(extras) + 1]] <- ggplot2::xlab(paste0(xlab[!is.na(xlab)], collapse = " ")) } } if ("ylab" %in% names(dots) || is.null(ylab) || any(!is.na(ylab))) { if ("ylab" %in% names(dots)) { extras[[length(extras) + 1]] <- ggplot2::ylab(dots$ylab) } else { extras[[length(extras) + 1]] <- ggplot2::ylab(paste0(ylab[!is.na(ylab)], collapse = " ")) } } if ("main" %in% names(dots) || is.null(main) || any(!is.na(main))) { if ("main" %in% names(dots)) { extras[[length(extras) + 1]] <- ggplot2::ggtitle(dots$main) } else { extras[[length(extras) + 1]] <- ggplot2::ggtitle(paste0(main[!is.na(main)], collapse = " ")) } } if ("xlim" %in% names(dots)) { extras[[length(extras) + 1]] <- ggplot2::xlim(dots$xlim) } if ("ylim" %in% names(dots)) { extras[[length(extras) + 1]] <- ggplot2::ylim(dots$ylim) } return(extras) } ggtsbreaks <- function(x) { # Make x axis contain only whole numbers (e.g., years) return(unique(round(pretty(floor(x[1]):ceiling(x[2]))))) } #' ggplot (Partial) Autocorrelation and Cross-Correlation Function Estimation #' and Plotting #' #' Produces a ggplot object of their equivalent Acf, Pacf, Ccf, taperedacf and #' taperedpacf functions. #' #' If \code{autoplot} is given an \code{acf} or \code{mpacf} object, then an #' appropriate ggplot object will be created. #' #' ggtaperedpacf #' @param object Object of class \dQuote{\code{acf}}. #' @param x a univariate or multivariate (not Ccf) numeric time series object #' or a numeric vector or matrix. #' @param y a univariate numeric time series object or a numeric vector. #' @param ci coverage probability for confidence interval. Plotting of the #' confidence interval is suppressed if ci is zero or negative. #' @param lag.max maximum lag at which to calculate the acf. #' @param type character string giving the type of acf to be computed. Allowed #' values are "\code{correlation}" (the default), \dQuote{\code{covariance}} or #' \dQuote{\code{partial}}. #' @param plot logical. If \code{TRUE} (the default) the resulting ACF, PACF or #' CCF is plotted. #' @param na.action function to handle missing values. Default is #' \code{\link[stats]{na.contiguous}}. 
Useful alternatives are #' \code{\link[stats]{na.pass}} and \code{\link{na.interp}}. #' @param demean Should covariances be about the sample means? #' @param calc.ci If \code{TRUE}, confidence intervals for the ACF/PACF #' estimates are calculated. #' @param level Percentage level used for the confidence intervals. #' @param nsim The number of bootstrap samples used in estimating the #' confidence intervals. #' @param ... Other plotting parameters to affect the plot. #' @return A ggplot object. #' @author Mitchell O'Hara-Wild #' @seealso \code{\link[stats]{plot.acf}}, \code{\link{Acf}}, #' \code{\link[stats]{acf}}, \code{\link{taperedacf}} #' @examples #' #' library(ggplot2) #' ggAcf(wineind) #' wineind %>% Acf(plot=FALSE) %>% autoplot #' \dontrun{ #' wineind %>% taperedacf(plot=FALSE) %>% autoplot #' ggtaperedacf(wineind) #' ggtaperedpacf(wineind)} #' ggCcf(mdeaths, fdeaths) #' #' @export autoplot.acf <- function(object, ci=0.95, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. = FALSE) } else { if (!inherits(object, "acf")) { stop("autoplot.acf requires a acf object, use object=object") } acf <- `dimnames<-`(object$acf, list(NULL, object$snames, object$snames)) lag <- `dimnames<-`(object$lag, list(NULL, object$snames, object$snames)) data <- as.data.frame.table(acf)[-1] data$lag <- as.numeric(lag) if (object$type == "correlation") { data <- data[data$lag != 0, ] } # Initialise ggplot object p <- ggplot2::ggplot( ggplot2::aes_(x = ~lag, xend = ~lag, y = 0, yend = ~Freq), data = data ) p <- p + ggplot2::geom_hline(yintercept = 0) # Add data p <- p + ggplot2::geom_segment(lineend = "butt", ...) # Add ci lines (assuming white noise input) ci <- qnorm((1 + ci) / 2) / sqrt(object$n.used) p <- p + ggplot2::geom_hline(yintercept = c(-ci, ci), colour = "blue", linetype = "dashed") # Add facets if needed if(any(dim(object$acf)[2:3] != c(1,1))){ p <- p + ggplot2::facet_grid( as.formula(paste0(colnames(data)[1:2], collapse = "~")) ) } # Prepare graph labels if (!is.null(object$ccf)) { ylab <- "CCF" ticktype <- "ccf" main <- paste("Series:", object$snames) nlags <- round(dim(object$lag)[1] / 2) } else if (object$type == "partial") { ylab <- "PACF" ticktype <- "acf" main <- paste("Series:", object$series) nlags <- dim(object$lag)[1] } else if (object$type == "correlation") { ylab <- "ACF" ticktype <- "acf" main <- paste("Series:", object$series) nlags <- dim(object$lag)[1] } else { ylab <- NULL } # Add seasonal x-axis # Change ticks to be seasonal and prepare default title if (!is.null(object$tsp)) { freq <- object$tsp[3] } else { freq <- 1 } if (!is.null(object$periods)) { periods <- object$periods periods <- periods[periods != freq] minorbreaks <- periods * seq(-20:20) } else { minorbreaks <- NULL } p <- p + ggplot2::scale_x_continuous(breaks = seasonalaxis( freq, nlags, type = ticktype, plot = FALSE ), minor_breaks = minorbreaks) p <- p + ggAddExtras(ylab = ylab, xlab = "Lag", main = main) return(p) } } #' @rdname autoplot.acf #' @export ggAcf <- function(x, lag.max = NULL, type = c("correlation", "covariance", "partial"), plot = TRUE, na.action = na.contiguous, demean=TRUE, ...) 
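# Illustrative usage (a sketch, not run): with plot = FALSE the acf object is
# returned for later rendering, e.g.
#   acfobj <- ggAcf(wineind, lag.max = 48, plot = FALSE)
#   autoplot(acfobj)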
{ cl <- match.call() if (plot) { cl$plot <- FALSE } cl[[1]] <- quote(Acf) object <- eval.parent(cl) object$tsp <- tsp(x) object$periods <- attributes(x)$msts if (plot) { return(autoplot(object, ...)) } else { return(object) } } #' @rdname autoplot.acf #' @export ggPacf <- function(x, lag.max = NULL, plot = TRUE, na.action = na.contiguous, demean=TRUE, ...) { object <- Acf(x, lag.max = lag.max, type = "partial", na.action = na.action, demean = demean, plot = FALSE) object$series <- deparse(substitute(x)) if (plot) { return(autoplot(object, ...)) } else { return(object) } } #' @rdname autoplot.acf #' @export ggCcf <- function(x, y, lag.max=NULL, type=c("correlation", "covariance"), plot=TRUE, na.action=na.contiguous, ...) { cl <- match.call() if (plot) { cl$plot <- FALSE } cl[[1]] <- quote(Ccf) object <- eval.parent(cl) object$snames <- paste(deparse(substitute(x)), "&", deparse(substitute(y))) object$ccf <- TRUE if (plot) { return(autoplot(object, ...)) } else { return(object) } } #' @rdname autoplot.acf #' @export autoplot.mpacf <- function(object, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. = FALSE) } else { if (!inherits(object, "mpacf")) { stop("autoplot.mpacf requires a mpacf object, use object=object") } if (!is.null(object$lower)) { data <- data.frame(Lag = 1:object$lag, z = object$z, sig = (object$lower < 0 & object$upper > 0)) cidata <- data.frame(Lag = rep(1:object$lag, each = 2) + c(-0.5, 0.5), z = rep(object$z, each = 2), upper = rep(object$upper, each = 2), lower = rep(object$lower, each = 2)) plotpi <- TRUE } else { data <- data.frame(Lag = 1:object$lag, z = object$z) plotpi <- FALSE } # Initialise ggplot object p <- ggplot2::ggplot() p <- p + ggplot2::geom_hline(ggplot2::aes(yintercept = 0), size = 0.2) # Add data if (plotpi) { p <- p + ggplot2::geom_ribbon(ggplot2::aes_(x = ~Lag, ymin = ~lower, ymax = ~upper), data = cidata, fill = "grey50") } p <- p + ggplot2::geom_line(ggplot2::aes_(x = ~Lag, y = ~z), data = data) if (plotpi) { p <- p + ggplot2::geom_point(ggplot2::aes_(x = ~Lag, y = ~z, colour = ~sig), data = data) } # Change ticks to be seasonal freq <- frequency(object$x) msts <- is.element("msts", class(object$x)) # Add seasonal x-axis if (msts) { periods <- attributes(object$x)$msts periods <- periods[periods != freq] minorbreaks <- periods * seq(-20:20) } else { minorbreaks <- NULL } p <- p + ggplot2::scale_x_continuous( breaks = seasonalaxis(frequency(object$x), length(data$Lag), type = "acf", plot = FALSE), minor_breaks = minorbreaks ) if (object$type == "partial") { ylab <- "PACF" } else if (object$type == "correlation") { ylab <- "ACF" } p <- p + ggAddExtras(ylab = ylab) return(p) } } #' @rdname autoplot.acf #' @export ggtaperedacf <- function(x, lag.max=NULL, type=c("correlation", "partial"), plot=TRUE, calc.ci=TRUE, level=95, nsim=100, ...) { cl <- match.call() if (plot) { cl$plot <- FALSE } cl[[1]] <- quote(taperedacf) object <- eval.parent(cl) if (plot) { return(autoplot(object, ...)) } else { return(object) } } #' @rdname autoplot.acf #' @export ggtaperedpacf <- function(x, ...) { ggtaperedacf(x, type = "partial", ...) } #' @rdname plot.Arima #' @export autoplot.Arima <- function(object, type = c("both", "ar", "ma"), ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. 
= FALSE) } else { if (is.Arima(object)) { # Detect type type <- match.arg(type) q <- p <- 0 if (length(object$model$phi) > 0) { test <- abs(object$model$phi) > 1e-09 if (any(test)) { p <- max(which(test)) } } if (length(object$model$theta) > 0) { test <- abs(object$model$theta) > 1e-09 if (any(test)) { q <- max(which(test)) } } if (type == "both") { type <- c("ar", "ma") } } else if (inherits(object, "ar")) { type <- "ar" p <- length(arroots(object)$roots) q <- 0 } else { stop("autoplot.Arima requires an Arima object") } # Remove NULL type type <- intersect(type, c("ar", "ma")[c(p > 0, q > 0)]) # Prepare data arData <- maData <- NULL allRoots <- data.frame(roots = numeric(0), type = character(0)) if ("ar" %in% type && p > 0) { arData <- arroots(object) allRoots <- rbind(allRoots, data.frame(roots = arData$roots, type = arData$type)) } if ("ma" %in% type && q > 0) { maData <- maroots(object) allRoots <- rbind(allRoots, data.frame(roots = maData$roots, type = maData$type)) } allRoots$Real <- Re(1 / allRoots$roots) allRoots$Imaginary <- Im(1 / allRoots$roots) allRoots$UnitCircle <- factor(ifelse((abs(allRoots$roots) > 1), "Within", "Outside")) # Initialise general ggplot object p <- ggplot2::ggplot(ggplot2::aes_(x = ~Real, y = ~Imaginary, colour = ~UnitCircle), data = allRoots) p <- p + ggplot2::coord_fixed(ratio = 1) p <- p + ggplot2::annotate( "path", x = cos(seq(0, 2 * pi, length.out = 100)), y = sin(seq(0, 2 * pi, length.out = 100)) ) p <- p + ggplot2::geom_vline(xintercept = 0) p <- p + ggplot2::geom_hline(yintercept = 0) p <- p + ggAddExtras(xlab = "Real", ylab = "Imaginary") if (NROW(allRoots) == 0) { return(p + ggAddExtras(main = "No AR or MA roots")) } p <- p + ggplot2::geom_point(size = 3) if (length(type) == 1) { p <- p + ggAddExtras(main = paste("Inverse", toupper(type), "roots")) } else { p <- p + ggplot2::facet_wrap(~ type, labeller = function(labels) lapply(labels, function(x) paste("Inverse", as.character(x), "roots"))) } } return(p) } #' @rdname plot.Arima #' @export autoplot.ar <- function(object, ...) { autoplot.Arima(object, ...) } #' @rdname autoplot.seas #' @export autoplot.decomposed.ts <- function(object, labels=NULL, range.bars = NULL, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. 
= FALSE) } else { if (!inherits(object, "decomposed.ts")) { stop("autoplot.decomposed.ts requires a decomposed.ts object") } if (is.null(labels)) { labels <- c("seasonal", "trend", "remainder") } cn <- c("data", labels) data <- data.frame( datetime = rep(time(object$x), 4), y = c(object$x, object$seasonal, object$trend, object$random), parts = factor(rep(cn, each = NROW(object$x)), levels = cn) ) # Initialise ggplot object p <- ggplot2::ggplot(ggplot2::aes_(x = ~datetime, y = ~y), data = data) # Add data int <- as.numeric(object$type == "multiplicative") p <- p + ggplot2::geom_line(ggplot2::aes_(x = ~datetime, y = ~y), data = subset(data, data$parts != cn[4]), na.rm = TRUE) p <- p + ggplot2::geom_segment( ggplot2::aes_(x = ~datetime, xend = ~datetime, y = int, yend = ~y), data = subset(data, data$parts == cn[4]), lineend = "butt", na.rm = TRUE ) p <- p + ggplot2::facet_grid("parts ~ .", scales = "free_y", switch = "y") p <- p + ggplot2::geom_hline(ggplot2::aes_(yintercept = ~y), data = data.frame(y = int, parts = cn[4])) if (is.null(range.bars)) { range.bars <- object$type == "additive" } if (range.bars) { yranges <- vapply(split(data$y, data$parts), function(x) range(x, na.rm = TRUE), numeric(2)) xranges <- range(data$datetime) barmid <- apply(yranges, 2, mean) barlength <- min(apply(yranges, 2, diff)) barwidth <- (1 / 64) * diff(xranges) barpos <- data.frame( left = xranges[2] + barwidth, right = xranges[2] + barwidth * 2, top = barmid + barlength / 2, bottom = barmid - barlength / 2, parts = colnames(yranges), datetime = xranges[2], y = barmid ) p <- p + ggplot2::geom_rect(ggplot2::aes_(xmin = ~left, xmax = ~right, ymax = ~top, ymin = ~bottom), data = barpos, fill = "gray75", colour = "black", size = 1 / 3) } # Add axis labels p <- p + ggAddExtras( main = paste("Decomposition of", object$type, "time series"), xlab = "Time", ylab = "" ) # Make x axis contain only whole numbers (e.g., years) p <- p + ggplot2::scale_x_continuous(breaks = unique(round(pretty(data$datetime)))) return(p) } } #' @rdname plot.ets #' @export autoplot.ets <- function(object, range.bars = NULL, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. 
= FALSE) } else { if (!is.ets(object)) { stop("autoplot.ets requires an ets object, use object=object") } names <- c(y = "observed", l = "level", b = "slope", s1 = "season") data <- cbind(object$x, object$states[, colnames(object$states) %in% names(names)]) cn <- c("y", c(colnames(object$states))) colnames(data) <- cn <- names[stats::na.exclude(match(cn, names(names)))] # Convert to longform data <- data.frame( datetime = rep(time(data), NCOL(data)), y = c(data), parts = factor(rep(cn, each = NROW(data)), levels = cn) ) # Initialise ggplot object p <- ggplot2::ggplot(ggplot2::aes_(x = ~datetime, y = ~y), data = data, ylab = "") # Add data p <- p + ggplot2::geom_line(na.rm = TRUE) p <- p + ggplot2::facet_grid(parts ~ ., scales = "free_y", switch = "y") if (is.null(range.bars)) { range.bars <- is.null(object$lambda) } if (range.bars) { yranges <- vapply(split(data$y, data$parts), function(x) range(x, na.rm = TRUE), numeric(2)) xranges <- range(data$datetime) barmid <- apply(yranges, 2, mean) barlength <- min(apply(yranges, 2, diff)) barwidth <- (1 / 64) * diff(xranges) barpos <- data.frame( left = xranges[2] + barwidth, right = xranges[2] + barwidth * 2, top = barmid + barlength / 2, bottom = barmid - barlength / 2, parts = colnames(yranges), datetime = xranges[2], y = barmid ) p <- p + ggplot2::geom_rect(ggplot2::aes_(xmin = ~left, xmax = ~right, ymax = ~top, ymin = ~bottom), data = barpos, fill = "gray75", colour = "black", size = 1 / 3) } p <- p + ggAddExtras(xlab = NULL, ylab = "", main = paste("Components of", object$method, "method")) return(p) } } #' @rdname plot.bats #' @export autoplot.tbats <- function(object, range.bars = FALSE, ...) { cl <- match.call() cl[[1]] <- quote(autoplot.bats) eval.parent(cl) } #' @rdname plot.bats #' @export autoplot.bats <- function(object, range.bars = FALSE, ...) { data <- tbats.components(object) cn <- colnames(data) # Convert to longform data <- data.frame( datetime = rep(time(data), NCOL(data)), y = c(data), parts = factor(rep(cn, each = NROW(data)), levels = cn) ) # Initialise ggplot object p <- ggplot2::ggplot(ggplot2::aes_(x = ~datetime, y = ~y), data = data, ylab = "") # Add data p <- p + ggplot2::geom_line(na.rm = TRUE) p <- p + ggplot2::facet_grid(parts ~ ., scales = "free_y", switch = "y") if (range.bars) { yranges <- vapply(split(data$y, data$parts), function(x) range(x, na.rm = TRUE), numeric(2)) xranges <- range(data$datetime) barmid <- apply(yranges, 2, mean) barlength <- min(apply(yranges, 2, diff)) barwidth <- (1 / 64) * diff(xranges) barpos <- data.frame( left = xranges[2] + barwidth, right = xranges[2] + barwidth * 2, top = barmid + barlength / 2, bottom = barmid - barlength / 2, parts = colnames(yranges), datetime = xranges[2], y = barmid ) p <- p + ggplot2::geom_rect(ggplot2::aes_(xmin = ~left, xmax = ~right, ymax = ~top, ymin = ~bottom), data = barpos, fill = "gray75", colour = "black", size = 1 / 3) } p <- p + ggAddExtras(xlab = NULL, ylab = "", main = paste("Components of", object$method, "method")) return(p) } #' @rdname plot.forecast #' @export autoplot.forecast <- function(object, include, PI=TRUE, shadecols=c("#596DD5", "#D5DBFF"), fcol="#0000AA", flwd=0.5, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. 
= FALSE)
  } else {
    if (!is.forecast(object)) {
      stop("autoplot.forecast requires a forecast object, use object=object")
    }
    if (is.null(object$lower) || is.null(object$upper) || is.null(object$level)) {
      PI <- FALSE
    } else if (!is.finite(max(object$upper))) {
      PI <- FALSE
    }
    if (!is.null(object$model$terms) && !is.null(object$model$model)) {
      # Initialise original dataset
      mt <- object$model$terms
      if (!is.null(object$series)) {
        yvar <- object$series
      } else {
        yvar <- deparse(mt[[2]])
      }
      # Perhaps a better way to do this
      xvar <- attr(mt, "term.labels")
      vars <- c(yvar = yvar, xvar = xvar)
      data <- object$model$model
      colnames(data) <- names(vars)[match(colnames(data), vars)]
      if (!is.null(object$model$lambda)) {
        data$yvar <- InvBoxCox(data$yvar, object$model$lambda)
      }
    } else {
      if (!is.null(object$x)) {
        data <- data.frame(yvar = c(object$x))
      } else if (!is.null(object$residuals) && !is.null(object$fitted)) {
        data <- data.frame(yvar = c(object$residuals + object$fitted))
      } else {
        stop("Could not find data")
      }
      if (!is.null(object$series)) {
        vars <- c(yvar = object$series)
      } else if (!is.null(object$model$call)) {
        vars <- c(yvar = deparse(object$model$call$y))
        if (vars == "object") {
          vars <- c(yvar = "y")
        }
      } else {
        vars <- c(yvar = "y")
      }
    }
    # Initialise ggplot object
    p <- ggplot2::ggplot()
    # Cross sectional forecasts
    if (!is.element("ts", class(object$mean))) {
      if (length(xvar) > 1) {
        stop("Forecast plot for regression models only available for a single predictor")
      }
      if (NCOL(object$newdata) == 1) {
        # Make sure column has correct name
        colnames(object$newdata) <- xvar
      }
      flwd <- 2 * flwd # Scale for points
      # Data points
      p <- p + ggplot2::geom_point(ggplot2::aes_(x = ~xvar, y = ~yvar), data = data)
      p <- p + ggplot2::labs(y = vars["yvar"], x = vars["xvar"])
      # Forecasted intervals
      if (PI) {
        levels <- NROW(object$level)
        interval <- data.frame(xpred = rep(object$newdata[[1]], levels), lower = c(object$lower), upper = c(object$upper), level = rep(object$level, each = NROW(object$newdata[[1]])))
        interval <- interval[order(interval$level, decreasing = TRUE), ] # Must be ordered for gg z-index
        p <- p + ggplot2::geom_linerange(ggplot2::aes_(x = ~xpred, ymin = ~lower, ymax = ~upper, colour = ~level), data = interval, size = flwd)
        if (length(object$level) <= 5) {
          p <- p + ggplot2::scale_colour_gradientn(breaks = object$level, colours = shadecols, guide = "legend")
        } else {
          p <- p + ggplot2::scale_colour_gradientn(colours = shadecols, guide = "colourbar")
        }
      }
      # Forecasted points
      predicted <- data.frame(object$newdata, object$mean)
      colnames(predicted) <- c("xpred", "ypred")
      p <- p + ggplot2::geom_point(ggplot2::aes_(x = ~xpred, y = ~ypred), data = predicted, color = fcol, size = flwd)
      # Line of best fit
      coef <- data.frame(int = 0, m = 0)
      # match() returns NA (not 0) when the model has no intercept
      i <- match("(Intercept)", names(object$model$coefficients))
      if (!is.na(i)) {
        coef$int <- object$model$coefficients[i]
        if (NROW(object$model$coefficients) == 2) {
          coef$m <- object$model$coefficients[-i]
        }
      } else {
        if (NROW(object$model$coefficients) == 1) {
          coef$m <- object$model$coefficients
        }
      }
      p <- p + ggplot2::geom_abline(intercept = coef$int, slope = coef$m)
    } else { # Time series objects (assumed)
      if (!missing(shadecols)) {
        warning(
          "The `shadecols` argument is deprecated for time series forecasts. Interval shading is now done automatically based on the level and `fcol`.",
          call.
= FALSE) } # Data points if (!is.null(time(object$x))) { timex <- time(object$x) } else if (!is.null(time(object$model$residuals))) { timex <- time(object$model$residuals) } data <- data.frame(yvar = as.numeric(data$yvar), datetime = as.numeric(timex)) if (!missing(include)) { data <- tail(data, include) } p <- p + ggplot2::scale_x_continuous() p <- p + ggplot2::geom_line(ggplot2::aes_(x = ~datetime, y = ~yvar), data = data) + ggplot2::labs(y = vars["yvar"], x = "Time") # Forecasted intervals p <- p + autolayer(object, PI = PI, colour = fcol, size = flwd) # predicted <- data.frame(xvar = time(object$mean), yvar = object$mean) # colnames(predicted) <- c("datetime", "ypred") # if (PI) { # levels <- NROW(object$level) # interval <- data.frame(datetime = rep(predicted$datetime, levels), lower = c(object$lower), upper = c(object$upper), level = rep(object$level, each = NROW(object$mean))) # interval <- interval[order(interval$level, decreasing = TRUE), ] # Must be ordered for gg z-index # p <- p + ggplot2::geom_ribbon(ggplot2::aes_(x = ~datetime, ymin = ~lower, ymax = ~upper, group = ~-level, fill = ~level), data = interval) # if (min(object$level) < 50) { # scalelimit <- c(1, 99) # } # else { # scalelimit <- c(50, 99) # } # if (length(object$level) <= 5) { # p <- p + ggplot2::scale_fill_gradientn(breaks = object$level, colours = shadecols, limit = scalelimit, guide = "legend") # } # else { # p <- p + ggplot2::scale_fill_gradientn(colours = shadecols, limit = scalelimit) # } # # Negative group is a work around for missing z-index # } # # Forecasted points # p <- p + ggplot2::geom_line(ggplot2::aes_(x = ~datetime, y = ~ypred), data = predicted, color = fcol, size = flwd) } p <- p + ggAddExtras(main = paste("Forecasts from ", object$method, sep = "")) return(p) } } #' @rdname plot.mforecast #' @export autoplot.mforecast <- function(object, PI = TRUE, facets = TRUE, colour = FALSE, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. = FALSE) } else { if (!is.mforecast(object)) { stop("autoplot.mforecast requires a mforecast object, use object=object") } if (is.ts(object$forecast[[1]]$mean)) { # ts forecasts p <- autoplot(getResponse(object), facets = facets, colour = colour) + autolayer(object, ...) if (facets) { p <- p + ggplot2::facet_wrap( ~ series, labeller = function(labels) { if (!is.null(object$method)) { lapply(labels, function(x) paste0(as.character(x), "\n", object$method[as.character(x)])) } else { lapply(labels, function(x) paste0(as.character(x))) } }, ncol = 1, scales = "free_y" ) } p <- p + ggAddExtras(ylab = NULL) return(p) } else { # lm forecasts if (!requireNamespace("grid")) { stop("grid is needed for this function to work. Install it via install.packages(\"grid\")", call. 
= FALSE) } K <- length(object$forecast) if (K < 2) { warning("Expected at least two plots but forecast required less.") } # Set up vector arguments if (missing(PI)) { PI <- rep(TRUE, K) } # Set up grid # ncol: Number of columns of plots # nrow: Number of rows needed, calculated from # of cols gridlayout <- matrix(seq(1, K), ncol = 1, nrow = K) grid::grid.newpage() grid::pushViewport(grid::viewport(layout = grid::grid.layout(nrow(gridlayout), ncol(gridlayout)))) for (i in 1:K) { partialfcast <- object$forecast[[i]] partialfcast$model <- mlmsplit(object$model, index = i) matchidx <- as.data.frame(which(gridlayout == i, arr.ind = TRUE)) print( autoplot( structure(partialfcast, class = "forecast"), PI = PI[i], ... ) + ggAddExtras(ylab = names(object$forecast)[i]), vp = grid::viewport( layout.pos.row = matchidx$row, layout.pos.col = matchidx$col ) ) } } } } #' @rdname tsdisplay #' #' @examples #' library(ggplot2) #' ggtsdisplay(USAccDeaths, plot.type="scatter", theme=theme_bw()) #' #' @export ggtsdisplay <- function(x, plot.type=c("partial", "histogram", "scatter", "spectrum"), points=TRUE, smooth=FALSE, lag.max, na.action=na.contiguous, theme=NULL, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. = FALSE) } else if (!requireNamespace("grid", quietly = TRUE)) { stop("grid is needed for this function to work. Install it via install.packages(\"grid\")", call. = FALSE) } else { if (NCOL(x) > 1) { stop("ggtsdisplay is only for univariate time series") } plot.type <- match.arg(plot.type) main <- deparse(substitute(x)) if (!is.ts(x)) { x <- ts(x) } if (missing(lag.max)) { lag.max <- round(min(max(10 * log10(length(x)), 3 * frequency(x)), length(x) / 3)) } dots <- list(...) 
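# The dots are split below: xlab, ylab and main are applied to the time series
# panel, while the remaining arguments are passed on to ggAcf()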
if (is.null(dots$xlab)) { dots$xlab <- "" } if (is.null(dots$ylab)) { dots$ylab <- "" } labs <- match(c("xlab", "ylab", "main"), names(dots), nomatch = 0) # Set up grid for plots gridlayout <- matrix(c(1, 2, 1, 3), nrow = 2) grid::grid.newpage() grid::pushViewport(grid::viewport(layout = grid::grid.layout(nrow(gridlayout), ncol(gridlayout)))) # Add ts plot with points matchidx <- as.data.frame(which(gridlayout == 1, arr.ind = TRUE)) tsplot <- do.call(ggplot2::autoplot, c(object = quote(x), dots[labs])) if (points) { tsplot <- tsplot + ggplot2::geom_point(size = 0.5) } if (smooth) { tsplot <- tsplot + ggplot2::geom_smooth(method = "loess", se = FALSE) } if (is.null(tsplot$labels$title)) { # Add title if missing tsplot <- tsplot + ggplot2::ggtitle(main) } if (!is.null(theme)) { tsplot <- tsplot + theme } print( tsplot, vp = grid::viewport( layout.pos.row = matchidx$row, layout.pos.col = matchidx$col ) ) # Prepare Acf plot acfplot <- do.call(ggAcf, c(x = quote(x), lag.max = lag.max, na.action = na.action, dots[-labs])) + ggplot2::ggtitle(NULL) if (!is.null(theme)) { acfplot <- acfplot + theme } # Prepare last plot (variable) if (plot.type == "partial") { lastplot <- ggPacf(x, lag.max = lag.max, na.action = na.action) + ggplot2::ggtitle(NULL) # Match y-axis acfplotrange <- ggplot2::layer_scales(acfplot)$y$range$range pacfplotrange <- ggplot2::layer_scales(lastplot)$y$range$range yrange <- range(c(acfplotrange, pacfplotrange)) acfplot <- acfplot + ggplot2::ylim(yrange) lastplot <- lastplot + ggplot2::ylim(yrange) } else if (plot.type == "histogram") { lastplot <- gghistogram(x, add.normal = TRUE, add.rug = TRUE) + ggplot2::xlab(main) } else if (plot.type == "scatter") { scatterData <- data.frame(y = x[2:NROW(x)], x = x[1:NROW(x) - 1]) lastplot <- ggplot2::ggplot(ggplot2::aes_(y = ~y, x = ~x), data = scatterData) + ggplot2::geom_point() + ggplot2::labs(x = expression(Y[t - 1]), y = expression(Y[t])) } else if (plot.type == "spectrum") { specData <- spec.ar(x, plot = FALSE) specData <- data.frame(spectrum = specData$spec, frequency = specData$freq) lastplot <- ggplot2::ggplot(ggplot2::aes_(y = ~spectrum, x = ~frequency), data = specData) + ggplot2::geom_line() + ggplot2::scale_y_log10() } if (!is.null(theme)) { lastplot <- lastplot + theme } # Add ACF plot matchidx <- as.data.frame(which(gridlayout == 2, arr.ind = TRUE)) print( acfplot, vp = grid::viewport( layout.pos.row = matchidx$row, layout.pos.col = matchidx$col ) ) # Add last plot matchidx <- as.data.frame(which(gridlayout == 3, arr.ind = TRUE)) print( lastplot, vp = grid::viewport( layout.pos.row = matchidx$row, layout.pos.col = matchidx$col ) ) } } #' Time series lag ggplots #' #' Plots a lag plot using ggplot. #' #' \dQuote{gglagplot} will plot time series against lagged versions of #' themselves. Helps visualising 'auto-dependence' even when auto-correlations #' vanish. #' #' \dQuote{gglagchull} will layer convex hulls of the lags, layered on a single #' plot. This helps visualise the change in 'auto-dependence' as lags increase. #' #' @param x a time series object (type \code{ts}). #' @param lags number of lag plots desired, see arg set.lags. #' @param set.lags vector of positive integers specifying which lags to use. #' @param diag logical indicating if the x=y diagonal should be drawn. #' @param diag.col color to be used for the diagonal if(diag). #' @param do.lines if TRUE, lines will be drawn, otherwise points will be #' drawn. #' @param colour logical indicating if lines should be coloured. 
#' @param continuous Should the colour scheme for years be continuous or #' discrete? #' @param labels logical indicating if labels should be used. #' @param seasonal Should the line colour be based on seasonal characteristics #' (TRUE), or sequential (FALSE). #' @param \dots Not used (for consistency with lag.plot) #' @return None. #' @author Mitchell O'Hara-Wild #' @seealso \code{\link[stats]{lag.plot}} #' @examples #' #' gglagplot(woolyrnq) #' gglagplot(woolyrnq,seasonal=FALSE) #' #' lungDeaths <- cbind(mdeaths, fdeaths) #' gglagplot(lungDeaths, lags=2) #' gglagchull(lungDeaths, lags=6) #' #' @export gglagplot <- function(x, lags=ifelse(frequency(x) > 9, 16, 9), set.lags = 1:lags, diag=TRUE, diag.col="gray", do.lines = TRUE, colour = TRUE, continuous = frequency(x) > 12, labels = FALSE, seasonal = TRUE, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. = FALSE) } else { freq <- frequency(x) if (freq > 1) { linecol <- cycle(x) if (freq > 24) { continuous <- TRUE } } else { seasonal <- FALSE continuous <- TRUE } if (!seasonal) { continuous <- TRUE } # Make sure lags is evaluated tmp <- lags x <- as.matrix(x) # Prepare data for plotting n <- NROW(x) data <- data.frame() for (i in 1:NCOL(x)) { for (lagi in set.lags) { sname <- colnames(x)[i] if (is.null(sname)) { sname <- deparse(match.call()$x) } data <- rbind( data, data.frame( lagnum = 1:(n - lagi), freqcur = ifelse(rep(seasonal, n - lagi), linecol[1:(n - lagi)], 1:(n - lagi)), orig = x[1:(n - lagi), i], lagged = x[(lagi + 1):n, i], lagVal = rep(lagi, n - lagi), series = factor(rep(sname, n - lagi)) ) ) } } if (!continuous) { data$freqcur <- factor(data$freqcur) } # Initialise ggplot object p <- ggplot2::ggplot(ggplot2::aes_(x = ~orig, y = ~lagged), data = data) if (diag) { p <- p + ggplot2::geom_abline(colour = diag.col, linetype = "dashed") } if (labels) { linesize <- 0.25 * (2 - do.lines) } else { linesize <- 0.5 * (2 - do.lines) } plottype <- if (do.lines) { ggplot2::geom_path } else { ggplot2::geom_point } if (colour) { p <- p + plottype(ggplot2::aes_(colour = ~freqcur), size = linesize) } else { p <- p + plottype(size = linesize) } if (labels) { p <- p + ggplot2::geom_text(ggplot2::aes_(label = ~lagnum)) } # Ensure all facets are of size size (if extreme values are excluded in lag specification) if (max(set.lags) > NROW(x) / 2) { axissize <- rbind(aggregate(orig ~ series, data = data, min), aggregate(orig~ series, data = data, max)) axissize <- data.frame(series = rep(axissize$series, length(set.lags)), orig = rep(axissize$orig, length(set.lags)), lagVal = rep(set.lags, each = NCOL(x))) p <- p + ggplot2::geom_blank(ggplot2::aes_(x = ~orig, y = ~orig), data = axissize) } # Facet labellerFn <- function(labels) { if (!is.null(labels$series)) { # Multivariate labels labels$series <- as.character(labels$series) } labels$lagVal <- paste("lag", labels$lagVal) return(labels) } if (NCOL(x) > 1) { p <- p + ggplot2::facet_wrap(~series + lagVal, scales = "free", labeller = labellerFn) } else { p <- p + ggplot2::facet_wrap(~lagVal, labeller = labellerFn) } p <- p + ggplot2::theme(aspect.ratio = 1) if (colour) { if (seasonal) { if (freq == 4L) { title <- "Quarter" } else if (freq == 12L) { title <- "Month" } else if (freq == 7L) { title <- "Day" } else if (freq == 24L) { title <- "Hour" } else { title <- "Season" } } else { title <- "Time" } if (continuous) { p <- p + ggplot2::guides(colour = ggplot2::guide_colourbar(title = title)) } 
else { p <- p + ggplot2::guides(colour = ggplot2::guide_legend(title = title)) } } p <- p + ggAddExtras(ylab = NULL, xlab = NULL) return(p) } } #' @rdname gglagplot #' #' @examples #' gglagchull(woolyrnq) #' #' @export gglagchull <- function(x, lags=ifelse(frequency(x) > 1, min(12, frequency(x)), 4), set.lags = 1:lags, diag=TRUE, diag.col="gray", ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. = FALSE) } else { # Make sure lags is evaluated tmp <- lags x <- as.matrix(x) # Prepare data for plotting n <- NROW(x) data <- data.frame() for (i in 1:NCOL(x)) { for (lag in set.lags) { sname <- colnames(x)[i] if (is.null(sname)) { sname <- deparse(substitute(x)) } data <- rbind(data, data.frame(orig = x[(lag + 1):n, i], lagged = x[1:(n - lag), i], lag = rep(lag, n - lag), series = rep(sname, n - lag))[grDevices::chull(x[(lag + 1):n, i], x[1:(n - lag), i]), ]) } } # Initialise ggplot object p <- ggplot2::ggplot(ggplot2::aes_(x = ~orig, y = ~lagged), data = data) if (diag) { p <- p + ggplot2::geom_abline(colour = diag.col, linetype = "dashed") } p <- p + ggplot2::geom_polygon(ggplot2::aes_(group = ~lag, colour = ~lag, fill = ~lag), alpha = 1 / length(set.lags)) p <- p + ggplot2::guides(colour = ggplot2::guide_colourbar(title = "lag")) p <- p + ggplot2::theme(aspect.ratio = 1) # Facet if (NCOL(x) > 1) { p <- p + ggplot2::facet_wrap(~series, scales = "free") } p <- p + ggAddExtras(ylab = "lagged", xlab = "original") return(p) } } #' Create a seasonal subseries ggplot #' #' Plots a subseries plot using ggplot. Each season is plotted as a separate #' mini time series. The blue lines represent the mean of the observations #' within each season. #' #' The \code{ggmonthplot} function is simply a wrapper for #' \code{ggsubseriesplot} as a convenience for users familiar with #' \code{\link[stats]{monthplot}}. #' #' @param x a time series object (type \code{ts}). #' @param labels A vector of labels to use for each 'season' #' @param times A vector of times for each observation #' @param phase A vector of seasonal components #' @param \dots Not used (for consistency with monthplot) #' @return Returns an object of class \code{ggplot}. #' @author Mitchell O'Hara-Wild #' @seealso \code{\link[stats]{monthplot}} #' @examples #' #' ggsubseriesplot(AirPassengers) #' ggsubseriesplot(woolyrnq) #' #' @export ggmonthplot <- function(x, labels = NULL, times = time(x), phase = cycle(x), ...) { ggsubseriesplot(x, labels, times, phase, ...) } #' @rdname ggmonthplot #' @export ggsubseriesplot <- function(x, labels = NULL, times = time(x), phase = cycle(x), ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. 
= FALSE) } else { if (!inherits(x, "ts")) { stop("ggsubseriesplot requires a ts object, use x=object") } if (round(frequency(x)) <= 1) { stop("Data are not seasonal") } if("1" %in% dimnames(table(table(phase)))[[1]]){ stop(paste("Each season requires at least 2 observations.", ifelse(frequency(x)%%1 == 0, "Your series length may be too short for this graphic.", "This may be caused from specifying a time-series with non-integer frequency.") ) ) } data <- data.frame(y = as.numeric(x), year = trunc(time(x)), season = as.numeric(phase)) seasonwidth <- (max(data$year) - min(data$year)) * 1.05 data$time <- data$season + 0.025 + (data$year - min(data$year)) / seasonwidth avgLines <- stats::aggregate(data$y, by = list(data$season), FUN = mean) colnames(avgLines) <- c("season", "avg") data <- merge(data, avgLines, by = "season") # Initialise ggplot object # p <- ggplot2::ggplot(ggplot2::aes_(x=~interaction(year, season), y=~y, group=~season), data=data, na.rm=TRUE) p <- ggplot2::ggplot( ggplot2::aes_(x = ~time, y = ~y, group = ~season), data = data, na.rm = TRUE ) # Remove vertical break lines p <- p + ggplot2::theme(panel.grid.major.x = ggplot2::element_blank()) # Add data p <- p + ggplot2::geom_line() # Add average lines p <- p + ggplot2::geom_line(ggplot2::aes_(y = ~avg), col = "#0000AA") # Create x-axis labels xfreq <- frequency(x) if (xfreq == 4) { xbreaks <- c("Q1", "Q2", "Q3", "Q4") xlab <- "Quarter" } else if (xfreq == 7) { xbreaks <- c( "Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday" ) xlab <- "Day" } else if (xfreq == 12) { xbreaks <- month.abb xlab <- "Month" } else { xbreaks <- 1:frequency(x) xlab <- "Season" } # X-axis p <- p + ggplot2::scale_x_continuous(breaks = 0.5 + (1:xfreq), labels = xbreaks) # Graph labels p <- p + ggAddExtras(ylab = deparse(substitute(x)), xlab = xlab) return(p) } } #' @rdname seasonplot #' #' @param continuous Should the colour scheme for years be continuous or #' discrete? #' @param polar Plot the graph on seasonal coordinates #' #' @examples #' ggseasonplot(AirPassengers, col=rainbow(12), year.labels=TRUE) #' ggseasonplot(AirPassengers, year.labels=TRUE, continuous=TRUE) #' #' @export ggseasonplot <- function(x, season.labels=NULL, year.labels=FALSE, year.labels.left=FALSE, type=NULL, col=NULL, continuous=FALSE, polar=FALSE, labelgap=0.04, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. 
= FALSE) } if (!inherits(x, "ts")) { stop("autoplot.seasonplot requires a ts object, use x=object") } if (!is.null(type)) { message("Plot types are not yet supported for seasonplot()") } # Check data are seasonal and convert to integer seasonality s <- round(frequency(x)) if (s <= 1) { stop("Data are not seasonal") } # Grab name for plot title xname <- deparse(substitute(x)) tspx <- tsp(x) x <- ts(x, start = tspx[1], frequency = s) data <- data.frame( y = as.numeric(x), year = trunc(round(time(x), 8)), cycle = as.numeric(cycle(x)), time = as.numeric((cycle(x) - 1) / s) ) data$year <- if (continuous) { as.numeric(data$year) } else { as.factor(data$year) } if (polar) { startValues <- data[data$cycle == 1, ] if (data$cycle[1] == 1) { startValues <- startValues[-1, ] } startValues$time <- 1 - .Machine$double.eps levels(startValues$year) <- as.numeric(levels(startValues$year)) - 1 data <- rbind(data, startValues) } # Initialise ggplot object p <- ggplot2::ggplot(ggplot2::aes_(x = ~time, y = ~y, group = ~year, colour = ~year), data = data, na.rm = TRUE) # p <- p + ggplot2::scale_x_continuous() # Add data p <- p + ggplot2::geom_line() if (!is.null(col)) { if(is.numeric(col)){ col <- palette()[(col-1)%%(length(palette())) + 1] } if (continuous) { p <- p + ggplot2::scale_color_gradientn(colours = col) } else { ncol <- length(unique(data$year)) if (length(col) == 1) { p <- p + ggplot2::scale_color_manual(guide = "none", values = rep(col, ncol)) } else { p <- p + ggplot2::scale_color_manual(values = rep(col, ceiling(ncol / length(col)))[1:ncol]) } } } if (year.labels) { yrlab <- stats::aggregate(time ~ year, data = data, FUN = max) yrlab <- cbind(yrlab, offset = labelgap) } if (year.labels.left) { yrlabL <- stats::aggregate(time ~ year, data = data, FUN = min) yrlabL <- cbind(yrlabL, offset = -labelgap) if (year.labels) { yrlab <- rbind(yrlab, yrlabL) } } if (year.labels || year.labels.left) { yrlab <- merge(yrlab, data) yrlab$time <- yrlab$time + yrlab$offset p <- p + ggplot2::guides(colour = FALSE) p <- p + ggplot2::geom_text(ggplot2::aes_(x = ~time, y = ~y, label = ~year), data = yrlab) } # Add seasonal labels if (s == 12) { labs <- month.abb xLab <- "Month" } else if (s == 4) { labs <- paste("Q", 1:4, sep = "") xLab <- "Quarter" } else if (s == 7) { labs <- c("Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat") xLab <- "Day" } else if (s == 52) { labs <- 1:s xLab <- "Week" } else if (s == 24) { labs <- 0:(s - 1) xLab <- "Hour" } else if (s == 48) { labs <- seq(0, 23.5, by = 0.5) xLab <- "Half-hour" } else { labs <- 1:s xLab <- "Season" } if (!is.null(season.labels)) { if (length(season.labels) != length(labs)) { warning(paste0("Provided season.labels have length ", length(season.labels), ", but ", length(labs), " are required. Ignoring season.labels.")) } else { labs <- season.labels } } breaks <- sort(unique(data$time)) if (polar) { breaks <- head(breaks, -1) p <- p + ggplot2::coord_polar() } p <- p + ggplot2::scale_x_continuous(breaks = breaks, minor_breaks = NULL, labels = labs) # Graph title and axes p <- p + ggAddExtras(main = paste("Seasonal plot:", xname), xlab = xLab, ylab = NULL) return(p) } #' @rdname plot.forecast #' @export autoplot.splineforecast <- function(object, PI=TRUE, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. 
= FALSE) } else { p <- autoplot(object$x) + autolayer(object) p <- p + ggplot2::geom_point(size = 2) fit <- data.frame(datetime = as.numeric(time(object$fitted)), y = as.numeric(object$fitted)) p <- p + ggplot2::geom_line(ggplot2::aes_(x = ~datetime, y = ~y), colour = "red", data = fit) p <- p + ggAddExtras(ylab = deparse(object$model$call$x)) if (!is.null(object$series)) { p <- p + ggplot2::ylab(object$series) } return(p) } } #' @rdname autoplot.seas #' @export autoplot.stl <- function(object, labels = NULL, range.bars = TRUE, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. = FALSE) } else { if (!inherits(object, "stl")) { stop("autoplot.stl requires a stl object, use x=object") } # Re-order series as trend, seasonal, remainder object$time.series <- object$time.series[, c("trend", "seasonal", "remainder")] if (is.null(labels)) { labels <- colnames(object$time.series) } data <- object$time.series cn <- c("data", labels) data <- data.frame( datetime = rep(time(data), NCOL(data) + 1), y = c(rowSums(data), data), parts = factor(rep(cn, each = NROW(data)), levels = cn) ) # Initialise ggplot object p <- ggplot2::ggplot(ggplot2::aes_(x = ~datetime, y = ~y), data = data) # Add data # Timeseries lines p <- p + ggplot2::geom_line(ggplot2::aes_(x = ~datetime, y = ~y), data = subset(data, data$parts != cn[4]), na.rm = TRUE) p <- p + ggplot2::geom_segment( ggplot2::aes_(x = ~datetime, xend = ~datetime, y = 0, yend = ~y), data = subset(data, data$parts == cn[4]), lineend = "butt" ) # Rangebars if (range.bars) { yranges <- vapply(split(data$y, data$parts), function(x) range(x, na.rm = TRUE), numeric(2)) xranges <- range(data$datetime) barmid <- apply(yranges, 2, mean) barlength <- min(apply(yranges, 2, diff)) barwidth <- (1 / 64) * diff(xranges) barpos <- data.frame( left = xranges[2] + barwidth, right = xranges[2] + barwidth * 2, top = barmid + barlength / 2, bottom = barmid - barlength / 2, parts = colnames(yranges), datetime = xranges[2], y = barmid ) p <- p + ggplot2::geom_rect(ggplot2::aes_(xmin = ~left, xmax = ~right, ymax = ~top, ymin = ~bottom), data = barpos, fill = "gray75", colour = "black", size = 1 / 3) } # Remainder p <- p + ggplot2::facet_grid("parts ~ .", scales = "free_y", switch = "y") p <- p + ggplot2::geom_hline(ggplot2::aes_(yintercept = ~y), data = data.frame(y = 0, parts = cn[4])) # Add axis labels p <- p + ggAddExtras(xlab = "Time", ylab = "") # Make x axis contain only whole numbers (e.g., years) p <- p + ggplot2::scale_x_continuous(breaks = unique(round(pretty(data$datetime)))) # ^^ Remove rightmost x axis gap with `expand=c(0.05, 0, 0, 0)` argument when assymetric `expand` feature is supported # issue: tidyverse/ggplot2#1669 return(p) } } #' @rdname autoplot.seas #' @export autoplot.StructTS <- function(object, labels = NULL, range.bars = TRUE, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. 
= FALSE) } else { if (!inherits(object, "StructTS")) { stop("autoplot.StructTS requires a StructTS object.") } if (is.null(labels)) { labels <- colnames(object$fitted) } data <- object$fitted cn <- c("data", labels) data <- data.frame( datetime = rep(time(data), NCOL(data) + 1), y = c(object$data, data), parts = factor(rep(cn, each = NROW(data)), levels = cn) ) # Initialise ggplot object p <- ggplot2::ggplot(ggplot2::aes_(x = ~datetime, y = ~y), data = data) # Add data p <- p + ggplot2::geom_line(ggplot2::aes_(x = ~datetime, y = ~y), na.rm = TRUE) p <- p + ggplot2::facet_grid("parts ~ .", scales = "free_y", switch = "y") if (range.bars) { yranges <- vapply(split(data$y, data$parts), function(x) range(x, na.rm = TRUE), numeric(2)) xranges <- range(data$datetime) barmid <- apply(yranges, 2, mean) barlength <- min(apply(yranges, 2, diff)) barwidth <- (1 / 64) * diff(xranges) barpos <- data.frame( left = xranges[2] + barwidth, right = xranges[2] + barwidth * 2, top = barmid + barlength / 2, bottom = barmid - barlength / 2, parts = colnames(yranges), datetime = xranges[2], y = barmid ) p <- p + ggplot2::geom_rect(ggplot2::aes_(xmin = ~left, xmax = ~right, ymax = ~top, ymin = ~bottom), data = barpos, fill = "gray75", colour = "black", size = 1 / 3) } # Add axis labels p <- p + ggAddExtras(xlab = "Time", ylab = "") # Make x axis contain only whole numbers (e.g., years) p <- p + ggplot2::scale_x_continuous(breaks = unique(round(pretty(data$datetime)))) return(p) } } #' Plot time series decomposition components using ggplot #' #' Produces a ggplot object of seasonally decomposed time series for objects of #' class \dQuote{\code{stl}} (created with \code{\link[stats]{stl}}), class #' \dQuote{\code{seas}} (created with \code{\link[seasonal]{seas}}), or class #' \dQuote{\code{decomposed.ts}} (created with \code{\link[stats]{decompose}}). #' #' @param object Object of class \dQuote{\code{seas}}, \dQuote{\code{stl}}, or #' \dQuote{\code{decomposed.ts}}. #' @param labels Labels to replace \dQuote{seasonal}, \dQuote{trend}, and #' \dQuote{remainder}. #' @param range.bars Logical indicating if each plot should have a bar at its #' right side representing relative size. If \code{NULL}, automatic selection #' takes place. #' @param ... Other plotting parameters to affect the plot. #' @return Returns an object of class \code{ggplot}. #' @author Mitchell O'Hara-Wild #' @seealso \code{\link[seasonal]{seas}}, \code{\link[stats]{stl}}, #' \code{\link[stats]{decompose}}, \code{\link[stats]{StructTS}}, #' \code{\link[stats]{plot.stl}}. #' @examples #' #' library(ggplot2) #' co2 %>% decompose %>% autoplot #' nottem %>% stl(s.window='periodic') %>% autoplot #' #' \dontrun{ #' library(seasonal) #' seas(USAccDeaths) %>% autoplot #' } #' #' @export autoplot.seas <- function(object, labels = NULL, range.bars = NULL, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. 
= FALSE)
  } else {
    if (!inherits(object, "seas")) {
      stop("autoplot.seas requires a seas object")
    }
    if (is.null(labels)) {
      labels <- c("seasonal", "trend", "remainder")
    }
    data <- cbind(object$x, object$data[, c("seasonal", "trend", "irregular")])
    cn <- c("data", labels)
    data <- data.frame(
      datetime = rep(time(data), NCOL(data)), y = c(data),
      parts = factor(rep(cn, each = NROW(data)), levels = cn)
    )
    # Initialise ggplot object
    p <- ggplot2::ggplot(ggplot2::aes_(x = ~datetime, y = ~y), data = data)
    # Add data
    p <- p + ggplot2::geom_line(ggplot2::aes_(x = ~datetime, y = ~y), data = subset(data, data$parts != cn[4]), na.rm = TRUE)
    p <- p + ggplot2::geom_segment(
      ggplot2::aes_(x = ~datetime, xend = ~datetime, y = 1, yend = ~y),
      data = subset(data, data$parts == cn[4]), lineend = "butt"
    )
    p <- p + ggplot2::facet_grid("parts ~ .", scales = "free_y", switch = "y")
    p <- p + ggplot2::geom_hline(ggplot2::aes_(yintercept = ~y), data = data.frame(y = 1, parts = cn[4]))
    # Rangebars
    if (is.null(range.bars)) {
      range.bars <- object$spc$transform$`function` == "none"
    }
    if (range.bars) {
      yranges <- vapply(split(data$y, data$parts), function(x) range(x, na.rm = TRUE), numeric(2))
      xranges <- range(data$datetime)
      barmid <- apply(yranges, 2, mean)
      barlength <- min(apply(yranges, 2, diff))
      barwidth <- (1 / 64) * diff(xranges)
      barpos <- data.frame(
        left = xranges[2] + barwidth, right = xranges[2] + barwidth * 2,
        top = barmid + barlength / 2, bottom = barmid - barlength / 2,
        parts = colnames(yranges), datetime = xranges[2], y = barmid
      )
      p <- p + ggplot2::geom_rect(ggplot2::aes_(xmin = ~left, xmax = ~right, ymax = ~top, ymin = ~bottom), data = barpos, fill = "gray75", colour = "black", size = 1 / 3)
    }
    # Add axis labels
    p <- p + ggAddExtras(xlab = "Time", ylab = "")
    # Make x axis contain only whole numbers (e.g., years)
    p <- p + ggplot2::scale_x_continuous(breaks = unique(round(pretty(data$datetime))))
    return(p)
  }
}

#' @rdname autoplot.ts
#' @export
autolayer.mts <- function(object, colour=TRUE, series=NULL, ...) {
  if (!requireNamespace("ggplot2", quietly = TRUE)) {
    stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. = FALSE)
  } else {
    cl <- match.call()
    cl[[1]] <- quote(autolayer)
    cl$object <- quote(object[, i])
    if (length(series) != NCOL(object)) {
      if (colour) {
        message("For a multivariate time series, specify a series name for each series. Defaulting to column names.")
      }
      series <- colnames(object)
    }
    out <- list()
    for (i in 1:NCOL(object)) {
      cl$series <- series[i]
      out[[i]] <- eval(cl)
    }
    return(out)
  }
}

#' @rdname autoplot.ts
#' @export
autolayer.msts <- function(object, series = NULL, ...) {
  if (NCOL(object) > 1) {
    class(object) <- c("mts", "ts", "matrix")
  } else {
    if (is.null(series)) {
      # Name the layer after the plotted object, not the (NULL) series argument
      series <- deparse(substitute(object))
    }
    class(object) <- c("ts")
  }
  attr(object, "msts") <- NULL
  autolayer(object, series = series, ...)
}

#' @rdname autoplot.ts
#' @export
autolayer.ts <- function(object, colour=TRUE, series=NULL, ...) {
  if (!requireNamespace("ggplot2", quietly = TRUE)) {
    stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call.
= FALSE) } else { tsdata <- data.frame( timeVal = as.numeric(time(object)), series = ifelse(is.null(series), deparse(substitute(object)), series), seriesVal = as.numeric(object) ) if (colour) { ggplot2::geom_line(ggplot2::aes_(x = ~timeVal, y = ~seriesVal, group = ~series, colour = ~series), data = tsdata, ..., inherit.aes = FALSE) } else { ggplot2::geom_line(ggplot2::aes_(x = ~timeVal, y = ~seriesVal, group = ~series), data = tsdata, ..., inherit.aes = FALSE) } } } #' @rdname plot.forecast #' @export autolayer.forecast <- function(object, series = NULL, PI = TRUE, showgap = TRUE, ...) { PI <- PI & !is.null(object$level) data <- forecast2plotdf(object, PI = PI, showgap = showgap) mapping <- ggplot2::aes_(x = ~x, y = ~y) if (!is.null(object$series)) { data[["series"]] <- object$series } if (!is.null(series)) { data[["series"]] <- series mapping$colour <- quote(series) } if (PI) { mapping$level <- quote(level) mapping$ymin <- quote(ymin) mapping$ymax <- quote(ymax) } geom_forecast(mapping = mapping, data = data, stat = "identity", ..., inherit.aes = FALSE) } #' @rdname plot.mforecast #' @export autolayer.mforecast <- function(object, series = NULL, PI = TRUE, ...) { cl <- match.call() cl[[1]] <- quote(autolayer) cl$object <- quote(object$forecast[[i]]) if (!is.null(series)) { if (length(series) != length(object$forecast)) { series <- names(object$forecast) } } out <- list() for (i in 1:length(object$forecast)) { cl$series <- series[i] out[[i]] <- eval(cl) } return(out) } #' Automatically create a ggplot for time series objects #' #' \code{autoplot} takes an object of type \code{ts} or \code{mts} and creates #' a ggplot object suitable for usage with \code{stat_forecast}. #' #' \code{fortify.ts} takes a \code{ts} object and converts it into a data frame #' (for usage with ggplot2). #' #' @param object Object of class \dQuote{\code{ts}} or \dQuote{\code{mts}}. #' @param series Identifies the timeseries with a colour, which integrates well #' with the functionality of \link{geom_forecast}. #' @param facets If TRUE, multiple time series will be faceted (and unless #' specified, colour is set to FALSE). If FALSE, each series will be assigned a #' colour. #' @param colour If TRUE, the time series will be assigned a colour aesthetic #' @param model Object of class \dQuote{\code{ts}} to be converted to #' \dQuote{\code{data.frame}}. #' @param data Not used (required for \link{fortify} method) #' @param ... Other plotting parameters to affect the plot. #' @inheritParams plot.forecast #' @return None. Function produces a ggplot graph. #' @author Mitchell O'Hara-Wild #' @seealso \code{\link[stats]{plot.ts}}, \code{\link[ggplot2]{fortify}} #' @examples #' #' library(ggplot2) #' autoplot(USAccDeaths) #' #' lungDeaths <- cbind(mdeaths, fdeaths) #' autoplot(lungDeaths) #' autoplot(lungDeaths, facets=TRUE) #' #' @export autoplot.ts <- function(object, series=NULL, xlab = "Time", ylab = deparse(substitute(object)), main = NULL, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. = FALSE) } else { if (!is.ts(object)) { stop("autoplot.ts requires a ts object, use object=object") } # Create data frame with time as a column labelled x # and time series as a column labelled y. 
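    # For example, AirPassengers gives x = 1949.000, 1949.083, ... and
    # y = 112, 118, ... (one row per observation).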
data <- data.frame(y = as.numeric(object), x = as.numeric(time(object))) if (!is.null(series)) { data <- transform(data, series = series) } # Initialise ggplot object p <- ggplot2::ggplot(ggplot2::aes_(y = ~y, x = ~x), data = data) # Add data if (!is.null(series)) { p <- p + ggplot2::geom_line(ggplot2::aes_(group = ~series, colour = ~series), na.rm = TRUE, ...) } else { p <- p + ggplot2::geom_line(na.rm = TRUE, ...) } # Add labels p <- p + ggAddExtras(xlab = xlab, ylab = ylab, main = main) # Make x axis contain only whole numbers (e.g., years) p <- p + ggplot2::scale_x_continuous(breaks = ggtsbreaks) return(p) } } #' @rdname autoplot.ts #' @export autoplot.mts <- function(object, colour=TRUE, facets=FALSE, xlab = "Time", ylab = deparse(substitute(object)), main = NULL, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. = FALSE) } else { if (!stats::is.mts(object)) { stop("autoplot.mts requires a mts object, use x=object") } if (NCOL(object) <= 1) { return(autoplot.ts(object, ...)) } cn <- colnames(object) if (is.null(cn)) { cn <- paste("Series", seq_len(NCOL(object))) } data <- data.frame( y = as.numeric(c(object)), x = rep(as.numeric(time(object)), NCOL(object)), series = factor(rep(cn, each = NROW(object)), levels = cn) ) # Initialise ggplot object mapping <- ggplot2::aes_(y = ~y, x = ~x, group = ~series) if (colour && (!facets || !missing(colour))) { mapping$colour <- quote(series) } p <- ggplot2::ggplot(mapping, data = data) p <- p + ggplot2::geom_line(na.rm = TRUE, ...) if (facets) { p <- p + ggplot2::facet_grid(series~., scales = "free_y") } p <- p + ggAddExtras(xlab = xlab, ylab = ylab, main = main) return(p) } } #' @rdname autoplot.ts #' @export autoplot.msts <- function(object, ...) { sname <- deparse(substitute(object)) if (NCOL(object) > 1) { class(object) <- c("mts", "ts", "matrix") } else { class(object) <- c("ts") } attr(object, "msts") <- NULL autoplot(object, ...) + ggAddExtras(ylab = sname) } #' @rdname autoplot.ts #' @export fortify.ts <- function(model, data, ...) { # Use ggfortify version if it is loaded # to prevent cran errors if (exists("ggfreqplot")) { tsp <- attr(model, which = "tsp") dtindex <- time(model) if (any(tsp[3] == c(4, 12))) { dtindex <- zoo::as.Date.yearmon(dtindex) } model <- data.frame(Index = dtindex, Data = as.numeric(model)) return(ggplot2::fortify(model)) } else { model <- cbind(x = as.numeric(time(model)), y = as.numeric(model)) as.data.frame(model) } } forecast2plotdf <- function(model, data=as.data.frame(model), PI=TRUE, showgap=TRUE, ...) { # Time series forecasts if (is.element("ts", class(model$mean))) { xVals <- as.numeric(time(model$mean)) # x axis is time } # Cross-sectional forecasts else if (!is.null(model[["newdata"]])) { xVals <- as.numeric(model[["newdata"]][, 1]) # Only display the first column of newdata, should be generalised. 
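    # Descriptive note: the data frame assembled below has columns x, y,
    # level, ymin and ymax. Rows carrying the point forecast have NA in the
    # interval columns, and interval rows have NA in y, so the forecast geom
    # can separate the two again when drawing.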
if (NCOL(model[["newdata"]]) > 1) { message("Note: only extracting first column of data") } } else { stop("Could not find forecast x axis") } Hiloc <- grep("Hi ", names(data)) Loloc <- grep("Lo ", names(data)) if (PI && !is.null(model$level)) { # PI if (length(Hiloc) == length(Loloc)) { if (length(Hiloc) > 0) { out <- data.frame( x = rep(xVals, length(Hiloc) + 1), y = c(rep(NA, NROW(data) * (length(Hiloc))), data[, 1]), level = c(as.numeric(rep(gsub("Hi ", "", names(data)[Hiloc]), each = NROW(data))), rep(NA, NROW(data))), ymax = c(unlist(data[, Hiloc]), rep(NA, NROW(data))), ymin = c(unlist(data[, Loloc]), rep(NA, NROW(data))) ) numInterval <- length(model$level) } } else { warning("missing intervals detected, plotting point predictions only") PI <- FALSE } } if (!PI) { # No PI out <- data.frame(x = xVals, y = as.numeric(model$mean), level = rep(NA, NROW(model$mean)), ymax = rep(NA, NROW(model$mean)), ymin = rep(NA, NROW(model$mean))) numInterval <- 0 } if (!showgap) { if (is.null(model$x)) { warning("Removing the gap requires historical data, provide this via model$x. Defaulting showgap to TRUE.") } else { intervalGap <- data.frame( x = rep(time(model$x)[length(model$x)], numInterval + 1), y = c(model$x[length(model$x)], rep(NA, numInterval)), level = c(NA, model$level)[seq_along(1:(numInterval + 1))], ymax = c(NA, rep(model$x[length(model$x)], numInterval)), ymin = c(NA, rep(model$x[length(model$x)], numInterval)) ) out <- rbind(intervalGap, out) } } return(out) } #' @rdname geom_forecast #' @export StatForecast <- ggplot2::ggproto( "StatForecast", ggplot2::Stat, required_aes = c("x", "y"), compute_group = function(data, scales, params, PI=TRUE, showgap=TRUE, series=NULL, h=NULL, level=c(80, 95), fan=FALSE, robust=FALSE, lambda=NULL, find.frequency=FALSE, allow.multiplicative.trend=FALSE, ...) 
{ ## TODO: Rewrite tspx <- recoverTSP(data$x) if (is.null(h)) { h <- ifelse(tspx[3] > 1, 2 * tspx[3], 10) } tsdat <- ts(data = data$y, start = tspx[1], frequency = tspx[3]) fcast <- forecast( tsdat, h = h, level = level, fan = fan, robust = robust, lambda = lambda, find.frequency = find.frequency, allow.multiplicative.trend = allow.multiplicative.trend ) fcast <- forecast2plotdf(fcast, PI = PI, showgap = showgap) # Add ggplot & series information extraInfo <- as.list(data[1, !colnames(data) %in% colnames(fcast)]) extraInfo$`_data` <- quote(fcast) if (!is.null(series)) { if (data$group[1] > length(series)) { message("Recycling series argument, please provide a series name for each time series") } extraInfo[["series"]] <- series[(abs(data$group[1]) - 1) %% length(series) + 1] } do.call("transform", extraInfo) } ) #' @rdname geom_forecast #' @export GeomForecast <- ggplot2::ggproto( "GeomForecast", ggplot2::Geom, # Produces both point forecasts and intervals on graph required_aes = c("x", "y"), optional_aes = c("ymin", "ymax", "level"), default_aes = ggplot2::aes( colour = "blue", fill = "grey60", size = .5, linetype = 1, weight = 1, alpha = 1, level = NA ), draw_key = function(data, params, size) { lwd <- min(data$size, min(size) / 4) # Calculate and set colour linecol <- blendHex(data$col, "gray30", 1) fillcol <- blendHex(data$col, "#CCCCCC", 0.8) grid::grobTree( grid::rectGrob( width = grid::unit(1, "npc") - grid::unit(lwd, "mm"), height = grid::unit(1, "npc") - grid::unit(lwd, "mm"), gp = grid::gpar( col = fillcol, fill = scales::alpha(fillcol, data$alpha), lty = data$linetype, lwd = lwd * ggplot2::.pt, linejoin = "mitre" ) ), grid::linesGrob( x = c(0, 0.4, 0.6, 1), y = c(0.2, 0.6, 0.4, 0.9), gp = grid::gpar( col = linecol, fill = scales::alpha(linecol, data$alpha), lty = data$linetype, lwd = lwd * ggplot2::.pt, linejoin = "mitre" ) ) ) }, handle_na = function(self, data, params) { ## TODO: Consider removing/changing data }, draw_group = function(data, panel_scales, coord) { data <- if (!is.null(data$level)) { split(data, !is.na(data$level)) } else { list(data) } # Draw forecasted points and intervals if (length(data) == 1) { # PI=FALSE ggplot2:::ggname( "geom_forecast", GeomForecastPoint$draw_panel(data[[1]], panel_scales, coord) ) } else { # PI=TRUE ggplot2:::ggname( "geom_forecast", grid::addGrob( GeomForecastInterval$draw_group(data[[2]], panel_scales, coord), GeomForecastPoint$draw_panel(data[[1]], panel_scales, coord) ) ) } } ) GeomForecastPoint <- ggplot2::ggproto( "GeomForecastPoint", GeomForecast, ## Produces only point forecasts required_aes = c("x", "y"), setup_data = function(data, params) { data[!is.na(data$y), ] # Extract only forecast points }, draw_group = function(data, panel_scales, coord) { linecol <- blendHex(data$colour[1], "gray30", 1) # Compute alpha transparency data$alpha <- grDevices::col2rgb(linecol, alpha = TRUE)[4, ] / 255 * data$alpha # Select appropriate Geom and set defaults if (NROW(data) == 0) { # Blank ggplot2::GeomBlank$draw_panel } else if (NROW(data) == 1) { # Point GeomForecastPointGeom <- ggplot2::GeomPoint$draw_panel pointpred <- transform(data, fill = NA, colour = linecol, size = 1, shape = 19, stroke = 0.5) } else { # Line GeomForecastPointGeom <- ggplot2::GeomLine$draw_panel pointpred <- transform(data, fill = NA, colour = linecol) } # Draw forecast points ggplot2:::ggname( "geom_forecast_point", grid::grobTree(GeomForecastPointGeom(pointpred, panel_scales, coord)) ) } ) blendHex <- function(mixcol, seqcol, alpha=1) { requireNamespace("methods") 
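  # Descriptive note: blendHex() blends two colours in HLS space. It keeps
  # the hue of mixcol, copies the lightness of seqcol, and interpolates the
  # saturation by alpha. Illustrative call, using the values seen in
  # draw_key above: blendHex("blue", "gray30", 1) yields a blue with
  # gray30's lightness.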
if (is.na(seqcol)) { return(mixcol) } # transform to hue/lightness/saturation colorspace seqcol <- grDevices::col2rgb(seqcol, alpha = TRUE) mixcol <- grDevices::col2rgb(mixcol, alpha = TRUE) seqcolHLS <- suppressWarnings(methods::coerce(colorspace::RGB(R = seqcol[1, ] / 255, G = seqcol[2, ] / 255, B = seqcol[3, ] / 255), structure(NULL, class = "HLS"))) mixcolHLS <- suppressWarnings(methods::coerce(colorspace::RGB(R = mixcol[1, ] / 255, G = mixcol[2, ] / 255, B = mixcol[3, ] / 255), structure(NULL, class = "HLS"))) # copy luminence mixcolHLS@coords[, "L"] <- seqcolHLS@coords[, "L"] mixcolHLS@coords[, "S"] <- alpha * mixcolHLS@coords[, "S"] + (1 - alpha) * seqcolHLS@coords[, "S"] mixcolHex <- suppressWarnings(methods::coerce(mixcolHLS, structure(NULL, class = "RGB"))) mixcolHex <- colorspace::hex(mixcolHex) mixcolHex <- ggplot2::alpha(mixcolHex, mixcol[4, ] / 255) return(mixcolHex) } GeomForecastInterval <- ggplot2::ggproto( "GeomForecastInterval", GeomForecast, ## Produces only forecasts intervals on graph required_aes = c("x", "ymin", "ymax"), setup_data = function(data, params) { data[is.na(data$y), ] # Extract only forecast intervals }, draw_group = function(data, panel_scales, coord) { # If level scale from fabletools is not loaded, convert to colour if(is.numeric(data$level)){ leveldiff <- diff(range(data$level)) if (leveldiff == 0) { leveldiff <- 1 } shadeVal <- (data$level - min(data$level)) / leveldiff * 0.2 + 8 / 15 data$level <- rgb(shadeVal, shadeVal, shadeVal) } intervalGrobList <- lapply( split(data, data$level), FUN = function(x) { # Calculate colour fillcol <- blendHex(x$colour[1], x$level[1], 0.7) # Compute alpha transparency x$alpha <- grDevices::col2rgb(fillcol, alpha = TRUE)[4, ] / 255 * x$alpha # Select appropriate Geom and set defaults if (NROW(x) == 0) { # Blank ggplot2::GeomBlank$draw_panel } else if (NROW(x) == 1) { # Linerange GeomForecastIntervalGeom <- ggplot2::GeomLinerange$draw_panel x <- transform(x, colour = fillcol, fill = NA, size = 1) } else { # Ribbon GeomForecastIntervalGeom <- ggplot2::GeomRibbon$draw_group x <- transform(x, colour = NA, fill = fillcol) } # Create grob return(GeomForecastIntervalGeom(x, panel_scales, coord)) ## Create list pair with average ymin/ymax to order layers } ) # Draw forecast intervals ggplot2:::ggname("geom_forecast_interval", do.call(grid::grobTree, rev(intervalGrobList))) # TODO: Find reliable method to stacking them correctly } ) #' Forecast plot #' #' Generates forecasts from \code{forecast.ts} and adds them to the plot. #' Forecasts can be modified via sending forecast specific arguments above. #' #' Multivariate forecasting is supported by having each time series on a #' different group. #' #' You can also pass \code{geom_forecast} a \code{forecast} object to add it to #' the plot. #' #' The aesthetics required for the forecasting to work includes forecast #' observations on the y axis, and the \code{time} of the observations on the x #' axis. Refer to the examples below. To automatically set up aesthetics, use #' \code{autoplot}. #' #' @param mapping Set of aesthetic mappings created by \code{\link{aes}} or #' \code{\link{aes_}}. If specified and \code{inherit.aes = TRUE} (the #' default), it is combined with the default mapping at the top level of the #' plot. You must supply \code{mapping} if there is no plot mapping. #' @param data The data to be displayed in this layer. 
There are three options:
#'
#' If \code{NULL}, the default, the data is inherited from the plot data as
#' specified in the call to \code{\link{ggplot}}.
#'
#' A \code{data.frame}, or other object, will override the plot data. All
#' objects will be fortified to produce a data frame. See \code{\link{fortify}}
#' for which variables will be created.
#'
#' A \code{function} will be called with a single argument, the plot data. The
#' return value must be a \code{data.frame}, and will be used as the layer
#' data.
#' @param stat The stat object to use to calculate the data.
#' @param position Position adjustment, either as a string, or the result of a
#' call to a position adjustment function.
#' @param na.rm If \code{FALSE} (the default), removes missing values with a
#' warning. If \code{TRUE} silently removes missing values.
#' @param show.legend logical. Should this layer be included in the legends?
#' \code{NA}, the default, includes if any aesthetics are mapped. \code{FALSE}
#' never includes, and \code{TRUE} always includes.
#' @param inherit.aes If \code{FALSE}, overrides the default aesthetics, rather
#' than combining with them. This is most useful for helper functions that
#' define both data and aesthetics and shouldn't inherit behaviour from the
#' default plot specification, e.g. \code{\link{borders}}.
#' @param PI If \code{FALSE}, prediction intervals will not be plotted, giving
#' only the forecast line.
#' @param showgap If \code{showgap=FALSE}, the gap between the historical
#' observations and the forecasts is removed.
#' @param series Matches an unidentified forecast layer with a coloured object
#' on the plot.
#' @param ... Additional arguments for \code{\link{forecast.ts}}, other
#' arguments are passed on to \code{\link{layer}}. These are often aesthetics,
#' used to set an aesthetic to a fixed value, like \code{color = "red"} or
#' \code{alpha = .5}. They may also be parameters to the paired geom/stat.
#' @return A layer for a ggplot graph.
#' @author Mitchell O'Hara-Wild
#' @seealso \code{\link{forecast}}, \code{\link[ggplot2]{ggproto}}
#' @examples
#'
#' \dontrun{
#' library(ggplot2)
#' autoplot(USAccDeaths) + geom_forecast()
#'
#' lungDeaths <- cbind(mdeaths, fdeaths)
#' autoplot(lungDeaths) + geom_forecast()
#'
#' # Using fortify.ts
#' p <- ggplot(aes(x=x, y=y), data=USAccDeaths)
#' p <- p + geom_line()
#' p + geom_forecast()
#'
#' # Without fortify.ts
#' data <- data.frame(USAccDeaths=as.numeric(USAccDeaths), time=as.numeric(time(USAccDeaths)))
#' p <- ggplot(aes(x=time, y=USAccDeaths), data=data)
#' p <- p + geom_line()
#' p + geom_forecast()
#'
#' p + geom_forecast(h=60)
#' p <- ggplot(aes(x=time, y=USAccDeaths), data=data)
#' p + geom_forecast(level=c(70,98))
#' p + geom_forecast(level=c(70,98),colour="lightblue")
#'
#' # Add forecasts to multivariate series with colour groups
#' lungDeaths <- cbind(mdeaths, fdeaths)
#' autoplot(lungDeaths) + geom_forecast(forecast(mdeaths), series="mdeaths")
#' }
#'
#' @export
geom_forecast <- function(mapping = NULL, data = NULL, stat = "forecast",
                          position = "identity", na.rm = FALSE, show.legend = NA,
                          inherit.aes = TRUE, PI=TRUE, showgap=TRUE, series=NULL, ...)
{ if (is.forecast(mapping) || is.mforecast(mapping)) { warning("Use autolayer instead of geom_forecast to add a forecast layer to your ggplot object.") cl <- match.call() cl[[1]] <- quote(autolayer) names(cl)[names(cl) == "mapping"] <- "object" return(eval.parent(cl)) } if (is.ts(mapping)) { data <- data.frame(y = as.numeric(mapping), x = as.numeric(time(mapping))) mapping <- ggplot2::aes_(y = ~y, x = ~x) } if (stat == "forecast") { paramlist <- list(na.rm = na.rm, PI = PI, showgap = showgap, series = series, ...) if (!is.null(series)) { if (inherits(mapping, "uneval")) { mapping$colour <- quote(..series..) } else { mapping <- ggplot2::aes_(colour = ~..series..) } } } else { paramlist <- list(na.rm = na.rm, ...) } ggplot2::layer( geom = GeomForecast, mapping = mapping, data = data, stat = stat, position = position, show.legend = show.legend, inherit.aes = inherit.aes, params = paramlist ) } # Produce nice histogram with appropriately chosen bin widths # Designed to work with time series data without issuing warnings. #' Histogram with optional normal and kernel density functions #' #' Plots a histogram and density estimates using ggplot. #' #' #' @param x a numerical vector. #' @param add.normal Add a normal density function for comparison #' @param add.kde Add a kernel density estimate for comparison #' @param add.rug Add a rug plot on the horizontal axis #' @param bins The number of bins to use for the histogram. Selected by default #' using the Friedman-Diaconis rule given by \code{\link[grDevices]{nclass.FD}} #' @param boundary A boundary between two bins. #' @return None. #' @author Rob J Hyndman #' @seealso \code{\link[graphics]{hist}}, \code{\link[ggplot2]{geom_histogram}} #' @examples #' #' gghistogram(lynx, add.kde=TRUE) #' #' @export gghistogram <- function(x, add.normal=FALSE, add.kde=FALSE, add.rug=TRUE, bins, boundary=0) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. = FALSE) } else { if (missing(bins)) { bins <- min(500, grDevices::nclass.FD(na.exclude(x))) } data <- data.frame(x = as.numeric(c(x))) # Initialise ggplot object and plot histogram binwidth <- (max(x, na.rm = TRUE) - min(x, na.rm = TRUE)) / bins p <- ggplot2::ggplot() + ggplot2::geom_histogram(ggplot2::aes(x), data = data, binwidth = binwidth, boundary = boundary) + ggplot2::xlab(deparse(substitute(x))) # Add normal density estimate if (add.normal || add.kde) { xmin <- min(x, na.rm = TRUE) xmax <- max(x, na.rm = TRUE) if (add.kde) { h <- stats::bw.SJ(x) xmin <- xmin - 3 * h xmax <- xmax + 3 * h } if (add.normal) { xmean <- mean(x, na.rm = TRUE) xsd <- sd(x, na.rm = TRUE) xmin <- min(xmin, xmean - 3 * xsd) xmax <- max(xmax, xmean + 3 * xsd) } xgrid <- seq(xmin, xmax, l = 512) if (add.normal) { df <- data.frame(x = xgrid, y = length(x) * binwidth * stats::dnorm(xgrid, xmean, xsd)) p <- p + ggplot2::geom_line(ggplot2::aes(df$x, df$y), col = "#ff8a62") } if (add.kde) { kde <- stats::density(x, bw = h, from = xgrid[1], to = xgrid[512], n = 512) p <- p + ggplot2::geom_line(ggplot2::aes(x = kde$x, y = length(x) * binwidth * kde$y), col = "#67a9ff") } } if (add.rug) { p <- p + ggplot2::geom_rug(ggplot2::aes(x)) } return(p) } } forecast/R/msts.R0000644000176200001440000000621713553673010013403 0ustar liggesusers#' Multi-Seasonal Time Series #' #' msts is an S3 class for multi seasonal time series objects, intended to be #' used for models that support multiple seasonal periods. 
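#' For example, half-hourly electricity demand typically has both a daily
#' period (48) and a weekly period (336), as in the \code{taylor} example
#' below.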
The msts class #' inherits from the ts class and has an additional "msts" attribute which #' contains the vector of seasonal periods. All methods that work on a ts #' class, should also work on a msts class. #' #' @aliases print.msts window.msts `[.msts` #' #' @param data A numeric vector, ts object, matrix or data frame. It is #' intended that the time series data is univariate, otherwise treated the same #' as ts(). #' @param seasonal.periods A vector of the seasonal periods of the msts. #' @param ts.frequency The seasonal period that should be used as frequency of #' the underlying ts object. The default value is \code{max(seasonal.periods)}. #' @param ... Arguments to be passed to the underlying call to \code{ts()}. For #' example \code{start=c(1987,5)}. #' @return An object of class \code{c("msts", "ts")}. If there is only one #' seasonal period (i.e., \code{length(seasonal.periods)==1}), then the object #' is of class \code{"ts"}. #' @author Slava Razbash and Rob J Hyndman #' @keywords ts #' @examples #' #' x <- msts(taylor, seasonal.periods=c(48,336), start=2000+22/52) #' y <- msts(USAccDeaths, seasonal.periods=12, start=1949) #' #' @export msts <- function(data, seasonal.periods, ts.frequency=floor(max(seasonal.periods)), ...) { # if(!is.element(ts.frequency, round(seasonal.periods-0.5+1e-12))) # stop("ts.frequency should be one of the seasonal periods") if (inherits(data, "ts") && frequency(data) == ts.frequency && length(list(...)) == 0) { object <- data } else { object <- ts(data = data, frequency = ts.frequency, ...) } if (length(seasonal.periods) > 1L) { class(object) <- c("msts", "ts") attr(object, "msts") <- sort(seasonal.periods) } return(object) } #' @export print.msts <- function(x, ...) { cat("Multi-Seasonal Time Series:\n") cat("Start: ") cat(start(x)) # cat("\nEnd: ") # cat(x$end) cat("\nSeasonal Periods: ") cat(attr(x, "msts")) cat("\nData:\n") xx <- unclass(x) # handles both univariate and multivariate ts attr(xx, "tsp") <- attr(xx, "msts") <- NULL print(xx) # print(matrix(x, ncol=length(x)), nrow=1) cat("\n") } #' @export window.msts <- function(x, ...) { seasonal.periods <- attr(x, "msts") class(x) <- c("ts") x <- window(x, ...) class(x) <- c("msts", "ts") attr(x, "msts") <- seasonal.periods return(x) } #' @export `[.msts` <- function(x, i, j, drop = TRUE) { y <- NextMethod("[") if(!inherits(y, "ts")) return(y) class(y) <- c("msts", class(y)) attr(y, "msts") <- attr(x, "msts") y } # #' @export # Ops.msts <- function(e1, e2){ # msts <- attr(e1, "msts") # if(is.null(msts)){ # msts <- attr(e2, "msts") # class(e2) <- setdiff("msts", class(e2)) # } # else{ # if(is.null(attr(e2, "msts"))){ # class(e1) <- setdiff("msts", class(e1)) # } else { # if(!identical(msts, attr(e2, "msts"))){ # "Cannot combine time series with different seasonal specifications." # } # } # } # structure(NextMethod(), msts = msts, class = c("msts", "ts")) # }forecast/R/subset.R0000644000176200001440000001230213553673010013712 0ustar liggesusers#' Subsetting a time series #' #' Various types of subsetting of a time series. Allows subsetting by index #' values (unlike \code{\link[stats]{window}}). Also allows extraction of the #' values of a specific season or subset of seasons in each year. For example, #' to extract all values for the month of May from a time series. #' #' If character values for months are used, either upper or lower case may be #' used, and partial unambiguous names are acceptable. 
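#' For example, \code{month = "May"} and \code{month = 5} select the same
#' observations from a monthly series.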
Possible character #' values for quarters are \code{"Q1"}, \code{"Q2"}, \code{"Q3"}, and #' \code{"Q4"}. #' #' @param x a univariate time series to be subsetted #' @param subset optional logical expression indicating elements to keep; #' missing values are taken as false. \code{subset} must be the same length as #' \code{x}. #' @param month Numeric or character vector of months to retain. Partial #' matching on month names used. #' @param quarter Numeric or character vector of quarters to retain. #' @param season Numeric vector of seasons to retain. #' @param start Index of start of contiguous subset. #' @param end Index of end of contiguous subset. #' @param ... Other arguments, unused. #' @return If \code{subset} is used, a numeric vector is returned with no ts #' attributes. If \code{start} and/or \code{end} are used, a ts object is #' returned consisting of x[start:end], with the appropriate time series #' attributes retained. Otherwise, a ts object is returned with frequency equal #' to the length of \code{month}, \code{quarter} or \code{season}. #' @author Rob J Hyndman #' @seealso \code{\link[base]{subset}}, \code{\link[stats]{window}} #' @keywords ts #' @examples #' plot(subset(gas,month="November")) #' subset(woolyrnq,quarter=3) #' subset(USAccDeaths, start=49) #' #' @export subset.ts <- function(x, subset=NULL, month=NULL, quarter=NULL, season=NULL, start=NULL, end=NULL, ...) { if (!is.null(subset)) { if (NROW(subset) != NROW(x)) { stop("subset must be the same length as x") } if (NCOL(subset) != 1) { stop("subset must be a vector of rows to keep") } if ("mts" %in% class(x)) { return(subset.matrix(x, subset)) } else { return(subset.default(x, subset)) } } else if (!is.null(start) | !is.null(end)) { if (is.null(start)) { start <- 1 } if (is.null(end)) { end <- NROW(x) } if ("mts" %in% class(x)) { xsub <- x[start:end, ] } else { xsub <- x[start:end] } tspx <- tsp(x) return(ts(xsub, frequency = tspx[3], start = tspx[1L] + (start - 1) / tspx[3L])) } else if (frequency(x) <= 1) { stop("Data must be seasonal") } if (!is.null(month)) { if (frequency(x) != 12) { stop("Data is not monthly") } if (is.character(month)) { season <- pmatch(tolower(month), tolower(month.name), duplicates.ok = TRUE) } else { season <- month } season <- na.omit(season) if (length(season) == 0L) { stop("No recognizable months") } if (min(season) < 1L | max(season) > 12L) { stop("Months must be between 1 and 12") } } else if (!is.null(quarter)) { if (frequency(x) != 4) { stop("Data is not quarterly") } if (is.character(quarter)) { season <- pmatch(tolower(quarter), paste("q", 1:4, sep = ""), duplicates.ok = TRUE) } else { season <- quarter } season <- na.omit(season) if (length(season) == 0L) { stop("No recognizable quarters") } if (min(season) < 1L | max(season) > 4L) { stop("Quarters must be between 1 and 4") } } else if (is.null(season)) { stop("No subset specified") } else if (min(season) < 1L | max(season) > frequency(x)) { stop(paste("Seasons must be between 1 and", frequency(x))) } start <- utils::head(time(x)[is.element(cycle(x), season)], 1) if ("mts" %in% class(x)) { x <- subset.matrix(x, is.element(cycle(x), season)) } else { x <- subset.default(x, is.element(cycle(x), season)) } return(ts(x, frequency = length(season), start = start)) } #' @export #' @importFrom utils head.matrix head.ts <- function(x, n=6L, ...) { attr_x <- attributes(x) attr_x$names <- NULL if (NCOL(x) > 1) { hx <- head.matrix(as.matrix(x), n = n, ...) } else if ((length(x) + n) > 0) { hx <- head(c(x), n = n, ...) 
} else { return(numeric(0)) } attr_x$tsp[2] <- attr_x$tsp[1] + (NROW(hx) - 1) / attr_x$tsp[3] if (!is.null(dim(x))) { attr_x$dim[1] <- NROW(hx) } attributes(hx) <- attr_x return(hx) } #' @export #' @importFrom utils tail.matrix tail.ts <- function(x, n=6L, ...) { attr_x <- attributes(x) attr_x$names <- NULL if (NCOL(x) > 1) { hx <- tail.matrix(as.matrix(x), n = n, ...) } else if ((length(x) + n) > 0) { hx <- tail(c(x), n = n, ...) } else { return(numeric(0)) } attr_x$tsp[1] <- attr_x$tsp[2] - (NROW(hx) - 1) / attr_x$tsp[3] if (!is.null(dim(x))) { attr_x$dim[1] <- NROW(hx) } attributes(hx) <- attr_x return(hx) } #' @rdname subset.ts #' @export subset.msts <- function(x, subset=NULL, start=NULL, end=NULL, ...) { out <- subset.ts(x, start = start, end = end, ...) tspx <- tsp(out) msts( out, seasonal.periods = attr(x, "msts"), start = tspx[1], ts.frequency = tspx[3] ) } forecast/R/bats.R0000644000176200001440000003666113553673010013354 0ustar liggesusers# Author: srazbash ############################################################################### #' BATS model (Exponential smoothing state space model with Box-Cox #' transformation, ARMA errors, Trend and Seasonal components) #' #' Fits a BATS model applied to \code{y}, as described in De Livera, Hyndman & #' Snyder (2011). Parallel processing is used by default to speed up the #' computations. #' #' @aliases as.character.bats print.bats #' #' @param y The time series to be forecast. Can be \code{numeric}, \code{msts} #' or \code{ts}. Only univariate time series are supported. #' @param use.box.cox \code{TRUE/FALSE} indicates whether to use the Box-Cox #' transformation or not. If \code{NULL} then both are tried and the best fit #' is selected by AIC. #' @param use.trend \code{TRUE/FALSE} indicates whether to include a trend or #' not. If \code{NULL} then both are tried and the best fit is selected by AIC. #' @param use.damped.trend \code{TRUE/FALSE} indicates whether to include a #' damping parameter in the trend or not. If \code{NULL} then both are tried #' and the best fit is selected by AIC. #' @param seasonal.periods If \code{y} is a numeric then seasonal periods can #' be specified with this parameter. #' @param use.arma.errors \code{TRUE/FALSE} indicates whether to include ARMA #' errors or not. If \code{TRUE} the best fit is selected by AIC. If #' \code{FALSE} then the selection algorithm does not consider ARMA errors. #' @param use.parallel \code{TRUE/FALSE} indicates whether or not to use #' parallel processing. #' @param num.cores The number of parallel processes to be used if using #' parallel processing. If \code{NULL} then the number of logical cores is #' detected and all available cores are used. #' @param bc.lower The lower limit (inclusive) for the Box-Cox transformation. #' @param bc.upper The upper limit (inclusive) for the Box-Cox transformation. #' @param biasadj Use adjusted back-transformed mean for Box-Cox #' transformations. If TRUE, point forecasts and fitted values are mean #' forecast. Otherwise, these points can be considered the median of the #' forecast densities. #' @param model Output from a previous call to \code{bats}. If model is passed, #' this same model is fitted to \code{y} without re-estimating any parameters. #' @param ... Additional arguments to be passed to \code{auto.arima} when #' choose an ARMA(p, q) model for the errors. (Note that xreg will be ignored, #' as will any arguments concerning seasonality and differencing, but arguments #' controlling the values of p and q will be used.) 
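#' For example, \code{max.p} and \code{max.q} may be passed through to limit
#' the orders considered for the ARMA errors.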
#' @return An object of class "\code{bats}". The generic accessor functions #' \code{fitted.values} and \code{residuals} extract useful features of the #' value returned by \code{bats} and associated functions. The fitted model is #' designated BATS(omega, p,q, phi, m1,...mJ) where omega is the Box-Cox #' parameter and phi is the damping parameter; the error is modelled as an #' ARMA(p,q) process and m1,...,mJ list the seasonal periods used in the model. #' @author Slava Razbash and Rob J Hyndman #' @references De Livera, A.M., Hyndman, R.J., & Snyder, R. D. (2011), #' Forecasting time series with complex seasonal patterns using exponential #' smoothing, \emph{Journal of the American Statistical Association}, #' \bold{106}(496), 1513-1527. #' @keywords ts #' @examples #' #' \dontrun{ #' fit <- bats(USAccDeaths) #' plot(forecast(fit)) #' #' taylor.fit <- bats(taylor) #' plot(forecast(taylor.fit))} #' #' @export bats <- function(y, use.box.cox=NULL, use.trend=NULL, use.damped.trend=NULL, seasonal.periods=NULL, use.arma.errors=TRUE, use.parallel=length(y) > 1000, num.cores=2, bc.lower=0, bc.upper=1, biasadj = FALSE, model=NULL, ...) { if (!is.numeric(y) || NCOL(y) > 1) { stop("y should be a univariate time series") } seriesname <- deparse(substitute(y)) origy <- y attr_y <- attributes(origy) # Get seasonal periods if (is.null(seasonal.periods)) { if (any(class(y) == "msts")) { seasonal.periods <- attr(y, "msts") } else if (class(y) == "ts") { seasonal.periods <- frequency(y) } else { y <- as.ts(y) seasonal.periods <- 1 } } else { # Add ts attributes if (!any(class(y) == "ts")) { y <- msts(y, seasonal.periods) } } seasonal.periods <- unique(pmax(seasonal.periods, 1)) if (all(seasonal.periods == 1)) { seasonal.periods <- NULL } ny <- length(y) y <- na.contiguous(y) if (ny != length(y)) { warning("Missing values encountered. Using longest contiguous portion of time series") if (!is.null(attr_y$tsp)) { attr_y$tsp[1:2] <- range(time(y)) } } # Refit model if available if (!is.null(model)) { refitModel <- try(fitPreviousBATSModel(y, model = model), silent = TRUE) return(refitModel) } # Check for constancy if (is.constant(y)) { fit <- list( y = y, x = matrix(y, nrow = 1, ncol = ny), errors = y * 0, fitted.values = y, seed.states = matrix(y[1]), AIC = -Inf, likelihood = -Inf, variance = 0, alpha = 0.9999, method = "BATS", call = match.call() ) return(structure(fit, class = "bats")) } # Check for non-positive data if (any((y <= 0))) { use.box.cox <- FALSE } if ((!is.null(use.box.cox)) && (!is.null(use.trend)) && (use.parallel)) { if (use.trend && (!is.null(use.damped.trend))) { # In the this case, there is only one alternative. use.parallel <- FALSE } else if (use.trend == FALSE) { # As above, in the this case, there is only one alternative. 
use.parallel <- FALSE } } if (!is.null(seasonal.periods)) { seasonal.mask <- (seasonal.periods == 1) seasonal.periods <- seasonal.periods[!seasonal.mask] } # Check if there is anything to parallelise if (is.null(seasonal.periods) && !is.null(use.box.cox) && !is.null(use.trend)) { use.parallel <- FALSE } if (is.null(use.box.cox)) { use.box.cox <- c(FALSE, TRUE) } if (any(use.box.cox)) { init.box.cox <- BoxCox.lambda(y, lower = bc.lower, upper = bc.upper) } else { init.box.cox <- NULL } if (is.null(use.trend)) { use.trend <- c(FALSE, TRUE) } else if (use.trend == FALSE) { use.damped.trend <- FALSE } if (is.null(use.damped.trend)) { use.damped.trend <- c(FALSE, TRUE) } y <- as.numeric(y) best.aic <- NULL if (use.parallel) { # Set up the control array control.array <- NULL for (box.cox in use.box.cox) { for (trend in use.trend) { for (damping in use.damped.trend) { if (!trend && damping) { next } control.line <- c(box.cox, trend, damping) if (!is.null(control.array)) { control.array <- rbind(control.array, control.line) } else { control.array <- control.line } } } } ## Fit the models if (is.null(num.cores)) { num.cores <- detectCores(all.tests = FALSE, logical = TRUE) } clus <- makeCluster(num.cores) models.list <- clusterApplyLB(clus, c(1:nrow(control.array)), parFilterSpecifics, y = y, control.array = control.array, seasonal.periods = seasonal.periods, use.arma.errors = use.arma.errors, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj, ...) stopCluster(clus) ## Choose the best model #### Get the AICs aics <- numeric(nrow(control.array)) for (i in 1:nrow(control.array)) { aics[i] <- models.list[[i]]$AIC } best.number <- which.min(aics) best.model <- models.list[[best.number]] } else { for (box.cox in use.box.cox) { for (trend in use.trend) { for (damping in use.damped.trend) { current.model <- filterSpecifics( y, box.cox = box.cox, trend = trend, damping = damping, seasonal.periods = seasonal.periods, use.arma.errors = use.arma.errors, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj, ... ) if (!is.null(best.aic)) { if (current.model$AIC < best.aic) { best.aic <- current.model$AIC best.model <- current.model } } else { best.model <- current.model best.aic <- best.model$AIC } } } } } best.model$call <- match.call() if (best.model$optim.return.code != 0) { warning("optim() did not converge.") } attributes(best.model$fitted.values) <- attributes(best.model$errors) <- attr_y best.model$y <- origy best.model$series <- seriesname best.model$method <- "BATS" return(best.model) } filterSpecifics <- function(y, box.cox, trend, damping, seasonal.periods, use.arma.errors, force.seasonality=FALSE, init.box.cox=NULL, bc.lower=0, bc.upper=1, biasadj=FALSE, ...) 
{ if (!trend && damping) { return(list(AIC = Inf)) } first.model <- fitSpecificBATS(y, use.box.cox = box.cox, use.beta = trend, use.damping = damping, seasonal.periods = seasonal.periods, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj) if (!is.null(seasonal.periods) && !force.seasonality) { non.seasonal.model <- fitSpecificBATS(y, use.box.cox = box.cox, use.beta = trend, use.damping = damping, seasonal.periods = NULL, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj) if (first.model$AIC > non.seasonal.model$AIC) { seasonal.periods <- NULL first.model <- non.seasonal.model } } if (use.arma.errors) { suppressWarnings(arma <- auto.arima(as.numeric(first.model$errors), d = 0, ...)) p <- arma$arma[1] q <- arma$arma[2] if (p != 0 || q != 0) { # Did auto.arima() find any AR() or MA() coefficients? if (p != 0) { ar.coefs <- numeric(p) } else { ar.coefs <- NULL } if (q != 0) { ma.coefs <- numeric(q) } else { ma.coefs <- NULL } starting.params <- first.model$parameters # printCASE(box.cox, trend, damping, seasonal.periods, ar.coefs, ma.coefs, p, q) second.model <- fitSpecificBATS(y, use.box.cox = box.cox, use.beta = trend, use.damping = damping, seasonal.periods = seasonal.periods, ar.coefs = ar.coefs, ma.coefs = ma.coefs, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj) if (second.model$AIC < first.model$AIC) { return(second.model) } else { return(first.model) } } else { # Else auto.arima() did not find any AR() or MA() coefficients return(first.model) } } else { return(first.model) } } parFilterSpecifics <- function(control.number, control.array, y, seasonal.periods, use.arma.errors, force.seasonality=FALSE, init.box.cox=NULL, bc.lower=0, bc.upper=1, biasadj=FALSE, ...) { box.cox <- control.array[control.number, 1] trend <- control.array[control.number, 2] damping <- control.array[control.number, 3] if (!trend && damping) { return(list(AIC = Inf)) } first.model <- fitSpecificBATS(y, use.box.cox = box.cox, use.beta = trend, use.damping = damping, seasonal.periods = seasonal.periods, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj) if (!is.null(seasonal.periods) && !force.seasonality) { non.seasonal.model <- fitSpecificBATS(y, use.box.cox = box.cox, use.beta = trend, use.damping = damping, seasonal.periods = NULL, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj) if (first.model$AIC > non.seasonal.model$AIC) { seasonal.periods <- NULL first.model <- non.seasonal.model } } if (use.arma.errors) { suppressWarnings(arma <- auto.arima(as.numeric(first.model$errors), d = 0, ...)) p <- arma$arma[1] q <- arma$arma[2] if (p != 0 || q != 0) { # Did auto.arima() find any AR() or MA() coefficients?
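      # If so, refit the BATS model with ARMA(p, q) errors, seeding the AR/MA
      # coefficients at zero, and keep the refit below only when it lowers the
      # AIC relative to the ARMA-free fit.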
if (p != 0) { ar.coefs <- numeric(p) } else { ar.coefs <- NULL } if (q != 0) { ma.coefs <- numeric(q) } else { ma.coefs <- NULL } starting.params <- first.model$parameters # printCASE(box.cox, trend, damping, seasonal.periods, ar.coefs, ma.coefs, p, q) second.model <- fitSpecificBATS(y, use.box.cox = box.cox, use.beta = trend, use.damping = damping, seasonal.periods = seasonal.periods, ar.coefs = ar.coefs, ma.coefs = ma.coefs, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj) if (second.model$AIC < first.model$AIC) { return(second.model) } else { return(first.model) } } else { # Else auto.arima() did not find any AR() or MA() coefficients return(first.model) } } else { return(first.model) } }
#' @rdname fitted.Arima
#' @export
fitted.bats <- function(object, h=1, ...) { if (h == 1) { return(object$fitted.values) } else { return(hfitted(object = object, h = h, FUN = "bats", ...)) } }
#' @export
print.bats <- function(x, ...) { cat(as.character(x)) cat("\n") cat("\nCall: ") print(x$call) cat("\nParameters") if (!is.null(x$lambda)) { cat("\n  Lambda: ") cat(round(x$lambda, 6)) } cat("\n  Alpha: ") cat(x$alpha) if (!is.null(x$beta)) { cat("\n  Beta: ") cat(x$beta) cat("\n  Damping Parameter: ") cat(round(x$damping.parameter, 6)) } if (!is.null(x$gamma.values)) { cat("\n  Gamma Values: ") cat(x$gamma.values) } if (!is.null(x$ar.coefficients)) { cat("\n  AR coefficients: ") cat(round(x$ar.coefficients, 6)) } if (!is.null(x$ma.coefficients)) { cat("\n  MA coefficients: ") cat(round(x$ma.coefficients, 6)) } cat("\n") cat("\nSeed States:\n") print(x$seed.states) cat("\nSigma: ") cat(sqrt(x$variance)) cat("\nAIC: ") cat(x$AIC) cat("\n") }
#' Plot components from BATS model
#'
#' Produces a plot of the level, slope and seasonal components from a BATS or
#' TBATS model. The plotted components are Box-Cox transformed using the estimated transformation parameter.
#'
#' @param x Object of class \dQuote{bats/tbats}.
#' @param object Object of class \dQuote{bats/tbats}.
#' @param main Main title for plot.
#' @param range.bars Logical indicating if each plot should have a bar at its
#' right side representing relative size. If NULL, automatic selection takes
#' place.
#' @param ... Other plotting parameters passed to \code{\link[graphics]{par}}.
#' @return None. The function produces a plot.
#' @author Rob J Hyndman
#' @seealso \code{\link{bats}}, \code{\link{tbats}}
#' @keywords hplot
#'
#' @export
plot.bats <- function(x, main="Decomposition by BATS model", ...) { # Get original data, transform if necessary if (!is.null(x$lambda)) { y <- BoxCox(x$y, x$lambda) lambda <- attr(y, "lambda") } else { y <- x$y } # Extract states out <- cbind(observed = c(y), level = x$x[1, ]) if (!is.null(x$beta)) { out <- cbind(out, slope = x$x[2, ]) } nonseas <- 2 + !is.null(x$beta) # No. non-seasonal columns in out nseas <- length(x$gamma.values) # No. seasonal periods if (!is.null(x$gamma.values)) { seas.states <- x$x[-(1:(1 + !is.null(x$beta))), ] j <- cumsum(c(1, x$seasonal.periods)) for (i in 1:nseas) out <- cbind(out, season = seas.states[j[i], ]) if (nseas > 1) { colnames(out)[nonseas + 1:nseas] <- paste("season", 1:nseas, sep = "") } } # Add time series characteristics out <- ts(out) tsp(out) <- tsp(y) # Do the plot plot.ts(out, main = main, nc = 1, ...)
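  # Usage sketch (not run): plot(bats(USAccDeaths)) dispatches here and shows
  # the observed series, the level, the slope (when the model has one) and one
  # panel per seasonal period.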
}
#' @rdname is.ets
#' @export
is.bats <- function(x) { inherits(x, "bats") }
forecast/R/clean.R0000644000176200001440000001547313571300652013502 0ustar liggesusers# Functions to remove outliers and fill missing values in a time series
# Nothing for multiple seasonality yet.
# na.interp fills in missing values
# Uses linear interpolation for non-seasonal series
# Adds seasonality based on a periodic stl decomposition for seasonal series
# Argument lambda allows for Box-Cox transformation
#' Interpolate missing values in a time series
#'
#' By default, uses linear interpolation for non-seasonal series. For seasonal series, a
#' robust STL decomposition is first computed. Then a linear interpolation is applied to the
#' seasonally adjusted data, and the seasonal component is added back.
#'
#' A more general and flexible approach is available using \code{na.approx} in
#' the \code{zoo} package.
#'
#' @param x time series
#' @param linear Should linear interpolation be used?
#' @inheritParams forecast
#' @return Time series
#' @author Rob J Hyndman
#' @seealso \code{\link[forecast]{tsoutliers}}
#' @keywords ts
#' @examples
#'
#' data(gold)
#' plot(na.interp(gold))
#'
#' @export
na.interp <- function(x, lambda=NULL, linear=(frequency(x) <= 1 | sum(!is.na(x)) <= 2 * frequency(x))) { missng <- is.na(x) # Do nothing if no missing values if (sum(missng) == 0L) { return(x) } origx <- x rangex <- range(x, na.rm=TRUE) drangex <- rangex[2L] - rangex[1L] # Convert to ts if (is.null(tsp(x))) { x <- ts(x) } if (length(dim(x)) > 1) { if (NCOL(x) == 1) { x <- x[, 1] } else { stop("The time series is not univariate.") } } # Transform if requested if (!is.null(lambda)) { x <- BoxCox(x, lambda = lambda) lambda <- attr(x, "lambda") } freq <- frequency(x) tspx <- tsp(x) n <- length(x) tt <- 1:n idx <- tt[!missng] if (linear) { # Use linear interpolation x <- ts(approx(idx, x[idx], tt, rule = 2)$y) } else { # Otherwise estimate seasonal component robustly # Then add to linear interpolation of seasonally adjusted series # Fit Fourier series for seasonality and a polynomial for the trend, # just to get something reasonable to start with if ("msts" %in% class(x)) { K <- pmin(trunc(attributes(x)$msts / 2), 20L) } else { K <- min(trunc(freq / 2), 5) } X <- cbind(fourier(x, K), poly(tt, degree = pmin(pmax(trunc(n / 10), 1), 6L))) fit <- lm(x ~ X, na.action = na.exclude) pred <- predict(fit, newdata = data.frame(X)) x[missng] <- pred[missng] # Now re-do it with stl to get better results fit <- mstl(x, robust = TRUE) # Interpolate seasonally adjusted values sa <- seasadj(fit) sa <- approx(idx, sa[idx], 1:n, rule = 2)$y # Replace original missing values seas <- seasonal(fit) if (NCOL(seas) > 1) { seas <- rowSums(seas) } x[missng] <- sa[missng] + seas[missng] } # Backtransform if required if (!is.null(lambda)) { x <- InvBoxCox(x, lambda = lambda) } # Ensure time series characteristics not lost tsp(x) <- tspx # Check stability and use linear interpolation if there is a problem if(!linear & (max(x) > rangex[2L]+0.5*drangex | min(x) < rangex[1L]-0.5*drangex)) return(na.interp(origx, lambda=lambda, linear=TRUE)) else return(x) }
# Function to identify outliers and replace them with better values
# Missing values replaced as well if replace.missing=TRUE
#' Identify and replace outliers and missing values in a time series
#'
#' Uses supsmu for non-seasonal series and a robust STL decomposition for
#' seasonal series. To estimate missing values and outlier replacements,
#' linear interpolation is used on the (possibly seasonally adjusted) series.
#'
#' @param x time series
#' @param replace.missing If TRUE, it not only replaces outliers, but also
#' interpolates missing values.
#' @inheritParams forecast
#' @return Time series
#' @author Rob J Hyndman
#' @seealso \code{\link[forecast]{na.interp}},
#' \code{\link[forecast]{tsoutliers}}, \code{\link[stats]{supsmu}}
#' @keywords ts
#' @examples
#'
#' cleangold <- tsclean(gold)
#'
#' @export
tsclean <- function(x, replace.missing=TRUE, lambda = NULL) { outliers <- tsoutliers(x, lambda = lambda) x[outliers$index] <- outliers$replacements if (replace.missing) { x <- na.interp(x, lambda = lambda) } return(x) }
# Function to identify time series outliers
#' Identify and replace outliers in a time series
#'
#' Uses supsmu for non-seasonal series and a periodic stl decomposition for
#' seasonal series to identify outliers and estimate their replacements.
#'
#'
#' @param x time series
#' @param iterate the number of iterations (used only for non-seasonal series)
#' @inheritParams forecast
#' @return \item{index}{Indexes of the identified outlier(s)}
#' \item{replacements}{Suggested numeric values to replace identified outliers}
#' @author Rob J Hyndman
#' @seealso \code{\link[forecast]{na.interp}}, \code{\link[forecast]{tsclean}}
#' @keywords ts
#' @examples
#'
#' data(gold)
#' tsoutliers(gold)
#'
#' @export
tsoutliers <- function(x, iterate=2, lambda=NULL) { n <- length(x) freq <- frequency(x) # Identify and fill missing values missng <- is.na(x) nmiss <- sum(missng) if (nmiss > 0L) { xx <- na.interp(x, lambda = lambda) } else { xx <- x } # Check if constant if (is.constant(xx)) { return(list(index = integer(0), replacements = numeric(0))) } # Transform if requested if (!is.null(lambda)) { xx <- BoxCox(xx, lambda = lambda) lambda <- attr(xx, "lambda") } # Seasonally adjust data if necessary if (freq > 1 && n > 2 * freq) { fit <- mstl(xx, robust=TRUE) # Check if seasonality is sufficient to warrant adjustment rem <- remainder(fit) detrend <- xx - trendcycle(fit) strength <- 1 - var(rem) / var(detrend) if (strength >= 0.6) { xx <- seasadj(fit) } } # Use super-smoother on the (seasonally adjusted) data tt <- 1:n mod <- supsmu(tt, xx) resid <- xx - mod$y # Make sure missing values are not interpreted as outliers if (nmiss > 0L) { resid[missng] <- NA } # Limits of acceptable residuals resid.q <- quantile(resid, prob = c(0.25, 0.75), na.rm = TRUE) iqr <- diff(resid.q) limits <- resid.q + 3 * iqr * c(-1, 1) # Find residuals outside limits if ((limits[2] - limits[1]) > 1e-14) { outliers <- which((resid < limits[1]) | (resid > limits[2])) } else { outliers <- numeric(0) } # Replace all missing values including outliers x[outliers] <- NA x <- na.interp(x, lambda = lambda) # Do no more than 2 iterations regardless of the value of iterate if (iterate > 1) { tmp <- tsoutliers(x, iterate = 1, lambda = lambda) if (length(tmp$index) > 0) # Found some more { outliers <- sort(unique(c(outliers, tmp$index))) x[outliers] <- NA x <- na.interp(x, lambda = lambda) } } # Return outlier indexes and replacements return(list(index = outliers, replacements = x[outliers])) }
forecast/R/baggedModel.R0000644000176200001440000001707313553673010014611 0ustar liggesusers##
#' Forecasting using a bagged model
#'
#' The bagged model forecasting method.
#'
#' This function implements the bagged model forecasting method described in
#' Bergmeir et al. (2016).
By default, the \code{\link{ets}} function is applied to all #' bootstrapped series. Base models other than \code{\link{ets}} can be given by the #' parameter \code{fn}. Using the default parameters, the function #' \code{\link{bld.mbb.bootstrap}} is used to calculate the bootstrapped series #' with the Box-Cox and Loess-based decomposition (BLD) bootstrap. The function #' \code{\link{forecast.baggedModel}} can then be used to calculate forecasts. #' #' \code{baggedETS} is a wrapper for \code{baggedModel}, setting \code{fn} to "ets". #' This function is included for backwards compatibility only, and may be #' deprecated in the future. #' #' @aliases print.baggedModel #' #' @param y A numeric vector or time series of class \code{ts}. #' @param bootstrapped_series bootstrapped versions of y. #' @param fn the forecast function to use. Default is \code{\link{ets}}. #' @param \dots Other arguments passed to the forecast function. #' @return Returns an object of class "\code{baggedModel}". #' #' The function \code{print} is used to obtain and print a summary of the #' results. #' #' \item{models}{A list containing the fitted ensemble models.} #' \item{method}{The function for producing a forecastable model.} #' \item{y}{The original time series.} #' \item{bootstrapped_series}{The bootstrapped series.} #' \item{modelargs}{The arguments passed through to \code{fn}.} #' \item{fitted}{Fitted values (one-step forecasts). The #' mean of the fitted values is calculated over the ensemble.} #' \item{residuals}{Original values minus fitted values.} #' @author Christoph Bergmeir, Fotios Petropoulos #' @references Bergmeir, C., R. J. Hyndman, and J. M. Benitez (2016). Bagging #' Exponential Smoothing Methods using STL Decomposition and Box-Cox #' Transformation. International Journal of Forecasting 32, 303-312. #' @keywords ts #' @examples #' fit <- baggedModel(WWWusage) #' fcast <- forecast(fit) #' plot(fcast) #' #' @export baggedModel <- function(y, bootstrapped_series=bld.mbb.bootstrap(y, 100), fn=ets, ...) { # Add package info in case forecast not loaded if(!is.function(fn)){ warning(paste0("Using character specification for `fn` is deprecated. Please use `fn = ", match.arg(fn,c("ets", "auto.arima")), "`.")) fn <- utils::getFromNamespace(match.arg(fn,c("ets", "auto.arima")), "forecast") } mod_boot <- lapply(bootstrapped_series, function(x) { mod <- fn(x, ...) }) # Return results out <- list() out$y <- as.ts(y) out$bootstrapped_series <- bootstrapped_series out$models <- mod_boot out$modelargs <- list(...) fitted_boot <- lapply(out$models, fitted) fitted_boot <- as.matrix(as.data.frame(fitted_boot)) out$fitted <- ts(apply(fitted_boot, 1, mean)) tsp(out$fitted) <- tsp(out$y) out$residuals <- out$y - out$fitted out$series <- deparse(substitute(y)) out$method <- "baggedModel" out$call <- match.call() return(structure(out, class = c("baggedModel"))) } #' @rdname baggedModel #' @export baggedETS <- function(y, bootstrapped_series=bld.mbb.bootstrap(y, 100), ...) { out <- baggedModel(y, bootstrapped_series, fn = ets, ...) class(out) <- c("baggedETS", class(out)) out } #' Forecasting using a bagged model #' #' Returns forecasts and other information for bagged models. #' #' Intervals are calculated as min and max values over the point forecasts from #' the models in the ensemble. I.e., the intervals are not prediction #' intervals, but give an indication of how different the forecasts within the #' ensemble are. 
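#'
#' For example, with the default ensemble of 100 bootstrapped series from
#' \code{\link{baggedModel}}, the interval at each horizon is simply the
#' \code{range()} of the 100 point forecasts, and the reported \code{level} is
#' 100.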
#' #' @param object An object of class "\code{baggedModel}" resulting from a call to #' \code{\link{baggedModel}}. #' @param h Number of periods for forecasting. #' @param ... Other arguments, passed on to the \code{\link{forecast}} function of the original method #' @return An object of class "\code{forecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' An object of class "\code{forecast}" is a list containing at least the #' following elements: #' \item{model}{A list containing information about the fitted model} #' \item{method}{The name of the forecasting method as a character string} #' \item{mean}{Point forecasts as a time series} #' \item{lower}{Lower limits for prediction intervals} #' \item{upper}{Upper limits for prediction intervals} #' \item{level}{The confidence values associated with the prediction intervals} #' \item{x}{The original time series (either \code{object} itself or the #' time series used to create the model stored as \code{object}).} #' \item{xreg}{The external regressors used in fitting (if given).} #' \item{residuals}{Residuals from the fitted model. That #' is x minus fitted values.} #' \item{fitted}{Fitted values (one-step forecasts)} #' @author Christoph Bergmeir, Fotios Petropoulos #' @seealso \code{\link{baggedModel}}. #' @references Bergmeir, C., R. J. Hyndman, and J. M. Benitez (2016). Bagging #' Exponential Smoothing Methods using STL Decomposition and Box-Cox #' Transformation. International Journal of Forecasting 32, 303-312. #' @keywords ts #' @examples #' fit <- baggedModel(WWWusage) #' fcast <- forecast(fit) #' plot(fcast) #' #' \dontrun{ #' fit2 <- baggedModel(WWWusage, fn="auto.arima") #' fcast2 <- forecast(fit2) #' plot(fcast2) #' accuracy(fcast2)} #' #' @export forecast.baggedModel <- function(object, h=ifelse(frequency(object$y) > 1, 2 * frequency(object$y), 10), ...) 
{ out <- list( model = object, series = object$series, x = object$y, method = object$method, fitted = object$fitted, residuals = object$residuals ) # out <- object tspx <- tsp(out$x) forecasts_boot <- lapply(out$model$models, function(mod) { if (inherits(mod, "ets")) { forecast(mod, PI = FALSE, h = h, ...)$mean } else { forecast(mod, h = h, ...)$mean } }) forecasts_boot <- as.matrix(as.data.frame(forecasts_boot)) colnames(forecasts_boot) <- NULL if (!is.null(tspx)) { start.f <- tspx[2] + 1 / frequency(out$x) } else { start.f <- length(out$x) + 1 } # out <- list() out$forecasts_boot <- forecasts_boot # browser() # out$model$models out$mean <- ts(apply(forecasts_boot, 1, mean), frequency = frequency(out$x), start = start.f) out$median <- ts(apply(forecasts_boot, 1, median)) out$lower <- ts(apply(forecasts_boot, 1, min)) out$upper <- ts(apply(forecasts_boot, 1, max)) out$level <- 100 tsp(out$median) <- tsp(out$lower) <- tsp(out$upper) <- tsp(out$mean) class(out) <- "forecast" out } # fitted.baggedModel <- function(object, h=1, accum_func=mean, ...){ # # fitted_boot <- lapply(object$models, fitted, h) # fitted_boot <- as.matrix(as.data.frame(fitted_boot)) # fitted_boot <- apply(fitted_boot, 2, accum_func) # fitted_boot # } # residuals.baggedModel <- function(object, h=1, ...){ # # residuals_boot <- lapply(object$models, residuals, h) # residuals_boot <- as.matrix(as.data.frame(residuals_boot)) # residuals_boot # # #Alternative implementation: # #object$x - fitted(object, h) # } #' @export print.baggedModel <- function(x, digits = max(3, getOption("digits") - 3), ...) { cat("Series:", x$series, "\n") cat("Model: ", x$method, "\n") cat("Call: ") print(x$call) # print(x$model) # cat("\nsigma^2 estimated as ", format(mean(residuals(x)^2,na.rm=TRUE), digits = digits), "\n", sep = "") invisible(x) } #' @rdname is.ets #' @export is.baggedModel <- function(x) { inherits(x, "baggedModel") } forecast/R/nnetar.R0000644000176200001440000005024713553673010013706 0ustar liggesusers# Defaults: # For non-seasonal data, p chosen using AIC from linear AR(p) model # For seasonal data, p chosen using AIC from linear AR(p) model after # seasonally adjusting with STL decomposition, and P=1 # size set to average of number of inputs and number of outputs: (p+P+1)/2 # if xreg is included then size = (p+P+ncol(xreg)+1)/2 #' Neural Network Time Series Forecasts #' #' Feed-forward neural networks with a single hidden layer and lagged inputs #' for forecasting univariate time series. #' #' A feed-forward neural network is fitted with lagged values of \code{y} as #' inputs and a single hidden layer with \code{size} nodes. The inputs are for #' lags 1 to \code{p}, and lags \code{m} to \code{mP} where #' \code{m=frequency(y)}. If \code{xreg} is provided, its columns are also #' used as inputs. If there are missing values in \code{y} or #' \code{xreg}, the corresponding rows (and any others which depend on them as #' lags) are omitted from the fit. A total of \code{repeats} networks are #' fitted, each with random starting weights. These are then averaged when #' computing forecasts. The network is trained for one-step forecasting. #' Multi-step forecasts are computed recursively. #' #' For non-seasonal data, the fitted model is denoted as an NNAR(p,k) model, #' where k is the number of hidden nodes. This is analogous to an AR(p) model #' but with nonlinear functions. For seasonal data, the fitted model is called #' an NNAR(p,P,k)[m] model, which is analogous to an ARIMA(p,0,0)(P,0,0)[m] #' model but with nonlinear functions. 
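#'
#' For example, on monthly data a call such as \code{nnetar(USAccDeaths)}
#' selects p by AIC on the seasonally adjusted series, keeps the default
#' P = 1 seasonal lag, and reports a model of the form NNAR(p,1,k)[12],
#' fitted by averaging \code{repeats = 20} networks.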
#' #' @aliases print.nnetar print.nnetarmodels #' #' @param y A numeric vector or time series of class \code{ts}. #' @param p Embedding dimension for non-seasonal time series. Number of #' non-seasonal lags used as inputs. For non-seasonal time series, the default #' is the optimal number of lags (according to the AIC) for a linear AR(p) #' model. For seasonal time series, the same method is used but applied to #' seasonally adjusted data (from an stl decomposition). #' @param P Number of seasonal lags used as inputs. #' @param size Number of nodes in the hidden layer. Default is half of the #' number of input nodes (including external regressors, if given) plus 1. #' @param repeats Number of networks to fit with different random starting #' weights. These are then averaged when producing forecasts. #' @param xreg Optionally, a vector or matrix of external regressors, which #' must have the same number of rows as \code{y}. Must be numeric. #' @param model Output from a previous call to \code{nnetar}. If model is #' passed, this same model is fitted to \code{y} without re-estimating any #' parameters. #' @param subset Optional vector specifying a subset of observations to be used #' in the fit. Can be an integer index vector or a logical vector the same #' length as \code{y}. All observations are used by default. #' @param scale.inputs If TRUE, inputs are scaled by subtracting the column #' means and dividing by their respective standard deviations. If \code{lambda} #' is not \code{NULL}, scaling is applied after Box-Cox transformation. #' @param x Deprecated. Included for backwards compatibility. #' @param \dots Other arguments passed to \code{\link[nnet]{nnet}} for #' \code{nnetar}. #' @inheritParams forecast #' #' @return Returns an object of class "\code{nnetar}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{nnetar}. #' #' \item{model}{A list containing information about the fitted model} #' \item{method}{The name of the forecasting method as a character string} #' \item{x}{The original time series.} #' \item{xreg}{The external regressors used in fitting (if given).} #' \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} #' \item{fitted}{Fitted values (one-step forecasts)} #' \item{...}{Other arguments} #' #' @author Rob J Hyndman and Gabriel Caceres #' @keywords ts #' @examples #' fit <- nnetar(lynx) #' fcast <- forecast(fit) #' plot(fcast) #' #' ## Arguments can be passed to nnet() #' fit <- nnetar(lynx, decay=0.5, maxit=150) #' plot(forecast(fit)) #' lines(lynx) #' #' ## Fit model to first 100 years of lynx data #' fit <- nnetar(window(lynx,end=1920), decay=0.5, maxit=150) #' plot(forecast(fit,h=14)) #' lines(lynx) #' #' ## Apply fitted model to later data, including all optional arguments #' fit2 <- nnetar(window(lynx,start=1921), model=fit) #' #' @export nnetar <- function(y, p, P=1, size, repeats=20, xreg=NULL, lambda=NULL, model=NULL, subset=NULL, scale.inputs=TRUE, x=y, ...) 
{ useoldmodel <- FALSE yname <- deparse(substitute(y)) if (!is.null(model)) { # Use previously fitted model useoldmodel <- TRUE # Check for conflicts between new and old data: # Check model class if (!is.nnetar(model)) { stop("Model must be a nnetar object") } # Check new data m <- max(round(frequency(model$x)), 1L) minlength <- max(c(model$p, model$P * m)) + 1 if (length(x) < minlength) { stop(paste("Series must be at least of length", minlength, "to use fitted model")) } if (tsp(as.ts(x))[3] != m) { warning(paste("Data frequency doesn't match fitted model, coercing to frequency =", m)) x <- ts(x, frequency = m) } # Check xreg if (!is.null(model$xreg)) { if (is.null(xreg)) { stop("No external regressors provided") } if (NCOL(xreg) != NCOL(model$xreg)) { stop("Number of external regressors does not match fitted model") } } # Update parameters with previous model lambda <- model$lambda size <- model$size p <- model$p P <- model$P if (P > 0) { lags <- sort(unique(c(1:p, m * (1:P)))) } else { lags <- 1:p } if (is.null(model$scalex)) { scale.inputs <- FALSE } } else { # when not using an old model if (length(y) < 3) { stop("Not enough data to fit a model") } # Check for constant data in time series constant_data <- is.constant(na.interp(x)) if (constant_data){ warning("Constant data, setting p=1, P=0, lambda=NULL, scale.inputs=FALSE") scale.inputs <- FALSE lambda <- NULL p <- 1 P <- 0 } ## Check for constant data in xreg if (!is.null(xreg)){ constant_xreg <- any(apply(as.matrix(xreg), 2, function(x) is.constant(na.interp(x)))) if (constant_xreg){ warning("Constant xreg column, setting scale.inputs=FALSE") scale.inputs <- FALSE } } } # Check for NAs in x if (any(is.na(x))) { warning("Missing values in x, omitting rows") } # Transform data if (!is.null(lambda)) { xx <- BoxCox(x, lambda) lambda <- attr(xx, "lambda") } else { xx <- x } ## Check whether to use a subset of the data xsub <- rep(TRUE, length(x)) if (is.numeric(subset)) { xsub[-subset] <- FALSE } if (is.logical(subset)) { xsub <- subset } # Scale series scalex <- NULL if (scale.inputs) { if (useoldmodel) { scalex <- model$scalex } else { tmpx <- scale(xx[xsub], center = TRUE, scale = TRUE) scalex <- list( center = attr(tmpx, "scaled:center"), scale = attr(tmpx, "scaled:scale") ) } xx <- scale(xx, center = scalex$center, scale = scalex$scale) xx <- xx[, 1] } # Check xreg class & dim xxreg <- NULL scalexreg <- NULL if (!is.null(xreg)) { xxreg <- xreg <- as.matrix(xreg) if (length(x) != NROW(xreg)) { stop("Number of rows in xreg does not match series length") } # Check for NAs in xreg if (any(is.na(xreg))) { warning("Missing values in xreg, omitting rows") } # Scale xreg if (scale.inputs) { if (useoldmodel) { scalexreg <- model$scalexreg } else { tmpx <- scale(xxreg[xsub, ], center = TRUE, scale = TRUE) scalexreg <- list( center = attr(tmpx, "scaled:center"), scale = attr(tmpx, "scaled:scale") ) } xxreg <- scale(xxreg, center = scalexreg$center, scale = scalexreg$scale) } } # Set up lagged matrix n <- length(xx) xx <- as.ts(xx) m <- max(round(frequency(xx)), 1L) if (!useoldmodel) { if (m == 1) { if (missing(p)) { p <- max(length(ar(na.interp(xx))$ar), 1) } if (p >= n) { warning("Reducing number of lagged inputs due to short series") p <- n - 1 } lags <- 1:p if (P > 1) { warning("Non-seasonal data, ignoring seasonal lags") } P <- 0 } else { if (missing(p)) { if (n > 2 * m) { x.sa <- seasadj(mstl(na.interp(xx))) } else { x.sa <- na.interp(xx) } p <- max(length(ar(x.sa)$ar), 1) } if (p >= n) { warning("Reducing number of lagged inputs due to 
short series") p <- n - 1 } if (P > 0 && n >= m * P + 2) { lags <- sort(unique(c(1:p, m * (1:P)))) } else { lags <- 1:p if (P > 0) { warning("Series too short for seasonal lags") P <- 0 } } } } maxlag <- max(lags) nlag <- length(lags) y <- xx[-(1:maxlag)] lags.X <- matrix(NA_real_, ncol = nlag, nrow = n - maxlag) for (i in 1:nlag) lags.X[, i] <- xx[(maxlag - lags[i] + 1):(n - lags[i])] # Add xreg into lagged matrix lags.X <- cbind(lags.X, xxreg[-(1:maxlag), ]) if (missing(size)) { size <- round((NCOL(lags.X) + 1) / 2) } # Remove missing values if present j <- complete.cases(lags.X, y) ## Remove values not in subset j <- j & xsub[-(1:maxlag)] ## Stop if there's no data to fit (e.g. due to NAs or NaNs) if (NROW(lags.X[j,, drop=FALSE]) == 0) { stop("No data to fit (possibly due to NA or NaN)") } ## Fit average ANN. if (useoldmodel) { fit <- oldmodel_avnnet(lags.X[j, , drop = FALSE], y[j], size = size, model) } else { fit <- avnnet(lags.X[j, , drop=FALSE], y[j], size = size, repeats = repeats, ...) } # Return results out <- list() out$x <- as.ts(x) out$m <- m out$p <- p out$P <- P out$scalex <- scalex out$scalexreg <- scalexreg out$size <- size out$xreg <- xreg out$lambda <- lambda out$subset <- (1:length(x))[xsub] out$model <- fit out$nnetargs <- list(...) if (useoldmodel) { out$nnetargs <- model$nnetargs } if (NROW(lags.X[j,, drop=FALSE]) == 1){ fits <- c(rep(NA_real_, maxlag), mean(sapply(fit, predict))) } else{ fits <- c(rep(NA_real_, maxlag), rowMeans(sapply(fit, predict))) } if (scale.inputs) { fits <- fits * scalex$scale + scalex$center } fits <- ts(fits) if (!is.null(lambda)) { fits <- InvBoxCox(fits, lambda) } out$fitted <- ts(rep(NA_real_, length(out$x))) out$fitted[c(rep(TRUE, maxlag), j)] <- fits tsp(out$fitted) <- tsp(out$x) out$residuals <- out$x - out$fitted out$lags <- lags out$series <- yname out$method <- paste("NNAR(", p, sep = "") if (P > 0) { out$method <- paste(out$method, ",", P, sep = "") } out$method <- paste(out$method, ",", size, ")", sep = "") if (P > 0) { out$method <- paste(out$method, "[", m, "]", sep = "") } out$call <- match.call() return(structure(out, class = c("nnetar"))) } # Aggregate several neural network models avnnet <- function(x, y, repeats, linout=TRUE, trace=FALSE, ...) { mods <- list() for (i in 1:repeats) mods[[i]] <- nnet::nnet(x, y, linout = linout, trace = trace, ...) return(structure(mods, class = "nnetarmodels")) } # Fit old model to new data oldmodel_avnnet <- function(x, y, size, model) { repeats <- length(model$model) args <- list(x = x, y = y, size = size, linout = 1, trace = FALSE) # include additional nnet arguments args <- c(args, model$nnetargs) # set iterations to zero (i.e. weights stay fixed) args$maxit <- 0 mods <- list() for (i in 1:repeats) { args$Wts <- model$model[[i]]$wts mods[[i]] <- do.call(nnet::nnet, args) } return(structure(mods, class = "nnetarmodels")) } #' @export print.nnetarmodels <- function(x, ...) { cat(paste("\nAverage of", length(x), "networks, each of which is\n")) print(x[[1]]) } #' Forecasting using neural network models #' #' Returns forecasts and other information for univariate neural network #' models. #' #' Prediction intervals are calculated through simulations and can be slow. #' Note that if the network is too complex and overfits the data, the residuals #' can be arbitrarily small; if used for prediction interval calculations, they #' could lead to misleadingly small values. It is possible to use out-of-sample #' residuals to ameliorate this, see examples. 
#' #' @param object An object of class "\code{nnetar}" resulting from a call to #' \code{\link{nnetar}}. #' @param h Number of periods for forecasting. If \code{xreg} is used, \code{h} #' is ignored and the number of forecast periods is set to the number of rows #' of \code{xreg}. #' @param PI If TRUE, prediction intervals are produced, otherwise only point #' forecasts are calculated. If \code{PI} is FALSE, then \code{level}, #' \code{fan}, \code{bootstrap} and \code{npaths} are all ignored. #' @param level Confidence level for prediction intervals. #' @param fan If \code{TRUE}, level is set to \code{seq(51,99,by=3)}. This is #' suitable for fan plots. #' @param xreg Future values of external regressor variables. #' @param bootstrap If \code{TRUE}, then prediction intervals computed using #' simulations with resampled residuals rather than normally distributed #' errors. Ignored if \code{innov} is not \code{NULL}. #' @param npaths Number of sample paths used in computing simulated prediction #' intervals. #' @param innov Values to use as innovations for prediction intervals. Must be #' a matrix with \code{h} rows and \code{npaths} columns (vectors are coerced #' into a matrix). If present, \code{bootstrap} is ignored. #' @param ... Additional arguments passed to \code{\link{simulate.nnetar}} #' @inheritParams forecast #' #' @return An object of class "\code{forecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{forecast.nnetar}. #' #' An object of class "\code{forecast}" is a list containing at least the #' following elements: #' \item{model}{A list containing information about the fitted model} #' \item{method}{The name of the forecasting method as a character string} #' \item{mean}{Point forecasts as a time series} #' \item{lower}{Lower limits for prediction intervals} #' \item{upper}{Upper limits for prediction intervals} #' \item{level}{The confidence values associated with the prediction intervals} #' \item{x}{The original time series (either \code{object} itself or the time series #' used to create the model stored as \code{object}).} #' \item{xreg}{The external regressors used in fitting (if given).} #' \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} #' \item{fitted}{Fitted values (one-step forecasts)} #' \item{...}{Other arguments} #' #' @author Rob J Hyndman and Gabriel Caceres #' @seealso \code{\link{nnetar}}. #' @keywords ts #' @examples #' ## Fit & forecast model #' fit <- nnetar(USAccDeaths, size=2) #' fcast <- forecast(fit, h=20) #' plot(fcast) #' #' \dontrun{ #' ## Include prediction intervals in forecast #' fcast2 <- forecast(fit, h=20, PI=TRUE, npaths=100) #' plot(fcast2) #' #' ## Set up out-of-sample innovations using cross-validation #' fit_cv <- CVar(USAccDeaths, size=2) #' res_sd <- sd(fit_cv$residuals, na.rm=TRUE) #' myinnovs <- rnorm(20*100, mean=0, sd=res_sd) #' ## Forecast using new innovations #' fcast3 <- forecast(fit, h=20, PI=TRUE, npaths=100, innov=myinnovs) #' plot(fcast3) #' } #' #' @export forecast.nnetar <- function(object, h=ifelse(object$m > 1, 2 * object$m, 10), PI=FALSE, level=c(80, 95), fan=FALSE, xreg=NULL, lambda=object$lambda, bootstrap=FALSE, npaths=1000, innov=NULL, ...) 
{ # require(nnet) out <- object tspx <- tsp(out$x) # if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 && max(level) < 1) { level <- 100 * level } else if (min(level) < 0 || max(level) > 99.99) { stop("Confidence limit out of range") } } # Check if xreg was used in fitted model if (is.null(object$xreg)) { if (!is.null(xreg)) { warning("External regressors were not used in fitted model, xreg will be ignored") } xreg <- NULL } else { if (is.null(xreg)) { stop("No external regressors provided") } xreg <- as.matrix(xreg) if (NCOL(xreg) != NCOL(object$xreg)) { stop("Number of external regressors does not match fitted model") } if(!identical(colnames(xreg), colnames(object$xreg))){ warning("xreg contains different column names from the xreg used in training. Please check that the regressors are in the same order.") } h <- NROW(xreg) } fcast <- numeric(h) xx <- object$x xxreg <- xreg if (!is.null(lambda)) { xx <- BoxCox(xx, lambda) lambda <- attr(xx, "lambda") } # Check and apply scaling of fitted model if (!is.null(object$scalex)) { xx <- scale(xx, center = object$scalex$center, scale = object$scalex$scale) if (!is.null(xreg)) { xxreg <- scale(xreg, center = object$scalexreg$center, scale = object$scalexreg$scale) } } # Get lags used in fitted model lags <- object$lags maxlag <- max(lags) flag <- rev(tail(xx, n = maxlag)) # Iterative 1-step forecast for (i in 1:h) { newdata <- c(flag[lags], xxreg[i, ]) if (any(is.na(newdata))) { stop("I can't forecast when there are missing values near the end of the series.") } fcast[i] <- mean(sapply(object$model, predict, newdata = newdata)) flag <- c(fcast[i], flag[-maxlag]) } # Re-scale point forecasts if (!is.null(object$scalex)) { fcast <- fcast * object$scalex$scale + object$scalex$center } # Add ts properties fcast <- ts(fcast, start = tspx[2] + 1 / tspx[3], frequency = tspx[3]) # Back-transform point forecasts if (!is.null(lambda)) { fcast <- InvBoxCox(fcast, lambda) } # Compute prediction intervals using simulations if (isTRUE(PI)) { nint <- length(level) sim <- matrix(NA, nrow = npaths, ncol = h) if (!is.null(innov)) { if (length(innov) != h * npaths) { stop("Incorrect number of innovations, need h*npaths values") } innov <- matrix(innov, nrow = h, ncol = npaths) bootstrap <- FALSE } for (i in 1:npaths) sim[i, ] <- simulate(object, nsim = h, bootstrap = bootstrap, xreg = xreg, lambda = lambda, innov = innov[, i], ...) lower <- apply(sim, 2, quantile, 0.5 - level / 200, type = 8, na.rm = TRUE) upper <- apply(sim, 2, quantile, 0.5 + level / 200, type = 8, na.rm = TRUE) if (nint > 1L) { lower <- ts(t(lower)) upper <- ts(t(upper)) } else { lower <- ts(matrix(lower, ncol = 1L)) upper <- ts(matrix(upper, ncol = 1L)) } tsp(lower) <- tsp(upper) <- tsp(fcast) } else { level <- NULL lower <- NULL upper <- NULL } out$mean <- fcast out$level <- level out$lower <- lower out$upper <- upper return(structure(out, class = "forecast")) } #' @rdname fitted.Arima #' @export fitted.nnetar <- function(object, h=1, ...) { if (h == 1) { return(object$fitted) } else { return(hfitted(object = object, h = h, FUN = "nnetar", ...)) } } #' @export print.nnetar <- function(x, digits = max(3, getOption("digits") - 3), ...) 
{ cat("Series:", x$series, "\n") cat("Model: ", x$method, "\n") # cat(" one hidden layer with",x$size,"nodes\n") cat("Call: ") print(x$call) print(x$model) cat( "\nsigma^2 estimated as ", format(mean(residuals(x) ^ 2, na.rm = TRUE), digits = digits), "\n", sep = "" ) invisible(x) } #' @rdname is.ets #' @export is.nnetar <- function(x) { inherits(x, "nnetar") } #' @rdname is.ets #' @export is.nnetarmodels <- function(x) { inherits(x, "nnetarmodels") } # Scale a univariate time series scale.ts <- function(x, center=TRUE, scale=TRUE) { tspx <- tsp(x) x <- as.ts(scale.default(x, center = center, scale = scale)) tsp(x) <- tspx return(x) } forecast/R/components.R0000644000176200001440000000712313553673010014577 0ustar liggesusers# Functions to extract components from time series decomposition # These should match corresponding functions in the seasonal package # providing similar functional for stl, decomposed.ts and tbats objects #' Extract components from a time series decomposition #' #' Returns a univariate time series equal to either a seasonal component, #' trend-cycle component or remainder component from a time series #' decomposition. #' #' @param object Object created by \code{\link[stats]{decompose}}, #' \code{\link[stats]{stl}} or \code{\link{tbats}}. #' @return Univariate time series. #' @author Rob J Hyndman #' @seealso \code{\link[stats]{stl}}, \code{\link[stats]{decompose}}, #' \code{\link{tbats}}, \code{\link{seasadj}}. #' @keywords ts #' @examples #' plot(USAccDeaths) #' fit <- stl(USAccDeaths, s.window="periodic") #' lines(trendcycle(fit),col="red") #' #' library(ggplot2) #' autoplot(cbind( #' Data=USAccDeaths, #' Seasonal=seasonal(fit), #' Trend=trendcycle(fit), #' Remainder=remainder(fit)), #' facets=TRUE) + #' ylab("") + xlab("Year") #' #' @export seasonal <- function(object) { if ("mstl" %in% class(object)) { cols <- grep("Season", colnames(object)) return(object[, cols]) } else if ("stl" %in% class(object)) { return(object$time.series[, "seasonal"]) } else if ("decomposed.ts" %in% class(object)) { return(object$seasonal) } else if ("tbats" %in% class(object)) { comp <- tbats.components(object) scols <- grep("season", colnames(comp)) season <- ts(rowSums(comp[, scols, drop = FALSE])) if (!is.null(object$lambda)) { season <- InvBoxCox(season, object$lambda) } tsp(season) <- tsp(comp) return(season) } else if ("seas" %in% class(object)) { return(object$data[, "seasonal"]) } else { stop("Unknown object type") } } #' @rdname seasonal #' @export trendcycle <- function(object) { if ("mstl" %in% class(object)) { return(object[, "Trend"]) } else if ("stl" %in% class(object)) { return(object$time.series[, "trend"]) } else if ("decomposed.ts" %in% class(object)) { return(object$trend) } # else if("tbats" %in% class(object)) # { # trnd <- tbats.components(object)[,"level"] # if (!is.null(object$lambda)) # trnd <- InvBoxCox(trnd, object$lambda) # return(trnd) # } else if ("seas" %in% class(object)) { return(seasextract_w_na_action(object, "trend")) } else { stop("Unknown object type") } } #' @rdname seasonal #' @export remainder <- function(object) { if ("mstl" %in% class(object)) { return(object[, "Remainder"]) } else if ("stl" %in% class(object)) { return(object$time.series[, "remainder"]) } else if ("decomposed.ts" %in% class(object)) { return(object$random) } # else if("tbats" %in% class(object)) # { # comp <- tbats.components(object) # trnd <- comp[,"level"] # scols <- grep("season",colnames(comp)) # season <- rowSums(comp[,scols,drop=FALSE]) # irreg <- ts(comp[,'observed'] - 
trnd - season) # tsp(irreg) <- tsp(comp) # return(irreg) # } else if ("seas" %in% class(object)) { return(seasextract_w_na_action(object, "irregular")) } else { stop("Unknown object type") } } ## Copied from seasonal:::extract_w_na_action ## Importing is problematic due to issues with ARM processors seasextract_w_na_action <- function(x, name) { if (is.null(x$data)) { return(NULL) } z <- na.omit(x$data[, name]) if (!is.null(x$na.action)) { if (attr(x$na.action, "class") == "exclude") { z <- ts(stats::napredict(x$na.action, z)) tsp(z) <- tsp(x$x) } } z } forecast/R/dshw.r0000644000176200001440000002210413553673010013413 0ustar liggesusers#################################################################### ## Double Seasonal Holt Winters method as per Taylor (2003) ## Periods must be nested. ## y can be an msts object, or periods can be passed explicitly. #################################################################### #' Double-Seasonal Holt-Winters Forecasting #' #' Returns forecasts using Taylor's (2003) Double-Seasonal Holt-Winters method. #' #' Taylor's (2003) double-seasonal Holt-Winters method uses additive trend and #' multiplicative seasonality, where there are two seasonal components which #' are multiplied together. For example, with a series of half-hourly data, one #' would set \code{period1=48} for the daily period and \code{period2=336} for #' the weekly period. The smoothing parameter notation used here is different #' from that in Taylor (2003); instead it matches that used in Hyndman et al #' (2008) and that used for the \code{\link{ets}} function. #' #' @param y Either an \code{\link{msts}} object with two seasonal periods or a #' numeric vector. #' @param period1 Period of the shorter seasonal period. Only used if \code{y} #' is not an \code{\link{msts}} object. #' @param period2 Period of the longer seasonal period. Only used if \code{y} #' is not an \code{\link{msts}} object. #' @param h Number of periods for forecasting. #' @param alpha Smoothing parameter for the level. If \code{NULL}, the #' parameter is estimated using least squares. #' @param beta Smoothing parameter for the slope. If \code{NULL}, the parameter #' is estimated using least squares. #' @param gamma Smoothing parameter for the first seasonal period. If #' \code{NULL}, the parameter is estimated using least squares. #' @param omega Smoothing parameter for the second seasonal period. If #' \code{NULL}, the parameter is estimated using least squares. #' @param phi Autoregressive parameter. If \code{NULL}, the parameter is #' estimated using least squares. #' @param armethod If TRUE, the forecasts are adjusted using an AR(1) model for #' the errors. #' @param model If it's specified, an existing model is applied to a new data #' set. #' @inheritParams forecast #' #' @return An object of class "\code{forecast}" which is a list that includes the #' following elements: #' \item{model}{A list containing information about the fitted model} #' \item{method}{The name of the forecasting method as a character string} #' \item{mean}{Point forecasts as a time series} #' \item{x}{The original time series.} #' \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} #' \item{fitted}{Fitted values (one-step forecasts)} #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts. 
#' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{dshw}. #' #' @author Rob J Hyndman #' @seealso \code{\link[stats]{HoltWinters}}, \code{\link{ets}}. #' @references Taylor, J.W. (2003) Short-term electricity demand forecasting #' using double seasonal exponential smoothing. \emph{Journal of the #' Operational Research Society}, \bold{54}, 799-805. #' #' Hyndman, R.J., Koehler, A.B., Ord, J.K., and Snyder, R.D. (2008) #' \emph{Forecasting with exponential smoothing: the state space approach}, #' Springer-Verlag. \url{http://www.exponentialsmoothing.net}. #' @keywords ts #' @examples #' #' \dontrun{ #' fcast <- dshw(taylor) #' plot(fcast) #' #' t <- seq(0,5,by=1/20) #' x <- exp(sin(2*pi*t) + cos(2*pi*t*4) + rnorm(length(t),0,.1)) #' fit <- dshw(x,20,5) #' plot(fit) #' } #' #' @export dshw <- function(y, period1=NULL, period2=NULL, h=2 * max(period1, period2), alpha=NULL, beta=NULL, gamma=NULL, omega=NULL, phi=NULL, lambda=NULL, biasadj=FALSE, armethod=TRUE, model = NULL) { if (min(y, na.rm = TRUE) <= 0) { stop("dshw not suitable when data contain zeros or negative numbers") } seriesname <- deparse(substitute(y)) if (!is.null(model) && model$method == "DSHW") { period1 <- model$period1 period2 <- model$period2 } else if (inherits(y, "msts") && (length(attr(y, "msts")) == 2)) { period1 <- as.integer(sort(attr(y, "msts"))[1]) period2 <- as.integer(sort(attr(y, "msts"))[2]) } else if (is.null(period1) || is.null(period2)) { stop("Error in dshw(): y must either be an msts object with two seasonal periods OR the seasonal periods should be specified with period1= and period2=") } else { if (period1 > period2) { tmp <- period2 period2 <- period1 period1 <- tmp } } if (any(class(y) != "msts")) { y <- msts(y, c(period1, period2)) } if (length(y) < 2 * max(period2)) { stop("Insufficient data to estimate model") } if (!armethod) { phi <- 0 } if (period1 < 1 || period1 == period2) { stop("Inappropriate periods") } ratio <- period2 / period1 if (ratio - trunc(ratio) > 1e-10) { stop("Seasonal periods are not nested") } if (!is.null(model)) { lambda <- model$model$lambda } if (!is.null(lambda)) { origy <- y y <- BoxCox(y, lambda) lambda <- attr(y, "lambda") } if (!is.null(model)) { pars <- model$model alpha <- pars$alpha beta <- pars$beta gamma <- pars$gamma omega <- pars$omega phi <- pars$phi } else { pars <- rep(NA, 5) if (!is.null(alpha)) { pars[1] <- alpha } if (!is.null(beta)) { pars[2] <- beta } if (!is.null(gamma)) { pars[3] <- gamma } if (!is.null(omega)) { pars[4] <- omega } if (!is.null(phi)) { pars[5] <- phi } } # Estimate parameters if (sum(is.na(pars)) > 0) { pars <- par_dshw(y, period1, period2, pars) alpha <- pars[1] beta <- pars[2] gamma <- pars[3] omega <- pars[4] phi <- pars[5] } ## Allocate space n <- length(y) yhat <- numeric(n) ## Starting values I <- seasindex(y, period1) wstart <- seasindex(y, period2) wstart <- wstart / rep(I, ratio) w <- wstart x <- c(0, diff(y[1:period2])) t <- t.start <- mean(((y[1:period2] - y[(period2 + 1):(2 * period2)]) / period2) + x) / 2 s <- s.start <- (mean(y[1:(2 * period2)]) - (period2 + 0.5) * t) ## In-sample fit for (i in 1:n) { yhat[i] <- (s + t) * I[i] * w[i] snew <- alpha * (y[i] / (I[i] * w[i])) + (1 - alpha) * (s + t) tnew <- beta * (snew - s) + (1 - beta) * t I[i + period1] <- gamma * (y[i] / (snew * w[i])) + (1 - gamma) * I[i] w[i + period2] <- omega * (y[i] / (snew * I[i])) + (1 - omega) * w[i] s <- snew t <- tnew } # Forecasts fcast <- (s + (1:h) * t) * 
rep(I[n + (1:period1)], h / period1 + 1)[1:h] * rep(w[n + (1:period2)], h / period2 + 1)[1:h] fcast <- msts(fcast, c(period1, period2), start = tsp(y)[2] + 1 / tsp(y)[3]) # Calculate MSE and MAPE yhat <- ts(yhat) tsp(yhat) <- tsp(y) yhat <- msts(yhat, c(period1, period2)) e <- y - yhat e <- msts(e, c(period1, period2)) if (armethod) { yhat <- yhat + phi * c(0, e[-n]) fcast <- fcast + phi ^ (1:h) * e[n] e <- y - yhat } mse <- mean(e ^ 2) mape <- mean(abs(e) / y) * 100 end.y <- end(y) if (end.y[2] == frequency(y)) { end.y[1] <- end.y[1] + 1 end.y[2] <- 1 } else { end.y[2] <- end.y[2] + 1 } fcast <- msts(fcast, c(period1, period2)) if (!is.null(lambda)) { y <- origy fcast <- InvBoxCox(fcast, lambda, biasadj, var(e)) attr(lambda, "biasadj") <- biasadj # Does this also need a biasadj backtransform? yhat <- InvBoxCox(yhat, lambda) } return(structure(list( mean = fcast, method = "DSHW", x = y, residuals = e, fitted = yhat, series = seriesname, model = list( mape = mape, mse = mse, alpha = alpha, beta = beta, gamma = gamma, omega = omega, phi = phi, lambda = lambda, l0 = s.start, b0 = t.start, s10 = wstart, s20 = I ), period1 = period1, period2 = period2 ), class = "forecast")) }
### Double Seasonal Holt-Winters smoothing parameter optimization
par_dshw <- function(y, period1, period2, pars) { start <- c(0.1, 0.01, 0.001, 0.001, 0.0)[is.na(pars)] out <- optim(start, dshw.mse, y = y, period1 = period1, period2 = period2, pars = pars) pars[is.na(pars)] <- out$par return(pars) }
dshw.mse <- function(par, y, period1, period2, pars) { pars[is.na(pars)] <- par if (max(pars) > 0.99 | min(pars) < 0 | pars[5] > .9) { return(Inf) } else { return(dshw(y, period1, period2, h = 1, pars[1], pars[2], pars[3], pars[4], pars[5], armethod = (abs(pars[5]) > 1e-7))$model$mse) } }
### Calculating seasonal indexes
seasindex <- function(y, p) { n <- length(y) n2 <- 2 * p shorty <- y[1:n2] average <- numeric(n) simplema <- zoo::rollmean.default(shorty, p) if (p %% 2 == 0) # Even order (comparison used instead of identical(), which fails for integer p) { centeredma <- zoo::rollmean.default(simplema[1:(n2 - p + 1)], 2) average[p / 2 + 1:p] <- shorty[p / 2 + 1:p] / centeredma[1:p] si <- average[c(p + (1:(p / 2)), (1 + p / 2):p)] } else # Odd order { average[(p - 1) / 2 + 1:p] <- shorty[(p - 1) / 2 + 1:p] / simplema[1:p] si <- average[c(p + (1:((p - 1) / 2)), (1 + (p - 1) / 2):p)] } return(si) }
forecast/R/mforecast.R0000644000176200001440000002515313553673010014400 0ustar liggesusers#' @rdname is.forecast
#' @export
is.mforecast <- function(x) { inherits(x, "mforecast") }
mlmsplit <- function(x, index=NULL) { if (is.null(index)) { stop("Must select lm using index=integer(1)") } mfit <- match(c("coefficients", "residuals", "effects", "fitted.values"), names(x), 0L) for (j in mfit) { x[[j]] <- x[[j]][, index] } class(x) <- "lm" y <- attr(x$terms, "response") yName <- make.names(colnames(x$model[[y]])[index]) x$model[[y]] <- x$model[[y]][, index] colnames(x$model)[y] <- yName attr(x$model, "terms") <- terms(reformulate(attr(x$terms, "term.labels"), response = yName), data = x$model) if (!is.null(tsp(x$data[, 1]))) { tspx <- tsp(x$data[, 1]) # Consolidate ts attributes for forecast.lm x$data <- lapply(x$model, function(x) ts(x, start = tspx[1], end = tspx[2], frequency = tspx[3])) class(x$data) <- "data.frame" row.names(x$data) <- 1:max(sapply(x$data, NROW)) } x$terms <- terms(x$model) return(x) }
#' Forecast a multiple linear model with possible time series components
#'
#' \code{forecast.mlm} is used to predict multiple linear models, especially
#' those involving trend and
seasonality components. #' #' \code{forecast.mlm} is largely a wrapper for #' \code{\link[forecast]{forecast.lm}()} except that it allows forecasts to be #' generated on multiple series. Also, the output is reformatted into a #' \code{mforecast} object. #' #' @param object Object of class "mlm", usually the result of a call to #' \code{\link[stats]{lm}} or \code{\link{tslm}}. #' @param newdata An optional data frame in which to look for variables with #' which to predict. If omitted, it is assumed that the only variables are #' trend and season, and \code{h} forecasts are produced. #' @param level Confidence level for prediction intervals. #' @param fan If \code{TRUE}, level is set to seq(51,99,by=3). This is suitable #' for fan plots. #' @param h Number of periods for forecasting. Ignored if \code{newdata} #' present. #' @param ts If \code{TRUE}, the forecasts will be treated as time series #' provided the original data is a time series; the \code{newdata} will be #' interpreted as related to the subsequent time periods. If \code{FALSE}, any #' time series attributes of the original data will be ignored. #' @param ... Other arguments passed to \code{\link[forecast]{forecast.lm}()}. #' @inheritParams forecast #' #' @return An object of class "\code{mforecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{forecast.lm}. #' #' An object of class \code{"mforecast"} is a list containing at least the #' following elements: \item{model}{A list containing information about the #' fitted model} \item{method}{The name of the forecasting method as a #' character string} \item{mean}{Point forecasts as a multivariate time series} #' \item{lower}{Lower limits for prediction intervals of each series} #' \item{upper}{Upper limits for prediction intervals of each series} #' \item{level}{The confidence values associated with the prediction intervals} #' \item{x}{The historical data for the response variable.} #' \item{residuals}{Residuals from the fitted model. That is x minus fitted #' values.} \item{fitted}{Fitted values} #' @author Mitchell O'Hara-Wild #' @seealso \code{\link{tslm}}, \code{\link{forecast.lm}}, #' \code{\link[stats]{lm}}. #' @examples #' #' lungDeaths <- cbind(mdeaths, fdeaths) #' fit <- tslm(lungDeaths ~ trend + season) #' fcast <- forecast(fit, h=10) #' #' carPower <- as.matrix(mtcars[,c("qsec","hp")]) #' carmpg <- mtcars[,"mpg"] #' fit <- lm(carPower ~ carmpg) #' fcast <- forecast(fit, newdata=data.frame(carmpg=30)) #' #' @export forecast.mlm <- function(object, newdata, h=10, level=c(80, 95), fan=FALSE, lambda=object$lambda, biasadj=NULL, ts=TRUE, ...) { out <- list(model = object, forecast = vector("list", NCOL(object$coefficients))) cl <- match.call() cl[[1]] <- quote(forecast.lm) cl$object <- quote(mlmsplit(object, index = i)) for (i in seq_along(out$forecast)) { out$forecast[[i]] <- eval(cl) out$forecast[[i]]$series <- colnames(object$coefficients)[i] } out$method <- rep("Multiple linear regression model", length(out$forecast)) names(out$forecast) <- names(out$method) <- colnames(object$coefficients) return(structure(out, class = "mforecast")) } #' Forecasting time series #' #' \code{mforecast} is a class of objects for forecasting from multivariate #' time series or multivariate time series models. 
The function invokes #' particular \emph{methods} which depend on the class of the first argument. #' #' For example, the function \code{\link{forecast.mlm}} makes multivariate #' forecasts based on the results produced by \code{\link{tslm}}. #' #' @aliases mforecast print.mforecast summary.mforecast as.data.frame.mforecast #' #' @param object a multivariate time series or multivariate time series model #' for which forecasts are required #' @param h Number of periods for forecasting #' @param level Confidence level for prediction intervals. #' @param fan If TRUE, \code{level} is set to \code{seq(51,99,by=3)}. This is #' suitable for fan plots. #' @param robust If TRUE, the function is robust to missing values and outliers #' in \code{object}. This argument is only valid when \code{object} is of class #' \code{mts}. #' @param find.frequency If TRUE, the function determines the appropriate #' period, if the data is of unknown period. #' @param allow.multiplicative.trend If TRUE, then ETS models with #' multiplicative trends are allowed. Otherwise, only additive or no trend ETS #' models are permitted. #' @param ... Additional arguments affecting the forecasts produced. #' @inheritParams forecast #' @return An object of class "\code{mforecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the multivariate #' forecasts and prediction intervals. #' #' The generic accessors functions \code{fitted.values} and \code{residuals} #' extract various useful features of the value returned by #' \code{forecast$model}. #' #' An object of class \code{"mforecast"} is a list usually containing at least #' the following elements: \item{model}{A list containing information about the #' fitted model} \item{method}{The name of the forecasting method as a #' character string} \item{mean}{Point forecasts as a time series} #' \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper #' limits for prediction intervals} \item{level}{The confidence values #' associated with the prediction intervals} \item{x}{The original time series #' (either \code{object} itself or the time series used to create the model #' stored as \code{object}).} \item{residuals}{Residuals from the fitted model. #' For models with additive errors, the residuals will be x minus the fitted #' values.} \item{fitted}{Fitted values (one-step forecasts)} #' @author Rob J Hyndman & Mitchell O'Hara-Wild #' @seealso Other functions which return objects of class \code{"mforecast"} #' are \code{\link{forecast.mlm}}, \code{forecast.varest}. #' #' @export forecast.mts <- function(object, h=ifelse(frequency(object) > 1, 2 * frequency(object), 10), level=c(80, 95), fan=FALSE, robust=FALSE, lambda = NULL, biasadj = FALSE, find.frequency = FALSE, allow.multiplicative.trend=FALSE, ...) { out <- list(forecast = vector("list", NCOL(object))) cl <- match.call() cl[[1]] <- quote(forecast.ts) cl$object <- quote(object[, i]) for (i in 1:NCOL(object)) { out$forecast[[i]] <- eval(cl) out$forecast[[i]]$series <- colnames(object)[i] } out$method <- vapply(out$forecast, function(x) x$method, character(1)) names(out$forecast) <- names(out$method) <- colnames(object) return(structure(out, class = "mforecast")) } #' @export print.mforecast <- function(x, ...) 
{ lapply(x$forecast, function(x) { cat(paste0(x$series, "\n")) print(x) cat("\n") }) return(invisible()) } #' Multivariate forecast plot #' #' Plots historical data with multivariate forecasts and prediction intervals. #' #' \code{autoplot} will produce an equivalent plot as a ggplot object. #' #' @param x Multivariate forecast object of class \code{mforecast}. #' @param object Multivariate forecast object of class \code{mforecast}. Used #' for ggplot graphics (S3 method consistency). #' @param main Main title. Default is the forecast method. For autoplot, #' specify a vector of titles for each plot. #' @param xlab X-axis label. For autoplot, specify a vector of labels for each #' plot. #' @param PI If \code{FALSE}, confidence intervals will not be plotted, giving #' only the forecast line. #' @param facets If TRUE, multiple time series will be faceted. If FALSE, each #' series will be assigned a colour. #' @param colour If TRUE, the time series will be assigned a colour aesthetic #' @param series Matches an unidentified forecast layer with a coloured object #' on the plot. #' @param \dots additional arguments to each individual \code{plot}. #' @author Mitchell O'Hara-Wild #' @seealso \code{\link[forecast]{plot.forecast}}, \code{\link[stats]{plot.ts}} #' @references Hyndman and Athanasopoulos (2018) \emph{Forecasting: principles #' and practice}, 2nd edition, OTexts: Melbourne, Australia. #' \url{https://OTexts.org/fpp2/} #' @keywords ts #' @examples #' library(ggplot2) #' #' lungDeaths <- cbind(mdeaths, fdeaths) #' fit <- tslm(lungDeaths ~ trend + season) #' fcast <- forecast(fit, h=10) #' plot(fcast) #' autoplot(fcast) #' #' carPower <- as.matrix(mtcars[,c("qsec","hp")]) #' carmpg <- mtcars[,"mpg"] #' fit <- lm(carPower ~ carmpg) #' fcast <- forecast(fit, newdata=data.frame(carmpg=30)) #' plot(fcast, xlab="Year") #' autoplot(fcast, xlab=rep("Year",2)) #' #' @export plot.mforecast <- function(x, main=paste("Forecasts from", unique(x$method)), xlab="time", ...) { oldpar <- par(mfrow = c(length(x$forecast), 1), mar = c(0, 5.1, 0, 2.1), oma = c(6, 0, 5, 0)) on.exit(par(oldpar)) for (fcast in x$forecast) { plot(fcast, main = "", xaxt = "n", ylab = fcast$series, ...) } axis(1) mtext(xlab, outer = TRUE, side = 1, line = 3) title(main = main, outer = TRUE) } #' @export summary.mforecast <- function(object, ...) 
{ cat(paste("\nForecast method:", unique(object$method))) cat(paste("\n\nModel Information:\n")) print(object$model) cat("\nError measures:\n") print(accuracy(object)) if (is.null(object$forecast)) { cat("\n No forecasts\n") } else { cat("\nForecasts:\n") print(object) } } forecast/R/forecast-package.R0000644000176200001440000000235613566114153015616 0ustar liggesusers#' @import parallel #' @import Rcpp #' #' @importFrom colorspace sequential_hcl #' @importFrom fracdiff fracdiff diffseries fracdiff.sim #' @importFrom tseries adf.test pp.test kpss.test #' @importFrom zoo rollmean as.Date as.yearqtr #' @importFrom timeDate as.timeDate isBizday difftimeDate Easter as.Date.timeDate #' @importFrom nnet nnet #' @importFrom grDevices gray heat.colors nclass.FD palette #' @importFrom graphics abline axis grid layout lines mtext par plot points polygon text title hist #' @importFrom stats Box.test acf approx ar arima arima.sim as.ts complete.cases cycle decompose diffinv end extractAIC fitted formula frequency window filter na.contiguous spec.ar hatvalues is.ts ksmooth lm lsfit loess median model.frame na.exclude na.omit na.pass optim optimize pf plot.ts poly predict pt qnorm qt quantile residuals rnorm runif sd simulate smooth.spline start stl supsmu terms time ts tsp tsp<- tsdiag var logLik nobs napredict #' @importFrom stats aggregate as.formula is.mts reformulate #' @importFrom utils packageVersion tail head #' @importFrom ggplot2 autoplot fortify #' @importFrom lmtest bgtest #' @importFrom stats supsmu #' @importFrom magrittr %>% #' #' @useDynLib forecast, .registration = TRUE NULL #' @export magrittr::`%>%` forecast/R/etsforecast.R0000644000176200001440000002625413553673010014742 0ustar liggesusers#' Forecasting using ETS models #' #' Returns forecasts and other information for univariate ETS models. #' #' #' @param object An object of class "\code{ets}". Usually the result of a call #' to \code{\link{ets}}. #' @param h Number of periods for forecasting #' @param level Confidence level for prediction intervals. #' @param fan If TRUE, level is set to seq(51,99,by=3). This is suitable for #' fan plots. #' @param simulate If TRUE, prediction intervals are produced by simulation rather #' than using analytic formulae. Errors are assumed to be normally distributed. #' @param bootstrap If TRUE, then prediction intervals are produced by simulation using #' resampled errors (rather than normally distributed errors). #' @param npaths Number of sample paths used in computing simulated prediction #' intervals. #' @param PI If TRUE, prediction intervals are produced, otherwise only point #' forecasts are calculated. If \code{PI} is FALSE, then \code{level}, #' \code{fan}, \code{simulate}, \code{bootstrap} and \code{npaths} are all #' ignored. #' @param ... Other arguments. #' @inheritParams forecast #' @return An object of class "\code{forecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{forecast.ets}. 
#' #' An object of class \code{"forecast"} is a list containing at least the #' following elements: \item{model}{A list containing information about the #' fitted model} \item{method}{The name of the forecasting method as a #' character string} \item{mean}{Point forecasts as a time series} #' \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper #' limits for prediction intervals} \item{level}{The confidence values #' associated with the prediction intervals} \item{x}{The original time series #' (either \code{object} itself or the time series used to create the model #' stored as \code{object}).} \item{residuals}{Residuals from the fitted model. #' For models with additive errors, the residuals are x - fitted values. For #' models with multiplicative errors, the residuals are equal to x /(fitted #' values) - 1.} \item{fitted}{Fitted values (one-step forecasts)} #' @author Rob J Hyndman #' @seealso \code{\link{ets}}, \code{\link{ses}}, \code{\link{holt}}, #' \code{\link{hw}}. #' @keywords ts #' @examples #' fit <- ets(USAccDeaths) #' plot(forecast(fit,h=48)) #' #' @export #' @export forecast.ets forecast.ets <- function(object, h=ifelse(object$m > 1, 2 * object$m, 10), level=c(80, 95), fan=FALSE, simulate=FALSE, bootstrap=FALSE, npaths=5000, PI=TRUE, lambda=object$lambda, biasadj=NULL, ...) { # Check inputs # if(h>2000 | h<=0) if (h <= 0) { stop("Forecast horizon out of bounds") } if (is.null(lambda)) { biasadj <- FALSE } else { if (is.null(biasadj)) { biasadj <- attr(lambda, "biasadj") } if (!is.logical(biasadj)) { warning("biasadj information not found, defaulting to FALSE.") biasadj <- FALSE } } if (!PI && !biasadj) { simulate <- bootstrap <- fan <- FALSE if (!biasadj) { npaths <- 2 } # Just to avoid errors level <- 90 } if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 && max(level) < 1) { level <- 100 * level } else if (min(level) < 0 || max(level) > 99.99) { stop("Confidence limit out of range") } } # Order levels level <- sort(level) n <- length(object$x) damped <- as.logical(object$components[4]) if (bootstrap) { simulate <- TRUE } if (simulate) { f <- pegelsfcast.C(h, object, level = level, bootstrap = bootstrap, npaths = npaths) } else if (object$components[1] == "A" && is.element(object$components[2], c("A", "N")) && is.element(object$components[3], c("N", "A"))) { f <- class1(h, object$states[n + 1, ], object$components[2], object$components[3], damped, object$m, object$sigma2, object$par) } else if (object$components[1] == "M" && is.element(object$components[2], c("A", "N")) && is.element(object$components[3], c("N", "A"))) { f <- class2(h, object$states[n + 1, ], object$components[2], object$components[3], damped, object$m, object$sigma2, object$par) } else if (object$components[1] == "M" && object$components[3] == "M" && object$components[2] != "M") { f <- class3(h, object$states[n + 1, ], object$components[2], object$components[3], damped, object$m, object$sigma2, object$par) } else { f <- pegelsfcast.C(h, object, level = level, bootstrap = bootstrap, npaths = npaths) } tsp.x <- tsp(object$x) if (!is.null(tsp.x)) { start.f <- tsp(object$x)[2] + 1 / object$m } else { start.f <- length(object$x) + 1 } out <- list(model = object, mean = ts(f$mu, frequency = object$m, start = start.f), level = level, x = object$x) if (PI || biasadj) { if (!is.null(f$var)) { out$lower <- out$upper <- ts(matrix(NA, ncol = length(level), nrow = h)) colnames(out$lower) <- colnames(out$upper) <- paste(level, "%", sep = "") for (i in 1:length(level)) { marg.error <- 
sqrt(f$var) * abs(qnorm((100 - level[i]) / 200)) out$lower[, i] <- out$mean - marg.error out$upper[, i] <- out$mean + marg.error } tsp(out$lower) <- tsp(out$upper) <- tsp(out$mean) } else if (!is.null(f$lower)) { out$lower <- ts(f$lower) out$upper <- ts(f$upper) tsp(out$lower) <- tsp(out$upper) <- tsp(out$mean) } else if (PI) { warning("No prediction intervals for this model") } else if (any(biasadj)) { warning("No bias adjustment possible") } } out$fitted <- fitted(object) out$method <- object$method if (!is.null(object$series)) { out$series <- object$series } else { out$series <- object$call$y } out$residuals <- residuals(object) if (!is.null(lambda)) { # out$x <- InvBoxCox(object$x,lambda) # out$fitted <- InvBoxCox(out$fitted,lambda) out$mean <- InvBoxCox(out$mean, lambda, biasadj, out) if (PI) # PI = TRUE { out$lower <- InvBoxCox(out$lower, lambda) out$upper <- InvBoxCox(out$upper, lambda) } } if (!PI) { out$lower <- out$upper <- out$level <- NULL } return(structure(out, class = "forecast")) } pegelsfcast.C <- function(h, obj, npaths, level, bootstrap) { y.paths <- matrix(NA, nrow = npaths, ncol = h) obj$lambda <- NULL # No need to transform these here as we do it later. for (i in 1:npaths) y.paths[i, ] <- simulate.ets(obj, h, future = TRUE, bootstrap = bootstrap) y.f <- .C( "etsforecast", as.double(obj$state[length(obj$x) + 1, ]), as.integer(obj$m), as.integer(switch(obj$components[2], "N" = 0, "A" = 1, "M" = 2)), as.integer(switch(obj$components[3], "N" = 0, "A" = 1, "M" = 2)), as.double(ifelse(obj$components[4] == "FALSE", 1, obj$par["phi"])), as.integer(h), as.double(numeric(h)), PACKAGE = "forecast" )[[7]] if (abs(y.f[1] + 99999) < 1e-7) { stop("Problem with multiplicative damped trend") } lower <- apply(y.paths, 2, quantile, 0.5 - level / 200, type = 8, na.rm = TRUE) upper <- apply(y.paths, 2, quantile, 0.5 + level / 200, type = 8, na.rm = TRUE) if (length(level) > 1) { lower <- t(lower) upper <- t(upper) } return(list(mu = y.f, lower = lower, upper = upper)) } class1 <- function(h, last.state, trendtype, seasontype, damped, m, sigma2, par) { p <- length(last.state) H <- matrix(c(1, rep(0, p - 1)), nrow = 1) if (seasontype == "A") { H[1, p] <- 1 } if (trendtype == "A") { if (damped) { H[1, 2] <- par["phi"] } else { H[1, 2] <- 1 } } F <- matrix(0, p, p) F[1, 1] <- 1 if (trendtype == "A") { if (damped) { F[1, 2] <- F[2, 2] <- par["phi"] } else { F[1, 2] <- F[2, 2] <- 1 } } if (seasontype == "A") { F[p - m + 1, p] <- 1 F[(p - m + 2):p, (p - m + 1):(p - 1)] <- diag(m - 1) } G <- matrix(0, nrow = p, ncol = 1) G[1, 1] <- par["alpha"] if (trendtype == "A") { G[2, 1] <- par["beta"] } if (seasontype == "A") { G[3, 1] <- par["gamma"] } mu <- numeric(h) Fj <- diag(p) cj <- numeric(h - 1) if (h > 1) { for (i in 1:(h - 1)) { mu[i] <- H %*% Fj %*% last.state cj[i] <- H %*% Fj %*% G Fj <- Fj %*% F } cj2 <- cumsum(cj ^ 2) var <- sigma2 * c(1, 1 + cj2) } else { var <- sigma2 } mu[h] <- H %*% Fj %*% last.state return(list(mu = mu, var = var, cj = cj)) } class2 <- function(h, last.state, trendtype, seasontype, damped, m, sigma2, par) { tmp <- class1(h, last.state, trendtype, seasontype, damped, m, sigma2, par) theta <- numeric(h) theta[1] <- tmp$mu[1] ^ 2 if (h > 1) { for (j in 2:h) theta[j] <- tmp$mu[j] ^ 2 + sigma2 * sum(tmp$cj[1:(j - 1)] ^ 2 * theta[(j - 1):1]) } var <- (1 + sigma2) * theta - tmp$mu ^ 2 return(list(mu = tmp$mu, var = var)) } class3 <- function(h, last.state, trendtype, seasontype, damped, m, sigma2, par) { p <- length(last.state) H1 <- matrix(rep(1, 1 + (trendtype != "N")), nrow 
= 1) H2 <- matrix(c(rep(0, m - 1), 1), nrow = 1) if (trendtype == "N") { F1 <- 1 G1 <- par["alpha"] } else { F1 <- rbind(c(1, 1), c(0, ifelse(damped, par["phi"], 1))) G1 <- rbind(c(par["alpha"], par["alpha"]), c(par["beta"], par["beta"])) } F2 <- rbind(c(rep(0, m - 1), 1), cbind(diag(m - 1), rep(0, m - 1))) G2 <- matrix(0, m, m) G2[1, m] <- par["gamma"] Mh <- matrix(last.state[1:(p - m)]) %*% matrix(last.state[(p - m + 1):p], nrow = 1) Vh <- matrix(0, length(Mh), length(Mh)) H21 <- H2 %x% H1 F21 <- F2 %x% F1 G21 <- G2 %x% G1 K <- (G2 %x% F1) + (F2 %x% G1) mu <- var <- numeric(h) for (i in 1:h) { mu[i] <- H1 %*% Mh %*% t(H2) var[i] <- (1 + sigma2) * H21 %*% Vh %*% t(H21) + sigma2 * mu[i] ^ 2 vecMh <- c(Mh) Vh <- F21 %*% Vh %*% t(F21) + sigma2 * (F21 %*% Vh %*% t(G21) + G21 %*% Vh %*% t(F21) + K %*% (Vh + vecMh %*% t(vecMh)) %*% t(K) + sigma2 * G21 %*% (3 * Vh + 2 * vecMh %*% t(vecMh)) %*% t(G21)) Mh <- F1 %*% Mh %*% t(F2) + G1 %*% Mh %*% t(G2) * sigma2 } return(list(mu = mu, var = var)) } # ses <- function(x,h=10,level=c(80,95),fan=FALSE,...) # { # fcast <- forecast(ets(x,"ANN"),h,level=level,fan=fan,...) # fcast$method <- "Simple exponential smoothing" # fcast$model$call <- match.call() # return(fcast) # } # holt <- function(x,h=10, damped=FALSE, level=c(80,95), fan=FALSE, ...) # { # junk <- forecast(ets(x,"AAN",damped=damped),h,level=level,fan=fan,...) # if(damped) # junk$method <- "Damped Holt's method" # else # junk$method <- "Holt's method" # junk$model$call <- match.call() # return(junk) # } # hw <- function(x,h=2*frequency(x),seasonal="additive",damped=FALSE,level=c(80,95), fan=FALSE, ...) # { # if(seasonal=="additive") # { # junk <- forecast(ets(x,"AAA",damped=damped),h,level=level,fan=fan,...) # junk$method <- "Holt-Winters' additive method" # } # else # { # junk <- forecast(ets(x,"MAM",damped=damped),h,level=level,fan=fan,...) # junk$method <- "Holt-Winters' multiplicative method" # } # junk$model$call <- match.call() # return(junk) # } forecast/R/checkresiduals.R0000644000176200001440000001150613553673010015403 0ustar liggesusers#' Check that residuals from a time series model look like white noise #' #' If \code{plot=TRUE}, produces a time plot of the residuals, the #' corresponding ACF, and a histogram. If the degrees of freedom for the model #' can be determined and \code{test} is not \code{FALSE}, the output from #' either a Ljung-Box test or Breusch-Godfrey test is printed. #' #' #' @param object Either a time series model, a forecast object, or a time #' series (assumed to be residuals). #' @param lag Number of lags to use in the Ljung-Box or Breusch-Godfrey test. #' If missing, it is set to \code{min(10,n/5)} for non-seasonal data, and #' \code{min(2m, n/5)} for seasonal data, where \code{n} is the length of the series, #' and \code{m} is the seasonal period of the data. It is further constrained to be #' at least \code{df+3} where \code{df} is the degrees of freedom of the model. This #' ensures there are at least 3 degrees of freedom used in the chi-squared test. #' @param df Number of degrees of freedom for fitted model, required for the #' Ljung-Box or Breusch-Godfrey test. Ignored if the degrees of freedom can be #' extracted from \code{object}. #' @param test Test to use for serial correlation. By default, if \code{object} #' is of class \code{lm}, then \code{test="BG"}. Otherwise, \code{test="LB"}. #' Setting \code{test=FALSE} will prevent the test results being printed. #' @param plot Logical. If \code{TRUE}, will produce the plot. #' @param ... 
Other arguments are passed to \code{\link{ggtsdisplay}}. #' @return None #' @author Rob J Hyndman #' @seealso \code{\link{ggtsdisplay}}, \code{\link[stats]{Box.test}}, #' \code{\link[lmtest]{bgtest}} #' @examples #' #' fit <- ets(WWWusage) #' checkresiduals(fit) #' #' @export checkresiduals <- function(object, lag, df=NULL, test, plot=TRUE, ...) { showtest <- TRUE if (missing(test)) { if (is.element("lm", class(object))) { test <- "BG" } else { test <- "LB" } showtest <- TRUE } else if (test != FALSE) { test <- match.arg(test, c("LB", "BG")) showtest <- TRUE } else { showtest <- FALSE } # Extract residuals if (is.element("ts", class(object)) | is.element("numeric", class(object))) { residuals <- object object <- list(method = "Missing") } else { residuals <- residuals(object) } if (length(residuals) == 0L) { stop("No residuals found") } if ("ar" %in% class(object)) { method <- paste("AR(", object$order, ")", sep = "") } else if (!is.null(object$method)) { method <- object$method } else if ("HoltWinters" %in% class(object)) { method <- "HoltWinters" } else if ("StructTS" %in% class(object)) { method <- "StructTS" } else { method <- try(as.character(object), silent = TRUE) if ("try-error" %in% class(method)) { method <- "Missing" } else if (length(method) > 1 | base::nchar(method[1]) > 50) { method <- "Missing" } } if (method == "Missing") { main <- "Residuals" } else { main <- paste("Residuals from", method) } if (plot) { suppressWarnings(ggtsdisplay(residuals, plot.type = "histogram", main = main, ...)) } # Check if we have the model if (is.element("forecast", class(object))) { object <- object$model } if (is.null(object) | !showtest) { return(invisible()) } # Seasonality of data freq <- frequency(residuals) # Find model df if(grepl("STL \\+ ", method)){ warning("The fitted degrees of freedom is based on the model used for the seasonally adjusted data.") } df <- modeldf(object) if (missing(lag)) { lag <- ifelse(freq > 1, 2 * freq, 10) lag <- min(lag, round(length(residuals)/5)) lag <- max(df+3, lag) } if (!is.null(df)) { if (test == "BG") { # Do Breusch-Godfrey test BGtest <- lmtest::bgtest(object, order = lag) BGtest$data.name <- main #print(BGtest) return(BGtest) } else { # Do Ljung-Box test LBtest <- Box.test(zoo::na.approx(residuals), fitdf = df, lag = lag, type = "Ljung") LBtest$method <- "Ljung-Box test" LBtest$data.name <- main names(LBtest$statistic) <- "Q*" print(LBtest) cat(paste("Model df: ", df, ". Total lags used: ", lag, "\n\n", sep = "")) return(invisible(LBtest)) } } } modeldf <- function(object, ...){ UseMethod("modeldf") } modeldf.default <- function(object, ...){ warning("Could not find appropriate degrees of freedom for this model.") NULL } modeldf.ets <- function(object, ...){ length(object$par) } modeldf.Arima <- function(object, ...){ length(object$coef) } modeldf.bats <- function(object, ...){ length(object$parameters$vect) + NROW(object$seed.states) } modeldf.lm <- function(object, ...){ length(object$coefficients) } modeldf.lagwalk <- function(object, ...){ as.numeric(object$par$includedrift) } modeldf.meanf <- function(object, ...){ 1 } forecast/R/wrangle.R0000644000176200001440000000320113553673010014042 0ustar liggesuserstoMat <- function(x) { if (NCOL(x) > 1 && !is.matrix(x)) { x <- matrix(x, ncol = NCOL(x)) } return(x) } # Converts arguments into data.frame, whilst retaining mts/ts/matrix properties datamat <- function(..., flatten=TRUE, functions=TRUE) { vars <- list(...) 
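# `vars` now holds the ... arguments; the code below names any unnamed ones
# from the call, and (when flatten = TRUE) splices data.frame arguments into
# their component columns while appending each matrix column under
# make.names() of its column name. A hedged sketch of the intended behaviour
# (hypothetical values, not from the package tests):
#   datamat(data.frame(a = 1:3, b = 4:6))  # yields columns "a" and "b"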
if (length(vars) == 0) { return(data.frame()) } if (!is.null(names(vars))) { names(vars)[!nzchar(names(vars))] <- as.character(substitute(list(...))[-1])[!nzchar(names(vars))] } else { names(vars) <- as.character(substitute(list(...))[-1]) } if (flatten) { i <- 1 while (i <= length(vars)) { if (is.data.frame(vars[[i]])) { vars <- c(vars, c(vars[[i]])) # Append data.frame components vars[[i]] <- NULL # Remove data.frame } else if (is.matrix(vars[[i]])) { for (j in 1:NCOL(vars[[i]])) { vars[[length(vars) + 1]] <- vars[[i]][, j] names(vars)[length(vars)] <- make.names(colnames(vars[[i]])[j]) } i <- i + 1 } else { i <- i + 1 } } } class(vars) <- "data.frame" row.names(vars) <- 1:max(sapply(vars, NROW)) # if(is.ts(vars[,1])){ # if(NCOL(vars)>1){ # class(vars) <- c(class(vars),"mts") # } # class(vars) <- c(class(vars),"ts") # tspx <- unique(sapply(vars,tsp), MARGIN = 2) # if(length(tspx)==3){ # attr(vars, "tsp") <- tspx # } # } return(vars) } recoverTSP <- function(times.x) { freq <- sort(unique(round(times.x %% 1, digits = 6))) # The subset cannot increase frequency freq <- length(freq) return(c(min(times.x), min(times.x) + (length(times.x) - 1) / freq, freq)) } forecast/R/simulate.R0000644000176200001440000006365113566114520014245 0ustar liggesusers#' Simulation from a time series model #' #' Returns a time series based on the model object \code{object}. #' #' With \code{simulate.Arima()}, the \code{object} should be produced by #' \code{\link{Arima}} or \code{\link{auto.arima}}, rather than #' \code{\link[stats]{arima}}. By default, the error series is assumed normally #' distributed and generated using \code{\link[stats]{rnorm}}. If \code{innov} #' is present, it is used instead. If \code{bootstrap=TRUE} and #' \code{innov=NULL}, the residuals are resampled instead. #' #' When \code{future=TRUE}, the sample paths are conditional on the data. When #' \code{future=FALSE} and the model is stationary, the sample paths do not #' depend on the data at all. When \code{future=FALSE} and the model is #' non-stationary, the location of the sample paths is arbitrary, so they all #' start at the value of the first observation. #' #' @param object An object of class "\code{ets}", "\code{Arima}", "\code{ar}" #' or "\code{nnetar}". #' @param nsim Number of periods for the simulated series. Ignored if either #' \code{xreg} or \code{innov} are not \code{NULL}. #' @param seed Either \code{NULL} or an integer that will be used in a call to #' \code{\link[base]{set.seed}} before simulating the time series. The default, #' \code{NULL}, will not change the random generator state. #' @param future Produce sample paths that are future to and conditional on the #' data in \code{object}. Otherwise simulate unconditionally. #' @param bootstrap Do simulation using resampled errors rather than normally #' distributed errors or errors provided as \code{innov}. #' @param innov A vector of innovations to use as the error series. Ignored if #' \code{bootstrap==TRUE}. If not \code{NULL}, the value of \code{nsim} is set #' to length of \code{innov}. #' @param xreg New values of \code{xreg} to be used for forecasting. The value #' of \code{nsim} is set to the number of rows of \code{xreg} if it is not #' \code{NULL}. #' @param ... Other arguments, not currently used. #' @inheritParams forecast #' #' @return An object of class "\code{ts}". #' @author Rob J Hyndman #' @seealso \code{\link{ets}}, \code{\link{Arima}}, \code{\link{auto.arima}}, #' \code{\link{ar}}, \code{\link{arfima}}, \code{\link{nnetar}}. 
#' @keywords ts #' @examples #' fit <- ets(USAccDeaths) #' plot(USAccDeaths, xlim=c(1973,1982)) #' lines(simulate(fit, 36), col="red") #' #' @export simulate.ets <- function(object, nsim=length(object$x), seed=NULL, future=TRUE, bootstrap=FALSE, innov=NULL, ...) { if (is.null(innov)) { if (!exists(".Random.seed", envir = .GlobalEnv, inherits = FALSE)) { runif(1) } if (is.null(seed)) { RNGstate <- get(".Random.seed", envir = .GlobalEnv) } else { R.seed <- get(".Random.seed", envir = .GlobalEnv) set.seed(seed) RNGstate <- structure(seed, kind = as.list(RNGkind())) on.exit(assign(".Random.seed", R.seed, envir = .GlobalEnv)) } } else { nsim <- length(innov) } if (is.null(tsp(object$x))) { object$x <- ts(object$x, frequency = 1, start = 1) } if (future) { initstate <- object$state[length(object$x) + 1, ] } else { # choose a random starting point initstate <- object$state[sample(1:length(object$x), 1), ] } if (bootstrap) { res <- na.omit(c(object$residuals) - mean(object$residuals, na.rm = TRUE)) e <- sample(res, nsim, replace = TRUE) } else if (is.null(innov)) { e <- rnorm(nsim, 0, sqrt(object$sigma)) } else if (length(innov) == nsim) { e <- innov } else { stop("Length of innov must be equal to nsim") } if (object$components[1] == "M") { e <- pmax(-1, e) } tmp <- ts(.C( "etssimulate", as.double(initstate), as.integer(object$m), as.integer(switch(object$components[1], "A" = 1, "M" = 2)), as.integer(switch(object$components[2], "N" = 0, "A" = 1, "M" = 2)), as.integer(switch(object$components[3], "N" = 0, "A" = 1, "M" = 2)), as.double(object$par["alpha"]), as.double(ifelse(object$components[2] == "N", 0, object$par["beta"])), as.double(ifelse(object$components[3] == "N", 0, object$par["gamma"])), as.double(ifelse(object$components[4] == "FALSE", 1, object$par["phi"])), as.integer(nsim), as.double(numeric(nsim)), as.double(e), PACKAGE = "forecast" )[[11]], frequency = object$m, start = ifelse(future, tsp(object$x)[2] + 1 / tsp(object$x)[3], tsp(object$x)[1])) if (is.na(tmp[1])) { stop("Problem with multiplicative damped trend") } if (!is.null(object$lambda)) { tmp <- InvBoxCox(tmp, object$lambda) } return(tmp) } # Simulate ARIMA model starting with observed data x # Some of this function is borrowed from the arima.sim() function in the stats package. # Note that myarima.sim() does simulation conditional on the values of observed x, whereas # arima.sim() is unconditional on any observed x. myarima.sim <- function(model, n, x, e, ...) 
{ start.innov <- residuals(model) innov <- e data <- x # Remove initial NAs first.nonmiss <- which(!is.na(x))[1] if (first.nonmiss > 1) { tsp.x <- tsp(x) start.x <- tsp.x[1] + (first.nonmiss - 1) / tsp.x[3] x <- window(x, start = start.x) start.innov <- window(start.innov, start = start.x) } model$x <- x n.start <- length(x) x <- ts(c(start.innov, innov), start = 1 - n.start, frequency = model$seasonal.period) flag.noadjust <- FALSE if (is.null(tsp(data))) { data <- ts(data, frequency = 1, start = 1) } if (!is.list(model)) { stop("'model' must be list") } if (n <= 0L) { stop("'n' must be strictly positive") } p <- length(model$ar) q <- length(model$ma) d <- 0 D <- model$seasonal.difference m <- model$seasonal.period if (!is.null(ord <- model$order)) { if (length(ord) != 3L) { stop("'model$order' must be of length 3") } if (p != ord[1L]) { stop("inconsistent specification of 'ar' order") } if (q != ord[3L]) { stop("inconsistent specification of 'ma' order") } d <- ord[2L] if (d != round(d) || d < 0) { stop("number of differences must be a positive integer") } } if (p) { minroots <- min(Mod(polyroot(c(1, -model$ar)))) if (minroots <= 1) { stop("'ar' part of model is not stationary") } } if (length(model$ma)) { # MA filtering x <- stats::filter(x, c(1, model$ma), method = "convolution", sides = 1L) x[seq_along(model$ma)] <- 0 } ## AR "filtering" len.ar <- length(model$ar) if (length(model$ar) && (len.ar <= length(data))) { if ((D != 0) && (d != 0)) { diff.data <- diff(data, lag = 1, differences = d) diff.data <- diff(diff.data, lag = m, differences = D) } else if ((D != 0) && (d == 0)) { diff.data <- diff(data, lag = model$seasonal.period, differences = D) } else if ((D == 0) && (d != 0)) { diff.data <- diff(data, lag = 1, differences = d) } else { diff.data <- data } x.new.innovations <- x[(length(start.innov) + 1):length(x)] x.with.data <- c(diff.data, x.new.innovations) for (i in (length(diff.data) + 1):length(x.with.data)) { lagged.x.values <- x.with.data[(i - len.ar):(i - 1)] ar.coefficients <- model$ar[length(model$ar):1] sum.multiplied.x <- sum((lagged.x.values * ar.coefficients)[abs(ar.coefficients) > .Machine$double.eps]) x.with.data[i] <- x.with.data[i] + sum.multiplied.x } x.end <- x.with.data[(length(diff.data) + 1):length(x.with.data)] x <- ts(x.end, start = 1, frequency = model$seasonal.period) flag.noadjust <- TRUE } else if (length(model$ar)) # but data too short { # AR filtering for all other cases where AR is used. x <- stats::filter(x, model$ar, method = "recursive") } if ((d == 0) && (D == 0) && (flag.noadjust == FALSE)) # Adjust to ensure end matches approximately { # Last 20 diffs if (n.start >= 20) { xdiff <- (model$x - x[1:n.start])[n.start - (19:0)] } else { xdiff <- model$x - x[1:n.start] } # If all same sign, choose last if (all(sign(xdiff) == 1) || all(sign(xdiff) == -1)) { xdiff <- xdiff[length(xdiff)] } else { # choose mean. 
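# (averaging the recent differences, rather than matching only the final
# observation, stops a single noisy end point from shifting the level of
# the whole unconditional sample path)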
xdiff <- mean(xdiff) } x <- x + xdiff } if ((n.start > 0) && (flag.noadjust == FALSE)) { x <- x[-(1:n.start)] } ######## Undo all differences if ((D > 0) && (d == 0)) { # Seasonal undifferencing, if there is no regular differencing i <- length(data) - D * m + 1 seasonal.xi <- data[i:length(data)] length.s.xi <- length(seasonal.xi) x <- diffinv(x, lag = m, differences = D, xi = seasonal.xi)[-(1:length.s.xi)] } else if ((d > 0) && (D == 0)) { # Regular undifferencing, if there is no seasonal differencing x <- diffinv(x, differences = d, xi = data[length(data) - (d:1) + 1])[-(1:d)] } else if ((d > 0) && (D > 0)) { # Undifferencing for where the differencing is both Seasonal and Non-Seasonal # Regular first delta.four <- diff(data, lag = m, differences = D) regular.xi <- delta.four[(length(delta.four) - D):length(delta.four)] x <- diffinv(x, differences = d, xi = regular.xi[length(regular.xi) - (d:1) + 1])[-(1:d)] # Then seasonal i <- length(data) - D * m + 1 seasonal.xi <- data[i:length(data)] length.s.xi <- length(seasonal.xi) x <- diffinv(x, lag = m, differences = D, xi = seasonal.xi) x <- x[-(1:length.s.xi)] } x <- ts(x[1:n], frequency = frequency(data), start = tsp(data)[2] + 1 / tsp(data)[3]) return(x) } #' @rdname simulate.ets #' @export simulate.Arima <- function(object, nsim=length(object$x), seed=NULL, xreg=NULL, future=TRUE, bootstrap=FALSE, innov=NULL, lambda=object$lambda, ...) { # Error check: if (object$arma[7] < 0) { stop("Value for seasonal difference is < 0. Must be >= 0") } else if ((sum(object$arma[c(3, 4, 7)]) > 0) && (object$arma[5] < 2)) { stop("Invalid value for seasonal period") } if (!is.null(xreg)) { xreg <- as.matrix(xreg) nsim <- nrow(xreg) } #### # Random Seed Code if (is.null(innov)) { if (!exists(".Random.seed", envir = .GlobalEnv)) { runif(1) } if (is.null(seed)) { RNGstate <- .Random.seed } else { R.seed <- .Random.seed set.seed(seed) RNGstate <- structure(seed, kind = as.list(RNGkind())) on.exit(assign(".Random.seed", R.seed, envir = .GlobalEnv)) } } else { nsim <- length(innov) } ############# End Random seed code # Check for seasonal ARMA components and set flag accordingly. 
This will be used later in myarima.sim() flag.s.arma <- (sum(object$arma[c(3, 4)]) > 0) # Check for Seasonality in ARIMA model if (sum(object$arma[c(3, 4, 7)]) > 0) { # return(simulateSeasonalArima(object, nsim=nsim, seed=seed, xreg=xreg, future=future, bootstrap=bootstrap, ...)) if (sum(object$model$phi) == 0) { ar <- NULL } else { ar <- as.double(object$model$phi) } if (sum(object$model$theta) == 0) { ma <- NULL } else { ma <- as.double(object$model$theta) } order <- c(length(ar), object$arma[6], length(ma)) if (future) { model <- list( order = order, ar = ar, ma = ma, sd = sqrt(object$sigma2), residuals = residuals(object), seasonal.difference = object$arma[7], seasonal.period = object$arma[5], flag.seasonal.arma = flag.s.arma, seasonal.order = object$arma[c(3, 7, 4)] ) } else { model <- list(order = order, ar = ar, ma = ma, sd = sqrt(object$sigma2), residuals = residuals(object)) } flag.seasonal.diff <- (object$arma[7] > 0) } else { #### Non-Seasonal ARIMA specific code: Set up the model order <- object$arma[c(1, 6, 2)] if (order[1] > 0) { ar <- object$model$phi[1:order[1]] } else { ar <- NULL } if (order[3] > 0) { ma <- object$model$theta[1:order[3]] } else { ma <- NULL } if (object$arma[2] != length(ma)) { stop("MA length wrong") } else if (object$arma[1] != length(ar)) { stop("AR length wrong") } if (future) { model <- list( order = object$arma[c(1, 6, 2)], ar = ar, ma = ma, sd = sqrt(object$sigma2), residuals = residuals(object), seasonal.difference = 0, flag.seasonal.arma = flag.s.arma, seasonal.order = c(0, 0, 0), seasonal.period = 1 ) } else { model <- list(order = object$arma[c(1, 6, 2)], ar = ar, ma = ma, sd = sqrt(object$sigma2), residuals = residuals(object)) } flag.seasonal.diff <- FALSE ### End non-seasonal ARIMA specific code } x <- object$x <- getResponse(object) if (is.null(tsp(x))) { x <- ts(x, frequency = 1, start = 1) } if (!is.null(lambda)) { x <- BoxCox(x, lambda) lambda <- attr(x, "lambda") } n <- length(x) if (bootstrap) { res <- na.omit(c(model$residuals) - mean(model$residuals, na.rm = TRUE)) e <- sample(res, nsim, replace = TRUE) } else if (is.null(innov)) { e <- rnorm(nsim, 0, model$sd) } else if (length(innov) == nsim) { e <- innov } else { stop("Length of innov must be equal to nsim") } use.drift <- is.element("drift", names(object$coef)) usexreg <- (!is.null(xreg) | use.drift) xm <- oldxm <- 0 if (!is.null(xreg)) { xreg <- as.matrix(xreg) if (nrow(xreg) < nsim) { stop("Not enough rows in xreg") } else { xreg <- xreg[1:nsim, ] } } if (use.drift) { # Remove existing drift column if (NCOL(xreg) == 1 && all(diff(xreg)==1)) { xreg <- NULL } else if(!is.null(colnames(xreg))) { xreg <- xreg[, colnames(xreg)!="drift", drop = FALSE] } # Create new drift column for historical simulation dft <- as.matrix(1:nsim) # Adapt if future simulation if (future) { dft <- dft + n } # Add to xreg xreg <- cbind(drift = dft, xreg) } narma <- sum(object$arma[1L:4L]) if (length(object$coef) > narma) { if (names(object$coef)[narma + 1L] == "intercept") { xreg <- cbind(intercept = rep(1, nsim), xreg) object$xreg <- cbind(intercept = rep(1, n), object$xreg) } if (!is.null(xreg)) { xm <- if (narma == 0) { drop(as.matrix(xreg) %*% object$coef) } else { drop(as.matrix(xreg) %*% object$coef[-(1L:narma)]) } oldxm <- if (narma == 0) { drop(as.matrix(object$xreg) %*% object$coef) } else { drop(as.matrix(object$xreg) %*% object$coef[-(1L:narma)]) } } } if (future) { sim <- myarima.sim(model, nsim, x - oldxm, e = e) + xm } else { if (flag.seasonal.diff) { zeros <- object$arma[5] * 
object$arma[7] sim <- arima.sim(model, nsim, innov = e) sim <- diffinv(sim, lag = object$arma[5], differences = object$arma[7])[-(1:zeros)] sim <- ts(tail(sim, nsim) + xm) } else { sim <- ts(tail(arima.sim(model, nsim, innov = e), nsim) + xm) } if(nsim==n) tsp(sim) <- tsp(x) # If model is non-stationary, then condition simulated data on first observation if (model$order[2] > 0 || flag.seasonal.diff) { sim <- sim - sim[1] + x[1] } } if (!is.null(lambda)) { sim <- InvBoxCox(sim, lambda) } return(sim) } #' @rdname simulate.ets #' @export simulate.ar <- function(object, nsim=object$n.used, seed=NULL, future=TRUE, bootstrap=FALSE, innov=NULL, ...) { if (is.null(innov)) { if (!exists(".Random.seed", envir = .GlobalEnv)) { runif(1) } if (is.null(seed)) { RNGstate <- .Random.seed } else { R.seed <- .Random.seed set.seed(seed) RNGstate <- structure(seed, kind = as.list(RNGkind())) on.exit(assign(".Random.seed", R.seed, envir = .GlobalEnv)) } } else { nsim <- length(innov) } if (future) { model <- list(ar = object$ar, sd = sqrt(object$var.pred), residuals = object$resid, seasonal.difference = 0, seasonal.period = 1, flag.seasonal.arma = FALSE) } else { model <- list(ar = object$ar, sd = sqrt(object$var.pred), residuals = object$resid) } x.mean <- object$x.mean object$x <- getResponse(object) object$x <- object$x - x.mean if (bootstrap) { res <- na.omit(c(model$residuals) - mean(model$residuals, na.rm = TRUE)) e <- sample(res, nsim, replace = TRUE) } else if (is.null(innov)) { e <- rnorm(nsim, 0, model$sd) } else if (length(innov) == nsim) { e <- innov } else { stop("Length of innov must be equal to nsim") } if (future) { return(myarima.sim(model, nsim, x = object$x, e = e) + x.mean) } else { return(arima.sim(model, nsim, innov = e) + x.mean) } } #' @rdname simulate.ets #' @export simulate.lagwalk <- function(object, nsim=length(object$x), seed=NULL, future=TRUE, bootstrap=FALSE, innov=NULL, lambda = object$lambda, ...) 
{ if (is.null(innov)) { if (!exists(".Random.seed", envir = .GlobalEnv)) { runif(1) } if (is.null(seed)) { RNGstate <- .Random.seed } else { R.seed <- .Random.seed set.seed(seed) RNGstate <- structure(seed, kind = as.list(RNGkind())) on.exit(assign(".Random.seed", R.seed, envir = .GlobalEnv)) } } else { nsim <- length(innov) } if (bootstrap) { res <- na.omit(c(object$residuals) - mean(object$residuals, na.rm = TRUE)) e <- sample(res, nsim, replace = TRUE) } else if (is.null(innov)) { se <- sqrt(object$sigma2*nsim + (nsim*object$par$drift.se)^2) e <- rnorm(nsim, 0, se) } else { e <- innov } # Cumulate errors lag_grp <- rep_len(seq_len(object$par$lag), length(e)) e <- split(e, lag_grp) cumulative_e <- unsplit(lapply(e, cumsum), lag_grp) # Find starting position x <- object$x if(!is.null(lambda)){ x <- BoxCox(x, lambda) } if(future){ start <- tail(x, object$par$lag) } else{ start <- head(x, object$par$lag) } # Handle missing values if(any(na_pos <- is.na(start))){ if(!is.null(innov)){ warning("Missing values encountered at simulation starting values, simulating starting values from closest observed value.") } lag_grp <- rep_len(seq_len(object$par$lag), length(x)) start[na_pos] <- vapply(split(x, lag_grp)[na_pos], function(x){ if(future){ x <- rev(x) } pos <- which.min(is.na(x)) x[pos] + sum(rnorm(pos-1, 0, sqrt(object$sigma2))) }, numeric(1L)) } # Construct simulated ts sim <- rep_len(start, nsim) + seq_len(nsim)*object$par$drift + cumulative_e if(!is.null(lambda)){ sim <- InvBoxCox(sim, lambda) } tspx <- tsp(x) ts(sim, start = ifelse(future, tspx[2] + 1/tspx[3], tspx[1]), frequency = tspx[3]) } #' @rdname simulate.ets #' @export simulate.fracdiff <- function(object, nsim=object$n, seed=NULL, future=TRUE, bootstrap=FALSE, innov=NULL, ...) { x <- getResponse(object) # Strip initial and final missing values xx <- na.ends(x) n <- length(xx) # Remove mean meanx <- mean(xx) xx <- xx - meanx # Difference series (removes mean as well) y <- undo.na.ends(x, diffseries(xx, d = object$d)) # Create ARMA model for differenced series arma <- Arima( y, order = c(length(object$ar), 0, length(object$ma)), include.mean = FALSE, fixed = c(object$ar, -object$ma) ) # Simulate from ARMA model ysim <- simulate(arma, nsim, seed, future = future, bootstrap = bootstrap, innov = innov) # Undo differencing and add back mean return(unfracdiff(xx, ysim, n, nsim, object$d) + meanx) } #' @rdname simulate.ets #' @export simulate.nnetar <- function(object, nsim=length(object$x), seed=NULL, xreg=NULL, future=TRUE, bootstrap=FALSE, innov=NULL, lambda=object$lambda, ...) 
{ if (is.null(innov)) { if (!exists(".Random.seed", envir = .GlobalEnv, inherits = FALSE)) { runif(1) } if (is.null(seed)) { RNGstate <- get(".Random.seed", envir = .GlobalEnv) } else { R.seed <- get(".Random.seed", envir = .GlobalEnv) set.seed(seed) RNGstate <- structure(seed, kind = as.list(RNGkind())) on.exit(assign(".Random.seed", R.seed, envir = .GlobalEnv)) } } else { nsim <- length(innov) } ## only future currently implemented if (!future) { warning("simulate.nnetar() currently only supports future=TRUE") } ## set simulation innovations if (bootstrap) { res <- na.omit(c(residuals(object, type = "innovation"))) res <- res - mean(res) ## scale if appropriate if (!is.null(object$scalex$scale)) { res <- res / object$scalex$scale } e <- sample(res, nsim, replace = TRUE) } else if (is.null(innov)) { res <- na.omit(c(residuals(object, type = "innovation"))) ## scale if appropriate if (!is.null(object$scalex$scale)) { res <- res / object$scalex$scale } e <- rnorm(nsim, 0, sd(res, na.rm = TRUE)) } else if (length(innov) == nsim) { e <- innov if (!is.null(object$scalex$scale)){ e <- e/object$scalex$scale } } else if (isTRUE(innov == 0L)) { ## to pass innov=0 so simulation equals mean forecast e <- rep(innov, nsim) } else { stop("Length of innov must be equal to nsim") } ## tspx <- tsp(object$x) # Check if xreg was used in fitted model if (is.null(object$xreg)) { if (!is.null(xreg)) { warning("External regressors were not used in fitted model, xreg will be ignored") } xreg <- NULL } else { if (is.null(xreg)) { stop("No external regressors provided") } xreg <- as.matrix(xreg) if (NCOL(xreg) != NCOL(object$xreg)) { stop("Number of external regressors does not match fitted model") } if (NROW(xreg) != nsim) { stop("Number of rows in xreg does not match nsim") } } xx <- object$x if (!is.null(lambda)) { xx <- BoxCox(xx, lambda) lambda <- attr(xx, "lambda") } # Check and apply scaling of fitted model if (!is.null(object$scalex)) { xx <- scale(xx, center = object$scalex$center, scale = object$scalex$scale) if (!is.null(xreg)) { xreg <- scale(xreg, center = object$scalexreg$center, scale = object$scalexreg$scale) } } ## Get lags used in fitted model lags <- object$lags maxlag <- max(lags) flag <- rev(tail(xx, n = maxlag)) ## Simulate by iteratively forecasting and adding innovation path <- numeric(nsim) for (i in 1:nsim) { newdata <- c(flag[lags], xreg[i, ]) if (any(is.na(newdata))) { stop("I can't simulate when there are missing values near the end of the series.") } path[i] <- mean(sapply(object$model, predict, newdata = newdata)) + e[i] flag <- c(path[i], flag[-maxlag]) } ## Re-scale simulated points if (!is.null(object$scalex)) { path <- path * object$scalex$scale + object$scalex$center } ## Add ts properties path <- ts(path, start = tspx[2] + 1 / tspx[3], frequency = tspx[3]) ## Back-transform simulated points if (!is.null(lambda)) { path <- InvBoxCox(path, lambda) } return(path) } #' @rdname simulate.ets #' @export simulate.modelAR <- function(object, nsim=length(object$x), seed=NULL, xreg=NULL, future=TRUE, bootstrap=FALSE, innov=NULL, lambda=object$lambda, ...) 
{ if (is.null(innov)) { if (!exists(".Random.seed", envir = .GlobalEnv, inherits = FALSE)) { runif(1) } if (is.null(seed)) { RNGstate <- get(".Random.seed", envir = .GlobalEnv) } else { R.seed <- get(".Random.seed", envir = .GlobalEnv) set.seed(seed) RNGstate <- structure(seed, kind = as.list(RNGkind())) on.exit(assign(".Random.seed", R.seed, envir = .GlobalEnv)) } } else { nsim <- length(innov) } ## only future currently implemented if (!future) { warning("simulate.modelAR() currently only supports future=TRUE") } ## set simulation innovations if (bootstrap) { res <- na.omit(c(residuals(object, type = "innovation"))) res <- res - mean(res) ## scale if appropriate if (!is.null(object$scalex$scale)) { res <- res / object$scalex$scale } e <- sample(res, nsim, replace = TRUE) } else if (is.null(innov)) { res <- na.omit(c(residuals(object, type = "innovation"))) ## scale if appropriate if (!is.null(object$scalex$scale)) { res <- res / object$scalex$scale } e <- rnorm(nsim, 0, sd(res, na.rm = TRUE)) } else if (length(innov) == nsim) { e <- innov if (!is.null(object$scalex$scale)){ e <- e/object$scalex$scale } } else if (isTRUE(innov == 0L)) { ## to pass innov=0 so simulation equals mean forecast e <- rep(innov, nsim) } else { stop("Length of innov must be equal to nsim") } ## tspx <- tsp(object$x) # Check if xreg was used in fitted model if (is.null(object$xreg)) { if (!is.null(xreg)) { warning("External regressors were not used in fitted model, xreg will be ignored") } xreg <- NULL } else { if (is.null(xreg)) { stop("No external regressors provided") } xreg <- as.matrix(xreg) if (NCOL(xreg) != NCOL(object$xreg)) { stop("Number of external regressors does not match fitted model") } if (NROW(xreg) != nsim) { stop("Number of rows in xreg does not match nsim") } } xx <- object$x if (!is.null(lambda)) { xx <- BoxCox(xx, lambda) lambda <- attr(xx, "lambda") } # Check and apply scaling of fitted model if (!is.null(object$scalex)) { xx <- scale(xx, center = object$scalex$center, scale = object$scalex$scale) if (!is.null(xreg)) { xreg <- scale(xreg, center = object$scalexreg$center, scale = object$scalexreg$scale) } } ## Get lags used in fitted model lags <- object$lags maxlag <- max(lags) flag <- rev(tail(xx, n = maxlag)) ## Simulate by iteratively forecasting and adding innovation path <- numeric(nsim) for (i in 1:nsim) { newdata <- c(flag[lags], xreg[i, ]) if (any(is.na(newdata))) { stop("I can't simulate when there are missing values near the end of the series.") } path[i] <- object$predict.FUN(object$model, newdata) + e[i] flag <- c(path[i], flag[-maxlag]) } ## Re-scale simulated points if (!is.null(object$scalex)) { path <- path * object$scalex$scale + object$scalex$center } ## Add ts properties path <- ts(path, start = tspx[2] + 1 / tspx[3], frequency = tspx[3]) ## Back-transform simulated points if (!is.null(lambda)) { path <- InvBoxCox(path, lambda) } return(path) } forecast/R/lm.R0000644000176200001440000004570313553673010013030 0ustar liggesusers#' Fit a linear model with time series components #' #' \code{tslm} is used to fit linear models to time series including trend and #' seasonality components. #' #' \code{tslm} is largely a wrapper for \code{\link[stats]{lm}()} except that #' it allows variables "trend" and "season" which are created on the fly from #' the time series characteristics of the data. The variable "trend" is a #' simple time trend and "season" is a factor indicating the season (e.g., the #' month or the quarter depending on the frequency of the data).
#' #' @param formula an object of class "formula" (or one that can be coerced to #' that class): a symbolic description of the model to be fitted. #' @param data an optional data frame, list or environment (or object coercible #' by as.data.frame to a data frame) containing the variables in the model. If #' not found in data, the variables are taken from environment(formula), #' typically the environment from which lm is called. #' @param subset an optional subset containing rows of data to keep. For best #' results, pass a logical vector of rows to keep. Also supports #' \code{\link[base]{subset}()} functions. #' @inheritParams forecast #' #' @param ... Other arguments passed to \code{\link[stats]{lm}()} #' @return Returns an object of class "lm". #' @author Mitchell O'Hara-Wild and Rob J Hyndman #' @seealso \code{\link{forecast.lm}}, \code{\link[stats]{lm}}. #' @keywords stats #' @examples #' #' y <- ts(rnorm(120,0,3) + 1:120 + 20*sin(2*pi*(1:120)/12), frequency=12) #' fit <- tslm(y ~ trend + season) #' plot(forecast(fit, h=20)) #' #' @export tslm <- function(formula, data, subset, lambda=NULL, biasadj=FALSE, ...) { cl <- match.call() if (!("formula" %in% class(formula))) { formula <- stats::as.formula(formula) } if (missing(data)) { mt <- try(terms(formula)) if (is.element("try-error", class(mt))) { stop("Cannot extract terms from formula, please provide data argument.") } } else { mt <- terms(formula, data = data) } ## Categorise formula variables into time-series, functions, and data. vars <- attr(mt, "variables") # Check for time series variables tsvar <- match(c("trend", "season"), as.character(vars), 0L) # Check for functions (which should be evaluated later, in lm) fnvar <- NULL for (i in 2:length(vars)) { term <- vars[[i]] if (!is.symbol(term)) { if (typeof(eval(term[[1]])) == "closure") { # If this term is a function (like fourier) fnvar <- c(fnvar, i) } } } ## Fix formula's environment for correct `...` scoping.
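## (Re-homing the formula here lets lm() resolve `trend`, `season` and any
## pre-evaluated function terms that tslm() constructs below. A hedged usage
## sketch of what this supports, assuming monthly data and the package's
## fourier() helper:
##   y <- ts(rnorm(60), frequency = 12)
##   fit <- tslm(y ~ trend + season + fourier(y, K = 2))
## )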
attr(formula, ".Environment") <- environment() if (sum(c(tsvar, fnvar)) > 0) { # Remove variables not needed in data (trend+season+functions) rmvar <- c(tsvar, fnvar) rmvar <- rmvar[rmvar != attr(mt, "response") + 1] # Never remove the reponse variable if (any(rmvar != 0)) { vars <- vars[-rmvar] } } ## Grab any variables missing from data if (!missing(data)) { # Check for any missing variables in data vars <- vars[c(TRUE, !as.character(vars[-1]) %in% colnames(data))] dataname <- substitute(data) } if (!missing(data)) { data <- datamat(do.call(datamat, as.list(vars[-1]), envir = parent.frame()), data) } else { data <- do.call(datamat, as.list(vars[-1]), envir = parent.frame()) } ## Set column name of univariate dataset if (is.null(dim(data)) && length(data) != 0) { cn <- as.character(vars)[2] } else { cn <- colnames(data) } ## Get time series attributes from the data if (is.null(tsp(data))) { if ((attr(mt, "response") + 1) %in% fnvar) { # Check unevaluated response variable tspx <- tsp(eval(attr(mt, "variables")[[attr(mt, "response") + 1]])) } tspx <- tsp(data[, 1]) # Check for complex ts data.frame } else { tspx <- tsp(data) } if (is.null(tspx)) { stop("Not time series data, use lm()") } tsdat <- match(c("trend", "season"), cn, 0L) ## Create trend and season if missing from the data if (tsdat[1] == 0) { # &tsvar[1]!=0){#If "trend" is not in data, but is in formula trend <- 1:NROW(data) cn <- c(cn, "trend") data <- cbind(data, trend) } if (tsdat[2] == 0) { # &tsvar[2]!=0){#If "season" is not in data, but is in formula if (tsvar[2] != 0 && tspx[3] <= 1) { # Nonseasonal data, and season requested stop("Non-seasonal data cannot be modelled using a seasonal factor") } season <- as.factor(cycle(data[, 1])) cn <- c(cn, "season") data <- cbind(data, season) } colnames(data) <- cn ## Subset the data according to subset argument if (!missing(subset)) { if (!is.logical(subset)) { stop("subset must be logical") } else if (NCOL(subset) > 1) { stop("subset must be a logical vector") } else if (NROW(subset) != NROW(data)) { stop("Subset must be the same length as the number of rows in the dataset") } warning("Subset has been assumed contiguous") timesx <- time(data[, 1])[subset] tspx <- recoverTSP(timesx) if (tspx[3] == 1 && tsdat[2] == 0 && tsvar[2] != 0) { stop("Non-seasonal data cannot be modelled using a seasonal factor") } data <- data[subset, ] # model.frame(formula,as.data.frame(data[subsetTF,])) } if (!is.null(lambda)) { data[, 1] <- BoxCox(data[, 1], lambda) lambda <- attr(data[, 1], "lambda") } if (tsdat[2] == 0 && tsvar[2] != 0) { data$season <- factor(data$season) # fix for lost factor information, may not be needed? } ## Fit the model and prepare model structure fit <- lm(formula, data = data, na.action = na.exclude, ...) 
fit$data <- data responsevar <- deparse(formula[[2]]) fit$residuals <- ts(residuals(fit)) fit$x <- fit$residuals fit$x[!is.na(fit$x)] <- model.frame(fit)[, responsevar] fit$fitted.values <- ts(fitted(fit)) tsp(fit$residuals) <- tsp(fit$x) <- tsp(fit$fitted.values) <- tsp(data[, 1]) <- tspx fit$call <- cl fit$method <- "Linear regression model" if (exists("dataname")) { fit$call$data <- dataname } if (!is.null(lambda)) { attr(lambda, "biasadj") <- biasadj fit$lambda <- lambda fit$fitted.values <- InvBoxCox(fit$fitted.values, lambda, biasadj, var(fit$residuals)) fit$x <- InvBoxCox(fit$x, lambda) } class(fit) <- c("tslm", class(fit)) return(fit) } #' @export fitted.tslm <- function(object, ...){ object$fitted.values } #' Forecast a linear model with possible time series components #' #' \code{forecast.lm} is used to predict linear models, especially those #' involving trend and seasonality components. #' #' \code{forecast.lm} is largely a wrapper for #' \code{\link[stats]{predict.lm}()} except that it allows variables "trend" #' and "season" which are created on the fly from the time series #' characteristics of the data. Also, the output is reformatted into a #' \code{forecast} object. #' #' @param object Object of class "lm", usually the result of a call to #' \code{\link[stats]{lm}} or \code{\link{tslm}}. #' @param newdata An optional data frame in which to look for variables with #' which to predict. If omitted, it is assumed that the only variables are #' trend and season, and \code{h} forecasts are produced. #' @param level Confidence level for prediction intervals. #' @param fan If \code{TRUE}, level is set to seq(51,99,by=3). This is suitable #' for fan plots. #' @param h Number of periods for forecasting. Ignored if \code{newdata} #' present. #' @param ts If \code{TRUE}, the forecasts will be treated as time series #' provided the original data is a time series; the \code{newdata} will be #' interpreted as related to the subsequent time periods. If \code{FALSE}, any #' time series attributes of the original data will be ignored. #' @param ... Other arguments passed to \code{\link[stats]{predict.lm}()}. #' @inheritParams forecast #' #' @return An object of class "\code{forecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{forecast.lm}. #' #' An object of class \code{"forecast"} is a list containing at least the #' following elements: \item{model}{A list containing information about the #' fitted model} \item{method}{The name of the forecasting method as a #' character string} \item{mean}{Point forecasts as a time series} #' \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper #' limits for prediction intervals} \item{level}{The confidence values #' associated with the prediction intervals} \item{x}{The historical data for #' the response variable.} \item{residuals}{Residuals from the fitted model. #' That is x minus fitted values.} \item{fitted}{Fitted values} #' @author Rob J Hyndman #' @seealso \code{\link{tslm}}, \code{\link[stats]{lm}}. 
#' @keywords stats #' @examples #' #' y <- ts(rnorm(120,0,3) + 1:120 + 20*sin(2*pi*(1:120)/12), frequency=12) #' fit <- tslm(y ~ trend + season) #' plot(forecast(fit, h=20)) #' #' @export forecast.lm <- function(object, newdata, h=10, level=c(80, 95), fan=FALSE, lambda=object$lambda, biasadj=NULL, ts=TRUE, ...) { if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 && max(level) < 1) { level <- 100 * level } else if (min(level) < 0 || max(level) > 99.99) { stop("Confidence limit out of range") } } if (!is.null(object$data)) { origdata <- object$data } # no longer exists else if (!is.null(object$model)) { origdata <- object$model } else if (!is.null(object$call$data)) { origdata <- try(object$data <- eval(object$call$data), silent = TRUE) if (is.element("try-error", class(origdata))) { stop("Could not find data. Try training your model using tslm() or attach data directly to the object via object$data<-modeldata for some object<-lm(formula,modeldata).") } } else { origdata <- as.data.frame(fitted(object) + residuals(object)) } if (!is.element("data.frame", class(origdata))) { origdata <- as.data.frame(origdata) if (!is.element("data.frame", class(origdata))) { stop("Could not find data. Try training your model using tslm() or attach data directly to the object via object$data<-modeldata for some object<-lm(formula,modeldata).") } } # Check if the forecasts will be time series if (ts && is.element("ts", class(origdata))) { tspx <- tsp(origdata) timesx <- time(origdata) } else if (ts && is.element("ts", class(origdata[, 1]))) { tspx <- tsp(origdata[, 1]) timesx <- time(origdata[, 1]) } else if (ts && is.element("ts", class(fitted(object)))) { tspx <- tsp(fitted(object)) timesx <- time(fitted(object)) } else { tspx <- NULL } # if(!is.null(object$call$subset)) # { # j <- eval(object$call$subset) # origdata <- origdata[j,] # if(!is.null(tspx)) # { # # Try to figure out times for subset. Assume they are contiguous. # timesx <- timesx[j] # tspx <- tsp(origdata) <- c(min(timesx),max(timesx),tspx[3]) # } # } # Add trend and seasonal to data frame oldterms <- terms(object) # Adjust terms for function variables and rename datamat colnames to match. 
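# The branch below reconciles `newdata` with the regressors the model needs:
# plain variables found in newdata are collected directly, while terms that
# are function calls (e.g. fourier()) are pre-evaluated on newdata and stored
# under placeholder names of the form "solvedFN___i", with the terms object
# rewritten to point at those placeholders so predict.lm() can locate them.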
if (!missing(newdata)) { reqvars <- as.character(attr(object$terms, "variables")[-1])[-attr(object$terms, "response")] # Search for time series variables tsvar <- match(c("trend", "season"), reqvars, 0L) # Check if required variables are functions fnvar <- sapply(reqvars, function(x) !(is.symbol(parse(text = x)[[1]]) || typeof(eval(parse(text = x)[[1]][[1]])) != "closure")) if (!is.data.frame(newdata)) { newdata <- datamat(newdata) colnames(newdata)[1] <- ifelse(sum(tsvar > 0), reqvars[-tsvar][1], reqvars[1]) warning("newdata column names not specified, defaulting to first variable required.") } oldnewdata <- newdata newvars <- make.names(colnames(newdata)) # Check if variables are missing misvar <- match(make.names(reqvars), newvars, 0L) == 0L if (any(!misvar & !fnvar)) { # If any variables are not missing/functions, add them to data tmpdata <- datamat(newdata[reqvars[!misvar]]) rm1 <- FALSE } else { # Prefill the datamat tmpdata <- datamat(1:NROW(newdata)) rm1 <- TRUE } # Remove trend and seasonality from required variables if (sum(tsvar) > 0) { reqvars <- reqvars[-tsvar] fnvar <- fnvar[-tsvar] misvar <- match(make.names(reqvars), newvars, 0L) == 0L } if (any(misvar | fnvar)) { # If any variables are missing/functions reqvars <- reqvars[misvar | fnvar] # They are required fnvar <- fnvar[misvar | fnvar] # Update required function variables for (i in reqvars) { found <- FALSE subvars <- NULL for (j in 1:length(object$coefficients)) { subvars[j] <- pmatch(i, names(object$coefficients)[j]) } subvars <- !is.na(subvars) subvars <- names(object$coefficients)[subvars] # Detect if subvars if multivariate if (length(subvars) > 1) { # Extract prefix only subvars <- substr(subvars, nchar(i) + 1, 999L) fsub <- match(make.names(subvars), newvars, 0L) if (any(fsub == 0)) { # Check for misnamed columns fsub <- grep(paste(make.names(subvars), collapse = "|"), newvars) } if (all(fsub != 0) && length(fsub) == length(subvars)) { imat <- as.matrix(newdata[, fsub], ncol = length(fsub)) colnames(imat) <- subvars tmpdata[[length(tmpdata) + 1]] <- imat found <- TRUE } else { # Attempt to evaluate it as a function subvars <- i } } if (length(subvars) == 1) { # Check if it is a function if (fnvar[match(i, reqvars)]) { # Pre-evaluate function from data tmpdata[[length(tmpdata) + 1]] <- eval(parse(text = subvars)[[1]], newdata) found <- TRUE } } if (found) { names(tmpdata)[length(tmpdata)] <- paste0("solvedFN___", match(i, reqvars)) subvarloc <- match(i, lapply(attr(object$terms, "predvars"), deparse)) attr(object$terms, "predvars")[[subvarloc]] <- attr(object$terms, "variables")[[subvarloc]] <- parse(text = paste0("solvedFN___", match(i, reqvars)))[[1]] } else { warning(paste0("Could not find required variable ", i, " in newdata. Specify newdata as a named data.frame")) } } } if (rm1) { tmpdata[[1]] <- NULL } newdata <- cbind(newdata, tmpdata) h <- nrow(newdata) } if (!is.null(tspx)) { # Always generate trend series trend <- ifelse(is.null(origdata$trend), NCOL(origdata), max(origdata$trend)) + (1:h) if (!missing(newdata)) { newdata <- cbind(newdata, trend) } else { newdata <- datamat(trend) } # Always generate season series x <- ts(1:h, start = tspx[2] + 1 / tspx[3], frequency = tspx[3]) season <- as.factor(cycle(x)) newdata <- cbind(newdata, season) } newdata <- as.data.frame(newdata) if (!exists("oldnewdata")) { oldnewdata <- newdata } # If only one column, assume its name. 
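# (fallback for a single supplied column that came through labelled
# "newdata" by datamat(); it is then assumed to be the lone regressor on
# the right-hand side of the model formula)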
if (ncol(newdata) == 1 && colnames(newdata)[1] == "newdata") { colnames(newdata) <- as.character(formula(object$model))[3] } # Check regressors included in newdata. # Not working so removed for now. # xreg <- attributes(terms(object$model))$term.labels # if(any(!is.element(xreg,colnames(newdata)))) # stop("Predictor variables not included") object$x <- getResponse(object) # responsevar <- as.character(formula(object$model))[2] # responsevar <- gsub("`","",responsevar) # object$x <- model.frame(object$model)[,responsevar] # Remove missing values from residuals predict_object <- object predict_object$residuals <- na.omit(as.numeric(object$residuals)) out <- list() nl <- length(level) for (i in 1:nl) out[[i]] <- predict(predict_object, newdata = newdata, se.fit = TRUE, interval = "prediction", level = level[i] / 100, ...) if (nrow(newdata) != length(out[[1]]$fit[, 1])) { stop("Variables not found in newdata") } object$terms <- oldterms if (is.null(object$series)) { # Model produced via lm(), add series attribute object$series <- deparse(attr(oldterms, "variables")[[1 + attr(oldterms, "response")]]) } fcast <- list( model = object, mean = out[[1]]$fit[, 1], lower = out[[1]]$fit[, 2], upper = out[[1]]$fit[, 3], level = level, x = object$x, series = object$series ) fcast$method <- "Linear regression model" fcast$newdata <- oldnewdata fcast$residuals <- residuals(object) fcast$fitted <- fitted(object) if (NROW(origdata) != NROW(fcast$x)) { # Give up on ts attributes as some data are missing tspx <- NULL } if (NROW(fcast$x) != NROW(fcast$residuals)) { tspx <- NULL } if (!is.null(tspx)) { fcast$x <- ts(fcast$x) fcast$residuals <- ts(fcast$residuals) fcast$fitted <- ts(fcast$fitted) tsp(fcast$x) <- tsp(fcast$residuals) <- tsp(fcast$fitted) <- tspx } if (nl > 1) { for (i in 2:nl) { fcast$lower <- cbind(fcast$lower, out[[i]]$fit[, 2]) fcast$upper <- cbind(fcast$upper, out[[i]]$fit[, 3]) } } if (!is.null(tspx)) { fcast$mean <- ts(fcast$mean, start = tspx[2] + 1 / tspx[3], frequency = tspx[3]) fcast$upper <- ts(fcast$upper, start = tspx[2] + 1 / tspx[3], frequency = tspx[3]) fcast$lower <- ts(fcast$lower, start = tspx[2] + 1 / tspx[3], frequency = tspx[3]) } if (!is.null(lambda)) { fcast$x <- InvBoxCox(fcast$x, lambda) fcast$mean <- InvBoxCox(fcast$mean, lambda, biasadj, fcast) fcast$lower <- InvBoxCox(fcast$lower, lambda) fcast$upper <- InvBoxCox(fcast$upper, lambda) } return(structure(fcast, class = "forecast")) } # Compute cross-validation and information criteria from a linear model #' Cross-validation statistic #' #' Computes the leave-one-out cross-validation statistic (also known as PRESS #' -- prediction residual sum of squares), AIC, corrected AIC, BIC and adjusted #' R^2 values for a linear model. #' #' #' @param obj output from \code{\link[stats]{lm}} or \code{\link{tslm}} #' @return Numerical vector containing CV, AIC, AICc, BIC and AdjR2 values. 
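#'   The CV statistic is the mean of the squared leave-one-out residuals,
#'   \eqn{\frac{1}{n}\sum_{i=1}^n [e_i/(1-h_i)]^2}, where \eqn{e_i} are the
#'   residuals and \eqn{h_i} the hat values of the fitted model.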
#' @author Rob J Hyndman #' @seealso \code{\link[stats]{AIC}} #' @keywords models #' @examples #' #' y <- ts(rnorm(120,0,3) + 20*sin(2*pi*(1:120)/12), frequency=12) #' fit1 <- tslm(y ~ trend + season) #' fit2 <- tslm(y ~ season) #' CV(fit1) #' CV(fit2) #' #' @export CV <- function(obj) { if (!is.element("lm", class(obj))) { stop("This function is for objects of class lm") } n <- length(obj$residuals) k <- extractAIC(obj)[1] - 1 # number of predictors (constant removed) aic <- extractAIC(obj)[2] + 2 # add 2 for the variance estimate aicc <- aic + 2 * (k + 2) * (k + 3) / (n - k - 3) bic <- aic + (k + 2) * (log(n) - 2) cv <- mean((residuals(obj) / (1 - hatvalues(obj))) ^ 2, na.rm = TRUE) adjr2 <- summary(obj)$adj out <- c(cv, aic, aicc, bic, adjr2) names(out) <- c("CV", "AIC", "AICc", "BIC", "AdjR2") return(out) } forecast/R/forecastTBATS.R0000644000176200001440000001563413553673010015024 0ustar liggesusers#' @rdname forecast.bats #' @export forecast.tbats <- function(object, h, level=c(80, 95), fan=FALSE, biasadj=NULL, ...) { # Check if forecast.tbats called incorrectly if (identical(class(object), "bats")) { return(forecast.bats(object, h, level, fan, biasadj, ...)) } # Set up the variables if (any(class(object$y) == "ts")) { ts.frequency <- frequency(object$y) } else { ts.frequency <- ifelse(!is.null(object$seasonal.periods), max(object$seasonal.periods), 1) } if (missing(h)) { if (is.null(object$seasonal.periods)) { h <- ifelse(ts.frequency == 1, 10, 2 * ts.frequency) } else { h <- 2 * max(object$seasonal.periods) } } else if (h <= 0) { stop("Forecast horizon out of bounds") } if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 && max(level) < 1) { level <- 100 * level } else if (min(level) < 0 || max(level) > 99.99) { stop("Confidence limit out of range") } } if (!is.null(object$k.vector)) { tau <- 2 * sum(object$k.vector) } else { tau <- 0 } x <- matrix(0, nrow = nrow(object$x), ncol = h) y.forecast <- numeric(h) if (!is.null(object$beta)) { adj.beta <- 1 } else { adj.beta <- 0 } # Set up the matrices w <- .Call("makeTBATSWMatrix", smallPhi_s = object$damping.parameter, kVector_s = as.integer(object$k.vector), arCoefs_s = object$ar.coefficients, maCoefs_s = object$ma.coefficients, tau_s = as.integer(tau), PACKAGE = "forecast") if (!is.null(object$seasonal.periods)) { gamma.bold <- matrix(0, nrow = 1, ncol = tau) .Call("updateTBATSGammaBold", gammaBold_s = gamma.bold, kVector_s = as.integer(object$k.vector), gammaOne_s = object$gamma.one.v, gammaTwo_s = object$gamma.two.v, PACKAGE = "forecast") } else { gamma.bold <- NULL } g <- matrix(0, nrow = (tau + 1 + adj.beta + object$p + object$q), ncol = 1) if (object$p != 0) { g[(1 + adj.beta + tau + 1), 1] <- 1 } if (object$q != 0) { g[(1 + adj.beta + tau + object$p + 1), 1] <- 1 } .Call("updateTBATSGMatrix", g_s = g, gammaBold_s = gamma.bold, alpha_s = object$alpha, beta_s = object$beta.v, PACKAGE = "forecast") # print(g) F <- makeTBATSFMatrix(alpha = object$alpha, beta = object$beta, small.phi = object$damping.parameter, seasonal.periods = object$seasonal.periods, k.vector = as.integer(object$k.vector), gamma.bold.matrix = gamma.bold, ar.coefs = object$ar.coefficients, ma.coefs = object$ma.coefficients) # Do the forecast y.forecast[1] <- w$w.transpose %*% object$x[, ncol(object$x)] x[, 1] <- F %*% object$x[, ncol(object$x)] # + g %*% object$errors[length(object$errors)] if (h > 1) { for (t in 2:h) { x[, t] <- F %*% x[, (t - 1)] y.forecast[t] <- w$w.transpose %*% x[, (t - 1)] } } ## Make prediction intervals here lower.bounds 
<- upper.bounds <- matrix(NA, ncol = length(level), nrow = h) variance.multiplier <- numeric(h) variance.multiplier[1] <- 1 if (h > 1) { for (j in 1:(h - 1)) { if (j == 1) { f.running <- diag(ncol(F)) } else { f.running <- f.running %*% F } c.j <- w$w.transpose %*% f.running %*% g variance.multiplier[(j + 1)] <- variance.multiplier[j] + c.j ^ 2 } } variance <- object$variance * variance.multiplier # print(variance) st.dev <- sqrt(variance) for (i in 1:length(level)) { marg.error <- st.dev * abs(qnorm((100 - level[i]) / 200)) lower.bounds[, i] <- y.forecast - marg.error upper.bounds[, i] <- y.forecast + marg.error } # Inv Box Cox transform if required if (!is.null(object$lambda)) { y.forecast <- InvBoxCox(y.forecast, object$lambda, biasadj, list(level = level, upper = upper.bounds, lower = lower.bounds)) lower.bounds <- InvBoxCox(lower.bounds, object$lambda) if (object$lambda < 1) { lower.bounds <- pmax(lower.bounds, 0) } upper.bounds <- InvBoxCox(upper.bounds, object$lambda) } ## Calc a start time for the forecast # y <- object$y start.time <- start(object$y) y <- ts(c(object$y, 0), start = start.time, frequency = ts.frequency) # y[(length(y)+1)] <- 0 # y <- ts(y, start=object$start.time, frequency=ts.frequency) fcast.start.time <- end(y) # Make msts object for x and mean x <- msts(object$y, seasonal.periods = (if (!is.null(object$seasonal.periods)) { object$seasonal.periods } else { ts.frequency }), ts.frequency = ts.frequency, start = start.time) fitted.values <- msts(object$fitted.values, seasonal.periods = (if (!is.null(object$seasonal.periods)) { object$seasonal.periods } else { ts.frequency }), ts.frequency = ts.frequency, start = start.time) y.forecast <- msts(y.forecast, seasonal.periods = (if (!is.null(object$seasonal.periods)) { object$seasonal.periods } else { ts.frequency }), ts.frequency = ts.frequency, start = fcast.start.time) upper.bounds <- msts(upper.bounds, seasonal.periods = (if (!is.null(object$seasonal.periods)) { object$seasonal.periods } else { ts.frequency }), ts.frequency = ts.frequency, start = fcast.start.time) lower.bounds <- msts(lower.bounds, seasonal.periods = (if (!is.null(object$seasonal.periods)) { object$seasonal.periods } else { ts.frequency }), ts.frequency = ts.frequency, start = fcast.start.time) colnames(upper.bounds) <- colnames(lower.bounds) <- paste0(level, "%") forecast.object <- list( model = object, mean = y.forecast, level = level, x = x, series = object$series, upper = upper.bounds, lower = lower.bounds, fitted = fitted.values, method = as.character(object), residuals = object$errors ) if (is.null(object$series)) { forecast.object$series <- deparse(object$call$y) } class(forecast.object) <- "forecast" return(forecast.object) } #' @export as.character.tbats <- function(x, ...) 
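# Builds the model's display name in the form
#   TBATS(omega, {p,q}, phi, {<m1,k1>,...,<mJ,kJ>}),
# e.g. "TBATS(0.234, {3,1}, 0.987, {<12,5>})" (illustrative values only);
# "1" denotes no Box-Cox transformation and "-" no damping parameter.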
{ name <- "TBATS(" if (!is.null(x$lambda)) { name <- paste(name, round(x$lambda, digits = 3), sep = "") } else { name <- paste(name, "1", sep = "") } name <- paste(name, ", {", sep = "") if (!is.null(x$ar.coefficients)) { name <- paste(name, length(x$ar.coefficients), sep = "") } else { name <- paste(name, "0", sep = "") } name <- paste(name, ",", sep = "") if (!is.null(x$ma.coefficients)) { name <- paste(name, length(x$ma.coefficients), sep = "") } else { name <- paste(name, "0", sep = "") } name <- paste(name, "}, ", sep = "") if (!is.null(x$damping.parameter)) { name <- paste(name, round(x$damping.parameter, digits = 3), ",", sep = "") } else { name <- paste(name, "-,", sep = "") } if (!is.null(x$seasonal.periods)) { name <- paste(name, " {", sep = "") M <- length(x$seasonal.periods) for (i in 1:M) { name <- paste(name, "<", round(x$seasonal.periods[i], 2), ",", x$k.vector[i], ">", sep = "") if (i < M) { name <- paste(name, ", ", sep = "") } else { name <- paste(name, "})", sep = "") } } } else { name <- paste(name, "{-})", sep = "") } return(name) } forecast/R/data.R0000644000176200001440000000321013553673010013314 0ustar liggesusers #' Australian monthly gas production #' #' Australian monthly gas production: 1956--1995. #' #' #' @format Time series data #' @source Australian Bureau of Statistics. #' @keywords datasets #' @examples #' plot(gas) #' seasonplot(gas) #' tsdisplay(gas) #' "gas" #' Daily morning gold prices #' #' Daily morning gold prices in US dollars. 1 January 1985 -- 31 March 1989. #' #' #' @format Time series data #' @keywords datasets #' @examples #' tsdisplay(gold) #' "gold" #' Half-hourly electricity demand #' #' Half-hourly electricity demand in England and Wales from Monday 5 June 2000 #' to Sunday 27 August 2000. Discussed in Taylor (2003), and kindly provided by #' James W Taylor. Units: Megawatts #' #' #' @format Time series data #' @references Taylor, J.W. (2003) Short-term electricity demand forecasting #' using double seasonal exponential smoothing. \emph{Journal of the #' Operational Research Society}, \bold{54}, 799-805. #' @source James W Taylor #' @keywords datasets #' @examples #' plot(taylor) #' "taylor" #' Australian total wine sales #' #' Australian total wine sales by wine makers in bottles <= 1 litre. Jan 1980 #' -- Aug 1994. #' #' #' @format Time series data #' @source Time Series Data Library. \url{https://pkg.yangzhuoranyang.com/tsdl/} #' @keywords datasets #' @examples #' tsdisplay(wineind) #' "wineind" #' Quarterly production of woollen yarn in Australia #' #' Quarterly production of woollen yarn in Australia: tonnes. Mar 1965 -- Sep #' 1994. #' #' #' @format Time series data #' @source Time Series Data Library. \url{https://pkg.yangzhuoranyang.com/tsdl/} #' @keywords datasets #' @examples #' tsdisplay(woolyrnq) #' "woolyrnq" forecast/R/arfima.R0000644000176200001440000002525213617634745013671 0ustar liggesusers# Remove missing values from end points na.ends <- function(x) { tspx <- tsp(x) # Strip initial and final missing values nonmiss <- (1:length(x))[!is.na(x)] if (length(nonmiss) == 0) { stop("No non-missing data") } j <- nonmiss[1] k <- nonmiss[length(nonmiss)] x <- x[j:k] if (!is.null(tspx)) { x <- ts(x, start = tspx[1] + (j - 1) / tspx[3], frequency = tspx[3]) } return(x) } # Add back missing values at ends # x is original series. y is the series with NAs removed at ends. # returns y with the nas put back at beginning but not end. 
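# Illustration (hypothetical values): for x = c(NA, 3, 4, NA, 6, NA),
# na.ends(x) returns c(3, 4, NA, 6) -- interior NAs are kept -- and
# undo.na.ends(x, na.ends(x)) pads NAs back on so the result again has
# length(x).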
undo.na.ends <- function(x, y) { n <- length(x) nonmiss <- (1:length(x))[!is.na(x)] j <- nonmiss[1] k <- nonmiss[length(nonmiss)] if (j > 1) { y <- c(rep(NA, j - 1), y) } if (k < n) { y <- c(y, rep(NA, n - k)) } tspx <- tsp(x) if (!is.null(tspx)) { tsp(y) <- tsp(x) } return(y) } ## Undifference unfracdiff <- function(x, y, n, h, d) { bin.c <- (-1) ^ (0:(n + h)) * choose(d, (0:(n + h))) b <- numeric(n) xnew <- LHS <- numeric(h) RHS <- cumsum(y) bs <- cumsum(bin.c[1:h]) b <- bin.c[(1:n) + 1] xnew[1] <- RHS[1] <- y[1] - sum(b * rev(x)) if (h > 1) { for (k in 2:h) { b <- b + bin.c[(1:n) + k] RHS[k] <- RHS[k] - sum(b * rev(x)) LHS[k] <- sum(rev(xnew[1:(k - 1)]) * bs[2:k]) xnew[k] <- RHS[k] - LHS[k] } } tspx <- tsp(x) if (is.null(tspx)) { tspx <- c(1, length(x), 1) } return(ts(xnew, frequency = tspx[3], start = tspx[2] + 1 / tspx[3])) } ## Automatic ARFIMA modelling ## Will return Arima object if d < 0.01 to prevent estimation problems #' Fit a fractionally differenced ARFIMA model #' #' An ARFIMA(p,d,q) model is selected and estimated automatically using the #' Hyndman-Khandakar (2008) algorithm to select p and q and the Haslett and #' Raftery (1989) algorithm to estimate the parameters including d. #' #' This function combines \code{\link[fracdiff]{fracdiff}} and #' \code{\link{auto.arima}} to automatically select and estimate an ARFIMA #' model. The fractional differencing parameter is chosen first assuming an #' ARFIMA(2,d,0) model. Then the data are fractionally differenced using the #' estimated d and an ARMA model is selected for the resulting time series #' using \code{\link{auto.arima}}. Finally, the full ARFIMA(p,d,q) model is #' re-estimated using \code{\link[fracdiff]{fracdiff}}. If \code{estim=="mle"}, #' the ARMA coefficients are refined using \code{\link[stats]{arima}}. #' #' @param y a univariate time series (numeric vector). #' @param drange Allowable values of d to be considered. Default of #' \code{c(0,0.5)} ensures a stationary model is returned. #' @param estim If \code{estim=="ls"}, then the ARMA parameters are calculated #' using the Haslett-Raftery algorithm. If \code{estim=="mle"}, then the ARMA #' parameters are calculated using full MLE via the \code{\link[stats]{arima}} #' function. #' @param model Output from a previous call to \code{arfima}. If model is #' passed, this same model is fitted to y without re-estimating any parameters. #' @param x Deprecated. Included for backwards compatibility. #' @param \dots Other arguments passed to \code{\link{auto.arima}} when #' selecting p and q. #' @inheritParams forecast #' #' @return A list object of S3 class \code{"fracdiff"}, which is described in #' the \code{\link[fracdiff]{fracdiff}} documentation. A few additional objects #' are added to the list including \code{x} (the original time series), and the #' \code{residuals} and \code{fitted} values. #' #' @export #' #' @author Rob J Hyndman and Farah Yasmeen #' @seealso \code{\link[fracdiff]{fracdiff}}, \code{\link{auto.arima}}, #' \code{\link{forecast.fracdiff}}. #' @references J. Haslett and A. E. Raftery (1989) Space-time Modelling with #' Long-memory Dependence: Assessing Ireland's Wind Power Resource (with #' discussion); \emph{Applied Statistics} \bold{38}, 1-50. #' #' Hyndman, R.J. and Khandakar, Y. (2008) "Automatic time series forecasting: #' The forecast package for R", \emph{Journal of Statistical Software}, #' \bold{26}(3). 
#' @keywords ts #' @examples #' #' library(fracdiff) #' x <- fracdiff.sim( 100, ma=-.4, d=.3)$series #' fit <- arfima(x) #' tsdisplay(residuals(fit)) #' arfima <- function(y, drange = c(0, 0.5), estim = c("mle", "ls"), model = NULL, lambda = NULL, biasadj = FALSE, x=y, ...) { estim <- match.arg(estim) # require(fracdiff) seriesname <- deparse(substitute(y)) orig.x <- x if (!is.null(lambda)) { x <- BoxCox(x, lambda) lambda <- attr(x, "lambda") } # Re-fit arfima model if (!is.null(model)) { fit <- model fit$residuals <- fit$fitted <- fit$lambda <- NULL if (!is.null(lambda)) { fit$lambda <- lambda # Required for residuals.fracdiff() } } # Estimate model else { # Strip initial and final missing values xx <- na.ends(x) # Remove mean meanx <- mean(xx) xx <- xx - meanx # Choose differencing parameter with AR(2) proxy to handle correlations suppressWarnings(fit <- fracdiff::fracdiff(xx, nar = 2, drange = drange)) # Choose p and q d <- fit$d y <- fracdiff::diffseries(xx, d = d) fit <- auto.arima(y, max.P = 0, max.Q = 0, stationary = TRUE, ...) # Refit model using fracdiff suppressWarnings(fit <- fracdiff::fracdiff(xx, nar = fit$arma[1], nma = fit$arma[2], drange = drange)) # Refine parameters with MLE if (estim == "mle") { y <- fracdiff::diffseries(xx, d = fit$d) p <- length(fit$ar) q <- length(fit$ma) fit2 <- try(Arima(y, order = c(p, 0, q), include.mean = FALSE)) if (is.element("try-error", class(fit2))) { fit2 <- try(Arima(y, order = c(p, 0, q), include.mean = FALSE, method = "ML")) } if (!is.element("try-error", class(fit2))) { if (p > 0) { fit$ar <- fit2$coef[1:p] } if (q > 0) { fit$ma <- -fit2$coef[p + (1:q)] } fit$residuals <- fit2$residuals } else { warning("MLE estimation failed. Returning LS estimates") } } } # Add things to model that will be needed by forecast.fracdiff fit$x <- orig.x fit$residuals <- undo.na.ends(x, residuals(fit)) fit$fitted <- x - fit$residuals if (!is.null(lambda)) { fit$fitted <- InvBoxCox(fit$fitted, lambda, biasadj, var(fit$residuals)) attr(lambda, "biasadj") <- biasadj } fit$lambda <- lambda fit$call <- match.call() fit$series <- seriesname fit <- structure(fit, class = c("ARFIMA","fracdiff")) # fit$call$data <- data.frame(x=x) #Consider replacing fit$call with match.call for consistency and tidyness return(fit) } # Forecast the output of fracdiff() or arfima() #' @rdname forecast.Arima #' @export forecast.fracdiff <- function(object, h=10, level=c(80, 95), fan=FALSE, lambda=object$lambda, biasadj=NULL, ...) 
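# Sketch of the approach used below: (1) fractionally difference the
# mean-adjusted series using the estimated d, (2) forecast the ARMA part
# with the stored coefficients via Arima(), (3) undifference the mean
# forecasts with unfracdiff(), and (4) build prediction intervals from the
# psi weights and the ARMA innovation variance.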
{ # Extract data x <- object$x <- getResponse(object) if (is.null(x)) { stop("Unable to find original time series") } if (!is.null(lambda)) { x <- BoxCox(x, lambda) lambda <- attr(x, "lambda") } xx <- na.ends(x) n <- length(xx) meanx <- mean(xx) xx <- xx - meanx # Construct ARMA part of model and forecast with it y <- fracdiff::diffseries(xx, d = object$d) fit <- Arima(y, order = c(length(object$ar), 0, length(object$ma)), include.mean = FALSE, fixed = c(object$ar, -object$ma)) fcast.y <- forecast(fit, h = h, level = level) # Undifference fcast.x <- unfracdiff(xx, fcast.y$mean, n, h, object$d) # Binomial coefficient for expansion of d bin.c <- (-1) ^ (0:(n + h)) * choose(object$d, (0:(n + h))) # Cumulative forecasts of y and forecast of y # b <- numeric(n) # fcast.x <- LHS <- numeric(h) # RHS <- cumsum(fcast.y$mean) # bs <- cumsum(bin.c[1:h]) # b <- bin.c[(1:n)+1] # fcast.x[1] <- RHS[1] <- fcast.y$mean[1] - sum(b*rev(xx)) # if(h>1) # { # for (k in 2:h) # { # b <- b + bin.c[(1:n)+k] # RHS[k] <- RHS[k] - sum(b*rev(xx)) # LHS[k] <- sum(rev(fcast.x[1:(k-1)]) * bs[2:k]) # fcast.x[k] <- RHS[k] - LHS[k] # } # } # Extract stuff from ARMA model p <- fit$arma[1] q <- fit$arma[2] phi <- theta <- numeric(h) if (p > 0) { phi[1:p] <- fit$coef[1:p] } if (q > 0) { theta[1:q] <- fit$coef[p + (1:q)] } # Calculate psi weights new.phi <- psi <- numeric(h) psi[1] <- new.phi[1] <- 1 if (h > 1) { new.phi[2:h] <- -bin.c[2:h] for (i in 2:h) { if (p > 0) { new.phi[i] <- sum(phi[1:(i - 1)] * bin.c[(i - 1):1]) - bin.c[i] } psi[i] <- sum(new.phi[2:i] * rev(psi[1:(i - 1)])) + theta[i - 1] } } # Compute forecast variances fse <- sqrt(cumsum(psi ^ 2) * fit$sigma2) # Compute prediction intervals if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 && max(level) < 1) { level <- 100 * level } else if (min(level) < 0 || max(level) > 99.99) { stop("Confidence limit out of range") } } nint <- length(level) upper <- lower <- matrix(NA, ncol = nint, nrow = h) for (i in 1:nint) { qq <- qnorm(0.5 * (1 + level[i] / 100)) lower[, i] <- fcast.x - qq * fse upper[, i] <- fcast.x + qq * fse } colnames(lower) <- colnames(upper) <- paste(level, "%", sep = "") res <- undo.na.ends(x, residuals(fit)) fits <- x - res data.tsp <- tsp(x) if (is.null(data.tsp)) { data.tsp <- c(1, length(x), 1) } mean.fcast <- ts(fcast.x + meanx, frequency = data.tsp[3], start = data.tsp[2] + 1 / data.tsp[3]) lower <- ts(lower + meanx, frequency = data.tsp[3], start = data.tsp[2] + 1 / data.tsp[3]) upper <- ts(upper + meanx, frequency = data.tsp[3], start = data.tsp[2] + 1 / data.tsp[3]) method <- paste("ARFIMA(", p, ",", round(object$d, 2), ",", q, ")", sep = "") if (!is.null(lambda)) { x <- InvBoxCox(x, lambda) fits <- InvBoxCox(fits, lambda) mean.fcast <- InvBoxCox(mean.fcast, lambda, biasadj, list(level = level, upper = upper, lower = lower)) lower <- InvBoxCox(lower, lambda) upper <- InvBoxCox(upper, lambda) } seriesname <- if (!is.null(object$series)) { object$series } else { deparse(object$call$x) } return(structure(list( x = x, mean = mean.fcast, upper = upper, lower = lower, level = level, method = method, model = object, series = seriesname, residuals = res, fitted = fits ), class = "forecast")) } # Fitted values from arfima() #' @rdname fitted.Arima #' @export fitted.ARFIMA <- function(object, h = 1, ...) 
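# For models fitted via arfima(), h-step fitted values are computed
# recursively by hfitted(); raw fracdiff() fits only support h = 1.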
{ if (!is.null(object$fitted)) { # Object produced by arfima() if (h == 1) { return(object$fitted) } else { return(hfitted(object = object, h = h, FUN = "arfima", ...)) } } else { if (h != 1) { warning("h-step fits are not supported for models produced by fracdiff(), returning one-step fits (h=1)") } x <- getResponse(object) return(x - residuals(object)) } } forecast/R/tbats.R0000644000176200001440000006305613553673010013536 0ustar liggesusers# Author: srazbash ############################################################################### #' TBATS model (Exponential smoothing state space model with Box-Cox #' transformation, ARMA errors, Trend and Seasonal components) #' #' Fits a TBATS model applied to \code{y}, as described in De Livera, Hyndman & #' Snyder (2011). Parallel processing is used by default to speed up the #' computations. #' #' @aliases as.character.tbats print.tbats #' #' @param y The time series to be forecast. Can be \code{numeric}, \code{msts} #' or \code{ts}. Only univariate time series are supported. #' @param use.box.cox \code{TRUE/FALSE} indicates whether to use the Box-Cox #' transformation or not. If \code{NULL} then both are tried and the best fit #' is selected by AIC. #' @param use.trend \code{TRUE/FALSE} indicates whether to include a trend or #' not. If \code{NULL} then both are tried and the best fit is selected by AIC. #' @param use.damped.trend \code{TRUE/FALSE} indicates whether to include a #' damping parameter in the trend or not. If \code{NULL} then both are tried #' and the best fit is selected by AIC. #' @param seasonal.periods If \code{y} is \code{numeric} then seasonal periods #' can be specified with this parameter. #' @param use.arma.errors \code{TRUE/FALSE} indicates whether to include ARMA #' errors or not. If \code{TRUE} the best fit is selected by AIC. If #' \code{FALSE} then the selection algorithm does not consider ARMA errors. #' @param use.parallel \code{TRUE/FALSE} indicates whether or not to use #' parallel processing. #' @param num.cores The number of parallel processes to be used if using #' parallel processing. If \code{NULL} then the number of logical cores is #' detected and all available cores are used. #' @param bc.lower The lower limit (inclusive) for the Box-Cox transformation. #' @param bc.upper The upper limit (inclusive) for the Box-Cox transformation. #' @param biasadj Use adjusted back-transformed mean for Box-Cox #' transformations. If TRUE, point forecasts and fitted values are mean #' forecast. Otherwise, these points can be considered the median of the #' forecast densities. #' @param model Output from a previous call to \code{tbats}. If model is #' passed, this same model is fitted to \code{y} without re-estimating any #' parameters. #' @param ... Additional arguments to be passed to \code{auto.arima} when #' choose an ARMA(p, q) model for the errors. (Note that xreg will be ignored, #' as will any arguments concerning seasonality and differencing, but arguments #' controlling the values of p and q will be used.) #' @return An object with class \code{c("tbats", "bats")}. The generic accessor #' functions \code{fitted.values} and \code{residuals} extract useful features #' of the value returned by \code{bats} and associated functions. 
The fitted #' model is designated TBATS(omega, p,q, phi, ,...,) where omega #' is the Box-Cox parameter and phi is the damping parameter; the error is #' modelled as an ARMA(p,q) process and m1,...,mJ list the seasonal periods #' used in the model and k1,...,kJ are the corresponding number of Fourier #' terms used for each seasonality. #' @author Slava Razbash and Rob J Hyndman #' @seealso \code{\link{tbats.components}}. #' @references De Livera, A.M., Hyndman, R.J., & Snyder, R. D. (2011), #' Forecasting time series with complex seasonal patterns using exponential #' smoothing, \emph{Journal of the American Statistical Association}, #' \bold{106}(496), 1513-1527. #' @keywords ts #' @examples #' #' \dontrun{ #' fit <- tbats(USAccDeaths) #' plot(forecast(fit)) #' #' taylor.fit <- tbats(taylor) #' plot(forecast(taylor.fit))} #' #' @export tbats <- function(y, use.box.cox=NULL, use.trend=NULL, use.damped.trend=NULL, seasonal.periods=NULL, use.arma.errors=TRUE, use.parallel=length(y) > 1000, num.cores=2, bc.lower=0, bc.upper=1, biasadj=FALSE, model=NULL, ...) { if (!is.numeric(y) || NCOL(y) > 1) { stop("y should be a univariate time series") } seriesname <- deparse(substitute(y)) origy <- y attr_y <- attributes(origy) # Get seasonal periods if (is.null(seasonal.periods)) { if (any(class(y) == "msts")) { seasonal.periods <- sort(attr(y, "msts")) } else if (class(y) == "ts") { seasonal.periods <- frequency(y) } else { y <- as.ts(y) seasonal.periods <- 1 } } else { # Add ts attributes if (!any(class(y) == "ts")) { y <- msts(y, seasonal.periods) } } seasonal.periods <- unique(pmax(seasonal.periods, 1)) if (all(seasonal.periods == 1)) { seasonal.periods <- NULL } ny <- length(y) y <- na.contiguous(y) if (ny != length(y)) { warning("Missing values encountered. Using longest contiguous portion of time series") if (!is.null(attr_y$tsp)) { attr_y$tsp[1:2] <- range(time(y)) } } # Refit model if available if (!is.null(model)) { if (is.element("tbats", class(model))) { refitModel <- try(fitPreviousTBATSModel(y, model = model), silent = TRUE) } else if (is.element("bats", class(model))) { refitModel <- bats(origy, model = model) } return(refitModel) } # Return constant model if required if (is.constant(y)) { fit <- list( y = y, x = matrix(y, nrow = 1, ncol = ny), errors = y * 0, fitted.values = y, seed.states = matrix(y[1]), AIC = -Inf, likelihood = -Inf, variance = 0, alpha = 0.9999, method = "TBATS", call = match.call() ) return(structure(fit, class = "bats")) } # Check for observations are positive if (any((y <= 0))) { use.box.cox <- FALSE } # Fit non-seasonal model as a benchmark non.seasonal.model <- bats( as.numeric(y), use.box.cox = use.box.cox, use.trend = use.trend, use.damped.trend = use.damped.trend, use.arma.errors = use.arma.errors, use.parallel = use.parallel, num.cores = num.cores, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj, ... 
) # If non-seasonal data, return the non-seasonal model if (is.null(seasonal.periods)) { non.seasonal.model$call <- match.call() attributes(non.seasonal.model$fitted.values) <- attributes(non.seasonal.model$errors) <- attributes(origy) non.seasonal.model$y <- origy return(non.seasonal.model) } else { seasonal.mask <- (seasonal.periods == 1) seasonal.periods <- seasonal.periods[!seasonal.mask] } if (is.null(use.box.cox)) { use.box.cox <- c(FALSE, TRUE) } if (any(use.box.cox)) { init.box.cox <- BoxCox.lambda(y, lower = bc.lower, upper = bc.upper) } else { init.box.cox <- NULL } if (is.null(use.trend)) { use.trend <- c(FALSE, TRUE) } else if (use.trend == FALSE) { use.damped.trend <- FALSE } if (is.null(use.damped.trend)) { use.damped.trend <- c(FALSE, TRUE) } # Set a vector of model params for later comparison model.params <- logical(length = 3) model.params[1] <- any(use.box.cox) model.params[2] <- any(use.trend) model.params[3] <- any(use.damped.trend) y <- as.numeric(y) n <- length(y) k.vector <- rep(1, length(seasonal.periods)) if (use.parallel) { if (is.null(num.cores)) { num.cores <- detectCores(all.tests = FALSE, logical = TRUE) } clus <- makeCluster(num.cores) } best.model <- try(fitSpecificTBATS( y, use.box.cox = model.params[1], use.beta = model.params[2], use.damping = model.params[3], seasonal.periods = seasonal.periods, k.vector = k.vector, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj ), silent = TRUE) if (is.element("try-error", class(best.model))) { best.model <- list(AIC = Inf) } for (i in 1:length(seasonal.periods)) { if (seasonal.periods[i] == 2) { next } max.k <- floor(((seasonal.periods[i] - 1) / 2)) if (i != 1) { current.k <- 2 while (current.k <= max.k) { if (seasonal.periods[i] %% current.k != 0) { current.k <- current.k + 1 next } latter <- seasonal.periods[i] / current.k if (any(((seasonal.periods[1:(i - 1)] %% latter) == 0))) { max.k <- current.k - 1 break } else { current.k <- current.k + 1 } } } if (max.k == 1) { next } if (max.k <= 6) { k.vector[i] <- max.k best.model$AIC <- Inf repeat { # old.k <- k.vector[i] # k.vector[i] <- k.vector[i]-1 new.model <- try( fitSpecificTBATS( y, use.box.cox = model.params[1], use.beta = model.params[2], use.damping = model.params[3], seasonal.periods = seasonal.periods, k.vector = k.vector, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj ), silent = TRUE ) if (is.element("try-error", class(new.model))) { new.model <- list(AIC = Inf) } if (new.model$AIC > best.model$AIC) { k.vector[i] <- k.vector[i] + 1 break } else { if (k.vector[i] == 1) { break } k.vector[i] <- k.vector[i] - 1 best.model <- new.model } } next } else { # Three different k vectors step.up.k <- k.vector step.down.k <- k.vector step.up.k[i] <- 7 step.down.k[i] <- 5 k.vector[i] <- 6 # Fit three different models ### if(use.parallel) then do parallel if (use.parallel) { k.control.array <- rbind(step.up.k, step.down.k, k.vector) models.list <- clusterApplyLB( clus, c(1:3), parFitSpecificTBATS, y = y, box.cox = model.params[1], trend = model.params[2], damping = model.params[3], seasonal.periods = seasonal.periods, k.control.matrix = k.control.array, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj ) up.model <- models.list[[1]] level.model <- models.list[[3]] down.model <- models.list[[2]] } else { up.model <- try( fitSpecificTBATS( y, use.box.cox = model.params[1], use.beta = model.params[2], use.damping = model.params[3], seasonal.periods = 
seasonal.periods, k.vector = step.up.k, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj ), silent = TRUE ) if (is.element("try-error", class(up.model))) { up.model <- list(AIC = Inf) } level.model <- try( fitSpecificTBATS( y, use.box.cox = model.params[1], use.beta = model.params[2], use.damping = model.params[3], seasonal.periods = seasonal.periods, k.vector = k.vector, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj ), silent = TRUE ) if (is.element("try-error", class(level.model))) { level.model <- list(AIC = Inf) } down.model <- try( fitSpecificTBATS( y, use.box.cox = model.params[1], use.beta = model.params[2], use.damping = model.params[3], seasonal.periods = seasonal.periods, k.vector = step.down.k, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj ), silent = TRUE ) if (is.element("try-error", class(down.model))) { down.model <- list(AIC = Inf) } } # Decide the best model of the three and then follow that direction to find the optimal k aic.vector <- c(up.model$AIC, level.model$AIC, down.model$AIC) ## If shifting down if (min(aic.vector) == down.model$AIC) { best.model <- down.model k.vector[i] <- 5 repeat{ k.vector[i] <- k.vector[i] - 1 down.model <- try( fitSpecificTBATS( y = y, use.box.cox = model.params[1], use.beta = model.params[2], use.damping = model.params[3], seasonal.periods = seasonal.periods, k.vector = k.vector, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj ), silent = TRUE ) if (is.element("try-error", class(down.model))) { down.model <- list(AIC = Inf) } if (down.model$AIC > best.model$AIC) { k.vector[i] <- k.vector[i] + 1 break } else { best.model <- down.model } if (k.vector[i] == 1) { break } } ## If staying level } else if (min(aic.vector) == level.model$AIC) { best.model <- level.model next ## If shifting up } else { best.model <- up.model k.vector[i] <- 7 repeat { k.vector[i] <- k.vector[i] + 1 up.model <- try( fitSpecificTBATS(y, model.params[1], model.params[2], model.params[3], seasonal.periods, k.vector, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj), silent = TRUE ) if (is.element("try-error", class(up.model))) { up.model <- list(AIC = Inf) } if (up.model$AIC > best.model$AIC) { k.vector[i] <- k.vector[i] - 1 break } else { best.model <- up.model } if (k.vector[i] == max.k) { break } } } } } aux.model <- best.model if (non.seasonal.model$AIC < best.model$AIC) { best.model <- non.seasonal.model } if ((length(use.box.cox) == 1) && use.trend[1] && (length(use.trend) == 1) && (length(use.damped.trend) == 1) && (use.parallel)) { # In the this case, there is only one alternative. use.parallel <- FALSE stopCluster(clus) } else if ((length(use.box.cox) == 1) && !use.trend[1] && (length(use.trend) == 1) && (use.parallel)) { # As above, in the this case, there is only one alternative. 
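# With a single candidate specification the cluster is no longer needed,
# so it is shut down and the sequential search below is used instead.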
use.parallel <- FALSE stopCluster(clus) } if (use.parallel) { # Set up the control array control.array <- NULL for (box.cox in use.box.cox) { for (trend in use.trend) { for (damping in use.damped.trend) { if (!trend && damping) { next } control.line <- c(box.cox, trend, damping) if (!is.null(control.array)) { control.array <- rbind(control.array, control.line) } else { control.array <- control.line } } } } models.list <- clusterApplyLB(clus, c(1:nrow(control.array)), parFilterTBATSSpecifics, y = y, control.array = control.array, model.params = model.params, seasonal.periods = seasonal.periods, k.vector = k.vector, use.arma.errors = use.arma.errors, aux.model = aux.model, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj, ...) stopCluster(clus) ## Choose the best model #### Get the AICs aics <- numeric(nrow(control.array)) for (i in 1:nrow(control.array)) { aics[i] <- models.list[[i]]$AIC } best.number <- which.min(aics) best.seasonal.model <- models.list[[best.number]] if (best.seasonal.model$AIC < best.model$AIC) { best.model <- best.seasonal.model } } else { for (box.cox in use.box.cox) { for (trend in use.trend) { for (damping in use.damped.trend) { if (all((model.params == c(box.cox, trend, damping)))) { new.model <- filterTBATSSpecifics(y, box.cox, trend, damping, seasonal.periods, k.vector, use.arma.errors, aux.model = aux.model, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj, ...) } else if (trend || !damping) { new.model <- filterTBATSSpecifics(y, box.cox, trend, damping, seasonal.periods, k.vector, use.arma.errors, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj, ...) } if (new.model$AIC < best.model$AIC) { best.model <- new.model } } } } } best.model$call <- match.call() attributes(best.model$fitted.values) <- attributes(best.model$errors) <- attr_y best.model$y <- origy best.model$series <- seriesname best.model$method <- "TBATS" return(best.model) } ###################################################################################################################################### parFilterTBATSSpecifics <- function(control.number, y, control.array, model.params, seasonal.periods, k.vector, use.arma.errors, aux.model=NULL, init.box.cox=NULL, bc.lower=0, bc.upper=1, biasadj=FALSE, ...) { box.cox <- control.array[control.number, 1] trend <- control.array[control.number, 2] damping <- control.array[control.number, 3] if (!all((model.params == c(box.cox, trend, damping)))) { first.model <- try( fitSpecificTBATS(y, use.box.cox = box.cox, use.beta = trend, use.damping = damping, seasonal.periods = seasonal.periods, k.vector = k.vector, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj), silent = TRUE ) } else { first.model <- aux.model } if (is.element("try-error", class(first.model))) { first.model <- list(AIC = Inf) } if (use.arma.errors) { suppressWarnings(arma <- try(auto.arima(as.numeric(first.model$errors), d = 0, ...), silent = TRUE)) if (!is.element("try-error", class(arma))) { p <- arma$arma[1] q <- arma$arma[2] if ((p != 0) || (q != 0)) { # Did auto.arima() find any AR() or MA() coefficients? 
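# auto.arima() identified an ARMA(p,q) error structure: seed zero-valued
# AR/MA coefficient vectors of those lengths and refit the full TBATS
# model, keeping the refit only if it lowers the AIC.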
if (p != 0) { ar.coefs <- numeric(p) } else { ar.coefs <- NULL } if (q != 0) { ma.coefs <- numeric(q) } else { ma.coefs <- NULL } starting.params <- first.model$parameters second.model <- try( fitSpecificTBATS(y, use.box.cox = box.cox, use.beta = trend, use.damping = damping, seasonal.periods = seasonal.periods, k.vector = k.vector, ar.coefs = ar.coefs, ma.coefs = ma.coefs, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj), silent = TRUE ) if (is.element("try-error", class(second.model))) { second.model <- list(AIC = Inf) } if (second.model$AIC < first.model$AIC) { return(second.model) } else { return(first.model) } } else { # Else auto.arima() did not find any AR() or MA()coefficients return(first.model) } } else { return(first.model) } } else { return(first.model) } } ################################################################################################# parFitSpecificTBATS <- function(control.number, y, box.cox, trend, damping, seasonal.periods, k.control.matrix, init.box.cox=NULL, bc.lower=0, bc.upper=1, biasadj=FALSE) { k.vector <- k.control.matrix[control.number, ] model <- try( fitSpecificTBATS(y, use.box.cox = box.cox, use.beta = trend, use.damping = damping, seasonal.periods = seasonal.periods, k.vector = k.vector, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj), silent = TRUE ) if (is.element("try-error", class(model))) { model <- list(AIC = Inf) } return(model) } filterTBATSSpecifics <- function(y, box.cox, trend, damping, seasonal.periods, k.vector, use.arma.errors, aux.model=NULL, init.box.cox=NULL, bc.lower=0, bc.upper=1, biasadj=FALSE, ...) { if (is.null(aux.model)) { first.model <- try( fitSpecificTBATS(y, use.box.cox = box.cox, use.beta = trend, use.damping = damping, seasonal.periods = seasonal.periods, k.vector = k.vector, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj), silent = TRUE ) } else { first.model <- aux.model } if (is.element("try-error", class(first.model))) { first.model <- list(AIC = Inf) } if (use.arma.errors) { suppressWarnings(arma <- try(auto.arima(as.numeric(first.model$errors), d = 0, ...), silent = TRUE)) if (!is.element("try-error", class(arma))) { p <- arma$arma[1] q <- arma$arma[2] if ((p != 0) || (q != 0)) { # Did auto.arima() find any AR() or MA() coefficients? 
if (p != 0) { ar.coefs <- numeric(p) } else { ar.coefs <- NULL } if (q != 0) { ma.coefs <- numeric(q) } else { ma.coefs <- NULL } starting.params <- first.model$parameters second.model <- try( fitSpecificTBATS(y, use.box.cox = box.cox, use.beta = trend, use.damping = damping, seasonal.periods = seasonal.periods, k.vector = k.vector, ar.coefs = ar.coefs, ma.coefs = ma.coefs, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj), silent = TRUE ) if (is.element("try-error", class(second.model))) { second.model <- list(AIC = Inf) } if (second.model$AIC < first.model$AIC) { return(second.model) } else { return(first.model) } } else { # Else auto.arima() did not find any AR() or MA()coefficients return(first.model) } } else { return(first.model) } } else { return(first.model) } } makeSingleFourier <- function(j, m, T) { frier <- matrix(0, nrow = T, ncol = 2) for (t in 1:T) { frier[t, 1] <- cos((2 * pi * j) / m) frier[t, 2] <- sin((2 * pi * j) / m) } return(frier) } calcFTest <- function(r.sse, ur.sse, num.restrictions, num.u.params, num.observations) { f.stat <- ((r.sse - ur.sse) / num.restrictions) / (r.sse / (num.observations - num.u.params)) p.value <- pf(f.stat, num.restrictions, (num.observations - num.u.params), lower.tail = FALSE) return(p.value) } #' @rdname fitted.Arima #' @export fitted.tbats <- function(object, h=1, ...) { if (h == 1) { return(object$fitted.values) } else { return(hfitted(object = object, h = h, FUN = "tbats", ...)) } } #' @export print.tbats <- function(x, ...) { cat(as.character(x)) cat("\n") cat("\nCall: ") print(x$call) cat("\nParameters") if (!is.null(x$lambda)) { cat("\n Lambda: ") cat(round(x$lambda, 6)) } cat("\n Alpha: ") cat(x$alpha) if (!is.null(x$beta)) { cat("\n Beta: ") cat(x$beta) cat("\n Damping Parameter: ") cat(round(x$damping.parameter, 6)) } if (!is.null(x$gamma.one.values)) { cat("\n Gamma-1 Values: ") cat(x$gamma.one.values) } if (!is.null(x$gamma.two.values)) { cat("\n Gamma-2 Values: ") cat(x$gamma.two.values) } if (!is.null(x$ar.coefficients)) { cat("\n AR coefficients: ") cat(round(x$ar.coefficients, 6)) } if (!is.null(x$ma.coefficients)) { cat("\n MA coefficients: ") cat(round(x$ma.coefficients, 6)) } cat("\n") cat("\nSeed States:\n") print(x$seed.states) cat("\nSigma: ") cat(sqrt(x$variance)) cat("\nAIC: ") cat(x$AIC) cat("\n") } #' @rdname plot.bats #' #' @examples #' #' \dontrun{ #' fit <- tbats(USAccDeaths) #' plot(fit) #' autoplot(fit, range.bars = TRUE)} #' #' @export plot.tbats <- function(x, main="Decomposition by TBATS model", ...) { out <- tbats.components(x) plot.ts(out, main = main, nc = 1, ...) } #' Extract components of a TBATS model #' #' Extract the level, slope and seasonal components of a TBATS model. The extracted components are Box-Cox transformed using the estimated transformation parameter. #' #' #' @param x A tbats object created by \code{\link{tbats}}. #' @return A multiple time series (\code{mts}) object. The first series is the observed time series. The second series is the trend component of the fitted model. Series three onwards are the seasonal components of the fitted model with one time series for each of the seasonal components. All components are transformed using estimated Box-Cox parameter. #' @author Slava Razbash and Rob J Hyndman #' @seealso \code{\link{tbats}}. #' @references De Livera, A.M., Hyndman, R.J., & Snyder, R. D. 
(2011), #' Forecasting time series with complex seasonal patterns using exponential #' smoothing, \emph{Journal of the American Statistical Association}, #' \bold{106}(496), 1513-1527. #' @keywords ts #' @examples #' #' \dontrun{ #' fit <- tbats(USAccDeaths, use.parallel=FALSE) #' components <- tbats.components(fit) #' plot(components)} #' #' @export tbats.components <- function(x) { # Get original data, transform if necessary if (!is.null(x$lambda)) { y <- BoxCox(x$y, x$lambda) lambda <- attr(y, "lambda") } else { y <- x$y } # Compute matrices tau <- ifelse(!is.null(x$k.vector), 2 * sum(x$k.vector), 0) w <- .Call( "makeTBATSWMatrix", smallPhi_s = x$damping.parameter, kVector_s = as.integer(x$k.vector), arCoefs_s = x$ar.coefficients, maCoefs_s = x$ma.coefficients, tau_s = as.integer(tau), PACKAGE = "forecast" ) out <- cbind(observed = c(y), level = x$x[1, ]) if (!is.null(x$beta)) { out <- cbind(out, slope = x$x[2, ]) } # Add seasonal components if they exist if (tau > 0) { nonseas <- 2 + !is.null(x$beta) # No. non-seasonal columns in out nseas <- length(x$seasonal.periods) # No. seasonal periods seas.states <- cbind(x$seed.states, x$x)[-(1:(1 + !is.null(x$beta))), ] seas.states <- seas.states[, -ncol(seas.states)] w <- w$w.transpose[, -(1:(1 + !is.null(x$beta))), drop = FALSE] w <- w[, 1:tau, drop = FALSE] j <- cumsum(c(1, 2 * x$k.vector)) for (i in 1:nseas) out <- cbind(out, season = c(w[, j[i]:(j[i + 1] - 1), drop = FALSE] %*% seas.states[j[i]:(j[i + 1] - 1), ])) if (nseas > 1) { colnames(out)[nonseas + 1:nseas] <- paste("season", 1:nseas, sep = "") } } # Add time series characteristics out <- ts(out) tsp(out) <- tsp(y) return(out) } forecast/R/DM2.R0000644000176200001440000001000513553673010012765 0ustar liggesusers# Diebold-Mariano test. Modified from code by Adrian Trapletti. # Then adapted by M. Yousaf Khan for better performance on small samples #' Diebold-Mariano test for predictive accuracy #' #' The Diebold-Mariano test compares the forecast accuracy of two forecast #' methods. #' #' This function implements the modified test proposed by Harvey, Leybourne and #' Newbold (1997). The null hypothesis is that the two methods have the same #' forecast accuracy. For \code{alternative="less"}, the alternative hypothesis #' is that method 2 is less accurate than method 1. For #' \code{alternative="greater"}, the alternative hypothesis is that method 2 is #' more accurate than method 1. For \code{alternative="two.sided"}, the #' alternative hypothesis is that method 1 and method 2 have different levels #' of accuracy. #' #' @param e1 Forecast errors from method 1. #' @param e2 Forecast errors from method 2. #' @param alternative a character string specifying the alternative hypothesis, #' must be one of \code{"two.sided"} (default), \code{"greater"} or #' \code{"less"}. You can specify just the initial letter. #' @param h The forecast horizon used in calculating \code{e1} and \code{e2}. #' @param power The power used in the loss function. Usually 1 or 2. 
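#'   The loss applied to each error series is \eqn{|e_t|^{power}}, so
#'   \code{power=2} corresponds to squared-error loss and \code{power=1}
#'   to absolute-error loss.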
#' @return A list with class \code{"htest"} containing the following #' components: \item{statistic}{the value of the DM-statistic.} #' \item{parameter}{the forecast horizon and loss function power used in the #' test.} \item{alternative}{a character string describing the alternative #' hypothesis.} \item{p.value}{the p-value for the test.} \item{method}{a #' character string with the value "Diebold-Mariano Test".} \item{data.name}{a #' character vector giving the names of the two error series.} #' @author George Athanasopoulos #' @references Diebold, F.X. and Mariano, R.S. (1995) Comparing predictive #' accuracy. \emph{Journal of Business and Economic Statistics}, \bold{13}, #' 253-263. #' #' Harvey, D., Leybourne, S., & Newbold, P. (1997). Testing the equality of #' prediction mean squared errors. \emph{International Journal of forecasting}, #' \bold{13}(2), 281-291. #' @keywords htest ts #' @examples #' #' # Test on in-sample one-step forecasts #' f1 <- ets(WWWusage) #' f2 <- auto.arima(WWWusage) #' accuracy(f1) #' accuracy(f2) #' dm.test(residuals(f1),residuals(f2),h=1) #' #' # Test on out-of-sample one-step forecasts #' f1 <- ets(WWWusage[1:80]) #' f2 <- auto.arima(WWWusage[1:80]) #' f1.out <- ets(WWWusage[81:100],model=f1) #' f2.out <- Arima(WWWusage[81:100],model=f2) #' accuracy(f1.out) #' accuracy(f2.out) #' dm.test(residuals(f1.out),residuals(f2.out),h=1) #' #' @export dm.test <- function(e1, e2, alternative = c("two.sided", "less", "greater"), h = 1, power = 2) { alternative <- match.arg(alternative) d <- c(abs(e1)) ^ power - c(abs(e2)) ^ power d.cov <- acf(d, na.action = na.omit, lag.max = h - 1, type = "covariance", plot = FALSE)$acf[, , 1] d.var <- sum(c(d.cov[1], 2 * d.cov[-1])) / length(d) dv <- d.var # max(1e-8,d.var) if (dv > 0) { STATISTIC <- mean(d, na.rm = TRUE) / sqrt(dv) } else if (h == 1) { stop("Variance of DM statistic is zero") } else { warning("Variance is negative, using horizon h=1") return(dm.test(e1, e2, alternative, h = 1, power)) } n <- length(d) k <- ((n + 1 - 2 * h + (h / n) * (h - 1)) / n) ^ (1 / 2) STATISTIC <- STATISTIC * k names(STATISTIC) <- "DM" if (alternative == "two.sided") { PVAL <- 2 * pt(-abs(STATISTIC), df = n - 1) } else if (alternative == "less") { PVAL <- pt(STATISTIC, df = n - 1) } else if (alternative == "greater") { PVAL <- pt(STATISTIC, df = n - 1, lower.tail = FALSE) } PARAMETER <- c(h, power) names(PARAMETER) <- c("Forecast horizon", "Loss function power") structure( list( statistic = STATISTIC, parameter = PARAMETER, alternative = alternative, p.value = PVAL, method = "Diebold-Mariano Test", data.name = c(deparse(substitute(e1)), deparse(substitute(e2))) ), class = "htest" ) } is.htest <- function(x) { inherits(x, "htest") } forecast/R/errors.R0000644000176200001440000002451313553673010013730 0ustar liggesusers## Measures of forecast accuracy ## Forecasts in f. This may be a numerical vector or the output from arima or ets or derivatives. ## Actual values in x # dx = response variable in historical data ## test enables a subset of x and f to be tested. 
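# A hypothetical internal call for illustration:
#   testaccuracy(fc, x, test = 37:48, d = 0, D = 1)
# would score only observations 37-48 of the test period against fc$mean.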
# MASE: d is the # of differencing # MASE: D is the # of seasonal differencing testaccuracy <- function(f, x, test, d, D) { dx <- getResponse(f) if (is.data.frame(x)) { responsevar <- as.character(formula(f$model))[2] if (is.element(responsevar, colnames(x))) { x <- x[, responsevar] } else { stop("I can't figure out what data to use.") } } if (is.list(f)) { if (is.element("mean", names(f))) { f <- f$mean } else { stop("Unknown list structure") } } if (is.ts(x) && is.ts(f)) { tspf <- tsp(f) tspx <- tsp(x) start <- max(tspf[1], tspx[1]) end <- min(tspf[2], tspx[2]) # Adjustment to allow for floating point issues start <- min(start, end) end <- max(start, end) f <- window(f, start = start, end = end) x <- window(x, start = start, end = end) } n <- length(x) if (is.null(test)) { test <- 1:n } else if (min(test) < 1 || max(test) > n) { warning("test elements must be within sample") test <- test[test >= 1 & test <= n] } ff <- f xx <- x # Check length of f if (length(f) < n) { stop("Not enough forecasts. Check that forecasts and test data match.") } error <- (xx - ff[1:n])[test] pe <- error / xx[test] * 100 me <- mean(error, na.rm = TRUE) mse <- mean(error ^ 2, na.rm = TRUE) mae <- mean(abs(error), na.rm = TRUE) mape <- mean(abs(pe), na.rm = TRUE) mpe <- mean(pe, na.rm = TRUE) out <- c(me, sqrt(mse), mae, mpe, mape) names(out) <- c("ME", "RMSE", "MAE", "MPE", "MAPE") # Compute MASE if historical data available if (!is.null(dx)) { tspdx <- tsp(dx) if (!is.null(tspdx)) { if (D > 0) { # seasonal differencing nsd <- diff(dx, lag = round(tspdx[3L]), differences = D) } else { # non seasonal differencing nsd <- dx } if (d > 0) { nd <- diff(nsd, differences = d) } else { nd <- nsd } scale <- mean(abs(nd), na.rm = TRUE) } else { # not time series scale <- mean(abs(dx - mean(dx, na.rm = TRUE)), na.rm = TRUE) } mase <- mean(abs(error / scale), na.rm = TRUE) out <- c(out, mase) names(out)[length(out)] <- "MASE" } # Additional time series measures if (!is.null(tsp(x)) && n > 1) { fpe <- (c(ff[2:n]) / c(xx[1:(n - 1)]) - 1)[test - 1] ape <- (c(xx[2:n]) / c(xx[1:(n - 1)]) - 1)[test - 1] theil <- sqrt(sum((fpe - ape) ^ 2, na.rm = TRUE) / sum(ape ^ 2, na.rm = TRUE)) if (length(error) > 1) { r1 <- acf(error, plot = FALSE, lag.max = 2, na.action = na.pass)$acf[2, 1, 1] } else { r1 <- NA } nj <- length(out) out <- c(out, r1, theil) names(out)[nj + (1:2)] <- c("ACF1", "Theil's U") } return(out) } trainingaccuracy <- function(f, test, d, D) { # Make sure x is an element of f when f is a fitted model rather than a forecast # if(!is.list(f)) # stop("f must be a forecast object or a time series model object.") dx <- getResponse(f) if (is.element("splineforecast", class(f))) { fits <- f$onestepf } else { fits <- fitted(f) } # Don't use f$resid as this may contain multiplicative errors. 
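# For example, an ets() model with multiplicative errors stores relative
# errors in f$residuals, whereas dx - fitted(f) gives the additive
# residuals required for ME/RMSE/MAE below.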
res <- dx - fits n <- length(res) if (is.null(test)) { test <- 1:n } if (min(test) < 1 || max(test) > n) { warning("test elements must be within sample") test <- test[test >= 1 & test <= n] } tspdx <- tsp(dx) res <- res[test] dx <- dx[test] pe <- res / dx * 100 # Percentage error me <- mean(res, na.rm = TRUE) mse <- mean(res ^ 2, na.rm = TRUE) mae <- mean(abs(res), na.rm = TRUE) mape <- mean(abs(pe), na.rm = TRUE) mpe <- mean(pe, na.rm = TRUE) out <- c(me, sqrt(mse), mae, mpe, mape) names(out) <- c("ME", "RMSE", "MAE", "MPE", "MAPE") # Compute MASE if historical data available if (!is.null(dx)) { if (!is.null(tspdx)) { if (D > 0) { # seasonal differencing nsd <- diff(dx, lag = round(tspdx[3L]), differences = D) } else { # non seasonal differencing nsd <- dx } if (d > 0) { nd <- diff(nsd, differences = d) } else { nd <- nsd } scale <- mean(abs(nd), na.rm = TRUE) } else { # not time series scale <- mean(abs(dx - mean(dx, na.rm = TRUE)), na.rm = TRUE) } mase <- mean(abs(res / scale), na.rm = TRUE) out <- c(out, mase) names(out)[length(out)] <- "MASE" } # Additional time series measures if (!is.null(tspdx)) { if (length(res) > 1) { r1 <- acf(res, plot = FALSE, lag.max = 2, na.action = na.pass)$acf[2, 1, 1] } else { r1 <- NA } nj <- length(out) out <- c(out, r1) names(out)[nj + 1] <- "ACF1" } return(out) } #' Accuracy measures for a forecast model #' #' Returns range of summary measures of the forecast accuracy. If \code{x} is #' provided, the function measures test set forecast accuracy #' based on \code{x-f}. If \code{x} is not provided, the function only produces #' training set accuracy measures of the forecasts based on #' \code{f["x"]-fitted(f)}. All measures are defined and discussed in Hyndman #' and Koehler (2006). #' #' The measures calculated are: #' \itemize{ #' \item ME: Mean Error #' \item RMSE: Root Mean Squared Error #' \item MAE: Mean Absolute Error #' \item MPE: Mean Percentage Error #' \item MAPE: Mean Absolute Percentage Error #' \item MASE: Mean Absolute Scaled Error #' \item ACF1: Autocorrelation of errors at lag 1. #' } #' By default, the MASE calculation is scaled using MAE of training set naive #' forecasts for non-seasonal time series, training set seasonal naive forecasts #' for seasonal time series and training set mean forecasts for non-time series data. #' If \code{f} is a numerical vector rather than a \code{forecast} object, the MASE #' will not be returned as the training data will not be available. #' #' See Hyndman and Koehler (2006) and Hyndman and Athanasopoulos (2014, Section #' 2.5) for further details. #' #' @param f An object of class \dQuote{\code{forecast}}, or a numerical vector #' containing forecasts. It will also work with \code{Arima}, \code{ets} and #' \code{lm} objects if \code{x} is omitted -- in which case training set accuracy #' measures are returned. #' @param x An optional numerical vector containing actual values of the same #' length as object, or a time series overlapping with the times of \code{f}. #' @param test Indicator of which elements of \code{x} and \code{f} to test. If #' \code{test} is \code{NULL}, all elements are used. Otherwise test is a #' numeric vector containing the indices of the elements to use in the test. #' @param d An integer indicating the number of lag-1 differences to be used #' for the denominator in MASE calculation. Default value is 1 for non-seasonal #' series and 0 for seasonal series. 
#' @param D An integer indicating the number of seasonal differences to be used #' for the denominator in MASE calculation. Default value is 0 for non-seasonal #' series and 1 for seasonal series. #' @param ... Additional arguments depending on the specific method. #' @return Matrix giving forecast accuracy measures. #' @author Rob J Hyndman #' @references Hyndman, R.J. and Koehler, A.B. (2006) "Another look at measures #' of forecast accuracy". \emph{International Journal of Forecasting}, #' \bold{22}(4), 679-688. Hyndman, R.J. and Athanasopoulos, G. (2018) #' "Forecasting: principles and practice", 2nd ed., OTexts, Melbourne, Australia. #' Section 3.4 "Evaluating forecast accuracy". #' \url{https://otexts.org/fpp2/accuracy.html}. #' @keywords ts #' @examples #' #' fit1 <- rwf(EuStockMarkets[1:200,1],h=100) #' fit2 <- meanf(EuStockMarkets[1:200,1],h=100) #' accuracy(fit1) #' accuracy(fit2) #' accuracy(fit1,EuStockMarkets[201:300,1]) #' accuracy(fit2,EuStockMarkets[201:300,1]) #' plot(fit1) #' lines(EuStockMarkets[1:300,1]) #' @export accuracy <- function(f, ...) { UseMethod("accuracy") } #' @rdname accuracy #' @method accuracy default #' @export accuracy.default <- function(f, x, test=NULL, d=NULL, D=NULL, ...) { if (!any(is.element(class(f), c( "mforecast", "forecast", "ts", "integer", "numeric", "Arima", "ets", "lm", "bats", "tbats", "nnetar", "stlm", "baggedModel" )))) { stop("First argument should be a forecast object or a time series.") } if (is.element("mforecast", class(f))) { return(accuracy.mforecast(f, x, test, d, D)) } trainset <- (is.list(f)) testset <- (!missing(x)) if (testset && !is.null(test)) { trainset <- FALSE } if (!trainset && !testset) { stop("Unable to compute forecast accuracy measures") } # Find d and D if (is.null(D) && is.null(d)) { if (testset) { d <- as.numeric(frequency(x) == 1) D <- as.numeric(frequency(x) > 1) } else if (trainset) { if (!is.null(f$mean)) { d <- as.numeric(frequency(f$mean) == 1) D <- as.numeric(frequency(f$mean) > 1) } else { d <- as.numeric(frequency(f$x) == 1) D <- as.numeric(frequency(f$x) > 1) } } else { d <- as.numeric(frequency(f) == 1) D <- as.numeric(frequency(f) > 1) } } if (trainset) { trainout <- trainingaccuracy(f, test, d, D) trainnames <- names(trainout) } else { trainnames <- NULL } if (testset) { testout <- testaccuracy(f, x, test, d, D) testnames <- names(testout) } else { testnames <- NULL } outnames <- unique(c(trainnames, testnames)) out <- matrix(NA, nrow = 2, ncol = length(outnames)) colnames(out) <- outnames rownames(out) <- c("Training set", "Test set") if (trainset) { out[1, names(trainout)] <- trainout } if (testset) { out[2, names(testout)] <- testout } if (!testset) { out <- out[1, , drop = FALSE] } if (!trainset) { out <- out[2, , drop = FALSE] } return(out) } # Compute accuracy for an mforecast object #' @export accuracy.mforecast <- function(f, x, test=NULL, d, D, ...) 
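# Applies accuracy() to each component forecast of the mforecast object in
# turn, matching column i of x when test data are supplied, and row-binds
# the results with row names prefixed by each series name.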
{ object <- f out <- NULL nox <- missing(x) i <- 1 for (fcast in object$forecast) { if (nox) { out1 <- accuracy(fcast, test = test, d = d, D = D) } else { out1 <- accuracy(fcast, x[, i], test, d, D) } rownames(out1) <- paste(fcast$series, rownames(out1)) out <- rbind(out, out1) i <- i + 1 } return(out) } forecast/R/modelAR.R0000644000176200001440000004173313553673010013742 0ustar liggesusers# Defaults: # For non-seasonal data, p chosen using AIC from linear AR(p) model # For seasonal data, p chosen using AIC from linear AR(p) model after # seasonally adjusting with STL decomposition, and P=1 #' Time Series Forecasts with a user-defined model #' #' Experimental function to forecast univariate time series with a #' user-defined model. #' #' This is an experimental function and is only recommended for advanced users. #' The selected model is fitted with lagged values of \code{y} as #' inputs. The inputs are #' lags 1 to \code{p}, and seasonal lags \code{m}, \code{2m}, ..., \code{Pm}, where #' \code{m=frequency(y)}. If \code{xreg} is provided, its columns are also #' used as inputs. If there are missing values in \code{y} or #' \code{xreg}, the corresponding rows (and any others which depend on them as #' lags) are omitted from the fit. The model is trained for one-step #' forecasting. Multi-step forecasts are computed recursively. #' #' @aliases print.modelAR #' #' @param y A numeric vector or time series of class \code{ts}. #' @param p Embedding dimension for non-seasonal time series. Number of #' non-seasonal lags used as inputs. For non-seasonal time series, the default #' is the optimal number of lags (according to the AIC) for a linear AR(p) #' model. For seasonal time series, the same method is used but applied to #' seasonally adjusted data (from an stl decomposition). #' @param P Number of seasonal lags used as inputs. #' @param FUN Function used for model fitting. Must accept arguments \code{x} #' and \code{y} for the predictors and response, respectively (\code{formula} #' objects are not currently supported). #' @param predict.FUN Prediction function used to apply \code{FUN} to new data. #' Must accept the fitted object returned by \code{FUN} as its first argument, and a #' data frame or matrix of new data for its second argument. Additionally, #' it should return fitted values when new data is omitted. #' @param xreg Optionally, a vector or matrix of external regressors, which #' must have the same number of rows as \code{y}. Must be numeric. #' @param model Output from a previous call to \code{modelAR}. If \code{model} is #' passed, this same model is fitted to \code{y} without re-estimating any #' parameters. #' @param subset Optional vector specifying a subset of observations to be used #' in the fit. Can be an integer index vector or a logical vector the same #' length as \code{y}. All observations are used by default. #' @param scale.inputs If TRUE, inputs are scaled by subtracting the column #' means and dividing by their respective standard deviations. If \code{lambda} #' is not \code{NULL}, scaling is applied after the Box-Cox transformation. #' @param x Deprecated. Included for backwards compatibility. #' @param \dots Other arguments passed to \code{FUN} for #' \code{modelAR}. #' @inheritParams forecast #' #' @return Returns an object of class "\code{modelAR}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{modelAR}.
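#' #' An object of class \code{"modelAR"} is a list containing at least the #' following elements: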
#' #' \item{model}{A list containing information about the fitted model} #' \item{method}{The name of the forecasting method as a character string} #' \item{x}{The original time series.} #' \item{xreg}{The external regressors used in fitting (if given).} #' \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} #' \item{fitted}{Fitted values (one-step forecasts)} #' \item{...}{Other arguments} #' #' @author Rob J Hyndman and Gabriel Caceres #' @keywords ts #' #' @export modelAR <- function(y, p, P=1, FUN, predict.FUN, xreg=NULL, lambda=NULL, model=NULL, subset=NULL, scale.inputs=FALSE, x=y, ...) { useoldmodel <- FALSE yname <- deparse(substitute(y)) if (!is.null(model)) { # Use previously fitted model useoldmodel <- TRUE # Check for conflicts between new and old data: # Check model class if (!is.modelAR(model)) { stop("Model must be a modelAR object") } # Check new data m <- max(round(frequency(model$x)), 1L) minlength <- max(c(model$p, model$P * m)) + 1 if (length(x) < minlength) { stop(paste("Series must be at least of length", minlength, "to use fitted model")) } if (tsp(as.ts(x))[3] != m) { warning(paste("Data frequency doesn't match fitted model, coercing to frequency =", m)) x <- ts(x, frequency = m) } # Check xreg if (!is.null(model$xreg)) { if (is.null(xreg)) { stop("No external regressors provided") } if (NCOL(xreg) != NCOL(model$xreg)) { stop("Number of external regressors does not match fitted model") } } # Update parameters with previous model lambda <- model$lambda p <- model$p P <- model$P FUN <- model$FUN predict.FUN <- model$predict.FUN if (P > 0) { lags <- sort(unique(c(1:p, m * (1:P)))) } else { lags <- 1:p } if (!is.null(model$scalex)) { scale.inputs <- TRUE } } else { # when not using an old model if (length(y) < 3) { stop("Not enough data to fit a model") } # Check for constant data in time series constant_data <- is.constant(na.interp(x)) if (constant_data){ warning("Constant data, setting p=1, P=0, lambda=NULL, scale.inputs=FALSE") scale.inputs <- FALSE lambda <- NULL p <- 1 P <- 0 } ## Check for constant data in xreg if (!is.null(xreg)){ constant_xreg <- any(apply(as.matrix(xreg), 2, function(x) is.constant(na.interp(x)))) if (constant_xreg){ warning("Constant xreg column, setting scale.inputs=FALSE") scale.inputs <- FALSE } } } # Check for NAs in x if (any(is.na(x))) { warning("Missing values in x, omitting rows") } # Transform data if (!is.null(lambda)) { xx <- BoxCox(x, lambda) lambda <- attr(xx, "lambda") } else { xx <- x } ## Check whether to use a subset of the data xsub <- rep(TRUE, length(x)) if (is.numeric(subset)) { xsub[-subset] <- FALSE } if (is.logical(subset)) { xsub <- subset } # Scale series scalex <- NULL if (scale.inputs) { if (useoldmodel) { scalex <- model$scalex } else { tmpx <- scale(xx[xsub], center = TRUE, scale = TRUE) scalex <- list( center = attr(tmpx, "scaled:center"), scale = attr(tmpx, "scaled:scale") ) } xx <- scale(xx, center = scalex$center, scale = scalex$scale) xx <- xx[, 1] } # Check xreg class & dim xxreg <- NULL scalexreg <- NULL if (!is.null(xreg)) { xxreg <- xreg <- as.matrix(xreg) if (length(x) != NROW(xreg)) { stop("Number of rows in xreg does not match series length") } # Check for NAs in xreg if (any(is.na(xreg))) { warning("Missing values in xreg, omitting rows") } # Scale xreg if (scale.inputs) { if (useoldmodel) { scalexreg <- model$scalexreg } else { tmpx <- scale(xxreg[xsub, ], center = TRUE, scale = TRUE) scalexreg <- list( center = attr(tmpx, "scaled:center"), scale = attr(tmpx, 
"scaled:scale") ) } xxreg <- scale(xxreg, center = scalexreg$center, scale = scalexreg$scale) } } # Set up lagged matrix n <- length(xx) xx <- as.ts(xx) m <- max(round(frequency(xx)), 1L) if (!useoldmodel) { if (m == 1) { if (missing(p)) { p <- max(length(ar(na.interp(xx))$ar), 1) } if (p >= n) { warning("Reducing number of lagged inputs due to short series") p <- n - 1 } lags <- 1:p if (P > 1) { warning("Non-seasonal data, ignoring seasonal lags") } P <- 0 } else { if (missing(p)) { if (n >= 2 * m) { x.sa <- seasadj(mstl(na.interp(xx))) } else { x.sa <- na.interp(xx) } p <- max(length(ar(x.sa)$ar), 1) } if (p >= n) { warning("Reducing number of lagged inputs due to short series") p <- n - 1 } if (P > 0 && n >= m * P + 2) { lags <- sort(unique(c(1:p, m * (1:P)))) } else { lags <- 1:p if (P > 0) { warning("Series too short for seasonal lags") P <- 0 } } } } maxlag <- max(lags) nlag <- length(lags) y <- xx[-(1:maxlag)] lags.X <- matrix(NA_real_, ncol = nlag, nrow = n - maxlag) for (i in 1:nlag){ lags.X[, i] <- xx[(maxlag - lags[i] + 1):(n - lags[i])] } # Add xreg into lagged matrix lags.X <- cbind(lags.X, xxreg[-(1:maxlag), ]) # Remove missing values if present j <- complete.cases(lags.X, y) ## Remove values not in subset j <- j & xsub[-(1:maxlag)] ## Stop if there's no data to fit (e.g. due to NAs or NaNs) if (NROW(lags.X[j,, drop=FALSE]) == 0) { stop("No data to fit (possibly due to NA or NaN)") } ## Fit selected model if (useoldmodel) { fit <- model$model } else { fit <- FUN(x = lags.X[j,, drop=FALSE], y = y[j], ...) } # Return results out <- list() out$x <- as.ts(x) out$m <- m out$p <- p out$P <- P out$FUN <- FUN out$predict.FUN <- predict.FUN out$scalex <- scalex out$scalexreg <- scalexreg out$xreg <- xreg out$lambda <- lambda out$subset <- (1:length(x))[xsub] out$model <- fit out$modelargs <- list(...) if (useoldmodel) { out$modelargs <- model$modelargs fits <- c(rep(NA_real_, maxlag), predict.FUN(fit, lags.X[j,, drop=FALSE])) } else { fits <- c(rep(NA_real_, maxlag), predict.FUN(fit)) } if (scale.inputs) { fits <- fits * scalex$scale + scalex$center } fits <- ts(fits) if (!is.null(lambda)) { fits <- InvBoxCox(fits, lambda) } out$fitted <- ts(rep(NA_real_, length(out$x))) out$fitted[c(rep(TRUE, maxlag), j)] <- fits tsp(out$fitted) <- tsp(out$x) out$residuals <- out$x - out$fitted out$lags <- lags out$series <- yname out$method <- deparse(substitute(FUN)) out$method <- paste0(out$method, "-AR(", p) if (P > 0) out$method <- paste(out$method, ",", P, sep = "") out$method <- paste0(out$method, ")") if (P > 0) out$method <- paste(out$method, "[", m, "]", sep = "") out$call <- match.call() return(structure(out, class = c("modelAR"))) } #' Forecasting using user-defined model #' #' Returns forecasts and other information for user-defined #' models. #' #' Prediction intervals are calculated through simulations and can be slow. #' Note that if the model is too complex and overfits the data, the residuals #' can be arbitrarily small; if used for prediction interval calculations, they #' could lead to misleadingly small values. #' #' @param object An object of class "\code{modelAR}" resulting from a call to #' \code{\link{modelAR}}. #' @param h Number of periods for forecasting. If \code{xreg} is used, \code{h} #' is ignored and the number of forecast periods is set to the number of rows #' of \code{xreg}. #' @param PI If TRUE, prediction intervals are produced, otherwise only point #' forecasts are calculated. 
If \code{PI} is FALSE, then \code{level}, #' \code{fan}, \code{bootstrap} and \code{npaths} are all ignored. #' @param level Confidence level for prediction intervals. #' @param fan If \code{TRUE}, \code{level} is set to \code{seq(51,99,by=3)}. This is #' suitable for fan plots. #' @param xreg Future values of external regressor variables. #' @param bootstrap If \code{TRUE}, then prediction intervals are computed using #' simulations with resampled residuals rather than normally distributed #' errors. Ignored if \code{innov} is not \code{NULL}. #' @param npaths Number of sample paths used in computing simulated prediction #' intervals. #' @param innov Values to use as innovations for prediction intervals. Must be #' a matrix with \code{h} rows and \code{npaths} columns (vectors are coerced #' into a matrix). If present, \code{bootstrap} is ignored. #' @param ... Additional arguments passed to \code{\link{simulate.modelAR}} #' @inheritParams forecast #' #' @return An object of class "\code{forecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{forecast.modelAR}. #' #' An object of class "\code{forecast}" is a list containing at least the #' following elements: #' \item{model}{A list containing information about the fitted model} #' \item{method}{The name of the forecasting method as a character string} #' \item{mean}{Point forecasts as a time series} #' \item{lower}{Lower limits for prediction intervals} #' \item{upper}{Upper limits for prediction intervals} #' \item{level}{The confidence values associated with the prediction intervals} #' \item{x}{The original time series (either \code{object} itself or the time series #' used to create the model stored as \code{object}).} #' \item{xreg}{The external regressors used in fitting (if given).} #' \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} #' \item{fitted}{Fitted values (one-step forecasts)} #' \item{...}{Other arguments} #' #' @author Rob J Hyndman and Gabriel Caceres #' @seealso \code{\link{nnetar}}. #' @keywords ts #' #' @export forecast.modelAR <- function(object, h=ifelse(object$m > 1, 2 * object$m, 10), PI=FALSE, level=c(80, 95), fan=FALSE, xreg=NULL, lambda=object$lambda, bootstrap=FALSE, npaths=1000, innov=NULL, ...)
{ out <- object tspx <- tsp(out$x) # if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 && max(level) < 1) { level <- 100 * level } else if (min(level) < 0 || max(level) > 99.99) { stop("Confidence limit out of range") } } # Check if xreg was used in fitted model if (is.null(object$xreg)) { if (!is.null(xreg)) { warning("External regressors were not used in fitted model, xreg will be ignored") } xreg <- NULL } else { if (is.null(xreg)) { stop("No external regressors provided") } xreg <- as.matrix(xreg) if (NCOL(xreg) != NCOL(object$xreg)) { stop("Number of external regressors does not match fitted model") } h <- NROW(xreg) } fcast <- numeric(h) xx <- object$x xxreg <- xreg if (!is.null(lambda)) { xx <- BoxCox(xx, lambda) lambda <- attr(xx, "lambda") } # Check and apply scaling of fitted model if (!is.null(object$scalex)) { xx <- scale(xx, center = object$scalex$center, scale = object$scalex$scale) if (!is.null(xreg)) { xxreg <- scale(xreg, center = object$scalexreg$center, scale = object$scalexreg$scale) } } # Get lags used in fitted model lags <- object$lags maxlag <- max(lags) flag <- rev(tail(xx, n = maxlag)) # Iterative 1-step forecast for (i in 1:h) { newdata <- c(flag[lags], xxreg[i, ]) if (any(is.na(newdata))) { stop("I can't forecast when there are missing values near the end of the series.") } fcast[i] <- object$predict.FUN(object$model, newdata) flag <- c(fcast[i], flag[-maxlag]) } # Re-scale point forecasts if (!is.null(object$scalex)) { fcast <- fcast * object$scalex$scale + object$scalex$center } # Add ts properties fcast <- ts(fcast, start = tspx[2] + 1 / tspx[3], frequency = tspx[3]) # Back-transform point forecasts if (!is.null(lambda)) { fcast <- InvBoxCox(fcast, lambda) } # Compute prediction intervals using simulations if (isTRUE(PI)) { nint <- length(level) sim <- matrix(NA, nrow = npaths, ncol = h) if (!is.null(innov)) { if (length(innov) != h * npaths) { stop("Incorrect number of innovations, need h*npaths values") } innov <- matrix(innov, nrow = h, ncol = npaths) bootstrap <- FALSE } for (i in 1:npaths) sim[i, ] <- simulate(object, nsim = h, bootstrap = bootstrap, xreg = xreg, lambda = lambda, innov = innov[, i], ...) lower <- apply(sim, 2, quantile, 0.5 - level / 200, type = 8) upper <- apply(sim, 2, quantile, 0.5 + level / 200, type = 8) if (nint > 1L) { lower <- ts(t(lower)) upper <- ts(t(upper)) } else { lower <- ts(matrix(lower, ncol = 1L)) upper <- ts(matrix(upper, ncol = 1L)) } tsp(lower) <- tsp(upper) <- tsp(fcast) } else { level <- NULL lower <- NULL upper <- NULL } out$mean <- fcast out$level <- level out$lower <- lower out$upper <- upper return(structure(out, class = "forecast")) } #' @rdname fitted.Arima #' @export fitted.modelAR <- function(object, h=1, ...) { if (h == 1) { return(object$fitted) } else { return(hfitted(object = object, h = h, FUN = "modelAR", ...)) } } #' @export print.modelAR <- function(x, digits = max(3, getOption("digits") - 3), ...) { cat("Series:", x$series, "\n") cat("Model: ", x$method, "\n") cat("Call: ") print(x$call) print(x$model) cat( "\nsigma^2 estimated as ", format(mean(residuals(x) ^ 2, na.rm = TRUE), digits = digits), "\n", sep = "" ) invisible(x) } #' @rdname is.ets #' @export is.modelAR <- function(x) { inherits(x, "modelAR") } forecast/R/ets.R0000644000176200001440000011703013553673010013204 0ustar liggesusers#' Exponential smoothing state space model #' #' Returns ets model applied to \code{y}. #' #' Based on the classification of methods as described in Hyndman et al (2008). 
#' #' The methodology is fully automatic. The only required argument for ets is #' the time series. The model is chosen automatically if not specified. This #' methodology performed extremely well on the M3-competition data. (See #' Hyndman, et al, 2002, below.) #' #' @aliases print.ets summary.ets as.character.ets coef.ets tsdiag.ets #' #' @param y a numeric vector or time series of class \code{ts} #' @param model Usually a three-character string identifying method using the #' framework terminology of Hyndman et al. (2002) and Hyndman et al. (2008). #' The first letter denotes the error type ("A", "M" or "Z"); the second letter #' denotes the trend type ("N","A","M" or "Z"); and the third letter denotes #' the season type ("N","A","M" or "Z"). In all cases, "N"=none, "A"=additive, #' "M"=multiplicative and "Z"=automatically selected. So, for example, "ANN" is #' simple exponential smoothing with additive errors, "MAM" is multiplicative #' Holt-Winters' method with multiplicative errors, and so on. #' #' It is also possible for the model to be of class \code{"ets"}, and equal to #' the output from a previous call to \code{ets}. In this case, the same model #' is fitted to \code{y} without re-estimating any smoothing parameters. See #' also the \code{use.initial.values} argument. #' @param damped If TRUE, use a damped trend (either additive or #' multiplicative). If NULL, both damped and non-damped trends will be tried #' and the best model (according to the information criterion \code{ic}) #' returned. #' @param alpha Value of alpha. If NULL, it is estimated. #' @param beta Value of beta. If NULL, it is estimated. #' @param gamma Value of gamma. If NULL, it is estimated. #' @param phi Value of phi. If NULL, it is estimated. #' @param additive.only If TRUE, will only consider additive models. Default is #' FALSE. #' @param lambda Box-Cox transformation parameter. If \code{lambda="auto"}, #' then a transformation is automatically selected using \code{BoxCox.lambda}. #' The transformation is ignored if NULL. Otherwise, #' data transformed before model is estimated. When \code{lambda} is specified, #' \code{additive.only} is set to \code{TRUE}. #' @param lower Lower bounds for the parameters (alpha, beta, gamma, phi) #' @param upper Upper bounds for the parameters (alpha, beta, gamma, phi) #' @param opt.crit Optimization criterion. One of "mse" (Mean Square Error), #' "amse" (Average MSE over first \code{nmse} forecast horizons), "sigma" #' (Standard deviation of residuals), "mae" (Mean of absolute residuals), or #' "lik" (Log-likelihood, the default). #' @param nmse Number of steps for average multistep MSE (1<=\code{nmse}<=30). #' @param bounds Type of parameter space to impose: \code{"usual" } indicates #' all parameters must lie between specified lower and upper bounds; #' \code{"admissible"} indicates parameters must lie in the admissible space; #' \code{"both"} (default) takes the intersection of these regions. #' @param ic Information criterion to be used in model selection. #' @param restrict If \code{TRUE} (default), the models with infinite variance #' will not be allowed. #' @param allow.multiplicative.trend If \code{TRUE}, models with multiplicative #' trend are allowed when searching for a model. Otherwise, the model space #' excludes them. This argument is ignored if a multiplicative trend model is #' explicitly requested (e.g., using \code{model="MMN"}). 
#' @param use.initial.values If \code{TRUE} and \code{model} is of class #' \code{"ets"}, then the initial values in the model are also not #' re-estimated. #' @param na.action A function which indicates what should happen when the data #' contains NA values. By default, the largest contiguous portion of the #' time-series will be used. #' @param ... Other undocumented arguments. #' @inheritParams forecast #' #' @return An object of class "\code{ets}". #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{ets} and associated #' functions. #' @author Rob J Hyndman #' @seealso \code{\link[stats]{HoltWinters}}, \code{\link{rwf}}, #' \code{\link{Arima}}. #' @references Hyndman, R.J., Koehler, A.B., Snyder, R.D., and Grose, S. (2002) #' "A state space framework for automatic forecasting using exponential #' smoothing methods", \emph{International J. Forecasting}, \bold{18}(3), #' 439--454. #' #' Hyndman, R.J., Akram, Md., and Archibald, B. (2008) "The admissible #' parameter space for exponential smoothing models". \emph{Annals of #' Statistical Mathematics}, \bold{60}(2), 407--426. #' #' Hyndman, R.J., Koehler, A.B., Ord, J.K., and Snyder, R.D. (2008) #' \emph{Forecasting with exponential smoothing: the state space approach}, #' Springer-Verlag. \url{http://www.exponentialsmoothing.net}. #' @keywords ts #' @examples #' fit <- ets(USAccDeaths) #' plot(forecast(fit)) #' #' @export ets <- function(y, model="ZZZ", damped=NULL, alpha=NULL, beta=NULL, gamma=NULL, phi=NULL, additive.only=FALSE, lambda=NULL, biasadj=FALSE, lower=c(rep(0.0001, 3), 0.8), upper=c(rep(0.9999, 3), 0.98), opt.crit=c("lik", "amse", "mse", "sigma", "mae"), nmse=3, bounds=c("both", "usual", "admissible"), ic=c("aicc", "aic", "bic"), restrict=TRUE, allow.multiplicative.trend=FALSE, use.initial.values=FALSE, na.action = c("na.contiguous", "na.interp", "na.fail"), ...) { # dataname <- substitute(y) opt.crit <- match.arg(opt.crit) bounds <- match.arg(bounds) ic <- match.arg(ic) if(!is.function(na.action)){ na.fn_name <- match.arg(na.action) na.action <- get(na.fn_name) } seriesname <- deparse(substitute(y)) if (any(class(y) %in% c("data.frame", "list", "matrix", "mts"))) { stop("y should be a univariate time series") } y <- as.ts(y) # Check if data is constant if (missing(model) && is.constant(y)) { return(ses(y, alpha = 0.99999, initial = "simple")$model) } # Remove missing values near ends ny <- length(y) y <- na.action(y) if (ny != length(y) && na.fn_name == "na.contiguous") { warning("Missing values encountered. 
Using longest contiguous portion of time series") ny <- length(y) } orig.y <- y if (identical(class(model), "ets") && is.null(lambda)) { lambda <- model$lambda } if (!is.null(lambda)) { y <- BoxCox(y, lambda) lambda <- attr(y, "lambda") additive.only <- TRUE } if (nmse < 1 || nmse > 30) { stop("nmse out of range") } m <- frequency(y) if (any(upper < lower)) { stop("Lower limits must be less than upper limits") } # If model is an ets object, re-fit model to new data if (class(model) == "ets") { # Prevent alpha being zero (to avoid divide by zero in the C code) alpha <- max(model$par["alpha"], 1e-10) beta <- model$par["beta"] if (is.na(beta)) { beta <- NULL } gamma <- model$par["gamma"] if (is.na(gamma)) { gamma <- NULL } phi <- model$par["phi"] if (is.na(phi)) { phi <- NULL } modelcomponents <- paste(model$components[1], model$components[2], model$components[3], sep = "") damped <- (model$components[4] == "TRUE") if (use.initial.values) { errortype <- substr(modelcomponents, 1, 1) trendtype <- substr(modelcomponents, 2, 2) seasontype <- substr(modelcomponents, 3, 3) # Recompute errors from pegelsresid.C e <- pegelsresid.C(y, m, model$initstate, errortype, trendtype, seasontype, damped, alpha, beta, gamma, phi, nmse) # Compute error measures np <- length(model$par) + 1 model$loglik <- -0.5 * e$lik model$aic <- e$lik + 2 * np model$bic <- e$lik + log(ny) * np model$aicc <- model$aic + 2 * np * (np + 1) / (ny - np - 1) model$mse <- e$amse[1] model$amse <- mean(e$amse) # Compute states, fitted values and residuals tsp.y <- tsp(y) model$states <- ts(e$states, frequency = tsp.y[3], start = tsp.y[1] - 1 / tsp.y[3]) colnames(model$states)[1] <- "l" if (trendtype != "N") { colnames(model$states)[2] <- "b" } if (seasontype != "N") { colnames(model$states)[(2 + (trendtype != "N")):ncol(model$states)] <- paste("s", 1:m, sep = "") } if (errortype == "A") { model$fitted <- ts(y - e$e, frequency = tsp.y[3], start = tsp.y[1]) } else { model$fitted <- ts(y / (1 + e$e), frequency = tsp.y[3], start = tsp.y[1]) } model$residuals <- ts(e$e, frequency = tsp.y[3], start = tsp.y[1]) model$sigma2 <- sum(model$residuals ^ 2, na.rm = TRUE) / (ny - np) model$x <- orig.y model$series <- seriesname if (!is.null(lambda)) { model$fitted <- InvBoxCox(model$fitted, lambda, biasadj, var(model$residuals)) attr(lambda, "biasadj") <- biasadj } model$lambda <- lambda # Return model object return(model) } else { model <- modelcomponents if (missing(use.initial.values)) { message("Model is being refit with current smoothing parameters but initial states are being re-estimated.\nSet 'use.initial.values=TRUE' if you want to re-use existing initial values.") } } } errortype <- substr(model, 1, 1) trendtype <- substr(model, 2, 2) seasontype <- substr(model, 3, 3) if (!is.element(errortype, c("M", "A", "Z"))) { stop("Invalid error type") } if (!is.element(trendtype, c("N", "A", "M", "Z"))) { stop("Invalid trend type") } if (!is.element(seasontype, c("N", "A", "M", "Z"))) { stop("Invalid season type") } if (m < 1 || length(y) <= m) { # warning("I can't handle data with frequency less than 1. Seasonality will be ignored.") seasontype <- "N" } if (m == 1) { if (seasontype == "A" || seasontype == "M") { stop("Nonseasonal data") } else { substr(model, 3, 3) <- seasontype <- "N" } } if (m > 24) { if (is.element(seasontype, c("A", "M"))) { stop("Frequency too high") } else if (seasontype == "Z") { warning("I can't handle data with frequency greater than 24. Seasonality will be ignored. 
Try stlf() if you need seasonal forecasts.") substr(model, 3, 3) <- seasontype <- "N" # m <- 1 } } # Check inputs if (restrict) { if ((errortype == "A" && (trendtype == "M" || seasontype == "M")) | (errortype == "M" && trendtype == "M" && seasontype == "A") || (additive.only && (errortype == "M" || trendtype == "M" || seasontype == "M"))) { stop("Forbidden model combination") } } data.positive <- (min(y) > 0) if (!data.positive && errortype == "M") { stop("Inappropriate model for data with negative or zero values") } if (!is.null(damped)) { if (damped && trendtype == "N") { stop("Forbidden model combination") } } n <- length(y) # Check we have enough data to fit a model npars <- 2L # alpha + l0 if (trendtype == "A" || trendtype == "M") { npars <- npars + 2L } # beta + b0 if (seasontype == "A" || seasontype == "M") { npars <- npars + m } # gamma + s if (!is.null(damped)) { npars <- npars + as.numeric(damped) } # Produce something non-optimized for tiny data sets if (n <= npars + 4L) { if (!is.null(damped)) { if (damped) { warning("Not enough data to use damping") } } if (seasontype == "A" || seasontype == "M") { fit <- try(HoltWintersZZ( orig.y, alpha = alpha, beta = beta, gamma = gamma, phi = phi, exponential = (trendtype == "M"), seasonal = ifelse(seasontype != "A", "multiplicative", "additive"), lambda = lambda, biasadj = biasadj, warnings = FALSE ), silent = TRUE) if (!("try-error" %in% class(fit))) { fit$call <- match.call() fit$method <- as.character(fit) fit$series <- deparse(substitute(y)) return(fit) } else { warning("Seasonal component could not be estimated") } } if (trendtype == "A" || trendtype == "M") { fit <- try(HoltWintersZZ( orig.y, alpha = alpha, beta = beta, gamma = FALSE, phi = phi, exponential = (trendtype == "M"), lambda = lambda, biasadj = biasadj, warnings = FALSE ), silent = TRUE) if (!("try-error" %in% class(fit))) { fit$call <- match.call() fit$method <- as.character(fit) fit$series <- deparse(substitute(y)) return(fit) } else { warning("Trend component could not be estimated") } } if (trendtype == "N" && seasontype == "N") { fit <- try(HoltWintersZZ( orig.y, alpha = alpha, beta = FALSE, gamma = FALSE, lambda = lambda, biasadj = biasadj, warnings = FALSE ), silent = TRUE) if (!("try-error" %in% class(fit))) { fit$call <- match.call() fit$method <- as.character(fit) fit$series <- deparse(substitute(y)) return(fit) } } # Try holt and ses and return best fit1 <- try(HoltWintersZZ( orig.y, alpha = alpha, beta = beta, gamma = FALSE, phi = phi, exponential = (trendtype == "M"), lambda = lambda, biasadj = biasadj, warnings = FALSE ), silent = TRUE) fit2 <- try(HoltWintersZZ( orig.y, alpha = alpha, beta = FALSE, gamma = FALSE, phi = phi, exponential = (trendtype == "M"), lambda = lambda, biasadj = biasadj, warnings = FALSE ), silent = TRUE) if ("try-error" %in% class(fit1)) { fit <- fit2 } else if (fit1$sigma2 < fit2$sigma2) { fit <- fit1 } else { fit <- fit2 } if("try-error" %in% class(fit)) stop("Unable to estimate a model.") fit$call <- match.call() fit$method <- as.character(fit) fit$series <- deparse(substitute(y)) return(fit) } # Fit model (assuming only one nonseasonal model) if (errortype == "Z") { errortype <- c("A", "M") } if (trendtype == "Z") { if (allow.multiplicative.trend) { trendtype <- c("N", "A", "M") } else { trendtype <- c("N", "A") } } if (seasontype == "Z") { seasontype <- c("N", "A", "M") } if (is.null(damped)) { damped <- c(TRUE, FALSE) } best.ic <- Inf for (i in 1:length(errortype)) { for (j in 1:length(trendtype)) { for (k in 
1:length(seasontype)) { for (l in 1:length(damped)) { if (trendtype[j] == "N" && damped[l]) { next } if (restrict) { if (errortype[i] == "A" && (trendtype[j] == "M" || seasontype[k] == "M")) { next } if (errortype[i] == "M" && trendtype[j] == "M" && seasontype[k] == "A") { next } if (additive.only && (errortype[i] == "M" || trendtype[j] == "M" || seasontype[k] == "M")) { next } } if (!data.positive && errortype[i] == "M") { next } fit <- try(etsmodel( y, errortype[i], trendtype[j], seasontype[k], damped[l], alpha, beta, gamma, phi, lower = lower, upper = upper, opt.crit = opt.crit, nmse = nmse, bounds = bounds, ... ), silent=TRUE) if(is.element("try-error", class(fit))) fit.ic <- Inf else fit.ic <- switch(ic, aic = fit$aic, bic = fit$bic, aicc = fit$aicc) if (!is.na(fit.ic)) { if (fit.ic < best.ic) { model <- fit best.ic <- fit.ic best.e <- errortype[i] best.t <- trendtype[j] best.s <- seasontype[k] best.d <- damped[l] } } } } } } if (best.ic == Inf) { stop("No model able to be fitted") } model$m <- m model$method <- paste("ETS(", best.e, ",", best.t, ifelse(best.d, "d", ""), ",", best.s, ")", sep = "") model$series <- seriesname model$components <- c(best.e, best.t, best.s, best.d) model$call <- match.call() model$initstate <- model$states[1, ] np <- length(model$par) model$sigma2 <- sum(model$residuals^2, na.rm = TRUE) / (ny - np) model$x <- orig.y if (!is.null(lambda)) { model$fitted <- InvBoxCox(model$fitted, lambda, biasadj, var(model$residuals)) attr(lambda, "biasadj") <- biasadj } model$lambda <- lambda # model$call$data <- dataname return(structure(model, class = "ets")) } #' @export as.character.ets <- function(x, ...) { paste( "ETS(", x$components[1], ",", x$components[2], ifelse(x$components[4], "d", ""), ",", x$components[3], ")", sep = "" ) } # myRequire <- function(libName) { # req.suc <- require(libName, quietly=TRUE, character.only=TRUE) # if(!req.suc) stop("The ",libName," package is not available.") # req.suc # } # getNewBounds <- function(par, lower, upper, nstate) { # myLower <- NULL # myUpper <- NULL # if("alpha" %in% names(par)) { # myLower <- c(myLower, lower[1]) # myUpper <- c(myUpper, upper[1]) # } # if("beta" %in% names(par)) { # myLower <- c(myLower, lower[2]) # myUpper <- c(myUpper, upper[2]) # } # if("gamma" %in% names(par)) { # myLower <- c(myLower, lower[3]) # myUpper <- c(myUpper, upper[3]) # } # if("phi" %in% names(par)) { # myLower <- c(myLower, lower[4]) # myUpper <- c(myUpper, upper[4]) # } # myLower <- c(myLower,rep(-1e8,nstate)) # myUpper <- c(myUpper,rep(1e8,nstate)) # list(lower=myLower, upper=myUpper) # } etsmodel <- function(y, errortype, trendtype, seasontype, damped, alpha=NULL, beta=NULL, gamma=NULL, phi=NULL, lower, upper, opt.crit, nmse, bounds, maxit=2000, control=NULL, seed=NULL, trace=FALSE) { tsp.y <- tsp(y) if (is.null(tsp.y)) { tsp.y <- c(1, length(y), 1) } if (seasontype != "N") { m <- tsp.y[3] } else { m <- 1 } # Modify limits if alpha, beta or gamma have been specified. 
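# A fixed alpha caps the upper bounds for beta and gamma (the usual ETS # constraints beta < alpha and gamma < 1 - alpha); likewise, a fixed beta raises # the lower bound for alpha, and a fixed gamma lowers the upper bound for alpha.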
if (!is.null(alpha)) { upper[2] <- min(alpha, upper[2]) upper[3] <- min(1 - alpha, upper[3]) } if (!is.null(beta)) { lower[1] <- max(beta, lower[1]) } if (!is.null(gamma)) { upper[1] <- min(1 - gamma, upper[1]) } # Initialize smoothing parameters par <- initparam(alpha, beta, gamma, phi, trendtype, seasontype, damped, lower, upper, m) names(alpha) <- names(beta) <- names(gamma) <- names(phi) <- NULL par.noopt <- c(alpha = alpha, beta = beta, gamma = gamma, phi = phi) if (!is.null(par.noopt)) { par.noopt <- c(na.omit(par.noopt)) } if (!is.na(par["alpha"])) { alpha <- par["alpha"] } if (!is.na(par["beta"])) { beta <- par["beta"] } if (!is.na(par["gamma"])) { gamma <- par["gamma"] } if (!is.na(par["phi"])) { phi <- par["phi"] } # if(errortype=="M" | trendtype=="M" | seasontype=="M") # bounds="usual" if (!check.param(alpha, beta, gamma, phi, lower, upper, bounds, m)) { print(paste("Model: ETS(", errortype, ",", trendtype, ifelse(damped, "d", ""), ",", seasontype, ")", sep = "")) stop("Parameters out of range") } # Initialize state init.state <- initstate(y, trendtype, seasontype) nstate <- length(init.state) par <- c(par, init.state) lower <- c(lower, rep(-Inf, nstate)) upper <- c(upper, rep(Inf, nstate)) np <- length(par) if (np >= length(y) - 1) { # Not enough data to continue return(list(aic = Inf, bic = Inf, aicc = Inf, mse = Inf, amse = Inf, fit = NULL, par = par, states = init.state)) } # ------------------------------------------------- # if(is.null(seed)) seed <- 1000*runif(1) # if(solver=="malschains" || solver=="malschains_c") { # malschains <- NULL # if(!myRequire("Rmalschains")) # stop("malschains optimizer unavailable") # func <- NULL # #env <- NULL # if(solver=="malschains") { # func <- function(myPar) { # names(myPar) <- names(par) # res <- lik(myPar,y=y,nstate=nstate, errortype=errortype, trendtype=trendtype, # seasontype=seasontype, damped=damped, par.noopt=par.noopt, lowerb=lower, upperb=upper, # opt.crit=opt.crit, nmse=nmse, bounds=bounds, m=m,pnames=names(par),pnames2=names(par.noopt)) # res # } # env <- new.env() # } else { # env <- etsTargetFunctionInit(par=par, y=y, nstate=nstate, errortype=errortype, trendtype=trendtype, # seasontype=seasontype, damped=damped, par.noopt=par.noopt, lowerb=lower, upperb=upper, # opt.crit=opt.crit, nmse=nmse, bounds=bounds, m=m,pnames=names(par),pnames2=names(par.noopt)) # func <- .Call("etsGetTargetFunctionRmalschainsPtr", PACKAGE="forecast") # } # myBounds <- getNewBounds(par, lower, upper, nstate) # if(is.null(control)) { # control <- Rmalschains::malschains.control(ls="simplex", lsOnly=TRUE) # } # control$optimum <- if(opt.crit=="lik") -1e12 else 0 # fredTmp <- Rmalschains::malschains(func, env=env, lower=myBounds$lower, upper=myBounds$upper, # maxEvals=maxit, seed=seed, initialpop=par, control=control) # fred <- NULL # fred$par <- fredTmp$sol # fit.par <- fred$par # names(fit.par) <- names(par) # } else if (solver=="Rdonlp2") { # # donlp2 <- NULL # myRequire("Rdonlp2") # # env <- etsTargetFunctionInit(par=par, y=y, nstate=nstate, errortype=errortype, trendtype=trendtype, # seasontype=seasontype, damped=damped, par.noopt=par.noopt, lowerb=lower, upperb=upper, # opt.crit=opt.crit, nmse=nmse, bounds=bounds, m=m,pnames=names(par),pnames2=names(par.noopt)) # # func <- .Call("etsGetTargetFunctionRdonlp2Ptr", PACKAGE="forecast") # # myBounds <- getNewBounds(par, lower, upper, nstate) # # fred <- donlp2(par, func, env=env, par.lower=myBounds$lower, par.upper=myBounds$upper)#, nlin.lower=c(-1), nlin.upper=c(1)) #nlin.lower=c(0,-Inf, -Inf, 
-Inf), nlin.upper=c(0,0,0,0)) # # fit.par <- fred$par # # names(fit.par) <- names(par) # } else if(solver=="optim_c"){ env <- etsTargetFunctionInit( par = par, y = y, nstate = nstate, errortype = errortype, trendtype = trendtype, seasontype = seasontype, damped = damped, par.noopt = par.noopt, lowerb = lower, upperb = upper, opt.crit = opt.crit, nmse = as.integer(nmse), bounds = bounds, m = m, pnames = names(par), pnames2 = names(par.noopt) ) fred <- .Call( "etsNelderMead", par, env, -Inf, sqrt(.Machine$double.eps), 1.0, 0.5, 2.0, trace, maxit, PACKAGE = "forecast" ) fit.par <- fred$par names(fit.par) <- names(par) # } else { #if(solver=="optim") # # Optimize parameters and state # if(length(par)==1) # method <- "Brent" # else # method <- "Nelder-Mead" # fred <- optim(par,lik,method=method,y=y,nstate=nstate, errortype=errortype, trendtype=trendtype, # seasontype=seasontype, damped=damped, par.noopt=par.noopt, lowerb=lower, upperb=upper, # opt.crit=opt.crit, nmse=nmse, bounds=bounds, m=m,pnames=names(par),pnames2=names(par.noopt), # control=list(maxit=maxit)) # fit.par <- fred$par # names(fit.par) <- names(par) # } # ------------------------------------------------- init.state <- fit.par[(np - nstate + 1):np] # Add extra state if (seasontype != "N") { init.state <- c(init.state, m * (seasontype == "M") - sum(init.state[(2 + (trendtype != "N")):nstate])) } if (!is.na(fit.par["alpha"])) { alpha <- fit.par["alpha"] } if (!is.na(fit.par["beta"])) { beta <- fit.par["beta"] } if (!is.na(fit.par["gamma"])) { gamma <- fit.par["gamma"] } if (!is.na(fit.par["phi"])) { phi <- fit.par["phi"] } e <- pegelsresid.C(y, m, init.state, errortype, trendtype, seasontype, damped, alpha, beta, gamma, phi, nmse) np <- np + 1 ny <- length(y) aic <- e$lik + 2 * np bic <- e$lik + log(ny) * np aicc <- aic + 2 * np * (np + 1) / (ny - np - 1) mse <- e$amse[1] amse <- mean(e$amse) states <- ts(e$states, frequency = tsp.y[3], start = tsp.y[1] - 1 / tsp.y[3]) colnames(states)[1] <- "l" if (trendtype != "N") { colnames(states)[2] <- "b" } if (seasontype != "N") { colnames(states)[(2 + (trendtype != "N")):ncol(states)] <- paste("s", 1:m, sep = "") } tmp <- c("alpha", rep("beta", trendtype != "N"), rep("gamma", seasontype != "N"), rep("phi", damped)) fit.par <- c(fit.par, par.noopt) # fit.par <- fit.par[order(names(fit.par))] if (errortype == "A") { fits <- y - e$e } else { fits <- y / (1 + e$e) } return(list( loglik = -0.5 * e$lik, aic = aic, bic = bic, aicc = aicc, mse = mse, amse = amse, fit = fred, residuals = ts(e$e, frequency = tsp.y[3], start = tsp.y[1]), fitted = ts(fits, frequency = tsp.y[3], start = tsp.y[1]), states = states, par = fit.par )) } etsTargetFunctionInit <- function(par, y, nstate, errortype, trendtype, seasontype, damped, par.noopt, lowerb, upperb, opt.crit, nmse, bounds, m, pnames, pnames2) { names(par) <- pnames names(par.noopt) <- pnames2 alpha <- c(par["alpha"], par.noopt["alpha"])["alpha"] if (is.na(alpha)) { stop("alpha problem!") } if (trendtype != "N") { beta <- c(par["beta"], par.noopt["beta"])["beta"] if (is.na(beta)) { stop("beta Problem!") } } else { beta <- NULL } if (seasontype != "N") { gamma <- c(par["gamma"], par.noopt["gamma"])["gamma"] if (is.na(gamma)) { stop("gamma Problem!") } } else { m <- 1 gamma <- NULL } if (damped) { phi <- c(par["phi"], par.noopt["phi"])["phi"] if (is.na(phi)) { stop("phi Problem!") } } else { phi <- NULL } # determine which values to optimize and which ones are given by the user/not needed optAlpha <- !is.null(alpha) optBeta <- !is.null(beta) optGamma <- 
!is.null(gamma) optPhi <- !is.null(phi) givenAlpha <- FALSE givenBeta <- FALSE givenGamma <- FALSE givenPhi <- FALSE if (!is.null(par.noopt["alpha"])) { if (!is.na(par.noopt["alpha"])) { optAlpha <- FALSE givenAlpha <- TRUE } } if (!is.null(par.noopt["beta"])) { if (!is.na(par.noopt["beta"])) { optBeta <- FALSE givenBeta <- TRUE } } if (!is.null(par.noopt["gamma"])) { if (!is.na(par.noopt["gamma"])) { optGamma <- FALSE givenGamma <- TRUE } } if (!is.null(par.noopt["phi"])) { if (!is.na(par.noopt["phi"])) { optPhi <- FALSE givenPhi <- TRUE } } if (!damped) { phi <- 1 } if (trendtype == "N") { beta <- 0 } if (seasontype == "N") { gamma <- 0 } # cat("alpha: ", alpha) # cat(" beta: ", beta) # cat(" gamma: ", gamma) # cat(" phi: ", phi, "\n") # # cat("useAlpha: ", useAlpha) # cat(" useBeta: ", useBeta) # cat(" useGamma: ", useGamma) # cat(" usePhi: ", usePhi, "\n") env <- new.env() res <- .Call( "etsTargetFunctionInit", y = y, nstate = nstate, errortype = switch(errortype, "A" = 1, "M" = 2), trendtype = switch(trendtype, "N" = 0, "A" = 1, "M" = 2), seasontype = switch(seasontype, "N" = 0, "A" = 1, "M" = 2), damped = damped, lowerb = lowerb, upperb = upperb, opt.crit = opt.crit, nmse = as.integer(nmse), bounds = bounds, m = m, optAlpha, optBeta, optGamma, optPhi, givenAlpha, givenBeta, givenGamma, givenPhi, alpha, beta, gamma, phi, env, PACKAGE = "forecast" ) res } initparam <- function(alpha, beta, gamma, phi, trendtype, seasontype, damped, lower, upper, m) { if (any(lower > upper)) { stop("Inconsistent parameter boundaries") } # Select alpha if (is.null(alpha)) { alpha <- lower[1] + 0.2 * (upper[1] - lower[1]) / m if (alpha > 1 || alpha < 0) { alpha <- lower[1] + 2e-3 } par <- c(alpha = alpha) } else { par <- numeric(0) } # Select beta if (trendtype != "N" && is.null(beta)) { # Ensure beta < alpha upper[2] <- min(upper[2], alpha) beta <- lower[2] + 0.1 * (upper[2] - lower[2]) if (beta < 0 || beta > alpha) { beta <- alpha - 1e-3 } par <- c(par, beta = beta) } # Select gamma if (seasontype != "N" && is.null(gamma)) { # Ensure gamma < 1-alpha upper[3] <- min(upper[3], 1 - alpha) gamma <- lower[3] + 0.05 * (upper[3] - lower[3]) if (gamma < 0 || gamma > 1 - alpha) { gamma <- 1 - alpha - 1e-3 } par <- c(par, gamma = gamma) } # Select phi if (damped && is.null(phi)) { phi <- lower[4] + .99 * (upper[4] - lower[4]) if (phi < 0 || phi > 1) { phi <- upper[4] - 1e-3 } par <- c(par, phi = phi) } return(par) } check.param <- function(alpha, beta, gamma, phi, lower, upper, bounds, m) { if (bounds != "admissible") { if (!is.null(alpha)) { if (alpha < lower[1] || alpha > upper[1]) { return(0) } } if (!is.null(beta)) { if (beta < lower[2] || beta > alpha || beta > upper[2]) { return(0) } } if (!is.null(phi)) { if (phi < lower[4] || phi > upper[4]) { return(0) } } if (!is.null(gamma)) { if (gamma < lower[3] || gamma > 1 - alpha || gamma > upper[3]) { return(0) } } } if (bounds != "usual") { if (!admissible(alpha, beta, gamma, phi, m)) { return(0) } } return(1) } initstate <- function(y, trendtype, seasontype) { if (seasontype != "N") { # Do decomposition m <- frequency(y) n <- length(y) if (n < 4) { stop("You've got to be joking (not enough data).") } else if (n < 3 * m) # Fit simple Fourier model. { fouriery <- fourier(y, 1) fit <- tslm(y ~ trend + fouriery) if (seasontype == "A") { y.d <- list(seasonal = y - fit$coef[1] - fit$coef[2] * (1:n)) } else { # seasontype=="M". 
Biased method, but we only need a starting point y.d <- list(seasonal = y / (fit$coef[1] + fit$coef[2] * (1:n))) } } else { # n is large enough to do a decomposition y.d <- decompose(y, type = switch(seasontype, A = "additive", M = "multiplicative")) } init.seas <- rev(y.d$seasonal[2:m]) # initial seasonal component names(init.seas) <- paste("s", 0:(m - 2), sep = "") # Seasonally adjusted data if (seasontype == "A") { y.sa <- y - y.d$seasonal } else { init.seas <- pmax(init.seas, 1e-2) # We do not want negative seasonal indexes if (sum(init.seas) > m) { init.seas <- init.seas / sum(init.seas + 1e-2) } y.sa <- y / pmax(y.d$seasonal, 1e-2) } } else # non-seasonal model { m <- 1 init.seas <- NULL y.sa <- y } maxn <- min(max(10, 2 * m), length(y.sa)) if (trendtype == "N") { l0 <- mean(y.sa[1:maxn]) b0 <- NULL } else # Simple linear regression on seasonally adjusted data { fit <- lsfit(1:maxn, y.sa[1:maxn]) if (trendtype == "A") { l0 <- fit$coef[1] b0 <- fit$coef[2] # If error type is "M", then we don't want l0+b0=0. # So perturb just in case. if (abs(l0 + b0) < 1e-8) { l0 <- l0 * (1 + 1e-3) b0 <- b0 * (1 - 1e-3) } } else # if(trendtype=="M") { l0 <- fit$coef[1] + fit$coef[2] # First fitted value if (abs(l0) < 1e-8) { l0 <- 1e-7 } b0 <- (fit$coef[1] + 2 * fit$coef[2]) / l0 # Ratio of first two fitted values l0 <- l0 / b0 # First fitted value divided by b0 if (abs(b0) > 1e10) { # Avoid infinite slopes b0 <- sign(b0) * 1e10 } if (l0 < 1e-8 || b0 < 1e-8) # Simple linear approximation didn't work. { l0 <- max(y.sa[1], 1e-3) b0 <- max(y.sa[2] / y.sa[1], 1e-3) } } } names(l0) <- "l" if (!is.null(b0)) { names(b0) <- "b" } return(c(l0, b0, init.seas)) } lik <- function(par, y, nstate, errortype, trendtype, seasontype, damped, par.noopt, lowerb, upperb, opt.crit, nmse, bounds, m, pnames, pnames2) { # browser() # cat("par: ", par, "\n") names(par) <- pnames names(par.noopt) <- pnames2 alpha <- c(par["alpha"], par.noopt["alpha"])["alpha"] if (is.na(alpha)) { stop("alpha problem!") } if (trendtype != "N") { beta <- c(par["beta"], par.noopt["beta"])["beta"] if (is.na(beta)) { stop("beta Problem!") } } else { beta <- NULL } if (seasontype != "N") { gamma <- c(par["gamma"], par.noopt["gamma"])["gamma"] if (is.na(gamma)) { stop("gamma Problem!") } } else { m <- 1 gamma <- NULL } if (damped) { phi <- c(par["phi"], par.noopt["phi"])["phi"] if (is.na(phi)) { stop("phi Problem!") } } else { phi <- NULL } if (!check.param(alpha, beta, gamma, phi, lowerb, upperb, bounds, m)) { return(Inf) } np <- length(par) init.state <- par[(np - nstate + 1):np] # Add extra state if (seasontype != "N") { init.state <- c(init.state, m * (seasontype == "M") - sum(init.state[(2 + (trendtype != "N")):nstate])) } # Check states if (seasontype == "M") { seas.states <- init.state[-(1:(1 + (trendtype != "N")))] if (min(seas.states) < 0) { return(Inf) } } e <- pegelsresid.C(y, m, init.state, errortype, trendtype, seasontype, damped, alpha, beta, gamma, phi, nmse) if (is.na(e$lik)) { return(Inf) } if (e$lik < -1e10) { # Avoid perfect fits return(-1e10) } # cat("lik: ", e$lik, "\n") # points(alpha,e$lik,col=2) if (opt.crit == "lik") { return(e$lik) } else if (opt.crit == "mse") { return(e$amse[1]) } else if (opt.crit == "amse") { return(mean(e$amse)) } else if (opt.crit == "sigma") { return(mean(e$e ^ 2)) } else if (opt.crit == "mae") { return(mean(abs(e$e))) } } #' @export print.ets <- function(x, ...) 
{ cat(paste(x$method, "\n\n")) cat(paste("Call:\n", deparse(x$call), "\n\n")) ncoef <- length(x$initstate) if (!is.null(x$lambda)) { cat(" Box-Cox transformation: lambda=", round(x$lambda, 4), "\n\n") } cat(" Smoothing parameters:\n") cat(paste(" alpha =", round(x$par["alpha"], 4), "\n")) if (x$components[2] != "N") { cat(paste(" beta =", round(x$par["beta"], 4), "\n")) } if (x$components[3] != "N") { cat(paste(" gamma =", round(x$par["gamma"], 4), "\n")) } if (x$components[4] != "FALSE") { cat(paste(" phi =", round(x$par["phi"], 4), "\n")) } cat("\n Initial states:\n") cat(paste(" l =", round(x$initstate[1], 4), "\n")) if (x$components[2] != "N") { cat(paste(" b =", round(x$initstate[2], 4), "\n")) } else { x$initstate <- c(x$initstate[1], NA, x$initstate[2:ncoef]) ncoef <- ncoef + 1 } if (x$components[3] != "N") { cat(" s = ") if (ncoef <= 8) { cat(round(x$initstate[3:ncoef], 4)) } else { cat(round(x$initstate[3:8], 4)) cat("\n ") cat(round(x$initstate[9:ncoef], 4)) } cat("\n") } cat("\n sigma: ") cat(round(sqrt(x$sigma2), 4)) if (!is.null(x$aic)) { stats <- c(x$aic, x$aicc, x$bic) names(stats) <- c("AIC", "AICc", "BIC") cat("\n\n") print(stats) } # cat("\n AIC: ") # cat(round(x$aic,4)) # cat("\n AICc: ") # cat(round(x$aicc,4)) # cat("\n BIC: ") # cat(round(x$bic,4)) } pegelsresid.C <- function(y, m, init.state, errortype, trendtype, seasontype, damped, alpha, beta, gamma, phi, nmse) { n <- length(y) p <- length(init.state) x <- numeric(p * (n + 1)) x[1:p] <- init.state e <- numeric(n) lik <- 0 if (!damped) { phi <- 1 } if (trendtype == "N") { beta <- 0 } if (seasontype == "N") { gamma <- 0 } amse <- numeric(nmse) Cout <- .C( "etscalc", as.double(y), as.integer(n), as.double(x), as.integer(m), as.integer(switch(errortype, "A" = 1, "M" = 2)), as.integer(switch(trendtype, "N" = 0, "A" = 1, "M" = 2)), as.integer(switch(seasontype, "N" = 0, "A" = 1, "M" = 2)), as.double(alpha), as.double(beta), as.double(gamma), as.double(phi), as.double(e), as.double(lik), as.double(amse), as.integer(nmse), PACKAGE = "forecast" ) if (!is.na(Cout[[13]])) { if (abs(Cout[[13]] + 99999) < 1e-7) { Cout[[13]] <- NA } } tsp.y <- tsp(y) e <- ts(Cout[[12]]) tsp(e) <- tsp.y return(list(lik = Cout[[13]], amse = Cout[[14]], e = e, states = matrix(Cout[[3]], nrow = n + 1, ncol = p, byrow = TRUE))) } admissible <- function(alpha, beta, gamma, phi, m) { if (is.null(phi)) { phi <- 1 } if (phi < 0 || phi > 1 + 1e-8) { return(0) } if (is.null(gamma)) { if (alpha < 1 - 1 / phi || alpha > 1 + 1 / phi) { return(0) } if (!is.null(beta)) { if (beta < alpha * (phi - 1) || beta > (1 + phi) * (2 - alpha)) { return(0) } } } else if (m > 1) # Seasonal model { if (is.null(beta)) { beta <- 0 } if (gamma < max(1 - 1 / phi - alpha, 0) || gamma > 1 + 1 / phi - alpha) { return(0) } if (alpha < 1 - 1 / phi - gamma * (1 - m + phi + phi * m) / (2 * phi * m)) { return(0) } if (beta < -(1 - phi) * (gamma / m + alpha)) { return(0) } # End of easy tests. Now use characteristic equation P <- c(phi * (1 - alpha - gamma), alpha + beta - alpha * phi + gamma - 1, rep(alpha + beta - alpha * phi, m - 2), (alpha + beta - phi), 1) roots <- polyroot(P) # cat("maxpolyroots: ", max(abs(roots)), "\n") if (max(abs(roots)) > 1 + 1e-10) { return(0) } } # Passed all tests return(1) } ### PLOT COMPONENTS #' Plot components from ETS model #' #' Produces a plot of the level, slope and seasonal components from an ETS #' model. #' #' \code{autoplot} will produce an equivalent plot as a ggplot object. #' #' @param x Object of class \dQuote{ets}. 
#' @param object Object of class \dQuote{ets}. Used for ggplot graphics (S3 #' method consistency). #' @param range.bars Logical indicating if each plot should have a bar at its #' right side representing relative size. If NULL, automatic selection takes #' place. #' @param ... Other plotting parameters to affect the plot. #' @return None. Function produces a plot #' @author Rob J Hyndman & Mitchell O'Hara-Wild #' @seealso \code{\link{ets}} #' @keywords hplot #' @examples #' #' fit <- ets(USAccDeaths) #' plot(fit) #' plot(fit,plot.type="single",ylab="",col=1:3) #' #' library(ggplot2) #' autoplot(fit) #' #' @export plot.ets <- function(x, ...) { if (!is.null(x$lambda)) { y <- BoxCox(x$x, x$lambda) } else { y <- x$x } if (x$components[3] == "N" && x$components[2] == "N") { plot( cbind(observed = y, level = x$states[, 1]), main = paste("Decomposition by", x$method, "method"), ... ) } else if (x$components[3] == "N") { plot( cbind(observed = y, level = x$states[, 1], slope = x$states[, "b"]), main = paste("Decomposition by", x$method, "method"), ... ) } else if (x$components[2] == "N") { plot( cbind(observed = y, level = x$states[, 1], season = x$states[, "s1"]), main = paste("Decomposition by", x$method, "method"), ... ) } else { plot( cbind( observed = y, level = x$states[, 1], slope = x$states[, "b"], season = x$states[, "s1"] ), main = paste("Decomposition by", x$method, "method"), ... ) } } #' @export summary.ets <- function(object, ...) { print(object) cat("\nTraining set error measures:\n") print(accuracy(object)) } #' @export coef.ets <- function(object, ...) { object$par } #' @rdname fitted.Arima #' @export fitted.ets <- function(object, h=1, ...) { if (h == 1) { return(object$fitted) } else { return(hfitted(object = object, h = h, FUN = "ets", ...)) } } #' @export logLik.ets <- function(object, ...) { structure(object$loglik, df = length(object$par) + 1, class = "logLik") } #' @export nobs.ets <- function(object, ...) { length(object$x) } #' Is an object a particular model type? #' #' Returns true if the model object is of a particular type #' #' @param x object to be tested #' @export is.ets <- function(x) { inherits(x, "ets") } forecast/R/spline.R0000644000176200001440000002010113556655411013703 0ustar liggesusers############################################### ##### Forecasting Using Smoothing Splines ##### ############################################### # Optimal smoothing paramter denoted by beta # lambda is Box-Cox parameter. 
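# The matrix built by make.Sigma below is the covariance matrix of an integrated # Wiener process X observed at times (1:n)/n: for s <= t, Cov(X(s), X(t)) = s^2 (3t - s) / 6, # the kernel underlying the ARIMA(0,2,2)-equivalent spline model of Hyndman et al (2005).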
################# FUNCTIONS ################## ## Set up Sigma of order (n x n) make.Sigma <- function(n, n0=0) { nn <- n + n0 Sigma <- matrix(0, nrow = nn, ncol = nn) for (i in 1:nn) Sigma[i, i:nn] <- Sigma[i:nn, i] <- (i * i * (3 * (i:nn) - i)) / 6 return(Sigma / (n ^ 3)) } ## Compute spline matrices spline.matrices <- function(n, beta, cc=1e2, n0=0) { nn <- n + n0 Sigma <- make.Sigma(n, n0) s <- cbind(rep(1, nn), (1:nn) / n) Omega <- cc * s %*% t(s) + Sigma / beta + diag(nn) max.Omega <- max(Omega) inv.Omega <- solve(Omega / max.Omega, tol = 1e-10) / max.Omega P <- chol(inv.Omega) return(list(s = s, Sigma = Sigma, Omega = Omega, inv.Omega = inv.Omega, P = P)) } ## Compute smoothing splines ## Return -loglikelihood # beta multiplied by 1e6 to avoid numerical difficulties in optimization spline.loglik <- function(beta, y, cc=1e2) { n <- length(y) mat <- spline.matrices(n, beta / 1e6, cc = cc) y.star <- mat$P %*% matrix(y) return(-log(det(mat$P)) + 0.5 * n * log(sum(y.star ^ 2))) } # Spline forecasts #' Cubic Spline Forecast #' #' Returns local linear forecasts and prediction intervals using cubic #' smoothing splines. #' #' The cubic smoothing spline model is equivalent to an ARIMA(0,2,2) model but #' with a restricted parameter space. The advantage of the spline model over #' the full ARIMA model is that it provides a smooth historical trend as well #' as a linear forecast function. Hyndman, King, Pitrun, and Billah (2005) show #' that the forecast performance of the method is hardly affected by the #' restricted parameter space. #' #' @param y a numeric vector or time series of class \code{ts} #' @param h Number of periods for forecasting #' @param level Confidence level for prediction intervals. #' @param fan If TRUE, level is set to seq(51,99,by=3). This is suitable for #' fan plots. #' @param method Method for selecting the smoothing parameter. If #' \code{method="gcv"}, the generalized cross-validation method from #' \code{\link[stats]{smooth.spline}} is used. If \code{method="mle"}, the #' maximum likelihood method from Hyndman et al (2005) is used. #' @param x Deprecated. Included for backwards compatibility. #' @inheritParams forecast #' @return An object of class "\code{forecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{splinef}. #' #' An object of class \code{"forecast"} containing the following elements: #' \item{model}{A list containing information about the fitted model} #' \item{method}{The name of the forecasting method as a character string} #' \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for #' prediction intervals} \item{upper}{Upper limits for prediction intervals} #' \item{level}{The confidence values associated with the prediction intervals} #' \item{x}{The original time series (either \code{object} itself or the time #' series used to create the model stored as \code{object}).} #' \item{onestepf}{One-step forecasts from the fitted model.} #' \item{fitted}{Smooth estimates of the fitted trend using all data.} #' \item{residuals}{Residuals from the fitted model. That is x minus one-step #' forecasts.} #' @author Rob J Hyndman #' @seealso \code{\link[stats]{smooth.spline}}, \code{\link[stats]{arima}}, #' \code{\link{holt}}.
#' @references Hyndman, King, Pitrun and Billah (2005) Local linear forecasts #' using cubic smoothing splines. \emph{Australian and New Zealand Journal of #' Statistics}, \bold{47}(1), 87-99. #' \url{https://robjhyndman.com/publications/splinefcast/}. #' @keywords ts #' @examples #' fcast <- splinef(uspop,h=5) #' plot(fcast) #' summary(fcast) #' #' @export splinef <- function(y, h=10, level=c(80, 95), fan=FALSE, lambda=NULL, biasadj=FALSE, method=c("gcv", "mle"), x=y) { method <- match.arg(method) if (!is.ts(x)) { x <- ts(x) } n <- length(x) freq <- frequency(x) if (!is.null(lambda)) { origx <- x x <- BoxCox(x, lambda) lambda <- attr(x, "lambda") } # Find optimal beta using likelihood approach in Hyndman et al paper. if (method == "mle") { if (n > 100) { # Use only last 100 observations to get beta xx <- x[(n - 99):n] } else { xx <- x } beta.est <- optimize(spline.loglik, interval = c(1e-6, 1e7), y = xx)$minimum / 1e6 # Compute spar which is equivalent to beta r <- 256 * smooth.spline(1:n, x, spar = 0)$lambda lss <- beta.est * n ^ 3 / (n - 1) ^ 3 spar <- (log(lss / r) / log(256) + 1) / 3 splinefit <- smooth.spline(1:n, x, spar = spar) sfits <- splinefit$y } else # Use GCV { splinefit <- smooth.spline(1:n, x, cv = FALSE, spar = NULL) sfits <- splinefit$y beta.est <- pmax(1e-7, splinefit$lambda * (n - 1) ^ 3 / n ^ 3) } # Compute matrices for optimal beta mat <- spline.matrices(n, beta.est) newmat <- spline.matrices(n, beta.est, n0 = h) # Get one-step predictors yfit <- e <- rep(NA, n) if (n > 1000) { warning("Series too long to compute training set fits and residuals") } else # This is probably grossly inefficient but I can't think of a better way right now { for (i in 1:(n - 1)) { U <- mat$Omega[1:i, i + 1] Oinv <- solve(mat$Omega[1:i, 1:i] / 1e6) / 1e6 yfit[i + 1] <- t(U) %*% Oinv %*% x[1:i] sd <- sqrt(mat$Omega[i + 1, i + 1] - t(U) %*% Oinv %*% U) e[i + 1] <- (x[i + 1] - yfit[i + 1]) / sd } } # Compute sigma^2 sigma2 <- mean(e ^ 2, na.rm = TRUE) # Compute mean and var of forecasts U <- newmat$Omega[1:n, n + (1:h)] Omega0 <- newmat$Omega[n + (1:h), n + (1:h)] Yhat <- t(U) %*% mat$inv.Omega %*% x sd <- sqrt(sigma2 * diag(Omega0 - t(U) %*% mat$inv.Omega %*% U)) # Compute prediction intervals. 
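# Intervals are Yhat +/- conf.factor * sd, where conf.factor = qnorm(0.5 + 0.005 * level) # is the normal quantile for each confidence level, and sd was computed above as # sqrt(sigma2 * diag(Omega0 - t(U) %*% mat$inv.Omega %*% U)).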
if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 && max(level) < 1) { level <- 100 * level } else if (min(level) < 0 || max(level) > 99.99) { stop("Confidence limit out of range") } } nconf <- length(level) lower <- upper <- matrix(NA, nrow = h, ncol = nconf) for (i in 1:nconf) { conf.factor <- qnorm(0.5 + 0.005 * level[i]) upper[, i] <- Yhat + conf.factor * sd lower[, i] <- Yhat - conf.factor * sd } lower <- ts(lower, start = tsp(x)[2] + 1 / freq, frequency = freq) upper <- ts(upper, start = tsp(x)[2] + 1 / freq, frequency = freq) res <- ts(x - yfit, start = start(x), frequency = freq) if (!is.null(lambda)) { Yhat <- InvBoxCox(Yhat, lambda, biasadj, list(level = level, upper = upper, lower = lower)) upper <- InvBoxCox(upper, lambda) lower <- InvBoxCox(lower, lambda) yfit <- InvBoxCox(yfit, lambda) sfits <- InvBoxCox(sfits, lambda) x <- origx } return(structure( list( method = "Cubic Smoothing Spline", level = level, x = x, series = deparse(substitute(y)), mean = ts(Yhat, frequency = freq, start = tsp(x)[2] + 1 / freq), upper = ts(upper, start = tsp(x)[2] + 1 / freq, frequency = freq), lower = ts(lower, start = tsp(x)[2] + 1 / freq, frequency = freq), model = list(beta = beta.est * n ^ 3, call = match.call()), fitted = ts(sfits, start = start(x), frequency = freq), residuals = res, standardizedresiduals = ts(e, start = start(x), frequency = freq), onestepf = ts(yfit, start = start(x), frequency = freq) ), lambda = lambda, class = c("splineforecast", "forecast") )) } #' @rdname plot.forecast #' #' @examples #' fcast <- splinef(airmiles,h=5) #' plot(fcast) #' autoplot(fcast) #' #' @export plot.splineforecast <- function(x, fitcol=2, type="o", pch=19, ...) { plot.forecast(x, type = type, pch = pch, ...) lines(x$fitted, col = fitcol) } #' @rdname is.forecast #' @export is.splineforecast <- function(x) { inherits(x, "splineforecast") } forecast/R/whichmodels.R0000644000176200001440000000104013553673010014710 0ustar liggesusersWhichModels <- function(max.p, max.q, max.P, max.Q, maxK) { total.models <- (max.p + 1) * (max.q + 1) * (max.P + 1) * (max.Q + 1) * length(0:maxK) x <- numeric(total.models) i <- 1 for (x1 in 0:max.p) for (x2 in 0:max.q) { for (x3 in 0:max.P) for (x4 in 0:max.Q) { for (K in 0:maxK) { x[i] <- paste(x1, "f", x2, "f", x3, "f", x4, "f", K, sep = "") i <- i + 1 } } } return(x) } UndoWhichModels <- function(n) { as.numeric(unlist(strsplit(n, split = "f"))) } forecast/R/forecast2.R0000644000176200001440000005007713553673010014310 0ustar liggesusers# Mean forecast #' Mean Forecast #' #' Returns forecasts and prediction intervals for an iid model applied to y. #' #' The iid model is \deqn{Y_t=\mu + Z_t}{Y[t]=mu + Z[t]} where \eqn{Z_t}{Z[t]} #' is a normal iid error. Forecasts are given by \deqn{Y_n(h)=\mu}{Y[n+h]=mu} #' where \eqn{\mu}{mu} is estimated by the sample mean. #' #' @param y a numeric vector or time series of class \code{ts} #' @param h Number of periods for forecasting #' @param level Confidence levels for prediction intervals. #' @param fan If TRUE, level is set to seq(51,99,by=3). This is suitable for #' fan plots. #' @param bootstrap If TRUE, use a bootstrap method to compute prediction intervals. #' Otherwise, assume a normal distribution. #' @param npaths Number of bootstrapped sample paths to use if \code{bootstrap==TRUE}. #' @param x Deprecated. Included for backwards compatibility. #' @inheritParams forecast #' #' @return An object of class "\code{forecast}". 
#' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{meanf}. #' #' An object of class \code{"forecast"} is a list containing at least the #' following elements: \item{model}{A list containing information about the #' fitted model} \item{method}{The name of the forecasting method as a #' character string} \item{mean}{Point forecasts as a time series} #' \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper #' limits for prediction intervals} \item{level}{The confidence values #' associated with the prediction intervals} \item{x}{The original time series #' (either \code{object} itself or the time series used to create the model #' stored as \code{object}).} \item{residuals}{Residuals from the fitted model. #' That is x minus fitted values.} \item{fitted}{Fitted values (one-step #' forecasts)} #' @author Rob J Hyndman #' @seealso \code{\link{rwf}} #' @keywords ts #' @examples #' nile.fcast <- meanf(Nile, h=10) #' plot(nile.fcast) #' #' @export meanf <- function(y, h=10, level=c(80, 95), fan=FALSE, lambda=NULL, biasadj=FALSE, bootstrap=FALSE, npaths=5000, x=y) { n <- length(x) if (!is.null(lambda)) { origx <- x x <- BoxCox(x, lambda) lambda <- attr(x, "lambda") } meanx <- mean(x, na.rm = TRUE) fits <- rep(meanx, length(x)) res <- x - fits f <- rep(meanx, h) if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 && max(level) < 1) { level <- 100 * level } else if (min(level) < 0 || max(level) > 99.99) { stop("Confidence limit out of range") } } nconf <- length(level) s <- sd(x, na.rm = TRUE) if (bootstrap) { e <- na.omit(res) - mean(res, na.rm = TRUE) sim <- matrix(sample(e, size = npaths * h, replace = TRUE), ncol = npaths, nrow = h) sim <- sweep(sim, 1, f, "+") lower <- t(apply(sim, 1, quantile, prob = .5 - level / 200)) upper <- t(apply(sim, 1, quantile, prob = .5 + level / 200)) } else { lower <- upper <- matrix(NA, nrow = h, ncol = nconf) for (i in 1:nconf) { if (n > 1) { tfrac <- qt(0.5 - level[i] / 200, n - 1) } else { tfrac <- -Inf } w <- -tfrac * s * sqrt(1 + 1 / n) lower[, i] <- f - w upper[, i] <- f + w } } colnames(lower) <- colnames(upper) <- paste(level, "%", sep = "") if (is.ts(x)) { fits <- ts(fits) res <- ts(res) tsp(fits) <- tsp(res) <- tsp(x) freq <- frequency(x) f <- ts(f, start = tsp(x)[2] + 1 / freq, frequency = freq) lower <- ts(lower, start = tsp(x)[2] + 1 / freq, frequency = freq) upper <- ts(upper, start = tsp(x)[2] + 1 / freq, frequency = freq) } if (!is.null(lambda)) { fits <- InvBoxCox(fits, lambda) x <- origx f <- InvBoxCox(f, lambda, biasadj, list(level = level, upper = upper, lower = lower)) lower <- InvBoxCox(lower, lambda) upper <- InvBoxCox(upper, lambda) } out <- list( method = "Mean", level = level, x = x, series = deparse(substitute(y)), mean = f, lower = lower, upper = upper, model = structure(list(mu = f[1], mu.se = s / sqrt(length(x)), sd = s, bootstrap = bootstrap), class = "meanf"), lambda = lambda, fitted = fits, residuals = res ) out$model$call <- match.call() return(structure(out, class = "forecast")) } #' Box Cox Transformation #' #' BoxCox() returns a transformation of the input variable using a Box-Cox #' transformation. InvBoxCox() reverses the transformation. 
#' #' The Box-Cox transformation is given by \deqn{f_\lambda(x) =\frac{x^\lambda - #' 1}{\lambda}}{f(x;lambda)=(x^lambda - 1)/lambda} if \eqn{\lambda\ne0}{lambda #' is not equal to 0}. For \eqn{\lambda=0}{lambda=0}, #' \deqn{f_0(x)=\log(x)}{f(x;0)=log(x)}. #' #' @param x a numeric vector or time series of class \code{ts}. #' @param lambda transformation parameter. If \code{lambda = "auto"}, then #' the transformation parameter lambda is chosen using BoxCox.lambda. #' @param biasadj Use adjusted back-transformed mean for Box-Cox #' transformations. If transformed data is used to produce forecasts and fitted values, #' a regular back transformation will result in median forecasts. If biasadj is TRUE, #' an adjustment will be made to produce mean forecasts and fitted values. #' @param fvar Optional parameter required if biasadj=TRUE. Can either be the #' forecast variance, or a list containing the interval \code{level}, and the #' corresponding \code{upper} and \code{lower} intervals. #' @return a numeric vector of the same length as x. #' @author Rob J Hyndman & Mitchell O'Hara-Wild #' @seealso \code{\link{BoxCox.lambda}} #' @references Box, G. E. P. and Cox, D. R. (1964) An analysis of #' transformations. \emph{JRSS B} \bold{26} 211--246. #' @keywords ts #' @examples #' #' lambda <- BoxCox.lambda(lynx) #' lynx.fit <- ar(BoxCox(lynx,lambda)) #' plot(forecast(lynx.fit,h=20,lambda=lambda)) #' #' @export BoxCox <- function(x, lambda) { if (lambda == "auto") { lambda <- BoxCox.lambda(x) } if (lambda < 0) { x[x < 0] <- NA } if (lambda == 0) { out <- log(x) } else { out <- (sign(x) * abs(x) ^ lambda - 1) / lambda } if (!is.null(colnames(x))) { colnames(out) <- colnames(x) } attr(out, "lambda") <- lambda return(out) } #' @rdname BoxCox #' @export InvBoxCox <- function(x, lambda, biasadj=FALSE, fvar=NULL) { if (lambda < 0) { x[x > -1 / lambda] <- NA } if (lambda == 0) { out <- exp(x) } else { xx <- x * lambda + 1 out <- sign(xx) * abs(xx) ^ (1 / lambda) } if (!is.null(colnames(x))) { colnames(out) <- colnames(x) } if (is.null(biasadj)) { biasadj <- attr(lambda, "biasadj") } if (!is.logical(biasadj)) { warning("biasadj information not found, defaulting to FALSE.") biasadj <- FALSE } if (biasadj) { if (is.null(fvar)) { stop("fvar must be provided when biasadj=TRUE") } if (is.list(fvar)) { # Create fvar from forecast interval level <- max(fvar$level) if (NCOL(fvar$upper) > 1 && NCOL(fvar$lower)) { i <- match(level, fvar$level) fvar$upper <- fvar$upper[, i] fvar$lower <- fvar$lower[, i] } if (level > 1) { level <- level / 100 } level <- mean(c(level, 1)) # Note: Use BoxCox transformed upper and lower values fvar <- as.numeric((fvar$upper - fvar$lower) / stats::qnorm(level) / 2) ^ 2 } if (NCOL(fvar) > 1) { fvar <- diag(fvar) } out <- out * (1 + 0.5 * fvar * (1 - lambda) / (out) ^ (2 * lambda)) } return(out) } # Deprecated InvBoxCoxf <- function(x=NULL, fvar=NULL, lambda=NULL) { message("Deprecated, use InvBoxCox instead") if (is.null(lambda)) { stop("Must specify lambda using lambda=numeric(1)") } if (is.null(fvar)) { level <- max(x$level) if (NCOL(x$upper) > 1 && NCOL(x$lower)) { i <- match(level, x$level) x$upper <- x$upper[, i] x$lower <- x$lower[, i] } if (level > 1) { level <- level / 100 } level <- mean(c(level, 1)) # Note: Use BoxCox transformed upper and lower values fvar <- ((x$upper - x$lower) / stats::qnorm(level) / 2) ^ 2 } else { x <- list(mean = x) } if ("matrix" %in% class(fvar)) { fvar <- diag(fvar) } return(x$mean * (1 + 0.5 * fvar * (1 - lambda) / (x$mean) ^ (2 * lambda))) } #' 
Forecasting using Structural Time Series models #' #' Returns forecasts and other information for univariate structural time #' series models. #' #' This function calls \code{predict.StructTS} and constructs an object of #' class "\code{forecast}" from the results. #' #' @param object An object of class "\code{StructTS}". Usually the result of a #' call to \code{\link[stats]{StructTS}}. #' @param h Number of periods for forecasting #' @param level Confidence level for prediction intervals. #' @param fan If TRUE, level is set to seq(51,99,by=3). This is suitable for #' fan plots. #' @param ... Other arguments. #' @inheritParams forecast #' #' @return An object of class "\code{forecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{forecast.StructTS}. #' #' An object of class \code{"forecast"} is a list containing at least the #' following elements: \item{model}{A list containing information about the #' fitted model} \item{method}{The name of the forecasting method as a #' character string} \item{mean}{Point forecasts as a time series} #' \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper #' limits for prediction intervals} \item{level}{The confidence values #' associated with the prediction intervals} \item{x}{The original time series #' (either \code{object} itself or the time series used to create the model #' stored as \code{object}).} \item{residuals}{Residuals from the fitted model. #' That is x minus fitted values.} \item{fitted}{Fitted values (one-step #' forecasts)} #' @author Rob J Hyndman #' @seealso \code{\link[stats]{StructTS}}. #' @keywords ts #' @examples #' fit <- StructTS(WWWusage,"level") #' plot(forecast(fit)) #' #' @export forecast.StructTS <- function(object, h=ifelse(object$coef["epsilon"] > 1e-10, 2 * object$xtsp[3], 10), level=c(80, 95), fan=FALSE, lambda=NULL, biasadj=NULL, ...) 
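# The body below delegates to predict.StructTS(), builds normal prediction
# intervals from the returned standard errors, and labels the method according
# to the fitted components ("seas" -> basic structural model, "slope" -> local
# linear, otherwise local level).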
{ x <- object$data pred <- predict(object, n.ahead = h) if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 && max(level) < 1) { level <- 100 * level } else if (min(level) < 0 || max(level) > 99.99) { stop("Confidence limit out of range") } } nint <- length(level) upper <- lower <- matrix(NA, ncol = nint, nrow = length(pred$pred)) for (i in 1:nint) { qq <- qnorm(0.5 * (1 + level[i] / 100)) lower[, i] <- pred$pred - qq * pred$se upper[, i] <- pred$pred + qq * pred$se } colnames(lower) <- colnames(upper) <- paste(level, "%", sep = "") if (is.element("seas", names(object$coef))) { method <- "Basic structural model" } else if (is.element("slope", names(object$coef))) { method <- "Local linear structural model" } else { method <- "Local level structural model" } fits <- ts(c(x - residuals(object))) tsp(fits) <- tsp(x) if (!is.null(lambda)) { fits <- InvBoxCox(fits, lambda) x <- InvBoxCox(x, lambda) pred$pred <- InvBoxCox(pred$pred, lambda, biasadj, list(level = level, upper = upper, lower = lower)) lower <- InvBoxCox(lower, lambda) upper <- InvBoxCox(upper, lambda) } return(structure( list( method = method, model = object, level = level, mean = pred$pred, lower = lower, upper = upper, x = x, series = object$series, fitted = fits, residuals = x-fits ), class = "forecast" )) } #' Forecasting using Holt-Winters objects #' #' Returns forecasts and other information for univariate Holt-Winters time #' series models. #' #' This function calls \code{\link[stats]{predict.HoltWinters}} and constructs #' an object of class "\code{forecast}" from the results. #' #' It is included for completeness, but the \code{\link{ets}} is recommended #' for use instead of \code{\link[stats]{HoltWinters}}. #' #' @param object An object of class "\code{HoltWinters}". Usually the result of #' a call to \code{\link[stats]{HoltWinters}}. #' @param h Number of periods for forecasting #' @param level Confidence level for prediction intervals. #' @param fan If TRUE, level is set to seq(51,99,by=3). This is suitable for #' fan plots. #' @param ... Other arguments. #' @inheritParams forecast #' #' @return An object of class "\code{forecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by #' \code{forecast.HoltWinters}. #' #' An object of class \code{"forecast"} is a list containing at least the #' following elements: \item{model}{A list containing information about the #' fitted model} \item{method}{The name of the forecasting method as a #' character string} \item{mean}{Point forecasts as a time series} #' \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper #' limits for prediction intervals} \item{level}{The confidence values #' associated with the prediction intervals} \item{x}{The original time series #' (either \code{object} itself or the time series used to create the model #' stored as \code{object}).} \item{residuals}{Residuals from the fitted #' model.} \item{fitted}{Fitted values (one-step forecasts)} #' @author Rob J Hyndman #' @seealso \code{\link[stats]{predict.HoltWinters}}, #' \code{\link[stats]{HoltWinters}}. 
#' @keywords ts #' @examples #' fit <- HoltWinters(WWWusage,gamma=FALSE) #' plot(forecast(fit)) #' #' @export forecast.HoltWinters <- function(object, h=ifelse(frequency(object$x) > 1, 2 * frequency(object$x), 10), level=c(80, 95), fan=FALSE, lambda=NULL, biasadj=NULL, ...) { x <- object$x if (!is.null(object$exponential)) { if (object$exponential) { stop("Forecasting for exponential trend not yet implemented.") } } if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 && max(level) < 1) { level <- 100 * level } else if (min(level) < 0 || max(level) > 99.99) { stop("Confidence limit out of range") } } nint <- length(level) pred <- predict(object, n.ahead = h, prediction.interval = TRUE, level = level[1] / 100) pmean <- pred[, 1] upper <- lower <- matrix(NA, ncol = nint, nrow = length(pred[, 1])) se <- (pred[, 2] - pred[, 3]) / (2 * qnorm(0.5 * (1 + level[1] / 100))) for (i in 1:nint) { qq <- qnorm(0.5 * (1 + level[i] / 100)) lower[, i] <- pmean - qq * se upper[, i] <- pmean + qq * se } colnames(lower) <- colnames(upper) <- paste(level, "%", sep = "") if (!is.null(lambda)) { fitted <- InvBoxCox(object$fitted[, 1], lambda) x <- InvBoxCox(x, lambda) pmean <- InvBoxCox(pmean, lambda, biasadj, list(level = level, upper = upper, lower = lower)) lower <- InvBoxCox(lower, lambda) upper <- InvBoxCox(upper, lambda) } else { fitted <- object$fitted[, 1] } # Pad fitted values with NAs nf <- length(fitted) n <- length(x) fitted <- ts(c(rep(NA, n - nf), fitted)) tsp(fitted) <- tsp(object$x) return(structure( list( method = "HoltWinters", model = object, level = level, mean = pmean, lower = lower, upper = upper, x = x, series = deparse(object$call$x), fitted = fitted, residuals = x - fitted ), class = "forecast" )) } ## CROSTON #' Forecasts for intermittent demand using Croston's method #' #' Returns forecasts and other information for Croston's forecasts applied to #' y. #' #' Based on Croston's (1972) method for intermittent demand forecasting, also #' described in Shenstone and Hyndman (2005). Croston's method involves using #' simple exponential smoothing (SES) on the non-zero elements of the time #' series and a separate application of SES to the times between non-zero #' elements of the time series. The smoothing parameters of the two #' applications of SES are assumed to be equal and are denoted by \code{alpha}. #' #' Note that prediction intervals are not computed as Croston's method has no #' underlying stochastic model. #' #' @param y a numeric vector or time series of class \code{ts} #' @param h Number of periods for forecasting. #' @param alpha Value of alpha. Default value is 0.1. #' @param x Deprecated. Included for backwards compatibility. #' @return An object of class \code{"forecast"} is a list containing at least #' the following elements: \item{model}{A list containing information about the #' fitted model. The first element gives the model used for non-zero demands. #' The second element gives the model used for times between non-zero demands. #' Both elements are of class \code{forecast}.} \item{method}{The name of the #' forecasting method as a character string} \item{mean}{Point forecasts as a #' time series} \item{x}{The original time series (either \code{object} itself #' or the time series used to create the model stored as \code{object}).} #' \item{residuals}{Residuals from the fitted model. 
That is y minus fitted #' values.} \item{fitted}{Fitted values (one-step forecasts)} #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{croston} and #' associated functions. #' @author Rob J Hyndman #' @seealso \code{\link{ses}}. #' @references Croston, J. (1972) "Forecasting and stock control for #' intermittent demands", \emph{Operational Research Quarterly}, \bold{23}(3), #' 289-303. #' #' Shenstone, L., and Hyndman, R.J. (2005) "Stochastic models underlying #' Croston's method for intermittent demand forecasting". \emph{Journal of #' Forecasting}, \bold{24}, 389-402. #' @keywords ts #' @examples #' y <- rpois(20,lambda=.3) #' fcast <- croston(y) #' plot(fcast) #' #' @export croston <- function(y, h=10, alpha=0.1, x=y) { if (sum(x < 0) > 0) { stop("Series should not contain negative values") } out <- croston2(x, h, alpha) out$x <- x if (!is.null(out$fitted)) { out$residuals <- x - out$fitted } out$method <- "Croston's method" out$series <- deparse(substitute(y)) return(structure(out, class = "forecast")) } croston2 <- function(x, h=10, alpha=0.1, nofits=FALSE) { x <- as.ts(x) y <- x[x > 0] tsp.x <- tsp(x) freq.x <- tsp.x[3] start.f <- tsp.x[2] + 1 / freq.x if (length(y) == 0) # All historical values are equal to zero { fc <- ts(rep(0, h), start = start.f, frequency = freq.x) if (nofits) { return(fc) } else { return(list(mean = fc, fitted = ts(x * 0, start = tsp.x[1], frequency = freq.x))) } } tt <- diff(c(0, (1:length(x))[x > 0])) # Times between non-zero observations if (length(y) == 1 && length(tt) == 1) # Only one non-zero observation { y.f <- list(mean = ts(rep(y, h), start = start.f, frequency = freq.x)) p.f <- list(mean = ts(rep(tt, h), start = start.f, frequency = freq.x)) } else if (length(y) <= 1 || length(tt) <= 1) { # length(tt)==0 but length(y)>0. How does that happen? 
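    # Degenerate series: fewer than two non-zero demands or inter-demand gaps,
    # so neither SES model can be estimated; return NA forecasts.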
return(list(mean = ts(rep(NA, h), start = start.f, frequency = freq.x))) } else { y.f <- ses(y, alpha = alpha, initial = "simple", h = h, PI = FALSE) p.f <- ses(tt, alpha = alpha, initial = "simple", h = h, PI = FALSE) } ratio <- ts(y.f$mean / p.f$mean, start = start.f, frequency = freq.x) if (nofits) { return(ratio) } else { n <- length(x) fits <- x * NA if (n > 1) { for (i in 1:(n - 1)) fits[i + 1] <- croston2(x[1:i], h = 1, alpha = alpha, nofits = TRUE) } fits <- ts(fits) tsp(fits) <- tsp.x return(list(mean = ratio, fitted = fits, model = list(demand = y.f, period = p.f))) } } forecast/R/makeParamVector.R0000644000176200001440000001734613553673010015503 0ustar liggesusers# TODO: Add comment # # Author: srazbash ############################################################################### unParameteriseTBATS <- function(param.vector, control) { # print(control) if (control$use.box.cox) { lambda <- param.vector[1] alpha <- param.vector[2] if (control$use.beta) { if (control$use.damping) { small.phi <- param.vector[3] beta <- param.vector[4] gamma.start <- 5 } else { small.phi <- 1 beta <- param.vector[3] gamma.start <- 4 } } else { small.phi <- NULL beta <- NULL gamma.start <- 3 } if (control$length.gamma > 0) { gamma.one.vector <- param.vector[gamma.start:(gamma.start + (control$length.gamma / 2) - 1)] gamma.two.vector <- param.vector[(gamma.start + (control$length.gamma / 2)):(gamma.start + (control$length.gamma) - 1)] final.gamma.pos <- gamma.start + control$length.gamma - 1 } else { gamma.one.vector <- NULL gamma.two.vector <- NULL final.gamma.pos <- gamma.start - 1 } if (control$p != 0) { ar.coefs <- param.vector[(final.gamma.pos + 1):(final.gamma.pos + control$p)] } else { ar.coefs <- NULL } if (control$q != 0) { ma.coefs <- param.vector[(final.gamma.pos + control$p + 1):length(param.vector)] } else { ma.coefs <- NULL } } else { lambda <- NULL alpha <- param.vector[1] if (control$use.beta) { if (control$use.damping) { small.phi <- param.vector[2] beta <- param.vector[3] gamma.start <- 4 } else { small.phi <- 1 beta <- param.vector[2] gamma.start <- 3 } } else { small.phi <- NULL beta <- NULL gamma.start <- 2 } if (control$length.gamma > 0) { gamma.one.vector <- param.vector[gamma.start:(gamma.start + (control$length.gamma / 2) - 1)] gamma.two.vector <- param.vector[(gamma.start + (control$length.gamma / 2)):(gamma.start + (control$length.gamma) - 1)] final.gamma.pos <- gamma.start + control$length.gamma - 1 } else { gamma.one.vector <- NULL gamma.two.vector <- NULL final.gamma.pos <- gamma.start - 1 } if (control$p != 0) { ar.coefs <- param.vector[(final.gamma.pos + 1):(final.gamma.pos + control$p)] } else { ar.coefs <- NULL } if (control$q != 0) { ma.coefs <- param.vector[(final.gamma.pos + control$p + 1):length(param.vector)] } else { ma.coefs <- NULL } } return(list(lambda = lambda, alpha = alpha, beta = beta, small.phi = small.phi, gamma.one.v = gamma.one.vector, gamma.two.v = gamma.two.vector, ar.coefs = ar.coefs, ma.coefs = ma.coefs)) } makeParscale <- function(control) { # print(control) if (control$use.box.cox) { parscale <- c(.001, .01) } else { parscale <- .01 } if (control$use.beta) { if (control$use.damping) { parscale <- c(parscale, 1e-2, 1e-2) } else { parscale <- c(parscale, 1e-2) } } if (control$length.gamma > 0) { parscale <- c(parscale, rep(1e-5, control$length.gamma)) } if ((control$p != 0) | (control$q != 0)) { parscale <- c(parscale, rep(1e-1, (control$p + control$q))) } # print(parscale) return(parscale) } 
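# A minimal sketch (purely illustrative numbers, not fitted values) of how the
# packing helpers in this file fit together: parameterise() flattens the
# coefficients into the single vector that optim() works on, and
# unParameterise() recovers them via the matching control list.
# pars <- parameterise(alpha = 0.1, beta.v = 0.05, small.phi = 0.98,
#                      gamma.v = 1e-4, lambda = 0.5)
# fit <- unParameterise(pars$vect, pars$control)
# fit$alpha  # 0.1 recovered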
############################################################################################################################################################################################## ## BATS related stuff below ######################################## makeParscaleBATS <- function(control) { # print(control) if (control$use.box.cox) { parscale <- c(.001, .1) } else { parscale <- .1 } if (control$use.beta) { if (control$use.damping) { parscale <- c(parscale, 1e-2, 1e-2) } else { parscale <- c(parscale, 1e-2) } } if (control$length.gamma > 0) { parscale <- c(parscale, rep(1e-2, control$length.gamma)) } if ((control$p != 0) | (control$q != 0)) { parscale <- c(parscale, rep(1e-1, (control$p + control$q))) } # print(parscale) return(parscale) } parameterise <- function(alpha, beta.v=NULL, small.phi=1, gamma.v=NULL, lambda=NULL, ar.coefs=NULL, ma.coefs=NULL) { # print("urg") # print(lambda) if (!is.null(lambda)) { param.vector <- cbind(lambda, alpha) use.box.cox <- TRUE } else { # print("hello") param.vector <- alpha use.box.cox <- FALSE # print(use.box.cox) } if (!is.null(beta.v)) { use.beta <- TRUE if (is.null(small.phi)) { use.damping <- FALSE } else if (small.phi != 1) { param.vector <- cbind(param.vector, small.phi) use.damping <- TRUE } else { use.damping <- FALSE } param.vector <- cbind(param.vector, beta.v) } else { use.beta <- FALSE use.damping <- FALSE } if (!is.null(gamma.v)) { gamma.v <- matrix(gamma.v, nrow = 1, ncol = length(gamma.v)) param.vector <- cbind(param.vector, gamma.v) length.gamma <- length(gamma.v) } else { length.gamma <- 0 } if (!is.null(ar.coefs)) { ar.coefs <- matrix(ar.coefs, nrow = 1, ncol = length(ar.coefs)) param.vector <- cbind(param.vector, ar.coefs) p <- length(ar.coefs) } else { p <- 0 } if (!is.null(ma.coefs)) { ma.coefs <- matrix(ma.coefs, nrow = 1, ncol = length(ma.coefs)) param.vector <- cbind(param.vector, ma.coefs) q <- length(ma.coefs) } else { q <- 0 } # print(use.box.cox) control <- list(use.beta = use.beta, use.box.cox = use.box.cox, use.damping = use.damping, length.gamma = length.gamma, p = p, q = q) return(list(vect = as.numeric(param.vector), control = control)) } unParameterise <- function(param.vector, control) { # print(control) if (control$use.box.cox) { lambda <- param.vector[1] alpha <- param.vector[2] if (control$use.beta) { if (control$use.damping) { small.phi <- param.vector[3] beta <- param.vector[4] gamma.start <- 5 } else { small.phi <- 1 beta <- param.vector[3] gamma.start <- 4 } } else { small.phi <- NULL beta <- NULL gamma.start <- 3 } if (control$length.gamma > 0) { gamma.vector <- param.vector[gamma.start:(gamma.start + control$length.gamma - 1)] final.gamma.pos <- gamma.start + control$length.gamma - 1 } else { gamma.vector <- NULL final.gamma.pos <- gamma.start - 1 } if (control$p != 0) { ar.coefs <- param.vector[(final.gamma.pos + 1):(final.gamma.pos + control$p)] } else { ar.coefs <- NULL } if (control$q != 0) { ma.coefs <- param.vector[(final.gamma.pos + control$p + 1):length(param.vector)] } else { ma.coefs <- NULL } } else { lambda <- NULL alpha <- param.vector[1] if (control$use.beta) { if (control$use.damping) { small.phi <- param.vector[2] beta <- param.vector[3] gamma.start <- 4 } else { small.phi <- 1 beta <- param.vector[2] gamma.start <- 3 } } else { small.phi <- NULL beta <- NULL gamma.start <- 2 } if (control$length.gamma > 0) { gamma.vector <- param.vector[gamma.start:(gamma.start + control$length.gamma - 1)] final.gamma.pos <- gamma.start + control$length.gamma - 1 } else { gamma.vector <- NULL 
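        # No seasonal terms: leave the gamma end-marker just before where the
        # ARMA coefficients (if any) begin in the parameter vector.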
final.gamma.pos <- gamma.start - 1 } if (control$p != 0) { ar.coefs <- param.vector[(final.gamma.pos + 1):(final.gamma.pos + control$p)] } else { ar.coefs <- NULL } if (control$q != 0) { ma.coefs <- param.vector[(final.gamma.pos + control$p + 1):length(param.vector)] } else { ma.coefs <- NULL } } return(list(lambda = lambda, alpha = alpha, beta = beta, small.phi = small.phi, gamma.v = gamma.vector, ar.coefs = ar.coefs, ma.coefs = ma.coefs)) } forecast/R/getResponse.R0000644000176200001440000000640513553673010014712 0ustar liggesusers# Functions to return the response variable for different models. # If a Box-Cox transformation is used, the series returned here should # be on the original scale, not the Box-Cox transformed scale. #' Get response variable from time series model. #' #' \code{getResponse} is a generic function for extracting the historical data #' from a time series model (including \code{Arima}, \code{ets}, \code{ar}, #' \code{fracdiff}), a linear model of class \code{lm}, or a forecast object. #' The function invokes particular \emph{methods} which depend on the class of #' the first argument. #' #' #' @param object a time series model or forecast object. #' @param ... Additional arguments that are ignored. #' @return A numerical vector or a time series object of class \code{ts}. #' @author Rob J Hyndman #' @keywords ts #' #' @export getResponse <- function(object, ...) UseMethod("getResponse") #' @rdname getResponse #' @export getResponse.default <- function(object, ...) { if (is.list(object)) { return(object$x) } else { return(NULL) } } #' @rdname getResponse #' @export getResponse.lm <- function(object, ...) { if(!is.null(object[["x"]])){ object[["x"]] } else{ responsevar <- deparse(formula(object)[[2]]) model.frame(object$model)[, responsevar] } } #' @rdname getResponse #' @export getResponse.Arima <- function(object, ...) { if (is.element("x", names(object))) { x <- object$x } else { series.name <- object$series if (is.null(series.name)) { stop("missing original time series") } else { x <- try(eval.parent(parse(text = series.name)), silent = TRUE) if (is.element("try-error", class(x))) { # Try one level further up the chain x <- try(eval.parent(parse(text = series.name), 2), silent = TRUE) } if (is.element("try-error", class(x))) { # Give up return(NULL) } } } return(as.ts(x)) } #' @rdname getResponse #' @export getResponse.fracdiff <- function(object, ...) { if (is.element("x", names(object))) { x <- object$x } else { series.name <- as.character(object$call)[2] if (is.null(series.name)) { stop("missing original time series") } else { x <- try(eval.parent(parse(text = series.name)), silent = TRUE) if (is.element("try-error", class(x))) { # Try one level further up the chain x <- try(eval.parent(parse(text = series.name), 2), silent = TRUE) } if (is.element("try-error", class(x))) { # Give up return(NULL) } } } return(as.ts(x)) } #' @rdname getResponse #' @export getResponse.ar <- function(object, ...) { getResponse.Arima(object) } #' @rdname getResponse #' @export getResponse.tbats <- function(object, ...) { if (is.element("y", names(object))) { y <- object$y } else { return(NULL) } return(as.ts(y)) } #' @rdname getResponse #' @export getResponse.bats <- function(object, ...) { return(getResponse.tbats(object, ...)) } #' @rdname getResponse #' @export getResponse.mforecast <- function(object, ...) { return(do.call(cbind, lapply(object$forecast, function(x) x$x))) } #' @rdname getResponse #' @export getResponse.baggedModel <- function(object, ...) 
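# Bagged models store the original series as `y`, mirroring the bats/tbats
# accessors above.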
{ if (is.element("y", names(object))) { y <- object$y } else { return(NULL) } return(as.ts(y)) }forecast/R/forecast.R0000644000176200001440000006225513553673010014227 0ustar liggesusers#' Forecasting Functions for Time Series and Linear Models #' #' Methods and tools for displaying and analysing univariate time series #' forecasts including exponential smoothing via state space models and #' automatic ARIMA modelling. #' #' \tabular{ll}{ Package: \tab forecast\cr Type: \tab Package\cr License: \tab #' GPL3\cr LazyLoad: \tab yes\cr } #' #' @docType package #' @name forecast-package #' @author Rob J Hyndman #' #' Maintainer: Rob.Hyndman@monash.edu #' @keywords package NULL # Instead of "_PACKAGE" to remove inclusion of \alias{forecast} # "_PACKAGE" ## Generic forecast functions ## Part of forecast and demography packages #' Forecasting time series #' #' \code{forecast} is a generic function for forecasting from time series or #' time series models. The function invokes particular \emph{methods} which #' depend on the class of the first argument. #' #' For example, the function \code{\link{forecast.Arima}} makes forecasts based #' on the results produced by \code{\link[stats]{arima}}. #' #' If \code{model=NULL},the function \code{\link{forecast.ts}} makes forecasts #' using \code{\link{ets}} models (if the data are non-seasonal or the seasonal #' period is 12 or less) or \code{\link{stlf}} (if the seasonal period is 13 or #' more). #' #' If \code{model} is not \code{NULL}, \code{forecast.ts} will apply the #' \code{model} to the \code{object} time series, and then generate forecasts #' accordingly. #' #' @aliases print.forecast summary.forecast as.data.frame.forecast as.ts.forecast #' #' @param object a time series or time series model for which forecasts are #' required #' @param h Number of periods for forecasting #' @param level Confidence level for prediction intervals. #' @param fan If TRUE, \code{level} is set to \code{seq(51,99,by=3)}. This is #' suitable for fan plots. #' @param robust If TRUE, the function is robust to missing values and outliers #' in \code{object}. This argument is only valid when \code{object} is of class #' \code{ts}. #' @param lambda Box-Cox transformation parameter. If \code{lambda="auto"}, #' then a transformation is automatically selected using \code{BoxCox.lambda}. #' The transformation is ignored if NULL. Otherwise, #' data transformed before model is estimated. #' @param find.frequency If TRUE, the function determines the appropriate #' period, if the data is of unknown period. #' @param allow.multiplicative.trend If TRUE, then ETS models with #' multiplicative trends are allowed. Otherwise, only additive or no trend ETS #' models are permitted. #' @param model An object describing a time series model; e.g., one of of class #' \code{ets}, \code{Arima}, \code{bats}, \code{tbats}, or \code{nnetar}. #' @param ... Additional arguments affecting the forecasts produced. If #' \code{model=NULL}, \code{forecast.ts} passes these to \code{\link{ets}} or #' \code{\link{stlf}} depending on the frequency of the time series. If #' \code{model} is not \code{NULL}, the arguments are passed to the relevant #' modelling function. #' @inheritParams BoxCox #' #' @return An object of class "\code{forecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. 
#' #' The generic accessors functions \code{fitted.values} and \code{residuals} #' extract various useful features of the value returned by #' \code{forecast$model}. #' #' An object of class \code{"forecast"} is a list usually containing at least #' the following elements: \item{model}{A list containing information about the #' fitted model} \item{method}{The name of the forecasting method as a #' character string} \item{mean}{Point forecasts as a time series} #' \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper #' limits for prediction intervals} \item{level}{The confidence values #' associated with the prediction intervals} \item{x}{The original time series #' (either \code{object} itself or the time series used to create the model #' stored as \code{object}).} \item{residuals}{Residuals from the fitted model. #' For models with additive errors, the residuals will be x minus the fitted #' values.} \item{fitted}{Fitted values (one-step forecasts)} #' @author Rob J Hyndman #' @seealso Other functions which return objects of class \code{"forecast"} are #' \code{\link{forecast.ets}}, \code{\link{forecast.Arima}}, #' \code{\link{forecast.HoltWinters}}, \code{\link{forecast.StructTS}}, #' \code{\link{meanf}}, \code{\link{rwf}}, \code{\link{splinef}}, #' \code{\link{thetaf}}, \code{\link{croston}}, \code{\link{ses}}, #' \code{\link{holt}}, \code{\link{hw}}. #' @keywords ts #' @examples #' #' WWWusage %>% forecast %>% plot #' fit <- ets(window(WWWusage, end=60)) #' fc <- forecast(WWWusage, model=fit) #' #' @export forecast <- function(object, ...) UseMethod("forecast") #' @rdname forecast #' @export forecast.default <- function(object, ...) forecast.ts(object, ...) ## A function determining the appropriate period, if the data is of unknown period ## Written by Rob Hyndman #' Find dominant frequency of a time series #' #' \code{findfrequency} returns the period of the dominant frequency of a time #' series. For seasonal data, it will return the seasonal period. For cyclic #' data, it will return the average cycle length. #' #' The dominant frequency is determined from a spectral analysis of the time #' series. First, a linear trend is removed, then the spectral density function #' is estimated from the best fitting autoregressive model (based on the AIC). #' If there is a large (possibly local) maximum in the spectral density #' function at frequency \eqn{f}, then the function will return the period #' \eqn{1/f} (rounded to the nearest integer). If no such dominant frequency #' can be found, the function will return 1. #' #' @param x a numeric vector or time series of class \code{ts} #' @return an integer value #' @author Rob J Hyndman #' @keywords ts #' @examples #' #' findfrequency(USAccDeaths) # Monthly data #' findfrequency(taylor) # Half-hourly data #' findfrequency(lynx) # Annual data #' #' @export findfrequency <- function(x) { n <- length(x) x <- as.ts(x) # Remove trend from data x <- residuals(tslm(x ~ trend)) # Compute spectrum by fitting ar model to largest section of x n.freq <- 500 spec <- spec.ar(c(na.contiguous(x)), plot = FALSE, n.freq = n.freq) if (max(spec$spec) > 10) # Arbitrary threshold chosen by trial and error. 
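  # A spectral peak at frequency f implies a period of round(1/f); a peak at
  # f = 0 gives period = Inf, in which case the next local maximum is tried.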
{ period <- floor(1 / spec$freq[which.max(spec$spec)] + 0.5) if (period == Inf) # Find next local maximum { j <- which(diff(spec$spec) > 0) if (length(j) > 0) { nextmax <- j[1] + which.max(spec$spec[(j[1] + 1):n.freq]) if (nextmax < length(spec$freq)) { period <- floor(1 / spec$freq[nextmax] + 0.5) } else { period <- 1L } } else { period <- 1L } } } else { period <- 1L } return(as.integer(period)) } #' @rdname forecast #' @export forecast.ts <- function(object, h=ifelse(frequency(object) > 1, 2 * frequency(object), 10), level=c(80, 95), fan=FALSE, robust=FALSE, lambda = NULL, biasadj = FALSE, find.frequency = FALSE, allow.multiplicative.trend=FALSE, model=NULL, ...) { n <- length(object) if (find.frequency) { object <- ts(object, frequency = findfrequency(object)) obj.freq <- frequency(object) } else { obj.freq <- frequency(object) } if (robust) { object <- tsclean(object, replace.missing = TRUE, lambda = lambda) } if (!is.null(model)) { if (inherits(model, "forecast")) { model <- model$model } if (inherits(model, "ets")) { fit <- ets(object, model = model, ...) } else if (inherits(model, "Arima")) { fit <- Arima(object, model = model, ...) } else if (inherits(model, "tbats")) { fit <- tbats(object, model = model, ...) } else if (inherits(model, "bats")) { fit <- bats(object, model = model, ...) } else if (inherits(model, "nnetar")) { fit <- nnetar(object, model = model, ...) } else { stop("Unknown model class") } return(forecast(fit, h = h, level = level, fan = fan)) } if (n > 3) { if (obj.freq < 13) { out <- forecast( ets(object, lambda = lambda, biasadj = biasadj, allow.multiplicative.trend = allow.multiplicative.trend, ...), h = h, level = level, fan = fan ) } else if (n > 2 * obj.freq) { out <- stlf( object, h = h, level = level, fan = fan, lambda = lambda, biasadj = biasadj, allow.multiplicative.trend = allow.multiplicative.trend, ... ) } else { out <- forecast( ets(object, model = "ZZN", lambda = lambda, biasadj = biasadj, allow.multiplicative.trend = allow.multiplicative.trend, ...), h = h, level = level, fan = fan ) } } else { out <- meanf(object, h = h, level = level, fan = fan, lambda = lambda, biasadj = biasadj, ...) } out$series <- deparse(substitute(object)) return(out) } #' @export print.forecast <- function(x, ...) { print(as.data.frame(x)) } #' @export summary.forecast <- function(object, ...) { cat(paste("\nForecast method:", object$method)) # cat(paste("\n\nCall:\n",deparse(object$call))) cat(paste("\n\nModel Information:\n")) print(object$model) cat("\nError measures:\n") print(accuracy(object)) if (is.null(object$mean)) { cat("\n No forecasts\n") } else { cat("\nForecasts:\n") print(object) } } plotlmforecast <- function(object, PI, shaded, shadecols, col, fcol, pi.col, pi.lty, xlim=NULL, ylim, main, ylab, xlab, ...) { xvar <- attributes(terms(object$model))$term.labels if (length(xvar) > 1) { stop("Forecast plot for regression models only available for a single predictor") } else if (ncol(object$newdata) == 1) { # Make sure column has correct name colnames(object$newdata) <- xvar } if (is.null(xlim)) { xlim <- range(object$newdata[, xvar], model.frame(object$model)[, xvar]) } if (is.null(ylim)) { ylim <- range(object$upper, object$lower, fitted(object$model) + residuals(object$model)) } plot( formula(object$model), data = model.frame(object$model), xlim = xlim, ylim = ylim, xlab = xlab, ylab = ylab, main = main, col = col, ... 
) abline(object$model) nf <- length(object$mean) if (PI) { nint <- length(object$level) idx <- rev(order(object$level)) if (is.null(shadecols)) { # require(colorspace) if (min(object$level) < 50) { # Using very small confidence levels. shadecols <- rev(colorspace::sequential_hcl(100)[object$level]) } else { # This should happen almost all the time. Colors mapped to levels. shadecols <- rev(colorspace::sequential_hcl(52)[object$level - 49]) } } if (length(shadecols) == 1) { if (shadecols == "oldstyle") { # Default behaviour up to v3.25. shadecols <- heat.colors(nint + 2)[switch(1 + (nint > 1), 2, nint:1) + 1] } } for (i in 1:nf) { for (j in 1:nint) { if (shaded) { lines(rep(object$newdata[i, xvar], 2), c(object$lower[i, idx[j]], object$upper[i, idx[j]]), col = shadecols[j], lwd = 6) } else { lines(rep(object$newdata[i, xvar], 2), c(object$lower[i, idx[j]], object$upper[i, idx[j]]), col = pi.col, lty = pi.lty) } } } } points(object$newdata[, xvar], object$mean, pch = 19, col = fcol) } #' Forecast plot #' #' Plots historical data with forecasts and prediction intervals. #' #' \code{autoplot} will produce a ggplot object. #' #' plot.splineforecast autoplot.splineforecast #' @param x Forecast object produced by \code{\link{forecast}}. #' @param object Forecast object produced by \code{\link{forecast}}. Used for #' ggplot graphics (S3 method consistency). #' @param include number of values from time series to include in plot. Default #' is all values. #' @param PI Logical flag indicating whether to plot prediction intervals. #' @param showgap If \code{showgap=FALSE}, the gap between the historical #' observations and the forecasts is removed. #' @param shaded Logical flag indicating whether prediction intervals should be #' shaded (\code{TRUE}) or lines (\code{FALSE}) #' @param shadebars Logical flag indicating if prediction intervals should be #' plotted as shaded bars (if \code{TRUE}) or a shaded polygon (if #' \code{FALSE}). Ignored if \code{shaded=FALSE}. Bars are plotted by default #' if there are fewer than five forecast horizons. #' @param shadecols Colors for shaded prediction intervals. To get default #' colors used prior to v3.26, set \code{shadecols="oldstyle"}. #' @param col Colour for the data line. #' @param fcol Colour for the forecast line. #' @param flty Line type for the forecast line. #' @param flwd Line width for the forecast line. #' @param pi.col If \code{shaded=FALSE} and \code{PI=TRUE}, the prediction #' intervals are plotted in this colour. #' @param pi.lty If \code{shaded=FALSE} and \code{PI=TRUE}, the prediction #' intervals are plotted using this line type. #' @param ylim Limits on y-axis. #' @param main Main title. #' @param xlab X-axis label. #' @param ylab Y-axis label. #' @param series Matches an unidentified forecast layer with a coloured object #' on the plot. #' @param fitcol Line colour for fitted values. #' @param type 1-character string giving the type of plot desired. As for #' \code{\link[graphics]{plot.default}}. #' @param pch Plotting character (if \code{type=="p"} or \code{type=="o"}). #' @param ... Other plotting parameters to affect the plot. #' @return None. #' @author Rob J Hyndman & Mitchell O'Hara-Wild #' @seealso \code{\link[stats]{plot.ts}} #' @references Hyndman and Athanasopoulos (2018) \emph{Forecasting: principles #' and practice}, 2nd edition, OTexts: Melbourne, Australia. 
#' \url{https://OTexts.org/fpp2/} #' @keywords ts #' @examples #' library(ggplot2) #' #' wine.fit <- hw(wineind,h=48) #' plot(wine.fit) #' autoplot(wine.fit) #' #' fit <- tslm(wineind ~ fourier(wineind,4)) #' fcast <- forecast(fit, newdata=data.frame(fourier(wineind,4,20))) #' autoplot(fcast) #' #' @export plot.forecast <- function(x, include, PI=TRUE, showgap = TRUE, shaded=TRUE, shadebars=(length(x$mean) < 5), shadecols=NULL, col=1, fcol=4, pi.col=1, pi.lty=2, ylim=NULL, main=NULL, xlab="", ylab="", type="l", flty = 1, flwd = 2, ...) { if (is.element("x", names(x))) { # Assume stored as x xx <- x$x } else { xx <- NULL } if (is.null(x$lower) || is.null(x$upper) || is.null(x$level)) { PI <- FALSE } else if (!is.finite(max(x$upper))) { PI <- FALSE } if (!shaded) { shadebars <- FALSE } if (is.null(main)) { main <- paste("Forecasts from ", x$method, sep = "") } if (PI) { x$upper <- as.matrix(x$upper) x$lower <- as.matrix(x$lower) } if (is.element("lm", class(x$model)) && !is.element("ts", class(x$mean))) # Non time series linear model { plotlmforecast( x, PI = PI, shaded = shaded, shadecols = shadecols, col = col, fcol = fcol, pi.col = pi.col, pi.lty = pi.lty, ylim = ylim, main = main, xlab = xlab, ylab = ylab, ... ) if (PI) { return(invisible(list(mean = x$mean, lower = as.matrix(x$lower), upper = as.matrix(x$upper)))) } else { return(invisible(list(mean = x$mean))) } } # Otherwise assume x is from a time series forecast n <- length(xx) if (n == 0) { include <- 0 } else if (missing(include)) { include <- length(xx) } # Check if all historical values are missing if (n > 0) { if (sum(is.na(xx)) == length(xx)) { n <- 0 } } if (n > 0) { xx <- as.ts(xx) freq <- frequency(xx) strt <- start(xx) nx <- max(which(!is.na(xx))) xxx <- xx[1:nx] include <- min(include, nx) if (!showgap) { lastObs <- x$x[length(x$x)] lastTime <- time(x$x)[length(x$x)] x$mean <- ts(c(lastObs, x$mean), start = lastTime, frequency = freq) x$upper <- ts(rbind(lastObs, x$upper), start = lastTime, frequency = freq) x$lower <- ts(rbind(lastObs, x$lower), start = lastTime, frequency = freq) } } else { freq <- frequency(x$mean) strt <- start(x$mean) nx <- include <- 1 xx <- xxx <- ts(NA, frequency = freq, end = tsp(x$mean)[1] - 1 / freq) if (!showgap) { warning("Removing the gap requires historical data, provide this via model$x. Defaulting showgap to TRUE.") } } pred.mean <- x$mean if (is.null(ylim)) { ylim <- range(c(xx[(n - include + 1):n], pred.mean), na.rm = TRUE) if (PI) { ylim <- range(ylim, x$lower, x$upper, na.rm = TRUE) } } npred <- length(pred.mean) tsx <- is.ts(pred.mean) if (!tsx) { pred.mean <- ts(pred.mean, start = nx + 1, frequency = 1) type <- "p" } plot( ts(c(xxx[(nx - include + 1):nx], rep(NA, npred)), end = tsp(xx)[2] + (nx - n) / freq + npred / freq, frequency = freq), xlab = xlab, ylim = ylim, ylab = ylab, main = main, col = col, type = type, ... ) if (PI) { if (is.ts(x$upper)) { xxx <- time(x$upper) } else { xxx <- tsp(pred.mean)[1] - 1 / freq + (1:npred) / freq } idx <- rev(order(x$level)) nint <- length(x$level) if (is.null(shadecols)) { # require(colorspace) if (min(x$level) < 50) { # Using very small confidence levels. shadecols <- rev(colorspace::sequential_hcl(100)[x$level]) } else { # This should happen almost all the time. Colors mapped to levels. shadecols <- rev(colorspace::sequential_hcl(52)[x$level - 49]) } } if (length(shadecols) == 1) { if (shadecols == "oldstyle") { # Default behaviour up to v3.25. 
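        # Legacy palette (shadecols = "oldstyle"): one heat.colors() shade per
        # prediction-interval level, matching plots produced before v3.26.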
shadecols <- heat.colors(nint + 2)[switch(1 + (nint > 1), 2, nint:1) + 1] } } for (i in 1:nint) { if (shadebars) { for (j in 1:npred) { polygon( xxx[j] + c(-0.5, 0.5, 0.5, -0.5) / freq, c(rep(x$lower[j, idx[i]], 2), rep(x$upper[j, idx[i]], 2)), col = shadecols[i], border = FALSE ) } } else if (shaded) { polygon( c(xxx, rev(xxx)), c(x$lower[, idx[i]], rev(x$upper[, idx[i]])), col = shadecols[i], border = FALSE ) } else if (npred == 1) { lines(c(xxx) + c(-0.5, 0.5) / freq, rep(x$lower[, idx[i]], 2), col = pi.col, lty = pi.lty) lines(c(xxx) + c(-0.5, 0.5) / freq, rep(x$upper[, idx[i]], 2), col = pi.col, lty = pi.lty) } else { lines(x$lower[, idx[i]], col = pi.col, lty = pi.lty) lines(x$upper[, idx[i]], col = pi.col, lty = pi.lty) } } } if (npred > 1 && !shadebars && tsx) { lines(pred.mean, lty = flty, lwd = flwd, col = fcol) } else { points(pred.mean, col = fcol, pch = 19) } if (PI) { invisible(list(mean = pred.mean, lower = x$lower, upper = x$upper)) } else { invisible(list(mean = pred.mean)) } } #' @export predict.default <- function(object, ...) { forecast(object, ...) } hfitted <- function(object, h=1, FUN=NULL, ...) { if (h == 1) { return(fitted(object)) } # Attempt to get model function if (is.null(FUN)) { FUN <- class(object) for (i in FUN) { if (exists(i)) { if (typeof(eval(parse(text = i)[[1]])) == "closure") { FUN <- i i <- "Y" break } } } if (i != "Y") { stop("Could not find appropriate function to refit, specify FUN=function") } } x <- getResponse(object) tspx <- tsp(x) fits <- fitted(object) * NA n <- length(fits) refitarg <- list(x = NULL, model = object) names(refitarg)[1] <- names(formals(FUN))[1] fcarg <- list(h = h) if (FUN == "ets") { refitarg$use.initial.values <- TRUE } for (i in 1:(n - h)) { refitarg[[1]] <- ts(x[1:i], start = tspx[1], frequency = tspx[3]) if (!is.null(object$xreg)) { refitarg$xreg <- ts(object$xreg[1:i, ], start = tspx[1], frequency = tspx[3]) fcarg$xreg <- ts(object$xreg[(i + 1):(i + h), ], start = tspx[1] + i / tspx[3], frequency = tspx[3]) } fcarg$object <- try(suppressWarnings(do.call(FUN, refitarg)), silent = TRUE) if (!is.element("try-error", class(fcarg$object))) { fits[i + h] <- suppressWarnings(do.call("forecast", fcarg)$mean[h]) } } return(fits) } # The following function is for when users don't realise they already have the forecasts. # e.g., with the dshw(), meanf() or rwf() functions. #' @export forecast.forecast <- function(object, ...) { input_names <- as.list(substitute(list(...))) # Read level argument if (is.element("level", names(input_names))) { level <- list(...)[["level"]] if (!identical(level, object$level)) { stop("Please set the level argument when the forecasts are first computed") } } # Read h argument if (is.element("h", names(input_names))) { h <- list(...)[["h"]] if (h > length(object$mean)) { stop("Please select a longer horizon when the forecasts are first computed") } tspf <- tsp(object$mean) object$mean <- ts(object$mean[1:h], start = tspf[1], frequency = tspf[3]) if (!is.null(object$upper)) { object$upper <- ts(object$upper[1:h, , drop = FALSE], start = tspf[1], frequency = tspf[3]) object$lower <- ts(object$lower[1:h, , drop = FALSE], start = tspf[1], frequency = tspf[3]) } } return(object) } subset.forecast <- function(x, ...) { tspx <- tsp(x$mean) x$mean <- subset(x$mean, ...) x$lower <- subset(ts(x$lower, start = tspx[1], frequency = tspx[3]), ...) x$upper <- subset(ts(x$upper, start = tspx[1], frequency = tspx[3]), ...) return(x) } #' Is an object a particular forecast type? 
#' #' Returns true if the forecast object is of a particular type #' #' @param x object to be tested #' @export is.forecast <- function(x) { inherits(x, "forecast") } #' @export as.ts.forecast <- function(x, ...) { df <- ts(as.matrix(as.data.frame.forecast(x))) tsp(df) <- tsp(x$mean) return(df) } #' @export as.data.frame.mforecast <- function(x, ...) { tmp <- lapply(x$forecast, as.data.frame) series <- names(tmp) times <- rownames(tmp[[1]]) h <- NROW(tmp[[1]]) output <- cbind(Time = times, Series = rep(series[1], h), tmp[[1]]) if (length(tmp) > 1) { for (i in 2:length(tmp)) output <- rbind( output, cbind(Time = times, Series = rep(series[i], h), tmp[[i]]) ) } rownames(output) <- NULL return(output) } #' @export as.data.frame.forecast <- function(x, ...) { nconf <- length(x$level) out <- matrix(x$mean, ncol = 1) ists <- is.ts(x$mean) fr.x <- frequency(x$mean) if (ists) { out <- ts(out) attributes(out)$tsp <- attributes(x$mean)$tsp } names <- c("Point Forecast") if (!is.null(x$lower) && !is.null(x$upper) && !is.null(x$level)) { x$upper <- as.matrix(x$upper) x$lower <- as.matrix(x$lower) for (i in 1:nconf) { out <- cbind(out, x$lower[, i, drop = FALSE], x$upper[, i, drop = FALSE]) names <- c(names, paste("Lo", x$level[i]), paste("Hi", x$level[i])) } } colnames(out) <- names tx <- time(x$mean) if (max(abs(tx - round(tx))) < 1e-11) { nd <- 0 } else { nd <- max(round(log10(fr.x) + 1), 2) } rownames(out) <- format(tx, nsmall = nd, digits = nd) # Rest of function borrowed from print.ts(), but with header() omitted if (!ists) { return(as.data.frame(out)) } x <- as.ts(out) calendar <- any(fr.x == c(4, 12)) && length(start(x)) == 2L Tsp <- tsp(x) if (is.null(Tsp)) { warning("series is corrupt, with no 'tsp' attribute") print(unclass(x)) return(invisible(x)) } nn <- 1 + round((Tsp[2L] - Tsp[1L]) * Tsp[3L]) if (NROW(x) != nn) { warning(gettextf("series is corrupt: length %d with 'tsp' implying %d", NROW(x), nn), domain = NA, call. = FALSE) calendar <- FALSE } if (NCOL(x) == 1) { if (calendar) { if (fr.x > 1) { dn2 <- if (fr.x == 12) { month.abb } else if (fr.x == 4) { c("Qtr1", "Qtr2", "Qtr3", "Qtr4") } else { paste("p", 1L:fr.x, sep = "") } if (NROW(x) <= fr.x && start(x)[1L] == end(x)[1L]) { dn1 <- start(x)[1L] dn2 <- dn2[1 + (start(x)[2L] - 2 + seq_along(x)) %% fr.x] x <- matrix( format(x, ...), nrow = 1L, byrow = TRUE, dimnames = list(dn1, dn2) ) } else { start.pad <- start(x)[2L] - 1 end.pad <- fr.x - end(x)[2L] dn1 <- start(x)[1L]:end(x)[1L] x <- matrix( c(rep.int("", start.pad), format(x, ...), rep.int("", end.pad)), ncol = fr.x, byrow = TRUE, dimnames = list(dn1, dn2) ) } } else { attributes(x) <- NULL names(x) <- tx } } else { attr(x, "class") <- attr(x, "tsp") <- attr(x, "na.action") <- NULL } } else { if (calendar && fr.x > 1) { tm <- time(x) t2 <- cycle(x) p1 <- format(floor(tm + 1e-8)) rownames(x) <- if (fr.x == 12) { paste(month.abb[t2], p1, sep = " ") } else { paste( p1, if (fr.x == 4) { c("Q1", "Q2", "Q3", "Q4")[t2] } else { format(t2) }, sep = " " ) } } else { rownames(x) <- format(time(x), nsmall = nd) } attr(x, "class") <- attr(x, "tsp") <- attr(x, "na.action") <- NULL } return(as.data.frame(x)) } forecast/R/unitRoot.R0000644000176200001440000004051713553673010014241 0ustar liggesusers#' Number of differences required for a stationary series #' #' Functions to estimate the number of differences required to make a given #' time series stationary. \code{ndiffs} estimates the number of first #' differences necessary. 
#' #' \code{ndiffs} uses a unit root test to determine the number of differences #' required for time series \code{x} to be made stationary. If #' \code{test="kpss"}, the KPSS test is used with the null hypothesis that #' \code{x} has a stationary root against a unit-root alternative. Then the #' test returns the least number of differences required to pass the test at #' the level \code{alpha}. If \code{test="adf"}, the Augmented Dickey-Fuller #' test is used and if \code{test="pp"} the Phillips-Perron test is used. In #' both of these cases, the null hypothesis is that \code{x} has a unit root #' against a stationary root alternative. Then the test returns the least #' number of differences required to fail the test at the level \code{alpha}. #' #' @param x A univariate time series #' @param alpha Level of the test, possible values range from 0.01 to 0.1. #' @param test Type of unit root test to use #' @param type Specification of the deterministic component in the regression #' @param max.d Maximum number of non-seasonal differences allowed #' @param ... Additional arguments to be passed on to the unit root test #' @return An integer indicating the number of differences required for stationarity. #' @author Rob J Hyndman, Slava Razbash & Mitchell O'Hara-Wild #' @seealso \code{\link{auto.arima}} and \code{\link{ndiffs}} #' @references #' Dickey DA and Fuller WA (1979), "Distribution of the Estimators for #' Autoregressive Time Series with a Unit Root", \emph{Journal of the American #' Statistical Association} \bold{74}:427-431. #' #' Kwiatkowski D, Phillips PCB, Schmidt P and Shin Y (1992) "Testing the Null #' Hypothesis of Stationarity against the Alternative of a Unit Root", #' \emph{Journal of Econometrics} \bold{54}:159-178. #' #' Osborn, D.R. (1990) "A survey of seasonality in UK macroeconomic variables", #' \emph{International Journal of Forecasting}, \bold{6}:327-336. #' #' Phillips, P.C.B. and Perron, P. (1988) "Testing for a unit root in time series regression", #' \emph{Biometrika}, \bold{72}(2), 335-346. #' #' Said E and Dickey DA (1984), "Testing for Unit Roots in Autoregressive #' Moving Average Models of Unknown Order", \emph{Biometrika} #' \bold{71}:599-607. #' @keywords ts #' @examples #' ndiffs(WWWusage) #' ndiffs(diff(log(AirPassengers),12)) #' #' @importFrom urca ur.kpss ur.df ur.pp #' @export ndiffs <- function(x,alpha=0.05,test=c("kpss","adf","pp"), type=c("level", "trend"), max.d=2, ...) { test <- match.arg(test) type <- match(match.arg(type), c("level","trend")) x <- c(na.omit(c(x))) d <- 0 if(alpha < 0.01){ warning("Specified alpha value is less than the minimum, setting alpha=0.01") alpha <- 0.01 } else if(alpha > 0.1){ warning("Specified alpha value is larger than the maximum, setting alpha=0.1") alpha <- 0.1 } if(is.constant(x)) return(d) urca_pval <- function(urca_test){ approx(urca_test@cval[1,], as.numeric(sub("pct", "", colnames(urca_test@cval)))/100, xout=urca_test@teststat[1], rule=2)$y } kpss_wrap <- function(..., use.lag = trunc(3*sqrt(length(x))/13)){ ur.kpss(..., use.lag = use.lag) } runTests <- function(x, test, alpha){ tryCatch( {suppressWarnings( diff <- switch(test, kpss = urca_pval(kpss_wrap(x, type=c("mu","tau")[type], ...)) < alpha, adf = urca_pval(ur.df(x, type=c("drift","trend")[type], ...)) > alpha, pp = urca_pval(ur.pp(x, type="Z-tau", model=c("constant","trend")[type], ...)) > alpha, stop("This shouldn't happen")) ) diff }, error = function(e){ warning(call. 
= FALSE, sprintf( "The chosen unit root test encountered an error when testing for the %s difference. From %s(): %s %i differences will be used. Consider using a different unit root test.", switch(as.character(d), `0` = "first", `1` = "second", `2` = "third", paste0(d+1, "th")), deparse(e$call[[1]]), e$message, d ) ) FALSE } ) } dodiff <- runTests(x, test, alpha) if(is.na(dodiff)) { return(d) } while(dodiff && d < max.d) { d <- d+1 x <- diff(x) if(is.constant(x)) return(d) dodiff <- runTests(x, test, alpha) if(is.na(dodiff)) return(d-1) } return(d) } # Number of seasonal differences #' Number of differences required for a seasonally stationary series #' #' Functions to estimate the number of differences required to make a given #' time series stationary. \code{nsdiffs} estimates the number of seasonal differences #' necessary. #' #' \code{nsdiffs} uses seasonal unit root tests to determine the number of #' seasonal differences required for time series \code{x} to be made stationary #' (possibly with some lag-one differencing as well). #' #' Several different tests are available: #' * If \code{test="seas"} (default), a measure of seasonal strength is used, where differencing is #' selected if the seasonal strength (Wang, Smith & Hyndman, 2006) exceeds 0.64 #' (based on minimizing MASE when forecasting using auto.arima on M3 and M4 data). #' * If \code{test="ch"}, the Canova-Hansen (1995) test is used #' (with null hypothesis of deterministic seasonality) #' * If \code{test="hegy"}, the Hylleberg, Engle, Granger & Yoo (1990) test is used. #' * If \code{test="ocsb"}, the Osborn-Chui-Smith-Birchenhall #' (1988) test is used (with null hypothesis that a seasonal unit root exists). #' #' @md #' #' @inheritParams ndiffs #' @param x A univariate time series #' @param alpha Level of the test, possible values range from 0.01 to 0.1. #' @param test Type of unit root test to use #' @param m Deprecated. Length of seasonal period #' @param max.D Maximum number of seasonal differences allowed #' #' @return An integer indicating the number of differences required for stationarity. #' #' @references #' #' Wang, X, Smith, KA, Hyndman, RJ (2006) "Characteristic-based clustering #' for time series data", \emph{Data Mining and Knowledge Discovery}, #' \bold{13}(3), 335-364. #' #' Osborn DR, Chui APL, Smith J, and Birchenhall CR (1988) "Seasonality and the #' order of integration for consumption", \emph{Oxford Bulletin of Economics #' and Statistics} \bold{50}(4):361-377. #' #' Canova F and Hansen BE (1995) "Are Seasonal Patterns Constant #' over Time? A Test for Seasonal Stability", \emph{Journal of Business and #' Economic Statistics} \bold{13}(3):237-252. #' #' Hylleberg S, Engle R, Granger C and Yoo B (1990) "Seasonal integration #' and cointegration.", \emph{Journal of Econometrics} \bold{44}(1), pp. 215-238. #' #' @author Rob J Hyndman, Slava Razbash and Mitchell O'Hara-Wild #' #' @seealso \code{\link{auto.arima}}, \code{\link{ndiffs}}, \code{\link{ocsb.test}}, \code{\link[uroot]{hegy.test}}, and \code{\link[uroot]{ch.test}} #' #' @examples #' nsdiffs(AirPassengers) #' #' @export nsdiffs <- function(x, alpha = 0.05, m=frequency(x), test=c("seas", "ocsb", "hegy", "ch"), max.D=1, ...) 
{ test <- match.arg(test) D <- 0 if(alpha < 0.01){ warning("Specified alpha value is less than the minimum, setting alpha=0.01") alpha <- 0.01 } else if(alpha > 0.1){ warning("Specified alpha value is larger than the maximum, setting alpha=0.1") alpha <- 0.1 } if(test == "ocsb" && alpha != 0.05){ warning("Significance levels other than 5% are not currently supported by test='ocsb', defaulting to alpha = 0.05.") alpha <- 0.05 } if(test %in% c("hegy", "ch")){ if(!requireNamespace("uroot", quietly = TRUE)){ stop(paste0("Using a ", test, ' test requires the uroot package. Please install it using `install.packages("uroot")`')) } } if(is.constant(x)) return(D) if(!missing(m)){ warning("argument m is deprecated; please set the frequency in the ts object.", call. = FALSE) x <- ts(x, frequency = m) } if(frequency(x)==1) stop("Non seasonal data") else if(frequency(x) < 1) { warning("I can't handle data with frequency less than 1. Seasonality will be ignored.") return(0) } if(frequency(x) >= length(x)) return(0) # Can't take differences runTests <- function(x, test, alpha){ tryCatch( {suppressWarnings( diff <- switch(test, seas = seas.heuristic(x, ...) > 0.64, # Threshold chosen based on seasonal M3 auto.arima accuracy. ocsb = with(ocsb.test(x, maxlag = 3, lag.method = "AIC", ...), statistics>critical), hegy = tail(uroot::hegy.test(x, deterministic = c(1,1,0), maxlag = 3, lag.method = "AIC", ...)$pvalues, 2)[-2] > alpha, ch = uroot::ch.test(x, type = "trig", ...)$pvalues["joint"] < alpha) ) stopifnot(diff %in% c(0,1)) diff }, error = function(e){ warning(call. = FALSE, sprintf( "The chosen seasonal unit root test encountered an error when testing for the %s difference. From %s(): %s %i seasonal differences will be used. Consider using a different unit root test.", switch(as.character(D), `0` = "first", `1` = "second", `2` = "third", paste0(D+1, "th")), deparse(e$call[[1]]), e$message, D ) ) 0 } ) } dodiff <- runTests(x, test, alpha) if(dodiff && frequency(x) %% 1 != 0){ warning("The time series frequency has been rounded to support seasonal differencing.", call. = FALSE) x <- ts(x, frequency = round(frequency(x))) } while(dodiff==1 && D < max.D) { D <- D + 1 x <- diff(x, lag=frequency(x)) if(is.constant(x)) return(D) dodiff <- runTests(x, test, alpha) } return(D) } # Adjusted from robjhyndman/tsfeatures seas.heuristic <- function(x){ if ("msts" %in% class(x)) { msts <- attributes(x)$msts nperiods <- length(msts) } else if ("ts" %in% class(x)) { msts <- frequency(x) nperiods <- msts > 1 season <- 0 } else { stop("The object provided must be a time-series object (`msts` or `ts`)") } season <- NA stlfit <- mstl(x) remainder <- stlfit[, "Remainder"] seasonal <- stlfit[, grep("Season", colnames(stlfit)), drop = FALSE] vare <- var(remainder, na.rm = TRUE) nseas <- NCOL(seasonal) if (nseas > 0) { season <- numeric(nseas) for (i in seq(nseas)) season[i] <- max(0, min(1, 1 - vare/var(remainder + seasonal[, i], na.rm = TRUE))) } return(season) } # Model specification from Osborn DR, Chui APL, Smith J, and Birchenhall CR (1988) "Seasonality and the order of integration for consumption", Oxford Bulletin of Economics and Statistics 50(4):361-377. # # $\Delta\Delta_m X_t = \beta_1Z_{4,t-1} + \beta_2Z_{5,t-m} + \alpha_1\Delta\Delta_mX_{t-1} + \ldots + \alpha_p\Delta\Delta_mX_{t-p}$ # Where $Z_{4,t} = \hat{\lambda}(B)\Delta_mX_t$, $Z_{5,t} = \hat{\lambda}(B)\Delta X_t$, and $\hat{\lambda}(B)$ is an AR(p) lag operator with coefficients from an estimated AR(p) process of $\Delta\Delta_m X_t$. 
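# Illustrative sketch (not part of the package, and never executed): it shows
# the shape of the OCSB regression above with the lambda(B) filtering omitted.
# The full construction, including the AR(p) filter and lag selection, is in
# fitOCSB() inside ocsb.test() below.
if (FALSE) {
  x <- AirPassengers
  m <- frequency(x)
  dd <- diff(diff(x, lag = m))           # (1-B)(1-B^m) X_t, the regressand
  Z4 <- stats::lag(diff(x, lag = m), -1) # unfiltered Z_{4,t-1} = (1-B^m) X_{t-1}
  Z5 <- stats::lag(diff(x), -m)          # unfiltered Z_{5,t-m} = (1-B) X_{t-m}
  dat <- as.data.frame(na.omit(cbind(dd = dd, Z4 = Z4, Z5 = Z5)))
  summary(lm(dd ~ 0 + Z4 + Z5, data = dat)) # t-statistics on Z4 and Z5
}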
#' Osborn, Chui, Smith, and Birchenhall Test for Seasonal Unit Roots #' #' An implementation of the Osborn, Chui, Smith, and Birchenhall (OCSB) test. #' #' @inheritParams uroot::hegy.test #' @aliases print.OCSBtest #' @details #' The regression equation may include lags of the dependent variable. When lag.method = "fixed", the lag order is fixed to maxlag; otherwise, maxlag is the maximum number of lags considered in a lag selection procedure that minimises the lag.method criterion. The criterion can be AIC, BIC, or the corrected AIC (AICc), obtained as AIC + (2k(k+1))/(n-k-1), where k is the number of parameters and n is the number of available observations in the model. #' #' Critical values for the test are based on simulations, which have been smoothed to produce critical values for all seasonal periods. #' #' @return #' ocsb.test returns a list of class "OCSBtest" with the following components: #' * statistics the value of the test statistic. #' * pvalues the p-values for each test statistic. #' * method a character string describing the type of test. #' * data.name a character string giving the name of the data. #' * fitted.model the fitted regression model. #' #' @references #' Osborn DR, Chui APL, Smith J, and Birchenhall CR (1988) "Seasonality and the #' order of integration for consumption", \emph{Oxford Bulletin of Economics #' and Statistics} \bold{50}(4):361-377. #' #' @seealso \code{\link{nsdiffs}} #' #' @examples #' ocsb.test(AirPassengers) #' #' @importFrom stats AIC BIC #' #' @export ocsb.test <- function(x, lag.method = c("fixed", "AIC", "BIC", "AICc"), maxlag = 0) { lag.method <- match.arg(lag.method) sname <- deparse(substitute(x)) period <- round(frequency(x)) # Avoid non-integer seasonal period if(period == 1){ stop("Data must be seasonal to use `ocsb.test`.
Check your ts frequency.") } genLags <- function(y, maxlag){ if(maxlag == 0){ return(ts(numeric(NROW(y)), start = start(y), frequency = frequency(y))) } out <- do.call(cbind, lapply(seq_len(maxlag), function(k) stats::lag(y, -k))) if(NCOL(out) > 1){ colnames(out) <- paste0("lag_", seq_len(maxlag)) } return(out) } fitOCSB <- function(x, lag, maxlag){ period <- round(frequency(x)) # Avoid non-integer seasonal period # Compute (1-B)(1-B^m)y_t y <- diff(diff(x, period)) ylag <- genLags(y, lag) if(maxlag > 0){ # Ensure models are fitted on same length for lag order selection via lag.method y <- tail(y, -maxlag) } mf <- na.omit(cbind(y=y, x=ylag)) # Estimate lambda(B) coefficients ar.fit <- lm(y ~ 0 + ., data = mf) # Compute lambda(B)(1-B^m)y_{t-1} Z4_frame <- na.omit(cbind(y=diff(x, period), x=genLags(diff(x, period), lag))) Z4 <- Z4_frame[,"y"] - suppressWarnings(predict(ar.fit, Z4_frame)) # Compute lambda(B)(1-B)y_{t-m} Z5_frame <- na.omit(cbind(y=diff(x), x=genLags(diff(x), lag))) Z5 <- Z5_frame[,"y"] - suppressWarnings(predict(ar.fit, Z5_frame)) # Combine regressors data <- na.omit(cbind(mf, Z4 = stats::lag(Z4, -1), Z5 = stats::lag(Z5, -period))) y <- data[,1] xreg <- data[,-1] lm(y ~ 0 + xreg) } # Estimate maxlag if (maxlag > 0) { if (lag.method != "fixed"){ tmp <- vector("list", maxlag + 1) fits <- lapply(seq_len(maxlag), function(lag) fitOCSB(x, lag, maxlag)) icvals <- unlist(switch(lag.method, AIC = lapply(fits, AIC), BIC = lapply(fits, BIC), AICc = lapply(fits, function(x) { k <- x$rank + 1 -2 * logLik(x) + 2 * k + (2 * k * (k + 1))/(length(residuals(x)) - k - 1) }) ) ) id <- which.min(icvals) maxlag <- id - 1 } } regression <- fitOCSB(x, maxlag, maxlag) #if(any(is.na(regression$coefficients))) # stop("Model did not reach a solution. Check the time series data.") stat <- summary(regression)$coefficients[c("xregZ4", "xregZ5"), "t value"] if(any(is.na(stat))){ stop("Model did not reach a solution. Consider using a longer series or a different test.") } structure(list(statistics = stat[2], critical = calcOCSBCritVal(period), method = "OCSB test", lag.method = lag.method, lag.order = maxlag, fitted.model = regression, data.name = sname), class = "OCSBtest") } # Return critical values for OCSB test at 5% level # Approximation based on extensive simulations. 
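# The polynomial below is a smooth function of log(m) fitted to those
# simulated 5% critical values. For orientation (values hand-computed from the
# formula, shown approximately): monthly data (m = 12) gives about -1.803,
# quarterly data (m = 4) about -1.893.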
calcOCSBCritVal <- function(seasonal.period) { log.m <- log(seasonal.period) return(-0.2937411*exp(-0.2850853*(log.m-0.7656451)+(-0.05983644)*((log.m-0.7656451)^2))-1.652202) } #' @export print.OCSBtest <- function(x, ...){ cat("\n") cat(strwrap(x$method, prefix = "\t"), sep = "\n") cat("\n") cat("data: ", x$data.name, "\n\n", sep = "") cat(paste0("Test statistic: ", round(x$statistics, 4), ", 5% critical value: ", round(x$critical, 4))) cat("\n") cat("alternative hypothesis: stationary") cat("\n\n") cat(paste0("Lag order ", x$lag.order, " was selected using ", x$lag.method)) }forecast/R/fitBATS.R0000644000176200001440000005332613553673010013654 0ustar liggesusers# TODO: # # Author: srazbash ############################################################################### fitPreviousBATSModel <- function(y, model, biasadj=FALSE) { seasonal.periods <- model$seasonal.periods if (is.null(seasonal.periods) == FALSE) { seasonal.periods <- as.integer(sort(seasonal.periods)) } paramz <- unParameterise(model$parameters$vect, model$parameters$control) lambda <- paramz$lambda alpha <- paramz$alpha beta.v <- paramz$beta small.phi <- paramz$small.phi gamma.v <- paramz$gamma.v ar.coefs <- paramz$ar.coefs ma.coefs <- paramz$ma.coefs p <- length(ar.coefs) q <- length(ma.coefs) ## Calculate the variance: # 1. Re-set up the matrices w <- .Call("makeBATSWMatrix", smallPhi_s = small.phi, sPeriods_s = seasonal.periods, arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, PACKAGE = "forecast") g <- .Call("makeBATSGMatrix", as.numeric(alpha), beta.v, gamma.v, seasonal.periods, as.integer(p), as.integer(q), PACKAGE = "forecast") F <- makeFMatrix(alpha = alpha, beta = beta.v, small.phi = small.phi, seasonal.periods = seasonal.periods, gamma.bold.matrix = g$gamma.bold.matrix, ar.coefs = ar.coefs, ma.coefs = ma.coefs) # 2. Calculate! y.touse <- y if (!is.null(lambda)) { y.touse <- BoxCox(y, lambda = lambda) lambda <- attr(y.touse, "lambda") } fitted.values.and.errors <- calcModel(y.touse, model$seed.states, F, g$g, w) e <- fitted.values.and.errors$e fitted.values <- fitted.values.and.errors$y.hat variance <- sum((e * e)) / length(y) if (!is.null(lambda)) { fitted.values <- InvBoxCox(fitted.values, lambda = lambda, biasadj, variance) } model.for.output <- model model.for.output$variance <- variance model.for.output$fitted.values <- c(fitted.values) model.for.output$errors <- c(e) model.for.output$x <- fitted.values.and.errors$x model.for.output$y <- y attributes(model.for.output$fitted.values) <- attributes(model.for.output$errors) <- attributes(y) return(model.for.output) } fitSpecificBATS <- function(y, use.box.cox, use.beta, use.damping, seasonal.periods=NULL, starting.params=NULL, x.nought=NULL, ar.coefs=NULL, ma.coefs=NULL, init.box.cox=NULL, bc.lower=0, bc.upper=1, biasadj=FALSE) { if (!is.null(seasonal.periods)) { seasonal.periods <- as.integer(sort(seasonal.periods)) } ## Meaning/purpose of the first if() statement: If this is the first pass, then use default starting values. Else if it is the second pass, then use the values from the first pass as starting values.
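  # First pass: no starting.params supplied, so initialise the smoothing
  # parameters with heuristic defaults; note the much smaller alpha and beta
  # used when the seasonal periods sum to more than 16.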
if (is.null(starting.params)) { ## Check for the existence of ARMA() coefficients if (!is.null(ar.coefs)) { p <- length(ar.coefs) } else { p <- 0 } if (!is.null(ma.coefs)) { q <- length(ma.coefs) } else { q <- 0 } # Calculate starting values: if (sum(seasonal.periods) > 16) { alpha <- (1e-6) } else { alpha <- .09 } if (use.beta) { if (sum(seasonal.periods) > 16) { beta.v <- (5e-7) } else { beta.v <- .05 } b <- 0.00 if (use.damping) { small.phi <- .999 } else { small.phi <- 1 } } else { beta.v <- NULL b <- NULL small.phi <- NULL use.damping <- FALSE } if (!is.null(seasonal.periods)) { gamma.v <- rep(.001, length(seasonal.periods)) s.vector <- numeric(sum(seasonal.periods)) # for(s in seasonal.periods) { # s.vector <- cbind(s.vector, numeric(s)) # } } else { gamma.v <- NULL s.vector <- NULL } if (use.box.cox) { if (!is.null(init.box.cox)) { lambda <- init.box.cox } else { lambda <- BoxCox.lambda(y, lower = 0, upper = 1.5) } y.transformed <- BoxCox(y, lambda = lambda) lambda <- attr(y.transformed, "lambda") } else { # the "else" is not needed at the moment lambda <- NULL } } else { paramz <- unParameterise(starting.params$vect, starting.params$control) lambda <- paramz$lambda alpha <- paramz$alpha beta.v <- paramz$beta b <- 0 small.phi <- paramz$small.phi gamma.v <- paramz$gamma.v if (!is.null(seasonal.periods)) { s.vector <- numeric(sum(seasonal.periods)) } else { s.vector <- NULL } # ar.coefs <- paramz$ar.coefs # ma.coefs <- paramz$ma.coefs ## Check for the existence of ARMA() coefficients if (!is.null(ar.coefs)) { p <- length(ar.coefs) } else { p <- 0 } if (!is.null(ma.coefs)) { q <- length(ma.coefs) } else { q <- 0 } } if (is.null(x.nought)) { # Start with the seed states equal to zero if (!is.null(ar.coefs)) { d.vector <- numeric(length(ar.coefs)) } else { d.vector <- NULL } if (!is.null(ma.coefs)) { epsilon.vector <- numeric(length(ma.coefs)) } else { epsilon.vector <- NULL } x.nought <- makeXMatrix(l = 0, b = b, s.vector = s.vector, d.vector = d.vector, epsilon.vector = epsilon.vector)$x } ## Optimise the starting values: # Make the parameter vector parameterise param.vector <- parameterise(alpha = alpha, beta.v = beta.v, small.phi = small.phi, gamma.v = gamma.v, lambda = lambda, ar.coefs = ar.coefs, ma.coefs = ma.coefs) par.scale <- makeParscaleBATS(param.vector$control) # w <- makeWMatrix(small.phi=small.phi, seasonal.periods=seasonal.periods, ar.coefs=ar.coefs, ma.coefs=ma.coefs) w <- .Call("makeBATSWMatrix", smallPhi_s = small.phi, sPeriods_s = seasonal.periods, arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, PACKAGE = "forecast") # g <- makeGMatrix(alpha=alpha, beta=beta.v, gamma.vector=gamma, seasonal.periods=seasonal.periods, p=p, q=q) g <- .Call("makeBATSGMatrix", as.numeric(alpha), beta.v, gamma.v, seasonal.periods, as.integer(p), as.integer(q), PACKAGE = "forecast") F <- makeFMatrix(alpha = alpha, beta = beta.v, small.phi = small.phi, seasonal.periods = seasonal.periods, gamma.bold.matrix = g$gamma.bold.matrix, ar.coefs = ar.coefs, ma.coefs = ma.coefs) D <- F - g$g %*% w$w.transpose ## Set up matrices to find the seed states if (use.box.cox) { y.transformed <- BoxCox(y, lambda = lambda) lambda <- attr(y.transformed, "lambda") # x.nought <- BoxCox(x.nought, lambda=lambda) y.tilda <- calcModel(y.transformed, x.nought, F, g$g, w)$e } else { y.tilda <- calcModel(y, x.nought, F, g$g, w)$e } w.tilda.transpose <- matrix(0, nrow = length(y), ncol = ncol(w$w.transpose)) w.tilda.transpose[1, ] <- w$w.transpose # for(i in 2:length(y)) { # w.tilda.transpose[i,] <- w.tilda.transpose[(i-1),] 
%*% D # } w.tilda.transpose <- .Call( "calcWTilda", wTildaTransposes = w.tilda.transpose, Ds = D, PACKAGE = "forecast" ) ## If there is a seasonal component in the model, then the follow adjustment need to be made so that the seed states can be found if (!is.null(seasonal.periods)) { # drop the lines from w.tilda.transpose that correspond to the last seasonal value of each seasonal period list.cut.w <- cutW(use.beta = use.beta, w.tilda.transpose = w.tilda.transpose, seasonal.periods = seasonal.periods, p = p, q = q) w.tilda.transpose <- list.cut.w$matrix mask.vector <- list.cut.w$mask.vector ## Run the regression to find the SEED STATES coefs <- lm(t(y.tilda) ~ w.tilda.transpose - 1)$coefficients ## Find the ACTUAL SEASONAL seed states x.nought <- calcSeasonalSeeds(use.beta = use.beta, coefs = coefs, seasonal.periods = seasonal.periods, mask.vector = mask.vector, p = p, q = q) } else { # Remove the AR() and MA() bits if they exist if ((p != 0) | (q != 0)) { end.cut <- ncol(w.tilda.transpose) start.cut <- end.cut - (p + q) + 1 w.tilda.transpose <- w.tilda.transpose[, -c(start.cut:end.cut)] } x.nought <- lm(t(y.tilda) ~ w.tilda.transpose - 1)$coefficients x.nought <- matrix(x.nought, nrow = length(x.nought), ncol = 1) ## Replace the AR() and MA() bits if they exist if ((p != 0) | (q != 0)) { arma.seed.states <- numeric((p + q)) arma.seed.states <- matrix(arma.seed.states, nrow = length(arma.seed.states), ncol = 1) x.nought <- rbind(x.nought, arma.seed.states) } } #### # Set up environment opt.env <- new.env() assign("F", F, envir = opt.env) assign("w.transpose", w$w.transpose, envir = opt.env) assign("g", g$g, envir = opt.env) assign("gamma.bold.matrix", g$gamma.bold.matrix, envir = opt.env) assign("y", matrix(y, nrow = 1, ncol = length(y)), envir = opt.env) assign("y.hat", matrix(0, nrow = 1, ncol = length(y)), envir = opt.env) assign("e", matrix(0, nrow = 1, ncol = length(y)), envir = opt.env) assign("x", matrix(0, nrow = length(x.nought), ncol = length(y)), envir = opt.env) if (!is.null(seasonal.periods)) { tau <- sum(seasonal.periods) } else { tau <- 0 } ## Second pass of optimisation if (use.box.cox) { # Un-transform the seed states # x.nought.untransformed <- InvBoxCox(x.nought, lambda=lambda) assign("x.nought.untransformed", InvBoxCox(x.nought, lambda = lambda), envir = opt.env) # Optimise the likelihood function optim.like <- optim(par = param.vector$vect, fn = calcLikelihood, method = "Nelder-Mead", opt.env = opt.env, use.beta = use.beta, use.small.phi = use.damping, seasonal.periods = seasonal.periods, p = p, q = q, tau = tau, bc.lower = bc.lower, bc.upper = bc.upper, control = list(maxit = (100 * length(param.vector$vect) ^ 2), parscale = par.scale)) # Get the parameters out of the param.vector paramz <- unParameterise(optim.like$par, param.vector$control) lambda <- paramz$lambda alpha <- paramz$alpha beta.v <- paramz$beta small.phi <- paramz$small.phi gamma.v <- paramz$gamma.v ar.coefs <- paramz$ar.coefs ma.coefs <- paramz$ma.coefs # Transform the seed states x.nought <- BoxCox(opt.env$x.nought.untransformed, lambda = lambda) lambda <- attr(x.nought, "lambda") ## Calculate the variance: # 1. 
Re-set up the matrices # w <- makeWMatrix(small.phi=small.phi, seasonal.periods=seasonal.periods, ar.coefs=ar.coefs, ma.coefs=ma.coefs) w <- .Call("makeBATSWMatrix", smallPhi_s = small.phi, sPeriods_s = seasonal.periods, arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, PACKAGE = "forecast") # g <- makeGMatrix(alpha=alpha, beta=beta.v, gamma.vector=gamma, seasonal.periods=seasonal.periods, p=p, q=q) g <- .Call("makeBATSGMatrix", as.numeric(alpha), beta.v, gamma.v, seasonal.periods, as.integer(p), as.integer(q), PACKAGE = "forecast") F <- makeFMatrix(alpha = alpha, beta = beta.v, small.phi = small.phi, seasonal.periods = seasonal.periods, gamma.bold.matrix = g$gamma.bold.matrix, ar.coefs = ar.coefs, ma.coefs = ma.coefs) # 2. Calculate! y.transformed <- BoxCox(y, lambda = lambda) lambda <- attr(y.transformed, "lambda") fitted.values.and.errors <- calcModel(y.transformed, x.nought, F, g$g, w) e <- fitted.values.and.errors$e variance <- sum((e * e)) / length(y) fitted.values <- InvBoxCox(fitted.values.and.errors$y.hat, lambda = lambda, biasadj, variance) attr(lambda, "biasadj") <- biasadj # e <- InvBoxCox(e, lambda=lambda) # ee <- y-fitted.values } else { # else if we are not using the Box-Cox transformation # Optimise the likelihood function if (length(param.vector$vect) > 1) { optim.like <- optim(par = param.vector$vect, fn = calcLikelihoodNOTransformed, method = "Nelder-Mead", opt.env = opt.env, x.nought = x.nought, use.beta = use.beta, use.small.phi = use.damping, seasonal.periods = seasonal.periods, p = p, q = q, tau = tau, control = list(maxit = (100 * length(param.vector$vect) ^ 2), parscale = par.scale)) } else { optim.like <- optim(par = param.vector$vect, fn = calcLikelihoodNOTransformed, method = "BFGS", opt.env = opt.env, x.nought = x.nought, use.beta = use.beta, use.small.phi = use.damping, seasonal.periods = seasonal.periods, p = p, q = q, tau = tau, control = list(parscale = par.scale)) } # Get the parameters out of the param.vector paramz <- unParameterise(optim.like$par, param.vector$control) lambda <- paramz$lambda alpha <- paramz$alpha beta.v <- paramz$beta small.phi <- paramz$small.phi gamma.v <- paramz$gamma.v ar.coefs <- paramz$ar.coefs ma.coefs <- paramz$ma.coefs ## Calculate the variance: # 1. Re-set up the matrices # w <- makeWMatrix(small.phi=small.phi, seasonal.periods=seasonal.periods, ar.coefs=ar.coefs, ma.coefs=ma.coefs) w <- .Call("makeBATSWMatrix", smallPhi_s = small.phi, sPeriods_s = seasonal.periods, arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, PACKAGE = "forecast") # g <- makeGMatrix(alpha=alpha, beta=beta.v, gamma.vector=gamma, seasonal.periods=seasonal.periods, p=p, q=q) g <- .Call("makeBATSGMatrix", as.numeric(alpha), beta.v, gamma.v, seasonal.periods, as.integer(p), as.integer(q), PACKAGE = "forecast") F <- makeFMatrix(alpha = alpha, beta = beta.v, small.phi <- small.phi, seasonal.periods = seasonal.periods, gamma.bold.matrix = g$gamma.bold.matrix, ar.coefs = ar.coefs, ma.coefs = ma.coefs) # 2. Calculate! 
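  # One-step-ahead fitted values and residuals on the original (untransformed)
  # scale; the variance estimate below is the mean squared residual.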
fitted.values.and.errors <- calcModel(y, x.nought, F, g$g, w) e <- fitted.values.and.errors$e fitted.values <- fitted.values.and.errors$y.hat variance <- sum((e * e)) / length(y) } # Get the likelihood likelihood <- optim.like$value # Calculate the AIC aic <- likelihood + 2 * (length(param.vector$vect) + nrow(x.nought)) # Make a list object model.for.output <- list(lambda = lambda, alpha = alpha, beta = beta.v, damping.parameter = small.phi, gamma.values = gamma.v, ar.coefficients = ar.coefs, ma.coefficients = ma.coefs, likelihood = likelihood, optim.return.code = optim.like$convergence, variance = variance, AIC = aic, parameters = list(vect = optim.like$par, control = param.vector$control), seed.states = x.nought, fitted.values = c(fitted.values), errors = c(e), x = fitted.values.and.errors$x, seasonal.periods = seasonal.periods, y = y) class(model.for.output) <- "bats" #### return(model.for.output) } calcModel <- function(y, x.nought, F, g, w) { # w is passed as a list length.ts <- length(y) x <- matrix(0, nrow = length(x.nought), ncol = length.ts) y.hat <- matrix(0, nrow = 1, ncol = length.ts) e <- matrix(0, nrow = 1, ncol = length.ts) y.hat[, 1] <- w$w.transpose %*% x.nought e[, 1] <- y[1] - y.hat[, 1] x[, 1] <- F %*% x.nought + g %*% e[, 1] y <- matrix(y, nrow = 1, ncol = length.ts) loop <- .Call("calcBATS", ys = y, yHats = y.hat, wTransposes = w$w.transpose, Fs = F, xs = x, gs = g, es = e, PACKAGE = "forecast") return(list(y.hat = loop$y.hat, e = loop$e, x = loop$x)) } calcLikelihood <- function(param.vector, opt.env, use.beta, use.small.phi, seasonal.periods, p=0, q=0, tau=0, bc.lower=0, bc.upper=1) { # param vector should be as follows: Box-Cox.parameter, alpha, beta, small.phi, gamma.vector, ar.coefs, ma.coefs # Put the components of the param.vector into meaningful individual variables box.cox.parameter <- param.vector[1] alpha <- param.vector[2] if (use.beta) { if (use.small.phi) { small.phi <- param.vector[3] beta.v <- param.vector[4] gamma.start <- 5 } else { small.phi <- 1 beta.v <- param.vector[3] gamma.start <- 4 } } else { small.phi <- NULL beta.v <- NULL gamma.start <- 3 } if (!is.null(seasonal.periods)) { gamma.vector <- param.vector[gamma.start:(gamma.start + length(seasonal.periods) - 1)] final.gamma.pos <- gamma.start + length(gamma.vector) - 1 } else { gamma.vector <- NULL final.gamma.pos <- gamma.start - 1 } if (p != 0) { ar.coefs <- matrix(param.vector[(final.gamma.pos + 1):(final.gamma.pos + p)], nrow = 1, ncol = p) } else { ar.coefs <- NULL } if (q != 0) { ma.coefs <- matrix(param.vector[(final.gamma.pos + p + 1):length(param.vector)], nrow = 1, ncol = q) } else { ma.coefs <- NULL } x.nought <- BoxCox(opt.env$x.nought.untransformed, lambda = box.cox.parameter) lambda <- attr(x.nought, "lambda") # w <- makeWMatrix(small.phi=small.phi, seasonal.periods=seasonal.periods, ar.coefs=ar.coefs, ma.coefs=ma.coefs) # w <- .Call("makeBATSWMatrix", smallPhi_s = small.phi, sPeriods_s = seasonal.periods, arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, PACKAGE = "forecast") .Call("updateWtransposeMatrix", wTranspose_s = opt.env$w.transpose, smallPhi_s = small.phi, tau_s = as.integer(tau), arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, p_s = as.integer(p), q_s = as.integer(q), PACKAGE = "forecast") # g <- makeGMatrix(alpha=alpha, beta=beta, gamma.vector=gamma.vector, seasonal.periods=seasonal.periods, p=p, q=q) # g <- .Call("makeBATSGMatrix", as.numeric(alpha), beta.v, gamma.vector, seasonal.periods, as.integer(p), as.integer(q), PACKAGE="forecast") .Call("updateGMatrix", g_s = 
opt.env$g, gammaBold_s = opt.env$gamma.bold.matrix, alpha_s = alpha, beta_s = beta.v, gammaVector_s = gamma.vector, seasonalPeriods_s = seasonal.periods, PACKAGE = "forecast") # F <- makeFMatrix(alpha=alpha, beta=beta.v, small.phi=small.phi, seasonal.periods=seasonal.periods, gamma.bold.matrix=g$gamma.bold.matrix, ar.coefs=ar.coefs, ma.coefs=ma.coefs) .Call("updateFMatrix", opt.env$F, small.phi, alpha, beta.v, opt.env$gamma.bold.matrix, ar.coefs, ma.coefs, tau, PACKAGE = "forecast") mat.transformed.y <- BoxCox(opt.env$y, box.cox.parameter) lambda <- attr(mat.transformed.y, "lambda") n <- ncol(opt.env$y) .Call("calcBATSFaster", ys = mat.transformed.y, yHats = opt.env$y.hat, wTransposes = opt.env$w.transpose, Fs = opt.env$F, xs = opt.env$x, gs = opt.env$g, es = opt.env$e, xNought_s = x.nought, sPeriods_s = seasonal.periods, betaV = beta.v, tau_s = as.integer(tau), p_s = as.integer(p), q_s = as.integer(q), PACKAGE = "forecast") log.likelihood <- n * log(sum(opt.env$e ^ 2)) - 2 * (box.cox.parameter - 1) * sum(log(opt.env$y)) assign("D", (opt.env$F - opt.env$g %*% opt.env$w.transpose), envir = opt.env) if (checkAdmissibility(opt.env, box.cox = box.cox.parameter, small.phi = small.phi, ar.coefs = ar.coefs, ma.coefs = ma.coefs, tau = tau, bc.lower = bc.lower, bc.upper = bc.upper)) { return(log.likelihood) } else { return(10 ^ 20) } } calcLikelihoodNOTransformed <- function(param.vector, opt.env, x.nought, use.beta, use.small.phi, seasonal.periods, p=0, q=0, tau=0) { # The likelihood function without the Box-Cox Transformation # param vector should be as follows: alpha, beta, small.phi, gamma.vector, ar.coefs, ma.coefs # Put the components of the param.vector into meaningful individual variables alpha <- param.vector[1] if (use.beta) { if (use.small.phi) { small.phi <- param.vector[2] beta.v <- param.vector[3] gamma.start <- 4 } else { small.phi <- 1 beta.v <- param.vector[2] gamma.start <- 3 } } else { small.phi <- NULL beta.v <- NULL gamma.start <- 2 } if (!is.null(seasonal.periods)) { gamma.vector <- param.vector[gamma.start:(gamma.start + length(seasonal.periods) - 1)] final.gamma.pos <- gamma.start + length(gamma.vector) - 1 } else { gamma.vector <- NULL final.gamma.pos <- gamma.start - 1 } if (p != 0) { ar.coefs <- matrix(param.vector[(final.gamma.pos + 1):(final.gamma.pos + p)], nrow = 1, ncol = p) } else { ar.coefs <- NULL } if (q != 0) { ma.coefs <- matrix(param.vector[(final.gamma.pos + p + 1):length(param.vector)], nrow = 1, ncol = q) } else { ma.coefs <- NULL } # w <- makeWMatrix(small.phi=small.phi, seasonal.periods=seasonal.periods, ar.coefs=ar.coefs, ma.coefs=ma.coefs) # w <- .Call("makeBATSWMatrix", smallPhi_s = small.phi, sPeriods_s = seasonal.periods, arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, PACKAGE = "forecast") .Call("updateWtransposeMatrix", wTranspose_s = opt.env$w.transpose, smallPhi_s = small.phi, tau_s = as.integer(tau), arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, p_s = as.integer(p), q_s = as.integer(q), PACKAGE = "forecast") # g <- makeGMatrix(alpha=alpha, beta=beta, gamma.vector=gamma.vector, seasonal.periods=seasonal.periods, p=p, q=q) # g <- .Call("makeBATSGMatrix", alpha, beta.v, gamma.vector, seasonal.periods, as.integer(p), as.integer(q), PACKAGE="forecast") .Call("updateGMatrix", g_s = opt.env$g, gammaBold_s = opt.env$gamma.bold.matrix, alpha_s = alpha, beta_s = beta.v, gammaVector_s = gamma.vector, seasonalPeriods_s = seasonal.periods, PACKAGE = "forecast") # F <- makeFMatrix(alpha=alpha, beta=beta.v, small.phi=small.phi, seasonal.periods=seasonal.periods, 
gamma.bold.matrix=g$gamma.bold.matrix, ar.coefs=ar.coefs, ma.coefs=ma.coefs) .Call("updateFMatrix", opt.env$F, small.phi, alpha, beta.v, opt.env$gamma.bold.matrix, ar.coefs, ma.coefs, tau, PACKAGE = "forecast") n <- ncol(opt.env$y) ######################################################################################### # e <- calcModel(y=y, x.nought=x.nought, F=F, g=g$g, w=w)$e ###################### #### calcModel() code: ## # x <- matrix(0, nrow=length(x.nought), ncol=n) # y.hat <- matrix(0,nrow=1, ncol=n) # e <- matrix(0, nrow=1, ncol=n) # opt.env$y.hat[,1] <- w$w.transpose %*% x.nought # opt.env$e[,1] <- opt.env$y[,1]-opt.env$y.hat[,1] # opt.env$x[,1] <- opt.env$F %*% x.nought + g$g %*% opt.env$e[,1] # mat.y <- matrix(opt.env$y, nrow=1, ncol=n) .Call("calcBATSFaster", ys = opt.env$y, yHats = opt.env$y.hat, wTransposes = opt.env$w.transpose, Fs = opt.env$F, xs = opt.env$x, gs = opt.env$g, es = opt.env$e, xNought_s = x.nought, sPeriods_s = seasonal.periods, betaV = beta.v, tau_s = as.integer(tau), p_s = as.integer(p), q_s = as.integer(q), PACKAGE = "forecast") ## #### #################################################################### log.likelihood <- n * log(sum(opt.env$e * opt.env$e)) # D <- opt.env$F - g$g %*% w$w.transpose assign("D", (opt.env$F - opt.env$g %*% opt.env$w.transpose), envir = opt.env) if (checkAdmissibility(opt.env = opt.env, box.cox = NULL, small.phi = small.phi, ar.coefs = ar.coefs, ma.coefs = ma.coefs, tau = tau)) { return(log.likelihood) } else { return(10 ^ 20) } } forecast/R/tscv.R0000644000176200001440000002011313553673010013363 0ustar liggesusers# Time series cross-validation # y is a time series # forecastfunction must return an object of class forecast # h is number of steps ahead to forecast # ... are passed to forecastfunction #' Time series cross-validation #' #' \code{tsCV} computes the forecast errors obtained by applying #' \code{forecastfunction} to subsets of the time series \code{y} using a #' rolling forecast origin. #' #' Let \code{y} contain the time series \eqn{y_1,\dots,y_T}{y[1:T]}. Then #' \code{forecastfunction} is applied successively to the time series #' \eqn{y_1,\dots,y_t}{y[1:t]}, for \eqn{t=1,\dots,T-h}, making predictions #' \eqn{\hat{y}_{t+h|t}}{f[t+h]}. The errors are given by \eqn{e_{t+h} = #' y_{t+h}-\hat{y}_{t+h|t}}{e[t+h] = y[t+h]-f[t+h]}. If h=1, these are returned as a #' vector, \eqn{e_1,\dots,e_T}{e[1:T]}. For h>1, they are returned as a matrix with #' the hth column containing errors for forecast horizon h. #' The first few errors may be missing as #' it may not be possible to apply \code{forecastfunction} to very short time #' series. #' #' @param y Univariate time series #' @param forecastfunction Function to return an object of class #' \code{forecast}. Its first argument must be a univariate time series, and it #' must have an argument \code{h} for the forecast horizon. #' @param h Forecast horizon #' @param window Length of the rolling window, if NULL, a rolling window will not be used. #' @param xreg Exogeneous predictor variables passed to the forecast function if required. #' @param initial Initial period of the time series where no cross-validation is performed. #' @param ... Other arguments are passed to \code{forecastfunction}. #' @return Numerical time series object containing the forecast errors as a vector (if h=1) #' and a matrix otherwise. The time index corresponds to the last period of the training #' data. The columns correspond to the forecast horizons. 
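#' When \code{window} is supplied, the first forecast origin is at time
#' \code{window + initial}, so that every training set contains exactly
#' \code{window} observations.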
#' @author Rob J Hyndman #' @seealso \link{CV}, \link{CVar}, \link{residuals.Arima}, \url{https://robjhyndman.com/hyndsight/tscv/}. #' #' @keywords ts #' @examples #' #' #Fit an AR(2) model to each rolling origin subset #' far2 <- function(x, h){forecast(Arima(x, order=c(2,0,0)), h=h)} #' e <- tsCV(lynx, far2, h=1) #' #' #Fit the same model with a rolling window of length 30 #' e <- tsCV(lynx, far2, h=1, window=30) #' #' @export tsCV <- function(y, forecastfunction, h=1, window=NULL, xreg=NULL, initial=0, ...) { y <- as.ts(y) n <- length(y) e <- ts(matrix(NA_real_, nrow = n, ncol = h)) if(initial >= n) stop("initial period too long") tsp(e) <- tsp(y) if (!is.null(xreg)) { # Make xreg a ts object to allow easy subsetting later xreg <- ts(as.matrix(xreg)) if(NROW(xreg) != length(y)) stop("xreg must be of the same size as y") tsp(xreg) <- tsp(y) } if (is.null(window)) indx <- seq(1+initial, n - 1L) else indx <- seq(window+initial, n - 1L, by = 1L) for (i in indx) { y_subset <- subset( y, start = ifelse(is.null(window), 1L, ifelse(i - window >= 0L, i - window + 1L, stop("small window")) ), end = i ) if (is.null(xreg)) { fc <- try(suppressWarnings( forecastfunction(y_subset, h = h, ...) ), silent = TRUE) } else { xreg_subset <- as.matrix(subset( xreg, start = ifelse(is.null(window), 1L, ifelse(i - window >= 0L, i - window + 1L, stop("small window"))) )) fc <- try(suppressWarnings( forecastfunction(y_subset, h = h, xreg = xreg_subset, ...) ), silent = TRUE) } if (!is.element("try-error", class(fc))) { e[i, ] <- y[i + (1:h)] - fc$mean } } if (h == 1) { return(e[, 1L]) } else { colnames(e) <- paste("h=", 1:h, sep = "") return(e) } } # Cross-validation for AR models # By Gabriel Caceres ## Note arguments to pass must be named #' k-fold Cross-Validation applied to an autoregressive model #' #' \code{CVar} computes the errors obtained by applying an autoregressive #' modelling function to subsets of the time series \code{y} using k-fold #' cross-validation as described in Bergmeir, Hyndman and Koo (2015). It also #' applies a Ljung-Box test to the residuals. If this test is significant #' (see returned pvalue), there is serial correlation in the residuals and the #' model can be considered to be underfitting the data. In this case, the #' cross-validated errors can underestimate the generalization error and should #' not be used. #' #' @aliases print.CVar #' #' @param y Univariate time series #' @param k Number of folds to use for cross-validation. #' @param FUN Function to fit an autoregressive model. Currently, it only works #' with the \code{\link{nnetar}} function. #' @param cvtrace Provide progress information. #' @param blocked choose folds randomly or as blocks? #' @param LBlags lags for the Ljung-Box test, defaults to 24, for yearly series can be set to 20 #' @param ... Other arguments are passed to \code{FUN}. #' @return A list containing information about the model and accuracy for each #' fold, plus other summary information computed across folds. #' @author Gabriel Caceres and Rob J Hyndman #' @seealso \link{CV}, \link{tsCV}. #' @references Bergmeir, C., Hyndman, R.J., Koo, B. (2018) A note on the #' validity of cross-validation for evaluating time series prediction. #' \emph{Computational Statistics & Data Analysis}, \bold{120}, 70-83. #' \url{https://robjhyndman.com/publications/cv-time-series/}. 
#' @keywords ts #' @examples #' #' modelcv <- CVar(lynx, k=5, lambda=0.15) #' print(modelcv) #' print(modelcv$fold1) #' #' library(ggplot2) #' autoplot(lynx, series="Data") + #' autolayer(modelcv$testfit, series="Fits") + #' autolayer(modelcv$residuals, series="Residuals") #' ggAcf(modelcv$residuals) #' #' @export CVar <- function(y, k=10, FUN=nnetar, cvtrace=FALSE, blocked=FALSE, LBlags=24, ...) { nx <- length(y) # n-folds at most equal number of points k <- min(as.integer(k), nx) if (k <= 1L) { stop("k must be at least 2") } # Set up folds ind <- seq_len(nx) fold <- if (blocked) { sort(rep(1:k, length.out = nx)) } else { sample(rep(1:k, length.out = nx)) } cvacc <- matrix(NA_real_, nrow = k, ncol = 7) out <- list() alltestfit <- rep(NA, length.out = nx) for (i in 1:k) { out[[paste0("fold", i)]] <- list() testset <- ind[fold == i] trainset <- ind[fold != i] trainmodel <- FUN(y, subset = trainset, ...) testmodel <- FUN(y, model = trainmodel, xreg = trainmodel$xreg) testfit <- fitted(testmodel) acc <- accuracy(y, testfit, test = testset) cvacc[i, ] <- acc out[[paste0("fold", i)]]$model <- trainmodel out[[paste0("fold", i)]]$accuracy <- acc out[[paste0("fold", i)]]$testfit <- testfit out[[paste0("fold", i)]]$testset <- testset alltestfit[testset] <- testfit[testset] if (isTRUE(cvtrace)) { cat("Fold", i, "\n") print(acc) cat("\n") } } out$testfit <- ts(alltestfit) tsp(out$testfit) <- tsp(y) out$residuals <- out$testfit - y out$LBpvalue <- Box.test(out$residuals, type = "Ljung", lag = LBlags)$p.value out$k <- k # calculate mean accuracy accross all folds CVmean <- matrix(apply(cvacc, 2, FUN = mean, na.rm = TRUE), dimnames = list(colnames(acc), "Mean")) # calculate accuracy sd accross all folds --- include? CVsd <- matrix(apply(cvacc, 2, FUN = sd, na.rm = TRUE), dimnames = list(colnames(acc), "SD")) out$CVsummary <- cbind(CVmean, CVsd) out$series <- deparse(substitute(y)) out$call <- match.call() return(structure(out, class = c("CVar", class(trainmodel)))) } #' @export print.CVar <- function(x, ...) { cat("Series:", x$series, "\n") cat("Call: ") print(x$call) # Add info about series, function, and parameters # Add note about any NA/NaN in folds? # # Print number of folds cat("\n", x$k, "-fold cross-validation\n", sep = "") # Print mean & sd accuracy() results print(x$CVsummary) cat("\n") cat("p-value of Ljung-Box test of residuals is ", x$LBpvalue, "\n") cat("if this value is significant (<0.05),\n") cat("the result of the cross-validation should not be used\n") cat("as the model is underfitting the data.\n") invisible(x) } forecast/R/season.R0000644000176200001440000002531313553673010013703 0ustar liggesusers### Functions to handle seasonality #' Number of days in each season #' #' Returns number of days in each month or quarter of the observed time period. 
#' #' Useful for month length adjustments #' #' @param x time series #' @return Time series #' @author Rob J Hyndman #' @seealso \code{\link[forecast]{bizdays}} #' @keywords ts #' @examples #' #' par(mfrow=c(2,1)) #' plot(ldeaths,xlab="Year",ylab="pounds", #' main="Monthly deaths from lung disease (UK)") #' ldeaths.adj <- ldeaths/monthdays(ldeaths)*365.25/12 #' plot(ldeaths.adj,xlab="Year",ylab="pounds", #' main="Adjusted monthly deaths from lung disease (UK)") #' #' @export monthdays <- function(x) { if (!is.ts(x)) { stop("Not a time series") } f <- frequency(x) if (f == 12) { days <- c(31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31) } else if (f == 4) { days <- c(90, 91, 92, 92) } else { stop("Not monthly or quarterly data") } nyears <- round(length(x) / f + 1) + 1 years <- (1:nyears) + (start(x)[1] - 1) leap.years <- ((years %% 4 == 0) & !(years %% 100 == 0 & years %% 400 != 0))[1:nyears] dummy <- t(matrix(rep(days, nyears), nrow = f)) if (f == 12) { dummy[leap.years, 2] <- 29 } else { dummy[leap.years, 1] <- 91 } xx <- c(t(dummy))[start(x)[2] - 1 + (1:length(x))] return(ts(xx, start = start(x), frequency = f)) } #' Forecast seasonal index #' #' Returns vector containing the seasonal index for \code{h} future periods. If #' the seasonal index is non-periodic, it uses the last values of the index. #' #' #' @param object Output from \code{\link[stats]{decompose}} or #' \link[stats]{stl}. #' @param h Number of periods ahead to forecast #' @return Time series #' @author Rob J Hyndman #' @keywords ts #' @examples #' uk.stl <- stl(UKDriverDeaths,"periodic") #' uk.sa <- seasadj(uk.stl) #' uk.fcast <- holt(uk.sa,36) #' seasf <- sindexf(uk.stl,36) #' uk.fcast$mean <- uk.fcast$mean + seasf #' uk.fcast$lower <- uk.fcast$lower + cbind(seasf,seasf) #' uk.fcast$upper <- uk.fcast$upper + cbind(seasf,seasf) #' uk.fcast$x <- UKDriverDeaths #' plot(uk.fcast,main="Forecasts from Holt's method with seasonal adjustment") #' #' @export sindexf <- function(object, h) { if ("stl" %in% class(object)) { ss <- object$time.series[, 1] m <- frequency(ss) ss <- ss[length(ss) - (m:1) + 1] tsp.x <- tsp(object$time.series) } else if ("decomposed.ts" %in% class(object)) { ss <- object$figure m <- frequency(object$seasonal) n <- length(object$trend) ss <- rep(ss, n / m + 1)[1:n] ss <- ss[n - (m:1) + 1] tsp.x <- tsp(object$seasonal) } else { stop("Object of unknown class") } out <- ts(rep(ss, h / m + 1)[1:h], frequency = m, start = tsp.x[2] + 1 / m) return(out) } #' Seasonal dummy variables #' #' \code{seasonaldummy} returns a matrix of dummy variables suitable for use in #' \code{\link{Arima}}, \code{\link{auto.arima}} or \code{\link{tslm}}. The #' last season is omitted and used as the control. #' #' \code{seasonaldummyf} is deprecated, instead use the \code{h} argument in #' \code{seasonaldummy}. #' #' The number of dummy variables is determined from the time series #' characteristics of \code{x}. When \code{h} is missing, the length of #' \code{x} also determines the number of rows for the matrix returned by #' \code{seasonaldummy}. the value of \code{h} determines the number of rows #' for the matrix returned by \code{seasonaldummy}, typically used for #' forecasting. The values within \code{x} are not used. #' #' @param x Seasonal time series: a \code{ts} or a \code{msts} object #' @param h Number of periods ahead to forecast (optional) #' @return Numerical matrix. 
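#' The matrix has \code{frequency(x) - 1} columns and one row per observation
#' of \code{x} (or \code{h} rows when \code{h} is supplied).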
#' @author Rob J Hyndman #' @seealso \code{\link{fourier}} #' @keywords ts #' @examples #' #' plot(ldeaths) #' #' # Using seasonal dummy variables #' month <- seasonaldummy(ldeaths) #' deaths.lm <- tslm(ldeaths ~ month) #' tsdisplay(residuals(deaths.lm)) #' ldeaths.fcast <- forecast(deaths.lm, #' data.frame(month=I(seasonaldummy(ldeaths,36)))) #' plot(ldeaths.fcast) #' #' # A simpler approach to seasonal dummy variables #' deaths.lm <- tslm(ldeaths ~ season) #' ldeaths.fcast <- forecast(deaths.lm, h=36) #' plot(ldeaths.fcast) #' #' @export seasonaldummy <- function(x, h=NULL) { if (!is.ts(x)) { stop("Not a time series") } else { fr.x <- frequency(x) } if (is.null(h)) { if (fr.x == 1) { stop("Non-seasonal time series") } dummy <- as.factor(cycle(x)) dummy.mat <- matrix(0, ncol = frequency(x) - 1, nrow = length(x)) nrow <- 1:length(x) for (i in 1:(frequency(x) - 1)) dummy.mat[dummy == paste(i), i] <- 1 colnames(dummy.mat) <- if (fr.x == 12) { month.abb[1:11] } else if (fr.x == 4) { c("Q1", "Q2", "Q3") } else { paste("S", 1:(fr.x - 1), sep = "") } return(dummy.mat) } else { return(seasonaldummy(ts(rep(0, h), start = tsp(x)[2] + 1 / fr.x, frequency = fr.x))) } } #' @rdname seasonaldummy #' @export seasonaldummyf <- function(x, h) { warning("seasonaldummyf() is deprecated, please use seasonaldummy()") if (!is.ts(x)) { stop("Not a time series") } f <- frequency(x) return(seasonaldummy(ts(rep(0, h), start = tsp(x)[2] + 1 / f, frequency = f))) } #' Fourier terms for modelling seasonality #' #' \code{fourier} returns a matrix containing terms from a Fourier series, up #' to order \code{K}, suitable for use in \code{\link{Arima}}, #' \code{\link{auto.arima}}, or \code{\link{tslm}}. #' #' \code{fourierf} is deprecated, instead use the \code{h} argument in #' \code{fourier}. #' #' The period of the Fourier terms is determined from the time series #' characteristics of \code{x}. When \code{h} is missing, the length of #' \code{x} also determines the number of rows for the matrix returned by #' \code{fourier}. Otherwise, the value of \code{h} determines the number of #' rows for the matrix returned by \code{fourier}, typically used for #' forecasting. The values within \code{x} are not used. #' #' Typical use would omit \code{h} when generating Fourier terms for training a model #' and include \code{h} when generating Fourier terms for forecasting. #' #' When \code{x} is a \code{ts} object, the value of \code{K} should be an #' integer and specifies the number of sine and cosine terms to return. Thus, #' the matrix returned has \code{2*K} columns. #' #' When \code{x} is a \code{msts} object, then \code{K} should be a vector of #' integers specifying the number of sine and cosine terms for each of the #' seasonal periods. Then the matrix returned will have \code{2*sum(K)} #' columns. #' #' @param x Seasonal time series: a \code{ts} or a \code{msts} object #' @param K Maximum order(s) of Fourier terms #' @param h Number of periods ahead to forecast (optional) #' @return Numerical matrix. 
#' @author Rob J Hyndman #' @seealso \code{\link{seasonaldummy}} #' @keywords ts #' @examples #' #' library(ggplot2) #' #' # Using Fourier series for a "ts" object #' # K is chosen to minimize the AICc #' deaths.model <- auto.arima(USAccDeaths, xreg=fourier(USAccDeaths,K=5), seasonal=FALSE) #' deaths.fcast <- forecast(deaths.model, xreg=fourier(USAccDeaths, K=5, h=36)) #' autoplot(deaths.fcast) + xlab("Year") #' #' # Using Fourier series for a "msts" object #' taylor.lm <- tslm(taylor ~ fourier(taylor, K = c(3, 3))) #' taylor.fcast <- forecast(taylor.lm, #' data.frame(fourier(taylor, K = c(3, 3), h = 270))) #' autoplot(taylor.fcast) #' #' @export fourier <- function(x, K, h=NULL) { if (is.null(h)) { return(...fourier(x, K, 1:NROW(x))) } else { return(...fourier(x, K, NROW(x) + (1:h))) } } #' @rdname fourier #' @export fourierf <- function(x, K, h) { warning("fourierf() is deprecated, please use fourier()") return(...fourier(x, K, length(x) + (1:h))) } # Function to do the work. ...fourier <- function(x, K, times) { if (any(class(x) == "msts")) { period <- attr(x, "msts") } else { period <- frequency(x) } # Patch for older versions of R that do not have sinpi and cospi functions. if (!exists("sinpi")) { sinpi <- function(x) { sin(pi * x) } cospi <- function(x) { cos(pi * x) } } if (length(period) != length(K)) { stop("Number of periods does not match number of orders") } if (any(2 * K > period)) { stop("K must be not be greater than period/2") } # Compute periods of all Fourier terms p <- numeric(0) labels <- character(0) for (j in seq_along(period)) { if (K[j] > 0) { p <- c(p, (1:K[j]) / period[j]) labels <- c(labels, paste( paste0(c("S", "C"), rep(1:K[j], rep(2, K[j]))), round(period[j]), sep = "-" )) } } # Remove equivalent seasonal periods due to multiple seasonality k <- duplicated(p) p <- p[!k] labels <- labels[!rep(k, rep(2, length(k)))] # Remove columns where sinpi=0 k <- abs(2 * p - round(2 * p)) > .Machine$double.eps # Compute matrix of Fourier terms X <- matrix(NA_real_, nrow = length(times), ncol = 2L * length(p)) for (j in seq_along(p)) { if (k[j]) { X[, 2L * j - 1L] <- sinpi(2 * p[j] * times) } X[, 2L * j] <- cospi(2 * p[j] * times) } colnames(X) <- labels # Remove missing columns X <- X[, !is.na(colSums(X)), drop = FALSE] return(X) } #' Moving-average smoothing #' #' \code{ma} computes a simple moving average smoother of a given time series. #' #' The moving average smoother averages the nearest \code{order} periods of #' each observation. As neighbouring observations of a time series are likely #' to be similar in value, averaging eliminates some of the randomness in the #' data, leaving a smooth trend-cycle component. \deqn{\hat{T}_{t} = #' \frac{1}{m} \sum_{j=-k}^k #' y_{t+j}}{T[t]=1/m(y[t-k]+y[t-k+1]+\ldots+y[t]+\ldots+y[t+k-1]+y[t+k])} where #' \eqn{k=\frac{m-1}{2}}{k=(m-1)/2} #' #' When an even \code{order} is specified, the observations averaged will #' include one more observation from the future than the past (k is rounded #' up). If centre is TRUE, the value from two moving averages (where k is #' rounded up and down respectively) are averaged, centering the moving #' average. #' #' @param x Univariate time series #' @param order Order of moving average smoother #' @param centre If TRUE, then the moving average is centred for even orders. #' @return Numerical time series object containing the simple moving average #' smoothed values. 
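#' Values at each end of the series, where the averaging window extends beyond
#' the observed data, are returned as \code{NA}.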
#' @author Rob J Hyndman #' @seealso \code{\link[stats]{decompose}} #' @keywords ts #' @examples #' #' plot(wineind) #' sm <- ma(wineind,order=12) #' lines(sm,col="red") #' #' @export ma <- function(x, order, centre=TRUE) { if (abs(order - round(order)) > 1e-8) { stop("order must be an integer") } if (order %% 2 == 0 && centre) { # centred and even w <- c(0.5, rep(1, order - 1), 0.5) / order } else { # odd or not centred w <- rep(1, order) / order } return(filter(x, w)) } forecast/R/makeMatrices.R0000644000176200001440000002253513553673010015023 0ustar liggesusers# These functions make the w, F, x and g matrices # # # Author: srazbash ############################################################################### makeTBATSFMatrix <- function(alpha, beta=NULL, small.phi=NULL, seasonal.periods=NULL, k.vector=NULL, gamma.bold.matrix=NULL, ar.coefs=NULL, ma.coefs=NULL) { # 1. Alpha Row F <- matrix(1, nrow = 1, ncol = 1) if (!is.null(beta)) { F <- cbind(F, matrix(small.phi, nrow = 1, ncol = 1)) } if (!is.null(seasonal.periods)) { tau <- sum(k.vector) * 2 zero.tau <- matrix(0, nrow = 1, ncol = tau) F <- cbind(F, zero.tau) } if (!is.null(ar.coefs)) { p <- length(ar.coefs) ar.coefs <- matrix(ar.coefs, nrow = 1, ncol = p) alpha.phi <- alpha * ar.coefs F <- cbind(F, alpha.phi) } if (!is.null(ma.coefs)) { q <- length(ma.coefs) ma.coefs <- matrix(ma.coefs, nrow = 1, ncol = q) alpha.theta <- alpha * ma.coefs F <- cbind(F, alpha.theta) } # 2. Beta Row if (!is.null(beta)) { beta.row <- matrix(c(0, small.phi), nrow = 1, ncol = 2) if (!is.null(seasonal.periods)) { beta.row <- cbind(beta.row, zero.tau) } if (!is.null(ar.coefs)) { beta.phi <- beta * ar.coefs beta.row <- cbind(beta.row, beta.phi) } if (!is.null(ma.coefs)) { beta.theta <- beta * ma.coefs beta.row <- cbind(beta.row, beta.theta) } F <- rbind(F, beta.row) } # 3. Seasonal Row if (!is.null(seasonal.periods)) { seasonal.row <- t(zero.tau) if (!is.null(beta)) { seasonal.row <- cbind(seasonal.row, seasonal.row) } # Make the A matrix A <- matrix(0, tau, tau) last.pos <- 0 for (i in 1:length(k.vector)) { if (seasonal.periods[i] != 2) { C <- .Call("makeCIMatrix", k_s = as.integer(k.vector[i]), m_s = as.double(seasonal.periods[i]), PACKAGE = "forecast") } else { C <- matrix(0, 1, 1) } S <- .Call("makeSIMatrix", k_s = as.integer(k.vector[i]), m_s = as.double(seasonal.periods[i]), PACKAGE = "forecast") # C <- matrix(0,k.vector[i],k.vector[i]) # for(j in 1:k.vector[i]) { # l <- round((2*pi*j/seasonal.periods[i]), digits=15) # C[j,j] <- cos(l) # } # S <- matrix(0,k.vector[i],k.vector[i]) # for(j in 1:k.vector[i]) { # S[j,j] <- sin(2*pi*j/seasonal.periods[i]) # } # print(C) # print(S) Ai <- .Call("makeAIMatrix", C_s = C, S_s = S, k_s = as.integer(k.vector[i]), PACKAGE = "forecast") A[(last.pos + 1):(last.pos + (2 * k.vector[i])), (last.pos + 1):(last.pos + (2 * k.vector[i]))] <- Ai last.pos <- last.pos + (2 * k.vector[i]) } seasonal.row <- cbind(seasonal.row, A) if (!is.null(ar.coefs)) { B <- t(gamma.bold.matrix) %*% ar.coefs seasonal.row <- cbind(seasonal.row, B) } if (!is.null(ma.coefs)) { C <- t(gamma.bold.matrix) %*% ma.coefs seasonal.row <- cbind(seasonal.row, C) } F <- rbind(F, seasonal.row) } # 4. 
AR() Rows if (!is.null(ar.coefs)) { # p <- length(ar.coefs) ar.rows <- matrix(0, nrow = p, ncol = 1) if (!is.null(beta)) { ar.rows <- cbind(ar.rows, ar.rows) } if (!is.null(seasonal.periods)) { ar.seasonal.zeros <- matrix(0, nrow = p, ncol = tau) ar.rows <- cbind(ar.rows, ar.seasonal.zeros) } ident <- diag((p - 1)) ident <- cbind(ident, matrix(0, nrow = (p - 1), ncol = 1)) ar.part <- rbind(ar.coefs, ident) ar.rows <- cbind(ar.rows, ar.part) if (!is.null(ma.coefs)) { ma.in.ar <- matrix(0, nrow = p, ncol = q) ma.in.ar[1, ] <- ma.coefs ar.rows <- cbind(ar.rows, ma.in.ar) } F <- rbind(F, ar.rows) } # 5. MA() Rows if (!is.null(ma.coefs)) { ma.rows <- matrix(0, nrow = q, ncol = 1) if (!is.null(beta)) { ma.rows <- cbind(ma.rows, ma.rows) } if (!is.null(seasonal.periods)) { ma.seasonal <- matrix(0, nrow = q, ncol = tau) ma.rows <- cbind(ma.rows, ma.seasonal) } if (!is.null(ar.coefs)) { ar.in.ma <- matrix(0, nrow = q, ncol = p) ma.rows <- cbind(ma.rows, ar.in.ma) } ident <- diag((q - 1)) ident <- cbind(ident, matrix(0, nrow = (q - 1), ncol = 1)) ma.part <- rbind(matrix(0, nrow = 1, ncol = q), ident) ma.rows <- cbind(ma.rows, ma.part) F <- rbind(F, ma.rows) } return(F) } # makeWMatrix <- function(small.phi=NULL, seasonal.periods=NULL, ar.coefs=NULL, ma.coefs=NULL) { # # the.list <- .Call("makeBATSWMatrix", smallPhi_s = small.phi, sPeriods_s = as.integer(seasonal.periods), arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, PACKAGE = "forecast") # # # return(the.list) # # } # makeGMatrix <- function(alpha, beta=NULL, gamma.vector=NULL, seasonal.periods=NULL, p=0, q=0) { # li <- .Call("makeBATSGMatrix", alpha, beta, gamma.vector, as.integer(seasonal.periods), as.integer(p), as.integer(q), PACKAGE="forecast") # # return(li) # } makeFMatrix <- function(alpha, beta=NULL, small.phi=NULL, seasonal.periods=NULL, gamma.bold.matrix=NULL, ar.coefs=NULL, ma.coefs=NULL) { # 1. Alpha Row F <- matrix(1, nrow = 1, ncol = 1) if (!is.null(beta)) { F <- cbind(F, matrix(small.phi, nrow = 1, ncol = 1)) } if (!is.null(seasonal.periods)) { tau <- sum(seasonal.periods) zero.tau <- matrix(0, nrow = 1, ncol = tau) F <- cbind(F, zero.tau) } if (!is.null(ar.coefs)) { p <- length(ar.coefs) ar.coefs <- matrix(ar.coefs, nrow = 1, ncol = p) alpha.phi <- alpha * ar.coefs F <- cbind(F, alpha.phi) } if (!is.null(ma.coefs)) { q <- length(ma.coefs) ma.coefs <- matrix(ma.coefs, nrow = 1, ncol = q) alpha.theta <- alpha * ma.coefs F <- cbind(F, alpha.theta) } # 2. Beta Row if (!is.null(beta)) { beta.row <- matrix(c(0, small.phi), nrow = 1, ncol = 2) if (!is.null(seasonal.periods)) { beta.row <- cbind(beta.row, zero.tau) } if (!is.null(ar.coefs)) { beta.phi <- beta * ar.coefs beta.row <- cbind(beta.row, beta.phi) } if (!is.null(ma.coefs)) { beta.theta <- beta * ma.coefs beta.row <- cbind(beta.row, beta.theta) } F <- rbind(F, beta.row) } # 3. 
Seasonal Row if (!is.null(seasonal.periods)) { seasonal.row <- t(zero.tau) if (!is.null(beta)) { seasonal.row <- cbind(seasonal.row, seasonal.row) } # Make the A matrix for (i in seasonal.periods) { if (i == seasonal.periods[1]) { a.row.one <- matrix(0, nrow = 1, ncol = i) a.row.one[i] <- 1 a.row.two <- cbind(diag((i - 1)), matrix(0, nrow = (i - 1), ncol = 1)) A <- rbind(a.row.one, a.row.two) } else { old.A.rows <- dim(A)[1] old.A.columns <- dim(A)[2] a.row.one <- matrix(0, nrow = 1, ncol = i) a.row.one[i] <- 1 a.row.two <- cbind(diag((i - 1)), matrix(0, nrow = (i - 1), ncol = 1)) Ai <- rbind(a.row.one, a.row.two) A <- rbind(A, matrix(0, nrow = dim(Ai)[1], ncol = old.A.columns)) A <- cbind(A, matrix(0, nrow = dim(A)[1], ncol = dim(Ai)[2])) A[((old.A.rows + 1):(old.A.rows + dim(Ai)[1])), ((old.A.columns + 1):(old.A.columns + dim(Ai)[2]))] <- Ai } } seasonal.row <- cbind(seasonal.row, A) if (!is.null(ar.coefs)) { B <- t(gamma.bold.matrix) %*% ar.coefs seasonal.row <- cbind(seasonal.row, B) } if (!is.null(ma.coefs)) { C <- t(gamma.bold.matrix) %*% ma.coefs seasonal.row <- cbind(seasonal.row, C) } F <- rbind(F, seasonal.row) } # 4. AR() Rows if (!is.null(ar.coefs)) { # p <- length(ar.coefs) ar.rows <- matrix(0, nrow = p, ncol = 1) if (!is.null(beta)) { ar.rows <- cbind(ar.rows, ar.rows) } if (!is.null(seasonal.periods)) { ar.seasonal.zeros <- matrix(0, nrow = p, ncol = tau) ar.rows <- cbind(ar.rows, ar.seasonal.zeros) } ident <- diag((p - 1)) ident <- cbind(ident, matrix(0, nrow = (p - 1), ncol = 1)) ar.part <- rbind(ar.coefs, ident) ar.rows <- cbind(ar.rows, ar.part) if (!is.null(ma.coefs)) { ma.in.ar <- matrix(0, nrow = p, ncol = q) ma.in.ar[1, ] <- ma.coefs ar.rows <- cbind(ar.rows, ma.in.ar) } F <- rbind(F, ar.rows) } # 5. MA() Rows if (!is.null(ma.coefs)) { ma.rows <- matrix(0, nrow = q, ncol = 1) if (!is.null(beta)) { ma.rows <- cbind(ma.rows, ma.rows) } if (!is.null(seasonal.periods)) { ma.seasonal <- matrix(0, nrow = q, ncol = tau) ma.rows <- cbind(ma.rows, ma.seasonal) } if (!is.null(ar.coefs)) { ar.in.ma <- matrix(0, nrow = q, ncol = p) ma.rows <- cbind(ma.rows, ar.in.ma) } ident <- diag((q - 1)) ident <- cbind(ident, matrix(0, nrow = (q - 1), ncol = 1)) ma.part <- rbind(matrix(0, nrow = 1, ncol = q), ident) ma.rows <- cbind(ma.rows, ma.part) F <- rbind(F, ma.rows) } return(F) } makeXMatrix <- function(l, b=NULL, s.vector=NULL, d.vector=NULL, epsilon.vector=NULL) { x.transpose <- matrix(l, nrow = 1, ncol = 1) if (!is.null(b)) { x.transpose <- cbind(x.transpose, matrix(b, nrow = 1, ncol = 1)) } if (!is.null(s.vector)) { x.transpose <- cbind(x.transpose, matrix(s.vector, nrow = 1, ncol = length(s.vector))) } if (!is.null(d.vector)) { x.transpose <- cbind(x.transpose, matrix(d.vector, nrow = 1, ncol = length(d.vector))) } if (!is.null(epsilon.vector)) { x.transpose <- cbind(x.transpose, matrix(epsilon.vector, nrow = 1, ncol = length(epsilon.vector))) } x <- t(x.transpose) return(list(x = x, x.transpose = x.transpose)) } forecast/R/guerrero.R0000644000176200001440000000743513553673010014252 0ustar liggesusers# This R script contains code for extracting the Box-Cox # parameter, lambda, using Guerrero's method (1993). 
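# Illustrative sketch (not part of the package, and never executed): the
# criterion implemented by guer.cv() below can be traced over a grid of
# candidate lambdas, and its minimiser should be close to the value returned
# by BoxCox.lambda().
if (FALSE) {
  lambdas <- seq(-1, 2, by = 0.05)
  cvs <- sapply(lambdas, guer.cv, x = AirPassengers)
  plot(lambdas, cvs, type = "l", xlab = "lambda",
       ylab = "coefficient of variation of subseries ratios")
  lambdas[which.min(cvs)] # compare with BoxCox.lambda(AirPassengers)
}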
# Written by Leanne Chhay # guer.cv computes the coefficient of variation # Input: # lam = lambda # x = original time series as a time series object # Output: coefficient of variation guer.cv <- function(lam, x, nonseasonal.length=2) { period <- round(max(nonseasonal.length, frequency(x))) nobsf <- length(x) nyr <- floor(nobsf / period) nobst <- floor(nyr * period) x.mat <- matrix(x[(nobsf - nobst + 1):nobsf], period, nyr) x.mean <- apply(x.mat, 2, mean, na.rm = TRUE) x.sd <- apply(x.mat, 2, sd, na.rm = TRUE) x.rat <- x.sd / x.mean ^ (1 - lam) return(sd(x.rat, na.rm = TRUE) / mean(x.rat, na.rm = TRUE)) } # guerrero extracts the required lambda # Input: x = original time series as a time series object # Output: lambda that minimises the coefficient of variation guerrero <- function(x, lower=-1, upper=2, nonseasonal.length=2) { return(optimize( guer.cv, c(lower, upper), x = x, nonseasonal.length = nonseasonal.length )$minimum) } # Modified version of boxcox from MASS package bcloglik <- function(x, lower=-1, upper=2) { n <- length(x) if (any(x <= 0, na.rm = TRUE)) { stop("x must be positive") } logx <- log(na.omit(c(x))) xdot <- exp(mean(logx)) if (all(class(x) != "ts")) { fit <- lm(x ~ 1, data = data.frame(x = x), na.action = na.exclude) } else if (frequency(x) > 1) { fit <- tslm(x ~ trend + season, data = data.frame(x = x)) } else { fit <- tslm(x ~ trend, data = data.frame(x = x)) } xqr <- fit$qr lambda <- seq(lower, upper, by = .05) xl <- loglik <- as.vector(lambda) m <- length(xl) x <- na.omit(c(x)) for (i in 1L:m) { if (abs(la <- xl[i]) > 0.02) { xt <- (x ^ la - 1) / la } else { xt <- logx * (1 + (la * logx) / 2 * (1 + (la * logx) / 3 * (1 + (la * logx) / 4))) } loglik[i] <- -n / 2 * log(sum(qr.resid(xqr, xt / xdot ^ (la - 1)) ^ 2)) } return(xl[which.max(loglik)]) } #' Automatic selection of Box Cox transformation parameter #' #' If \code{method=="guerrero"}, Guerrero's (1993) method is used, where lambda #' minimizes the coefficient of variation for subseries of \code{x}. #' #' If \code{method=="loglik"}, the value of lambda is chosen to maximize the #' profile log likelihood of a linear model fitted to \code{x}. For #' non-seasonal data, a linear time trend is fitted while for seasonal data, a #' linear time trend with seasonal dummy variables is used. #' #' #' @param x a numeric vector or time series of class \code{ts} #' @param method Choose method to be used in calculating lambda. #' @param lower Lower limit for possible lambda values. #' @param upper Upper limit for possible lambda values. #' @return a number indicating the Box-Cox transformation parameter. #' @author Leanne Chhay and Rob J Hyndman #' @seealso \code{\link{BoxCox}} #' @references Box, G. E. P. and Cox, D. R. (1964) An analysis of #' transformations. \emph{JRSS B} \bold{26} 211--246. #' #' Guerrero, V.M. (1993) Time-series analysis supported by power #' transformations. \emph{Journal of Forecasting}, \bold{12}, 37--48. 
#' @keywords ts #' @examples #' #' lambda <- BoxCox.lambda(AirPassengers,lower=0) #' air.fit <- Arima(AirPassengers, order=c(0,1,1), #' seasonal=list(order=c(0,1,1),period=12), lambda=lambda) #' plot(forecast(air.fit)) #' #' @export BoxCox.lambda <- function(x, method=c("guerrero", "loglik"), lower=-1, upper=2) { if (any(x <= 0, na.rm = TRUE)) { lower <- max(lower, 0) } if (length(x) <= 2 * frequency(x)) { return(1) } # Not enough data to do much more than this # stop("All values must be positive") method <- match.arg(method) if (method == "loglik") { return(bcloglik(x, lower, upper)) } else { return(guerrero(x, lower, upper)) } } forecast/R/fitTBATS.R0000644000176200001440000005053213553673010013774 0ustar liggesusersfitPreviousTBATSModel <- function(y, model, biasadj=FALSE) { seasonal.periods <- model$seasonal.periods if (is.null(seasonal.periods) == FALSE) { seasonal.periods <- sort(seasonal.periods) } # Get the parameters out of the param.vector paramz <- unParameteriseTBATS(model$parameters$vect, model$parameters$control) lambda <- paramz$lambda alpha <- paramz$alpha beta.v <- paramz$beta if (!is.null(beta.v)) { adj.beta <- 1 } else { adj.beta <- 0 } small.phi <- paramz$small.phi gamma.one.v <- paramz$gamma.one.v gamma.two.v <- paramz$gamma.two.v if (!is.null(paramz$ar.coefs)) { p <- length(paramz$ar.coefs) ar.coefs <- matrix(paramz$ar.coefs, nrow = 1, ncol = p) } else { ar.coefs <- NULL p <- 0 } if (!is.null(paramz$ma.coefs)) { q <- length(paramz$ma.coefs) ma.coefs <- matrix(paramz$ma.coefs, nrow = 1, ncol = q) } else { ma.coefs <- NULL q <- 0 } if (!is.null(seasonal.periods)) { tau <- as.integer(2 * sum(model$k.vector)) gamma.bold <- matrix(0, nrow = 1, ncol = (2 * sum(model$k.vector))) } else { tau <- as.integer(0) gamma.bold <- NULL } g <- matrix(0, nrow = ((2 * sum(model$k.vector)) + 1 + adj.beta + p + q), ncol = 1) if (p != 0) { g[(1 + adj.beta + tau + 1), 1] <- 1 } if (q != 0) { g[(1 + adj.beta + tau + p + 1), 1] <- 1 } y.touse <- y if (is.null(lambda) == FALSE) { y.touse <- BoxCox(y, lambda = lambda) lambda <- attr(y.touse, "lambda") } ## Calculate the variance: # 1. Re-set up the matrices w <- .Call("makeTBATSWMatrix", smallPhi_s = small.phi, kVector_s = model$k.vector, arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, tau_s = tau, PACKAGE = "forecast") if (!is.null(gamma.bold)) { .Call("updateTBATSGammaBold", gammaBold_s = gamma.bold, kVector_s = model$k.vector, gammaOne_s = gamma.one.v, gammaTwo_s = gamma.two.v, PACKAGE = "forecast") } .Call("updateTBATSGMatrix", g_s = g, gammaBold_s = gamma.bold, alpha_s = alpha, beta_s = beta.v, PACKAGE = "forecast") F <- makeTBATSFMatrix(alpha = alpha, beta = beta.v, small.phi = small.phi, seasonal.periods = seasonal.periods, k.vector = model$k.vector, gamma.bold.matrix = gamma.bold, ar.coefs = ar.coefs, ma.coefs = ma.coefs) .Call("updateFMatrix", F, small.phi, alpha, beta.v, gamma.bold, ar.coefs, ma.coefs, tau, PACKAGE = "forecast") # 2. Calculate! 
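# calcModel() runs the linear innovations state space recursion of
# De Livera, Hyndman & Snyder (2011). An illustrative R sketch of what the
# compiled code computes, starting from x <- model$seed.states:
#
#   for (t in seq_along(y)) {
#     y.hat[t] <- w$w.transpose %*% x  # one-step-ahead forecast
#     e[t] <- y[t] - y.hat[t]          # one-step-ahead error
#     x <- F %*% x + g * e[t]          # state update
#   }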
fitted.values.and.errors <- calcModel(y.touse, model$seed.states, F, g, w)
  e <- fitted.values.and.errors$e
  fitted.values <- fitted.values.and.errors$y.hat
  variance <- sum((e * e)) / length(y)
  if (!is.null(lambda)) {
    fitted.values <- InvBoxCox(fitted.values, lambda = lambda, biasadj, variance)
  }
  model.for.output <- model
  model.for.output$variance <- variance
  model.for.output$fitted.values <- ts(c(fitted.values))
  model.for.output$errors <- ts(c(e))
  tsp(model.for.output$fitted.values) <- tsp(model.for.output$errors) <- tsp(y)
  model.for.output$x <- fitted.values.and.errors$x
  model.for.output$y <- y
  return(model.for.output)
}

fitSpecificTBATS <- function(y, use.box.cox, use.beta, use.damping, seasonal.periods=NULL, k.vector=NULL, starting.params=NULL, x.nought=NULL, ar.coefs=NULL, ma.coefs=NULL, init.box.cox=NULL, bc.lower=0, bc.upper=1, biasadj=FALSE) {
  if (!is.null(seasonal.periods)) {
    seasonal.periods <- sort(seasonal.periods)
  }
  ## Purpose of the first if() statement: on the first pass, use default starting values; on the second pass, use the values from the first pass as starting values.
  if (is.null(starting.params)) {
    ## Check for the existence of ARMA() coefficients
    if (!is.null(ar.coefs)) {
      p <- length(ar.coefs)
    } else {
      p <- 0
    }
    if (!is.null(ma.coefs)) {
      q <- length(ma.coefs)
    } else {
      q <- 0
    }
    # Calculate starting values:
    alpha <- 0.09
    if (use.beta) {
      adj.beta <- 1
      beta.v <- 0.05
      b <- 0.00
      if (use.damping) {
        small.phi <- .999
      } else {
        small.phi <- 1
      }
    } else {
      adj.beta <- 0
      beta.v <- NULL
      b <- NULL
      small.phi <- NULL
      use.damping <- FALSE
    }
    if (!is.null(seasonal.periods)) {
      gamma.one.v <- rep(0, length(k.vector))
      gamma.two.v <- rep(0, length(k.vector))
      s.vector <- numeric(2 * sum(k.vector))
      k.vector <- as.integer(k.vector)
    } else {
      gamma.one.v <- NULL
      gamma.two.v <- NULL
      s.vector <- NULL
    }
    if (use.box.cox) {
      if (!is.null(init.box.cox)) {
        lambda <- init.box.cox
      } else {
        lambda <- BoxCox.lambda(y, lower = 0, upper = 1.5)
      }
      y.transformed <- BoxCox(y, lambda = lambda)
      lambda <- attr(y.transformed, "lambda")
    } else { # the "else" is not needed at the moment
      lambda <- NULL
    }
  } else {
    paramz <- unParameteriseTBATS(starting.params$vect, starting.params$control)
    lambda <- paramz$lambda
    alpha <- paramz$alpha
    beta.v <- paramz$beta
    if (!is.null(beta.v)) {
      adj.beta <- 1
    } else {
      adj.beta <- 0
    }
    b <- 0
    small.phi <- paramz$small.phi
    gamma.one.v <- paramz$gamma.one.v
    gamma.two.v <- paramz$gamma.two.v
    if (!is.null(seasonal.periods)) {
      s.vector <- numeric(2 * sum(k.vector))
    } else {
      s.vector <- NULL
    }
    # ar.coefs <- paramz$ar.coefs
    # ma.coefs <- paramz$ma.coefs
    ## Check for the existence of ARMA() coefficients
    if (!is.null(ar.coefs)) {
      p <- length(ar.coefs)
    } else {
      p <- 0
    }
    if (!is.null(ma.coefs)) {
      q <- length(ma.coefs)
    } else {
      q <- 0
    }
  }
  if (is.null(x.nought)) {
    # Start with the seed states equal to zero
    if (!is.null(ar.coefs)) {
      d.vector <- numeric(length(ar.coefs))
    } else {
      d.vector <- NULL
    }
    if (!is.null(ma.coefs)) {
      epsilon.vector <- numeric(length(ma.coefs))
    } else {
      epsilon.vector <- NULL
    }
    x.nought <- makeXMatrix(l = 0, b = b, s.vector = s.vector, d.vector = d.vector, epsilon.vector = epsilon.vector)$x
  }
  # Make the parameter vector with parameterise()
  param.vector <- parameterise(alpha = alpha, beta.v = beta.v, small.phi = small.phi, gamma.v = cbind(gamma.one.v, gamma.two.v), lambda = lambda, ar.coefs = ar.coefs, ma.coefs = ma.coefs)
  par.scale <- makeParscale(param.vector$control)
  if (!is.null(seasonal.periods)) {
    tau <- as.integer(2 * sum(k.vector))
  } else {
    tau <-
as.integer(0) } w <- .Call("makeTBATSWMatrix", smallPhi_s = small.phi, kVector_s = k.vector, arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, tau_s = tau, PACKAGE = "forecast") if (!is.null(seasonal.periods)) { gamma.bold <- matrix(0, nrow = 1, ncol = (2 * sum(k.vector))) .Call("updateTBATSGammaBold", gammaBold_s = gamma.bold, kVector_s = k.vector, gammaOne_s = gamma.one.v, gammaTwo_s = gamma.two.v, PACKAGE = "forecast") } else { gamma.bold <- NULL } g <- matrix(0, nrow = ((2 * sum(k.vector)) + 1 + adj.beta + p + q), ncol = 1) if (p != 0) { g[(1 + adj.beta + tau + 1), 1] <- 1 } if (q != 0) { g[(1 + adj.beta + tau + p + 1), 1] <- 1 } .Call("updateTBATSGMatrix", g_s = g, gammaBold_s = gamma.bold, alpha_s = alpha, beta_s = beta.v, PACKAGE = "forecast") F <- makeTBATSFMatrix(alpha = alpha, beta = beta.v, small.phi = small.phi, seasonal.periods = seasonal.periods, k.vector = k.vector, gamma.bold.matrix = gamma.bold, ar.coefs = ar.coefs, ma.coefs = ma.coefs) D <- F - g %*% w$w.transpose #### # Set up environment opt.env <- new.env() assign("F", F, envir = opt.env) assign("w.transpose", w$w.transpose, envir = opt.env) assign("g", g, envir = opt.env) assign("gamma.bold", gamma.bold, envir = opt.env) assign("k.vector", k.vector, envir = opt.env) assign("y", matrix(y, nrow = 1, ncol = length(y)), envir = opt.env) assign("y.hat", matrix(0, nrow = 1, ncol = length(y)), envir = opt.env) assign("e", matrix(0, nrow = 1, ncol = length(y)), envir = opt.env) assign("x", matrix(0, nrow = length(x.nought), ncol = length(y)), envir = opt.env) ## Set up matrices to find the seed states if (use.box.cox) { y.transformed <- BoxCox(y, lambda = lambda) lambda <- attr(y.transformed, "lambda") .Call("calcTBATSFaster", ys = matrix(y.transformed, nrow = 1, ncol = length(y.transformed)), yHats = opt.env$y.hat, wTransposes = opt.env$w.transpose, Fs = opt.env$F, xs = opt.env$x, gs = opt.env$g, es = opt.env$e, xNought_s = x.nought, PACKAGE = "forecast") y.tilda <- opt.env$e } else { .Call("calcTBATSFaster", ys = opt.env$y, yHats = opt.env$y.hat, wTransposes = opt.env$w.transpose, Fs = opt.env$F, xs = opt.env$x, gs = opt.env$g, es = opt.env$e, xNought_s = x.nought, PACKAGE = "forecast") y.tilda <- opt.env$e } w.tilda.transpose <- matrix(0, nrow = length(y), ncol = ncol(w$w.transpose)) w.tilda.transpose[1, ] <- w$w.transpose w.tilda.transpose <- .Call("calcWTilda", wTildaTransposes = w.tilda.transpose, Ds = D, PACKAGE = "forecast") # Remove the AR() and MA() bits if they exist if ((p != 0) | (q != 0)) { end.cut <- ncol(w.tilda.transpose) start.cut <- end.cut - (p + q) + 1 w.tilda.transpose <- w.tilda.transpose[, -c(start.cut:end.cut)] } x.nought <- lm(t(y.tilda) ~ w.tilda.transpose - 1)$coefficients x.nought <- matrix(x.nought, nrow = length(x.nought), ncol = 1) ## Replace the AR() and MA() bits if they exist if ((p != 0) | (q != 0)) { arma.seed.states <- numeric((p + q)) arma.seed.states <- matrix(arma.seed.states, nrow = length(arma.seed.states), ncol = 1) x.nought <- rbind(x.nought, arma.seed.states) } ## Optimisation if (use.box.cox) { # Un-transform the seed states assign("x.nought.untransformed", InvBoxCox(x.nought, lambda = lambda), envir = opt.env) # Optimise the likelihood function optim.like <- optim( par = param.vector$vect, fn = calcLikelihoodTBATS, method = "Nelder-Mead", opt.env = opt.env, use.beta = use.beta, use.small.phi = use.damping, seasonal.periods = seasonal.periods, param.control = param.vector$control, p = p, q = q, tau = tau, bc.lower = bc.lower, bc.upper = bc.upper, control = list(maxit = (100 * 
length(param.vector$vect) ^ 2), parscale = par.scale) ) # Get the parameters out of the param.vector paramz <- unParameteriseTBATS(optim.like$par, param.vector$control) lambda <- paramz$lambda alpha <- paramz$alpha beta.v <- paramz$beta small.phi <- paramz$small.phi gamma.one.v <- paramz$gamma.one.v gamma.two.v <- paramz$gamma.two.v if (!is.null(paramz$ar.coefs)) { p <- length(paramz$ar.coefs) ar.coefs <- matrix(paramz$ar.coefs, nrow = 1, ncol = p) } else { ar.coefs <- NULL p <- 0 } if (!is.null(paramz$ma.coefs)) { q <- length(paramz$ma.coefs) ma.coefs <- matrix(paramz$ma.coefs, nrow = 1, ncol = q) } else { ma.coefs <- NULL q <- 0 } # Transform the seed states x.nought <- BoxCox(opt.env$x.nought.untransformed, lambda = lambda) lambda <- attr(x.nought, "lambda") ## Calculate the variance: # 1. Re-set up the matrices w <- .Call("makeTBATSWMatrix", smallPhi_s = small.phi, kVector_s = k.vector, arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, tau_s = tau, PACKAGE = "forecast") if (!is.null(gamma.bold)) { .Call("updateTBATSGammaBold", gammaBold_s = gamma.bold, kVector_s = k.vector, gammaOne_s = gamma.one.v, gammaTwo_s = gamma.two.v, PACKAGE = "forecast") } .Call("updateTBATSGMatrix", g_s = g, gammaBold_s = gamma.bold, alpha_s = alpha, beta_s = beta.v, PACKAGE = "forecast") .Call("updateFMatrix", F, small.phi, alpha, beta.v, gamma.bold, ar.coefs, ma.coefs, tau, PACKAGE = "forecast") # 2. Calculate! y.transformed <- BoxCox(y, lambda = lambda) lambda <- attr(y.transformed, "lambda") fitted.values.and.errors <- calcModel(y.transformed, x.nought, F, g, w) e <- fitted.values.and.errors$e variance <- sum((e * e)) / length(y) fitted.values <- InvBoxCox(fitted.values.and.errors$y.hat, lambda = lambda, biasadj, variance) attr(lambda, "biasadj") <- biasadj # e <- InvBoxCox(e, lambda=lambda) ee <- y - fitted.values } else { # else if we are not using the Box-Cox transformation # Optimise the likelihood function if (length(param.vector$vect) > 1) { optim.like <- optim(par = param.vector$vect, fn = calcLikelihoodNOTransformedTBATS, method = "Nelder-Mead", opt.env = opt.env, x.nought = x.nought, use.beta = use.beta, use.small.phi = use.damping, seasonal.periods = seasonal.periods, param.control = param.vector$control, p = p, q = q, tau = tau, control = list(maxit = (100 * length(param.vector$vect) ^ 2), parscale = par.scale)) } else { optim.like <- optim(par = param.vector$vect, fn = calcLikelihoodNOTransformedTBATS, method = "BFGS", opt.env = opt.env, x.nought = x.nought, use.beta = use.beta, use.small.phi = use.damping, seasonal.periods = seasonal.periods, param.control = param.vector$control, p = p, q = q, tau = tau, control = list(parscale = par.scale)) } # Get the parameters out of the param.vector paramz <- unParameteriseTBATS(optim.like$par, param.vector$control) lambda <- paramz$lambda alpha <- paramz$alpha beta.v <- paramz$beta small.phi <- paramz$small.phi gamma.one.v <- paramz$gamma.one.v gamma.two.v <- paramz$gamma.two.v if (!is.null(paramz$ar.coefs)) { p <- length(paramz$ar.coefs) ar.coefs <- matrix(paramz$ar.coefs, nrow = 1, ncol = p) } else { ar.coefs <- NULL p <- 0 } if (!is.null(paramz$ma.coefs)) { q <- length(paramz$ma.coefs) ma.coefs <- matrix(paramz$ma.coefs, nrow = 1, ncol = q) } else { ma.coefs <- NULL q <- 0 } ## Calculate the variance: # 1. 
Re-set up the matrices w <- .Call("makeTBATSWMatrix", smallPhi_s = small.phi, kVector_s = k.vector, arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, tau_s = tau, PACKAGE = "forecast") if (!is.null(gamma.bold)) { .Call("updateTBATSGammaBold", gammaBold_s = gamma.bold, kVector_s = k.vector, gammaOne_s = gamma.one.v, gammaTwo_s = gamma.two.v, PACKAGE = "forecast") } .Call("updateTBATSGMatrix", g_s = g, gammaBold_s = gamma.bold, alpha_s = alpha, beta_s = beta.v, PACKAGE = "forecast") .Call("updateFMatrix", F, small.phi, alpha, beta.v, gamma.bold, ar.coefs, ma.coefs, tau, PACKAGE = "forecast") # 2. Calculate! fitted.values.and.errors <- calcModel(y, x.nought, F, g, w) e <- fitted.values.and.errors$e fitted.values <- fitted.values.and.errors$y.hat variance <- sum((e * e)) / length(y) } # Get the likelihood likelihood <- optim.like$value # Calculate the AIC aic <- likelihood + 2 * (length(param.vector$vect) + nrow(x.nought)) # Make a list object fits <- ts(c(fitted.values)) e <- ts(c(e)) tsp(fits) <- tsp(e) <- tsp(y) model.for.output <- list( lambda = lambda, alpha = alpha, beta = beta.v, damping.parameter = small.phi, gamma.one.values = gamma.one.v, gamma.two.values = gamma.two.v, ar.coefficients = ar.coefs, ma.coefficients = ma.coefs, likelihood = likelihood, optim.return.code = optim.like$convergence, variance = variance, AIC = aic, parameters = list(vect = optim.like$par, control = param.vector$control), seed.states = x.nought, fitted.values = fits, errors = e, x = fitted.values.and.errors$x, seasonal.periods = seasonal.periods, k.vector = k.vector, y = y, p = p, q = q ) class(model.for.output) <- c("tbats", "bats") return(model.for.output) } calcLikelihoodTBATS <- function(param.vector, opt.env, use.beta, use.small.phi, seasonal.periods, param.control, p=0, q=0, tau=0, bc.lower=0, bc.upper=1) { # param vector should be as follows: Box-Cox.parameter, alpha, beta, small.phi, gamma.vector, ar.coefs, ma.coefs # Put the components of the param.vector into meaningful individual variables paramz <- unParameteriseTBATS(param.vector, param.control) box.cox.parameter <- paramz$lambda alpha <- paramz$alpha beta.v <- paramz$beta small.phi <- paramz$small.phi gamma.one.v <- paramz$gamma.one.v gamma.two.v <- paramz$gamma.two.v ar.coefs <- paramz$ar.coefs ma.coefs <- paramz$ma.coefs if (!is.null(paramz$ar.coefs)) { p <- length(paramz$ar.coefs) ar.coefs <- matrix(paramz$ar.coefs, nrow = 1, ncol = p) } else { ar.coefs <- NULL p <- 0 } if (!is.null(paramz$ma.coefs)) { q <- length(paramz$ma.coefs) ma.coefs <- matrix(paramz$ma.coefs, nrow = 1, ncol = q) } else { ma.coefs <- NULL q <- 0 } x.nought <- BoxCox(opt.env$x.nought.untransformed, lambda = box.cox.parameter) lambda <- attr(x.nought, "lambda") .Call("updateWtransposeMatrix", wTranspose_s = opt.env$w.transpose, smallPhi_s = small.phi, tau_s = as.integer(tau), arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, p_s = as.integer(p), q_s = as.integer(q), PACKAGE = "forecast") if (!is.null(opt.env$gamma.bold)) { .Call("updateTBATSGammaBold", gammaBold_s = opt.env$gamma.bold, kVector_s = opt.env$k.vector, gammaOne_s = gamma.one.v, gammaTwo_s = gamma.two.v) } .Call("updateTBATSGMatrix", g_s = opt.env$g, gammaBold_s = opt.env$gamma.bold, alpha_s = alpha, beta_s = beta.v, PACKAGE = "forecast") .Call("updateFMatrix", opt.env$F, small.phi, alpha, beta.v, opt.env$gamma.bold, ar.coefs, ma.coefs, tau, PACKAGE = "forecast") mat.transformed.y <- BoxCox(opt.env$y, box.cox.parameter) lambda <- attr(mat.transformed.y, "lambda") n <- ncol(opt.env$y) .Call("calcTBATSFaster", ys = 
mat.transformed.y, yHats = opt.env$y.hat, wTransposes = opt.env$w.transpose, Fs = opt.env$F, xs = opt.env$x, gs = opt.env$g, es = opt.env$e, xNought_s = x.nought, PACKAGE = "forecast") ## #### #################################################################### log.likelihood <- n * log(sum(opt.env$e ^ 2)) - 2 * (box.cox.parameter - 1) * sum(log(opt.env$y)) if (is.na(log.likelihood)) { # Not sure why this would occur return(Inf) } assign("D", (opt.env$F - opt.env$g %*% opt.env$w.transpose), envir = opt.env) if (checkAdmissibility(opt.env, box.cox = box.cox.parameter, small.phi = small.phi, ar.coefs = ar.coefs, ma.coefs = ma.coefs, tau = sum(seasonal.periods), bc.lower = bc.lower, bc.upper = bc.upper)) { return(log.likelihood) } else { return(Inf) } } calcLikelihoodNOTransformedTBATS <- function(param.vector, opt.env, x.nought, use.beta, use.small.phi, seasonal.periods, param.control, p=0, q=0, tau=0) { # The likelihood function without the Box-Cox Transformation # param vector should be as follows: alpha, beta, small.phi, gamma.vector, ar.coefs, ma.coefs # Put the components of the param.vector into meaningful individual variables paramz <- unParameteriseTBATS(param.vector, param.control) box.cox.parameter <- paramz$lambda alpha <- paramz$alpha beta.v <- paramz$beta small.phi <- paramz$small.phi gamma.one.v <- paramz$gamma.one.v gamma.two.v <- paramz$gamma.two.v if (!is.null(paramz$ar.coefs)) { p <- length(paramz$ar.coefs) ar.coefs <- matrix(paramz$ar.coefs, nrow = 1, ncol = p) } else { ar.coefs <- NULL p <- 0 } if (!is.null(paramz$ma.coefs)) { q <- length(paramz$ma.coefs) ma.coefs <- matrix(paramz$ma.coefs, nrow = 1, ncol = q) } else { ma.coefs <- NULL q <- 0 } .Call("updateWtransposeMatrix", wTranspose_s = opt.env$w.transpose, smallPhi_s = small.phi, tau_s = as.integer(tau), arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, p_s = as.integer(p), q_s = as.integer(q), PACKAGE = "forecast") if (!is.null(opt.env$gamma.bold)) { .Call("updateTBATSGammaBold", gammaBold_s = opt.env$gamma.bold, kVector_s = opt.env$k.vector, gammaOne_s = gamma.one.v, gammaTwo_s = gamma.two.v) } .Call("updateTBATSGMatrix", g_s = opt.env$g, gammaBold_s = opt.env$gamma.bold, alpha_s = alpha, beta_s = beta.v, PACKAGE = "forecast") .Call("updateFMatrix", opt.env$F, small.phi, alpha, beta.v, opt.env$gamma.bold, ar.coefs, ma.coefs, tau, PACKAGE = "forecast") n <- ncol(opt.env$y) .Call("calcTBATSFaster", ys = opt.env$y, yHats = opt.env$y.hat, wTransposes = opt.env$w.transpose, Fs = opt.env$F, xs = opt.env$x, gs = opt.env$g, es = opt.env$e, xNought_s = x.nought, PACKAGE = "forecast") ## #### #################################################################### log.likelihood <- n * log(sum(opt.env$e * opt.env$e)) if (is.na(log.likelihood)) { # Not sure why this would occur return(Inf) } assign("D", (opt.env$F - opt.env$g %*% opt.env$w.transpose), envir = opt.env) if (checkAdmissibility(opt.env = opt.env, box.cox = NULL, small.phi = small.phi, ar.coefs = ar.coefs, ma.coefs = ma.coefs, tau = tau)) { return(log.likelihood) } else { return(Inf) } } forecast/R/attach.R0000644000176200001440000000547713571554030013670 0ustar liggesusers.onAttach <- function(...) 
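# Shows a random start-up tip, but only in interactive sessions and only on
# roughly one load in five (the runif() guard below keeps it unobtrusive).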
{ if (!interactive() || stats::runif(1) > 0.2) return() tips <- c( "Use suppressPackageStartupMessages() to eliminate package startup messages.", "Stackoverflow is a great place to get help on R issues:\n http://stackoverflow.com/tags/forecasting+r.", "Crossvalidated is a great place to get help on forecasting issues:\n http://stats.stackexchange.com/tags/forecasting.", "Need help getting started? Try the online textbook FPP:\n http://OTexts.org/fpp2/", "Want to stay up-to-date? Read the Hyndsight blog:\n https://robjhyndman.com/hyndsight/", "Want to meet other forecasters? Join the International Institute of Forecasters:\n http://forecasters.org/" ) tip <- sample(tips, 1) msg <- paste("This is forecast", packageVersion("forecast"), "\n ", tip) packageStartupMessage(msg) } register_s3_method <- function(pkg, generic, class, fun = NULL) { stopifnot(is.character(pkg), length(pkg) == 1) stopifnot(is.character(generic), length(generic) == 1) stopifnot(is.character(class), length(class) == 1) if (is.null(fun)) { fun <- get(paste0(generic, ".", class), envir = parent.frame()) } else { stopifnot(is.function(fun)) } if (pkg %in% loadedNamespaces()) { registerS3method(generic, class, fun, envir = asNamespace(pkg)) } # Always register hook in case package is later unloaded & reloaded setHook( packageEvent(pkg, "onLoad"), function(...) { registerS3method(generic, class, fun, envir = asNamespace(pkg)) } ) } overwrite_s3_generic <- function(pkg, generic){ if (pkg %in% loadedNamespaces()) { assign(generic, get(generic, asNamespace(pkg)), envir = asNamespace("forecast")) } # Always register hook in case package is later unloaded & reloaded # setHook( # packageEvent(pkg, "onLoad"), # function(...) { # pkg_env <- asNamespace("forecast") # unlockBinding(generic, pkg_env) # assign(generic, get(generic, asNamespace(pkg)), envir = pkg_env) # lockBinding(generic, pkg_env) # } # ) } #' @importFrom utils methods .onLoad <- function(...) 
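# Registers the autolayer() methods with ggplot2, then re-registers every
# forecast() and accuracy() S3 method with fabletools, so the generics
# resolve correctly whichever of the packages is attached first.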
{ overwrite_s3_generic("ggplot2", "autolayer") register_s3_method("ggplot2", "autolayer", "ts") register_s3_method("ggplot2", "autolayer", "mts") register_s3_method("ggplot2", "autolayer", "msts") register_s3_method("ggplot2", "autolayer", "forecast") register_s3_method("ggplot2", "autolayer", "mforecast") methods <- strsplit(utils::.S3methods(forecast), ".", fixed = TRUE) overwrite_s3_generic("fabletools", "forecast") for(method in methods){ register_s3_method("fabletools", method[1], method[2]) } methods <- strsplit(utils::.S3methods(accuracy), ".", fixed = TRUE) overwrite_s3_generic("fabletools", "accuracy") for(method in methods){ register_s3_method("fabletools", method[1], method[2]) } invisible() } forecast/R/naive.R0000644000176200001440000002064313566371045013525 0ustar liggesusers# Random walk related forecasts # Based on lagged walks # lag=1 corresponds to standard random walk (i.e., naive forecast) # lag=m corresponds to seasonal naive method lagwalk <- function(y, lag=1, drift=FALSE, lambda=NULL, biasadj=FALSE) { if(!is.ts(y)){ y <- as.ts(y) } origy <- y if (!is.null(lambda)) { y <- BoxCox(y, lambda) lambda <- attr(y, "lambda") } m <- frequency(y) # Complete missing values with lagged values y_na <- which(is.na(y)) y_na <- y_na[y_na>lag] fits <- stats::lag(y, -lag) for(i in y_na){ if(is.na(fits)[i]){ fits[i] <- fits[i-lag] } } fitted <- ts(c(rep(NA, lag), head(fits, -lag)), start = start(y), frequency = m) if(drift){ fit <- summary(lm(y-fitted ~ 1, na.action=na.exclude)) b <- fit$coefficients[1,1] b.se <- fit$coefficients[1,2] sigma <- fit$sigma fitted <- fitted + b method <- "Lag walk with drift" } else{ b <- b.se <- 0 sigma <- sd(y-fitted, na.rm=TRUE) method <- "Lag walk" } res <- y - fitted if (!is.null(lambda)) { fitted <- InvBoxCox(fitted, lambda, biasadj, var(res)) attr(lambda, "biasadj") <- biasadj } model <- structure( list( x = origy, fitted = fitted, future = tail(fits, lag), residuals = res, method = method, series = deparse(substitute(y)), sigma2 = sigma^2, par = list(includedrift = drift, drift = b, drift.se = b.se, lag = lag), lambda = lambda, call = match.call() ), class = "lagwalk" ) } #' @export forecast.lagwalk <- function(object, h=10, level=c(80, 95), fan=FALSE, lambda=NULL, bootstrap=FALSE, npaths=5000, biasadj=FALSE, ...) 
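# Point forecasts repeat the last `lag` values of the series (plus any
# estimated drift), and interval widths grow with the number of completed
# lag "steps". Illustrative calls via the exported wrappers (both series
# ship with this package; cf. the roxygen examples below):
#
#   naive(gold, h = 50)   # lag = 1: ordinary random walk
#   snaive(wineind)       # lag = frequency(y): seasonal naive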
{ lag <- object$par$lag fullperiods <- (h-1)/lag+1 steps <- rep(1:fullperiods, rep(lag,fullperiods))[1:h] # Point forecasts fc <- rep(object$future, fullperiods)[1:h] + steps*object$par$drift # Intervals # Adjust prediction intervals to allow for drift coefficient standard error mse <- mean(object$residuals^2, na.rm=TRUE) se <- sqrt(mse*steps + (steps*object$par$drift.se)^2) if(fan) level <- seq(51,99,by=3) else { if(min(level) > 0 & max(level) < 1) level <- 100*level else if(min(level) < 0 | max(level) > 99.99) stop("Confidence limit out of range") } nconf <- length(level) if (bootstrap) # Compute prediction intervals using simulations { sim <- matrix(NA, nrow = npaths, ncol = h) for (i in 1:npaths) sim[i, ] <- simulate(object, nsim = h, bootstrap = TRUE, lambda = lambda) lower <- apply(sim, 2, quantile, 0.5 - level / 200, type = 8) upper <- apply(sim, 2, quantile, 0.5 + level / 200, type = 8) if (nconf > 1L) { lower <- t(lower) upper <- t(upper) } else { lower <- matrix(lower, ncol = 1) upper <- matrix(upper, ncol = 1) } } else { z <- qnorm(.5 + level/200) lower <- upper <- matrix(NA,nrow=h,ncol=nconf) for(i in 1:nconf) { lower[,i] <- fc - z[i]*se upper[,i] <- fc + z[i]*se } } if (!is.null(lambda)) { fc <- InvBoxCox(fc, lambda, biasadj, se^2) if(!bootstrap){ # Bootstrap intervals are already backtransformed upper <- InvBoxCox(upper, lambda) lower <- InvBoxCox(lower, lambda) } } # Set tsp m <- frequency(object$x) fc <- ts(fc,start=tsp(object$x)[2]+1/m,frequency=m) lower <- ts(lower,start=tsp(object$x)[2]+1/m,frequency=m) upper <- ts(upper,start=tsp(object$x)[2]+1/m,frequency=m) colnames(lower) <- colnames(upper) <- paste(level,"%",sep="") return(structure( list( method = object$method, model = object, lambda = lambda, x = object$x, fitted = fitted(object), residuals = residuals(object), series = object$series, mean = fc, level = level, lower = lower, upper = upper ), class = "forecast") ) } #' @export print.lagwalk <- function(x, ...) { cat(paste("Call:", deparse(x$call), "\n\n")) if (x$par$includedrift) { cat(paste("Drift: ", round(x$par$drift, 4), " (se ", round(x$par$drift.se, 4), ")\n", sep = "")) } cat(paste("Residual sd:", round(sqrt(x$sigma2), 4), "\n")) } #' @export fitted.lagwalk <- function(object, ...){ object$fitted } # Random walk #' @rdname naive #' #' @examples #' #' gold.fcast <- rwf(gold[1:60], h=50) #' plot(gold.fcast) #' #' @export rwf <- function(y, h=10, drift=FALSE, level=c(80, 95), fan=FALSE, lambda=NULL, biasadj=FALSE, ..., x=y) { fit <- lagwalk( x, lag = 1, drift = drift, lambda = lambda, biasadj = biasadj ) fc <- forecast(fit, h = h, level = level, fan = fan, lambda = fit$lambda, biasadj = biasadj, ...) fc$model$call <- match.call() fc$series <- deparse(substitute(y)) if (drift) { fc$method <- "Random walk with drift" } else { fc$method <- "Random walk" } return(fc) } #' Naive and Random Walk Forecasts #' #' \code{rwf()} returns forecasts and prediction intervals for a random walk #' with drift model applied to \code{y}. This is equivalent to an ARIMA(0,1,0) #' model with an optional drift coefficient. \code{naive()} is simply a wrapper #' to \code{rwf()} for simplicity. \code{snaive()} returns forecasts and #' prediction intervals from an ARIMA(0,0,0)(0,1,0)m model where m is the #' seasonal period. #' #' The random walk with drift model is \deqn{Y_t=c + Y_{t-1} + Z_t}{Y[t]=c + #' Y[t-1] + Z[t]} where \eqn{Z_t}{Z[t]} is a normal iid error. Forecasts are #' given by \deqn{Y_n(h)=ch+Y_n}{Y[n+h]=ch+Y[n]}. 
If there is no drift (as in #' \code{naive}), the drift parameter c=0. Forecast standard errors allow for #' uncertainty in estimating the drift parameter (unlike the corresponding #' forecasts obtained by fitting an ARIMA model directly). #' #' The seasonal naive model is \deqn{Y_t= Y_{t-m} + Z_t}{Y[t]=Y[t-m] + Z[t]} #' where \eqn{Z_t}{Z[t]} is a normal iid error. #' #' @aliases print.naive #' #' @param y a numeric vector or time series of class \code{ts} #' @param h Number of periods for forecasting #' @param drift Logical flag. If TRUE, fits a random walk with drift model. #' @param level Confidence levels for prediction intervals. #' @param fan If TRUE, level is set to seq(51,99,by=3). This is suitable for #' fan plots. #' @param x Deprecated. Included for backwards compatibility. #' @inheritParams forecast #' #' @return An object of class "\code{forecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{naive} or #' \code{snaive}. #' #' An object of class \code{"forecast"} is a list containing at least the #' following elements: \item{model}{A list containing information about the #' fitted model} \item{method}{The name of the forecasting method as a #' character string} \item{mean}{Point forecasts as a time series} #' \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper #' limits for prediction intervals} \item{level}{The confidence values #' associated with the prediction intervals} \item{x}{The original time series #' (either \code{object} itself or the time series used to create the model #' stored as \code{object}).} \item{residuals}{Residuals from the fitted model. #' That is x minus fitted values.} \item{fitted}{Fitted values (one-step #' forecasts)} #' @author Rob J Hyndman #' @seealso \code{\link{Arima}} #' @keywords ts #' @examples #' #' plot(naive(gold,h=50),include=200) #' #' @export naive <- function(y, h=10, level=c(80, 95), fan=FALSE, lambda=NULL, biasadj=FALSE, ..., x=y) { fc <- rwf( x, h = h, level = level, fan = fan, lambda = lambda, drift = FALSE, biasadj = biasadj, ... ) fc$model$call <- match.call() fc$series <- deparse(substitute(y)) fc$method <- "Naive method" return(fc) } #' @rdname naive #' #' @examples #' #' plot(snaive(wineind)) #' #' @export snaive <- function(y, h=2 * frequency(x), level=c(80, 95), fan=FALSE, lambda=NULL, biasadj=FALSE, ..., x=y) { fit <- lagwalk( x, lag = frequency(x), drift = FALSE, lambda = lambda, biasadj = biasadj ) fc <- forecast(fit, h = h, level = level, fan = fan, lambda = fit$lambda, biasadj = biasadj, ...) fc$model$call <- match.call() fc$series <- deparse(substitute(y)) fc$method <- "Seasonal naive method" return(fc) } forecast/R/mstl.R0000644000176200001440000005234713607742171013406 0ustar liggesusers#' Multiple seasonal decomposition #' #' Decompose a time series into seasonal, trend and remainder components. #' Seasonal components are estimated iteratively using STL. Multiple seasonal periods are #' allowed. The trend component is computed for the last iteration of STL. #' Non-seasonal time series are decomposed into trend and remainder only. #' In this case, \code{\link[stats]{supsmu}} is used to estimate the trend. #' Optionally, the time series may be Box-Cox transformed before decomposition. 
#' Unlike \code{\link[stats]{stl}}, \code{mstl} is completely automated. #' @param x Univariate time series of class \code{msts} or \code{ts}. #' @param iterate Number of iterations to use to refine the seasonal component. #' @param s.window Seasonal windows to be used in the decompositions. If scalar, #' the same value is used for all seasonal components. Otherwise, it should be a vector #' of the same length as the number of seasonal components. #' @param ... Other arguments are passed to \code{\link[stats]{stl}}. #' @inheritParams forecast #' #' @seealso \code{\link[stats]{stl}}, \code{\link[stats]{supsmu}} #' @examples #' library(ggplot2) #' mstl(taylor) %>% autoplot() #' mstl(AirPassengers, lambda = "auto") %>% autoplot() #' @export mstl <- function(x, lambda = NULL, iterate = 2, s.window = 13, ...) { # What is x? origx <- x n <- length(x) if ("msts" %in% class(x)) { msts <- attributes(x)$msts if (any(msts >= n / 2)) { warning("Dropping seasonal components with fewer than two full periods.") msts <- msts[msts < n / 2] x <- forecast::msts(x, seasonal.periods = msts) } msts <- sort(msts, decreasing = FALSE) } else if ("ts" %in% class(x)) { msts <- frequency(x) iterate <- 1L } else { x <- as.ts(x) msts <- 1L } # Check dimension if (!is.null(dim(x))) { if (NCOL(x) == 1L) { x <- x[, 1] } } # Replace missing values if necessary if (anyNA(x)) { x <- na.interp(x, lambda = lambda) } # Transform if necessary if (!is.null(lambda)) { x <- forecast::BoxCox(x, lambda = lambda) lambda <- attr(x, "lambda") } tt <- seq_len(n) # Now fit stl models with only one type of seasonality at a time if (msts[1L] > 1) { seas <- as.list(rep(0, length(msts))) deseas <- x if (length(s.window) == 1L) { s.window <- rep(s.window, length(msts)) } iterate <- pmax(1L, iterate) for (j in seq_len(iterate)) { for (i in seq_along(msts)) { deseas <- deseas + seas[[i]] fit <- stl(ts(deseas, frequency = msts[i]), s.window = s.window[i], ...) seas[[i]] <- msts(seasonal(fit), seasonal.periods = msts) attributes(seas[[i]]) <- attributes(x) deseas <- deseas - seas[[i]] } } trend <- msts(trendcycle(fit), seasonal.periods = msts) } else { msts <- NULL deseas <- x trend <- ts(stats::supsmu(seq_len(n), x)$y) } attributes(trend) <- attributes(x) # Put back NAs deseas[is.na(origx)] <- NA # Estimate remainder remainder <- deseas - trend # Package into matrix output <- cbind(origx, trend) if (!is.null(msts)) { for (i in seq_along(msts)) { output <- cbind(output, seas[[i]]) } } output <- cbind(output, remainder) colnames(output)[1L:2L] <- c("Data", "Trend") if (!is.null(msts)) { colnames(output)[2L + seq_along(msts)] <- paste0("Seasonal", round(msts, 2)) } colnames(output)[NCOL(output)] <- "Remainder" if (length(msts) > 1) { attr(output, "seasonal.periods") <- msts return(structure(output, seasonal.periods = msts, class = c("mstl", "mts", "msts", "ts") )) } return(structure(output, class = c("mstl", "mts", "ts"))) } #' @rdname autoplot.seas #' @export autoplot.mstl <- function(object, ...) { autoplot.mts(object, facets = TRUE, ylab = "", ...) } #' Forecasting using stl objects #' #' Forecasts of STL objects are obtained by applying a non-seasonal forecasting #' method to the seasonally adjusted data and re-seasonalizing using the last #' year of the seasonal component. #' #' \code{stlm} takes a time series \code{y}, applies an STL decomposition, and #' models the seasonally adjusted data using the model passed as #' \code{modelfunction} or specified using \code{method}. 
It returns an object
#' that includes the original STL decomposition and a time series model fitted
#' to the seasonally adjusted data. This object can be passed to
#' \code{forecast.stlm} for forecasting.
#'
#' \code{forecast.stlm} forecasts the seasonally adjusted data, then
#' re-seasonalizes the results by adding back the last year of the estimated
#' seasonal component.
#'
#' \code{stlf} combines \code{stlm} and \code{forecast.stlm}. It takes a
#' \code{ts} argument, applies an STL decomposition, models the seasonally
#' adjusted data, reseasonalizes, and returns the forecasts. However, it allows
#' more general forecasting methods to be specified via
#' \code{forecastfunction}.
#'
#' \code{forecast.stl} is similar to \code{stlf} except that it takes the STL
#' decomposition as the first argument, instead of the time series.
#'
#' Note that the prediction intervals ignore the uncertainty associated with
#' the seasonal component. They are computed using the prediction intervals
#' from the seasonally adjusted series, which are then reseasonalized using the
#' last year of the seasonal component.
#'
#' The time series model for the seasonally adjusted data can be specified in
#' \code{stlm} using either \code{method} or \code{modelfunction}. The
#' \code{method} argument provides a shorthand way of specifying
#' \code{modelfunction} for a few special cases. More generally,
#' \code{modelfunction} can be any function with first argument a \code{ts}
#' object, that returns an object that can be passed to \code{\link{forecast}}.
#' For example, \code{modelfunction=ar} uses the \code{\link{ar}} function
#' for modelling the seasonally adjusted series.
#'
#' The forecasting method for the seasonally adjusted data can be specified in
#' \code{stlf} and \code{forecast.stl} using either \code{method} or
#' \code{forecastfunction}. The \code{method} argument provides a shorthand way
#' of specifying \code{forecastfunction} for a few special cases. More
#' generally, \code{forecastfunction} can be any function with first argument a
#' \code{ts} object, and other arguments \code{h} and \code{level}, which
#' returns an object of class \code{\link{forecast}}. For example,
#' \code{forecastfunction=thetaf} uses the \code{\link{thetaf}} function for
#' forecasting the seasonally adjusted series.
#'
#' @param y A univariate numeric time series of class \code{ts}
#' @param object An object of class \code{stl} or \code{stlm}. Usually the
#' result of a call to \code{\link[stats]{stl}} or \code{stlm}.
#' @param method Method to use for forecasting the seasonally adjusted series.
#' @param modelfunction An alternative way of specifying the function for
#' modelling the seasonally adjusted series. If \code{modelfunction} is not
#' \code{NULL}, then \code{method} is ignored. Otherwise \code{method} is used
#' to specify the time series model to be used.
#' @param model Output from a previous call to \code{stlm}. If an \code{stlm}
#' model is passed, this same model is fitted to y without re-estimating any
#' parameters.
#' @param forecastfunction An alternative way of specifying the function for
#' forecasting the seasonally adjusted series. If \code{forecastfunction} is
#' not \code{NULL}, then \code{method} is ignored. Otherwise \code{method} is
#' used to specify the forecasting method to be used.
#' @param etsmodel The ets model specification passed to
#' \code{\link[forecast]{ets}}. By default it allows any non-seasonal model.
If #' \code{method!="ets"}, this argument is ignored. #' @param xreg Historical regressors to be used in #' \code{\link[forecast]{auto.arima}()} when \code{method=="arima"}. #' @param newxreg Future regressors to be used in #' \code{\link[forecast]{forecast.Arima}()}. #' @param h Number of periods for forecasting. #' @param level Confidence level for prediction intervals. #' @param fan If \code{TRUE}, level is set to seq(51,99,by=3). This is suitable #' for fan plots. #' @param s.window Either the character string ``periodic'' or the span (in #' lags) of the loess window for seasonal extraction. #' @param t.window A number to control the smoothness of the trend. See #' \code{\link[stats]{stl}} for details. #' @param robust If \code{TRUE}, robust fitting will used in the loess #' procedure within \code{\link[stats]{stl}}. #' @param allow.multiplicative.trend If TRUE, then ETS models with #' multiplicative trends are allowed. Otherwise, only additive or no trend ETS #' models are permitted. #' @param x Deprecated. Included for backwards compatibility. #' @param ... Other arguments passed to \code{forecast.stl}, #' \code{modelfunction} or \code{forecastfunction}. #' @inheritParams forecast #' #' @return \code{stlm} returns an object of class \code{stlm}. The other #' functions return objects of class \code{forecast}. #' #' There are many methods for working with \code{\link{forecast}} objects #' including \code{summary} to obtain and print a summary of the results, while #' \code{plot} produces a plot of the forecasts and prediction intervals. The #' generic accessor functions \code{fitted.values} and \code{residuals} extract #' useful features. #' @author Rob J Hyndman #' @seealso \code{\link[stats]{stl}}, \code{\link{forecast.ets}}, #' \code{\link{forecast.Arima}}. #' @keywords ts #' @examples #' #' tsmod <- stlm(USAccDeaths, modelfunction = ar) #' plot(forecast(tsmod, h = 36)) #' #' decomp <- stl(USAccDeaths, s.window = "periodic") #' plot(forecast(decomp)) #' @export forecast.stl <- function(object, method = c("ets", "arima", "naive", "rwdrift"), etsmodel = "ZZN", forecastfunction = NULL, h = frequency(object$time.series) * 2, level = c(80, 95), fan = FALSE, lambda = NULL, biasadj = NULL, xreg = NULL, newxreg = NULL, allow.multiplicative.trend = FALSE, ...) { method <- match.arg(method) if (is.null(forecastfunction)) { if (method != "arima" && (!is.null(xreg) || !is.null(newxreg))) { stop("xreg and newxreg arguments can only be used with ARIMA models") } if (method == "ets") { # Ensure non-seasonal model if (substr(etsmodel, 3, 3) != "N") { warning("The ETS model must be non-seasonal. I'm ignoring the seasonal component specified.") substr(etsmodel, 3, 3) <- "N" } forecastfunction <- function(x, h, level, ...) { fit <- ets(na.interp(x), model = etsmodel, allow.multiplicative.trend = allow.multiplicative.trend, ...) return(forecast(fit, h = h, level = level)) } } else if (method == "arima") { forecastfunction <- function(x, h, level, ...) { fit <- auto.arima(x, xreg = xreg, seasonal = FALSE, ...) return(forecast(fit, h = h, level = level, xreg = newxreg)) } } else if (method == "naive") { forecastfunction <- function(x, h, level, ...) { rwf(x, drift = FALSE, h = h, level = level, ...) } } else if (method == "rwdrift") { forecastfunction <- function(x, h, level, ...) { rwf(x, drift = TRUE, h = h, level = level, ...) 
} } } if (is.null(xreg) != is.null(newxreg)) { stop("xreg and newxreg arguments must both be supplied") } if (!is.null(newxreg)) { if (NROW(as.matrix(newxreg)) != h) { stop("newxreg should have the same number of rows as the forecast horizon h") } } if (fan) { level <- seq(51, 99, by = 3) } if ("mstl" %in% class(object)) { seasonal.periods <- attr(object, "seasonal.periods") if (is.null(seasonal.periods)) { seasonal.periods <- frequency(object) } seascomp <- matrix(0, ncol = length(seasonal.periods), nrow = h) for (i in seq_along(seasonal.periods)) { mp <- round(seasonal.periods[i], 2) n <- NROW(object) colname <- paste0("Seasonal", mp) seascomp[, i] <- rep(object[n - rev(seq_len(mp)) + 1, colname], trunc(1 + (h - 1) / mp))[seq_len(h)] } lastseas <- rowSums(seascomp) xdata <- object[, "Data"] seascols <- grep("Seasonal", colnames(object)) allseas <- rowSums(object[, seascols, drop = FALSE]) series <- NULL } else if ("stl" %in% class(object)) { m <- frequency(object$time.series) n <- NROW(object$time.series) lastseas <- rep(seasonal(object)[n - (m:1) + 1], trunc(1 + (h - 1) / m))[1:h] xdata <- ts(rowSums(object$time.series)) tsp(xdata) <- tsp(object$time.series) allseas <- seasonal(object) series <- deparse(object$call$x) } else { stop("Unknown object class") } # De-seasonalize x.sa <- seasadj(object) # Forecast fcast <- forecastfunction(x.sa, h = h, level = level, ...) # Reseasonalize fcast$mean <- fcast$mean + lastseas fcast$upper <- fcast$upper + lastseas fcast$lower <- fcast$lower + lastseas fcast$x <- xdata fcast$method <- paste("STL + ", fcast$method) fcast$series <- series # fcast$seasonal <- ts(lastseas[1:m],frequency=m,start=tsp(object$time.series)[2]-1+1/m) fcast$fitted <- fitted(fcast) + allseas fcast$residuals <- fcast$x - fcast$fitted if (!is.null(lambda)) { fcast$x <- InvBoxCox(fcast$x, lambda) fcast$fitted <- InvBoxCox(fcast$fitted, lambda) fcast$mean <- InvBoxCox(fcast$mean, lambda, biasadj, fcast) fcast$lower <- InvBoxCox(fcast$lower, lambda) fcast$upper <- InvBoxCox(fcast$upper, lambda) attr(lambda, "biasadj") <- biasadj fcast$lambda <- lambda } return(fcast) } #' @export forecast.mstl <- function(object, method = c("ets", "arima", "naive", "rwdrift"), etsmodel = "ZZN", forecastfunction = NULL, h = frequency(object) * 2, level = c(80, 95), fan = FALSE, lambda = NULL, biasadj = NULL, xreg = NULL, newxreg = NULL, allow.multiplicative.trend = FALSE, ...) { forecast.stl( object, method = method, etsmodel = etsmodel, forecastfunction = forecastfunction, h = h, level = level, fan = fan, lambda = lambda, biasadj = biasadj, xreg = xreg, newxreg = newxreg, allow.multiplicative.trend = allow.multiplicative.trend, ... ) } # Function takes time series, does STL decomposition, and fits a model to seasonally adjusted series # But it does not forecast. Instead, the result can be passed to forecast(). #' @rdname forecast.stl #' @export stlm <- function(y, s.window = 13, robust = FALSE, method = c("ets", "arima"), modelfunction = NULL, model = NULL, etsmodel = "ZZN", lambda = NULL, biasadj = FALSE, xreg = NULL, allow.multiplicative.trend = FALSE, x = y, ...) 
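# stlm() decomposes with mstl(), fits the chosen model to the seasonally
# adjusted series, and stores both; forecast.stlm() later re-seasonalizes.
# Illustrative use (USAccDeaths ships with base R; "arima" is one of the
# two built-in method choices):
#
#   fit <- stlm(USAccDeaths, method = "arima")
#   plot(forecast(fit, h = 24))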
{ method <- match.arg(method) # Check univariate if (NCOL(x) > 1L) { stop("y must be a univariate time series") } else { if (!is.null(ncol(x))) { if (ncol(x) == 1L) { # Probably redundant check x <- x[, 1L] } } } # Check x is a seasonal time series tspx <- tsp(x) if (is.null(tspx)) { stop("y is not a seasonal ts object") } else if (tspx[3] <= 1L) { stop("y is not a seasonal ts object") } # Transform data if necessary origx <- x if (!is.null(lambda)) { x <- BoxCox(x, lambda) lambda <- attr(x, "lambda") } # Do STL decomposition stld <- mstl(x, s.window = s.window, robust = robust) if (!is.null(model)) { if (inherits(model$model, "ets")) { modelfunction <- function(x, ...) { return(ets(x, model = model$model, use.initial.values = TRUE, ...)) } } else if (inherits(model$model, "Arima")) { modelfunction <- function(x, ...) { return(Arima(x, model = model$model, xreg = xreg, ...)) } } else if (!is.null(model$modelfunction)) { if ("model" %in% names(formals(model$modelfunction))) { modelfunction <- function(x, ...) { return(model$modelfunction(x, model = model$model, ...)) } } } if (is.null(modelfunction)) { stop("Unknown model type") } } # Construct modelfunction if not passed as an argument else if (is.null(modelfunction)) { if (method != "arima" && !is.null(xreg)) { stop("xreg arguments can only be used with ARIMA models") } if (method == "ets") { # Ensure non-seasonal model if (substr(etsmodel, 3, 3) != "N") { warning("The ETS model must be non-seasonal. I'm ignoring the seasonal component specified.") substr(etsmodel, 3, 3) <- "N" } modelfunction <- function(x, ...) { return(ets( x, model = etsmodel, allow.multiplicative.trend = allow.multiplicative.trend, ... )) } } else if (method == "arima") { modelfunction <- function(x, ...) { return(auto.arima(x, xreg = xreg, seasonal = FALSE, ...)) } } } # De-seasonalize x.sa <- seasadj(stld) # Model seasonally adjusted data fit <- modelfunction(x.sa, ...) fit$x <- x.sa # Fitted values and residuals seascols <- grep("Seasonal", colnames(stld)) allseas <- rowSums(stld[, seascols, drop = FALSE]) fits <- fitted(fit) + allseas res <- residuals(fit) if (!is.null(lambda)) { fits <- InvBoxCox(fits, lambda, biasadj, var(res)) attr(lambda, "biasadj") <- biasadj } return(structure(list( stl = stld, model = fit, modelfunction = modelfunction, lambda = lambda, x = origx, series = deparse(substitute(y)), m = frequency(origx), fitted = fits, residuals = res ), class = "stlm")) } #' @rdname forecast.stl #' @export forecast.stlm <- function(object, h = 2 * object$m, level = c(80, 95), fan = FALSE, lambda = object$lambda, biasadj = NULL, newxreg = NULL, allow.multiplicative.trend = FALSE, ...) { if (!is.null(newxreg)) { if (nrow(as.matrix(newxreg)) != h) { stop("newxreg should have the same number of rows as the forecast horizon h") } } if (fan) { level <- seq(51, 99, by = 3) } # Forecast seasonally adjusted series if (is.element("Arima", class(object$model)) && !is.null(newxreg)) { fcast <- forecast(object$model, h = h, level = level, xreg = newxreg, ...) } else if (is.element("ets", class(object$model))) { fcast <- forecast( object$model, h = h, level = level, allow.multiplicative.trend = allow.multiplicative.trend, ... ) } else { fcast <- forecast(object$model, h = h, level = level, ...) 
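# Models fitted via a user-supplied modelfunction fall through to this
# branch and are forecast by their own forecast() method.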
} # In-case forecast method uses different horizon length (such as using xregs) h <- NROW(fcast$mean) # Forecast seasonal series with seasonal naive seasonal.periods <- attributes(object$stl)$seasonal.periods if (is.null(seasonal.periods)) { seasonal.periods <- frequency(object$stl) } seascomp <- matrix(0, ncol = length(seasonal.periods), nrow = h) for (i in seq_along(seasonal.periods)) { mp <- seasonal.periods[i] n <- NROW(object$stl) colname <- paste0("Seasonal", round(mp, 2)) seascomp[, i] <- rep(object$stl[n - rev(seq_len(mp)) + 1, colname], trunc(1 + (h - 1) / mp))[seq_len(h)] } lastseas <- rowSums(seascomp) xdata <- object$stl[, "Data"] seascols <- grep("Seasonal", colnames(object$stl)) allseas <- rowSums(object$stl[, seascols, drop = FALSE]) series <- NULL # m <- frequency(object$stl$time.series) n <- NROW(xdata) # Reseasonalize fcast$mean <- fcast$mean + lastseas fcast$upper <- fcast$upper + lastseas fcast$lower <- fcast$lower + lastseas fcast$method <- paste("STL + ", fcast$method) fcast$series <- object$series # fcast$seasonal <- ts(lastseas[1:m],frequency=m,start=tsp(object$stl$time.series)[2]-1+1/m) # fcast$residuals <- residuals() fcast$fitted <- fitted(fcast) + allseas fcast$residuals <- residuals(fcast) if (!is.null(lambda)) { fcast$fitted <- InvBoxCox(fcast$fitted, lambda) fcast$mean <- InvBoxCox(fcast$mean, lambda, biasadj, fcast) fcast$lower <- InvBoxCox(fcast$lower, lambda) fcast$upper <- InvBoxCox(fcast$upper, lambda) attr(lambda, "biasadj") <- biasadj fcast$lambda <- lambda } fcast$x <- object$x return(fcast) } #' @rdname forecast.stl #' #' @examples #' #' plot(stlf(AirPassengers, lambda = 0)) #' @export stlf <- function(y, h = frequency(x) * 2, s.window = 13, t.window = NULL, robust = FALSE, lambda = NULL, biasadj = FALSE, x = y, ...) { seriesname <- deparse(substitute(y)) # Check univariate if (NCOL(x) > 1L) { stop("y must be a univariate time series") } else { if (!is.null(ncol(x))) { if (ncol(x) == 1L) { # Probably redundant check x <- x[, 1L] } } } # Check x is a seasonal time series tspx <- tsp(x) if (is.null(tspx)) { stop("y is not a seasonal ts object") } else if (tspx[3] <= 1L) { stop("y is not a seasonal ts object") } if (!is.null(lambda)) { x <- BoxCox(x, lambda) lambda <- attr(x, "lambda") } fit <- mstl(x, s.window = s.window, t.window = t.window, robust = robust) fcast <- forecast(fit, h = h, lambda = lambda, biasadj = biasadj, ...) 
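# Re-seasonalizing and any Box-Cox back-transformation are handled inside
# the forecast() call above (via forecast.mstl), which is why the older
# inverse-transform block below remains commented out.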
# if (!is.null(lambda)) # { # fcast$x <- origx # fcast$fitted <- InvBoxCox(fcast$fitted, lambda) # fcast$mean <- InvBoxCox(fcast$mean, lambda) # fcast$lower <- InvBoxCox(fcast$lower, lambda) # fcast$upper <- InvBoxCox(fcast$upper, lambda) # fcast$lambda <- lambda # } fcast$series <- seriesname return(fcast) } #' @rdname is.ets #' @export is.stlm <- function(x) { inherits(x, "stlm") } forecast/R/checkAdmissibility.R0000644000176200001440000000235113553673010016214 0ustar liggesusers# Author: srazbash and Rob J Hyndman ############################################################################### checkAdmissibility <- function(opt.env, box.cox=NULL, small.phi=NULL, ar.coefs=NULL, ma.coefs=NULL, tau=0, bc.lower=0, bc.upper=1) { # Check the range of the Box-Cox parameter if (!is.null(box.cox)) { if ((box.cox <= bc.lower) | (box.cox >= bc.upper)) { return(FALSE) } } # Check the range of small.phi if (!is.null(small.phi)) { if (((small.phi < .8) | (small.phi > 1))) { return(FALSE) } } # Check AR part for stationarity if (!is.null(ar.coefs)) { arlags <- which(abs(ar.coefs) > 1e-08) if (length(arlags) > 0L) { p <- max(arlags) if (min(Mod(polyroot(c(1, -ar.coefs[1L:p])))) < 1 + 1e-2) { return(FALSE) } } } # Check MA part for invertibility if (!is.null(ma.coefs)) { malags <- which(abs(ma.coefs) > 1e-08) if (length(malags) > 0L) { q <- max(malags) if (min(Mod(polyroot(c(1, ma.coefs[1L:q])))) < 1 + 1e-2) { return(FALSE) } } } # Check the eigen values of the D matrix D.eigen.values <- eigen(opt.env$D, symmetric = FALSE, only.values = TRUE)$values return(all(abs(D.eigen.values) < 1 + 1e-2)) } forecast/R/graph.R0000644000176200001440000001716013553673010013515 0ustar liggesusers### Time series graphics and transformations #' Time series display #' #' Plots a time series along with its acf and either its pacf, lagged #' scatterplot or spectrum. #' #' \code{ggtsdisplay} will produce the equivalent plot using ggplot graphics. #' #' @param x a numeric vector or time series of class \code{ts}. #' @param plot.type type of plot to include in lower right corner. #' @param points logical flag indicating whether to show the individual points #' or not in the time plot. #' @param smooth logical flag indicating whether to show a smooth loess curve #' superimposed on the time plot. #' @param ci.type type of confidence limits for ACF that is passed to #' \code{\link[stats]{acf}}. Should the confidence limits assume a white noise #' input or for lag \eqn{k} an MA(\eqn{k-1}) input? #' @param lag.max the maximum lag to plot for the acf and pacf. A suitable #' value is selected by default if the argument is missing. #' @param na.action function to handle missing values in acf, pacf and spectrum #' calculations. The default is \code{\link[stats]{na.contiguous}}. Useful #' alternatives are \code{\link[stats]{na.pass}} and \code{\link{na.interp}}. #' @param theme Adds a ggplot element to each plot, typically a theme. #' @param main Main title. #' @param xlab X-axis label. #' @param ylab Y-axis label. #' @param pch Plotting character. #' @param cex Character size. #' @param \dots additional arguments to \code{\link[stats]{acf}}. #' @return None. #' @author Rob J Hyndman #' @seealso \code{\link[stats]{plot.ts}}, \code{\link{Acf}}, #' \code{\link[stats]{spec.ar}} #' @references Hyndman and Athanasopoulos (2018) \emph{Forecasting: principles #' and practice}, 2nd edition, OTexts: Melbourne, Australia. 
#' \url{https://OTexts.org/fpp2/} #' @keywords ts #' @examples #' tsdisplay(diff(WWWusage)) #' ggtsdisplay(USAccDeaths, plot.type="scatter") #' #' @export tsdisplay <- function(x, plot.type=c("partial", "histogram", "scatter", "spectrum"), points=TRUE, ci.type=c("white", "ma"), lag.max, na.action=na.contiguous, main=NULL, xlab="", ylab="", pch=1, cex=0.5, ...) { plot.type <- match.arg(plot.type) ci.type <- match.arg(ci.type) def.par <- par(no.readonly = TRUE) # save default, for resetting... nf <- layout(matrix(c(1, 1, 2, 3), 2, 2, byrow = TRUE)) if (is.null(main)) { main <- deparse(substitute(x)) } if (!is.ts(x)) { x <- ts(x) } if (missing(lag.max)) { lag.max <- round(min(max(10 * log10(length(x)), 3 * frequency(x)), length(x) / 3)) } plot.ts(x, main = main, ylab = ylab, xlab = xlab, ylim = range(x, na.rm = TRUE), ...) if (points) { points(x, pch = pch, cex = cex, ...) } ylim <- c(-1, 1) * 3 / sqrt(length(x)) junk1 <- stats::acf(c(x), lag.max = lag.max, plot = FALSE, na.action = na.action) junk1$acf[1, 1, 1] <- 0 if (ci.type == "ma") { ylim <- range(ylim, 0.66 * ylim * max(sqrt(cumsum(c(1, 2 * junk1$acf[-1, 1, 1] ^ 2))))) } ylim <- range(ylim, junk1$acf) if (plot.type == "partial") { junk2 <- stats::pacf(c(x), lag.max = lag.max, plot = FALSE, na.action = na.action) ylim <- range(ylim, junk2$acf) } oldpar <- par(mar = c(5, 4.1, 1.5, 2)) plot(junk1, ylim = ylim, xlim = c(1, lag.max), ylab = "ACF", main = "", ci.type = ci.type, ...) if (plot.type == "scatter") { n <- length(x) plot(x[1:(n - 1)], x[2:n], xlab = expression(Y[t - 1]), ylab = expression(Y[t]), ...) } else if (plot.type == "spectrum") { spec.ar(x, main = "", na.action = na.action) } else if (plot.type == "histogram") { graphics::hist(x, breaks = "FD", main = "", xlab = main) } else { plot(junk2, ylim = ylim, xlim = c(1, lag.max), ylab = "PACF", main = "", ...) } par(def.par) layout(1) invisible() } #' Seasonal plot #' #' Plots a seasonal plot as described in Hyndman and Athanasopoulos (2014, #' chapter 2). This is like a time plot except that the data are plotted #' against the seasons in separate years. #' #' @param x a numeric vector or time series of class \code{ts}. #' @param s seasonal frequency of x #' @param season.labels Labels for each season in the "year" #' @param year.labels Logical flag indicating whether labels for each year of #' data should be plotted on the right. #' @param year.labels.left Logical flag indicating whether labels for each year #' of data should be plotted on the left. #' @param type plot type (as for \code{\link[graphics]{plot}}). Not yet #' supported for ggseasonplot. #' @param main Main title. #' @param xlab X-axis label. #' @param ylab Y-axis label. #' @param col Colour #' @param labelgap Distance between year labels and plotted lines #' @param \dots additional arguments to \code{\link[graphics]{plot}}. #' @return None. #' @author Rob J Hyndman & Mitchell O'Hara-Wild #' @seealso \code{\link[stats]{monthplot}} #' @references Hyndman and Athanasopoulos (2018) \emph{Forecasting: principles #' and practice}, 2nd edition, OTexts: Melbourne, Australia. #' \url{https://OTexts.org/fpp2/} #' @keywords ts #' @examples #' seasonplot(AirPassengers, col=rainbow(12), year.labels=TRUE) #' #' @export seasonplot <- function(x, s, season.labels=NULL, year.labels=FALSE, year.labels.left=FALSE, type="o", main, xlab=NULL, ylab="", col=1, labelgap=0.1, ...) 
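# Usage sketch (added comment, not part of the original source): the label
# logic in the body below maps common frequencies to names and otherwise falls
# back to numeric season labels, e.g.
#   seasonplot(ts(rnorm(3 * 52), frequency = 52))  # weekly data, weeks labelled 1..52
#   seasonplot(ts(rnorm(4 * 48), frequency = 48))  # half-hourly, labelled 0, 0.5, ..., 23.5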
{ if (missing(main)) { main <- paste("Seasonal plot:", deparse(substitute(x))) } # Check data are seasonal and convert to integer seasonality if (missing(s)) { s <- round(frequency(x)) } if (s <= 1) { stop("Data are not seasonal") } tspx <- tsp(x) x <- ts(x, start = tspx[1], frequency = s) # Pad series tsx <- x startperiod <- round(cycle(x)[1]) if (startperiod > 1) { x <- c(rep(NA, startperiod - 1), x) } x <- c(x, rep(NA, s - length(x) %% s)) Season <- rep(c(1:s, NA), length(x) / s) xnew <- rep(NA, length(x)) xnew[!is.na(Season)] <- x if (s == 12) { labs <- month.abb xLab <- "Month" } else if (s == 4) { labs <- paste("Q", 1:4, sep = "") xLab <- "Quarter" } else if (s == 7) { labs <- c("Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat") xLab <- "Day" } else if (s == 52) { labs <- 1:s xLab <- "Week" } else if (s == 24) { labs <- 0:(s - 1) xLab <- "Hour" } else if (s == 48) { labs <- seq(0, 23.5, by = 0.5) xLab <- "Half-hour" } else { if (s < 20) { labs <- 1:s } else { labs <- NULL } xLab <- "Season" } if (is.null(xlab)) { xlab <- xLab } if (is.null(season.labels)) { season.labels <- labs } if (year.labels) { xlim <- c(1 - labelgap, s + 0.4 + labelgap) } else { xlim <- c(1 - labelgap, s) } if (year.labels.left) { xlim[1] <- 0.4 - labelgap } plot(Season, xnew, xaxt = "n", xlab = xlab, type = type, ylab = ylab, main = main, xlim = xlim, col = 0, ...) nn <- length(Season) / s col <- rep(col, nn)[1:nn] for (i in 0:(nn - 1)) lines(Season[(i * (s + 1) + 1):((s + 1) * (i + 1))], xnew[(i * (s + 1) + 1):((s + 1) * (i + 1))], type = type, col = col[i + 1], ...) if (year.labels) { idx <- which(Season[!is.na(xnew)] == s) year <- round(time(tsx)[idx], nchar(s)) text(x = rep(s + labelgap, length(year)), y = tsx[idx], labels = paste(c(trunc(year))), adj = 0, ..., col = col[1:length(idx)]) } if (year.labels.left) { idx <- which(Season[!is.na(xnew)] == 1) year <- round(time(tsx)[idx], nchar(s)) if (min(idx) > 1) { # First year starts after season 1n col <- col[-1] } text(x = rep(1 - labelgap, length(year)), y = tsx[idx], labels = paste(c(trunc(year))), adj = 1, ..., col = col[1:length(idx)]) } if (is.null(labs)) { axis(1, ...) } else { axis(1, labels = season.labels, at = 1:s, ...) } } forecast/R/calendar.R0000644000176200001440000001335613553673010014170 0ustar liggesusers## Add as.Date.timeDate to S3 method table #' @export as.Date.timeDate <- timeDate::as.Date.timeDate #' Number of trading days in each season #' #' Returns number of trading days in each month or quarter of the observed time #' period in a major financial center. #' #' Useful for trading days length adjustments. More on how to define "business #' days", please refer to \code{\link[timeDate]{isBizday}}. #' #' @param x Monthly or quarterly time series #' @param FinCenter Major financial center. 
#' @return Time series
#' @author Earo Wang
#' @seealso \code{\link[forecast]{monthdays}}
#' @keywords ts
#' @examples
#'
#' x <- ts(rnorm(30), start = c(2013, 2), frequency = 12)
#' bizdays(x, FinCenter = "New York")
#'
#' @export
bizdays <- function(x, FinCenter = c(
                      "New York", "London", "NERC",
                      "Tokyo", "Zurich"
                    )) {
  # Return the number of trading days corresponding to the input ts
  #
  # Args:
  #   x: a ts object
  #   FinCenter: inherits holiday calendar from "timeDate" package
  #
  # Returns:
  #   A ts object containing the number of trading days
  if (is.null(tsp(x))) {
    stop("We cannot handle a time series without time attributes.")
  }
  # Convert tsp to date
  freq <- frequency(x)
  years <- start(x)[1L]:end(x)[1L]
  # Grab the holidays from years and financial center
  FinCenter <- match.arg(FinCenter)
  if (FinCenter == "New York") {
    holidays <- timeDate::holidayNYSE(years)
  } else if (FinCenter == "London") {
    holidays <- timeDate::holidayLONDON(years)
  } else if (FinCenter == "NERC") {
    holidays <- timeDate::holidayNERC(years)
  } else if (FinCenter == "Tokyo") {
    # NB: timeDate::holidayTSX() is the Toronto Stock Exchange calendar;
    # timeDate provides no Tokyo calendar, so this branch is only approximate.
    holidays <- timeDate::holidayTSX(years)
  } else if (FinCenter == "Zurich") {
    holidays <- timeDate::holidayZURICH(years)
  }
  if (freq == 12L) { # monthly data
    date <- zoo::as.Date(time(x))
    start <- date[1L]
    end <- seq(date[length(date)], length = 2L, by = "month")[2L] - 1L
    days.len <- timeDate::timeSequence(from = start, to = end)
    # Grab business days
    biz <- days.len[timeDate::isBizday(days.len, holidays = holidays)]
    bizdays <- format(biz, format = "%Y-%m")
  } else if (freq == 4L) { # Quarterly data
    date <- zoo::as.Date(time(x))
    start <- date[1L]
    end <- seq(date[length(date)], length = 2L, by = "3 month")[2L] - 1L
    days.len <- timeDate::timeSequence(from = start, to = end)
    # Grab business days
    biz <- days.len[timeDate::isBizday(days.len, holidays = holidays)]
    bizdays <- format(zoo::as.yearqtr(biz), format = "%Y Qtr%q")
  }
  # else if (freq == 52L) { # Weekly data
  #   start <- paste0(start(x)[1L], "-01-01")
  #   start <- as.Date(start) + start(x)[2L] * 7L
  #   end <- start + length(time(x)) * 7L
  #   days.len <- as.timeDate(seq(start, end, by = "days"), FinCenter = FinCenter)
  #   biz <- days.len[isBizday(days.len,
  #                            holidays = unique(format(days.len, "%Y")))]
  #   bizdays <- format(biz, format = "%Y Wk%W")
  # }
  num.days <- table(bizdays)
  out <- ts(num.days, start = tsp(x)[1L], frequency = freq)
  return(out)
}

#' Easter holidays in each season
#'
#' Returns a vector of 0's and 1's or fractional results if Easter spans March
#' and April in the observed time period. Easter is defined as the days from
#' Good Friday to Easter Sunday inclusively, plus optionally Easter Monday if
#' \code{easter.mon=TRUE}.
#'
#' Useful for adjusting calendar effects.
#'
#' @param x Monthly or quarterly time series
#' @param easter.mon If TRUE, the length of Easter holidays includes Easter
#' Monday.
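#' @details Fractional values arise when the Easter break straddles the end of
#' March. For example, with \code{easter.mon = FALSE} the break spans the three
#' days from Good Friday to Easter Sunday, so if Good Friday falls on 30 March,
#' two of the three days lie in March and the series takes the value 0.67 in
#' March and 0.33 in April.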
#' @return Time series #' @author Earo Wang #' @keywords ts #' @examples #' #' easter(wineind, easter.mon = TRUE) #' #' @export easter <- function(x, easter.mon = FALSE) { # Return a vector of 0's and 1's for easter holidays # # Args: # x: monthly, quarterly or weekly data # easter.mon: An option including easter.mon if (is.null(tsp(x))) { stop("We cannot handle a time series without time attributes.") } freq <- frequency(x) date <- zoo::as.Date(time(x)) start.yr <- start(x)[1L] end.yr <- end(x)[1L] yr.span <- seq(start.yr, end.yr) gd.fri0 <- Easter(yr.span, -2L) if (easter.mon) { easter0 <- Easter(yr.span, 1L) } else { easter0 <- Easter(yr.span) } if (freq == 12L) { fmat <- "%Y-%m" yr.mon <- format(date, format = fmat) gd.fri <- format(gd.fri0, format = fmat) # good fri easter <- format(easter0, format = fmat) # easter mon } else if (freq == 4L) { fmat <- "%Y-%q" yr.mon <- format(zoo::as.yearqtr(date), format = fmat) # yr.qtr gd.fri <- format(zoo::as.yearqtr(gd.fri0), format = fmat) easter <- format(zoo::as.yearqtr(easter0), format = fmat) } span <- cbind(gd.fri, easter) # the span of easter holidays hdays <- unlist(apply(span, 1, unique)) dummies <- ifelse(yr.mon %in% hdays, 1L, 0L) # Allow fractional results denominator <- (easter0 - gd.fri0 + 1L)[1L] last.mar <- as.timeDate(paste0(yr.span, "-03-31")) dif <- difftimeDate(last.mar, gd.fri0, units = "days") + 1L # Remove easter out of date range if (date[1L] > as.character(last.mar[1L])) { dif <- dif[-1L] } if (date[length(yr.mon)] < as.character(last.mar[length(last.mar)])) { dif <- dif[-length(dif)] } replace <- dif > denominator | dif <= 0L dif[replace] <- denominator # Easter in the same month # Start to insert the remaining part falling in Apr index <- which(dif != denominator) if (length(index) != 0L) { values <- denominator - dif[index] new.index <- index[1L] for (i in 1L:length(index)) { dif <- append(dif, values = values[i], new.index) new.index <- index[i + 1L] + i } dummies[dummies == 1L] <- round(dif / unclass(denominator), digits = 2) } out <- ts(dummies, start = tsp(x)[1L], frequency = freq) return(out) } forecast/R/forecast.varest.R0000644000176200001440000000355313553673010015526 0ustar liggesusers# forecast function for varest, just a wrapper for predict.varest #' @export forecast.varest <- function(object, h=10, level=c(80, 95), fan=FALSE, ...) { out <- list(model = object, forecast = vector("list", object$K)) # Get residuals and fitted values and fix the times tspx <- tsp(object$y) vres <- residuals(object) vfits <- fitted(object) method <- paste0("VAR(", object$p, ")") # Add forecasts with prediction intervals # out$mean <- out$lower <- out$upper <- vector("list",object$K) for (i in seq_along(level)) { pr <- predict(object, n.ahead = h, ci = level[i] / 100, ...) 
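    # Added note (not part of the original source): predict.varest() returns
    # one fcst matrix per endogenous variable; the loop below collects the
    # lower and upper bounds for each confidence level. Illustrative usage,
    # assuming the vars package is installed:
    #   fit <- vars::VAR(EuStockMarkets[, 1:2], p = 2)
    #   fc <- forecast(fit, h = 10)  # an "mforecast" holding one forecast per series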
for (j in 1:object$K) { out$forecast[[j]]$lower <- cbind(out$forecast[[j]]$lower, pr$fcst[[j]][, "lower"]) out$forecast[[j]]$upper <- cbind(out$forecast[[j]]$upper, pr$fcst[[j]][, "upper"]) } } j <- 1 for (fcast in out$forecast) { fcast$mean <- ts(pr$fcst[[j]][, "fcst"], frequency = tspx[3], start = tspx[2] + 1 / tspx[3]) fcast$lower <- ts(fcast$lower, frequency = tspx[3], start = tspx[2] + 1 / tspx[3]) fcast$upper <- ts(fcast$upper, frequency = tspx[3], start = tspx[2] + 1 / tspx[3]) colnames(fcast$lower) <- colnames(fcast$upper) <- paste0(level, "%") fcast$residuals <- fcast$fitted <- ts(rep(NA, nrow(object$y))) fcast$residuals[((1 - nrow(vres)):0) + length(fcast$residuals)] <- vres[, j] fcast$fitted[((1 - nrow(vfits)):0) + length(fcast$fitted)] <- vfits[, j] fcast$method <- method fcast$level <- level fcast$x <- object$y[, j] fcast$series <- colnames(object$y)[j] tsp(fcast$residuals) <- tsp(fcast$fitted) <- tspx fcast <- structure(fcast, class = "forecast") out$forecast[[j]] <- fcast j <- j + 1 } names(out$forecast) <- names(pr$fcst) out$method <- rep(method, object$K) names(out$forecast) <- names(out$method) <- names(pr$fcst) return(structure(out, class = "mforecast")) } forecast/R/armaroots.R0000644000176200001440000001104613553673010014420 0ustar liggesusers# Functions to plot the roots of an ARIMA model # Compute AR roots arroots <- function(object) { if (!any(is.element(class(object), c("Arima", "ar")))) { stop("object must be of class Arima or ar") } if (is.element("Arima", class(object))) { parvec <- object$model$phi } else { parvec <- object$ar } if (length(parvec) > 0) { last.nonzero <- max(which(abs(parvec) > 1e-08)) if (last.nonzero > 0) { return(structure(list( roots = polyroot(c(1, -parvec[1:last.nonzero])), type = "AR" ), class = "armaroots")) } } return(structure(list(roots = numeric(0), type = "AR"), class = "armaroots")) } # Compute MA roots maroots <- function(object) { if (!is.element("Arima", class(object))) { stop("object must be of class Arima") } parvec <- object$model$theta if (length(parvec) > 0) { last.nonzero <- max(which(abs(parvec) > 1e-08)) if (last.nonzero > 0) { return(structure(list( roots = polyroot(c(1, parvec[1:last.nonzero])), type = "MA" ), class = "armaroots")) } } return(structure(list(roots = numeric(0), type = "MA"), class = "armaroots")) } plot.armaroots <- function(x, xlab, ylab, main, ...) { if (missing(main)) { main <- paste("Inverse", x$type, "roots") } oldpar <- par(pty = "s") on.exit(par(oldpar)) plot( c(-1, 1), c(-1, 1), xlab = xlab, ylab = ylab, type = "n", bty = "n", xaxt = "n", yaxt = "n", main = main, ... ) axis(1, at = c(-1, 0, 1), line = 0.5, tck = -0.025) axis(2, at = c(-1, 0, 1), labels = c("-i", "0", "i"), line = 0.5, tck = -0.025) circx <- seq(-1, 1, l = 501) circy <- sqrt(1 - circx ^ 2) lines(c(circx, circx), c(circy, -circy), col = "gray") lines(c(-2, 2), c(0, 0), col = "gray") lines(c(0, 0), c(-2, 2), col = "gray") if (length(x$roots) > 0) { inside <- abs(x$roots) > 1 points(1 / x$roots[inside], pch = 19, col = "black") if (sum(!inside) > 0) { points(1 / x$roots[!inside], pch = 19, col = "red") } } } #' Plot characteristic roots from ARIMA model #' #' Produces a plot of the inverse AR and MA roots of an ARIMA model. Inverse #' roots outside the unit circle are shown in red. #' #' \code{autoplot} will produce an equivalent plot as a ggplot object. #' #' @param x Object of class \dQuote{Arima} or \dQuote{ar}. #' @param object Object of class \dQuote{Arima} or \dQuote{ar}. Used for ggplot #' graphics (S3 method consistency). 
#' @param type Determines if both AR and MA roots are plotted, or if just one
#' set is plotted.
#' @param main Main title. Default is "Inverse AR roots" or "Inverse MA roots".
#' @param xlab X-axis label.
#' @param ylab Y-axis label.
#' @param ... Other plotting parameters passed to \code{\link[graphics]{par}}.
#' @return None. The function produces a plot.
#' @author Rob J Hyndman & Mitchell O'Hara-Wild
#' @seealso \code{\link{Arima}}, \code{\link[stats]{ar}}
#' @keywords hplot
#' @examples
#'
#' library(ggplot2)
#'
#' fit <- Arima(WWWusage, order = c(3, 1, 0))
#' plot(fit)
#' autoplot(fit)
#'
#' fit <- Arima(woolyrnq, order = c(2, 0, 0), seasonal = c(2, 1, 1))
#' plot(fit)
#' autoplot(fit)
#'
#' plot(ar.ols(gold[1:61]))
#' autoplot(ar.ols(gold[1:61]))
#'
#' @export
plot.Arima <- function(x, type = c("both", "ar", "ma"), main,
                       xlab = "Real", ylab = "Imaginary", ...) {
  type <- match.arg(type)
  if (!is.element("Arima", class(x))) {
    stop("This function is for objects of class 'Arima'.")
  }
  q <- p <- 0
  # AR component
  if (length(x$model$phi) > 0) {
    test <- abs(x$model$phi) > 1e-09
    if (any(test)) {
      p <- max(which(test))
    }
  }
  # MA component
  if (length(x$model$theta) > 0) {
    test <- abs(x$model$theta) > 1e-09
    if (any(test)) {
      q <- max(which(test))
    }
  }
  # If only one of the AR/MA parts is present, plot just that one
  if (type == "both") {
    if (p == 0) {
      type <- "ma"
    } else if (q == 0) {
      type <- "ar"
    }
  }
  if ((type == "ar" && (p == 0)) || (type == "ma" && (q == 0)) || (p == 0 && q == 0)) {
    warning("No roots to plot")
    if (missing(main)) {
      main <- "No AR or MA roots"
    }
  }
  if (type == "both") {
    oldpar <- par(mfrow = c(1, 2))
    on.exit(par(oldpar))
  }
  if (type != "ma") {
    plot(arroots(x), main = main, xlab = xlab, ylab = ylab, ...)
  }
  if (type != "ar") {
    plot(maroots(x), main = main, xlab = xlab, ylab = ylab, ...)
  }
}

#' @rdname plot.Arima
#' @export
plot.ar <- function(x, main, xlab = "Real", ylab = "Imaginary", ...) {
  if (!is.element("ar", class(x))) {
    stop("This function is for objects of class 'ar'.")
  }
  plot(arroots(x), main = main, xlab = xlab, ylab = ylab, ...)
}
forecast/R/theta.R0000644000176200001440000001152113556654040013521 0ustar liggesusers
# Implement standard Theta method of Assimakopoulos and Nikolopoulos (2000)
# More general methods are available in the forecTheta package
# Author: RJH

#' Theta method forecast
#'
#' Returns forecasts and prediction intervals for a theta method forecast.
#'
#' The theta method of Assimakopoulos and Nikolopoulos (2000) is equivalent to
#' simple exponential smoothing with drift. This is demonstrated in Hyndman and
#' Billah (2003).
#'
#' The series is tested for seasonality using the test outlined in A&N. If
#' deemed seasonal, the series is seasonally adjusted using a classical
#' multiplicative decomposition before applying the theta method. The resulting
#' forecasts are then reseasonalized.
#'
#' Prediction intervals are computed using the underlying state space model.
#'
#' More general theta methods are available in the
#' \code{\link[forecTheta]{forecTheta}} package.
#'
#' @param y a numeric vector or time series of class \code{ts}
#' @param h Number of periods for forecasting
#' @param level Confidence levels for prediction intervals.
#' @param fan If TRUE, level is set to seq(51,99,by=3). This is suitable for
#' fan plots.
#' @param x Deprecated. Included for backwards compatibility.
#' @return An object of class "\code{forecast}".
#'
#' The function \code{summary} is used to obtain and print a summary of the
#' results, while the function \code{plot} produces a plot of the forecasts and
#' prediction intervals.
#'
#' The generic accessor functions \code{fitted.values} and \code{residuals}
#' extract useful features of the value returned by \code{thetaf}.
#'
#' An object of class \code{"forecast"} is a list containing at least the
#' following elements: \item{model}{A list containing information about the
#' fitted model} \item{method}{The name of the forecasting method as a
#' character string} \item{mean}{Point forecasts as a time series}
#' \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper
#' limits for prediction intervals} \item{level}{The confidence values
#' associated with the prediction intervals} \item{x}{The original time series
#' (either \code{object} itself or the time series used to create the model
#' stored as \code{object}).} \item{residuals}{Residuals from the fitted model.
#' That is x minus fitted values.} \item{fitted}{Fitted values (one-step
#' forecasts)}
#' @author Rob J Hyndman
#' @seealso \code{\link[stats]{arima}}, \code{\link{meanf}}, \code{\link{rwf}},
#' \code{\link{ses}}
#' @references Assimakopoulos, V. and Nikolopoulos, K. (2000). The theta model:
#' a decomposition approach to forecasting. \emph{International Journal of
#' Forecasting} \bold{16}, 521-530.
#'
#' Hyndman, R.J., and Billah, B. (2003) Unmasking the Theta method.
#' \emph{International Journal of Forecasting}, \bold{19}, 287-290.
#' @keywords ts
#' @examples
#' nile.fcast <- thetaf(Nile)
#' plot(nile.fcast)
#'
#' @export
thetaf <- function(y, h = ifelse(frequency(y) > 1, 2 * frequency(y), 10),
                   level = c(80, 95), fan = FALSE, x = y) {
  # Check inputs
  if (fan) {
    level <- seq(51, 99, by = 3)
  } else {
    if (min(level) > 0 && max(level) < 1) {
      level <- 100 * level
    } else if (min(level) < 0 || max(level) > 99.99) {
      stop("Confidence limit out of range")
    }
  }
  # Check seasonality
  n <- length(x)
  x <- as.ts(x)
  m <- frequency(x)
  if (m > 1 && !is.constant(x) && n > 2 * m) {
    r <- as.numeric(acf(x, lag.max = m, plot = FALSE)$acf)[-1]
    stat <- sqrt((1 + 2 * sum(r[-m] ^ 2)) / n)
    seasonal <- (abs(r[m]) / stat > qnorm(0.95))
  } else {
    seasonal <- FALSE
  }
  # Seasonal decomposition
  origx <- x
  if (seasonal) {
    decomp <- decompose(x, type = "multiplicative")
    if (any(abs(seasonal(decomp)) < 1e-10)) {
      warning("Seasonal indexes equal to zero. Using non-seasonal Theta method")
      seasonal <- FALSE # fall back so the forecasts are not reseasonalized below
    } else {
      x <- seasadj(decomp)
    }
  }
  # Find theta lines
  fcast <- ses(x, h = h)
  tmp2 <- lsfit(0:(n - 1), x)$coef[2] / 2
  alpha <- pmax(1e-10, fcast$model$par["alpha"])
  fcast$mean <- fcast$mean + tmp2 * (0:(h - 1) + (1 - (1 - alpha) ^ n) / alpha)
  # Reseasonalize
  if (seasonal) {
    fcast$mean <- fcast$mean * rep(tail(decomp$seasonal, m), trunc(1 + h / m))[1:h]
    fcast$fitted <- fcast$fitted * decomp$seasonal
  }
  fcast$residuals <- origx - fcast$fitted
  # Find prediction intervals
  fcast.se <- sqrt(fcast$model$sigma) * sqrt((0:(h - 1)) * alpha ^ 2 + 1)
  nconf <- length(level)
  fcast$lower <- fcast$upper <- ts(matrix(NA, nrow = h, ncol = nconf))
  tsp(fcast$lower) <- tsp(fcast$upper) <- tsp(fcast$mean)
  for (i in 1:nconf) {
    zt <- -qnorm(0.5 - level[i] / 200)
    fcast$lower[, i] <- fcast$mean - zt * fcast.se
    fcast$upper[, i] <- fcast$mean + zt * fcast.se
  }
  # Return results
  fcast$x <- origx
  fcast$level <- level
  fcast$method <- "Theta"
  fcast$model <- list(alpha = alpha, drift = tmp2, sigma = fcast$model$sigma)
  fcast$model$call <- match.call()
  return(fcast)
}
forecast/R/newarima2.R0000644000176200001440000007754213556654040014310 0ustar liggesusers
#' Fit best ARIMA model to univariate time series
#'
#' Returns best ARIMA model according to either AIC, AICc or BIC value. The
#' function conducts a search over possible models within the order constraints
#' provided.
#'
#' The default arguments are designed for rapid estimation of models for many time series.
#' If you are analysing just one time series, and can afford to take some more time, it
#' is recommended that you set \code{stepwise=FALSE} and \code{approximation=FALSE}.
#'
#' Non-stepwise selection can be slow, especially for seasonal data. The stepwise
#' algorithm outlined in Hyndman & Khandakar (2008) is used except that the default
#' method for selecting seasonal differences is now based on an estimate of seasonal
#' strength (Wang, Smith & Hyndman, 2006) rather than the Canova-Hansen test.
#' There are also some other minor variations to the algorithm described in
#' Hyndman and Khandakar (2008).
#'
#' @inheritParams stats::arima
#' @param y a univariate time series
#' @param d Order of first-differencing. If missing, will choose a value based
#' on \code{test}.
#' @param D Order of seasonal-differencing. If missing, will choose a value
#' based on \code{seasonal.test}.
#' @param max.p Maximum value of p
#' @param max.q Maximum value of q
#' @param max.P Maximum value of P
#' @param max.Q Maximum value of Q
#' @param max.order Maximum value of p+q+P+Q if model selection is not
#' stepwise.
#' @param max.d Maximum number of non-seasonal differences
#' @param max.D Maximum number of seasonal differences
#' @param start.p Starting value of p in stepwise procedure.
#' @param start.q Starting value of q in stepwise procedure.
#' @param start.P Starting value of P in stepwise procedure.
#' @param start.Q Starting value of Q in stepwise procedure.
#' @param stationary If \code{TRUE}, restricts search to stationary models.
#' @param seasonal If \code{FALSE}, restricts search to non-seasonal models.
#' @param ic Information criterion to be used in model selection.
#' @param stepwise If \code{TRUE}, will do stepwise selection (faster).
#' Otherwise, it searches over all models. Non-stepwise selection can be very
#' slow, especially for seasonal models.
#' @param nmodels Maximum number of models considered in the stepwise search.
#' @param trace If \code{TRUE}, the list of ARIMA models considered will be
#' reported.
#' @param approximation If \code{TRUE}, estimation is via conditional sums of
#' squares and the information criteria used for model selection are
#' approximated. The final model is still computed using maximum likelihood
#' estimation. Approximation should be used for long time series or a high
#' seasonal period to avoid excessive computation times.
#' @param truncate An integer value indicating how many observations to use in
#' model selection. The last \code{truncate} values of the series are used to
#' select a model when \code{truncate} is not \code{NULL} and
#' \code{approximation=TRUE}. All observations are used if either
#' \code{truncate=NULL} or \code{approximation=FALSE}.
#' @param xreg Optionally, a numerical vector or matrix of external regressors, which
#' must have the same number of rows as \code{y}. (It should not be a data frame.)
#' @param test Type of unit root test to use. See \code{\link{ndiffs}} for
#' details.
#' @param test.args Additional arguments to be passed to the unit root test.
#' @param seasonal.test This determines which method is used to select the number of seasonal differences.
#' The default method is to use a measure of seasonal strength computed from an STL decomposition.
#' Other possibilities involve seasonal unit root tests.
#' @param seasonal.test.args Additional arguments to be passed to the seasonal
#' unit root test.
#' See \code{\link{nsdiffs}} for details.
#' @param allowdrift If \code{TRUE}, models with drift terms are considered.
#' @param allowmean If \code{TRUE}, models with a non-zero mean are considered.
#' @param parallel If \code{TRUE} and \code{stepwise = FALSE}, then the
#' specification search is done in parallel. This can give a significant
#' speedup on multicore machines.
#' @param num.cores Allows the user to specify the number of parallel processes
#' to be used if \code{parallel = TRUE} and \code{stepwise = FALSE}. If
#' \code{NULL}, then the number of logical cores is automatically detected and
#' all available cores are used.
#' @param x Deprecated. Included for backwards compatibility.
#' @param ... Additional arguments to be passed to \code{\link[stats]{arima}}.
#' @inheritParams forecast
#'
#' @return Same as for \code{\link{Arima}}
#' @author Rob J Hyndman
#' @seealso \code{\link{Arima}}
#' @references Hyndman, RJ and Khandakar, Y (2008) "Automatic time series
#' forecasting: The forecast package for R", \emph{Journal of Statistical
#' Software}, \bold{26}(3).
#'
#' Wang, X, Smith, KA, Hyndman, RJ (2006) "Characteristic-based clustering
#' for time series data", \emph{Data Mining and Knowledge Discovery},
#' \bold{13}(3), 335-364.
#' @keywords ts
#' @examples
#' fit <- auto.arima(WWWusage)
#' plot(forecast(fit, h = 20))
#'
#' @export
auto.arima <- function(y, d = NA, D = NA, max.p = 5, max.q = 5, max.P = 2, max.Q = 2,
                       max.order = 5, max.d = 2, max.D = 1, start.p = 2, start.q = 2,
                       start.P = 1, start.Q = 1, stationary = FALSE, seasonal = TRUE,
                       ic = c("aicc", "aic", "bic"), stepwise = TRUE, nmodels = 94,
                       trace = FALSE,
                       approximation = (length(x) > 150 | frequency(x) > 12),
                       method = NULL, truncate = NULL, xreg = NULL,
                       test = c("kpss", "adf", "pp"), test.args = list(),
                       seasonal.test = c("seas", "ocsb", "hegy", "ch"),
                       seasonal.test.args = list(), allowdrift = TRUE, allowmean = TRUE,
                       lambda = NULL, biasadj = FALSE, parallel = FALSE, num.cores = 2,
                       x = y, ...) {
  # Only non-stepwise parallel implemented so far.
  if (stepwise && parallel) {
    warning("Parallel computation is only implemented when stepwise=FALSE; the model will be fitted in serial.")
    parallel <- FALSE
  }
  if (trace && parallel) {
    message("Tracing model searching in parallel is not supported.")
    trace <- FALSE
  }
  series <- deparse(substitute(y))
  x <- as.ts(x)
  if (NCOL(x) > 1) {
    stop("auto.arima can only handle univariate time series")
  }
  # Trim leading NAs and find length of non-missing data
  orig.x <- x
  missing <- is.na(x)
  firstnonmiss <- head(which(!missing), 1)
  lastnonmiss <- tail(which(!missing), 1)
  serieslength <- sum(!missing[firstnonmiss:lastnonmiss])
  # Trim initial missing values
  x <- subset(x, start = firstnonmiss)
  # Check for constant data
  if (is.constant(x)) {
    if (all(is.na(x))) {
      stop("All data are missing")
    }
    if (allowmean) {
      fit <- Arima(x, order = c(0, 0, 0), fixed = mean(x, na.rm = TRUE), ...)
    } else {
      fit <- Arima(x, order = c(0, 0, 0), include.mean = FALSE, ...)
    }
    fit$x <- orig.x
    fit$series <- series
    fit$call <- match.call()
    fit$call$x <- data.frame(x = x)
    fit$constant <- TRUE
    return(fit)
  }
  ic <- match.arg(ic)
  test <- match.arg(test)
  seasonal.test <- match.arg(seasonal.test)
  # Only consider non-seasonal models
  if (seasonal) {
    m <- frequency(x)
  } else {
    m <- 1
  }
  if (m < 1) {
    # warning("I can't handle data with frequency less than 1. Seasonality will be ignored.")
    m <- 1
  } else {
    m <- round(m) # Avoid non-integer seasonal periods
  }
  max.p <- min(max.p, floor(serieslength / 3))
  max.q <- min(max.q, floor(serieslength / 3))
  max.P <- min(max.P, floor(serieslength / 3 / m))
  max.Q <- min(max.Q, floor(serieslength / 3 / m))
  # Use AIC if npar <= 3
  # AICc won't work for tiny samples.
  if (serieslength <= 3L) {
    ic <- "aic"
  }
  # Transform data if requested
  if (!is.null(lambda)) {
    x <- BoxCox(x, lambda)
    lambda <- attr(x, "lambda")
    attr(lambda, "biasadj") <- biasadj
  }
  # Check xreg and do regression if necessary
  if (!is.null(xreg)) {
    if (!is.numeric(xreg)) {
      stop("xreg should be a numeric matrix or a numeric vector")
    }
    xreg <- as.matrix(xreg)
    if (is.null(colnames(xreg))) {
      colnames(xreg) <- if (ncol(xreg) == 1) "xreg" else paste("xreg", 1:ncol(xreg), sep = "")
    }
    xregg <- xreg
    xx <- x
    # Check that xreg is not rank deficient
    # First check if any columns are constant
    constant_columns <- apply(xregg, 2, is.constant)
    if (all(constant_columns)) {
      xregg <- NULL
    } else {
      if (any(constant_columns)) {
        xregg <- xregg[, -which(constant_columns), drop = FALSE]
      }
      # Now check if it is rank deficient
      sv <- svd(na.omit(cbind(rep(1, NROW(xregg)), xregg)))$d
      if (min(sv) / sum(sv) < .Machine$double.eps) {
        stop("xreg is rank deficient")
      }
      # Finally find residuals from regression in order
      # to estimate appropriate level of differencing
      j <- !is.na(x) & !is.na(rowSums(xregg))
      xx[j] <- residuals(lm(x ~ xregg))
    }
  } else {
    xx <- x
    xregg <- NULL
  }
  # Choose order of differencing
  if (stationary) {
    d <- D <- 0
  }
  if (m == 1) {
    D <- max.P <- max.Q <- 0
  } else if (is.na(D)) {
    D <- do.call("nsdiffs", c(list(xx, test = seasonal.test, max.D = max.D), seasonal.test.args))
    # Make sure xreg is not null after differencing
    if (D > 0 && !is.null(xregg)) {
      diffxreg <- diff(xregg, differences = D, lag = m)
      if (any(apply(diffxreg, 2, is.constant))) {
        D <- D - 1
      }
    }
    # Make sure xx is not all missing after differencing
    if (D > 0) {
      dx <- diff(xx, differences = D, lag = m)
      if (all(is.na(dx))) {
        D <- D - 1
      }
    }
  }
  if (D > 0) {
    dx <- diff(xx, differences = D, lag = m)
  } else {
    dx <- xx
  }
  if (!is.null(xregg)) {
    if (D > 0) {
      diffxreg <- diff(xregg, differences = D, lag = m)
    } else {
      diffxreg <- xregg
    }
  }
  if (is.na(d)) {
    d <- do.call("ndiffs", c(list(dx, test = test, max.d = max.d), test.args))
    # Make sure xreg is not null after differencing
    if (d > 0 && !is.null(xregg)) {
      diffxreg <- diff(diffxreg, differences = d, lag = 1)
      if (any(apply(diffxreg, 2, is.constant))) {
        d <- d - 1
      }
    }
    # Make sure dx is not all missing after differencing
    if (d > 0) {
      diffdx <- diff(dx, differences = d, lag = 1)
      if (all(is.na(diffdx))) {
        d <- d - 1
      }
    }
  }
  # Check number of differences selected
  if (D >= 2) {
    warning("Having more than one seasonal difference is not recommended. Please consider using only one seasonal difference.")
  } else if (D + d > 2) {
    warning("Having 3 or more differencing operations is not recommended. Please consider reducing the total number of differences.")
  }
  if (d > 0) {
    dx <- diff(dx, differences = d, lag = 1)
  }
  if (length(dx) == 0L) {
    stop("Not enough data to proceed")
  } else if (is.constant(dx)) {
    if (is.null(xreg)) {
      if (D > 0 && d == 0) {
        fit <- Arima(x, order = c(0, d, 0), seasonal = list(order = c(0, D, 0), period = m), include.constant = TRUE, fixed = mean(dx / m, na.rm = TRUE), method = method, ...)
      } else if (D > 0 && d > 0) {
        fit <- Arima(x, order = c(0, d, 0), seasonal = list(order = c(0, D, 0), period = m), method = method, ...)
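        # Added note (not part of the original source): this is.constant(dx)
        # chain short-circuits the search when the differenced series is
        # deterministic; e.g. auto.arima(ts(1:20)) should reduce directly to a
        # pure drift model, "ARIMA(0,1,0) with drift", with no stepwise search.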
} else if (d == 2) { fit <- Arima(x, order = c(0, d, 0), method = method, ...) } else if (d < 2) { fit <- Arima(x, order = c(0, d, 0), include.constant = TRUE, fixed = mean(dx, na.rm = TRUE), method = method, ...) } else { stop("Data follow a simple polynomial and are not suitable for ARIMA modelling.") } } else # Perfect regression { if (D > 0) { fit <- Arima(x, order = c(0, d, 0), seasonal = list(order = c(0, D, 0), period = m), xreg = xreg, method = method, ...) } else { fit <- Arima(x, order = c(0, d, 0), xreg = xreg, method = method, ...) } } fit$x <- orig.x fit$series <- series fit$call <- match.call() fit$call$x <- data.frame(x = x) return(fit) } if (m > 1) { if (max.P > 0) { max.p <- min(max.p, m - 1) } if (max.Q > 0) { max.q <- min(max.q, m - 1) } } # Find constant offset for AIC calculation using white noise model if (approximation) { if (!is.null(truncate)) { tspx <- tsp(x) if (length(x) > truncate) { x <- ts(tail(x, truncate), end = tspx[2], frequency = tspx[3]) } } if (D == 0) { fit <- try(stats::arima(x, order = c(0, d, 0), xreg = xreg, ...), silent = TRUE) } else { fit <- try(stats::arima( x, order = c(0, d, 0), seasonal = list(order = c(0, D, 0), period = m), xreg = xreg, ... ), silent = TRUE) } if (!is.element("try-error", class(fit))) { offset <- -2 * fit$loglik - serieslength * log(fit$sigma2) } else # Not sure this should ever happen { # warning("Unable to calculate AIC offset") offset <- 0 } } else { offset <- 0 } allowdrift <- allowdrift & (d + D) == 1 allowmean <- allowmean & (d + D) == 0 constant <- allowdrift | allowmean if (approximation && trace) { cat("\n Fitting models using approximations to speed things up...\n") } if (!stepwise) { bestfit <- search.arima( x, d, D, max.p, max.q, max.P, max.Q, max.order, stationary, ic, trace, approximation, method = method, xreg = xreg, offset = offset, allowdrift = allowdrift, allowmean = allowmean, parallel = parallel, num.cores = num.cores, ... ) bestfit$call <- match.call() bestfit$call$x <- data.frame(x = x) bestfit$lambda <- lambda bestfit$x <- orig.x bestfit$series <- series bestfit$fitted <- fitted.Arima(bestfit) if (trace) { cat("\n\n Best model:", arima.string(bestfit, padding = TRUE), "\n\n") } return(bestfit) } # Starting model if (length(x) < 10L) { start.p <- min(start.p, 1L) start.q <- min(start.q, 1L) start.P <- 0L start.Q <- 0L } p <- start.p <- min(start.p, max.p) q <- start.q <- min(start.q, max.q) P <- start.P <- min(start.P, max.P) Q <- start.Q <- min(start.Q, max.Q) results <- matrix(NA, nrow = nmodels, ncol = 8) bestfit <- myarima(x, order = c(p, d, q), seasonal = c(P, D, Q), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[1, ] <- c(p, d, q, P, D, Q, constant, bestfit$ic) # Null model with possible constant fit <- myarima(x, order = c(0, d, 0), seasonal = c(0, D, 0), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[2, ] <- c(0, d, 0, 0, D, 0, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit p <- q <- P <- Q <- 0 } k <- 2 # Basic AR model if (max.p > 0 || max.P > 0) { fit <- myarima(x, order = c(max.p > 0, d, 0), seasonal = c((m > 1) & (max.P > 0), D, 0), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) 
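    # Added note (not part of the original source): the user-specified starting
    # model, the null model, this basic AR, and the basic MA fitted just below
    # form the four seed models of the stepwise search in Hyndman & Khandakar
    # (2008). To watch the search progress, try e.g.
    #   auto.arima(WWWusage, trace = TRUE)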
results[k+1, ] <- c(max.p > 0, d, 0, (m > 1) & (max.P > 0), D, 0, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit p <- (max.p > 0) P <- (m > 1) & (max.P > 0) q <- Q <- 0 } k <- k + 1 } # Basic MA model if (max.q > 0 || max.Q > 0) { fit <- myarima(x, order = c(0, d, max.q > 0), seasonal = c(0, D, (m > 1) & (max.Q > 0)), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k+1, ] <- c(0, d, max.q > 0, 0, D, (m > 1) & (max.Q > 0), constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit p <- P <- 0 Q <- (m > 1) & (max.Q > 0) q <- (max.q > 0) } k <- k + 1 } # Null model with no constant if (constant) { fit <- myarima(x, order = c(0, d, 0), seasonal = c(0, D, 0), constant = FALSE, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k+1, ] <- c(0, d, 0, 0, D, 0, 0, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit p <- q <- P <- Q <- 0 } k <- k + 1 } startk <- 0 while (startk < k && k < nmodels) { startk <- k if (P > 0 && newmodel(p, d, q, P - 1, D, Q, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p, d, q), seasonal = c(P - 1, D, Q), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p, d, q, P - 1, D, Q, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit P <- (P - 1) next } } if (Q > 0 && newmodel(p, d, q, P, D, Q - 1, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p, d, q), seasonal = c(P, D, Q - 1), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p, d, q, P, D, Q - 1, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit Q <- (Q - 1) next } } if (P < max.P && newmodel(p, d, q, P + 1, D, Q, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p, d, q), seasonal = c(P + 1, D, Q), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p, d, q, P + 1, D, Q, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit P <- (P + 1) next } } if (Q < max.Q && newmodel(p, d, q, P, D, Q + 1, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p, d, q), seasonal = c(P, D, Q + 1), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p, d, q, P, D, Q + 1, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit Q <- (Q + 1) next } } if (Q > 0 && P > 0 && newmodel(p, d, q, P - 1, D, Q - 1, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p, d, q), seasonal = c(P - 1, D, Q - 1), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p, d, q, P - 1, D, Q - 1, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit Q <- (Q - 1) P <- (P - 1) next } } if (Q < max.Q && P > 0 && newmodel(p, d, q, P - 1, D, Q + 1, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p, d, q), seasonal = c(P - 1, D, Q + 1), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) 
results[k, ] <- c(p, d, q, P - 1, D, Q + 1, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit Q <- (Q + 1) P <- (P - 1) next } } if (Q > 0 && P < max.P && newmodel(p, d, q, P + 1, D, Q - 1, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p, d, q), seasonal = c(P + 1, D, Q - 1), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p, d, q, P + 1, D, Q - 1, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit Q <- (Q - 1) P <- (P + 1) next } } if (Q < max.Q && P < max.P && newmodel(p, d, q, P + 1, D, Q + 1, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p, d, q), seasonal = c(P + 1, D, Q + 1), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p, d, q, P + 1, D, Q + 1, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit Q <- (Q + 1) P <- (P + 1) next } } if (p > 0 && newmodel(p - 1, d, q, P, D, Q, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p - 1, d, q), seasonal = c(P, D, Q), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p - 1, d, q, P, D, Q, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit p <- (p - 1) next } } if (q > 0 && newmodel(p, d, q - 1, P, D, Q, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p, d, q - 1), seasonal = c(P, D, Q), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p, d, q - 1, P, D, Q, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit q <- (q - 1) next } } if (p < max.p && newmodel(p + 1, d, q, P, D, Q, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p + 1, d, q), seasonal = c(P, D, Q), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p + 1, d, q, P, D, Q, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit p <- (p + 1) next } } if (q < max.q && newmodel(p, d, q + 1, P, D, Q, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p, d, q + 1), seasonal = c(P, D, Q), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p, d, q + 1, P, D, Q, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit q <- (q + 1) next } } if (q > 0 && p > 0 && newmodel(p - 1, d, q - 1, P, D, Q, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p - 1, d, q - 1), seasonal = c(P, D, Q), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p - 1, d, q - 1, P, D, Q, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit q <- (q - 1) p <- (p - 1) next } } if (q < max.q && p > 0 && newmodel(p - 1, d, q + 1, P, D, Q, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p - 1, d, q + 1), seasonal = c(P, D, Q), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) 
results[k, ] <- c(p - 1, d, q + 1, P, D, Q, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit q <- (q + 1) p <- (p - 1) next } } if (q > 0 && p < max.p && newmodel(p + 1, d, q - 1, P, D, Q, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p + 1, d, q - 1), seasonal = c(P, D, Q), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p + 1, d, q - 1, P, D, Q, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit q <- (q - 1) p <- (p + 1) next } } if (q < max.q && p < max.p && newmodel(p + 1, d, q + 1, P, D, Q, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p + 1, d, q + 1), seasonal = c(P, D, Q), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p + 1, d, q + 1, P, D, Q, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit q <- (q + 1) p <- (p + 1) next } } if (allowdrift || allowmean) { if (newmodel(p, d, q, P, D, Q, !constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p, d, q), seasonal = c(P, D, Q), constant = !constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p, d, q, P, D, Q, !constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit constant <- !constant } } } } if(k > nmodels){ warning(sprintf("Stepwise search was stopped early due to reaching the model number limit: `nmodels = %i`", nmodels)) } # Refit using ML if approximation used for IC if (approximation && !is.null(bestfit$arma)) { if (trace) { cat("\n\n Now re-fitting the best model(s) without approximations...\n") } icorder <- order(results[, 8]) nmodels <- sum(!is.na(results[, 8])) for (i in seq(nmodels)) { k <- icorder[i] fit <- myarima( x, order = c(results[k, 1], d, results[k, 3]), seasonal = c(results[k, 4], D, results[k, 6]), constant = results[k, 7] == 1, ic, trace, approximation = FALSE, method = method, xreg = xreg, ... ) if (fit$ic < Inf) { bestfit <- fit break } } } # Nothing fitted if (bestfit$ic == Inf && !isTRUE(method=="CSS")) { if (trace) { cat("\n") } stop("No suitable ARIMA model found") } # Return best fit bestfit$x <- orig.x bestfit$series <- series bestfit$ic <- NULL bestfit$call <- match.call() bestfit$call$x <- data.frame(x = x) bestfit$lambda <- lambda bestfit$fitted <- fitted.Arima(bestfit) if (trace) { cat("\n\n Best model:", arima.string(bestfit, padding = TRUE), "\n\n") } return(bestfit) } # Calls arima from stats package and adds data to the returned object # Also allows refitting to new data # and drift terms to be included. myarima <- function(x, order = c(0, 0, 0), seasonal = c(0, 0, 0), constant=TRUE, ic="aic", trace=FALSE, approximation=FALSE, offset=0, xreg=NULL, method = NULL, ...) 
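# Added note (not part of the original source): myarima() wraps stats::arima(),
# attaching the chosen information criterion and guarding against (near) unit
# roots. The AICc correction applied in the body is the usual
#   AICc = AIC + 2 * npar * (npar + 1) / (nstar - npar - 1)
# so with, say, npar = 3 parameters and nstar = 100 usable observations the
# penalty added to the AIC is 24 / 96 = 0.25.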
{ # Length of non-missing interior missing <- is.na(x) firstnonmiss <- head(which(!missing),1) lastnonmiss <- tail(which(!missing),1) n <- sum(!missing[firstnonmiss:lastnonmiss]) m <- frequency(x) use.season <- (sum(seasonal) > 0) & m > 0 diffs <- order[2] + seasonal[2] if(is.null(method)){ if (approximation) { method <- "CSS" } else { method <- "CSS-ML" } } if (diffs == 1 && constant) { xreg <- `colnames<-`(cbind(drift = 1:length(x), xreg), make.unique(c("drift", if(is.null(colnames(xreg)) && !is.null(xreg)) rep("", NCOL(xreg)) else colnames(xreg)))) if (use.season) { suppressWarnings(fit <- try(stats::arima(x = x, order = order, seasonal = list(order = seasonal, period = m), xreg = xreg, method = method, ...), silent = TRUE)) } else { suppressWarnings(fit <- try(stats::arima(x = x, order = order, xreg = xreg, method = method, ...), silent = TRUE)) } } else { if (use.season) { suppressWarnings(fit <- try(stats::arima(x = x, order = order, seasonal = list(order = seasonal, period = m), include.mean = constant, method = method, xreg = xreg, ...), silent = TRUE)) } else { suppressWarnings(fit <- try(stats::arima(x = x, order = order, include.mean = constant, method = method, xreg = xreg, ...), silent = TRUE)) } } if (is.null(xreg)) { nxreg <- 0 } else { nxreg <- ncol(as.matrix(xreg)) } if (!is.element("try-error", class(fit))) { nstar <- n - order[2] - seasonal[2] * m if (diffs == 1 && constant) { # fitnames <- names(fit$coef) # fitnames[length(fitnames)-nxreg] <- "drift" # names(fit$coef) <- fitnames fit$xreg <- xreg } npar <- length(fit$coef[fit$mask]) + 1 if (method == "CSS") { fit$aic <- offset + nstar * log(fit$sigma2) + 2 * npar } if (!is.na(fit$aic)) { fit$bic <- fit$aic + npar * (log(nstar) - 2) fit$aicc <- fit$aic + 2 * npar * (npar + 1) / (nstar - npar - 1) fit$ic <- switch(ic, bic = fit$bic, aic = fit$aic, aicc = fit$aicc) } else { fit$aic <- fit$bic <- fit$aicc <- fit$ic <- Inf } # Adjust residual variance to be unbiased fit$sigma2 <- sum(fit$residuals ^ 2, na.rm = TRUE) / (nstar - npar + 1) # Check for unit roots minroot <- 2 if (order[1] + seasonal[1] > 0) { testvec <- fit$model$phi k <- abs(testvec) > 1e-8 if (sum(k) > 0) { last.nonzero <- max(which(k)) } else { last.nonzero <- 0 } if (last.nonzero > 0) { testvec <- testvec[1:last.nonzero] proots <- try(polyroot(c(1,-testvec))) if (!is.element("try-error", class(proots))) { minroot <- min(minroot, abs(proots)) } else fit$ic <- Inf } } if (order[3] + seasonal[3] > 0 & fit$ic < Inf) { testvec <- fit$model$theta k <- abs(testvec) > 1e-8 if (sum(k) > 0) { last.nonzero <- max(which(k)) } else { last.nonzero <- 0 } if (last.nonzero > 0) { testvec <- testvec[1:last.nonzero] proots <- try(polyroot(c(1,testvec))) if (!is.element("try-error", class(proots))) { minroot <- min(minroot, abs(proots)) } else fit$ic <- Inf } } if (minroot < 1 + 1e-2) { # Previously 1+1e-3 fit$ic <- Inf } # Don't like this model if (trace) { cat("\n", arima.string(fit, padding = TRUE), ":", fit$ic) } fit$xreg <- xreg return(structure(fit, class = c("forecast_ARIMA", "ARIMA", "Arima"))) } else { # Catch errors due to unused arguments if (length(grep("unused argument", fit)) > 0L) { stop(fit[1]) } if (trace) { cat("\n ARIMA(", order[1], ",", order[2], ",", order[3], ")", sep = "") if (use.season) { cat("(", seasonal[1], ",", seasonal[2], ",", seasonal[3], ")[", m, "]", sep = "") } if (constant && (order[2] + seasonal[2] == 0)) { cat(" with non-zero mean") } else if (constant && (order[2] + seasonal[2] == 1)) { cat(" with drift ") } else if (!constant && 
(order[2] + seasonal[2] == 0)) { cat(" with zero mean ") } else { cat(" ") } cat(" :", Inf) } return(list(ic = Inf)) } } newmodel <- function(p, d, q, P, D, Q, constant, results) { n <- nrow(results) for (i in 1:n) { if(!all(is.na(results[i, seq(7)]))) { if (all(c(p, d, q, P, D, Q, constant) == results[i, 1:7])) { return(FALSE) } } } return(TRUE) } arima.string <- function(object, padding=FALSE) { order <- object$arma[c(1, 6, 2, 3, 7, 4, 5)] m <- order[7] result <- paste("ARIMA(", order[1], ",", order[2], ",", order[3], ")", sep = "") if (m > 1 && sum(order[4:6]) > 0) { result <- paste(result, "(", order[4], ",", order[5], ",", order[6], ")[", m, "]", sep = "") } if (padding && m > 1 && sum(order[4:6]) == 0) { result <- paste(result, " ", sep = "") if (m <= 9) { result <- paste(result, " ", sep = "") } else if (m <= 99) { result <- paste(result, " ", sep = "") } else { result <- paste(result, " ", sep = "") } } if (!is.null(object$xreg)) { if (NCOL(object$xreg) == 1 && is.element("drift", names(object$coef))) { result <- paste(result, "with drift ") } else { result <- paste("Regression with", result, "errors") } } else { if (is.element("constant", names(object$coef)) || is.element("intercept", names(object$coef))) { result <- paste(result, "with non-zero mean") } else if (order[2] == 0 && order[5] == 0) { result <- paste(result, "with zero mean ") } else { result <- paste(result, " ") } } if (!padding) { # Strip trailing spaces result <- gsub("[ ]*$", "", result) } return(result) } #' @export summary.Arima <- function(object, ...) { print(object) cat("\nTraining set error measures:\n") print(accuracy(object)) } # Check that Arima object has positive coefficient variances without returning warnings checkarima <- function(object) { suppressWarnings(test <- any(is.nan(sqrt(diag(object$var.coef))))) return(test) } #' Is an object constant? #' #' Returns true if the object's numerical values do not vary. #' #' #' @param x object to be tested #' @export is.constant <- function(x) { x <- as.numeric(x) y <- rep(x[1], length(x)) return(isTRUE(all.equal(x, y))) } forecast/R/arima.R0000644000176200001440000010153513566112560013507 0ustar liggesuserssearch.arima <- function(x, d=NA, D=NA, max.p=5, max.q=5, max.P=2, max.Q=2, max.order=5, stationary=FALSE, ic=c("aic", "aicc", "bic"), trace=FALSE, approximation=FALSE, xreg=NULL, offset=offset, allowdrift=TRUE, allowmean=TRUE, parallel=FALSE, num.cores=2, ...) { # dataname <- substitute(x) ic <- match.arg(ic) m <- frequency(x) allowdrift <- allowdrift & (d + D) == 1 allowmean <- allowmean & (d + D) == 0 maxK <- (allowdrift | allowmean) # Choose model orders # Serial - technically could be combined with the code below if (parallel == FALSE) { best.ic <- Inf for (i in 0:max.p) { for (j in 0:max.q) { for (I in 0:max.P) { for (J in 0:max.Q) { if (i + j + I + J <= max.order) { for (K in 0:maxK) { fit <- myarima( x, order = c(i, d, j), seasonal = c(I, D, J), constant = (K == 1), trace = trace, ic = ic, approximation = approximation, offset = offset, xreg = xreg, ... 
) if (fit$ic < best.ic) { best.ic <- fit$ic bestfit <- fit constant <- (K == 1) } } } } } } } } else if (parallel == TRUE) { to.check <- WhichModels(max.p, max.q, max.P, max.Q, maxK) par.all.arima <- function(l) { .tmp <- UndoWhichModels(l) i <- .tmp[1] j <- .tmp[2] I <- .tmp[3] J <- .tmp[4] K <- .tmp[5] == 1 if (i + j + I + J <= max.order) { fit <- myarima( x, order = c(i, d, j), seasonal = c(I, D, J), constant = (K == 1), trace = trace, ic = ic, approximation = approximation, offset = offset, xreg = xreg, ... ) } if (exists("fit")) { return(cbind(fit, K)) } else { return(NULL) } } if (is.null(num.cores)) { num.cores <- detectCores() } cl <- makeCluster(num.cores) all.models <- parLapply(cl = cl, X = to.check, fun = par.all.arima) stopCluster(cl = cl) # Removing null elements all.models <- all.models[!sapply(all.models, is.null)] # Choosing best model best.ic <- Inf for (i in 1:length(all.models)) { if (!is.null(all.models[[i]][, 1]$ic) && all.models[[i]][, 1]$ic < best.ic) { bestfit <- all.models[[i]][, 1] best.ic <- bestfit$ic constant <- unlist(all.models[[i]][1, 2]) } } class(bestfit) <- c("forecast_ARIMA", "ARIMA", "Arima") } if (exists("bestfit")) { # Refit using ML if approximation used for IC if (approximation) { if (trace) { cat("\n\n Now re-fitting the best model(s) without approximations...\n") } # constant <- length(bestfit$coef) - ncol(xreg) > sum(bestfit$arma[1:4]) newbestfit <- myarima( x, order = bestfit$arma[c(1, 6, 2)], seasonal = bestfit$arma[c(3, 7, 4)], constant = constant, ic, trace = FALSE, approximation = FALSE, xreg = xreg, ... ) if (newbestfit$ic == Inf) { # Final model is lousy. Better try again without approximation # warning("Unable to fit final model using maximum likelihood. AIC value approximated") bestfit <- search.arima( x, d = d, D = D, max.p = max.p, max.q = max.q, max.P = max.P, max.Q = max.Q, max.order = max.order, stationary = stationary, ic = ic, trace = trace, approximation = FALSE, xreg = xreg, offset = offset, allowdrift = allowdrift, allowmean = allowmean, parallel = parallel, num.cores = num.cores, ... ) bestfit$ic <- switch(ic, bic = bestfit$bic, aic = bestfit$aic, aicc = bestfit$aicc) } else { bestfit <- newbestfit } } } else { stop("No ARIMA model able to be estimated") } bestfit$x <- x bestfit$series <- deparse(substitute(x)) bestfit$ic <- NULL bestfit$call <- match.call() if (trace) { cat("\n\n") } return(bestfit) } # Set up seasonal dummies using Fourier series SeasDummy <- function(x) { n <- length(x) m <- frequency(x) if (m == 1) { stop("Non-seasonal data") } tt <- 1:n fmat <- matrix(NA, nrow = n, ncol = 2 * m) for (i in 1:m) { fmat[, 2 * i] <- sin(2 * pi * i * tt / m) fmat[, 2 * (i - 1) + 1] <- cos(2 * pi * i * tt / m) } return(fmat[, 1:(m - 1)]) } # CANOVA-HANSEN TEST # Largely based on uroot package code for CH.test() SD.test <- function(wts, s=frequency(wts)) { if (any(is.na(wts))) { stop("Series contains missing values. 
Please choose order of seasonal differencing manually.") } if (s == 1) { stop("Not seasonal data") } t0 <- start(wts) N <- length(wts) if (N <= s) { stop("Insufficient data") } frec <- rep(1, as.integer((s + 1) / 2)) ltrunc <- round(s * (N / 100) ^ 0.25) R1 <- as.matrix(SeasDummy(wts)) lmch <- lm(wts ~ R1, na.action = na.exclude) # run the regression : y(i)=mu+f(i)'gamma(i)+e(i) Fhat <- Fhataux <- matrix(nrow = N, ncol = s - 1) for (i in 1:(s - 1)) Fhataux[, i] <- R1[, i] * residuals(lmch) for (i in 1:N) { for (n in 1:(s - 1)) Fhat[i, n] <- sum(Fhataux[1:i, n]) } wnw <- 1 - seq(1, ltrunc, 1) / (ltrunc + 1) Ne <- nrow(Fhataux) Omnw <- 0 for (k in 1:ltrunc) Omnw <- Omnw + (t(Fhataux)[, (k + 1):Ne] %*% Fhataux[1:(Ne - k), ]) * wnw[k] Omfhat <- (crossprod(Fhataux) + Omnw + t(Omnw)) / Ne sq <- seq(1, s - 1, 2) frecob <- rep(0, s - 1) for (i in 1:length(frec)) { if (frec[i] == 1 && i == as.integer(s / 2)) { frecob[sq[i]] <- 1 } if (frec[i] == 1 && i < as.integer(s / 2)) { frecob[sq[i]] <- frecob[sq[i] + 1] <- 1 } } a <- length(which(frecob == 1)) A <- matrix(0, nrow = s - 1, ncol = a) j <- 1 for (i in 1:(s - 1)) { if (frecob[i] == 1) { A[i, j] <- 1 ifelse(frecob[i] == 1, j <- j + 1, j <- j) } } tmp <- t(A) %*% Omfhat %*% A problems <- (min(svd(tmp)$d) < .Machine$double.eps) if (problems) { stL <- 0 } else { stL <- (1 / N ^ 2) * sum(diag(solve(tmp, tol = 1e-25) %*% t(A) %*% t(Fhat) %*% Fhat %*% A)) } return(stL) } #' Forecasting using ARIMA or ARFIMA models #' #' Returns forecasts and other information for univariate ARIMA models. #' #' For \code{Arima} or \code{ar} objects, the function calls #' \code{\link[stats]{predict.Arima}} or \code{\link[stats]{predict.ar}} and #' constructs an object of class "\code{forecast}" from the results. For #' \code{fracdiff} objects, the calculations are all done within #' \code{\link{forecast.fracdiff}} using the equations given by Peiris and #' Perera (1988). #' #' @param object An object of class "\code{Arima}", "\code{ar}" or #' "\code{fracdiff}". Usually the result of a call to #' \code{\link[stats]{arima}}, \code{\link{auto.arima}}, #' \code{\link[stats]{ar}}, \code{\link{arfima}} or #' \code{\link[fracdiff]{fracdiff}}. #' @param h Number of periods for forecasting. If \code{xreg} is used, \code{h} #' is ignored and the number of forecast periods is set to the number of rows #' of \code{xreg}. #' @param level Confidence level for prediction intervals. #' @param fan If \code{TRUE}, level is set to \code{seq(51,99,by=3)}. This is #' suitable for fan plots. #' @param xreg Future values of an regression variables (for class \code{Arima} #' objects only). A numerical vector or matrix of external regressors; it should not be a data frame. #' @param bootstrap If \code{TRUE}, then prediction intervals computed using #' simulation with resampled errors. #' @param npaths Number of sample paths used in computing simulated prediction #' intervals when \code{bootstrap=TRUE}. #' @param ... Other arguments. #' @inheritParams forecast #' #' @return An object of class "\code{forecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{forecast.Arima}. 
#' #' An object of class "\code{forecast}" is a list containing at least the #' following elements: \item{model}{A list containing information about the #' fitted model} \item{method}{The name of the forecasting method as a #' character string} \item{mean}{Point forecasts as a time series} #' \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper #' limits for prediction intervals} \item{level}{The confidence values #' associated with the prediction intervals} \item{x}{The original time series #' (either \code{object} itself or the time series used to create the model #' stored as \code{object}).} \item{residuals}{Residuals from the fitted model. #' That is x minus fitted values.} \item{fitted}{Fitted values (one-step #' forecasts)} #' @author Rob J Hyndman #' @seealso \code{\link[stats]{predict.Arima}}, #' \code{\link[stats]{predict.ar}}, \code{\link{auto.arima}}, #' \code{\link{Arima}}, \code{\link[stats]{arima}}, \code{\link[stats]{ar}}, #' \code{\link{arfima}}. #' @references Peiris, M. & Perera, B. (1988), On prediction with fractionally #' differenced ARIMA models, \emph{Journal of Time Series Analysis}, #' \bold{9}(3), 215-220. #' @keywords ts #' @aliases forecast.forecast_ARIMA #' @examples #' fit <- Arima(WWWusage,c(3,1,0)) #' plot(forecast(fit)) #' #' library(fracdiff) #' x <- fracdiff.sim( 100, ma=-.4, d=.3)$series #' fit <- arfima(x) #' plot(forecast(fit,h=30)) #' #' @export forecast.Arima <- function(object, h=ifelse(object$arma[5] > 1, 2 * object$arma[5], 10), level=c(80, 95), fan=FALSE, xreg=NULL, lambda=object$lambda, bootstrap=FALSE, npaths=5000, biasadj=NULL, ...) { # Check whether there are non-existent arguments all.args <- names(formals()) user.args <- names(match.call())[-1L] # including arguments passed to 3 dots check <- user.args %in% all.args if (!all(check)) { error.args <- user.args[!check] warning(sprintf("The non-existent %s arguments will be ignored.", error.args)) } use.drift <- is.element("drift", names(object$coef)) x <- object$x <- getResponse(object) usexreg <- (use.drift | is.element("xreg", names(object))) # | use.constant) if (!is.null(xreg) && usexreg) { if(!is.numeric(xreg)) stop("xreg should be a numeric matrix or a numeric vector") xreg <- as.matrix(xreg) if (is.null(colnames(xreg))) { colnames(xreg) <- if (ncol(xreg) == 1) "xreg" else paste("xreg", 1:ncol(xreg), sep = "") } origxreg <- xreg <- as.matrix(xreg) h <- nrow(xreg) } else { if(!is.null(xreg)){ warning("xreg not required by this model, ignoring the provided regressors") xreg <- NULL } origxreg <- NULL } if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 & max(level) < 1) { level <- 100 * level } else if (min(level) < 0 | max(level) > 99.99) { stop("Confidence limit out of range") } } level <- sort(level) if (use.drift) { missing <- is.na(x) firstnonmiss <- head(which(!missing),1) n <- length(x) - firstnonmiss + 1 if (!is.null(xreg)) { xreg <- `colnames<-`(cbind(drift = (1:h) + n, xreg), make.unique(c("drift", if(is.null(colnames(xreg)) && !is.null(xreg)) rep("", NCOL(xreg)) else colnames(xreg)))) } else { xreg <- `colnames<-`(as.matrix((1:h) + n), "drift") } } # Check if data is constant if (!is.null(object$constant)) { if (object$constant) { pred <- list(pred = rep(x[1], h), se = rep(0, h)) } else { stop("Strange value of object$constant") } } else if (usexreg) { if (is.null(xreg)) { stop("No regressors provided") } object$call$xreg <- getxreg(object) if (NCOL(xreg) != NCOL(object$call$xreg)) { stop("Number of regressors does not match fitted model") } 
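    # The regressors supplied here must match those used in fitting: a column
    # count mismatch is an error (above), while differing column names only
    # warn (below), because predict() matches regressor columns by position.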
if(!identical(colnames(xreg), colnames(object$call$xreg))){ warning("xreg contains different column names from the xreg used in training. Please check that the regressors are in the same order.") } pred <- predict(object, n.ahead = h, newxreg = xreg) } else { pred <- predict(object, n.ahead = h) } # Fix time series characteristics if there are missing values at end of series, or if tsp is missing from pred if (!is.null(x)) { tspx <- tsp(x) nx <- max(which(!is.na(x))) if (nx != length(x) | is.null(tsp(pred$pred)) | is.null(tsp(pred$se))) { tspx[2] <- time(x)[nx] start.f <- tspx[2] + 1 / tspx[3] pred$pred <- ts(pred$pred, frequency = tspx[3], start = start.f) pred$se <- ts(pred$se, frequency = tspx[3], start = start.f) } } # Compute prediction intervals nint <- length(level) if (bootstrap) # Compute prediction intervals using simulations { sim <- matrix(NA, nrow = npaths, ncol = h) for (i in 1:npaths) sim[i, ] <- simulate(object, nsim = h, bootstrap = TRUE, xreg = origxreg, lambda = lambda) lower <- apply(sim, 2, quantile, 0.5 - level / 200, type = 8) upper <- apply(sim, 2, quantile, 0.5 + level / 200, type = 8) if (nint > 1L) { lower <- t(lower) upper <- t(upper) } else { lower <- matrix(lower, ncol = 1) upper <- matrix(upper, ncol = 1) } } else { # Compute prediction intervals via the normal distribution lower <- matrix(NA, ncol = nint, nrow = length(pred$pred)) upper <- lower for (i in 1:nint) { qq <- qnorm(0.5 * (1 + level[i] / 100)) lower[, i] <- pred$pred - qq * pred$se upper[, i] <- pred$pred + qq * pred$se } if (!is.finite(max(upper))) { warning("Upper prediction intervals are not finite.") } } colnames(lower) <- colnames(upper) <- paste(level, "%", sep = "") lower <- ts(lower) upper <- ts(upper) tsp(lower) <- tsp(upper) <- tsp(pred$pred) method <- arima.string(object, padding = FALSE) seriesname <- if (!is.null(object$series)) { object$series } else if (!is.null(object$call$x)) { object$call$x } else { object$call$y } fits <- fitted.Arima(object) if (!is.null(lambda) & is.null(object$constant)) { # Back-transform point forecasts and prediction intervals pred$pred <- InvBoxCox(pred$pred, lambda, biasadj, var(residuals.Arima(object), na.rm = TRUE)) if (!bootstrap) { # Bootstrapped intervals already back-transformed lower <- InvBoxCox(lower, lambda) upper <- InvBoxCox(upper, lambda) } } return(structure( list( method = method, model = object, level = level, mean = pred$pred, lower = lower, upper = upper, x = x, series = seriesname, fitted = fits, residuals = residuals.Arima(object) ), class = "forecast" )) } #' @export forecast.forecast_ARIMA <- forecast.Arima #' @rdname forecast.Arima #' @export forecast.ar <- function(object, h=10, level=c(80, 95), fan=FALSE, lambda=NULL, bootstrap=FALSE, npaths=5000, biasadj=FALSE, ...) 
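# Forecasts from a stats::ar() fit: point forecasts come from predict(),
# with normal-based intervals; when bootstrap = TRUE the standard errors
# are replaced by simulation-based estimates.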
{ x <- getResponse(object) pred <- predict(object, newdata = x, n.ahead = h) if (bootstrap) # Recompute se using simulations { sim <- matrix(NA, nrow = npaths, ncol = h) for (i in 1:npaths) sim[i, ] <- simulate(object, nsim = h, bootstrap = TRUE) pred$se <- apply(sim, 2, sd) } if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 & max(level) < 1) { level <- 100 * level } else if (min(level) < 0 | max(level) > 99.99) { stop("Confidence limit out of range") } } nint <- length(level) lower <- matrix(NA, ncol = nint, nrow = length(pred$pred)) upper <- lower for (i in 1:nint) { qq <- qnorm(0.5 * (1 + level[i] / 100)) lower[, i] <- pred$pred - qq * pred$se upper[, i] <- pred$pred + qq * pred$se } colnames(lower) <- colnames(upper) <- paste(level, "%", sep = "") method <- paste("AR(", object$order, ")", sep = "") f <- frequency(x) res <- residuals.ar(object) fits <- fitted.ar(object) if (!is.null(lambda)) { pred$pred <- InvBoxCox(pred$pred, lambda, biasadj, list(level = level, upper = upper, lower = lower)) lower <- InvBoxCox(lower, lambda) upper <- InvBoxCox(upper, lambda) fits <- InvBoxCox(fits, lambda) x <- InvBoxCox(x, lambda) } return(structure( list( method = method, model = object, level = level, mean = pred$pred, lower = lower, upper = upper, x = x, series = deparse(object$call$x), fitted = fits, residuals = res ) , class = "forecast" )) } # Find xreg matrix in an Arima object getxreg <- function(z) { # Look in the obvious place first if (is.element("xreg", names(z))) { return(z$xreg) } # Next most obvious place else if (is.element("xreg", names(z$coef))) { return(eval.parent(z$coef$xreg)) } # Now check under call else if (is.element("xreg", names(z$call))) { return(eval.parent(z$call$xreg)) } # Otherwise check if it exists else { armapar <- sum(z$arma[1:4]) + is.element("intercept", names(z$coef)) npar <- length(z$coef) if (npar > armapar) { stop("It looks like you have an xreg component but I don't know what it is.\n Please use Arima() or auto.arima() rather than arima().") } else { # No xreg used return(NULL) } } } #' Errors from a regression model with ARIMA errors #' #' Returns time series of the regression residuals from a fitted ARIMA model. #' #' This is a deprecated function #' which is identical to \code{\link{residuals.Arima}(object, type="regression")} #' Regression residuals are equal to the original data #' minus the effect of any regression variables. If there are no regression #' variables, the errors will be identical to the original series (possibly #' adjusted to have zero mean). #' #' @param object An object containing a time series model of class \code{Arima}. #' @return A \code{ts} object #' @author Rob J Hyndman #' @seealso \code{\link{residuals.Arima}}. #' @keywords ts #' #' @export arima.errors <- function(object) { message("Deprecated, use residuals.Arima(object, type='regression') instead") residuals.Arima(object, type = "regression") } # Return one-step fits #' h-step in-sample forecasts for time series models. #' #' Returns h-step forecasts for the data used in fitting the model. #' #' @param object An object of class "\code{Arima}", "\code{bats}", #' "\code{tbats}", "\code{ets}" or "\code{nnetar}". #' @param h The number of steps to forecast ahead. #' @param ... Other arguments. #' @return A time series of the h-step forecasts. 
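#' @details For \code{h = 1}, these are the ordinary fitted values returned by
#'   \code{fitted()}. For \code{h > 1}, the model (with its parameters held at
#'   the estimated values) is re-applied to the training data, so that each
#'   fitted value is an h-step-ahead forecast from the corresponding origin.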
#' @author Rob J Hyndman & Mitchell O'Hara-Wild #' @seealso \code{\link{forecast.Arima}}, \code{\link{forecast.bats}}, #' \code{\link{forecast.tbats}}, \code{\link{forecast.ets}}, #' \code{\link{forecast.nnetar}}, \code{\link{residuals.Arima}}, #' \code{\link{residuals.bats}}, \code{\link{residuals.tbats}}, #' \code{\link{residuals.ets}}, \code{\link{residuals.nnetar}}. #' @keywords ts #' @aliases fitted.forecast_ARIMA #' @examples #' fit <- ets(WWWusage) #' plot(WWWusage) #' lines(fitted(fit), col='red') #' lines(fitted(fit, h=2), col='green') #' lines(fitted(fit, h=3), col='blue') #' legend("topleft", legend=paste("h =",1:3), col=2:4, lty=1) #' #' @export fitted.Arima <- function(object, h = 1, ...) { if (h == 1) { x <- getResponse(object) if (!is.null(object$fitted)) { return(object$fitted) } else if (is.null(x)) { # warning("Fitted values are unavailable due to missing historical data") return(NULL) } else if (is.null(object$lambda)) { return(x - object$residuals) } else { fits <- InvBoxCox(BoxCox(x, object$lambda) - object$residuals, object$lambda, NULL, var(object$residuals)) return(fits) } } else { return(hfitted(object = object, h = h, FUN = "Arima", ...)) } } #' @export fitted.forecast_ARIMA <- fitted.Arima # Calls arima from stats package and adds data to the returned object # Also allows refitting to new data # and drift terms to be included. #' Fit ARIMA model to univariate time series #' #' Largely a wrapper for the \code{\link[stats]{arima}} function in the stats #' package. The main difference is that this function allows a drift term. It #' is also possible to take an ARIMA model from a previous call to \code{Arima} #' and re-apply it to the data \code{y}. #' #' See the \code{\link[stats]{arima}} function in the stats package. #' #' @aliases print.ARIMA summary.Arima as.character.Arima #' #' @param y a univariate time series of class \code{ts}. #' @param order A specification of the non-seasonal part of the ARIMA model: #' the three components (p, d, q) are the AR order, the degree of differencing, #' and the MA order. #' @param seasonal A specification of the seasonal part of the ARIMA model, #' plus the period (which defaults to frequency(y)). This should be a list with #' components order and period, but a specification of just a numeric vector of #' length 3 will be turned into a suitable list with the specification as the #' order. #' @param xreg Optionally, a numerical vector or matrix of external regressors, which #' must have the same number of rows as y. It should not be a data frame. #' @param include.mean Should the ARIMA model include a mean term? The default #' is \code{TRUE} for undifferenced series, \code{FALSE} for differenced ones #' (where a mean would not affect the fit nor predictions). #' @param include.drift Should the ARIMA model include a linear drift term? #' (i.e., a linear regression with ARIMA errors is fitted.) The default is #' \code{FALSE}. #' @param include.constant If \code{TRUE}, then \code{include.mean} is set to #' be \code{TRUE} for undifferenced series and \code{include.drift} is set to #' be \code{TRUE} for differenced series. Note that if there is more than one #' difference taken, no constant is included regardless of the value of this #' argument. This is deliberate as otherwise quadratic and higher order #' polynomial trends would be induced. #' @param method Fitting method: maximum likelihood or minimize conditional #' sum-of-squares. 
The default (unless there are missing values) is to use #' conditional-sum-of-squares to find starting values, then maximum likelihood. #' @param model Output from a previous call to \code{Arima}. If model is #' passed, this same model is fitted to \code{y} without re-estimating any #' parameters. #' @param x Deprecated. Included for backwards compatibility. #' @param ... Additional arguments to be passed to \code{\link[stats]{arima}}. #' @inheritParams forecast #' @return See the \code{\link[stats]{arima}} function in the stats package. #' The additional objects returned are \item{x}{The time series data} #' \item{xreg}{The regressors used in fitting (when relevant).} #' \item{sigma2}{The bias adjusted MLE of the innovations variance.} #' #' @export #' #' @author Rob J Hyndman #' @seealso \code{\link{auto.arima}}, \code{\link{forecast.Arima}}. #' @keywords ts #' @examples #' library(ggplot2) #' WWWusage %>% #' Arima(order=c(3,1,0)) %>% #' forecast(h=20) %>% #' autoplot #' #' # Fit model to first few years of AirPassengers data #' air.model <- Arima(window(AirPassengers,end=1956+11/12),order=c(0,1,1), #' seasonal=list(order=c(0,1,1),period=12),lambda=0) #' plot(forecast(air.model,h=48)) #' lines(AirPassengers) #' #' # Apply fitted model to later data #' air.model2 <- Arima(window(AirPassengers,start=1957),model=air.model) #' #' # Forecast accuracy measures on the log scale. #' # in-sample one-step forecasts. #' accuracy(air.model) #' # out-of-sample one-step forecasts. #' accuracy(air.model2) #' # out-of-sample multi-step forecasts #' accuracy(forecast(air.model,h=48,lambda=NULL), #' log(window(AirPassengers,start=1957))) #' Arima <- function(y, order=c(0, 0, 0), seasonal=c(0, 0, 0), xreg=NULL, include.mean=TRUE, include.drift=FALSE, include.constant, lambda=model$lambda, biasadj=FALSE, method=c("CSS-ML", "ML", "CSS"), model=NULL, x=y, ...) { # Remove outliers near ends # j <- time(x) # x <- na.contiguous(x) # if(length(j) != length(x)) # warning("Missing values encountered. 
Using longest contiguous portion of time series") series <- deparse(substitute(y)) origx <- y if (!is.null(lambda)) { x <- BoxCox(x, lambda) lambda <- attr(x, "lambda") if (is.null(attr(lambda, "biasadj"))) { attr(lambda, "biasadj") <- biasadj } } if (!is.null(xreg)) { if(!is.numeric(xreg)) stop("xreg should be a numeric matrix or a numeric vector") xreg <- as.matrix(xreg) if (is.null(colnames(xreg))) { colnames(xreg) <- if (ncol(xreg) == 1) "xreg" else paste("xreg", 1:ncol(xreg), sep = "") } } if (!is.list(seasonal)) { if (frequency(x) <= 1) { seasonal <- list(order = c(0, 0, 0), period = NA) if(length(x) <= order[2L]) stop("Not enough data to fit the model") } else { seasonal <- list(order = seasonal, period = frequency(x)) if(length(x) <= order[2L] + seasonal$order[2L] * seasonal$period) stop("Not enough data to fit the model") } } if (!missing(include.constant)) { if (include.constant) { include.mean <- TRUE if ((order[2] + seasonal$order[2]) == 1) { include.drift <- TRUE } } else { include.mean <- include.drift <- FALSE } } if ((order[2] + seasonal$order[2]) > 1 & include.drift) { warning("No drift term fitted as the order of difference is 2 or more.") include.drift <- FALSE } if (!is.null(model)) { tmp <- arima2(x, model, xreg = xreg, method = method) xreg <- tmp$xreg tmp$fitted <- NULL tmp$lambda <- model$lambda } else { if (include.drift) { xreg <- `colnames<-`(cbind(drift = 1:length(x), xreg), make.unique(c("drift", if(is.null(colnames(xreg)) && !is.null(xreg)) rep("", NCOL(xreg)) else colnames(xreg)))) } if (is.null(xreg)) { suppressWarnings(tmp <- stats::arima(x = x, order = order, seasonal = seasonal, include.mean = include.mean, method = method, ...)) } else { suppressWarnings(tmp <- stats::arima(x = x, order = order, seasonal = seasonal, xreg = xreg, include.mean = include.mean, method = method, ...)) } } # Calculate aicc & bic based on tmp$aic npar <- length(tmp$coef[tmp$mask]) + 1 missing <- is.na(tmp$residuals) firstnonmiss <- head(which(!missing),1) lastnonmiss <- tail(which(!missing),1) n <- sum(!missing[firstnonmiss:lastnonmiss]) nstar <- n - tmp$arma[6] - tmp$arma[7] * tmp$arma[5] tmp$aicc <- tmp$aic + 2 * npar * (nstar / (nstar - npar - 1) - 1) tmp$bic <- tmp$aic + npar * (log(nstar) - 2) tmp$series <- series tmp$xreg <- xreg tmp$call <- match.call() tmp$lambda <- lambda tmp$x <- origx # Adjust residual variance to be unbiased if (is.null(model)) { tmp$sigma2 <- sum(tmp$residuals ^ 2, na.rm = TRUE) / (nstar - npar + 1) } out <- structure(tmp, class = c("forecast_ARIMA", "ARIMA", "Arima")) out$fitted <- fitted.Arima(out) out$series <- series return(out) } # Refits the model to new data x arima2 <- function(x, model, xreg, method) { use.drift <- is.element("drift", names(model$coef)) use.intercept <- is.element("intercept", names(model$coef)) use.xreg <- is.element("xreg", names(model$call)) sigma2 <- model$sigma2 if (use.drift) { driftmod <- lm(model$xreg[, "drift"] ~ I(time(as.ts(model$x)))) newxreg <- driftmod$coeff[1] + driftmod$coeff[2] * time(as.ts(x)) if (!is.null(xreg)) { origColNames <- colnames(xreg) xreg <- cbind(newxreg, xreg) colnames(xreg) <- c("drift", origColNames) } else { xreg <- as.matrix(data.frame(drift = newxreg)) } use.xreg <- TRUE } if (!is.null(model$xreg)) { if (is.null(xreg)) { stop("No regressors provided") } if (ncol(xreg) != ncol(model$xreg)) { stop("Number of regressors does not match fitted model") } } if (model$arma[5] > 1 & sum(abs(model$arma[c(3, 4, 7)])) > 0) # Seasonal model { if (use.xreg) { refit <- Arima( x, order = 
model$arma[c(1, 6, 2)], seasonal = list(order = model$arma[c(3, 7, 4)], period = model$arma[5]), include.mean = use.intercept, xreg = xreg, method = method, fixed = model$coef ) } else { refit <- Arima( x, order = model$arma[c(1, 6, 2)], seasonal = list(order = model$arma[c(3, 7, 4)], period = model$arma[5]), include.mean = use.intercept, method = method, fixed = model$coef ) } } else if (length(model$coef) > 0) # Nonseasonal model with some parameters { if (use.xreg) { refit <- Arima(x, order = model$arma[c(1, 6, 2)], xreg = xreg, include.mean = use.intercept, method = method, fixed = model$coef) } else { refit <- Arima(x, order = model$arma[c(1, 6, 2)], include.mean = use.intercept, method = method, fixed = model$coef) } } else { # No parameters refit <- Arima(x, order = model$arma[c(1, 6, 2)], include.mean = FALSE, method = method) } refit$var.coef <- matrix(0, length(refit$coef), length(refit$coef)) if (use.xreg) { # Store the regressors actually used so downstream methods can find them refit$xreg <- xreg } refit$sigma2 <- sigma2 return(refit) } # Modified version of function print.Arima from stats package #' @export print.forecast_ARIMA <- function(x, digits=max(3, getOption("digits") - 3), se=TRUE, ...) { cat("Series:", x$series, "\n") cat(arima.string(x, padding = FALSE), "\n") if (!is.null(x$lambda)) { cat("Box-Cox transformation: lambda=", x$lambda, "\n") } # cat("\nCall:", deparse(x$call, width.cutoff=75), "\n", sep=" ") # if(!is.null(x$xreg)) # { # cat("\nRegression variables fitted:\n") # xreg <- as.matrix(x$xreg) # for(i in 1:3) # cat(" ",xreg[i,],"\n") # cat(" . . .\n") # for(i in 1:3) # cat(" ",xreg[nrow(xreg)-3+i,],"\n") # } if (length(x$coef) > 0) { cat("\nCoefficients:\n") coef <- round(x$coef, digits = digits) if (se && NROW(x$var.coef)) { ses <- rep.int(0, length(coef)) ses[x$mask] <- round(sqrt(diag(x$var.coef)), digits = digits) coef <- matrix(coef, 1L, dimnames = list(NULL, names(coef))) coef <- rbind(coef, s.e. = ses) } # Change intercept to mean if no regression variables j <- match("intercept", colnames(coef)) if (is.null(x$xreg) & !is.na(j)) { colnames(coef)[j] <- "mean" } print.default(coef, print.gap = 2) } cm <- x$call$method if (is.null(cm) || cm != "CSS") { cat( "\nsigma^2 estimated as ", format(x$sigma2, digits = digits), ": log likelihood=", format(round(x$loglik, 2L)), "\n", sep = "" ) # npar <- length(x$coef) + 1 npar <- length(x$coef[x$mask]) + 1 missing <- is.na(x$residuals) firstnonmiss <- head(which(!missing),1) lastnonmiss <- tail(which(!missing),1) n <- lastnonmiss - firstnonmiss + 1 nstar <- n - x$arma[6] - x$arma[7] * x$arma[5] bic <- x$aic + npar * (log(nstar) - 2) aicc <- x$aic + 2 * npar * (nstar / (nstar - npar - 1) - 1) cat("AIC=", format(round(x$aic, 2L)), sep = "") cat(" AICc=", format(round(aicc, 2L)), sep = "") cat(" BIC=", format(round(bic, 2L)), "\n", sep = "") } else { cat( "\nsigma^2 estimated as ", format(x$sigma2, digits = digits), ": part log likelihood=", format(round(x$loglik, 2)), "\n", sep = "" ) } invisible(x) } #' Return the order of an ARIMA or ARFIMA model #' #' Returns the order of a univariate ARIMA or ARFIMA model. #' #' #' @param object An object of class \dQuote{\code{Arima}}, \dQuote{\code{ar}} or #' \dQuote{\code{fracdiff}}. Usually the result of a call to #' \code{\link[stats]{arima}}, \code{\link{Arima}}, \code{\link{auto.arima}}, #' \code{\link[stats]{ar}}, \code{\link{arfima}} or #' \code{\link[fracdiff]{fracdiff}}. #' @return A numerical vector giving the values \eqn{p}, \eqn{d} and \eqn{q} of #' the ARIMA or ARFIMA model.
For a seasonal ARIMA model, the returned vector #' contains the values \eqn{p}, \eqn{d}, \eqn{q}, \eqn{P}, \eqn{D}, \eqn{Q} and #' \eqn{m}, where \eqn{m} is the period of seasonality. #' @author Rob J Hyndman #' @seealso \code{\link[stats]{ar}}, \code{\link{auto.arima}}, #' \code{\link{Arima}}, \code{\link[stats]{arima}}, \code{\link{arfima}}. #' @keywords ts #' @examples #' WWWusage %>% auto.arima %>% arimaorder #' #' @export arimaorder <- function(object) { if (is.element("Arima", class(object))) { order <- object$arma[c(1, 6, 2, 3, 7, 4, 5)] names(order) <- c("p", "d", "q", "P", "D", "Q", "Frequency") seasonal <- (order[7] > 1 & sum(order[4:6]) > 0) if (seasonal) { return(order) } else { return(order[1:3]) } } else if (is.element("ar", class(object))) { return(c("p" = object$order, "d" = 0, "q" = 0)) } else if (is.element("fracdiff", class(object))) { return(c("p" = length(object$ar), "d" = object$d, "q" = length(object$ma))) } else { stop("object not of class Arima, ar or fracdiff") } } #' @export as.character.Arima <- function(x, ...) { arima.string(x, padding = FALSE) } #' @rdname is.ets #' @export is.Arima <- function(x) { inherits(x, "Arima") } #' @rdname fitted.Arima #' @export fitted.ar <- function(object, ...) { getResponse(object) - residuals(object) } forecast/R/bootstrap.R0000644000176200001440000000720313553673010014426 0ustar liggesusers# Bootstrap functions # Trend estimation like STL without seasonality. # Non-robust version tl <- function(x, ...) { x <- as.ts(x) tspx <- tsp(x) n <- length(x) tt <- 1:n fit <- supsmu(tt, x) out <- ts(cbind(trend = fit$y, remainder = x - fit$y)) tsp(out) <- tsp(x) out <- structure(list(time.series = out), class = "stl") return(out) } # Function to return some bootstrap samples of x # based on LPB lpb <- function(x, nsim=100) { n <- length(x) meanx <- mean(x) y <- x - meanx gamma <- wacf(y, lag.max = n)$acf[, , 1] s <- length(gamma) Gamma <- matrix(1, s, s) d <- row(Gamma) - col(Gamma) for (i in 1:(s - 1)) Gamma[d == i | d == (-i)] <- gamma[i + 1] L <- t(chol(Gamma)) W <- solve(L) %*% matrix(y, ncol = 1) out <- ts(L %*% matrix(sample(W, n * nsim, replace = TRUE), nrow = n, ncol = nsim) + meanx) tsp(out) <- tsp(x) return(out) } # Bootstrapping time series (based on Bergmeir et al., 2016, IJF paper) # Author: Fotios Petropoulos MBB <- function(x, window_size) { bx <- array(0, (floor(length(x) / window_size) + 2) * window_size) for (i in 1:(floor(length(x) / window_size) + 2)) { c <- sample(1:(length(x) - window_size + 1), 1) bx[((i - 1) * window_size + 1):(i * window_size)] <- x[c:(c + window_size - 1)] } start_from <- sample(0:(window_size - 1), 1) + 1 bx[start_from:(start_from + length(x) - 1)] } #' Box-Cox and Loess-based decomposition bootstrap. #' #' Generates bootstrapped versions of a time series using the Box-Cox and #' Loess-based decomposition bootstrap. #' #' The procedure is described in Bergmeir et al. Box-Cox decomposition is #' applied, together with STL or Loess (for non-seasonal time series), and the #' remainder is bootstrapped using a moving block bootstrap. #' #' @param x Original time series. #' @param num Number of bootstrapped versions to generate. #' @param block_size Block size for the moving block bootstrap. #' @return A list with bootstrapped versions of the series. The first series in #' the list is the original series. #' @author Christoph Bergmeir, Fotios Petropoulos #' @seealso \code{\link{baggedETS}}. #' @references Bergmeir, C., R. J. Hyndman, and J. M. Benitez (2016). 
Bagging #' Exponential Smoothing Methods using STL Decomposition and Box-Cox #' Transformation. International Journal of Forecasting 32, 303-312. #' @keywords ts #' @examples #' bootstrapped_series <- bld.mbb.bootstrap(WWWusage, 100) #' #' @export bld.mbb.bootstrap <- function(x, num, block_size=NULL) { if(length(x) <= 1L) return(rep(list(x), num)) freq <- frequency(x) if(length(x) <= 2*freq) freq <- 1L if (is.null(block_size)) { block_size <- ifelse(freq > 1, 2 * freq, min(8, floor(length(x) / 2))) } xs <- list() xs[[1]] <- x # the first series is the original one if (num > 1) { # Box-Cox transformation if (min(x) > 1e-6) { lambda <- BoxCox.lambda(x, lower = 0, upper = 1) } else { lambda <- 1 } x.bc <- BoxCox(x, lambda) lambda <- attr(x.bc, "lambda") if (freq > 1) { # STL decomposition x.stl <- stl(ts(x.bc, frequency = freq), "per")$time.series seasonal <- x.stl[, 1] trend <- x.stl[, 2] remainder <- x.stl[, 3] } else { # Loess trend <- 1:length(x) suppressWarnings( x.loess <- loess(ts(x.bc, frequency = 1) ~ trend, span = 6 / length(x), degree = 1) ) seasonal <- rep(0, length(x)) trend <- x.loess$fitted remainder <- x.loess$residuals } } # Bootstrap the remaining series using the MBB; seq_len(num)[-1] is empty when num == 1 for (i in seq_len(num)[-1]) { xs[[i]] <- ts(InvBoxCox(trend + seasonal + MBB(remainder, block_size), lambda)) tsp(xs[[i]]) <- tsp(x) } xs } forecast/NEWS.md0000644000176200001440000012405413617634745013204 0ustar liggesusers## forecast (development version) * The axes for `gglagplot()` have been reversed for consistency with `stats::lag.plot()`. ## forecast 8.10 (4 December 2019) * Updates to remove new CRAN errors * Bug fixes ## forecast 8.9 (22 August 2019) * Updates for CRAN policies on Suggests packages * Bug fixes ## forecast 8.8 (22 July 2019) * Updates for compatibility with fable * Bug fixes ## forecast 8.7 (26 Apr 2019) * Documentation improvements * Bug fixes ## Version 8.6 (15 Apr 2019) * Reduced conflicts with tidy forecasting packages * Forecast autoplots now use same colour shading as autolayer() and geom_forecast() * Documentation improvements * Bug fixes ## Version 8.5 (18 Jan 2019) * Updated tsCV() to handle exogenous regressors * Reimplemented lagwalk methods (naive, snaive, rwf) for speed improvements * Added support for passing arguments to auto.arima() unit root tests * Improved auto.arima() stepwise search algorithm * Documentation improvements * Bug fixes ## Version 8.4 (20 June 2018) * Added modelAR(), generalising nnetar() to support user-defined functions * Added na.action argument to ets * Documentation improvements * Bug fixes ## Version 8.3 (5 April 2018) * Added mstl() to handle multiple seasonal decomposition * stlf(), stlm(), tsoutliers() and tsclean() all now use mstl(). * Updated tsCV() to handle multiple horizons * Switched unit root tests in ndiffs() to use urca package * Added ocsb.test() * Changed method for choosing D in auto.arima() to a measure of seasonal strength. * Added baggedModel() function to generalize baggedETS() * Added bootstrapped PI to more functions * Allowed lambda='auto' for all functions with lambda argument.
* Updated author list to include all major contributors * Documentation improvements * Bug fixes ## Version 8.2 (25 September 2017) * Added pkgdown site * Added rolling window option to tsCV() * Improved robustness to short time series and missing values * Bug fixes ## Version 8.1 (17 June 2017) * Added as.character.ets, as.character.bats, as.character.tbats * Made gghistogram() and checkresiduals() robust to missing values * All documentation now generated using roxygen * Improved documentation for many functions * Added autoplot.msts() and autolayer.msts() * Added as.character methods for many models to generate model names * Added as.ts.forecast() * autoplot method for bats/tbats models * Better ARIMA trace output * Made accuracy an S3 method * Bug fixes ## Version 8.0 (22 February 2017) * Added tips to start up message * Added pipe operator * Added tsCV() and CVar() functions * Added baggedETS() * Added head.ts() and tail.ts(), so head and tail now work properly on ts objects. * Added gghistogram() and checkresiduals() * Added ggseasonplot with polar coordinates * Modified defaults for gglagplot * Added autolayer.ts * Added type argument to residuals() for different types of residuals * Added support for seas objects from the seasonal package * Component extraction for seasonal decomposition methods * Range bars for decomposition autoplots * Added autoplot.StructTS() * Added vignette based on 2008 JSS article by Hyndman and Khandakar * Improved ggplot functions * mforecast objects re-structured * Added as.data.frame.mforecast() * autoplot functions now exported * Refit support for arfima() and stlm() * Better bias adjustment support after Box-Cox transformation * print.ARIMA has better labelling of constants * Bug fixes * Removed fortify method for forecast objects ## Version 7.3 (12 October 2016) * Added prediction intervals and simulation for nnetar(). * Documentation improvement * Bug fixes ## Version 7.2 (8 September 2016) * Faceting for autoplot.mts * Box-Cox support for ses, holt, hw * ets() now works for tiny time series * Added h-step fitted values in fitted() function. * seasonal adjustment added to thetaf() * y now the standard first argument in all modelling functions * Added truncate argument to auto.arima() * seasadj() now an S3 method * series with frequency < 1 and non-integer seasonality now handled better * ggplot2 theme support * Added gglagplot, gglagchull * Arima() and auto.arima() now allow any argument to be passed to stats::arima(). * Bug fixes and speed improvements ## Version 7.1 (14 April 2016) * Fixed bug in auto.arima where the Box-Cox transformation was sometimes applied twice * Improved axes for ggseasonalplot() * Improved tslm() to avoid some problems finding data * nnetar() updated to allow subsets() * Modified initial values for ets() * Improved unit tests to avoid deprecated functions and to avoid data from fpp * Removed fpp from Suggests list ## Version 7.0 (3 April 2016) * Added ggplot2 graphics * Bias adjustment option added for all functions that allow Box-Cox transformations * Added Ccf function, and rewrote Acf to handle multivariate series. * tslm() completely rewritten to be more robust and to handle fourier terms more easily * Support for multivariate linear models added * subset.ts() more robust, and captures some errors. * Added xreg argument to nnetar() * Improved labels in seasonplot() * More unit tests added * Documentation improvements * Bug fixes ## Version 6.2 (20 October 2015) * Many unit tests added using testthat. 
* Fixed bug in ets when very short seasonal series were passed in a data frame. * Fixed bug in nnetar where the initial predictor vector was reversed. * Corrected model name returned in nnetar(). * Fixed bug in accuracy() when non-integer seasonality used. * Made auto.arima() robust to non-integer seasonality. * Fixed bug in auto.arima where allowmean was ignored when stepwise=FALSE. * Improved robustness of forecast.ets() for explosive models with multiplicative trends. * Exogenous variables now passed to VAR forecasts * Increased maximum nmse in ets() to 30. * Made tsoutliers() more robust to weak seasonality * Changed tsoutliers() to use supsmu on non-seasonal and seasonally adjusted data. * Fixed bug in tbats() when seasonal period 1 is a small multiple of seasonal period 2. * Other bug fixes ## Version 6.1 (11 May 2015) * Made auto.arima more robust ## Version 6.0 (9 May 2015) * Modified dm.test to give error when variance is zero * Corrected help file for splinef(). * Fixed typo in accuracy help file regarding RMSE * Fixed bug in accuracy() which occurred with Arima and ets objects. * Fixed arima.errors() to handle Box-Cox transformed models. * Modified auto.arima() to be stricter on near-unit-roots. * Added allowmean argument in auto.arima(). * Improved handling of constant series in Arima() and forecast.Arima(). * Added plot.Arima() and plot.ar() functions. * Added as.character.Arima() * Captured problem in bats/tbats where data are constant. * Modified TBATS and BATS estimation to avoid occasional instabilities. * Fixed bug in forecasts from bats which labelled them as TBATS. * Added allow.multiplicative.trend argument to ets(). * Set allow.multiplicative.trend=FALSE in stlf(), stlm() and forecast.ts(). * Simplified arguments in stlf(). * Added taperedacf and taperedpacf functions * Added functions for bootstrapping time series ## Version 5.9 (26 February 2015) * Improved documentation of accuracy() function. * Fixed occasional bug in accuracy() when test set is a single observation. * Improved Acf() to give better handling of horizontal axis for seasonal data or when ... is passed. * Removed print.Arima and predict.Arima and added print.ARIMA * method argument now passed when re-fitting an ARIMA model. * Fixed error when CH test applied to short series ## Version 5.8 (6 January 2015) * Fixed bug in versions of R before 3.10 when using fourier and fourierf. * Made BoxCox.lambda() robust to missing values. ## Version 5.7 (17 December 2014) * Fixed bug in tbats/bats where optional arguments were not being passed to auto.arima(). * Revised fourier() and fourierf() to avoid large orders, and to avoid zero columns. * Improved accuracy of fourier() and fourierf(), while simplifying the code. * Removed duplicate columns returned by fourier/fourierf with multiple seasonal periods. * Corrected some bugs in simulate.Arima for models involving xreg. * Centred simulations from simulate.Arima for non-stationary models by conditioning on first observation. * Added findfrequency() function. * Fixed error in computed residuals from forecast.stl(). * Improved handling of very short series in auto.arima(). * Fixed error in forecasting with additive damped models. Damping previously applied only from second forecast horizon. * Fixed misuse of abs() in two places in C code. * Added na.action argument to Acf() and fixed na.action argument in tsdisplay().
## Version 5.6 (23 September 2014) * Improved tbats and bats by ensuring ARMA coefficients are not close to the boundary of invertibility and stationarity. * Improved nsdiffs() handling of degenerate series (e.g., all zeros). * Improved forecast.ar() when function buried within other functions. * Improved handling of degenerate ARIMA models when xreg used. * More robust ets() initialization. * Fixed problem in na.interp() with seasonal data having frequency <= 5. * Removed undocumented option to use Rmalschains for optimization of ets(). ## Version 5.5 (12 August 2014) * Improved documentation for croston() * Added stlm() and forecast.stlm() functions, and added forecastfunction argument as a way of specifying a forecast method in stlf() and forecast.stl(). * Improved forecast.ar() so that it is more likely to work if ar() and forecast.ar() are embedded within other functions. * Improved handling of ARIMA models with seasonality greater than 48 * Improved handling of some degenerate regression models in nsdiffs() * Changed AIC for poor models from 1e20 to Inf. * Update fourier() and fourierf() to work with msts object. * Added a new argument find.frequency to forecast.ts(). * Added new arguments d and D to accuracy() for MASE. * Corrected bugs in accuracy(). * Better handling of regression models with perfect fit in auto.arima(). * Fixed bug in tbats.components() when there are no seasonal components. ## Version 5.4 (8 May 2014) * Fixed bug in forecast.tbats() and forecast.bats() when ts.frequency does not match seasonal.periods. * Fixed bug in getResponse.lm() when there's a logged dependent variable. * Modified ets() to avoid problems when data contains large numbers. * Modified ets() to produce forecasts when the data are constant. * Improved arima.errors() to find xreg more often, and to return an error if it can't be found. ## Version 5.3 (24 March 2014) * Unit tests added * Fixed bug in zzhw() which reversed the sign of the residuals. * Updated help file for CV() to specify it is only leave-one-out. * Fixed guer.cv() to allow non-integer periods without warning. * Added use.initial.values argument in ets(). * Added arimaorder() function. * Modified warnings suppression by using suppressWarnings() throughout. ## Version 5.2 (25 February 2014) * Changed default number of cores to 2 for all functions that use parallel processing. * Removed remaining call to bats() from examples that are run. ## Version 5.1 (8 February 2014) * Fixed bug in tsoutliers() and tsclean() with very short seasonal series. * Fixed bug in Arima() when seasonal order is specified numerically instead of via a list. * Removed dimension attribution from output of arima.errors() * Improved handling of "test" in accuracy() * Changed parallel processing to parLapply for auto.arima() * Added timeDate dependency to avoid errors in easter() and link to Rcpp >= 0.11.0. ## Version 5.0 (17 January 2014) * Added argument model to dshw(). * Added bizdays() and easter() for calendar variables. * Added arguments max.D and max.d to auto.arima(), ndiffs() and nsdiffs(). * Made several functions more robust to zoo objects. * Corrected an error in the calculation of AICc when using CV(). * Made minimum default p in nnetar equal to 1. 
* Added tsoutliers() and tsclean() for identifying and replacing outliers * Improved na.interp() to handle seasonality and added argument lambda to na.interp() * Added robust option to forecast.ts() to allow outliers and missing values * Improved output from snaive() and naive() to better reflect user expectations * Allowed Acf() to handle missing values by using na.contiguous * Changed default information criterion in ets() to AICc. * Removed drift term in Arima() when d+D>1. * Added bootstrap option to forecast.Arima() ## Version 4.8 (30 September 2013) * Fixed bug in rwf() that was introduced in v4.7 ## Version 4.7 (26 September 2013) * Added forecast.forecast() to simply return the object that is passed. * Removed leading zero in package number. i.e., 4.7 instead of 4.07. * better handling of nearly constant time series, and nearly linear time series * improved handling of missing values in rwf() * corrected fitted values and residuals in meanf() for time series data * bats() and tbats() now handle missing values in the same way as ets(). i.e., using longest contiguous portion. * better handling of very short time series * initial states for ets() modified for very short time series (less than 3 years). * nsdiffs with CH test now handles degenerate cases without returning an error. * nnetar now handles missing values * Fixed bug in forecast.varest() so residuals and fitted values computed correctly. * Added accuracy() calculation for VAR models * Fixed a bug in simulate.fracdiff() when future=TRUE. Sometimes the future argument was being ignored. ## Version 4.06 (30 June 2013) * accuracy() was returning a mape and mpe 100 times too large for in-sample errors. ## Version 4.05 (19 June 2013) * Fixed bug in hw() so it works when initial="simple" * Allowed bats() and tbats() to take non-positive values. * ets() now calls optim direct via c code making ets() run much faster. * Added Rmalschains as a possible optimizer in ets(). Not documented. * Modified forecast.lm so it is more likely that the original data are stored in the returned object. * Corrected bug in forecast.Arima that occurred when a Box-Cox transformation was used with bootstrap=TRUE. * accuracy() updated so that it gives more information, and returns a matrix of both test and training measures. * Corrected training error measures for splinef() forecasts. ## Version 4.04 (22 April 2013) * Added ylim argument to Acf() * Avoided clash with the signal package when using auto.arima(). * Fixed problem in plot.forecast() when all historical data are NA or when there is no available historical data. * forecast.Arima() is now a little more robust if a zoo object is passed instead of a ts object. * CV() now handles missing values in the residuals. * Fixed bug in holt() and hw() so that the printed model no longer contains missing values. ## Version 4.03 (14 March 2013) * forecast.lm now guesses the variable name if there is only one predictor variable. * Removed error trap in forecast.lm when no xreg variables passed as it was catching legitimate calls. ## Version 4.02 (6 March 2013) * Fixed error in the prediction intervals returned by forecast.ets() when simulation was used and a Box-Cox transformation was specified. * Fixed bug in accuracy() when a numerical f vector was passed. * Fixed man file for Diebold-Mariano test. * Corrected references in nsdiffs() help page. * Added warning to nsdiffs when series too short for seasonal differencing. 
* Fixed problem in getResponse.Arima when Arima object created by stats::arima() from within a function. * Added tbats.components() and extended seasadj() to allow tbats objects. * Added undocumented functions for forecasting, printing and plotting output from vars::VAR. ## Version 4.01 (22 January 2013) * Error now trapped when newxreg variables not passed to forecast.lm() * Corrected help file for dshw() to remove references to prediction intervals. * Improved help file for dm.test() to give more information about the alternative hypotheses. * Improved dm.test() performance for small samples by using a t-distribution instead of normal. * Modified bats() and tbats() examples to follow CRAN policies on parallel processing. * Moved some packages from Depends to Imports. * Added getResponse() function to return the historical time series from various time series model objects. * Modified accuracy() to use getResponse(). * Allowed user-generated innovations in simulate.ets(), simulate.Arima(), etc. * Allowed xreg argument in forecast.stl() and stlf() when ARIMA model used. * Removed reliance on caret, and associated fitted and residuals functions. ## Version 4.00 (27 November 2012) * More robust handling of degenerate ARIMA models. * New defaults for shaded colors used for prediction intervals in plots. * auto.arima() now remembers the name of the series when a Box-Cox transformation is used. * New function nnetar() for automatic neural network forecasting of time series. * arfima() now tries harder to ensure the ARMA part is stationary. * ts control added for forecast of linear models in forecast.lm(). * Fixed bug in bats() which caused an error when use.box.cox=FALSE and use.trend=FALSE. * Added residuals and fitted methods for train and avNNet objects from caret package. * accuracy() can now figure out overlapping times for x and f. * rwf() now handles missing values. * Revised ses(), holt() and hw() so that they can optionally use traditional initialization. ## Version 3.25 (11 September 2012) * Fixed bug in simulate.Arima. * Improved handling of short seasonal time series in auto.arima(). * Added seasonal argument to auto.arima(). * Fixed bug in splinef() and added gcv method for estimating smoothing parameter. ## Version 3.24 (23 July 2012) * Fixed bug in auto.arima() introduced in v3.23 which meant an ARIMA(0,0,0) model was returned about half the time. ## Version 3.23 (18 July 2012) * Fixed bug in arfima() which meant the drange argument was being ignored. * Extended auto.arima() so it returns something sensible when the data are constant. ## Version 3.22 (07 June 2012) * Increased maximum forecast horizon for ets models from 2000 to unlimited. * Corrected bug in Arima(). Previously include.constant=FALSE was ignored. * Some corrections to bats and tbats. * Modified parallel implementation in auto.arima for Windows. ## Version 3.21 (26 April 2012) * Fixed bug in auto.arima() when lambda is non-zero and stepwise is FALSE. * Fixed bug in auto.arima() in selecting d when D>0. * Fixed bug in ets() when seasonal period is less than 1. * Turned off warnings in auto.arima() and ets() when seasonal period is less than 1. * Added plotting methods for bats and tbats objects. * Changed default forecast horizons for bats and tbats objects. * Modified bats and tbats so they now use seasonal.periods when ts and msts objects are being modelled. ## Version 3.20 (2 April 2012) * Fixed bugs in forecast.lm(). * Improved handling of newdata in forecast.lm() to provide more meaningful error messages.
* Fixed bug in dm.test() that occurred when errors were very small. ## Version 3.19 (22 February 2012) * Improved plotting of forecast objects from lm models * Added MASE for lm forecasts using insample mean forecasts for scaling. * Modified definition of MASE for seasonal time series to use seasonal naive insample scaling. * Modified meanf() to allow it to be used with cross-sectional data. * Updated accuracy() to allow it to be used with cross-sectional data, lm forecasts and lm objects. ## Version 3.18 (17 February 2012) * Added method for plotting non-time-series forecasts to plot.forecast(). * Removed partial arg matching. * Cleaned up some code, removing commented out sections, etc. * Added robust option to stlf(). * Added naive and rwdrift options to stlf() and forecast.stl(). * Improved handling of msts objects in BoxCox.lambda() * Fixed some minor bugs in tbats() and bats() * Improved speed of bats() and tbats(). ## Version 3.17 (2 February 2012) * Improved forecast.lm() so it is more likely to find the original data from an lm object. * Parallel processing now available in auto.arima() when stepwise=FALSE * Default model selection in auto.arima() changed to AICc rather than AIC. This may affect model selection for very short time series. * max orders in auto.arima() now restricted to be less than 1/3 of length of data. ## Version 3.16 (24 December 2011) * Corrected problem with AIC computation in bats and tbats * Fixed handling of non-seasonal data in bats() * Changed dependency to >= R 2.14.0 in order to ensure parallel package available. ## Version 3.15 (22 December 2011) * New functions tbats() and forecast.tbats() for multiple seasonal time series modelling. * bats() and tbats() use parallel processing when possible. * Minor improvements to bats() and forecast.bats(). * decompose() removed as the function in the stats package has now been fixed. ## Version 3.14 (9 December 2011) * Improved documentation for forecast.ts() * Corrected bug in dshw() when applied to a non-ts object. * Added error message when dshw() applied to data containing zeros or negative values * Added checks when dshw() applied to time series with non-nested periods. * Added msts object class for multiple seasonal time series * Made taylor data set an msts object. * Added bats() function for multiple seasonal time series modelling * Added forecast.bats() function for forecasting BATS models * Byte compiling turned on * Depending on Rcpp and RcppArmadillo to speed some code up. ## Version 3.13 (19 November 2011) * Bug fix for forecast.StructTS() due to changes in the StructTS object. The default h was being set to 0. Thanks to Tarmo Leinonen for reporting this problem. * Bug fix for forecast.stl() where h longer than one seasonal period sometimes returned missing forecasts. Thanks to Kevin Burton for reporting this problem. * forecast.stl() no longer allows a seasonal ETS model to be specified. Thanks to Stefano Birmani for the suggestion. ## Version 3.12 (16 November 2011) * Added option to control ets model in stlf() and forecast.stl(). Thanks to Stefano Birmani for the suggestion. * Reordered arguments for forecast.lm() and stlf() to be consistent with other forecast functions. * Modified tslm() so that it is more likely to find the relevant data when it is not passed as an argument. * Fixed bug in forecast.ets which returned all zero forecasts for some models when seasonal period > 24.
## Version 3.11 (2 November 2011) * Fixed bug in dshw() when smallest period is odd ## Version 3.10 (27 October 2011) * Added lambda argument to naive() and snaive(). * Fixed bug in ets() with high frequency data. * Fixed bug in rwf() where incorrect fitted values and residuals were sometimes returned. * Modified number of lags displayed by default in tsdisplay(). ## Version 3.09 (18 October 2011) * Fixed bug causing occasional problems in simulate.Arima() when MA order greater than 2 and future=TRUE. ## Version 3.08 (15 October 2011) * Bug fix in forecast.stl() which occurred when forecast horizon is less than seasonal period. * Added lambda argument to forecast.stl(). ## Version 3.07 (11 October 2011) * Bug fix in ets() concerning non-seasonal models and high-frequency data. It sometimes returned all forecasts equal to zero. ## Version 3.06 (4 October 2011) * Switched to useDynLib in preparation for Rv2.14.0. ## Version 3.05 (3 October 2011) * Fixed bug in ets() which prevent non-seasonal models being fitted to high frequency data. ## Version 3.04 (23 September 2011) * Fixed bug when drift and xreg used together in auto.arima() or Arima(). ## Version 3.03 (2 September 2011) * Bug fix in dshw() which was using slightly incorrect seasonal estimates for the forecasts * Bug fix in forecast.StructTS due to change in structure of StructTS object. * Better error capture in tslm when seasonal dummies are specified for non-seasonal data. * Re-formatted some help files to prevent viewing problems with the pdf manual. ## Version 3.02 (25 August 2011) * Bug fixes ## Version 3.00 (24 August 2011) * Added Box-Cox parameter as argument to Arima(), ets(), arfima(), stlf(), rwf(), meanf(), splinef() * Added Box-Cox parameter as argument to forecast.Arima(), forecast.ets(), forecast.fracdiff(), forecast.ar(), forecast.StructTS, forecast.HoltWinters(). * Removed lambda argument from plot.forecast() and accuracy(). * Added BoxCox.lambda() function to allow automatic choice for Box-Cox parameter using Guerrero's method or the profile log likelihood method. * Modified BoxCox and InvBoxCox to return missing values when lambda < 0 and data < 0. * Add nsdiffs() function for selecting the number of seasonal differences. * Modified selection of seasonal differencing in auto.arima(). * Better error message if seasonal factor used in tslm() with non-seasonal data. * Added PI argument to forecast.ets() to allow only point forecasts to be computed. * Added include.constant argument to Arima(). * Added subset.ts() function. * Upgraded seasonplot() function to allow colors and to fix some bugs. * Fixed fitted values returned by forecast.HoltWinters() * Modified simulate.Arima() because of undocumented changes in filter() function in stats package. * Changed residuals returned by splinef() to be ordinary residuals. The standardized residuals are now returned as standardizedresiduals. * Added dshw() function for double-seasonal Holt-Winters method based on Taylor (2003). * Fixed further bugs in the decompose() function that caused the results to be incorrect with odd frequencies. ## Version 2.19 (4 June 2011) * Added xreg information to the object returned by auto.arima(). * Added Acf(), Pacf(), ma() and CV() functions. * Fixed bugs in re-fitting ARIMA models to new data. ## Version 2.18 (19 May 2011) * Fixed bug in seasonplot() where year labels were sometimes incorrect. ## Version 2.17 (6 April 2011) * Modified simulate.Arima() to handle seasonal ARIMA models. * Modified ets() to handle missing values. 
The largest continuous section of data is now modelled. * Improved plot.forecast() to handle missing values at the end of the observed series. * Added replacement decompose() to avoid truncation of seasonal term and seasonally adjusted series. * Fixed bug in seasadj() to handle multiplicative decomposition, and to avoid missing values at ends. ## Version 2.16 (6 March 2011) * Changed the way missing values are handled in tslm() ## Version 2.15 (5 March 2011) * Added fourier(), fourierf(), tslm() * Improved forecast.lm() to allow trend and seasonal terms. ## Version 2.14 (4 March 2011) * Added forecast.lm() * Modified accuracy() and print.forecast() to allow non time series forecasts. * Fixed visibility of stlf(). ## Version 2.13 (16 February 2011) * Fixed bug in accuracy() when only 1 forecast is specified. * Added forecast.stl() and stlf() functions * Modified forecast.ts() to use stlf() if frequency > 12. * Made BoxCox() and InvBoxCox() robust to negative values * Fixed bug in simulate.Arima() when future=TRUE. There was a bias in the sample paths. ## Version 2.12 (19 January 2011) * Added naive() and snaive() functions. * Improved handling of seasonal data with frequency < 1. * Added lambda argument to accuracy(). ## Version 2.11 (5 November 2010) * If MLE in arfima() fails (usually because the series is non-stationary), the LS estimate is now returned. ## Version 2.10 (4 November 2010) * Fixed bug in arfima() where the MA parameters were of the wrong sign if estim="mle" chosen. * arfima() now allowed to have a sequence of missing values at the start of the series and end of the series (but not within the series) ## Version 2.09 (15 October 2010) * Fixed bug in forecast.fracdiff() which caused an error when h=1. * Added shadebars to plot.forecast(). * Fixed bug in plot.forecast() to allow plotting when h=1. ## Version 2.08 (22 September 2010) * Added pp test option for auto.arima() and ndiffs(). * Fixed bug in simulate.ets() which was causing problems when forecasting from some ETS models including ETS(M,M,N). ## Version 2.07 (9 September 2010) * Fixed bug in simulate.Arima(). Previous sample paths when d=2 and future=TRUE were incorrect. * Changed way color is implemented in plot.forecast() to avoid colour changes when the graphics window is refreshed. ## Version 2.06 (29 July 2010) * Added MLE option for arfima(). * Added simulate.Arima(), simulate.ar() and simulate.fracdiff() ## Version 2.05 (11 May 2010) * Added arfima() and a forecast method to handle ARFIMA models from arfima() and fracdiff(). * Added residuals and fitted methods for fracdiff objects. ## Version 2.04 (16 April 2010) * Fixed bug in auto.arima() that occurred rarely. ## Version 2.03 (23 December 2009) * Added an option to auto.arima() to allow drift terms to be excluded from the models considered. ## Version 2.02 (23 December 2009) * Fixed bug in auto.arima() that occurred when there was an xreg but no drift, approximation=TRUE and stepwise=FALSE. ## Version 2.01 (14 September 2009) * Fixed bug in time index of croston() output. * Added further explanation about models to croston() help file. ## Version 2.00 (7 September 2009) * Package removed from forecasting bundle ## Version 1.26 (29 August 2009) * Added as.data.frame.forecast(). This allows write.table() to work for forecast objects. ## Version 1.25 (22 July 2009) * Added argument to auto.arima() and ndiffs() to allow the ADF test to be used instead of the KPSS test in selecting the number of differences. 
* Added argument to plot.forecast() to allow different colors and line types when plotting prediction intervals. * Modified forecast.ts() to give sensible results with a time series containing fewer than four observations. ## Version 1.24 (9 April 2009) * Fixed bug in dm.test() to avoid errors when there are missing values in the residuals. * More informative error messages when auto.arima() fails to find a suitable model. ## Version 1.23 (22 February 2009) * Fixed bugs that meant xreg terms in auto.arima() sometimes caused errors when stepwise=FALSE. ## Version 1.22 (30 January 2009) * Fixed bug that meant regressor variables could not be used with seasonal time series in auto.arima(). ## Version 1.21 (16 December 2008) * Fixed bugs introduced in v1.20. ## Version 1.20 (14 December 2008) * Updated auto.arima() to allow regression variables. * Fixed a bug in print.Arima() which caused problems when the data were inside a data.frame. * In forecast.Arima(), argument h is now set to the length of the xreg argument if it is not null. ## Version 1.19 (7 November 2008) * Updated Arima() to allow regression variables when refitting an existing model to new data. ## Version 1.18 (6 November 2008) * Bug fix in ets(): models with frequency less than 1 would cause R to hang. * Bug fix in ets(): models with frequency greater than 12 would not fit due to parameters being out of range. * Default lower and upper bounds on parameters alpha, beta and gamma in ets() changed to 0.0001 and 0.9999 (instead of 0.01 and 0.99). ## Version 1.17 (10 October 2008) * Calculation of BIC did not account for reduction in length of series due to differencing. Now fixed in auto.arima() and in print.Arima(). * tsdiag() now works with ets objects. ## Version 1.16 (29 September 2008) * Another bug fix in auto.arima(). Occasionally the root checking would cause an error. The condition is now trapped. ## Version 1.15 (16 September 2008) * Bug fix in auto.arima(). The series wasn't always being stored as part of the return object when stepwise=FALSE. ## Version 1.14 (1 August 2008) * The time series stored in M3 in the Mcomp package did not contain all the components listed in the help file. This problem has now been fixed. ## Version 1.13 (16 June 2008) * Bug in plot.ets() fixed so that plots of non-seasonal models for seasonal data now work. * Warning added to ets() if the time series contains very large numbers (which can cause numerical problems). Anything up to 1,000,000 should be ok, but any larger and it is best to scale the series first. * Fixed problem in forecast.HoltWinters() where the lower and upper limits were interchanged. ## Version 1.12 (22 April 2008) * Objects are now coerced to class ts in ets(). This allows it to work with zoo objects. * A new function dm.test() has been added. This implements the Diebold-Mariano test for predictive accuracy. * Yet more bug-fixes for auto.arima(). ## Version 1.11 (8 February 2008) * Modifications to auto.arima() in the case where ML estimation does not work for the chosen model. Previously this would return no model. Now it returns the model estimated using CSS. * AIC values reported in auto.arima() when trace=TRUE and approximation=TRUE are now comparable to the final AIC values. * Addition of the expsmooth package. ## Version 1.10 (21 January 2008) * Fixed bug in seasadj() so it allows multiple seasonality * Fixed another bug in print.Arima() * Bug fixes in auto.arima(). It was sometimes returning a non-optimal model, and occasionally no model at all.
## Version 1.09 (11 December 2007)

* A new argument 'restrict' has been added to ets() with default TRUE. If set to FALSE, the unstable ETS models are also allowed.
* A bug in the print.Arima() function was fixed.

## Version 1.08 (21 November 2007)

* AICc and BIC corrected. Previously I had not taken account of the sigma^2 parameter when computing the number of parameters.
* The arima() function was changed to Arima() to avoid the clash with the arima() function in the stats package.
* auto.arima() now uses an approximation to the likelihood when selecting a model if the series has more than 100 observations or the seasonal period is greater than 12. This behaviour can be overridden via the approximation argument.
* A new function plot.ets() provides a decomposition plot of an ETS model.
* predict() is now an alias for forecast() wherever there is not an existing predict() method.
* The argument conf has been changed to level in all forecasting methods, to be consistent with other R functions.
* The functions gof() and forecasterrors() have been replaced by accuracy(), which handles both in-sample and out-of-sample forecast accuracy (see the sketch below).
* The initialization method used for a non-seasonal ETS model applied to seasonal data was changed slightly.
* The following methods for ets objects were added: summary, coef and logLik.
* The following method for Arima objects was added: summary.

## Version 1.07 (25 July 2007)

* Bug fix in the summary of in-sample errors. For ets models with multiplicative errors, the reported in-sample values of MSE, MAPE, MASE, etc., in summary() and gof() were incorrect.
* ARIMA models with frequency greater than 49 are now allowed. But there is no unit-root testing if the frequency is 50 or more, so be careful!
* Improvements in documentation.

## Version 1.06 (15 June 2007)

* Bug fix in auto.arima(). It would not always respect the stated values of max.p, max.q, max.P and max.Q.
* The tseries package is now installed automatically along with the forecasting bundle, whereas previously it was only suggested.

## Version 1.05 (28 May 2007)

* Introduced auto.arima() to provide a stepwise approach to ARIMA modelling. This is much faster than the old best.arima().
* The old grid-search method used by best.arima() is still available by using stepwise=FALSE when calling auto.arima().
* Automated choice of seasonal differences introduced in auto.arima().
* Some small changes to the starting values of ets() models.
* Fixed a bug in applying ets() to new data using a previously fitted model.

## Version 1.04 (30 January 2007)

* Added include.drift to arima().
* Fixed bug in seasonal forecasting with ets().

## Version 1.03 (20 October 2006)

* Fixed some DOS line-feed problems that were bothering unix users.

## Version 1.02 (12 October 2006)

* Added AICc option to ets() and best.arima().
* Corrected bug in the calculation of fitted values in ets models with multiplicative errors.

## Version 1.01 (25 September 2006)

* Modified ndiffs() so that the maximum number of differences allowed is 2.

## Version 1.0 (31 August 2006)

* Added MASE to gof().
* croston() now returns fitted values and residuals.
* arima() no longer allows linear trend + ARMA errors by default. Also, drift in non-stationary models can be turned off.
* This version is the first to be uploaded to CRAN.

## Version 0.99992 (8 August 2006)

* Corrections to help files. No changes to functionality.

## Version 0.99991 (2 August 2006)

* More bug fixes. ets now converges to a good model more often.
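Since version 1.08 replaced gof() and forecasterrors() with accuracy(), one call covers both kinds of error. A minimal sketch, assuming the present-day accuracy() interface rather than the 1.08-era one; the data split is illustrative:

```r
library(forecast)
# Training-set (in-sample) and test-set (out-of-sample) accuracy in one call
train <- window(AirPassengers, end = c(1957, 12))
test  <- window(AirPassengers, start = c(1958, 1))
fc <- forecast(ets(train), h = length(test))
accuracy(fc, test)  # one row of measures per set (training and test)
```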
## Version 0.9999 (1 August 2006)

* Mostly bug fixes.
* A few data sets have been moved from fma to forecast as they are not used in my book.
* ets is now considerably slower but gives better results. Full optimization is now the only option (which is what slows it down). I had too many problems with poor models when partial optimization was used. I'll work on speeding it up sometime, but this is not a high priority. It is fast enough for most use. If you really need to forecast 1000 series, run it overnight.
* In ets, I've experimented with new starting conditions for the optimization and it seems to be fairly robust now.
* Multiplicative error models can no longer be applied to series containing zeros or negative values. However, the forecasts from these models are not constrained to be positive.

## Version 0.999 (27 July 2006)

* The package has been turned into three packages forming a bundle. The functions and a few datasets are still in the forecast package. The data from Makridakis, Wheelwright and Hyndman (1998) is now in the fma package. The M-competition data is now in the Mcomp package. Both fma and Mcomp automatically load forecast.
* This is the first version available on all operating systems (not just Windows).
* pegels has been replaced by ets. ets only fits the model; it doesn't produce forecasts. To get forecasts, apply the forecast function to the ets object (see the sketch below).
* ets has been completely rewritten, which makes it slower but much easier to maintain. Different boundary conditions and a different optimizer are used, so don't expect the results to be identical to those of the old pegels function. To get something like the results from the old pegels function, use forecast(ets()).
* simulate.ets() added to simulate from an ets model.
* Changed the name of cars to auto to avoid a clash with the cars data in the datasets package.
* arima2 functionality is now handled by arima(), and pegels2 functionality is now handled by ets.
* best.arima now allows the option of BIC to be used for model selection.
* Croston's method added in function croston().
* ts.display renamed tsdisplay.
* mean.f changed to meanf, theta.f to thetaf, rw.f to rwf, seasonaldummy.f to seasonaldummyf, sindex.f to sindexf, and spline.f to splinef. These changes avoid potential problems if anyone introduces an 'f' class.

## Version 0.994 (4 October 2004)

* Fixed bug in arima which caused predict() to sometimes fail when there was no xreg term.
* More bug fixes in the handling of regression terms in arima models.
* New print.Arima function for more informative output.

## Version 0.993 (20 July 2004)

* Added a forecast function for structural time series models obtained using StructTS().
* Changed the default parameter space for pegels() to force admissibility.
* Added an option to pegels() to allow restriction to models with finite forecast variance. This restriction is imposed by default.
* Fixed bug in arima.errors(). Changes made to arima() meant arima.errors() was often returning an error message.
* Added a namespace to the package, making fewer functions visible to the user.

## Version 0.99 (21 May 2004)

* Added automatic selection of the order of differencing for best.arima.
* Added the possibility of a linear trend in arima models.
* In pegels(), an option was added to allow the parameters of an exponential smoothing model to lie in the 'admissible' (or invertible) region rather than within the usual (0,1) region.
* Fixed some bugs in pegels.
* Included all M1 and M3 data and some functions to subset and plot them.
* Note: this package will only work in R 1.9 or later.
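The fit-then-forecast pattern introduced with ets() in version 0.999 above, together with simulate.ets(), looks like this. A minimal sketch; USAccDeaths and the horizon of 24 months are illustrative:

```r
library(forecast)
# ets() only fits the model; forecast() produces the forecasts
fit <- ets(USAccDeaths)
fc  <- forecast(fit, h = 24)     # point forecasts plus prediction intervals
sim <- simulate(fit, nsim = 24)  # one simulated future sample path
plot(fc)
lines(sim, col = "red")
```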
## Version 0.98 (23 August 2003)

* Added facilities in pegels:
  o It is now possible to specify particular values of the smoothing parameters rather than always using the optimized values. If none are specified, the optimal values are still estimated as before.
  o It is also possible to specify upper and lower bounds for each parameter separately.
* New function: theta.f. This implements the Theta method, which did very well in the M3 competition (see the sketch below, using the modern function names).
* A few minor problems with pegels fixed, along with a bug in forecast.plot that meant it didn't work when the series contained missing values.

## Version 0.972 (11 July 2003)

* Small bug fix: pegels did not return the correct model when the model was partially specified.

## Version 0.971 (10 July 2003)

* Minor fixes to make sure the package will work with R v1.6.x. No changes to functionality.

## Version 0.97 (9 July 2003)

* Fully automatic forecasting based on the state space approach to exponential smoothing has now been added. For technical details, see Hyndman, Koehler, Snyder and Grose (2002).
* Local linear forecasting using cubic smoothing splines added. For technical details, see Hyndman, King, Pitrun and Billah (2002).

## Version 0.96 (15 May 2003)

* Many functions rewritten to make use of methods and classes. Consequently, several functions have had their names changed and many arguments have been altered. Please see the help files for details.
* Added functions forecast.Arima and forecast.ar.
* Added functions gof and seasadj.
* Fixed bug in plot.forecast. The starting date for the plot was sometimes incorrect.
* Added residuals components to rw.f and mean.f.
* Made several changes to ensure compatibility with R v1.7.0.
* Removed a work-around for a bug in the monthplot command present in R v<=1.6.2.
* Fixed the motel data set (columns were swapped).
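The Theta method and the cubic-smoothing-spline forecasts added around versions 0.97–0.98 above survive under the modern names thetaf() and splinef() (renamed in 0.999). A minimal sketch; the Nile series and the horizon are illustrative:

```r
library(forecast)
# Theta method (a strong performer in the M3 competition)
plot(thetaf(Nile, h = 10))
# Local linear forecasts via cubic smoothing splines
plot(splinef(Nile, h = 10))
```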
O{YLe8%Dp:vsbT֧KDM&98Օ_ɜ^/uYGwq=zk<+8PPFɶI*21{.RҐײ)MwWtNJ҈S6`t4ϹFT#"p*7`xEicAQs[՚I@G(,2o^c41_ ~3h1XGN"S{sfqX.M߉>5df"1h0O2MA_#aTT]*ĤdW Qg$z{9v[wHBT̯K3ZFkVArk.iVVk L~t"/[TO[6PB[Җɑd|ƴ}x\JveųX&G1[7Πˮу\Pt/fܹ%8ٖUNnV#9I>|,vմE0:T3,f -%ͤ |!is:/S6.E]A|iw֤:=&H'Sk!<^^q5![߈8w*08/ѕjsE!_5Hʧb{q~aZse)w9—&JMv6 xNMCRzB6!KA9X=׉bD:U$4[[}.GydFWQt^mL_g[q[cTdK8Y.W(5`}ֈ|P&ߥKU&!uĀO-4a=o5PN Uf#؇۞f^2L= D\.HT3g)ˋv6rd|L6ݫn o9Fáz .2;'L7p%NJD]qoG{cp2ss& d"kIQO'Na9QBʆMMs$uҀ0V)\<*QNiBآFQDa ԨcjGt̤חˆQtEu)CmW w⚖W4)~8!ώGʍ!< '@m )v+9xX60M4Bw8; b ޫX~t^{/0h-trR.3 Fr$L]-/LhmyS>*H|?.pytcw|c-yiDaj@~&?|`hkYd-g4ۙ !Δ&4?u^G_ZQYc^IkO"FG}:7AN "}WU%$ЈWOv8fr":3~qnZŲ=q|#wqueDBNStqË 6Ш C*Z=dh;yZmuϰÖ6*k?}Zڃ"˩,‰I攡ERWXda3s *=JLs,rhЗ9ʹceDsoLg߀Q^< 3;?t΢ObVpRD@:@F*'ϡ@ۇ~prWthr~܀׽nG6IJ W[Uz>iԱwI^$k8:pYWJ址 ZaX R~ڛew{IulK Ҷ DU8|ew^"h6%VV .nE!󆺑S L[Y ֑+@N3%pZfpyGK;жϲ#25pm*Vh\JM0h&Hk3JU8FR7~e:y,">r%<&Wcy}{Of{PZT/Ut ~QSe5~ơi_iX0g͌IG\ևwYJǒ _.Cؑ?>e}aPxR>EZnA 9W+K0ϬzAw+5'z]Pجl8MayGԋCZt, LZKWL&h+TVq0GD|&=KǪ,ӂ8}N+WnSbOFZ`yڈHds0R%3pY|Q===}[O)5ջKfz*MU'1ub@mV,?n)i%TaO6=}^veJTk"}j*^E51QDBbM%ek:hkӁs^e*0~b\!p{dT^| D5bԹZj[VwIy2b endstream endobj 561 0 obj << /Length1 2152 /Length2 15383 /Length3 0 /Length 16670 /Filter /FlateDecode >> stream xڍP\ ]wwi]<;݃[ @p8J=\)IEM FV&>6+ R9<& q'˻죱˻= j `errXXxchhfeP`ڃNV.y1r3:Y .@ƶ5{S+tqqcfvwwg2sfwe[XT@'7/Ec;࿩1S-P7wq7vV@+ xP(9A227ܿ di[JL.. c_ƶnV&n U37?gS'+g&g+ۿ82 3q{;; >Z9Mõٻ̭@f0su`Y9e>]Gftpps%_ =+YsvwZཝ݀'W?YYfV.Ow1_<, `' 3z1MJ11{7#'; doecWdnW}On/-c)ڿO.@gX8YL߿X?.(+t[O/-'} wMZ]1{[q1~Qh,i4Sr1׸KעZV]-FV{.S}&VߗSJL26N.'<(qrY @..wrs{'N,_ ,q ^7 Yb0KAf?,3(Ax3(AT j{?=0AE:S{ca6d0 4K27`s+?ۻ:#?{BYz:XYϻ?;wvv ;z${}iC^{d 4w#e_ _D9?:WsAƼ+? .Ntw87n@w$c{7-3uuzg=؀@)ʢ)u]H}(;V-S#2l2mנM[> U*O>φ3؃SǢ߉>28h@wR: +cܻHy/_[U٫Cx.eՈ ,3Z%qa$C?@Cϙz#M=e/R͹O}|[ Egɻ8^6,؜`Yؑi@ax9LX6ȡZN.zn=w aJ WtW`ASe$[a%e~ Io"l&y.H]SSR lkOB`#ƣ90FZÃo6Be3r6eMW D!~.9ﺆ,g2d{APcrOQlj_g W*هqהDS㷄[\)M6̔F Ո%ȔO6pP@xƚU'i|[ kzH MOa䄬Q( R1h+;q;Mսb"Wޖ"xޙang_ ;2!soBr]9BWgbO~#^W"[ZEWEZyAW˲Ꮣ+ym%-$==K'4&ֈ+nwqvU F`I&`zcᦖd Dq-gpʭgԃ{=Iu汀;}ŷT9b-T-֚wZG'>$[BH]"X()m.ދN_Ǩu.}˾'{Cq5auG?52vm(ƤXQ&:}^kU.šw~KS]q=+Vb%C q&7Ga>tlwX6rMI=cxC<6,O Oal۱EAp Aͷ=CH|y%9f|VX/O l9 p?j`g$9Q/dam^FrS."/ ,) _A0P-/NT#a|8L*}G]d+ӷ䲓B:7&xu`g2J#< ZJj z&)ާq. 
ohsCsbƟ#jgJUt5-kaⶥ7Tg>ҵ̏}"ld"tMno{jZyBo7Kg-c|X2`w!LҟkzUbmHI =*.feEa>Y3;M/~ !RF5_Wr6ppƫPXou\Qv9)փQDZ4;Y\=37“N6JJݸg®.MKG@~aEH1t{U^cR/A2E^% F) ]dr]s/TD{_^&1} #$d[KH,8``Hܟ[>uqol'ϞxUb;̯J 89ÓwVܰtC?M5bcR+2 lW)k?1Pp# QHF0paB\-hQ] a&nC bB ǕӶGaWsg{uY]>JIнaHh,6ZEhW%`_Q' 'HsבŎ\o:q2+72;qc+<`/e.I9/tz'㖖jCFP&?S3B|2B+^c-RۧX͋A.R2f@lKw+('|tMS!hkx ة|VX9tE)jsFAİu%e(J&5U< h`C-՘খFtN49sYeʪ,#a6WezD:Tw H7 z;!sufv|f*1{}UʀBY&B(ae$om"wM>U9fh?صe7M›u>li2 e$fj[sc>Yxʽ6ofXLOwԦ c8̹][M*<1[[ whTb6H`mwg=.Y(kYqpq¿oa|(}l\3Q_jR=$B6؈1[?m4O y g(Cwp?m'3am<0a& d13I0o;{02I[Ko|g6/'.c}ysGz4ltT: b0v)v13Jp)ɎlZSM໼V:̞f(V/LhΪUw$L͍b|= (m k-#y-=)ÌsvuۍƉBwoxJh]yPk."tqyu$uJWiJPւW%f0Lt*}+ӲU+@1L:O)$V9.:)`vs`JOo:h-^[/)!d5sukյ癙CMñ4",x9?xoY9d*_Ǒ# %™x\޵ xVDQa6?V0^[q?$həIi1W.o> V7fHCeȼm<1r69q4n_;z*0+jkCFW%m4OBk/CID Ĵ~r:X,A0~4B~TEnodp]ڜAƊueۋn0n\܁ҷǤW/JD+]&?v [c]$/=/wo.μUtud[[Dv@ sD;RU) O5<}8oYrQ|bqdDnmzRw)Mw(MUö@Pt;y7!o,ed| f8jU}\7JdKODǂ$(3d,`h;/7:A$L!q53l4zbBN1.882賱:YwH/4w7r[~ 3LM$uU;X2DnIrۯ{E BDŽ:"FؑѵV}.5tنʼ[w އ6C&m]r'4+U)!meQw2L (C}W[+R}^;.y(tFCmѼӅLD\E[Ud CnGyP0P洟N8=ԏ$c|FE~{i]RD5\>n!u5~Xd ئ t݄^6SVKp\i洸N16jr_Cr"+aYk$7Upd/gbdˋyي->X2F̛Vm _o_OCCZ9*e\/0V!I :?)7T` 'qG{gRITP" .2J5%-f֘˕Yx{5WNmU wڥ;F>V3A 6aYҎ}2umGu7ý3`J΂?0̈́]usǦ/KI-b 7xpk"FkQsˊ>I2!Pő6l"@)#6u"%&bWKmE`nVTŞ[uk}(]=k7r.3Q9%.CUwr쌏 o2)lQ0yEz)?AzN<ǥv7nѹ7'r˖=P,%Gڶ")4"Eװ8=,RCWlX+uü!vѝ1d*6LfěmY')y.:Jb ; ^lby?>ΣfZR\u&rREugQQ9@H)W+0[$XL~t M<:ICF R%~vacóG+,p-OYf"mP)Rh [>_ToR߹ym+>PV;ԎcGz%"6또kL}̧Hgh4(pZy ][~_]P$0{k8#;dbվ4_\/hڕRV״22+c.5% i:XRT6pkPgG:K=Xm͝S A̐\ 8bTQ*z9}J8^yф_. #ΥLQ6cMW%t5o Ėtz﫞MZD+̄4"iw<Ձ{7ě/ աnJψUa^}r(jYg7k݃n*rIMcCg`y%͝byf_30΄~&9l`_Z/֤խNm_9X`0P g haJ+5C;*W2κ72A$^S+ʕJ:;p\,0aDab~.#A}cpOzWK U ̤Z`xJA{{nZ ݺ((U\'"p@mxif 4T٢&\9~guj]M@by{TKp!F D(໇AcϏx;MRnu>a˜4pOMݑ ^ y~ظRXOڨ?<3{Qa 9+Y܂Q7]eR;㚶$twLMi-o )$.6Rt -u=ke ]֩ȗxv%&ɯxh+] 8'gwU8=zz輕v9%нSsQbڐOX|O^a)]\[:5 a|,n;8{Y7cPK_Mv.I6ZVk~TApizRZBD ﻞ* @tNw}e5>* to(Q2@šYIx1Wx6=pH8#𮡖h2WKG\MDZݍAܞ %i*C{)ft]fF6D°\r X?Z"07H|Fg~ D̲7d]m&nVVB[u@1Qeg<{tbt  0KPcseKVV F5- T[O;o\(ܑGSc|XqNn&ϯ/( c?pj=f]k0< sNjz  DFܟg Po\[-`Sz  Q7`fNxNΖ_%lй‚tl-B)iw[TOs RRm!=ʌB&>' e]d͘b2A`r-F ]Y!0}~rpddGC̬V/<F^7 ]3-i귩5kީ֗`z-h@<ż\A[Ě䛲pI6%c@EyRSB^%騘8az,Ie8ψ hI ] k#ꕾ/|C>a,SH 2C6{BO[F:\9ըT.(kx?+G+)u̇A!//|w*%Q8닥wdjs8C!&zI]Kٛ鿇ّF,(侦0ϰ1vͨ2ޱQ6՘23TNpƮ+a-U!oj>wBN#Ŕc[B}R W*{L}dq8%km \5QZ@-\qv<(PSm+ Ȫy%]KX|D շ vVuXd`X3qcR-j=~ԟD{il74+LG et$Nym sU ( aX{Vdy)JItu͛>[)_65j^nH @A, 2)*wr\t59rtr8m|вZ .F20JxL`(kSbNXDGݓό zU8%6+&po ٱj8,F*Q 0._fe|7tg+MJNtqX^*IRS W_(z-GJj TRM}}N?)_)2rWԒ%d}k)zӟ?|r¦İl1jw;8`{ϨA˵rhBx>Auٛ;#SJW .ޓ{?QQn!ƪ%/Z^Jh6rEO;%؇vzou" ə-Pst#)S . $s+TޠDѭv TM`K@` :۽YB6]%@0CWwk].@͇rcԛX&r.cƜԦHVC13aJ;+'x\%ah_cnkJ'y=*([rI)RL:Yۧnp5!%#i7{U4=r;Qp\ FwUuvj}`Ufs' 7sG&ȁE$Xh$d-5spt," 9EEkKoh|cizSɀOb(WWMoCcnN:B!;?oD 8aJ NSlnn&ZXco(B"Yx05g6roUeqwtNagb_|og-Uؖ'ҳ0HI`W>;%Uo#3y);N]wOTE4;󰺾i]iϫᤞB 5FKY?|9|=G 5o3|gz*9P*4k~}A4ͰV 1iwYRT!v"@J;tgIV9݁[Oq4pO3}'Km_08JGwo#OLYG#ƐLtetpޣIrxKm!/v-iyTvJr˯ޖN;5bA& _%afl?&j%ԑ D )~0rt A%{LVp0=)ۼ:/aP3>xFi0#adXDN{W.g 3NJHM( @ҤE&1@X8gaJgaozbV4,fYdԜ5ie氇7xH < dru37|f|2L?0s!{9;cD$R*xqJ(K6V"v+2x0νFݑ'Y}]{1gCe]&;TXNmO c*t.Է:=4(dTE شɇ ;#8V °GաX$/\ckR]7@[ w&b*Ώ|9+tgi}ji*B YOX`Fҳ5" Vm> LxjN795Cg}q}[qU)?MV24%hl VASmW/FU';2O8pBuLY~6Ժ~iZh9ZNLTz@} ab%;Ji|qr}"\ 9%ꌹ |sn~*N`p. 33>|cx-oB"T'ѩ[4..0(ヘhEqCgfs|DX勰">e\ܳ . },)|"ܹR\ ڟ@ 2p? 
5r O}RcQ!W7Zx덎 Rcfb?Q[b /!HeVgY7#gZ'*5>}*ͽmxeo),tV$RQ/"׳tF1Apu~Y@WwHcȽSGUⷧ-f|XR+˗72eȋ&KC4$)߅?cieSb/.f4{FCྱflޮ1+HMKRX0:[RFJF6@*TPѓw( hnV'_B; CDe,?znCS7U =1N_y(I+M.#ɻD9SPP9{erڵE}U\ֱ>G]&`%mAcnN5U{E˹{|P.^GܞUA/jd-3Sc?Xn| 3VUyozx?ډM \x"3F#E_f| _XXrmD|n38Z+A-բb:ox֥գ&%љܟx'aҨ2D'8]"¼ l'ԧ4حʀB)?UirI˾xۺ־?6xG@idB\o%m) ?ۻ 9(@;V)AdɪI-"Źv[kڣH4VђHM!ZbGOlYs?d֦DggSe+裁3o/Y3#N>0v23*XSSc`sNTj~) spe dZʣǏ+/߼CԳo,.lfy1S8T9ٿ4eh._{CE]y@ opUAD =ΐȈt# [w\*lN ʄ{Ƚ)|=%OvUŒ䭂^^)Ώ?q' #< MKй/+¥8xWnekO-]XT@\;:{`+cm0{@ ɳA'Ey<''bJL(ڎؾϽ`"wչD".fRvsB&R7:+RKȿ=D௑~.}xPAZm-=C|=5yyl1AF=/QVʍF}y'C.ށ-U ϭ28YdPz0{ ntm (>W{f!0mpз߯ $kJcG ;4>l 7cLD֊kEs|o~2<]%)?Z+BСw9;EɃ)0E"FX ׅ`C~ٲyߛ-9[kLD?`'`#ECKܝYG,Q՟N ܧdNnG[UuedWop?1>"Bg&,հih!P Ct}M\qpWRJ%M?L^+3ϙx9Yd\*~lAKB4ڎ>{|ӳf2Y⨩hO#OJ#7c3~;Y.EtN񳤆<^cL5umiv[5﷣OP^4Yc/(Wgj:B0լapl޸MF .:hnwQp]-ƏJWa\%=I-F?օ"f"Xd^䙿 B)xp!ṯ(P"M( /71u,OU4+m`Kν:@S?Z[$#"&hEdYZ*[K]( GgNfɎ_ V'βu[i4D|šlIY@L$8/!օ>#KgioxXimӻٸ )*BM5\P ov\n:m8t C8k@^HYU?^F# XRWY|[ NX~NG@4F]en!gWˎ+Zw'F( l'p}"7ړEG|>Tc9tY$P+KS=t`TʌxrI, ' &9D@AvmAguYt:<)KoKMnt:v ^?P`a^ą0Jm_KPˇy[_RK. 8b泻/THb{`]m3JIj\e*DRHdg5lNj|UroYYV: Qp~]C endstream endobj 563 0 obj << /Length1 1493 /Length2 7076 /Length3 0 /Length 8066 /Filter /FlateDecode >> stream xڍTw8[F D zNQf Q#J.!zH5z5Z-z\9)߽g7]]k6.BB@9u'j0 ll=6}8B#C]olP: |!q0B+: wɡ[כr¸11@G8"PW[͎0PC]SI:6R\<@-PGíj@5`>EEYz@p#]nܐVp4fP !|+; H/hp5\=]yP/"uu"7C2@MvC#\]\Jss H+9#U< ݜ+G<>ckW+VnN =$ "'fw `0 {lA6y2rZߴEXo>.Pw80W% ߨoDZ tAj:5[V  ~Qߙ?+G O*A;Fp ߂7 a7[C/פb@^rn4z3ꨛ@7ˢۧ ͍y0#\p+-+a5v$\ y!`nf fܨ ~3JV CY9~!a zn. } 7ig r 4 FnVL2Ā _r=(B@?0|}3ҿvS(؃ ʠrZ޵~~A/ V׵$r'eG-m`lo4%\r A2m57MMFH+ʽZ.#.*M% ғ=\o|XDEdJe+dFFUX]w7R:z?O`"RhTҷ7ZiT-#l .9ꃞsZi jEc%1)Cv|=O8s`0eMK{wDWJҲHx #֫JʥjKChCǻH\͑~麔1A3wK t܈0GJ%@&|:W܂.Hm0D?Wɓ`f "rܑ ޡJIEڵF mQۛG+oI^F]0}y\c'b%"Y'WD!-r߽~;=>|0tjmk2nйĺkTH[> urpi v>w %~j[ܢ!z3ZG$}wywRig `:tZbʺ=ZH-$\.ѴFC DB= *M Et?7`$^{Y+#=J/GeK(@ [*dXݯ!!u!3qvWFb_4Wq2-+9?IޘEm'M>  I9?Xktu=TD|y_a=q'=Im~еZc3c2FI=s@Y-!gI,50@Nn}ǝN&pGތ}dt4Vc:NW肯cR(AGr6h'ɉb}-n1|{xy@__ ։ni7ȼT̆ :n+]Э$JEKZl=\] BBze, 1$YmvQIȏ@? 
ykx|iqqH0fC'LCD_;JKW琐4EA&Vրl\e?D$ͺVcP~_y>ۜ >G]}ȖPzLĕ13U9 I8eۛ2 VgQ>j~E.0Xp7eJL]a,$6Jn}~V+]U 0(S5u*C[QT BVv@lyDQBǃfa.XgK ) x(ND>g{EUmyf>JBcZ|TԸP6 &EkL{vϐ>΢߄}[re_!-9(8 sM>(6Owi9>$úݹi?t0QD,]|8qMLrsƓ[ׂH]M1)\@@PNh]%]aWHoR&y%սCΤ u fc~meu(I_[rD';?V=R4K7i-{qfwLۿōj%bI+yu&//\45c 30*t+Wan,sHY{2f:WOn%k#nkc:)}[#d7 l9ZkS~.fOs&DcR uTz7!HewuaUPD⫝̸G_>GG&ƅVw]nM`'f T 6v>l9`J]i1y'aK-ScmHzO7Vk6 .^u8Q)aqNz{;4$h|f@p4= =\$bZs j>h<39fsۓY׌2MYIX p$qJvoBaj2Tl e$6io,EʽR=n;ɗQQu_ꠛz8)HKK]UfqĝZ i5BOH0@ۧ?Ꮾp|,<V$abd *22pm3TN[Usha\IZ[+w3:Z{tb=/(InL֐N+;Ij:%/qL-cͳY.--WL>vp`o|zPj~WQ 9{ sϱB'5@Rշi;8b$.)8pϤ25>riﯮqeʷ7^&17JOYR#U5Qd\7*|$jmj{?YY6W^<(<&VށSF͉92iP͋]6?4G}ޏqʑݺ"wX^g!3garj¢o3+:8>3Tj>pƅlZ,Up꼏PXp*^zyyJ~^PM`S֫UWsȃKҝkj` %VYcTpR'J3{TM$xCD<2Y|xrMAޓOO,ݽL#mֈZ90r C7zBs?xĈ,]U?uc^5J9}iQ#{ۻKW⍓'q+E_Q`BHޱDO+ YYlOo%& x⡱?J V/O@sqY(MuE'ΙS,u73>"^RIΫqdosL2MGդgLѬU/=̺(=Tʩx( |0UEp>DgՓ3kD/+ [k ^ꮧ4\>启GSkJul0 Ë/9VҔ~Nv+[{痮6oUU9,A9)CS4zgQ0l*(J0NNNջ(\nX`KEUXdOס9ނqNu8YjBjm cnPa˴ۑOmE$[,|J!(%5/x;^guliIW<{j{GhOw:祴/dz]:!K'~T-1+v0e rAh*6jTqsBf[<}x7ƒCŗM-OFBT+}-5 ^Qt@`Μu2e'~Vk Yx8;?%( IuS4&I |L=DSmtrG;-znz2j̰㪶qKA 3p!(6%Gg |m&V @Zy"SvWOA!8/N-@EOQՕwB//F6k-9Lh1%U'Г~ oy)jM0Xu9k"My]_0Z(ޒ^חxac;yYjQs濶:º6R=?GEJ1u-VY{>UUqϑϡfrě+T:Lb䛈z矛{e'p ӝ+M mjQ)Wc0bj _#vjF`q Eu_Tx;lwDg[  RZlOh>/8̽3;֦N"|Rbs谇{xL{l0vhwÎZ)mAzYڱv,fE5 V|;xl-ޑmQ-Pb F r)2!5< 0Iݕ@vl}C%LERGӊ"NW ?ڋsWqZ7 lҹ6*(-wf{ x`낷.`-B47]󜸘[_`ؖqIQVmS6PiLq'79(`C6Bk SH`!uDW|NO2N,6v_?$<4{&X(Nݹ[U8IwN;D=YLR@$T{8%~0C"=/r9@fCD@esW'6%;oU O9g—ɤUr]kRyڎ'D丵_ iL>RQ[fH!k&73ߎUawo^$@n4^W/`34-*r>hʘc.k ya#mh`Haӆ}>0IǹrSmG}ܟCi|U%#v"Y#MN>,CIS 7Ӹ/t5p\jb)t˦^UCM]xfeuY7:3ȚmBV=p쮢?(/TAwmHLbN`e ^r:j;K=AEwqMΏMG홎+Y3=Q?r S7bśG3(4 KB=Ivr"tX)dFDmEFzc#T[;u{sKvtHm~곘ZZұs`4eB,݌FlUUk;o:jAEA/|*[XW' hf h[F]?鶦`@դLl(2i]S9ߚ|vUιfևBnQɥ@5r^Iyqr)}]zP_.vy[;ߍ9 R&1\W-(@B+ |^#Y6@_r~,F_=q .F|XAFOwnINW\@7@{{x*EZrh93*Vנ.*w/>$Xs\1RK}@}~קfxw@vki_DMtKq_W 5wGɉ<s1Sn9-u ם{m]nIul_O>zKڀFJP ݒNs`eױ?ZEGyh[Wqu|Ml\7v.9vĆ2+)3Scㅯq,zQaV_NJeBؕ]Haoy%uoi,*}G-1 1[gltRtPSצ]¶}wXu,%Z$Xir3;U{$&]hE\PslƙoY3H^4;j.X`FU*tQln(MSaycV3?M30%S> ?]no[2dݦris''q#?IPZaaV/$E}e8Wg֊ѣ$\J,v$Vo>OUӧ|ۢdUmGZa&043P+\%!Lye*qʤQYY2TVN;Biz("Y*pޒX\nc>Yw}Ҷ ޶q7ѲyۃtE-Sa>6oNA{J'_,zW8i_< Px''^=Ý\TJxÖ3*^S&`7{+/C1̵8 ʟm`ed-*/mHzPPtN9$KA=fJ Y2|I> stream xڍtT\6t"KtKwDz,.,KwwJ)]4HH (-)!- !R?}g;3sg>3 Z\26pk"r^^~n^^>\ff=( pab`! M%aN ?($I#  @2]P;{=@QQa_g  =F0 C!H`}dDxzzrݸ;I6N'iЁAϒ gҸqzP?]-n NP0v tU.d?3Ý]@0o(` u4ո^HNfrr<@P'-W 6t[ ҍ Fan۬;;C`H7ܟCm߽y~?# d ,݅GuuܚpAA^Q>a!!@ 4wޖBnp}@'.kw[3#^S[??3U 7jkp./PVp aa>i #Fo6cio t3^A^_.?*);9Y  u͸;v /ClEUq9H" bEPvAnПv;`` v+_v~} 9h|Bƽ}ۓ x;6_Bp[my[8x #v "v??BcA_n <.[3?¿_>DDDM^|=Z)9tg|GHMn@7vi]w\ą_ tN󧕍/WX]jk(]OA%? /~QLs*7ncfI(K0*ʆqE*4ҵOiAބWh7K͢N - ]nTx){.H5Q*kĊ2I-= z_p+U>Q48:}ދ:nE+0P4׍43<>fqGvuT84kq2)iԾ h(ԗb3@뾯9wM `Sns.ul0e\śrİѯJX|rA) zt{mVd.l>!:xEr8#Q6Gڪѝ=bF=!$Hכ2r+epc6:Æj:8h SH3]FW̟_bȤMEhanM3ߐTiQ4nĐ] ݜ˄ێ Q8Qi @y=☻,Om+Wz|6qӐ7-!;~=1{TfkҼ"* <3 RfQ#4{olVr=6@/Edzivi΄)G4Lw^|Ä"3"{~Y=ɒ)Mcq0/P\WcE7^"qY93cYZED'|" 0oeAs + +"G&('nmZ:Q~fPUv 惀Z _gsm!BЀlu Q?U% cOxrs2 ho5Ƽ})A& O(Z[^j4QH)O3py3k gm~Zd<BdD\h$E6j?'O!cju;~qHΰAeϺZF`.9|6܎5kb]8R޶ˤ&((6-魼{mE96nZselvґ!hJ>'iAk-8'R8u&mvzͲo /3ފ5>.:sŦ3ܳ;m ԓt}VUM\nJᐢZYC!2o$}ͅ[ovlVv}W@j$? 
Uw!ȇqqR4wTy,\v RBc9XD]LX=;a } `0Ԟ?)?C]QަUh輟cV%qB* ԱtNq',sDICC80 46~<-ԿEҀv'eBkɹ+UT/MmQ}n=71B%A4EܥV(# ,A66; űOqͭ,(^EPͶF=3ق2S~˲  6XU2.{[x>IHaկw9vTYˤeT'뀟mr·ᄫdͪh,2DcnOKx޼Q]} ﻡbzTn(![8xZ@P—Ծ~RMjJq$(:n0^IPkc:-)JSTg}Ͻ,rdF'o*7c#StoLBtw{VrY!:V9+D~+-6e -,W$vcX|㾠i9^xun{&ԛԣ.[%G#iPaTA͆#{jےut}nJcl:|+%l%0^RM;(\DH?bj\`8IL3ײOԘs 0&ۯUOhKH݃{li443ֹpgYrqrge-0<2RjuiBBuHQA!^'zr#[{W^XqSX1̛\ӤHLszBJ?!<4G\Z)'(O$Zc3\fS=1͠5vp /1Z"Q|)aƈk֨#%@cM,u7M`xy˼b:Z5:e:h)S;"ɜd|SrBE9lڇ-R?9}~{ulu#D-)S;@f7P-/>LNe t))]VnL>"O$g|K[4>_jl?#Kf%.\3p1ɼNA̼t鬭ޓ W->6g<"}c>ܟjNȠ"593>B]Ng\9J,\Rp/3@fq'Gt!1I.b1!#5.\¤!h*NEHB(Fbj`d~XM= {AND?aO";M;-3tsi++ 2qatxb*.,s"ط SsN1<ܰ AVh9MhOl{Aٵ̪w,}=r~g洏Q  cR3Dǎྮ7Dٌ^aa3,I3#xz>`K՘V?vD;gp ZyQq̐&'v YG MxȐJTF]<5S=& jK BRB5pMZݑy5bppH|bF) )$ęfMY&[b%<GX%\hO|w,Dcst 1wW{Qݭ4gS/ȅ&:W֭{ s~0W9gћ/}r}<ԹKRg {'{3h\@C>EXAP`r~А)f!b4^tO_>fiƬhU@ッ6vdYH`ϕP6Sչ7z@˓rcQc˂eL;OPm[gIz<`!NqJ.jyT9C1pKIWX߼h0yu^f536o ŭ{xDIRZ~.b**gs+K%7-3U]EDT/y;f#ծտf,S%ك><p{L{FAJ5q57֪x&-N=\8}p[8=Sh(C:?]k,TG;(BR^˾j5B no>մjix>;:vH5=e2(ɒY*y*Gmo-uɝӑd׆Uw=(6۽~ p%N*kp& W Ѓgf SN] v+(N7oGz01Q!&aiuyP"U|9J73)`Tu}8oQ*A!9+Y1L`+ 8_L84j;2a±h ЗAzp0G} .=nh,uͳ3bqm^g1nq\le` )$KY4eHZi@rՏqע$%1m<ޕ(s L zqv.46D:z1!-?1.zɢhˆZa 5.dgOsNK"iLJitɤ/_i+3Ӱ[ewZ~ %ڕ^`=Ra[ -~}=ʼjnEy8c F2Kw: {aCMWd-2//Y'"!*8P@tb,sy>k?8$}1-*183vIw]#wD)]\ZA-87L'@ׯ ) H|ȥ_sPb&O߳O L*!;s6/_O$s׸Fs_i( H" 7ާN _6b:ύfk:Y|=qp%g"o(wIU]V4B^?V|% R/EwYuv"FMC7$:dsbi֓k4CiΪE#RbgxEClRB\_J^y9ڠd`8]I^E|QYSUzngWrTȹRͅ*4wR918O785-P#nXƌXiKξꔘfo ~EySIiK' j=D}ulMmuxvp%-}4-sä endstream endobj 567 0 obj << /Length1 2041 /Length2 13890 /Length3 0 /Length 15147 /Filter /FlateDecode >> stream xڍP #=!4ƝI]Gf꽢 zm;{iHhl 6 ZzN=T d Th`fcG=PfX$- LVN6Nzz#==m9NfFZ5Tv> ) lf})DC}K?%ȹMA [N::gggZ}+Z{^ jt;)dP'(9P19oK3C[v:@IB g ;Xj?02?ٿ Yohhceojfm06DiA. j@}K|}'}3K}Z (@`f#2oc6Z'lf4|+?kamldlfmd-#PB76 B.tPvdm~nkc 0~43www@@O?Ff of\oc'7X[IKiH6.w&F # =6~6 y.@ҵY ~1_)doG[Y&]Gؼ- U2@#3G鿭ɛii9@o48~b޲m -ޞ7mwx[9_ߖ61},}{{}WC,w55n -`lcYYtM#V࿈ @'/b EJ|E:Nm)-mMkx5?:"A{tFLoQy!ogt&HXM~?oJ7魰mY[Cߗ_NoіxQ/d|;񏱽1;ھͿs`z55#㍗?0w03soSuw2?km8&???_mhoޥ7M l ̫Z*pivƸRit`A}"SqJKRŢ :vwYkW574&1s73xs(`_Ւ 8 Q;LHQ<>W,;IʶwVfHcqT5 *\c}i,F,~\104֋+sNBfP]]t@Q_:E]6yB'^qñ=> `}NnrV0!dojOxZO| @)ץɍ'Gd rj8c:ҶH' ϕIh"TI,3Wng5SL[yp411!9 kܸ1|jo$[|Q⑖ጁGajUG˦Mg[E d9If+gE&JAvo96weQL=i>,G!+ EG l\7eSLWΜ re]3\e j~<S[T#&<خLп<" KGzfx!R6-K9rflF"rImrBd b}vo.g 3CM2zju06;)J Ƞ=۾Ltp-T6:u#5nIjf #QAJQL9:qinMÁ̺[ޯ<=0QM.MU3d?fBϋ7z&hY#ս"FMآЗYOQgpn5%n6j $=Tz,~H=cki:/Z圅ASEMN`t`<%\h>>^I#g@N0hwI31ϥ(`dEe;%iDWr:ͨ*́3t9EBV6;c_m1M1yއ8 ?*\!BiLlw"1qU 5  !BNI_>xRɣc5ghT1<>>, _82p5&^ U%+!YueBAF>>ŲGAچf7W;V=XIJkuy ڔVav=ml]\9Y/Su pU4_ ճ4GG1xEf,}:cĪ$Wޤod-^\{*OV2l< کۻnMd8 b* uAt\n $}T -Y]6r\᫝=j(I -+4\m\&O]rR \(1T$ q-s41VFS|'KPjc@ȼ` m(*G+.#ТBMu$/NgpSr뭌e I;bG S'\yo*|uCBJ27fgHKh!;;Lz2I.=.DևF_pt$%Hꛓz40?JjbyϡBrP'exȝr )QC1D"\ ZmI7Zr!TbޟubXOQb<(s,4Aə,%uETb!Cj=wMMBg2^1+W%J?OIyL]m%{,Ճ`Uy:$Zw5^fr^lUeTՊoh>hЀSS*˪2g/bṕXA uz-ia``G_Rft"J |CQMp?>iz> ~]Fa$ꥹeɢHkW"E4 ǿ }ڒvˡP7H!b81~|pY|=Қɫ0 i,Z[,7[ʭWt>w~i|KMք,쌴>/?EV_xfp[Ψ]x; z2v&c#wdxىʨu.)+j̀v9'&ؗh樂Z WKC;$e>=@*5<♬@J~þFsy7xn!=A*e ػ Yp٣=ٰ2>*7c[6toK- UD8NT< bs- cnIBObA,q;Fi%=ɿwRڍ4,ӫSbpHaQdn0{y"Jr%w%ԼbK]yuDGq867oAЂj6xdwOaY8PUYг#wmZ~?^>*m60mg+2g>BW5A\Y@!D%_ԖƞGCCEY-K\p.qcv#y6wgW$9Qgþ&3_$?Z3Uq005J-NfS`A3&+bǡ]7ޡ"9kK4#E.N$4|TKLF=/|rPGC]Z̯+!(W_/h`|)+ ,\m|Ն_oˇ4 :XgGuѣ[bhc?d.8I |pY㲚~.%{m MbBzYz?[簒D[l*Ѽ Wv| _JzBg*b3jJˉr/A b$5à=/"8B`)0ϔZm׭ HX?v}~YejbHcEIBI,VƊ _BT _M~AfP7-6z6\$LEx BqHͧ~'錫埐ߥiU&:>6(Zl@ƼGR7[2Ѫ.?G~6yh# H3DNCY̬Ѥg2Cޟb#ޭTC e90"%_y^'f$J C: 5غf"qR8HH%?1jxX_عgt@V(Pk$?*4f8fR/=ҮQ{#U)@_W0eːt{rMOK镐GjO~W 7=*4^Ds>eΖ视R;:J{@ѭBY {@M "=HEbluV\(xPFb'A˾v͈D[v̬I ==<B $$S(@L.~m>^0jG j<{?h*Z"ŭ=qYҰ7Zξ;ař@a@ne)5}^,V.ȧ EDMOqJ9Zp=T)|δҟN[JM 
uC5)Klc0{T6t|c釂!}c"0mpBEŃNlۍMޖ,$(lp}$LOC@ǔk `*v9uTD\\6D.Ku~sTw>xg)9 Z:(<.]%*P-ѝ_Y`vZ"b4];`Ծ~3Mp@@)1whT%f'uo;9G љdCw ApAHE5AV&>{} ^l1˒l$Uodf˶c8=@sMIlݳR0ub JunT:-iK$~ ATSDz)LkdP^VH*8Rm \m|K5`nh;!]mT)N| Yy ])4Ҩkߡey AdJ?䗉Q}Z8i$Uf+a2aNT{W7rWJ P.T<>zˆQnfgۯѢ4=5hn6,+%5F"Cd=~ ywM|4[KJzB`~lv@<n|[[PuT%HYMN/oꜙV΅%e"N;t؄]J ]wȰZ+d֎9w{Evʥ~Kdja-:i#ވ`)t$GYS%YNM|_U[؞Q4 |\hI cEH/)Pwf#H-$baI!4n £evRrF17rnx%ĴuCűKOA>vΏB(YAa 9+F-]f&ERoTQTievd+жxiŨ4)D车4k\`g=Ig!" Mgwԏ]$/cIfMYXd_E{V9QH>  sR@cm˸?J'?u| ?cfs!?nfE(goģ}Ă0l+F[Z!^$qHKdS<"].kY"G$%uWÜ}}<|6~ ӏ Ty]mEw01 sKp;d+7=ȿu 8-\VE?Jn{%Z>L9I Q֤󂿍Dl#Qaqm)h>T_uPzYt 7MEA|pL5O^ar _‘O>S>lsJ¨ *t6Ai9Q4%8dJkU{L `^I%)8 <=01R4e`|p^T*)D n gt&a ۼ `JZNrf]uG1ziEz X`孾0y@(ӈ0޹`?B$jwLxrp(ݝTZa^NˊM>+?0L.~NDenP=ϛtSQZX~@F$tgGJV$Tia :MV]6NBFEe!o<;X0S)@x=V< >Ju(Ӏ뇻/Le Ѩj i$}^_D<|-Q{m2Ox՛k";ٜLlySQw<!!o49P6\kQOX?yߨ! #ۂB| 2BaCn405ᑁ)Wj ib]m$'X%iqei*9~t.C(I?סL|\E^{e)Vvl`FNtݍopU}>1pvb-Y!Rev+%6-J㌭B^ < &\ =031!14h6&ѭN+^6A38+jΓd*NWYr`έh\UhJ#~\ĒҖZ#>ǩe0|z*Vxgrk->;CsNIVTAf@ ƶ RRĬH#jӭO,^P*KfѴ@{LIznT3ߨFMIb!`X^dUڜ1r90XYlL! {Vdh/Fx; ZĉuР (L~: ?ęUh&!ii=|e}=PO.,闱[Ozm#.7)Tʙ+kfwy.]mV\L՚"b f+P'[;T8U;~7B("&S-w?YDBy>:3y4\;Xט^ƍmY!,d~C'<bȹJ$8J2_27`+l]cU3ib,bi~b: <WQ<+dq; zAF}:yՖ"L>Q?V>!תSnD˩CP1w4#ue+ܟS%[}BcwW3Qq1 Ù?;٥.rmZa=d!%'9=j)Tw{IPnX&"ـ]ҍ=IH{W5#GN$U#{kCZ;UOB3۞A+cY oW=:juy_j;y EM+I R/LYڏH*Jcn|WJU~$c[E~9ē2VqgeT(!1G+V1»3|CKr:ф->cŁL<Vj D;t\Ɠ5ȷ݅\1]{`з<O-6a+[eir3kkpي|lf ˏW* a/ *)hl>37BL0ClT^M9\P]l;h&t_'-3;td%k%W+Ia:\ 1b8ν|F)+æV kXAy~+5A.CBsb}D׫Y&1;1`&?(:c % `̲7p]!":|}U&9mTw|t\Up4tL).<|8OU%=1 ׵vTщ9} .^(xA%"^i(@vsvo>$tQ\.' MƖ^FOd_^f)%0f3 b1Gii]`9>..7D'Q`,Glj/mOuhR.6 Yႎ2cl[E'\yS$/Ev-x;ɟ!m8W;j{/,޿|`]? hE0R~՛q8C5Hl>/ 6AfMժpe! 0΀G]7ڒ-GP4FpH$`UNB䴴Kk4_A%gЌyǭZh]XgYII]ՊuH _P^5d2!83v>D1Q:o/ y F@,ݲ:R8E.B* ңUn~E%BܼFg-!šSg=bG빣-#Z֚3B_ZV_S%Z#ᗺ߿J:咭2(۬1j8Ǔ>,KYJ<RIcza[t@ =KqJC38' 9K|S~)+b˝_4cB"C殼mGe] kQw f4rbVn_^-p]Gz}W@@PrW\Dii.CyM^-:Q;E>2DHAkեLİ\!_Dye< d[ Jpj dO.Az9Šмgtri#)x ߵIEvyvX+zyҽ``r oy %wٔ^ۺw8&eAf>^<}\2f(eb|0ֹ/yT@WDX6tn%+e,B?v!ToהES(G2!lE"bJVoW }1; b@e- G.}')B-ȕP1#V%y25?LVԿ«(b +-4Y`(+&].k{^Uz0S186ԫ*Ayz' Pʟn7Tv 7| ᬅzuę? x9̄2UVW {E~!QChwǧ^"SǞmxl.#bh,ӸZlQwajL tJ ipۋ,HszKpj:r֠_b.RqpnHL$Kj'+/Nqt2o8W;t^{ *F_:ӟta#E͢oeԍaKtouZE'X5pX(>tf;u1fȖ&LVyeze~¶H$]Gb\,=> /SXuEqW9!0㑕uܒrwe華!qeߎH:}fD"(hJ/kӉ F܎&zkU/ q ?mڌNہxj ZH&szX=M?EH} ô6O\1G:gyӚy<ȁ,k0pC Ԕ+;ԗ+V}Q 6?hW|_7k5fVVܩ &we̘`5P/HSs6* Щ.N{VzjCArTO~%>Ieu-G~IepV0TclQʡJ~ewl4d܃|&CgUg?!\L4= 2DQ!#pgp"5"+o"EMֲD ai"j7Bdb4.4Qk:iƿT=k=X|P4we<{á< @씤S)V% e6T =g !Z0Z +idptttq4F0{m|}J{ę.[o. &i1w^{Wz%)#;U<{]7xGs:h1](f%6&xsJeƽjt>(گrh@|q~֤7*lFO@'x:(,wcjB3e[ M?bKS)ڣlr[YWuΏj&UE$3;7/r]WW5l66|_Z̕ "RWQ--Igɕ"K5w"؈& Pu._7f[(uH?7)#$P,He RbkJg?TJK ĆH!R)2mwoJ)Z2Ip|4N#wlksy,'b% ŁH.gF=Μp,\SZ}y00_t?Ϝ!ѓ;y|^]a 65a$fU5eEYE2'\cPu- /pkfC\,*git@۱OJT' M @ Fd<d?*oyWȅ 5+3| @| &r"㧔=%W C n}jƥ% qBkv꿓b^*gLG V&p•wؔ>]XhYp;6#|@(K+St+\1o(#O9=yE< 4/q&L_"kX$.xS'|$g3_1-:&& ջNdn#119G%OB?W4t7ɚK H>8&i ώl> :}^0Sg`'56-p4D(.(B*Ed{FFQL*uJ҅FF9)Gvl";YYaɎSq8Sߠ_-Zr”{lg8p^ crȱWL?%# |)Mn2&<#uc͂E endstream endobj 569 0 obj << /Length1 1400 /Length2 6357 /Length3 0 /Length 7316 /Filter /FlateDecode >> stream xڍtT. JJ ]Cwww 0C )"-4!]JI8޵]71sm Jp$T@ . e2{@0wHia@5Og /?WHWX @ŀ `/PA<B~y]! 
P{"=!;O hEm P#?6rP9=^ oHzaξ/,ǟ\| $]DȗVCvO*yM/X bZp$k!@$E>x;_U_$<Y0$')M8RCVbt*,If.^n?C Ӂ"lPԜ0ۂ+ԗ)65!H9E-`ww/.rHKϋ7<08DZ3/o_(cl=ݑ}fٿ@lqg&aOj>\TRysI`f^q Z G/|1ViW'-9W Sj|θ蔶l&PSϩ& QhJܻPthb>Xj:SI(  2jQ=DM 8 lu)ޕDK=}s;iby=@WV4'-~oL,kUȖ~QuvkIgHC9iv^B_`rdH'i蕀 MxmF=ʦN͏/`E%4h+߲OΪ}_XYp~L[fX$do{l.ɔ#0كx E_c::˦S}Y u0^zŮO=g 5>*曫ZVhŸŌo4}+jqrDl$ G%AGWXF?Q,iɇOk%'*εջpS>';n_b3 1ve>PRF;Q՞$}UwMŵtt68?Y r[(*/+ΜY36m쏜E*YBpəV⽭W[!^,X9dnG-:k mqNA|Acww="~qT[7)VY6 6YFȢȞ)13.z!babYOf"L$G +% v+\t)VhҶj*Yw m-Mܕ)4de61m&_Q- "AFc1)o+爽aE_bJ] gŨһ/Щ γ~jvO^ e}p8p,aSQQlWiOcK7n`Tb&#ez(jd(`\ۢQ<^Nɇ)`* 0w",Zz{Pz.JcDD[^}ዳۍZ!y-Sw\Ǘ()Ho:^D 3:Ey{PԢ(gu*݉ѬWGV(MtoFZ5哿 ^aP%2DKb&E£2eBD+GpVjgA ^RqYVswWcRùgZl~;gYQ&6ZmV_'ԶԖMӷ0r2=:"?|:8`oX?P"5} k7{}ÿ\NZ[6O)n}&pi5Ba8r;u,vpjj2QY@4Q%P[u^Wq0iC%.s}9ex?u#\̼}}q,k,ȯZ|ZVl{ArC}}78(9Fcޜ@nTe 蚴 ԗnØI{&ݖ)s[ 0JE&\^HcPRx8h{ŻSQ9,)Qd]FAa,Ro(7jP7;pCnOeKC58XE(w\F#[{YZҮSN\6S۫6ڼI3⑛.մM' ~)1@~i5va]}_oyс(M/3 oFj(~foLHL= uiJ1qoM^7H8mDhTˍۛ%0r@/G uZWDSE~#f7J")'~tLF͒Kt#՟;r5Hd7 [& - h3 ps5 2ż̘Ōjog!fz)WeK\Qg~o@:E3%Drϋl>6T~To3|:˂mS\{L:)ߴnWߘTkҿ1:Ɔx'L^\1嫚ºsWJZst3װW$-&iEF봖@Rn-nO=Ә勬EѾ1ۉTH)??.#XRnѕRiw#LJ~c_( ,/};ȌTt4-Ro% ݲ:Q-#_t폝EuyPvkffx:78_v|VfF&縹~aQfB^ȤB3ROp#V!!Rm oNX)xۼ=Aq;cnrY*>Ϡ “Km&1.F>Ԛ}~OO+cys?֡sԩ{̤i)~Ƨ9ѹ.Kڢ^//#;J6Dwo.'b3vLG-rp\6fgI|e#EȪwÔQz'B[O9~‰[v%S$6d޵ Z3 ";ʮA ~.Ō\: o;{/kM^o<'?ϠyzCc1?wڗAi#zÔ5҂ӓdB|m_.]ؠ%**p ؃ Nc&.Aq SS}T%O6L;Z!Er.v;ǹunj8q\0e4'6P7._N-@7ϖ^X$xq렮-5jri/-}0־ӆyb^ UWӐ< 'O6DFXLm ؆uY ETRe{a1D z|M˦C^aF3SMyt(cɂ,VNB&]YBv|=^̛I+]/=}uƮ<3>aBci[՘hx]pL ۴BI'_#%q8AB-c; ,N^L.܏AЕk qs:اs=B҂"2zg_^n0z~O-;"ŽgM6ܾLpIyt0$H$hhUca濞CGJ}Wb=ṇwrS>Q<:vUbz YǫG_xy>؆VI @ رSϳE&5텕[Sl{GKc?jWyֳ3Gz:C0 4Bi-+fSp:: Mpc8t߆8ɉQQVW‹ƝPyoBrXwudW wS):S#~=d0T.X\8"o&̃xHi{7W>o0>a`oQWTןAgR6G#p ǔt)W}J?ye⎱D<ڔD^)i_%eEc*1A?!_3!i+CGϾvpMJ+P9'~/юtwح JX?Imϭdt*/uzLvl4j*SIN~}IMH[3@y7ӵ?9GtHQRRn:6^"L1rj̪C+iHksBe uV&xg;W՚{<-^@l{?ew:oq 7|5sN®Y}o D? G:3NcoͥSxʹ9rcbaxOQp쬏=^)J>عb}\i=1[c2~.p$8fj/? b]XSuRH}!L,< 2,g|W5~D`4WJ`!](!b#:J9]陬(aVik}:WT[3j"۲;:@Vna iZHc$cpS?xj(b!Ԝ4#aɧC|^;IA[m0Aеw'8j8)oG!iUN[80 endstream endobj 571 0 obj << /Length1 1385 /Length2 6143 /Length3 0 /Length 7086 /Filter /FlateDecode >> stream xڍt4oRZ{UԊ=bD)jSDHإ{5FPVmU(Jk*6-O<9׾|n^N=Ca ea1<@UGGSJ似FvCA `:5遼Bܝb1iy1y  A<0@ \;{ ̿~@LNNFw8 G#$@;+B!NCx#="/*)qvA=G{a_t!?n([' N(醏pGh8PSpAnb"bN7W"w0 E9@k`0BC< ' w;p!~(+ Ց0U3q#՟ [f(O_"jD5U1) {AE7v6V'uAlCp~O\L C@1Ix5_>'/ 1SiLo ),.Kdfу vO&M/Cd( /_PG O65x:u$;q)ixWHl3lQh_k/Gyh4o8KMn8 %BA[N^X|0s kzv*!$7f S"O G|_%ǑiRrg^2W¢Ad`໢+t 8@zU4-leuSkY"˾}һñH -"(I8Kw\tHDce1BV9FPY; u-W4_ cuj|+8'S-ߋlҋiypKsǧ5l HNnȬh9QmL/5¼ 6d>E #8F@k`=rC$HsJ}o@NլlYdcBK֭3:}1 }WGKyS-yE MR)urޛ)':GWDaGN}Fs.ҝ&̆IWzF=JOsRwDѯBNN?u/ %5hӔI]]C|U`ha8(Ӧvv&Yna揌{L=uz< 627#˨%qC!X|,,//H铷W}TY׿2GP:7c?hYѓ2KǼ$w*[sS0f|Rc=a$Sd k$鲩ܺIpOQl<(qYl>{0yжUsGY&iCJzƺ&a\>2WBo;w J]JjdXXc _&/ן46(<: 6++P#dP׵y?^'-3+=("Hq)Ka y+s)ƾmЂ=+W鈚︪äVjC>x¬K6 ? b6bWٗ=~旼-ew:j9-(C:A_J9jc3w4&k^ ^:O؛89!,`W1'L{R;3Dk|nyKiL(X-g>,azg7JSXhEyzD=i"2(^cry{QF܉Q50N MyK_v.R w8l-aGj2փխM82rٌhdOcYSo̵.?ْ~!1{Y^:G1% Bz(Fgq5G߄ہg.Ջ$fݏ2J[^`MԔZ$s͉?+JH0,Aݓ 2Eu6(yQ5]~Rߐ̱֥rHq|qUmU??)(jhq<ݘ5N ?rnn*C|JۛX>^N8hj"cb~#>",PK\67G|NVVXiPf:xQ0>L8'hbr-6:h_$ fZ1otbGJ3.@+sis|xF,nTʬS՗Z݌ ,ljg,*yq^}R1FU=hԹk8sl5œ+NeS1H6&f1QH*_Z2#'V(wr2ɬK;JZ7sSfY gsSqő;-NiwNtorpˏ{s[Dr?'߻ET6hȗ[~k٭hjFIU3648UnYq,EM*>+JR P 'R?#4OG\.=c$ MOh=éYԕvp:rGٮHejFwc ;̗$B;7@] dH9qEt/g-8` Ӯ<2CoJ3PJny-]_=Z񙟴nj$(xQ W`S(yTc}i4!Ws_ryfr*}m0RNI$:KWY)m媃АpC(}[Jχn9Sʎ3fچ̃⩶Zsd(v:2~ػ.\A sk}3|I_z*> stream xڍtT6)"]2HЩt"0  1C -!t7H4"tHq5k콟ϳyaz)i0!HNP y@<&&m׍Ǥ up!" 
#Q>0SAN6/$ <@ fP("PG<&i_K+ : `8@ڢvmZ tbH7 ֑ `CZ4PgWU-Og\xLmKvP wDe8͠-e{6f*NC [;0 l59e.+m|3f6E~3$5`Ts8쐎\0_-r*:eY4 G:'sBPf_73ՄfU AY@~ jB,v~Qxy!&^0s(  ^;0AS OujF]J^f?˭.+%ǤNAA?aYIU# cc`;l,@ `G@~ KwMῪD߄ll~Y0f5*3*P3G` H-Pbqar0W: #?~_fC_o * j ֨?!#jؐ ECf掇_vpᡮe<@5V6 @R_ Bk mmv 7, N(Ń/SB!xhUU@Y$ ڰ~ҙ>p.[vx)F+-uFHn $gdզ*emcƍNnӂ4z}<' a=Z^#|n4uwYtk1zTuNź0 *6W*.k"L{O(X2mW&Wk=Njq);GE1Wo1I$+K#ÇS;Epsb)yclk)z;XN٢>V;EW5v&־WW)S`i*=Hs$ )*&,ӈk]A"'NҹD>./[lA%gluxE|ynX΋m3g5^O(B2=rHfL51 iVNӬgsh]ybʏnBN7u(ky'V?IJ}mjQb]ڐV§S9~$j1u7RXRp >]L8!C=QO9b]qq°GAC@aXgwG}A:u/]Ļb 1 mrK%|)= hK ޔ)̅Wx/XAR̈o"dzGwzv7H?T[TYsq1L^ F3@JŌq>Lsvm˃qI;S70_Di k5d;\zY{$%·{Y8kJ `-(Q0O1Era4&sK)b0p;L#w\b c/35P4At~KvhGS}#}BhbE+]^uoO|ɼ˞?#RQg؂ ̄>>2/YDyo*)sLy+pܥ]chChh5Δ:| Nf7/ zS^gƂR!Wa Li.T70~hnL}9{Yc0P@v j%9Vaෳ b$h^̠wIs}2]*ՈN sjjcjY_Ξ#tc`B&$b=AAZ=V#\Jo~]28}oXKk R~piq@G\--c߀a{NN<>ϬqV~S@QF/Cr"|3R8/G͖&/(dhR $G*)Fw} R!>~IDpMy[4rNkvLyAMicᅙzO̰ԯKT2VEZ̔q8~ei23QJc3:n|9cVraeU9'w܌Qtt-G%y 7-UId.#oﱏGGn;&X1o O`H?m;m*R@nL"P7GK(CPJiyҥxiLo \1tȦJ,qa~zlB٢4Fs0Y(m`!zK_2Uؒ9-#iR@O]?8$Q(‹db̯gvϙ;cΰӘ;T]E CTvBnJvm:`cp5ĄyJ5o \[bX#J9cݻJhp™fr?ڎ:.[:iRz=lq0BE P؄JKdSfź+'όL1bȲs7sn9؄}Qb,]M%2n: "uJx hf-T/\v&}?5&|ubw49®ҁ҂7j#_6XjtQ<7h3xdΩ~֮O?sZ>ۢ+ت?Xpb LS LW> ]b!:bVh){{#9Xb M YUJ[;]_2;fDW XQ*<]Q瀏htWK>מrQEqWoNzvrM>߻,&H~Ƃn>EfSU|v~z>]i@{o'R?~E35H~N[_5"ܤV;+C||ZDŽ̜UPF#LUwmNYSW ņ/JM dMB pg\WB(_m?=C7Z39@#ZRir{qhu}T<1Kc}\J̩ ,ĀT%a|-ޟ䭖Zg5ҬmmMԫnD=AN0#9%_/E| 3ָ^F kJ_AǔZVf1ey9 ;y ZXIr+{t̹$!equz^,zf3Ӣb 4Y9CgSH\FekjlC\I?W|ʼH*wxB n2Q7NB7H.).' 'r>rI}ز}0(1y->J"llOMܽ_lf=&)rz%z"KoplNG&[Xc,nZ/|M60geC.O׈Xd2g1RDW嵈. FDsf8I&5wCeANI&*gMķ o,unA,;*lH!@O*CR? ë՝2V`ZɊ("iqAȻa\") ى5cHfgՇl侥бԯyˢeOR_V!ޛts gj>[so@RҗУqGNO,r10p+m^;g_wB唓ܘN6([Τ5_±)HZ(N[O&DCUa/k=mPX#iV-FUf8ANn3xzDXĹ`lA*+4pdc=|)PP1z5N!xxUsw(cU59v#2qj&.5E&8GaˑUF7ڞBp6lW{ҏU߭di{dJlt^3ndolx>ZAv3,=Y?LYFzBMC.^Ͷ~c21*zC'b1,VMg9ia.{s-4P"$ϧ@ýJv3~gu  51qבWjLzlj{V+;u, 6">$, ݀5=WT~*\F+2P*}'`m'ˏ/WB&SwBȩUizr^ ]HhkOrT8iףwJfy;0=u__$־r1  `;]s{ͨPGAidFYVrU6 gn ;&#M^M)I|tr:yYDZ8 DZ 2sA5feiR"C5h>a\}'Pݕw>:H(6H% dd\";4ʾ) \k_I 4Α'=e|Mq?T9#{܌[Q#'Nd66_%%߳Rgݿ9p!ݗNKr8NhyYI]d*d;@Jޕ)NC?M^Ex2 ]#u!Q'-%|z`\ٳZ!'aUiy$qT ?Ŏ ͚㈺`g##;V躼+ l”M%@Eh$oU>P4ZCЇ Q3)lHĔg*^Ͼ#51g3N {ZrD%ʇNg%қ Fz2N0dspmx O!\9?G>/c)x;{=ʠ@Uu(@\V'D??~WΟ!!u$ȷBc^XKikEQvJ =p I+ŲL~a8M,! DaE4G"!'NGC{1E+s%[lBr{Oi߬P`l Y?6ORSNX7G57$B~! 
)G@xks e~l<ϻ CbƳT!:w7liykı7E\V>5?A߯Dv 40Y5,"#*hYNɝ )ױ 7a4'O x-% -[Li?->9c<{YP?C$=xT| ɚl|EL!ql]N}Bo,%#4|8?TMVfix1[jߪO2Wr2:duuP!"k8c:i)ѣhqX{ShJ4;Nebd?P?}3 |GZ>3k2ٵgġ$l|${FVd9vXGct7J+2^)xq$qivnpGXpg 8OiRZn?;馷NV2uI_0>ш Cv0YUo6XLE\D{DZe⡙4U _V\2i )3N ZIf 5爗?xv GFRiwP*ye1HVdOV_K Mx'?sWr?:J¶ ͥ_n"q~cZC~LXyW XT}MTˎλA;;M.9lKkMBBx"pނXp6Q1bA//2ISSX)<+$' {cy_m׃fv=MK]aTgWgfNO7<@h)S`lUeXnGݎń[{q~vOD|:b OOd-wa- /z{_\/fW튘K 7 Cu)s ҧBF,賂ɩ[Է +huVeW&_ˢ >3)> đyvAE#) 5á.6AaC4.y:dlz vfQeRm۝6R8$<'ϙAiՎ95ϱP_M?%y-/+ rZOJf$rnӡTw64e.v&j7ķAze զquTFQ`o(.PM/o%  )5,1/S-yX\q)Y5'|uc%:l͊tSl< ͋=|fs{w=川tsb6n`oWO| [mN]/{VH=5ۧptf]o`iIxKC٪Tj~e{|ni2I/tA9ruDʴ& dO4Aqi>Vf b g{/&": ~6W'*kH.qE{6V#U r'H m`P|L}5*w?\ 7-4t`ex2줣*1oBxB.ϖ 91c5p5 GֹfVG 3RH3Hc>wV>XG9?z 4 endstream endobj 575 0 obj << /Length1 1509 /Length2 7693 /Length3 0 /Length 8709 /Filter /FlateDecode >> stream xڍTk6 J9Cwt -53tH - ݈ % -] H7ysk}ߚy]Ͼ}훅Q#g)#(0/H* xA ~|}G /3>!鈀#@EmP:Ny8XD@b Dx^M^C( |=PnpNȹ<mp&sE la(`t@yH^47Ѓ!a^0[/-+3^|#CyC=`G3<0@ '+W!Gd u]`me ^ |j9, EiB"]~QU}Jp[+ BOf>v_?u#;G/n|pGwO_!h6{ 0_}`_f4@7MhC#^0Oǿ> uAap0?=|G#Z{`ohy"./>~AAK@jw*@n}Ni/Be r3?kwURtqf.UBo& 5ZMzUQP&j ʎ>0[G5G8Ltu@} "ۆ=_ާAZ<~!aꋏ=  6@Sh΁;1 ~_ |Nb>׿!Z=|B!ta7(AP(xzx9V"L"l$ބ7zJbe Y:F- /g+g:N-Z9 Sk|y(o?N95B4I#(MAԣC>O־Bt7+EZDVskMY}:1+z=B?rW\V̞ke2K6'yW,ȗvзņy`D*I) T]z}$V!WgԣQ(HyGev 1v~␌cAbDpP{hz8S}伲^pQʓ쥾5Fl`˲ArXW6`O8CHkME|f a6=l yͱw|--VHD~0n%YcWLPjteq mrED*]eje)xl>8lS 8jJvvyS-KЌQ%tk+[ZGL`PLj`S1 VnLWl*ƪZ^ݱ$~*%1:RvJ2D`::2\x Ԡ ?&D- Mz#Uwa- `;\=b 1r8RѨgf&Y?/&BFnll%b ( 3D,mކo@jϯ_Z(5-wmtQ 6 .>\ AC-[B_]򫇗ׇoد3:ݦz*#~?Ƚ}=)x#9{.iy Kڥ,Mב l7Y\M>Gֻ`-S(`R^/qWU&Bw[uEzAzh h)ѩrpj^h@eRH& kA$Oߦ%39|wDݳÀs^F!Y!yhARSBVOyLJp!Veh%)a0{ [r*^Zh|7iy OF9͍e.ʜ2ape -4V߆lO4}[q/37=£Cu~'͍Ԣcocw\9bKٶFYӉ}yDMViUf 5(dq!Gd,lrgg֗K  tQY6>]{s|POc^8Br>G.\v̯t\,bgcWŅMO+WՄS㶕t&Tlaga?MHs+hI0~;FmvY^UZ8Zּ"=ײe BB õ:iCo$ju˭/ geZqs+$YfS{*u%;Ŧ`'f3͋t>owy8*"`m[ Pih"de>mR*鎏bv&c;I3ؤ''o/dqQGmkocgܢRଙfxz.+4#JҥZT̈́81 iJ!u l8@DGNaJ1Βxoiql6 LS6✺w]_[1C?N>Ql6106ăAmm6g_1;4DGQGM2;Zb`-Fȍ pF`8sbόMqhOJES40-b.LvN➘SduE5Mt.bҊ 55v3I<ir ER1CeM-/"ar3a6oL;^ 3C7AW_` r v =\{BKp[Ӽ>[:g7/ܽ@󽄻q|pCUaϡ߸-][W aSu2\MaiPt/{͹gO`T[2[偽%srR;'86/o JB @f@֒ȃBLceu[xy_786|4‚AnV#Uǥ8j.dl3RGWa26(NH9u߹8hj2bRIMfH}giYn+)^2/ׅ!H}ϑk -~pbe+SV>e2qO2.6*O6jN85EtQ9>M;>N}㺛5W؄D~Wkxw] Z3Ϯ@j"ZXkq+i@73pO)ր~rm;#mp'9-/kfa5,1>̀ =?<3xq%oFT.٫z@q,ɠ;(8f_X 0VL=H+SO=&J{RcVf}>Ҋ1#3\d0d V\Z4-Ԥ7+^l7u1R{ꋁ)5'xoTQ^&H[x>v>,8^E E@x&1Rg?7*~ 2NI 1Y"Y"vʉbi9yfS2f.Ǻ{7* 906A9w[bw}.C8P$H͢yxK#ͷ"i-}ƥ3K)(V`kтFawyKa ;oz_Z*eFGG= J?ra~KJ,$MZ SdgYǹGxX;A!)4>(Ĝ r;Z>[VOc7CLs޿ҽ{W54[hW,! rJA؆ݔc﷖zCSs9>om'Z9ڪ]"1%B눍Ŗ풣β%,HU3++9ǸyR~\%z -k3iR ⫥IlQ٪=䊽G1 vf5GH⫥ A=9fPvkߏn*8ZOo*S9Z6u3}Bo~<~!zC9$R&Ht}usws0UCN*)~тS )׺Ȗ \fEQ.YR!!soZ˂ө'30/ Rd_!K }Ɇ{䃧iւ#[фe鵝[^4TE+[P!cC݂9?NmzeČP9 Փ'͢.7;"1Og<XFVh(zik\$ |oUd1'7&vgAsmyDDaR ,) / _c#7KH3S40R g }q4jh}K] P۽I C XI=5#0X:s1kscGsJ|=u1֏^ :ףu[P/¤:rg \D[c%ĥ1Ĕ{7R@uvmܓk sLÿ<@^>)&fhU%)c/V)(_-LTVg!OK6/هS3Wf'VYdwƫbHOpp~"9AGX3Utb y<ٛ% Tt۝+hH? 
3!{]U@6,>av?(ۃwH7tY)I3ݢLdRߌ[?{|1dy$LC b|b#߮XT7ȓ0 sInZ0 WtTѽ06ՙOq.W򽳊T˅M~\m52dq G*T M7w w!GcN^maOג  6Q&;[h]Y^iA%֓i m9n[&'4?>$6}Q3HZ^xİ}Z"5'iX^ݥ8*EbKa[/*H."V?nJje8&O=$B䳼{߈ R h;.ώvj)VW/!4gX, ŗ#,|1KhCÁoG_\y1Ac^ψ\ȗ_=K l|~C.TZDU9+i !x?'sc^3:UYҊ9)i)^frڬnc2uj[BȎ7-_R]VN[w$jQ  SoG,}8~NkT]?fBq)[9   GgˆRyffh Tiz?Oi0nL}!<U=ǍjĻޣ@Y!U [)끤`: Rsͥ͗[(]M ^f< g'M!WHP8r"3&rI*É|"16Hh Aofn5GXm\hL㏁sGg={ ;Gi ^sB2gSG_{vKCdVwM`Rm҄T4t )T 8oBFޝ%Ƿ[U,dS+Co_gԥےbċ%iRh}h{N-PQ[ISнWUO#5nV| =ajt۱&M #XզsнY`0%㺅5h:7込ӵL*|-NopsKS7%>k?myuRIGUMjs eQdzXufiS"lx.]H1#Fi"ѫB3xt2&^3RJ O\Ϟ/w4e0$~bDo^zGJ뉭LGs}XGj}G<{15i2r؆ϻkCJ!&4]3g/N\FŅ՞J9?;X 4DG6a'}-|v-wz[)T2$Ra:MJ;tr!d\aE'b r{c,S]x 1Nw^MW6x 'mg-#Xy ءxsVfWZb)W 2~}1Tver$zSj%yQ/wmGo>^Y5VSIn %l4h^)߀#*zH|B^Y$ƌR3P+S endstream endobj 577 0 obj << /Length1 1362 /Length2 6171 /Length3 0 /Length 7099 /Filter /FlateDecode >> stream xڍwuTTm>]JHIshPbF:aa )SiABZ%%$Ty}o[gsp);`hX$ iA$& "x`O%U թAH=4 bXR,%  @4FP =a6$QEcN^D~(0(Aan،P0AC0/sry 1N /0y0>0GWӀ> wod<3 ``VD@a(O7m], WB~;CP;@9ph {y / C| $ ];P6 jA{y {"{(GU I>5^uE}Q#W#"wPo_ :' `,+;K"(pC ^oXP)#8(GǪa?2` 2 ~=:bIF! =e5ۖ7_V D%@X, HaA dAUjh@O؋fhגƲ6 )_QwwIHo_qC `]=4v!P 5Y`4m^B( ?! 6Ox/ݏTGAюITB`02n#7Ma `Kh ٯzA~hDAo ߳Ɩ{a0?lz+¥:Kef_!QqC%uFU0U^{T]vGkM)$˜26R0X!TVa6u>E7&]9Ef~Kv7n_ǝbQ[q˲|#jPI@oR~cʨ!Ur|:CKg%,At{9 noap&c+ Ӻ'= ooOMsI ӕ)c'}n-o'Zlp_"6fcy&̎cSBZ0#;zLшoѓWW9;4D);GrH!$Un:{ؚ`̀ Ao(&A\?6]l{.VILu>ܮ4\J'7:nX{R ƾaG'*,+u#G;yc Or^67M8Ϯ0.s$aXC0bVYsYV.UAźyO~_MgJ*gѠ5ڷV 'K'Hdq|VdU(hp0$|6U^ $?qhup++k&F}"׭>O!ҽS3^U9NpHR4GBV1BgVI l Q4 0_GDۮIŠ:j ekX7c씧YS[JW-ӫEhԸS ?& (8q-3=yd̽<=jq &d( HH(x:1zoJQ"dBd2tB@˴pvnd .ՎU|sl'NQ,>ܯnF*wnͺnSP&$6 L،L]H4u,56 u婆?="y@'d7DX@Dy:T} AOWHmmwXn00.շ nr`TJ26\9I<ύ$$T'kx,jC2AoU7qwt8h(?f9'5r˺<ئK؅D:zUQ>zg6b"'%1#; AuGy; >%>B1jS,g>yתu~<7SHYO3 כ$+1p0Tzp޽D̷Rok=-y=efo>X9PTL¯*ʵȃAWI/r혟Ҷci(j<ѱOԎT]sꭥv&f "d!e}r~;e;<ƴ Z!B 4c\&64E%b{dg`Ֆܦ6 |D?WhMaZgEtP?|r2LRzfqf}8o=;"[z55BFvfgyKep>#K~ @0wn%Zrt˸iEy|pvi .Xb4f2h[ysު4M,^ ^ٳ{z@Y٥H-"%)m^vT$\u +CtB\|_2-s-s!μ+}gc8<\;(glwJpSn-kgW^)+'^)5 1X-9˕]x_MT:steu|iף'e0^!J'{U>jQT@js8_W+W=-W Ot}\QF<Ԍ*xPNÄafT(}{]`2|=2mcųLH%[hD*]h{h(/"#ZMCᄒJ;x u"/Nxf\ƵUO$Nq=ST4^)bt$ISq[S_jzX7Fph-D з.ə(V2>poҌyܰr:Ⱥ_;kbӕxlL#ʛX+V̗\ɬ:q2z Rsoa%lF;9+mwΰP9d+K1Ll?vAoּC@neh"FXӝ}iH5PS2R#Ǚ\T|*94ݛnkS >@3-< ^$h=1Z5 {r1WK.`z.*L._y6cE.4Z9Hc"8r>8(Mɸx7UXŶשH_.Vt6j󭗜ϩDxSo07r 9+$`DQ`h0C! 6p)lKj^ѱ53:2TUO+7.Ra_S}mb/<"peZ5y9nQ)Bt(TęL dcɱwRt1JDWΡ[ɣ!ҁQ^Y ͶdMC ܢ sq)K1!s[4$v h1aSaLy@k%ݲ0e 7~|~ .:IOn#/T32a UT-Pn/OCzgcNW%.Xw*pX#l[V&Kd6QFIEԉ-&ձw-XH ^99 b҃1~, ;k+<ֲXNo Snsm?r|?'u=p׃wDGRU'5ˎ9ˬD:Toy/n]\$ Q?Q_W̔\Yy ﹪_&ՕIpE,Pj|3 S$!/-+^=Ln<ˡ5Lf%y;.Vox;˥\Y@ZFۣ+uFHK GK7P%)9Bc;z#Ür~QȐ{[FuƗⷜ_t 1I؁C /p^}sL+4J)Q] 3w|&hϙGgKYHZZ|iE~<6N>Eᰶ] l{]Ԇ{_+00Z}L2B.qbS@4jCYhF zBjWiZ~ b,1y1냧<ƚw6{Y^#nd(Z 6WM(L,MO3̻:Qr9vJJ4xpUOc|Ņ۷4ww MXD=9CqƹM8#ܓAxWeu |9z$VMIRTV'g ߢ&?΃_Atuݵ/@lⴋ,@cܫ6j UބLb-z+Mhͱ)Qev~5 JV` 4)7n>ZS~ĸx㛈m4e-{nӟܬ,3 [(R$d-U]F3'!MxaQlw{^5eaH~ږ?f㻥(y>L~~%޵I;'o`e͉^&ڠc. W%Es)v'饆>~Rd00O$2=.R$R F*`~pkm˥ endstream endobj 579 0 obj << /Length1 2847 /Length2 24131 /Length3 0 /Length 25721 /Filter /FlateDecode >> stream xڌPY Cpw޸KAwwo,[!xp$www r;3N2W[T5^yLQI$ngdf)Y̬lTT֠̈T G'K;[?DAF`38N b \@n^VV++ybF9f- J|hM@nƿ6 GK#[|5@,,nnnF6Nvt7Kg 2 2j od3fD*v;3g7#Gl4:3\lMA)Y=`ٿd[_,mJ62175YZ Ό#[_FNv|#W#Kk#cp_̍J#pdhidiE_eS~gk*jgcuvBOdߛ}okf055Մ= HJ  deea w _U=A9|f&@>f ?D/'#WZ8A斶 1xV_~w_ea}""v/&V'+Kd/>-hd?rl<]?Zv`ՂEj_)/!qkܴF6E rvk`C5@_Z9J9/ctt*Z:Xjn-Hc^񁯖{ ,ɿ\ lML]16N."x` )/Xm)p{>3;G_2,"7E7zVo`y/fF@o`,RE7s\d#0E7sQsQ7QdT~#08Pl f٩F`vֿ]L|~=[#DLރ(3v߳cL~#pg&&.6f`9 >,Y8~k7- SK# `ҿ@6i S#'?l" 4x;f!8 AWo fe]Wqeaao#ls֭gkx+sSmυ?~' n\8X+!vY`U_rep]ïy:/r ~AzcqpzĝHp3sCl? 
G^kl1ӞelNگ1'DfϹ7f0,13ﱥUZ*G6tWϷ4߱ p*[ "߃D[oԿY-yLhY7~Y<064napF` N"oq LȫN 6:kқG~sF+nal3)Yk7L .㎔<| w顴9i}YQM2{opΝQsHItoV DzX2|U":d٫  1۸)-ٷS?+w YWЮ餐*!vsXUĕ̦!MCHgcM\>ޜX~7W! lm w1T;Pm "*w_(< gl MlRzYr48Rf7?_x6h\Qs] JZ=kcg=Ӥ3`$dZmxf=6pҭ^)}۩I*&v0ȕ]"Gɶse%Lb&,^Ng|?:(@x=HxMise,Ew9dLU4k¨h0=բң%^ܰ)ǻ;g$ .#YQg9cU)+_GwGS& }3H/-6 s+g+uu_  ^r'" g$_qWN_uX@CJmjqq& cVҹzb5̳ éns+NT󗯧58wL" XWK/岴Xˆc]V_'u^9f9 |&g{ CKyI7Fu6=U@^͉8,o~.߃6sa@,RZ֖Spke]4[OR3^[ O@(YWi˺N~ry6KT` ?$&]wgXQ)c%QSe/=*]ͰDhӄ'Ky.[*햁+cg?o;9$ g6-.*[#LRx!aK9ib= E DӲMKX,u})y@zN]$VCgW#J%>ciJ2&n#n~[xtTIb^,tkQMp +JUŗkנ{D cAECc (Ck/[G!3<.vUE# O|Y#ߵK$%[}јr }|^%J{|s9#ᓨ( koޔΐug}&D. #~cS\z@5yŷd{%COFJ@þR |@/N1iqswxgd3=b0J"ʋGIpPJZ>S'ϙZsy@I#Ċ+] l*) t M[azNk{f~Bca|; k 3%L.Q7}=’Y쐈:M@a d{1f\96cDsqމ<p2R/y*V)n;>ֆ2绊L,d}ZadKyK3F?ρ *Dlw~ON3@qUM\tuDB{׋o(ġq7sE̎"<(qlDE=%BEo ˇME|̞m]W <(i =WEj d{)H)),|=wG$?ފKo:J%ZLہ*F9E-q>h"= <-!N ZĻ81I/N88KɈ;] 39cS*{+)w>}3L3lk)e!"I rx!H^U{iT.LѺ{>ɀiB~h<&o/z/nC5:@ormFB03?XqoB 3>3a D[i@^:B4R,ΎjyY-{[r%0l~["ۮ}̹TP=С/(Z6 SDӤۂ2SlKm[V4CS6nX^o:oIir'gf)|w.LFg9(:?ڇev|92"!Dܱ5 ; 37bۧmT.u6)-=jved f@cR{& ܟP2"./'N%ͮC-u?<) 0,gڑ#D!zN\m|tϞH|Xn,)@CPM#G/i;Ȃ?_"Ǐs }ϻL\|K,TIi{)ͫx X :ǻUQ;H*`NQ I#3w4c.`N٦rV!P@B6cdW0 bOT]Bva L9oMءz"^3XY9-kOnNH_pK5\e:hX]Lz;YgP1jpWr}`"'[en&)FTPotw n R:b=(.S\/JOh2)cH/IUyK1Xs^N҉W6 mf}θkGCg.,ЭW&u!95r5N){,4'~+mbMc6ìI\"vyF I#R|;qQȫ[$-hePg{]9guNXYɴMka0i̙ [[ӆGtݥ\r \hްCVi»8Ү]2ސ!cI+?#~zLS YZWK(hpCVp,߲$z2${'|OuuyK߸Xv[vG-OXޭϟJ'l*%Jhd@~H  aŸ]WV. Yz1mn\R^%ɱ֖S;Y"6(1!٨v6|w,I6Q,yػMCX>X8&yzp]U4< P hURWʗW)U{P*a6^N D;'OP+T$G4<Ш E+r~TRxW_L I*|VΫ-S{^9HIv%ۍma-aT \Qj,,K~j Huht閄=(J"I/g'u7h ݰɄBVyLJQo4:a\>Үnk+`KzU}WT =q@|E9QGA ׺*p.$Ŗ##zs 3ܜr{s]<,"b-pؤC%Iu,c8G,:[߫H>Btqzl&7}lUnsEkteȬR㗧sA-fM*9OtO|ߢ R/>coTht}ct#}V'SK=d{Pjj=%К8>+l墟1Pz4F(zU C 0AC[-Ƚ$Tro:Y3$JOϤ]kEƎ'I.B5Q ʬ}?cON5FHTH|]ށ=$օ!MHG҂Fe$^:y3*=]-VN"݌RJ(գG@D%T;Z`#VC#q~[?sz>dTrIaQkZ1rP 'D"nٸ(C#!Yo.5]rJZ RjyB$ oꙿht|mZIw]M@LOc %++L^šWc zc%Z獏q֢[/?"{0'3qB8dd)_ckDF~:+#Eg]~󪎡YcJ6@t$DjՌ оС&wUHvth}UنOpcZ&߷Noɉ:00 uOfBxp.^lIM 3 _sd3T9U-6n,Q dg" 0VJS۷`h[r `o;+8CvHei];$VhPH*`y՞D\Kc4tLXЉ'ru(a0aL#x)x873R{&qCwM]n3bY|\k}^z))1wSx 6=-֭n=u|)70p|+7s>Os!1L ʥ|Udr:cm:}rDmw]vgܲ,]0,M܃߬K.\9Sf@ILk];zxcW:A2㷒pXዷ. Hp@m!9tD/v+!Ȑ ANk%P`h a<c#׼зINR4Rp,3ɌPT *^SxnN4w 8ӳ'{ߤKj)$tSAm']t;}Q }!\\A}K`L|R9ƙg|M&4~&b [p_> AwsY5ټiy;Sv6x 7y0l%G:m9oOYLb )8Ʈ- MTcIPƠ<|b#mN$JlJ +:'"_CaAҨ {ߛlwY';@z:,\b!L|''dFaÃ]")9Jӆw$0_)W wi.XBb&P,f5Q:u  x}ݞ"û<^,.w*E-UMT;ѩV6n_ 7Ϻ=/Jf]k Zԛ{Zot_~qIH8! 
YGck0zT fyc>2~A):4,->8\O,Pa(^0 /kѦޙҷ\γȗ1==^2#[D`-҄r@H`[ON:j,㗾hJd?Js=i\bdhyDz8r&Zǖ/"l*v,BIs~nA#8, mixQi}HfS&F"`'jߛ*V s0ԢCS-{=EcDyri ׊Jn;sg* jrX%>N[ r=釁ZYZvs\I X7a|Zu:v:RZUZ^vYXXO\"GJϿVuqA?/@<`7w\E䅼rQpzTQj8YW*z>0`|a̧}@5C =E;)r5ѧ\R{ ?ʳO%@=1pwKGnHy)&q?MU)/@MrSPB\t7L,;^^Nj{ϐ+B {/|'%ѩLEl hQyЮ"sh(pʡ޼}{T=)PCxn4>MެP# !\}@Xâ4kƯUbR|(,T&?+1[0X$'(4?T.]!v6rHAZEQ2R!\GàaLYF(Xgs}(3j Q$!0J; \E9glFtߢsบnO{77Y<3 w\XDK¤Pș8HCUu6.]I _=DzXи>_G%tW8.i"eF)C3'r1OWp0 sWS4mZNoD K *™?mf|h:Y۰)MkWZY-JҲ/t#oA H sP{v7PtB 'X=`&|Mr͛n }N%WŽQ|;0^bh Kw-}hFNOLa+T&nl΢&b[H 3jЀqy66|{@l(l/BfR+N[rDiH^«}&`,]\;v!1NS2Rޯ*xz1n֭T홽DrFVl~4xׇ 9io^Io%#M`"5r"C|̮N 8ךq_fL~_;{BhÑ/O׫#v_#o=e*Ŗ-valGb:S-Y&v܊Qn f[Բbi;j:Y~UMZpp GēJ%YTG)UX9x5@~{̕Ug_kXm˚DS?:fKc4#\:t ƌTMߡŊˤs9KÌw9P"E{fi(kN(bJ+|2 d]j٦<#~6~&M2)d=f ,!WP/Qw%&[SLi*¸./4F#.'WK❾ь`1(URVTc jgp¸gi ~7ZNVJ3 AhNj/Q0|y =kȻ!gȎ\Dʑ+֨D!0 W)7]iF0V?50}#KlBƂǁm% D!Gl<3i{ѓTUN9^A(|7RA}M黈pDz8đx '(%j> l.IPr8[czTx5an6S9^nQ=O·eW;1zzPSFc)~^ω~Ǥloaʐ-£^|}FѤM֧Zо&y$f*{Lȿ8*ߠ 2Yh҂/s@/Q3t԰jCI뷫VȓoHfD\5RC)l㎯F4O aׇ'Xf^$Aj.k+*νjTHs߮|M=VAtxMq8waPDr,Ǎf^@VsE#NmJڋX Egr[= !IBM#a-EM mUYX)bR#YAF'U0plxPT[[WКA}1glؑ$#K4rh_R>,'w-ir?p#œh*ó0}c: -?SN;^wR׷P]}ݒfՍcG[tgR;=3YJgV?'-Hڿc-}kBa2w y4ZO` Q۬$nzj򆮩|Jv0Bf^ =_XɈSXYBwWiFǸ%MIiNZ{6tyZ5I3/jN*)H}Džt!cCOWĈx>ZԞu )\}:mi8ڭd: *Yh>R!R=YLatu$G1rJ1+Iȱ %H^*r fS%UCc}e*_ps|[ MSt.؟93$pX#GO!H^⢰4},4i͙-C1gJ(oAC7aP)砹1}ri`=8\G`!Hju=Ka"gRP]Ilhx< ٰ)i c5{)0g5Ўeߞj[*}aũgm jo^Sf$\X6E܆r/qcɺg7_YBMXAC|E$` Ռ^T}fҗȝukoڒ| ;֢0p < pk]KUsVe)7 n 'ìz,OXh6t3~Lk!BOӦ&4="DFJ8oW#PMSy zͼ>.MJU7鴰/{sU` a JG?=m*y~ߤ'  "s~)B:"ɻcm-*xV*P-SQr4[vs×'J4"QֶkIo](yI>XywDJ-) 8#VTУ| teqpB3ȞQw8.P\~Ȉa5\`XQNa;s`f$1ZZow>;QԊ^A\V HkFˡQ AHg@i𵑕7>;7}i96G8 k(Lq-<; $n=f1„\V5Iit*7ɻ=~#Dهqbs12omPtyVțtArꊌnmlh+d/oR} -qo8X}:8rA〉O?O+߾}t,P90OC| SbtvqG0tnNJIlZdB $ʐQ|yK*p;X-5 FY@WFkY>@5HaE2B^{.fۏA@?H ۾.<_7 %D@0'y3- 0?7skg)9]ʀӐ5jKbu4J9ꮆISWȥ[o)R!%|Wvtх BV~vl[l rj]z/^>X#R:yBybfi92 [p:4rE"M5',Cd?{'!A+R9i@QJ "1O⢛0WB>+Y~T>9,# C_Rp L!S2әf\)RoC[Îß!6$X18ȳdXF [k>@ڵS( |wJ:ϭzG" ETE*&e2i:mI!ы@(a{ߝ F,nJ!Xi24 aJ^!{cQckuy;,[%\/f|'¬1a^qS YQ ǜU36¸M}*mpG6 (u_UuuXnT3۽9MjleݾMjXO΂Yn jC2#vEŵ(^Ĥ6޷X́A̬-uxQ=A1Ȼ``*lD6r \~uh]E;VJ6_nP/æ6q+sK2jsC}8Q]_Lw;IB|5C"#؜=>vfQ7s» ^Z֫I~8"{Dt)ws~j혬+r yMoB&)7d,xAF~GgD%N!̦LHR!rP+}p cK*D6}ɼBy422.0r2ڿÌ2'E=q{jEXf@z"|Vܱ[hj"o'^{'1aćm%qzC.꺴٤uCUʘ3ϭ >Mq%W)bb75Emڅa;kSEa̻S1gRsĝRg tb-{Ηnyݹ[pscoSŇˋ$AȚ~}W}zsGs秿H#$Cgfy5/CKZFQd/I7J?4qwyߺ4mq4ѳrLvńL t\jyl4n|\99&[Yh{iP"vR/w D͞yvf1dR!ʶ5~,3x'ێUuȓi씰vo6u50( -~zr"(]%gZ=\ds'&iqM5.e CI WDEt0'ڛ˶¼%}uLgs?0_ 8#At% {4q$Ƭ'>.00y@@ X/hbu&M.LT{ . 
k.r,\ MIDC9T!l]`ongnjNTN_^q gb0;m' WS&B@qbZ-ڦ֠>߈9\( _whH9+24UWRgHjbaWl)!?<GAD^ uKn| 8q" -3'w*ƖG*_md%QJرŀ~2OeK,ȇ/a ]vs*I4'/"ۋ'1XߵZ iRa_^LJ.N)~#jd1jzWd 7 è_L^`QlAJ).7 ׼_Oz"U$m':KrdcD^.qgT}Q|w\L7/<쏢hj,7eSK#'j=D\N= !w1Jr7^$~p"$l[׆Z.)ꠔr,|`~=8>Nq"6vl`.rɶ+6Bm$Rz+UhOjŗ 3yW*{fG+>hg)Nx#df˫pT.!ŏXלljѿI>fhYmOL /}&A4/|ѧ}\WIE bd>DHR"Fe1~-LcإkvKe$ -L{ثGH# (u(!n"m.ϲnIlŧgq5EG8N>UUT ]kJںɍ,xmQ EIjy&&Gc1?hg E,zp!oͣW#P:>B㳢I @8l-ܜO;XU{b6r ³chW(,M0 fP)Ꙙc`VQ-=Ӄ-bj$EitՀ@9d}F^U8QCLFp{zhf އDl6+C<'8Ey#4ǸN[N+cPb< kǣCnH875j hZ+2`gz®LJ9qSa޽g6Oi43vhZi?MV߄m1Ԡ(5_'E{#k1kLY ]N y e2]C@8E iAji$}{$#t8L,- Nr{O7\j)N*^<1 xi hzu |zaeޠcŔmmK"Gr$}ԋa3^֏[!vL0eu#O$a;=vx"Ž mΉ U"EhszbwvK΋h/uvWJw^aw>vʖs~%mpם1/~kI2 {4=NE) vgCBḊLx%;&L#>ں45ERǒkUr Y}+F޺$D"ޓ0:Z_&SEe/mBdbM)"ܶ k(mT㹊⣚OQ@,%Վgz=#Vȑ * j3bN̹p&DܦPRݼ [I*MH('třr ;[P+SYA9g= CnW~cny]լdJ 7%N[D =Z#t $CH)7o=t}O}nof_42Œ2>pZBN[ڿ*%ȭQb>1?#W4 47VyccSa)&ˈD ^ii=A_*C>= -]3^aÌDŮ] W"9a a" M~3P3+q5E0~νڷS3*]yq'Q=y!4SY/li&q _E.Bd"t8lNwV_K%XKnX:暂}zDsk9< w|R.hL 񃘨OMAv4eg0XʢDz9-_Be*%V B`Cne`d~P۠xגKp2ENvʶdk7Ǟ`fpE9-+]@=Dz@ޅDE2B`Ddaf :%Z'#wґ7|"Xn*Ǟ]~O|+%B݆%F>4 # 6Fd#9fK8>E># DI;miO~nlCV"7 umq䰩,lMѠH64h\sK _AF@uY G@:]]UAMp| LԾ6.E& ):Ux2pѥqWO*+H-aUՃ:wpޅtkn4Q_cٺ-Zo%Iż:5_l  M|UE"Q]T b}<.f|||=4lI q|I&,b^~h>sDoi[1+μMl P0l2yN,,mr+侴jZFvȬ-g:<ɁϢ;tRf{ozkHsU\j Y凢el&=G%-oe:ʾ9ܱ|l B YRCē J4%񊁫B[hg:qSˋ(YGGX'n3u%Ds_!PsMyQ;ް)dƋ,iES@ZC]Rium -2-'Y6ISٹs㈦|_v-bv̬M|韐xޝrQڻ| *256SOS#DX+'g.-+i%J01.cze@a;W&B(lDi"5@ub5+ MQȸ(@p]xAtMv&k^{c QN@ +<ۓ%)IC3u{fiN|\lW.-j<_Z8P* ѹ"iՄ3FaC!ŤfZؕl)[պq㇓wO5Civ(}6tfC1&wHB Sq@894Bʽ3K_i_'ﺖrֱv4T@Z-Tu`-pO!K0_CM=ұ=k3nu-XNOZN y7ɕ}!jY\kgI$YiM2{Hd\/GQ+f@)v~e,_p7G#?9vi㰬]\zꡩ1 +,P9Kxާ_4"iv(6HZq[Y mF.;S"g5> MGE'A<Be6@.5׫4uBae$TgC;ճu;ϙ2/ Ԭ =µ*ѓ;vbxc b8yQ"Rj( ,:T|LSm#JClc|ogo< E~iPs._[U@>ݰUJE s9jۈX<2:xNR?3 Ips ĕ ldh եkgec}h*Gt)+?G)?{~no1We-`"SSnb<qxC~eSUR͑YwʱS oŚo[{'Ů%o]7\Dry@ߥ褦r77& c/J%b_kП?GSL>ި PPU1y>OzVm׮""kg4u$bDIDMpIqs~Iߡbq wr17HKɬP̙C׊D)~Z5 Enu:58"=S)\JЧXOI7 f?Xл0ˤČ kQ x+!@$+6SCD0ka4؁Q%: 1q ۪@P–Dr3`wSZ9Hqy,; ^@dN1 ;WJGڻ'g> UY*pt,A7dFQd Y]V:`0(uh5F< ixS56(Q}A֫[,h;\ħЭW+UE5>>@ 8/BSCGun!LG9S6lSQ|1\Od!`YO NJ)īE D(G x)R`Gp6xjQ혨_`w~ȷV_{Qfd!/Mɨ0'ֱ3w֩$+"qX]{Tרd0= ->pbKٚRQyj0A:]ĉwY+XzL r K=hdQ #Y7NbW1q_cShWċ-B qjc-4_hdp \r27RF߇ͼ}َFG tr؍%'s}}{ aKB[ۯio}4Zu .{DY+S"!Q v Pvs FڽǟUSIKg: ^o0 YM[iݭww&m5otǎN`w=r|Wmd ak7 Ƌk\(o)Js7? "!=|7jS:CD0'kۆyCȏzlFhPEr_Z4>M,[dzo2}UHDٙ>.ҏy\F?%JEw!"˿7\'8G6,WH?>MO#%}Ax'g <oF )4n֫N/8\SPPsTR-<(xU%uVw"L`=PSZ^"Q$!V+CCARV } wV; J%&ui!H1 JoSoo$$#}l}qf>t=l˅$"{V o:14 Ow iFOSE8jD *bߙJk1+ K`YAMKΆ64oIOw܃OZ\"ܾb?;rF3/wdU6=|dUV2zk4_S[0mva-Fwȃ  ]u~^pxQDXUPtl\\@ϏB6Ip/4g- .ZV]܋h57.2 9dz ?ع{]bWtk>n?#2p. P{@) VMh!WG-'=4H(U=ò3d Yw={k ;v-`6H%݆˽:COz) ;RX >RR8(d6nrbn:cؖ.LZDǤa/yqn-I endstream endobj 581 0 obj << /Length1 1358 /Length2 5919 /Length3 0 /Length 6861 /Filter /FlateDecode >> stream xڍVT]&@Jj薐f`I%T$AZi]59{~w - !yG" P$dcӇ"l`7o#70)h7M8 J IA P_p7I"j a`!#D%ӆ !!; vڀ`MvFhrm`?RpJCHI~~OOO>3f EB`l . 1>B6>cփ!=An`w- >r8kqu5>+W"(w0yCa;B@0_ 'A@h߅:CظA]>WҠ/Y f !Smз{0' Ղ  VUm"fFD@8 {@%v~]vP;0EyH7w#Bmk=Fwvlg ` DO{e-hhrnߐ + \  fAw*S,U_K\zG3 f@ GM!e?(;9F99C|uG G+F?rBݝUE;eV/9Aa`m8@ZU6-R(&D eh `? 
Dn),wqrG#ZW'>_"63pPJyOa/lF\3n-($%SOdkJ'r|w5&4]]o"L3#_HpW_n0M-]D;ܳKūtnF:1h(q7Y,H^F?HONPM-Tk,bgL_Nw΄bpWa3Yfʷ`gګA<)d|PݗU3_s =_J,XT4YDV 97[ft'O v~jf#펍=֙\9R9m",tpqٷ]+Ӌf{M/k^iS+w Jo~9:9cyI־I\۵ULa2}5>#kwBrWEEE軐ե./_UM|'d%Jl{:dm&'歁+Q$N[ aw7}8¹V"v)y=~ I˩Ćo$♀gZT` U<Sqg9 ,hbZ>ao$?*4 'íƶ>_EaKހ,ÔD}QS|k#:7g%|O DH҇SX(ħ)UH<ۙZUS;ʼmס?=~;h`|U=VHܯUWo4J;Y"-Ha˹I}Se`NE$$ܾy=0p4[Fq\r`ր`ޜvfc̱ҷyesypPo>|9"+%=/2%P)ux-L`Kh:>Yʊ:\-ջ~m)P頋}Gugž`.ӏLddbj̐`䷤c6Ӿ\8O#Ui0vx@o=DJ)TޡK6 hUKk\]X!>|<ɸ^W3*~5RN qw:2bԹfǴrkz\!1e;7.9YQ6c{~8_ǸsP@7Uls'F%ncԷD!T`BVjר@+9s_q>sD\lz=|[)U}0a+($/\RT]?H2:S=Y>xg#\w021ސS2AK|q5:&%Yֶ#fKMYv}ɧy}\璒Ia"ekv Ύ9 yGK7mL|q7| ?µqɊ, mT!m{E6'(ʔǎ&e6sWhW:\1WRCĻOcyh *c脍&L>8fȌ.5R>TE}+ގ1U =(.B)>lGY#S%XfJÏYv&U;]~eR|YNU1?g;$@}W,x%레5ҁ##v΢fAǻCϢ߬3 $yv!"Mתͫf\N´7} uwddӸ|fvߥj0wW3F.m3!DOɦܮo;{7@ X1?`{OHzF8ag%W̹^({{Ped!z0ʀk5}ɼve/eZ{3yV6\3ͮ}˓yQw??U%|+_b^/[MDG%]ٜ>s.O!g/(!>+\9xAj~Tx>1!lrK l4O^w֮d9 'tήZ ~vD~jAWK|la튠e<8o[Dɲ.|r_N(D92}AHZ1Ƞl筚J! ZDqlmadW ]͚NU DS#լeY~t yl}R)B;}f mVDeKƀ2]& >E}?c!nd5$qܭ^~3A%qIicʆoӬ6ދM?#)]~aGR-j/QMQE}k`(mNxB m0rFo*pIui%RħgևKa jmģê"gfZ&;٣#t7B@NNq)(nyKYy(!NqbgUۨ/l&֑NNt2ᏗbBvOswt\W= )- /K _eY hR3asRe)R7 : |U=a9Moyq)%JbBk3"l>L  Hg4|-:lCY=BL/rKꗚ`9j|. pY噽vLnGxbBe9)!IFÎ%9 (nG)ת?*B t_OԓzC!bՅLOnW)jh0Bg}r/1s*n㎤9vM[P+Bj3 cN)ﳙ:h[UYdT+#D%DKAMjꪵ&Hϙ@1cev1R~%PN#AMUw3&}$^/S-u8;ڤdiP]00aルȢRfszrU:n **P|s[2)8oz5r~JaXޏ{ er"- wϦBem]sM{"$o%]oc.|jS M9`+TjRZ,멭clR |Rt[p]hMqѪ 2c}^JϨvdz_xd˿UMOzpv~GjBAU_W_J&·Tu.c ?XdXZ<Ժ}4R2EZXAd7z)Ǣ~7`*+ //;cNFv;\DyMr+Zu$y5Q^iU:5)dE-+.gD\ߌt㰿a8zSE8uBר {09E -0yNuj|{oxZ ;- ήI4214;ďDg,27X}W`9&k/57:9V0q2+T&8L^"!ʀ0rÎ/ӽ5Lr4)`W" Hon\T?k-9 j"[yZe^ΆcqBnθ+q]}?Je#m5M$vJ|1=(X缦[gj{ ;4bԗݕtA&4`g )\ۏ޽8}<-禯Rz>&lu+|d&(:7wi@ݪ4BE2;KUQfrRHWüuj0*r_G im3:cĉǕr&.WT\>d(#W?v72aρT"|Ss;~ >7~'=LoΈϽV֮rx$O(ԷWcV$#c. E&d,.;g!F"CQZglX/Kn$,3/ glqM0}> stream xڍWT} F%1L`cni AZ)R i) $}s| ej!t QHid.$@ j Rah8hCiA03#$%`eD+ p@P5(<!d eE~Խ`h8A00/BF(`a?B(c0(qq???1"(c0.ݘ5`#@b h CaCz bW 83 Ez <p{&:bFA2x _L0]8n;(=u(+ aM U V GjB0=,"en0 @`.+% @!QWB +4Q8pG'aͣ8<0Ɂ-$ۨ n* $ *JH`$@pgS*@!\?}ھ_31X@oۃAP .E?`==k~'!`I`?Mm`jscS8p>:p/y0S CG`ԃ! 
F ?Sj#H_쒐@hbM&`!\h_<> Ţ^(KHשZzZyw rtE~tqO_ FOМ&Z%؇:NrS`D_{}r9~0uVdIDѤBO'/-u҃˃c|kW,3'Y^ J{ ׿(2',H*Jݝ_}mq1Ix֓UNд}:ה\b- yz#G]kf&w⑕j3{Dam#d$p)^-;[!yI;m2N<1IK oT BK,blbxSef:G5{؊j ?ҥ -xW:Kbra#՝P8.hu26vafs䜬=]ʹJD|eudB$4ky&@Wj.$); {0-*́YJY1(}7vW+Fc߱vpK`}uF{+ff&$_~뫂I|5p#f&(,+>m|>$F77NAlDknf;8H U 2)˗yAйNF(Ys&:zŅMY EI\lgS>o*lnL^f%SùDoTZ" z!d&5h&S]Hژ +,ӣ޽bk5!/m;mA#2Iaط7jhQ\8NJn)'wD2r߁ywFYxՒmA?u:v}ciLUnNuvYBF]zC0N<%$=7D^NZLyX|!% ~Տ[Uɸ8$&wdINGd_ޛ'SIqdzP 2P`^\浪jf>n(wt5>C9YϜ78㳲}ZK2F2ϸ[LL2z N4T4<Go_RwsD^~7骝&}7@^tpbT\#sim3s\h<= " #& &>'tC$Ie<)j?ᚈLC VO7̪~3+s!?(q{JT&m7iw_ JMoVcv'"Ԍ @ŝ2#xΐA]>Ab,&}|Yy-ha'HH>P5aֲbO6?y̅eNpgA3+$JWG qJqZ:ɷ!Td~!c7dN1r]aNe:-ݳ*8\2WaJ^ro{3K~;ͷwjʳ6~cp3oYשڠu|lȌ޿l2O|idUh"\ި#tſj[芠$}rQwʒUy%b>NJK&y778t#/nD͍)j%\J˦^9oa6UBy M lAGYoJY56-5pR~˺CksvIѮCޢGb0G;f[';]hBc BMt6_oCLƏGG4h=HGDtGDF;*RfU61y86*|iQgbW%s>ü3}Ltƫ'S<[#mUR1~y BJ{5?{%ao9>-.YQ?Ud )]^r'4)/75Ƨ]L8) )Un ZԌ);ܢ3j^ݿGz)G d @JA&{\.d6ln˂9e.d*F1]-luO>~}U}@3bBcQ: Z?jR'8{Z}"6˒uNrɈd];@ ,wJ<G-Y93g^~^DYJG%ݥZwڐH'_$ˉ&s;IJÑZ[Tċ̋oޯ6vn+N ]k:}\Q]1a\!k<:tcCG^g5VRD"j~o<н{#9+䐕ԏx}g[ڰ}`Ŋˑo`~{n2'&HН&Ně4^5x]zmr!%UGMZm1 bD@صzm>PG/ oX=ge>9h)8F{r =C\Fז!1(8+&بjXقO'_6 }+>!{R8 ds iQ#H3nϤkJ?A;ݓ9HU8Z:~cdBj(7߿ oyuJ:hBk^A>SgÃs7YRtdiTAz"='ԍmr@>2XggX#$˺/_م} xbnBLz -J͝ (^l'4"eaY̌4-_UNLBIj5=̓u*i=!!=UIMRg&/x8/>l|Q[ã6}(Xd7KvEO}:)%>qEg35ñ'bN[i\|YZbK/=v1Rgwێ>|:4 uZiFtyۺ4ӝ1ݾz.k)qE06D]7H_]$9bM92!2 Rz{91g߮SduֹU#BcfSZ@Ho 8a 巿Z_3L endstream endobj 585 0 obj << /Length1 1374 /Length2 6054 /Length3 0 /Length 6995 /Filter /FlateDecode >> stream xڍVTڲE@JC(= UPzJ !@$B  MQ)E@)ҥ(U_Y+9g̜3{+fb*.hg&j@0XR #^?V%@Cx:w3@~^@$"#`ܿy:4Qp_;psO(BdEUp E Xw8" 4Cpl?RtbAq(WqS @S/wjhE7&#|Ю(08rcf:@#o8ꏳQ_WC_P 􆢂(7+ 4bEP/G/"x߅C*&@(aX_q_ׯA/Y墆F"(/W}  z(ܟ+?o Qomsc`0XV 0wЯA _( tŷE?/bࡸs@. wCgǛc[`< @ϿWxnQ^A.HE_wTUс@,PLB @$$E?CU;V)K* % !?s&X A4WYoT?P(}4^tOT k@KDj".,U-~ }  *'7NjGj`h_꒐B1h?bNe` HCBh < g/ y{6$o&@8 05ݸjAQBb)L15iýJFoe }i3F+4STӑ&0'*5]\bʫ!g>!D Zu}S2 骘鏙X1Y}.GqZA,".tL8+/ VL~7fl`>oG7U,Y]{4<\·͖h*Nu=S#xhί$^SV DJ=DWVnRD⪦3VWyJziq\u/y[OȪzȴ$w]AaZ:.&qڟGqGk-J DŽJUX+!\H彂(oEH3(Rv:T`#r( ^+^>Jag|hڪR87oJdp*.M(7-s<}<ٳ3s5!i+N=3z*LZpX.7tKv8pÖjF)!͂#")-Wʘ'SڼN;㻟ڲk5*-d_kv)@w>SwakBvfk'E[رPҸ8?bxt}E5ΣzMB;G'".kjZ*p ?IE;4y78W>t(T\DbWs$S<F('E5kX#-Q](»Gz8ENh;g8͸r;\mc,ǿ8T\I#uLK Sl"]FG_QšfL56/ypSy0ΏMY8M:rP%(xVզzk~lQ`-fmOu]F=D*{BiZ HȨV?:/-wI响Aά_@c)腿]#(YQjUoV?mzekmO" =W $GDy3v l5:yg[حYryA[{JmVK}ɮ]k_*EyOܻ杷%[#l𠌵 f }o2˓'!!:.OrUK]~媐D$ e-Agtzy4CWS7$|5U2 4CTiCF%2|VKK}8xsw1:axK}WIH.mˎ{ʤi_tN*2]^zV{5ͣ"Цz]4+VwG\#Be@(Y'8KEbsΉX Ǧ/^A6IXȦߐ*- 0 \cTu[]+gT7!}G - $_+r}cTgS2v ~%d'\U$"؟#9de4#Qdl{4zF-StabVY]7:t}#>e91) 0YԚP7Xu%"}\. F3ٵwI& c؆# Өmh> .n]s!gHjp) AI3ɧW]C.2<CxP?E]Y6Ɖ1x]O԰|;Uu"2%ޑ'Ypx!V:Z~BH~>H™ɲ3Ŋgotof+Y gO+42r'w,+PZќg“@Xb)vn\ )@yuʣN/%@jZwmYqi[yp.s},9'zXԽbp`ʛx'ݎRKZ _z7qV])Fy RNGKg6Y,moN-S֢ShG31eNHv 'F[{.zpqG]"󫺦;j~W@|Pnݓ_FЏYm3Wcݠw_Ozl%&$5nD.ƥ'Jcnkf~zKh/ЦhoNK2Uќ!N!i4 9+˔~0N“qӻ%)kO O;̭Z߮pɿ*/radBٷ(+ BbO}%ޙ_ߴpkkn\tɆaҾ:K]ox(DʽU Q9##6l "+9G]쪄0$, Mry3Aǯm \j202lg Z^:cbVu ]I]b\qAYכ}BR.e<)s$ISYv[ mI7qa7c?`sڞn(թ8Žwk@3v;,ȫ7Hk^hɨ*tu/W;oeQO4_j.i6}Y=18@͝,+ -͔NZ8 8;^."7,`XƄN1PW/h -M'-LZe^0H"T' Cp\Dh} Y*3_+5tRkG˴d3{׽JnR|muүPW1߶yƒZ^*mn΀eyFM痃PASK"/q.̀4Rַcd_=e~dul1-fl]. 
U-b0M b'' yEx;ZtKDDp~d;UAc0@>{,17[xk_{[~/]G?d4]h=pplyBO\-q\v2*%~Coh945pְɢݍ+|T&iԪ3suZ ah\%~=;$P.1nk\"w82 ȕ9B* t*]"&Lk1FK:ub͙2 GtZ&V +f3Hx9Gog<ŐB!R\ZksȝXyʕ: r[4}+6c bG(!o3i6SS"mi7Pz9r IUŒNbD߳wXw XtPeW0p.ᛖh Vbs+/1bx')2Kwu$ ,f(6- ҽrϊKݫʕ(?H-.ofedI&%IdHyg42Gni>/%oS-79v..{o-qwM֛ Sb'/xw3"\/7.3ܡ |{c<?mwZEy燗ɥdqƾ]])+Ŧs"GU-hذy놚{_"[t$czmA })g:kLlUoEi[ҿ5qs]:pqЕݼ?CtDL HGP+f^vxۢ/`Բ3eph;&׽|8eW7q.'ޮhzֲIv_L4[Xc/-c#… ~M> sežpaa ΑAOaD^2b ^cJ,SɄ㗺^0#nf(bINaÌi~zq/Yn>_0Q-vub#V♭ʲE]ߔ|)(:"?>j!Ǖiz%Ge,}WWus\ǘ^"ýg2vzny*biaffA cxM>kW^J>z&C&G/[o0otHj endstream endobj 587 0 obj << /Length1 1546 /Length2 7688 /Length3 0 /Length 8723 /Filter /FlateDecode >> stream xڍTk6 H Cw ݒJK 000tK7Hw HIJwtJI7yyk}ߚkǽڛ "<|yM]Q?.3>Aq ApW0 *_ry8,H5M@aq8O?08@l W\fylg@#͚mu֖P%|ЃYA`{l@8zxxX:vR\0 rA6_,@eۃ]z0[%@54pڀ=U 3GY$_OfHn7WNIPCw`na7'A%(UbEV?}o_Z0$cAn'giݿ_(A lW70@W)ϸjlnN+UEX"g@j`W%' am*p_CAO`_  rK"rh"fkp.țCoxy0L` '҈OJH7" D".n>ڂG07?o@P ;҅3 !+VFߑq zxq*  Y, ƝYK8Ԅbzg+**\6y#gE3oMuXI:|-u'>~&[CCí/{kքڮƜ&J$GgmOpvIXӠi73 nZl#O)cwtj~{>kq3TLTƔhg$,>r;)j| Vé3#N6--cs1q6*J^42fNE/1-z4YriiCDj,O&j_ Kl'|&AYա%m7Xfy{%_ZOF"#>ի;4㾑XV+R1}9'ShYk7VVȎ6oX8f.OLy/MW2W+;jg'Hd23k~e:YV$5JHY71U45&Z_f7t+CU l>MtV̬u6kxCQ6BmzVds:pnaB>K/~I '9-j-=neͪF`ߌ?vөݏ'ᄎ'SILq&B;w *ׯ8eA/͛D?I^Gg &cL]' ̩&l\G2zt:5ǵ=w(pcro-%+~1h&03J^F ˻׋a4 j1W|ˡm[UmTS̰SeqPQc]; !q"D65Nl̤odkb 5d' ܇7żb\VMbO~{3u@@{f=vd䡁׍Uc\wd~/“/ÿPcNkgs1L+k'pWŦCΐ#(-`h؉At? %iٱx 3I&B nq2&] ~3d&JY00s R-Q:hF 2C}ݏuהkyۛI HA~U_)PC($O:vhBǫ]Q40Mpd3~LcNvk$i%zj4EGN <2 `11LlgTfQ:ut/jo*IJXIA[<u=ў1NIXΕ ^+j{uPr* gz6ЊC33-ZG ?::cKx$`ڇ1,%&Υq&k!4DբM aKMP<}ژ dX8.͇k|oyir>"w>D炭 ?UfM"n~m <%iQ7JOĶuvm|F+Iz@z:ivxѝ|AdK Sq3- =W@;Q:aKu ugl9؜;n89jvtJ*xL=7nƀ賌Ngs4Msm}gXPn,ESO9m!/rE:$7zRGpU>;Լgi+CKt_;q0z-\J^ŹqPyVWFr*i^G͖6*}6+ h ]|7 eHvRn;yYrԖA:l-.x.D- /Ya q{EB:'9Qv,j9LtEdāC\k}zf,=%a9c=[W>.48Uh2}k6?Fn\ԻtYAĢQ<g~ -: P&e$U3,EjDX@᛻Aqp!FtXkRp5VZs볠;ɛ;(JR'qz1^r!sShuàz 4e#Ry0_dܩgŢbRS"*aeSg([A>+c 򸜟'@F޷˒Pm]LeH[RӔU\.}HA%K5Y'+": rEa[^!T]"<>i=%tUrdNB2h*f{Xp$>KDx**$<9P2A@rE@z.7ݿ c45Mw^a+'AcآˎK%)y$EI2v̬S :JY=2VxFt3ye,D1q!iKOvuBr+Nvc=ݧ/e%TW&%C)u/YNjH'q#47o2y0+^:!irLoP02Iw+Lj+Zw$6M ޅD 0*1毨gVh)r:,y]z'-U !dv]XƸ=ƸPqSR٫.[7A 5/ ;6 /.TR+/a&ZӋȹ T*z[ :5{ QW)d~ٓ0rlpRYלA6O!GQ𜧄!+ۥXƵ }+>^)F؆MmLIonzvO!@lU&3L!w!/Țֿn%]}6(\(WʵA?kG֦I^/cdN%~2 yH/gw7ʰpv8yڔy$5Is.vέ?HB"8&$h3طv#RŌ(Dw7oc`7WKL#K}82;nrSܜ:wc tkz4Z1,zF]A64j3@7PpYjgʕwzUb%Y,J1Gc21pK (}Y0iV;z6TګE:_uyVm1Rg҆ hb?+E9|7lts[kPDØ|4nܢ`2bv kɗhwUO[S j@ %-X8 oMtNqSfdQ^To3Tꯧk9Gi Z(HY2C #_=|cF`If+P q@lLzL㧊?>@'BL֫G]\s67ЂM'>6*'e**\08?bjL)|=eW+7lg$qK!B{lRQe5bY3ǖgrBz%UT( jAKn0 m`S;RnLE])mc,=.?Q_M ^g-VCC^.)q>$wFlEU;#%CCPm~ly0[6ݢix yLfQmoOSAxJ@y18œGjԄmSߓ6FݭRMi5o{غgG[̓LZn;Wx}R1ko; COVeǀק5y%u9ܤBws0nf ~VHe #,#\gf,9D}PW6@GE6]Ye&HJz;n'!J{lgur}-!h<&[9&vpXBIu$t)b_R&*u'`2%Cײrͭu6l\$Z@Ӥ%G2bKk{ "KtO_O҆5UwSG7{|4 . 
H^?j,{}qR)xm EF4_"gڄA4ה6| )nۂRK*Dfy90.pН̵."oGy8e(DZ xs_z!swVTSn%ׅOR/?%K;ݍQ.>Vl3UKz.}6Z`WTH50 uz124053IkDH8_|CtG€\!u~@߶{[My%a[n tmKϖOw?*Ve$bbw(: y|:>w]a{ć϶D $Vpչr˦ZئZ)*WԈ>ܸ endstream endobj 589 0 obj << /Length1 1356 /Length2 6560 /Length3 0 /Length 7497 /Filter /FlateDecode >> stream xڍt4n zMGE5`Qf"JND.ZޢGMK>ֽܻk֚y>ϰ2s!l 8$P@< ?+ JjAp+ !`ԭMuDj>88@0;&@ * \0ʿ-;OLLO8@قM0r[ G (J:P⼼^^^<`w*zwb=.@ 3+Ya#![3w A}U +_\tGN "\\po 9C<(4 v;#n`3O`.|;ӹ"a(ww y}d%GO޾7:^p߿0<\y 07&ؠ@ nցwroW_W+v?rCPHy#lQ'O[304)x|ϿOܲCgFڜ7$/@|B H@ r{g0.@U#b5{Jj;!9Hd{M?!7v s[\5!v0FUQ[ ~D2 Ӂl_vs!:w?7_حln4o܊%H7ooB_[A #P!$}kFH䭪$ !S[ǚ*9:/aq ܾxIArI kJS9_uxau.F7f>Sv3snr3 tjhScv%ɥ8RAvNmnV ^qFNdL2㢸rI&Ng0s^L,tߌ|pW~;Yfڷ(g(ϘEĕMʳ͟V ӋwV{ζg'BP'VSZ!*(PNV{FSjo"LD^C&rԼ_S;^7`Sa`jQ::olvbCق:3w&b鱧6-<4'{AqG+Q{ǒ6 >ҼN˛/:̎:hQ+OXRnoXX˥1۟xl46o&Q7~ MKn/r!ԉ=J^o=W-^G;/;F#ފ`y9lZܻ,|c@ݎӆ=Y {{O \r|r=d4L9Zězifj4Q{vC> ~ (=KLM" yt]e[\ eԻ>YؓԵԮJriБ kD1GX75/$zLj?0FΧsh2'$9 ްubs.]Yr9s7]S=)ӷΧx|Sl}QC[8$dl-dF8Jφ'A6Zw[Σs절ǟ )-Ϳt}^sIv0q-[}qTOύ!?!Ngu#X-i-]beYcևC޸H zbLmw5)7`&#UWCHmt2pnjtfz^qiahȫ/^d4c1!7M@@6j۶T dxmhX$S&UH䉏aOr-N}'id-Qg. ӺN7Tp qB/问sԍՃETZffe2H4\u7u8(k{b.zqf p]N_lspJs,),&4ìmRVs+y;I1|wT^-,N|.=kqkЍE:j|k斫Z* Nu>:7jOﰔz=|nЩ3AS@\א婟ɰ.瓴tO3L3~Al.,7j@0?5A UNd-tfx5ʹV鬵%ԩ[;R ~V%B#e.ZT EƂ?Dss8ͧ< _Ρ;<{"~Q%n  9Pcg?2%_ T _>{H_̢VٍNf[/3xΓޢWW#g6p7M_ٌo~I6>T)(}950`QbAXJ00i'lbЈ\YA~ʔNUھ~Ў#S (bCec%l"R۪Mz6_,_kti Z5VGE0UF tfA@ؼ0=}7OZ+r3"DZ& 8âEѻh ɫhȆW Ӯs<6"J8/0[)&Ԟ}V ֳyƷ8 JůW!eHAȳdQP1%(^Fܓ(tRq'#8-FuiULWY|V+{z}8r8۩]_puBIʹOgۉYL5i!{gBN{:fJIO>ul2jf,=Cň&ۥvA% /_ :^!cYrk:qF/ޱYu}<%CV-&5h';ŞW<d-<@EJ\)P4F׭I{tc zZ( 4 s OIn>X˥D!6JD'ci@ņT?JRHֆvU/6"(R߂2Tvt~Gdh>Bqҁ%FrjYwZhhƊÄ2nQL{"iFՈ>4#UU?s .$K 9'l9Ӎpl"@хv6w=>YƸD^ގō ~6GxҞrlaWf%b_.MsVoBq 9&11Tz.'-J^9-F}eڂmFm^6p  MLBPM}sLd30uD9fb~viQmE\}u\h^sCt1ߛ W{|S偤' 9Z^= v q: W3P-XA ۔Z'^ս6M!ק>K'Msċ"r9T4#88qbQy#u:FWEF95&tOG>-GW߾[ȖbE'y $9IJxu܊kBX U9{ c43 &CMWA̗U9'IG$}]^C;ާ>Z9`CUffņ^*aڙJSAL;IlX%NMulr|rt" (+b}'ԣnJbT]Xnz}Ou;=?>N kL*^2V@΂.JAh̟]}fƶ1=:c@){Oe .Bp<ؽ3N&gaWK=560E(NR6 whDKlk~~͸Lp5j3Zf`\4ZlH⋶ԑ{:}81fb|fa,&?v0.3e&UvCMxF$P׾ Հ&idW;$: v6IB[ 9YS]X!,n֨}\ex^FRQN ]YR5̩GR>mEa.^Ӛ⺔;Qw*a}2oMZIL<&GFqiS㎴چ7`BN˃t`CgmQ].;.Mu;Z`+LFtÀ]/W_VU$Fc'\՟77"h3>8o~o)a>gwFBUoҾ3tGX>^犷a/&~bo!7[ h>5~j. 
U"YB~털T6j']O)#<| ualDNs ̓Hc`F(xB4T&w<*j J}vY-!vy4ZwcR-̚>Ly95v t\d@4*<2KrKIZ(!MFYm$KCмI)m6ߔ'+EOh&YRT4ѧAi!d?lbǙzyӚP>dho;'SaWQtH Z+f~;kS9nl~wX+P~4nGښl7q ,114BoG+샌)M+5 mд8!wM YĦu2$`A'Ε׀dtNN~uYh`̥lxbxF:2vS\@̽Q7ńH%H+|<=m\hPE]$1c>=t'~4 6`˚z>Rd;PG0y;ʵ|˱Xq& B5jɊj^̾L)A1z?9vxQW:MxzC3wQ0E]ӭe<4[.!'sI3dT2 7~*kSz*֯ꦠW[ϿN=>UxJXFIJOm0ǙU2$";0;8hI.3GX#clkyaڍ޴{0+7rYWODʼFE5Ym] u+~@a(6A8S[+i,=8,pHJV\MAQ֠8'+ M] JV:-bh}Lɪ!;nNthpKz&Yf2 c,%VXJcWwa՜{א+กRtc+51'ˎ<%jln:^S Bs hŮ>]8k[`tYed5O8[`&Qm*4:+I3e3|hM~(’qfѓ4D/cqwJK InȰ?QG[ʅo΢T;["_f̝X*4}J#/ujpS~Ab;{;K[_}R4\q>ȱ0FVۍ`Ng -͌-:ˏ^\\t"KRq fGjN?Lp_p޶e)LR>L+x}C<`NZB[ӛZ4"IvnOAoReqE:zsѬbrȀ*$5mҢދjP.Z 4m0 4bT\~7FۓI`Xnt_l aMI[`%#`ɩ4"A2I5 ?emgoxȌZI1{#C\H8mJ|4"`SڬCAHac'X?ye7fXkzgag3H55xuኅ頗3bh/XlG,߃YX4Ml endstream endobj 591 0 obj << /Length1 1672 /Length2 10224 /Length3 0 /Length 11297 /Filter /FlateDecode >> stream xڍP.LqwohZ( Xpݽww.-^(~r̽{w=/4*@QX jdef++XXؙYXPih4 VQi4 ?,/2 CCE @ `ccaj0trP*8bf_:cz+//7ӟQk=hh~hlhPCnNіrqqa6v`ڛ 3\ 5le5֘QiPSGC{0E`188٘/ e[_ 0>+3G ͟ΆPk[C7b(K)0;:2 mL04r:B ^ , % 0|!?z%mLġ`G?꓀؃_ p-m.6B?0q9e%yGfvppv9n?_zL_{AL/?`㟊F#lAO1/2{+ X,|01HVZ]LM<\l ' qT !_YS(r__%;߱/C,,/_tD+rSOGoh rⅹN/[}5`je _A@^f)+Dhlicݬ 6` deaˎ[\"/S~YN+ic 5c8n/~A֗4exi ` Gc\?7 $@A/:#^n?27yAP%a}3$2|% AHp ?K/ _ _?e8^|`+uqj`QR pw" YkGy_O\=7lAsf/vRR3%phsWAmOC׀ČQmJd|w6PZ]cM)mft0$bvt؀6r*vٷ"?ȤoSF8J^A6S^a_ %tsLiv%Z>9 SgL\\dhb?OF*0a̵h+w((Z p4Y#+_iY8 tkdB;m&}_JW?Me|j\T a|)d]H(*xd3.vU9#Ge'#H)TOa}k.eJ/k^xv~F <*߻Sä\ / 6l~l܍uTbW}M`d~p{Rs'X4tK;=[Gq_;4UMsqjT-gn||VuS^>|S#JD,SSQr6)2`W~1I"KJ{iڭ9D:`^Y^Cא`utN(c@&v_YLhRQG8-^8-Ӣe ֦.Un (4tA䣚~{@.I|KxPɏJm#WQ(lg+Bͫy\ JaѨVpϭr;3w՞3ܥ]DS?ʼn.ˑ)7C8tܸ2EP5ճx>GFiN~r G*OD\fc:^ JE(=Z;4>7>AEɷ7u5)7((@ ߞQ LBSg52LfR2NT򎃋Ol3$bg" Ue[F8$S(:gL8KA:.٠ԽXe}hk S=7(|M6ojČSNJ&WI"f?f׉4Х"^t;]H;S;aɑz?~]'g)棣>Ş$mm!U֜HUI)trARvygiV-!, hru>0nx.W~jtƌ]AقMg]Ѕ Hƛ@Wx-[s;86* W;nQuw͖eaՈDÕzx)iHCg҄wadw5 Zy6d:%hNn}J!q0ȥ 楷H}fhjzGq*| }o-_S:SOUwMMit"4kX`CvEar ^9]V^k;EX4mizC\/Au.Fyıբo"u}vT6tZ[3/4:WSj7Ηy^$$<*| AP@"$ם vؒy}΋"FO? )KSFཱིaa:rNvna;N:H?0R gffUc4dflOI痒LL 2VJoNstwb]>_}Ț~RFePś' ٣ww`4LUެqaZh6?9^2ڄGb\ٟe[ ncOYc! =qtDQ|P ÝCd$R~)Ғٜ$@Lw՝~i"(c :&vGVs>5Q}4ԙP|&ͪH\s^S\ϐk ,s_LW~33ڌ@svV73$ b s=¨8RꬕW1V:N`j8g()pPKY҆IU"[H-ꠏRtL˝q&U2>>;>S'#NՄ O}?֓bCψL'].ᆳ|TB-Rdb?Bn!b颬)y0UsJI!@ lwկ7&cϳ!&o_$(.8m4F*L;y} ǠK1 V@%=UI+"(K^]zfzǪ^a` ~E,QL&h7 s%_u?(;s%\'v&IA 3EyyBULbb\& ݳaJeL8U?iIř= Q`떤Bo~U"ݠ4E3c=dԧ^ ee4I1)7.wذn%ʆ8͈$6 xŰX Lh7\s;Dφx\s- .=K?ΝHRJmʈӆk |+אms`/d$9w)u.i?8(/SG#qmkN%*۰MfkG/FYUQXM˜̣69 MYyKgUʤHI\kmnFgnGzүl813d_?bh&L]25g0cZŧ ct0S! }%n *gF,_:tWvGz«#v3<ׂ0D,MOU>Nǖ_yB s'thVA"IvrC+ ⷩGVLZV<šBiu7S_aqJ"NڂK&}chdwT\mV_-,W=Iy">ŧyWI R9~+#wLj"to4 8f5Ԕ.XTr߮h7ķTX1Ŷ 9u€|!@Ҷo's+*v,_n`mh)Wyr}Ղn {uJ%_ƥN`(Y;H@%NlμeYL&"))ַ/Q'[aoL>: s/Ib$F75ȟBөfK89@]K{pm /it曡7 @3:UT i[//՜Qk+=㧜 e)ʥMW#اJP?gmB.MkW} o쎙 "$Ɠ6M!%c*Ƃ?G?[29#|"o鸒^>3Iy@z+ɻLm䬙gʵ1|;z*xKvxB BkD('4#aPh!|I+Ťxa{#hfWc\D5"<.E]ϻ%}ZZR>֗,8os1W-6 ?udfa3Ja'NLpǡ̕TOe(7*WN Chwȍw^Ó0}L-GtHt s$2Mգ q|K>VϴaWxGkUR{_H򭓭G&԰ݿX $¬!4~mz;ך~ݺQ)!5Mk aRX<| ,j(ޢOp-]#7-2{CP7v[-%t;? JB}ݙ WJ|C2n˥h$2'G5U "gO7V6u|VHyհ3v*I*\/T%DAP Hn*zD5E3gI [QKg%(N|TmoZ~ 1kYKNARyYF f$b sno/-Pw3=SjNnI—>xw$;ck $n䣇Q^x `mGCmu+9ՔCX Ad1-u9v12OoY;t[7|I 錥Ti:³>Iν;/lϒeNqqJ3^3L[(*xaپ0Z1 EnY5C[2Ӈ>I/"݋T|Mh{V"߇1 qϮ0%eyD>%I߹6y]pa!ZB_kʺ=̝Bk%Wd6q`CUqcɈ Xcj\K5k$L% evִ9/R:WU Kf^DIהZ{DCK#[xWSk..R%=G5hoc\k4g-r)nH{gpq?ߠ'!JZvUAUf2]yNX>^黼M8L$h=zZME$9šs5+M5c^%\u?Zojw{S1J,J;ɰEx^؍DAfؼ&M3䣌-cћ(lO!WardE]-/EY~ oHd'%Rtj_0I|{:wc{ BՏ, 9fRݔ}x\&tVV0ǽ2aWD.Z Dk4Z$֍Rb05Fٲb^KfI׊tШ|=Jެ|8gDszD9Y:(&\ ',L|lq#ճPB)|:@1To U|3gc<|sT'! P~0^MRUF:h/}զnZ0*C6 IެzO0K~LuAXX<_۪`mxUWdg˯u3 8N3g["a6M' #%<8%_YMHb ́7=?ig.0%U< g"]Hְǡ8Z|û0 =x\Y/do/ƀͧΒUpDÔ]IVlrR~8g҆Va JlG3s9 U lbvYt Y<#(1D%힅?Oaֱf?zc%NcT? 
LdG /GO u 0aHbD¬H+MFFTw\|jrՖa0I.4s= 9M}?2O cg)thbzՇf"9~afpD1ҽnA/$`V^>1rz6P{B`K‰\#$S(caA>C潪e2p@ԨiC9=Ib"@'tR8yEn= s?沌~{ԣ*Or/:ɲbO]5:q?e8VczL:lM'wxՒL1%_O7'`&]Wzs5ћL\4mXW!,5DiDUQT*[dti3>h|4gr5+EA![QMWRFj~4KƒX3~dxLwk_FxZ0\JEhRlLRZhw?7k4[:__ӊ*7▒gZ&b5ʢˋd9 1ۊ—t DRJڮq'I3eބ.TJCc e: ?I>omͯE5hߺ "(8ޠ|+ )Vz9:])}z%s|-JN!Z;{P fYQ2F #/'e͇*5\aڦycوu|ȴM/,^Ma RS/~N۰bNcGA@^][C%5 Qk(J[l4t;zn2$uG"j~GxsD"zxk[Dq}EC=֤hi#~8s&~-1<*%HRF PiZin;R;Rq閿W3Jƹ[wj>0v=tNH/$K'Ð#-+oUZ Ny:"!! #><|eV?! endstream endobj 593 0 obj << /Length1 1933 /Length2 10424 /Length3 0 /Length 11628 /Filter /FlateDecode >> stream xڍuXk*! %= ]!) CK7!)](( Jqwy׽z=ze撂-pȨj afֱE8@hp.pld\`#p@  .Y- (aPWf 鿏VK6_)G%P#lȌ`6l'Qnn w`x"lZPW;6@ 9 3@o6 v%trA.d~ @ XoN?޿r[Z0/[5 PW"<0  G` _ŃR0tjbp:w`#p] 9y/la߭@ܜuanP%X!!?5@ȃ:6ܿx9AR}8VVVP+ @A}}_ D,ֶ0?ё0o[O1IB'S$ p@WY^Q㟦QKK=>\ /@G ("@`/W% wAf?{Xjp$?|7 ,_-VBlV FIm. ߰'a&úՀj\< !wy" \*C~ !I+ A.'@t ;X]p~0/[7$"x.`K{(´B>"n$aw@v_7'!)%g7$; Yj˟H mlEb*,)#;u#"_ iOY5 y)gMۋ(9 rX8 fU]4?Fܾ$O~PQ7u`Kö/Q-T 8zdؠ9&;refwCZ.6^SaL& Vpztʷ]e;_i_qf X"1ni{&OZvwTlUy p$xӹIUR}dO$2A٦ȣy2DO@DfѰUgk\ŷŨ} W+Dej:d1 5d6r 8U0Ȁt4I|>Ἴd$Œp8R(_v;L^O|5kO}MIjw`.BWD"D}8#ChAG2=p,d`#ST:$G[Js"MqHzFgi\W)S,?^pO)  ~ݑ=][EQ抷#7>Vk~j'DDgU>hUM7RDXt~jʨ`dgf 9Ɨ`~ ocS2|9nٳͯ 5+2֯RҮŝA_/ݰ9pdi:)_l)ۣ@{H}6TaPKu^en~af폰e;yH#: j(|C:͞dm\1nvVaqZ[K+md]YVQD+c8w.Ukq4p0Ym](&D0 (@ +>iY V!IW~߱,`}( n%J`3.qx igO٫G46QʥJ+|!wӢ1w=}lk(OՉƢ`|GRyu͇_IPLi=?b &GvZ5١$Uda:!^d`;m[U?yn,Ӣ=4ViQŕ]İ/OգUꜟ Út[V:{͡g٩xSzLH",kF⧯8^{^ʝ wTF|&S.λ L5thn4xzg̨P=bjz{&(+^iBDsF.~z9KZGMlEr^o8pݓ[m$%2 . KZC͊eɖY{ .E} $4fI,#1,gA=YX%|^{kwpێ|zkƠy_쮟5ӽ1($Y^&6hïYt.?xVqS5[́ G{fCmh=634r`6/LY, >iO>_'VTǟs)(H([Iln11O7U#،*75M2WOG/5 dٿ]|42..gV.(;<QS`eߝe1qK.^gm08Ghm"ʆ]sg&-2FR:ͲPQ\^}^ܻ)I`4fh,=u`Dz411wW~:=5wBGc"]x>kҔu_D7WEwl<\V'C`YUdShC6s45}$ȢwQ. #F+Ir4)/ӊ#F;ė-K.,5ߩ gY& s$_BS )]RgX@=M_ Ϣ*_6)~ICGI^/&;kfe0{Qy 5h&iIɠR=V׏'zHGܘwx)m& '$jS{ mߓ ys<1oyM8N^!p }h"m;T*_gM䞧1$qZ!eCaiJ*TZdE°xfOW=C($@>'o4N+h҆nNQg{;@[ROJy{V|{hlZK*ޟs=@EE"j E7K8/HXMU$ _ JF6Do^q&qqOcdh)?i7۹&R!I((||Pڇmat剴+6ƅCd_yLZ^G˹Lx!'ki+l8n s\7TZfwBdSg~eA1x&(=ƹbf8 K|zg:w9wdJwdB?gB8:-}s&՜-L's8XWJŪmS }n+~Zoh…ZQc\˟zO<"Z1a78q> "ct~l55DX}>`ߡ-AFBNIJA=}]]6 \C/AuRă~o.2\FݳwN8hT/Q_p ="\sj6~T']ڭ;7yz3CGHGeQhlt BFi8 d?d~}ՠU>4*o`Uɛ:bU6Le2۹}FEfgbTݕ!k:xXc<q uG+gi)T:?.d}<<O~39IY69Qr<^?Pe̤Vvz[kiz\V7x!Vb@Y+lȽpPȈl4 yr{iOb34*W"{Q}Ѷ`So 5[;_8 og jھH)W䉩*3pI8(?vWlxWS()M` ]G*LrXC uT(6A?,oP&!$ro;kK~ z<:k{gUK@ZK2 }s~5ftXns nT>SlI󛤥DjEp۩x*4đΗ+v=۰X=/~"l_vv#d5#oa>3T/POqF %ʚ! 0@.,".Hmt?K@-Ơ3jh5W.#,k!nQ'He_{w`6f#JV}?ӑuB/QYڹ@<wH>ZݠJ,wDP =/~!(zNjiLE?$=C VSuz8栖f%֗?NrfdZw*(3g~ßmj*9M %kyWڸgb s_J7vwXEgr OGCdY֬}TS%|mBjMB/mu^<\SgW)6|=2e#TR R@5j4V[IF,~ņSi!n{Ư@M;1&b\ӄDc(]4o- 2{_gw8WvϜJGEyVIpG 515Q|B7"ܖg_a\l-`BO*7\;Q[]QfSk6Lj= Nqz6t #1{K3zDEXVU1K'7݃QB1)1.N܎}z}2>w%b@k|P/IV1W{ 5;GD'Nm@]\abe&qF:L1];ޯ`M:*X28*yF{[`^&EgR紗?  
ζufPz阃pDq 3h`, CF_-*k7au0Iw8,N 鑯[Ӣe%Kĥ|$xWٶdxHỤ8'ꎮoSfml]Hqe2OEZQsŊG%}μ wYNCh`U5 }' K攗T!׷ u!JL45/Fz)8| >q]GQ$'73>Cݭ |>N0=eWJ)ޚ0l9?%Q@/Jg||9lE<_T:Hb"}+λ2'˯j^ڀsꆈ1ﲢs3h+e2)W g2ExaoY>tQGh`$uy]$S%P}~i: 7 W/ 餛t4vD;0)[#5qiey@ nqחyďۑMAAY I mqj91g#t)y_1Ks&t_hp_vpwG{43Ep `EE(X8wC5,%#nߡ9Vh=64h C5'bo-w,!bv ~6`*cx]Un0Grƃ/ȟm_Vq!oxB-R^|7uJ(Y1tF~t1A[v nW?C4YEi)>/;=n9F;wsְc@cIהw=?TP5jj'~ ES;p.zRX!F(}]gT6{":%wז<"JLT5΂C67G"PysGu }ɐĘeWaSԹqYnF:X[?gŸio ~q}!jUgtJ>t+>5Ҭ$?+[j CRix)Lih(%[6uH9zO1OT.&8ƚ&ezJ諄 Y^oe |z.ja6-cc;̽cWo"rkJwk:uMݨk,>0P蘥,g|*NolLV4\T^XJg '\.X }lٟM +1sdo`v$^˜ 3T9g5_?<:s2 1SLUHJ U+YI19Ss$%NdOȪ4Vcѿq ?o*G4;\a]KR~\Gϴ#k~?7gU] /Ҍ]h=T5 44,)hJ8XpnI3ʹH4YOV0MQԟ'x޳Y2E9#cٱzaav)W\Tª=fqȡ= ./Fz/ tr8՝(7W~#zqiDFC M)ɍW=@8}AmS9yM'[/2'r:=5kRE@!}:El$j{0z3p)r/")=QMFؾϑ5a1u$ `tR.+'NŏPY5#s}͗MK+kG@9Wpd%@-mF2w^CScdԷR>#A-F >~ǑFA:dۄBl9+\ fcHQJ86o&*aI !0zGhdqxh_ lAl 8A45%_(:3FwD;۹7:R@kp37<ݸZ/+'h#93[S Mc}|9j਽kFKCRs5YE /߭Jޒ_eDO^ ۙ(ߒ'$q%9n/c|Mq=<2A-Eyr~߸hܲ!._ jn%#ⶹBQr; B-d >7N1AXSywF XzmFZA2>9㘧Ex8XlYf3OQb+jYNYӱ2I3% Z`tcRN<]$jWt'h:OnMK38Q826ĚNco"e8wD!SL+M?,3~w=}۱l-O=vI"< ž!̀/ %̝ØE>]5KC6.J<;PJPյqvUGU>{-EmH˪Dē:DMvU^OU0r_}!B&n̘jFit䂧i}sx '>u+«G3Ԧ\ELu6=۶K_41ω}κb͇#Ԓ"|nUna`$|P~SG VN=Ю7G*Q4 ޱ+U<Յ2xҍtK[5I=8̴{s{vم7?wc{'+G,u!3ڨ#d)! endstream endobj 595 0 obj << /Length1 1412 /Length2 6224 /Length3 0 /Length 7180 /Filter /FlateDecode >> stream xڍw4k׶>zg2Z(Q2E'Jkh(AtBNw'5k<{kgac[#`HO$(h HgcӇa `(8!W}SH bRR  IDJw5"0>nk# %%y`'+ A#!p_%8ehg)~j+ xv. suY?W4!N?vpqWG8@]!aUw@OEr!~x? ]OBpįdtr [ўh^ 8Wwbu5:P 쇂(~?\ZCQ?S W%rH_ ams k7gLEʅDA H0O^ΰ_A|΀0? qhW7m p(Trl~W L@W@???]1pU xwP^ |B @RR]F3 li鯉P>D^psS(z%fV_y)9:s?q⊸n+h o!r5`p7!Wb#l>H8J ֆv6nA] p@]WvTB@?e&$*@\]!^+. >Wzy1 @R+;?]-I@@oKB ~Yz_$/a0O| RZ[33g!fc~#ޠu UwXQ/-iisFtl4R._n׷f[5ƶO<9rN= GExJrC (8&aJEǀ60XПMIbI]MRU}njǞGSZ0MkgFG{s2dުLYO?RgV0Ew[1r? nYd&_~=:͆AU6zM4.R%1c6mҭi1B+R'B1_lVðDK(Ή}pNgs}zؽI|o/6V}ZZQKR􂁵 $(4x#G`q`*.5At`i @DNlnuп}FHGH(^)9{{Y^vZuX&<~ףhH+Oʮw-xنRLM`y MN1 'g&}v=7:-0tOWok_|ƳYJfh8yf  6­YqܰIPRgT"š -ąnwM?vkhZlu5'%+{%8r7k}ʡo=c;͖Be}7?c>^iXu o6a Ah}KH،s(Ĩ ~UaZnawuc%j0/@ t#Ӥ2}*qQuNK!2i K?!$-M:YHl"~,F>ۆzBN1wXաSv,tBiXmƱMI˭nK$)ô[$z!+Mȑ?rM |s=#?\}v/˝)xb}U%a̞*Ϧ _t}-ҝ2r.4|Jy]F0zlm.6D57&\VJ90NLϛI^/: j}]Qp_Y;0J2G;baFʭc=yA~RkŤLd;ó?lNx *5]Jv>C>nWY\@H,p,!LJ{AQAu`{x(Nkʶ6> _&jR!x&Kى5Ebtӯg0T֡$;?xUNxIAJH6V[6P^Xy28+**Ir"01[:Gͅ!!Pvh)\PRP/荸gM(g <3~ժC^]XǷ)͇ )FHaE:Y_3lJɲn,~N[! <9L0,b( `jڋޏu)^ǁ̇%YjPZUJ_!pG ,cs̛׏o*gT1U:p_Z xv4$υ :SJdj)-c!X_sHb4&ىPO&DĂ E&xmpW1_LH"Xki_b<ʥF>%S?mB$X|KӫӾH:XK8'{'$>'w튕7 {d*$Ak@GuuM\Vm*8Q]*&Nܺ۳3֦ ZbwVb"P_|7._fm)o{,jftpu/5WY-{Fb͌u ~=/[K=-`& 9If FVcK:^m$xT*H4 hZc1pJ 8*J=+BHZ_gvu{O -²w7ImܐfTl&iuDRWz~rS|j&59l#y:GG-)En|HEQW52N5JCbfq%&K3Xtӈ󼆷xbu2 k}anz:ckZj#"g5N5\> V#.D,S%^/I^/I?ֹjfiAfnM٭iVP6&>emqʺIߴ^W\ 0*4;`*/ѡC~hW'.תIHh$. 
%BEsEy*xwy3jBڷ{ ]z&1Smc6U—:l,KsۄL/YGhv#[*L307Mtm¾;y>,q>+F63#U%7Q$΍Ӊ/4e8IoƝ٫2Gˌ~bvq- cxU!ǤJ ,Ҍ4KTa^6b^5֒%vI 0eZ뙽U0x1Si97Xnu ùy1"IqO> )ONJ{=Dܶ/YeQRJ!’ɐ'rM9۝Lt'S'XA7k3J8,PgǢ d=iB4$$oZ/**>p dXCPԏ|V@(62$7MJ"n`.Df #&Wkrveք gdɷ_ig9,U#ҷ*C%HZJmiARYS¥bc<H>W WX9.VS#Q-unU{E_;x`U=F,aFmT+KW-[I:gz٪pFj籣 nKFpz $nǗ}:,^ӼsF:k(|.S,UUҬT&97awR9}5sEe 6O8,xp m?}\Lg3<φ8_q*ou" -Н$| ]C_HElݩN"\:,Gt.k6Eͩm4yHm)$@=ts< ʚڢcrBDv,g{Y[^ϕʮ3Fo)A,1@UĨ^ { ~|,1=8YRxgt/?Ru$-Ў|^f}M(Ηf6 Lkҋx \bePs$寮i ):b?st{h #.rZkUh /%i#r4vlØrTvUxpR;,32H:f֒F[Rk|;O k_8#0IUatnnSmC qG ~DE1NU"LT`er)e/6 [7")wٙn]yV_KOynޥY83nv{ZC9V+t1QZItgFz|v ۆ{adkxp:QF t֤u_p'}w.0#V>n,|s735!M5}-<~V(:~Y]R-X>97w(U7lu8þ ȸ.:~y`ÚҀEe8;.Ow0b^u甊9vZ]h^(kef)͠{^$+ayܦk噓Ԓ󫘯B(weC8{~v|XLY-\YXh<&T &__:Yg Z(4+ wӇ-GTW9v,͓6lsdI+H:en2HUBd*oyJ $dN^)MK)lcד78D,.ND`X:בQ8U+#.^|pNDc2%7uV;CTHc>d0| [Hzm9\o8JT Zm<+IiGT r± +A] +Xҡbq9۰}wMSu:%iǪB]!]#r`s?d'EӢ5` endstream endobj 597 0 obj << /Length1 1385 /Length2 5961 /Length3 0 /Length 6893 /Filter /FlateDecode >> stream xڍTuXIHO$6fñ9BZA;iDDAIIAJnз~dz纮>χ!=Bbx| 0 @66#8'`3(`0ʧ\AQHE$ P BK p{6PQ>h_@N(,..;(Cá$Pq\uB@C*qqCex^pC{쁿@\`/`90^4 x@0U^ku]a?Zxܟٿ !P(#p Ň!H_@u+!@y} j?7tw>w8ז\2^CbSaЫ"Q^H?ik{W~c$'A Hs¼N~W{c]Q@U`p1h??`0`p$Wn po芄` 7+٣/_GP[S+(X^Q0W@Łb +ANHPNꯡ=dB0 ? WS/ Ϥ@Fp_ '+=hTo)+S@T!tD}pw7^:A?ƿ$#az(w  WJgЇW+fdH(E4+K_ @~>$ sZB~ݪ P4Jf/_oM`0(`r u m<]"N=~ ;Ph}өjCp iE#4R;gQ(h]g0~6B '@&kn;~ў7\/CyM3U֨2oLn ]_:e8R5LG`9c%1?¿ѵ '8S9u}6YwWqׄi쁵RT?mO?c5~;3Y ,Ÿ/+t<-yT(ÇibK xWŚe-lx٢ mequpu}  t};dZ[0Nm۵RзqNO^2PJmJԧޯק{3];l{-<$aO@g6jz.YA Pbە!]hWW.D`Se "&B-Y7 ˬ:|,?~[bqA'ld yi;BnV<1bmp XDg_4X\?)xTM@!&>S U2TKx.'n*.jDHUT1=yzm][־_c=.4$y 'RUHjNܙJY&-P@!i4AA'=paVhzJ˔xWTtF6wrl|#qLhnRٜ躰Iyq -Y[\ ! u%dz˽] "b̧^MӸ K!Htqgw3%K3L@zSYPMB^DsGpLYC:4#DeJE%KOTq 7:wx^2]|$މLrqzn YqB έ7Rig.\cP]Hb(נW45{>Ln!;N yl9'˺5řL &1XgWqc3,xgO !3[NE bDOiaƤ)~r~+v{zkLNfJ%vBSu%u0chd \`vPr}')~֑e6L袂 WF#eu|`'"xtzPp"3lTd^v }tb{b_cۖ/gэ!~hT@zo>TWQ8uC2%TlEI^R<6咎–?.] L,ܓ,~deGS2Ikg Na׈u"oHỌ83ZX9Ckn'~ `,g0#&Y9st{ŪDQBvLekxn>V3]8樃iZ}s 9mK3hSSӍ'n܏RoP<>4۽gE=kQb+cCv >IOJZje Сsqo Y4=H_.`[FkE|י8\\1@tay`۹{!hR-8oZa|ǣ:Qw܊:XL+&yG͵R3l ^[+7ޝKMV,# *ួqv: س-o&:;߯s UW6ͦ\hfH/]D&-zmώ(r$qiKC$.tX+_M^ɅvE}⭜V3kvb̺.6d$9f;wFo}EGfJ,?$xVZH./Մ."N$6h$܅68cM* ҁ.gmULZ -r8%M)=zk`cϾ=PLe]nMtVY-Uɭc6\V/V֝.fg9:y6F<_@CN)&CCiumr$~ lSA~ "b6[2N4#kMO~'eE:ӵlınu];bl( ^IJ\KgH^*;u菉Xxws/YG [a'3=Ƨ'_^y&u gH;1} "EaC/79m[23ueaxBZϨ/ܧnԕX[ :B-3 3h΍o)SOiE_^|(-\n3b_'>wMtĩpl\kLeS*z>!!"K5Mޢ2xDc̰y慯&'yOTW,/0uic^/W 4FA3+ft$o=Qj OҰ&\S;]Z w`ePYcWU {46CVUå.?T lLͨsrx;< I>z]Bw;41)l_sx($XmD aM1.g|asn'D|Ne›_S»0>Q{W&Oū԰x?GmI4w.8xP>?%. ?R&"(|SEFQkFejōGppk&2[Ѱq "Tjح 5rdҠ|> stream xڍwX>!C 0J;G+1ƀQ҈ Ғ]M8q<{:|E'o h>A~$PQ VA a~HnG;v؍a(w8!E ؔ h Rjx8%$A $*A<@m~s+"]}Pp{4_@N(PPBBw8PC!6sTB`$C+*) qqGex^pCyl@\``:vh/ Pば@PNgDp`tq |{UG{y/ xB w>ݡ(+ڝkH_i0UDhw(>{iGUw+ ˜ah(D܀0o>N_fHWf?Ce ecmECf۫IDA@APLTDzG: _Fճ4S$\Ab rCG Qs?Sw鿲dT<8D? 
OhA jC0[{$Z aazp4a78g8tz`B@adu113č\a%3Tc$+0IڰHl$~e-c^( U444fhQ3Ho-kl: Epd/>Y~Ϊ)p H*!1E{7 M,$rxEvf:*ŃM۶wc/ _sąΒ|5S5Kmu~ƌ=t` M͉4D zTs8a.GÄO!tHxd)B3gNOkJijH'&lF 嫡 /ҙ-X-?@@ 0$ ~LJˀ_XN)\JB훗,ݥy%Zb`6 _K T@%׳YFFf^9a?Es4RrJ]|0,~gyDpL XmgvW5jQ:&^QPO鄲wmN~ԧ),xϤˬ>JۨGZMTxطWEŢ7kh"Ljp_=xxI Ȫ]&e.~@ieI^8MƔ&LK>a+SIiheGO蛐jAvMOM1Q7aͬr8#o 58)b²83[] b$ʶ y9u}iy]3Pa)$JeXطqwdP'[M2/+KB)L^P",euPZO^煩OwayzIvb`oq_uߨOZ$($eJyj8%3pQXc6~v ټEh6 &ZsE)5_LG}*4>/Z 7Zdpuze1Mُw'oUn>).ZEв,%m=I@Hϊ7 Yd(O(w QOMO[Ac]7=|}<(dDSP7WUJ1@h7]$zT#wiT/Mpj޶oy#wTDiT$?L 󢂚y]a=2;ѧJԍU9Օ+L[@by g1V@#Ƀ2S%Jo,YgڭRrjvLE(aKL]7=[Fl.D4qÉ!P2QvMVg ~2yl=W=CH¸KkT`Z*akguDibA̋F-_83XXNHo6߭Y|Wdi.⑒RDcQ*PkIDU6 z5Sij.zjji_s~{qg~*qaA\>msy㵠 0ᚄķecl8ʃW(U2,8>XK'1~8sȸCRE꣠Wc @O"1Ss1jc5a R O+捖I +.m21)J}u{]4+fKnp}6(aNE,w2FSNvׂ/srX9Uf_hn0]|;qQ=]9}{]ijA5ys-́k0q93ȝ穂,A/8<³VdĴ2`5~-ާJ?X>dP$D q+M--LhY2)H- :W[9b Ӓ {\l~:sd~+£O^AuHAF#y=$ fzs2lWQo64.=Un&3GoUh, V.۷]dxmed4iO<ܩAMz+^^ |Ѫ4W7eu1;<2<&݌9|şp 3U{Vⷌ'RxIkxfZ<56=I!*k }84'=UcX"L<"-n Y[#3ɗz3' hAɳn$/k4eΪ6.IgE@ԺTKš~~8 0E-2X?Nyw[hea%3ntpոΏm\PE)kwlxWMEэPE9SBq+'F 'T}ȳdH.kq^Ys vByÌ6%qd>imܵBؽίVRG ,4w(Kd1$Tv|#cpR7',d,r 'gLO4\xžLyZʩIe  nGb&j!.z}ƛU(,h_--$0fDfocfaY)kMQ>JһOAɚ:/&iTGdSUn (6HVi>EkD {$UpYLgӄMȥ^;cc:ptA؍Kw/dݲ4C*Y͓ 󪓱TFz3 V26m*c0O➒@R'OH1} EVv_>n!,bUm͠0!ҾSksKSiRۀ/f dо5EFh@m7;ŰݼB_fIOAZ#|̈fY|$J<ߙa`6HV$els|2|g)mvMVˋ 2(ARIǟ ^*epm.;dB?_X^?㪍 QЦϹfJm ` FДM#On>ۢs?8Rng/'WI/I cv7;?7 /ް8F$Yn=Ͳ)="14\xt}ON~)?Sm&ueyR ̍R !\W4jZ97_IEN[ J~ -i|onQLYgCI|ѳBcŸ7X)9;VthvUfnUohMGUe5#/WmOr2 㟅h $i 'x;!ZK.l(ΰL\wNWi6ξ[!GS<ѐdG|E,[%Q:;GxjK]tх'w}6RY?/Rx~8Ǣ9JAdfv,ٽk@*'k40  * &o6EjLٶ#1hZabjc/ 7T3v5}L̅BR x2`0RPv%$,cםk[BRN Eh|YB@[xBHH{]yl.w2*mz\Kþ&ϭE? =eBUPz9u;D'm:/o-gbZ-8rۨbb?M<_ƖJ?Zg >:D尢hS`GbDMAb&*K˓4TKt*]]dXф5nߧ"R:ZZXDCZܔk}fkWJڼ1_ʎi=S$AJK7 /OoP'np◛z!_ukzÁ7_! Տ,Y,̈́!o(fytwt O_2Q } . -JY 5KfQ&Lwa!qe$.hlb7v٦';IjYàw)?$e3)vNKVw{RӗfS[OB-F&'_2?o472p8*r K:ؖ0G`2%itq` F:qE}N!~oZ,umо낵 {S׾ $H@dr"fK2HNWS SHEUKJ鿀f}urDv:V9 rny.[gD]| endstream endobj 601 0 obj << /Length1 1607 /Length2 7328 /Length3 0 /Length 8399 /Filter /FlateDecode >> stream xڍT}74cm4tw# ]t ~s\>׵0;@*0(G_mh.ca1#!q<A%KCG0%{$JQhxBB Q ~~ ?ߊ0@ h "vvA`rEAp0 жGQ! !}]wG2o0`B^ G:?\0'=@0EL< 8`ARKrc{C}Pgh"}{/E{CP SQ=^F_nP׬ uTHޯpu|yC>9Np3{ԕ ` $@_\DDXz ]~0Q5{<N2@`'a ហ@' p3(8`ɏ'kaP@LO`>Aq?@@@@ ** ={cO=Eo\Pk%(1|TEFQ;~^>T0G 'ߎQ`Tz(/iڿϿ-€a:jiyưV׺s/]sT+9V?ԛ/x?@yt5c՛DSP8)v wQ;OfFo'YbZ^ m\tӅضJ.3hy]ks+Re z,8R#eBLB2~y)CTɹ4V $&b ;ѕӅCB:tk Jn>3G%%څ6Yb$FߏIAZ՞]kl~*dy]|Nu4DlK=Ǩkt5 :Cnޯ/f 󝖷#}'0#:$Μ7{HL8Eusn[ļ V{!".%D⺍{}$9Oql4,\Ju4MI; S'+ ?lʞX;H}xo&_eN ڰ}b06g򠿓-}G ÈSX1)QMqyь~+MyRnR^փ)ǮP~vCI(##i+X{s3k٩5$=q˗nÑh=ZdEp!SdN a4i4bt1h:;n)e95OPa X0᝛rtxqaӌNyڱ [IunM"dsPh}y 7>B,|g[e^<`wȨqۛDs ,w%IhTKVR NJYW-?sSUeb -{3eď0nn1ܙp~ؑ KB`B7M%hx5B-BxYzU3-mKgMyY_q-}ڋ#~wc#WOjuCv;Q7yQ4,."r]3#$c5 schs2/XV؏jpqfFp]=-nh ~{PJ:9Y@jG/c'IEoҔ ^/`hU_Q#1@9 z ^:GL"%Gy֍I=qi\۵웭&3_VcT˪ !N4V?T*FiJػES-7-Mi[F+ݡ|Cm_У){˙ZdNT5ʹN$n\(1]H*O,q*BvJMC,?O73DMJw-68],xkh>9sq;g;~w۷W %dE-qf 'fc `5Ƈd>mXcW\?(i[`_6@alR杫X +¸|8Neᇙ_` ֊2?1Tp'8_Ц||8V޷_e;}85^rW` MllD /Yh"+/i J.4 *+Ÿ.U@(+T_ѭ1Q$ߕ$O+.-~VĪOӶB%W kSȥꖐZH?6Q{󼺼86NSxDWeǔyHL=HC -KnΓ{Hn|$DëkjtjC-ޡC6s^0}PSZ858}z_oEE=ohP!b@Us^ϥNe ^SE# )'m\錴;vPl7kvXD!ʞ9ڟ<5D5zui.jO%ƗkZ0OŐNul]nC ntY"ޚl>QQ^3zUh{nRzbŸj#e|k"871t"hu;VôZzH$g%=9v%/gyu\U|R/ۭΗ$b[/~ε?O42]^z? 
bw0a6WZDw706)r4Hd}N4j|w{kM*Q~*y ;8ր'˪by{5X^[쮔ЍUBgL'SGƽL>5좜̂H>-dM@5 Xw8H*nYRWT&[qay'Y [#\">)s:bZ0GEq ~C/lf\ʢu':X8َqb֧q" ~!kK /v5G2ϸ ;fV~\TE`⍫i5ӳ^Tq zG 6Bߺ9 ՇzB1W nmu@?>1d1څ)Ү=͓ޛmCO|ಉaSpҍ{۾Wi'(j,c IWQk9[EQ\WE;Y=deY81cETD|_vM}KT\)#@+F ?0A^Q,'SW ]JʳO6Se"$0~\1|1<bYcT3]W+|cN^>3_92VtՍ|b>1{HܭIMYک${r/~ʷ]z"y!*ܡug^S5Mwo|_Xg~)3̘ ELG;p9$黾2'Q2ߓk}>ܶ}B?DWAvrWILQ/o{Dԭo"TG~?x-gJsե ă=}Q4 WoO兡#*1|b :|?g K7j> :qLC*oU8[OjOT[u9R϶~k:6)!$4ܑiwy/ZY$+M kݶp<žRZWA M%YpwnPErߗsD۝Ys_+pjtiA%hEȻz%ǂ$c01Ʉq^Iv{?t[]NQrVgT0ţi:J9Ҍi)HNcwwէ: %) x(u J|z%)G9/`laJQ`_>>% 蝟U|$Z" Y#g`cr*B !o#+ "2@y|H^kR8K4N!W] Pbc^~_=)r5ޘ5ʤckf2VL/K3t4g9"9\Ut &IBݡWO(W{s3j6W=S, ";3#Sq"3";-Et|\)^ѳE xP8"UV5yuNOIΒJ1U,("IR9]0@DY:&s=[ZV\eۑ & 桓zHQ1]?HW#D:|0#lo%/Fi0DS8> stream xڍtTS-Aޑ.BPP4TAJH^c}oF89{^k97!PeWA!1@ X mh&`!2nn#&6("(P Sb= h b1wP/"͇zBP,wP>x04.p5"W)+#mQ..p$ƝWJ4=vПuB~vݯ!l=\A7_ "fDbb" s*o \v!;8 ` "` $Ou, ^> c+/[? Q43c (o" JB!C\B2zP6UGڡӿ:+޿ Z8?"a[So U%nHwwC]> Xz`Famo icX##bBD"p nH~՜Hۂ+ pw?UFP|&$*P20VNB?֐pJ( 61`BVaꎝ_8,@ G 04kJop$ v7ܱ:{<puh~%&ɬ+Z陟S17Lz_FN}|2QZV8>;abOYOgЂcɟ .Z%.oTiܖm83NEL%U" "H^"T)g"1v7+]fGXy$-3g@J3}rF;7ŗI7 ]"y]' ll\`@1<"7{]citPW3nkMhT](WQ )OnYk9Q]9 5ʉ59`mƯ3s;P{>3ɵQ1kQQij }Yx|Gk̏hho:}zz2 ~B̶mEXqGybQ\M(zCvf5wܦNukPJp3xJK+ЗTXY^he@đwXadvlZ V_+Vu J2ςFJܪYg|n,vmu[àNZX7:UxIk1C> XZ0j6d v2J+b|5g .ם/g(C ݉D Mˬƅ'jASY3~"TE9DL&u{m^_,uOA|"ICXzzSf1gß#>YZ$m|pg<"s'͋l`BAdC7[RD)9QAu\"jMz$c ^ xϜy,i"#kv5(@^~y/Y |){%;@KUO`b*0B&%kVUݧPj.btfWd/y2@v gb3jM1sEi@nGTO# ׆HB[O߅Sh)P{y&D*@Dk.cZ4jY^9Ho8./|I8?FMTsZdCW99 g?\Ԁ\VTy+›Gٳy aRGk$nW wrytRbҠvgRՌ5J-.Ɇ<Q>/ 2X]n—gʢZ KOmƞY/#F7pCWϛ>P*`,7tNR?C$+w^gaؠbzS;qjȍ!W!t82cLU-13&8g },~A9ݘDį5'h\( zVٗ_<ןݼo넘S֫W3$;bʷ79R]Hx5Z D8kwvޱ.  DD7li K_n>kY_4>_+]b)f<82=oJCUTGiB9cYozfr/˝g^z0E$UE8qT" eo+>YA~=DSASrANdcQMwA6w>hI =yQhrF5}(?;+O\۬xݘ–5^rlYhxb,m{ʍ-1(SWe*#>7ܭ?$H)a2ZoC"[џ=yk"Fgi3)L;~h+~c:̼F5'.:>9z0G>y?\³4Aq!4ڲUF4lq1Ie;hFVM Ƚ} X7eCe>6PNĹ{nPe#[o0C6/GHE{)`̵!\S0m_q4 y^dvrf75WVuR>ciYYx zaYۦ+]ڔ@,d'm%`)w<ɴQnj|C b֎^"kQ4jd. UUB\cm;+>IM)[N((F]k9GjMp9rk:&Zeu޼}L&M"#QP=XMju}6&).a; օtrv'U-WB+#w߶vS- Tm"o8^2hR2 mp]˵ e?BpjN Ƌq߲\k GgEC&7X4ґr$D}xk ︈|_ĝ|ߘ(dL+BblWNM+J7>x*ÀVaڷ>ObS \GmYgY_=Pg0] )]G1O˚`>&ߌVGo)Jlgz., mj-0<8Jj^QYvZ򁻆MD ̿$&Bܝq բ)ɮ8ݵyGoJ:u(s.Rv8M(fri\UmSlhٳ- ^hoY 40.s Ht-;]Vkġ{-.0yHs#XDJ5ķsc"K kQDoǹ%,c)bD]9aTf&itKI 7RL߼>A[[LXRC57C44N mQ'iw'(D/ou`kо_Rkt*!GL/y#险UhuBѶHmϒ%?ls zKMP'¬Bl3{ˏԽ6}ɭAI ~aOx g$YDyZZգ ƾ Zf)6wH\>0} Zg;co:͚[}w%%'rGefg r@ȡ1t1vR}={NfU#8ڳ \h{opu2Ȱ?SXݫh9$xuDFk&? 
eٗQ5)Hd<HO-U!?7otgNnNOw= X,6r1%A<ܜEt9*[Ž-H`0VW?u؊M҄adLmR6+AYձ)mW:8%%Ww ?Q>TCh,MA0v3onHQ[hVkPeMJ fm/K[2ܹo+(vj pFZFJG{h +Mh)xz:+{ْ>)ZԈ-Y+oW;umKسtG&5˴)T/w7V/0dnG!&rz4aM"o HB 8&|q"j&x/a@zEH1\;_#!9DUl:p-vB}M@^3}ʛ#Hzˉ$ @ЕML;"HGd?;40hU~N9Qyq2{z>3!m؏S:tHɏ/I5T+NA^yO+/o0B^=,M驓v{ڈ׾̂qO.۵%lxd B CìpjBl*}G3;ƺjRqVR;z%En)Lߠ=mWx5IU)\0ݏ.'T);*[_sB̟k}-yoX/۵2;W{}ʴN`hxjjkpiϵ!Ov#)sL{p[J<E1£ QY f/<, -[Hȉ&ܹGu)rnBL:˷g/o蛼rʁFAue'S4kc.- 핿 62\_./h&ێApQoOҦxP,\MҲc1oɥE#7@k*c?:1FM7>Ѹfc*ce2q>bܲBfJcYd*][jC/ H򗉣N좫4f?19dt~f9z}ŹBfB-z劕lL)xw`w3~+p(AFx.&7L;J9|iгQFKҀWTIzj'e~V6`/Jg Dyξj} a؋SzyPƺ+Yg-B6=Uչc'{Wwg|;)k dv4jfn^3%dcey͝@9i9-##"Sc\;4; =۟] L?w8q(u#Ǣ39f#O @&^\ZȾ`5*{%wZu7`1|b ?&M#?Fv{ӏx1V ):I7yɲJ s{y Xd4p]4+AV'-M59jXlg 0ū`գ$a=.1 4#ܖ {EUB' 1oo9cSЎwڙ:/Y/Uf]:]~9 endstream endobj 605 0 obj << /Length1 1430 /Length2 6282 /Length3 0 /Length 7266 /Filter /FlateDecode >> stream xڍuTS[-(AtBU:ҥT II * AEDt^)"Ezz{7{㌑s\eל;`h[]4 '( Ut̥`( ܾm9M,_*8T!8.rs %d%e`,G4F qG n]0{~|y@aiiI@%$BP@]8P-(ݯ&`n.B&(\S/<`i 1i :Jom;qAMvp Á8 a(h Gdp?k1O%=a 5^04RV26ee'GP  K%%~NAUXM(Z1b8xug-ɭ`(GCo E.讛o3oa ^9I @]D !ܐmABPB,(,ػO8L:Ԝ(>uuX^'T[.tFA;hJe$r%|GF/KfMX#l$ʯCkwد~X6q\OI~`YBl20a FQ pӚBzs!?;Ee=/YaMQNLb2$%Ad[Mt㑌gArYZ)i>#%߂d5Lz@νT|Sb–R׬m_L |D梡Ro!jd~Oп%#3tSy80sQS,Hq9P) džmuQXh qkƗ)!+ ]s>RUy@Ig9b-߸am[<+~BC]2pJn1ɀ|}C8dUx}L`P`(j)㬻oݻĸ}M.ϋZy*iF"WN#lLAaM~"+| sl$&[7ȭ̑? ^x5daskN,د$ TgZu0f\+f]!&ly(+bZ{oUYHM=u4C(bn 3~q#eub.jb-6M#i1 w~ޞ}-mMZm}iu+1UH;;=%>#Z\¥TG 0T^rMr5 o?J1~a0I]r^Jg:ϯѵz=Msa3/D4fYowTM pU9uMEN 3mVkOɕ_Kw('{몊H.3yUd ER.ei;m f*+/,a~?%zOAKߗS X˶ ⫪#^BotC^s5^`?9x@upj3jήޙUzyió@lگRޣXjo< 9f$4̅N)wڡ m!<WͷI<7:Xv$u7]o =` M6n60~1 {9:Hڛ4IQpc"^yRTGUFZMP>3ip']'NJ0 ^8 3KtNk'gAtq7}`@yQڪuٳYgKp0eHV2h"5^vNwًb(Kiv>bڻ,3 /GLE [҇/~T[3D, qgqѤ}=G?lyYguӋԖ^6٪jH7>Ie-pFEB*!pG&Ȑp-ҽAB9*ZɫXCdRה>aB:BA3[1}91N0fc@c=2F7rC7W׵BmLFUC3}HYʼ6,`Kw@sO;LL2ɌKۖC:_$:t~͋/JOpX㺄ObgT=;o e'}*/E.d/NJ/.y{YY>saMjHB.WnOm!~ "kCV؜{:`/G̞1-iW4CfANS)KKSO rH1bjZٻѻڇ6yN@KDgKt^2/v V[̏E%q/+gg/Oo-lDx':/c^$&}k"\țn(}]K,\LXlrZri}9 p?nxvJPuV2xtrZ:۷;-ǺV+uyRGLBv͑ 9qt^j$fXI4UwϔcU@&'N4G8V/Ժ,f" +lЄCu#r&_FJGnǵI}R#,3l64y\l(Π]Syk6쥋\޹Sa0(g7k> 0et5>Yqg^Wz4C֔i]EˁyTk$>aYKu8T>=peIc/{7g* 2˩5&3Q~TxVPܝaa߲`ji^c|[g8QS|t bxq0# \8+:#^8=?šل/HoHYOMx>}lɧR»Wnq֒!dsB7[ 'igv/GID7_ߦ#!~EgLb܄@}%Hm7#tѵp =K)G1Қ]|gE!Ǐ=ۻHe]tIt;tMVgxw ^ΦmŖRO"x)!erŗu&ES1,3f1ldG QTw*.2#rY7Kz%^I2*O!YzW[& 7d@3և1|0DQ*,x'QĘ-үֽ'9%|<؊Qn6GO _{Ww޼$5>֚9k8Aw.z VW7W(ލ:gT^d_ ރ$(9H|3l<=~q~q$cyOM.WIp7ߏ Gz>Ah!cԵpyoK6'U^&~h9~XU@#8W5sn=(tNa$=?fcoB_Y+kUP;LUp2Oߞ\cmɻw t9On7FLՒsrh®(ƽ}iz4 %G_fKUFҋO$(2E;fӝN/T) /o1  Zq z~+%& W drVaۂg޼H Ruj^OϏُcM,bC˘-N}3D"|l !gTn\۹#ab;VợUXhluRKv eq5W,6l+2{B7{u&\+-q G Yu!AMRCde@둽q|p9 ]:?(hx!7l܇u;Hqy}P#iCΫbe< %* aQҐ7*mk7X%F\fߟSu7N>+M|r|yqA)SKH" s Su cٝ`ץUVl!euu6jMX'h ɘ|YMˇ@Z/†.p9Vk,Mk-0[eIyFu]ɳkWe4Е :;( /ɼy6^/t"\\{XB9a*Ǜ.6=6DqGk:b#hnZܰd5ZQޮ!2 Thnp7ZE GyM[ (NPYx }1XjrDr"NT=YلpՓݒ/q(?S8Y]#m,AaS:|3a8ΞODI^Q1v ٤@ endstream endobj 607 0 obj << /Length1 2264 /Length2 19219 /Length3 0 /Length 20560 /Filter /FlateDecode >> stream xڌP[ #w]Ctwww]3gfr{gZ&WQg3 fv6~,;Z9" M&af\=̜fn6;;㨘Xʂ,}Jv {AK 6@ݟAgf3{b?-Oz˧S/eAV?-]z˧/{L8j1wx.ڼIM tf`c/7?+o=q ވZolo-Yw+O+ o26o|#d16п S̛3o y5acV|!?q;] V?Be۷ Qrq-?[C8oo=Tt;ۻ_ۮE z΃>~j#rgB鿌ޑ$pS |̋ZyQdfkŏ tgSY|g1'YmZtbw.@Z j<& SXF{Y*KDԾR&HTCC '3;ݒO邮>ҙ:P dL^\ȪY6 l@@o|8@aJe()Q=gF'7miwY4R"Zo> 5CS I3L#X*W Sf#QNLIՂ!blV&\j4zd"/H(}OJYऌ>Ѳ b&AԜ ?fٯ{G{#jL(Ls-|].[7E hJ&ugn2#d}s1-f~l~ z9~MOɽ}q HU 3ĥu?%Hj30 ưXӋ%@<ŭNCvq"wdw=jkγn_Tts>,_Nc?xL,+94;R3-4^S'')%F}$6;vljڜ5 70cvPP)z=4M*N3f'o]Rf`qAvezyP 0|SRـOՆE6Z|X ŔikCfev%Q*Dd ;qHRjj9s}|/JTZM =(!3l|'" tޚLjmT6 3E'pCX[Z(m$QP@J0\P e`nyz!%ѽ=)>g_^LZg2ͧ|O˃h|?VD=݉2#णo <6zhNj&*f 6`+&{O Efq[.~IWZDb(?uuRN #]>#ٔ x,e!{s.n/cdtO %"Uˏe]*Hg r 6 ,ҾIZ|~"%n 
;ZvF9'GBq<ޠ&w.)O%~(TS&>"l%Is""#@|p|:0ّ' OǪV秝eԸ"G$~ڟV P9Hm-9d5L1 d#_ΫY [kN@ͤz28U֒{_: 'vO_͜SY~IWi:`+h/Rn7Fk m^űv/)tŜN_ >~eTUr8\puofuλ!SLP<tyOλ?'&pG @"t;B]S?r0 QF8X)N а,!W6]iS. '~DRY e9YiU"A3:Q{? >C"դSbYs:D= H[S4sWMui`+cwo6 Y({9a6"ăXGH ˺JbE,fS8գ\OJ:Q}RnM?!FȐ5#%f?;FG.c-_IMP1D#?/l%mFg08ysCG$1$#&j9RP s{r@dK*(P~gNCr-iSr Ӟ,hZ=CLv~@ˮ})FC/% ..:Dj d0GA#}f wŎأgZEv^:?V 7=(8}jf'8`n=6MdRq=CzGMџΎ9ֻ0H^Cv٫Z=\NڒT)kwia A8INhٙK$aK|YM M!ǜ@ SR{v_߭eF*>&C~#*\B0c-!] 1{M-F3H?BÛAgcMP$ ~HAwVݙ5rVdo=EFu掗kWL9`gn\ܶ&= NEU~.ӮqV+7Qf #$ЂRfOR+:-ÑQƩ(d_3⤧s8YX`~Ba f uC/7dbR[\nx$AMfwA@_Oz f\|KG鰃ֲ%֢Gıν5e!C>Wof *I7W&}^Q10p ɓ!B)\'_MԜ؋׷D 3P\7_ C]0ϼ:,?=Q̭ݝIXܓo D%. Bo?_$u"|+ƈqv#ON-SA$f=ǯGld A#,Y㌉JdftYo:{c܈hii[SL"P ё Ŭsh 0zTmSi[)Y|[skW6ELK!]~a1ãf١՜eX&Q2Yoե^m3E^;xU3!9c*[^G4ĉ L҆.گڿ]Lcif|(3WyCX} t82fGqRlh5Eէ>T۪Z_Uv5 8Y6:;QIbRfE.,(+pwK GS.B}g-k`*UnV s3`25Ş;caEW+7> (fs{@)ڟ%7X˅/,[Q)ʹ4+TlʶN@9U=CcȜSL[D$a"SlBsf䓜m%^1nrrA(L\tC߰ceûu^uPг|Y),㷳@ɦ|3v v'lM'k-uc!(CHDj{<}Qo sta"`->a^_น% ㇈df8ǎa&{?H AS,nQ߽t^2O$Ac<V쇣л&Ql[^ AN΍v, τ_{ō3AKrW\3 W%F{~ztݘƇXIx\SzO5 ^qC;i}y כ--i=U0ppkb_{\ b&s.PJo;1|Dvuqk|^ao/l /b3I< ,d~ (zh&RRГν4ÞtO[Gxd}! ?D^E7@BojMN%'UH뎜 _תvsȗ sjxG }zNjoZ;Drc,5𤥨2N#T7慦DwUC!LbC݉+PG-\JJ]/=]& [P&t&LUͲح|[&ČPv@wp2ev5qZmФJvDc2=tbaF[/X}QzlUw= m(շѹ:IuZJDC$~zp7K4cBOs6](Z JϖuI.MbJm -пt/ Nwm,@@G)0eVޟڴ,{Dʖ'(p׮o^bRfg$#CxIm[^8D5Rߝ2=!ȶ<| y#AԦg;\/0(g6Hy_XffB}>\a ߔ?y#*mͪOoV-[h 0+]$`[@h'kc`X {;J+ir"xâ&:& -1ЗM(/RW2D\D;8&v:+Pu8j͉ґʱLOoU$y-*Go@7ߗ_9@߼OfE6UeOG qGw8dNxwGSix6^ۇ9H! ٦/Uk "˟2 TB#ő?f]^+IJ,wLC ]M#J4X>]?8NuE6֩XHݦ $Ӊ/CJ'M, iC}<{}&;2=f-y/UE#ϟ]Glˁ Qi7jVSA@IM'JL^&S<>u fV»w p b'CUfPìwIIG|mʚ9q|Sc~m8D&/_%tfoZ w\IX]h:j+R33^;_,J݉L }_F%f3w&qfd+Q[ 86J7Ʀ[$m"/8u{~F5 @ҧ6nmа[ {5TpC`NܳmgJl"eAgs GAs,Ѡ&.Z3T}嘈grm-@YfW)QݙOŨ)g G /٘_1[0bZ^3w(N'ݦR]%<ާлእdKCקAn`IAF_M[W燡TU{dErfn@t2{-$W0Qe,nBX"IQZoBH*Q6.,L n苲\M(o\Jzgavveº^,!7k8;?%(':(Slظ\0i.90FnMt`K|NSQ3miXCW[1>.Hq8sXe<r*9]#!+3tFV+'NB$=9x^w?Nc?u1 %-#W5c:k#muyݽ2${)r2(c Ԙ ůC&A}$+H9{&N}.oT},dfc?D"Tk$WlH@}lheLGo5RY%x*`~c6'$R)Z+9w +m0Hj{W@RW]ӪLX.Rͧ9).#É1`lx<sL*T}~,>%-$gj`h 5}W\@ 固Qstሙ/⮝~и#y8EلV1aS?br#p^\U[uq.YO"e*mu-Rޔ΁sꔄ-6R] }!^& .+&%˔I ̬uI@i+kZ%=[ Ge{yrL0Vkz3/PwrTgBʋruÍ ֶJ!rI[OYy(u[ZG了eV^Bpu,Df)YV^,fc69%][i'נO)YWd9ʥ͏.iΈoBûe%=X/a]Mkr/DSK IĤݴLxۊҗN9J‡ х)_LM:3BT-m 8b q*A1_\)=l1ӐaŒ*|.Wg&z;ƴt- 7Ai? s)& H)= X };r>46j`ΖЧKʣhc?, idR+KwU*w0ZHb&ԡ#Áf.Ȁ$%eENUXc =|VQOVE2hh73r&=ux8 ige*o!v'tK\dwYeq%].d}o"X:J-;;bNzozS,YaӠL"6T^?tK8@S EBRqPUh Yk6PYK22KnT|FX°r/<,?vKqag̅K|8OxwZV6YWIkx.m0# tWZ^RkBJY\j^1r8 թMnfuaR#jwP}}K:$(0ݒ-ra#<^X \ q.Ŷ4kI4lЭ!<*6૞``L7}oMGHÒf(eFI9ǰ8 <)/Slt `8mrX;1YKWb}*w6kޒϗ\p̏ 71 ̥mlvJ[xPUqfY>^c-Mqq~a8c]aNcJm6~ PIך5BTwfK kaTߏ?"v]#[Kǜj7|wTl(Q}pM`~s-ųq` MX]єaLjo5.#0&Rϩob>uZtyזؒ>> &p=h8!8<˂cvP)%vcp-wVKkKKFq3'Ngl}@`l9%:_jQ8pܟkH wć_vBc !sUy.*.,4]|oGBĬbZ5g@PJjZ+{ ye 8:'wO0+e$~ oEiG/LjpuQ~cD)-CW,ݧV[EWNW=M)PKn!pu/r/)"&˰;hOMpQI]J):aA(B@(![,zyjRITj#GqG\ɖb)?a9_GYf{-{G*-Ă#ޛމ LԴu1bk:AEYor'Rqa.6A(? 
>aKyW|F9U;}*%DJK!8'(P"8-KnC+dE*"舖r0!gGQLSY`- OwѳgR#S̀y1f55.0⳩[yy i7 >Wd]bcB W[&|_5SrH2Qg<[*}:wǹ25Hǟ$p{I}~n#tr[sKkY96 }F\ZCSk3ΈwpmitZc-pڼL_Zq|}γ~"z_#Mɲ hkqnᦓv0x#D//Zl+hQ620i%fH Q*C.u=u[ 1 ~@HBƔ9fa*vmo7 w>Yị $ZE6 PWOm3kP"^LDЇpƲ2 8BNo κPh HJ6eWϯ%(Vm 6/5ұ8-ݼ3@5`;TR2QkY%~cԿLybCkNX\zĺ <]c:b2̒,g@<4 吟ы\J1֠n(E e 5[`h9ͦ*:⣘P:ڄEdܖw_0m8SM J4 p-FvGRūu*k]_$~Y{8,js;˛-^i5=ko3rD7nIM a-#sC%SǤUOl>(0z̗м6h.wM+]-Q''M#r| IOK^\Kb@=}h-kd?].5$dI-ou8~GՆg9\+^2R)Wu0e޹ڵw}u >?)+-%:dRCXR9 vtwh MO2lo |x\=&I106{za:b"έhf}7~Xwlk'4a#걡 yDˏ*͢2v0G,6yuǸ@dzoCUu7#;pFu[mOwԯ}</hm`2YC|fgRA\y4z'D'-$I$\Ipm-Mϼc:7@;KԽ!U.IP8ZT!f?-*$4m1kD=7W걠;KT͜Df<-;}ܫbǟUcHyv.PXI >?zPzR5Ff|ط1@vln•05a'+̥h29Qf+}4!Sm =CXk6N%󆉊:tQ}l?fwZ0MfWgфмΚ@VHG|(P[fԥ1\ b 0 >H}ƻa qM*~ehl@hH"nx=@P ^8PMiGUp鿡xg0ȆL>ġ1 E0|;5*0HVuI>BG2\AÆ?*>){ܔ^fyѫا`>*HS>.Ґ_NU4,x(\ v/BdvRʘIR`c$0-;cK*n<}٥SSLk̋<חȶ0'TF-` q1#W!Q(֔@ʿa,66/:JP00ULW` XJ*v1PF!"y0ŃtWIU٠%k>{t;!ˎse˪Ypa6PqF8,d8)hYe8էrMBǣ&"s8gƮd& ]\:Rr8Da#!Fb x!'8IJ.ic!9w7')w:"6|-6޵E[R2Pc8ܯG+MDn6zG>6"hVMx}Z[ALk6ǥ&K?ՊTxMRo#yv:No^Ҋ6 LGh2]1KhJn*8__;涔wڰ7CS1l= f ʽn%X|Xrˀ߀TEt尚MBXe`Xg8nI0aJwJxM5*o'4`[Ku*}gnj׋d@peb9EKY'W,m\u,-SQ ZP9 CsRsS|]A t8Ö~ |&b}irrWy֡לUZZn3 ):§.~Ԁ百d{]}Wyw >x{ד968J VO3^ye{3γߕ!=ETB 呂3}A蔛3elcVmmx΋R9筳RFT.AHt#ԲvQYedvb[],22O=u-aj^r{H g1$Iؚ [ ۮwsVB 6w6tuZ׺{^ٽb==-T/Fyr]\z&o$8kD*`{{V0VV~RJ1NI!%~eDgX(`:^AR'K=ECa\H dİ?*iU2||w&#%kNVI<І#96z)%&F~@Cf&LK6D)&]C'S@&XD]2~l/ܹG%@y kyn͏wza#ʸ0~jY/x6x?87^zPF[+5YߕLL[|L3V_ݜPwo'oeK#W)H=g ##ceZz6oލx{0uB&F%oU1$?}=?,H*S)&193Gz׎%zh؅N7Sīh1i?CZ22)~-0?ٱw#7zGRL ny]FU$$/đ)n.B^s"$sY;I9KC ݈W55r"lݹtMHXi\&_~e)}5cvơ@VzXo fYla\5+!Fɺf AzΊ@U}n陿V/zP_i%kx6.u}>i#FPf̚U7םvAݼT*gҐ ًs|hjz%cj?Mf Z&BA)6${seLn4de99Kգ̂)<*T nF\Sk4\!4ec{qo++a^3U.u|Xw'/!ѺC3hӆ)a}փß6YsNݲH-Qn7$ZTtyg0ͬAʗ3 크eaPut6΂S8ōW%JAK4*cj, T&ܮ=#s\Zw#l/AF d\ w)%6]AHzU|`̝Ig*!f ~³X*cwmp5fzxVsݯ-JC]"WyCvB`P _GmjU` mfg訏l[di!G汱qRH8sbq:>y9=0}E=틠nGINgL!yZ=dLGcX|=B6mܻBwQw!Ւ?_ ׽78\$^`PjN|íƟ{|DXG%8 9ReD1&iwCx/s|I#^%VbiC\jDɡ8p%] ߥyʧ]N1#AWO@QmjvǻSV沩5.'B6Ig&'gfιoGҎBlaROB36hY;Zҁke˾ e ^YC|D ?iEӖ?qŽnpJr㋕$:YYwٳPXly;ՃV0a43W߲| *RXgZ U;ܕQ IJaES IA*u ?^m׭q`^niTRsV®*g::"Ȥ)GiūXzl_V۩z+O R+us.nئp8 nŠDamENP%;ۻ!In"6= Twjբg U86I9EWʁ-ˍ֥R T&*ھ+֭mLS%)<Etk_kiQ3G~,(V#spVED9%-*Nxhȷp9g/0b&gC[m&T=#Gܗw ^^IUITrn-Bt96*(IeuuU!-oXJS)xCfj 2^h)-#)tb2.@j$bqfR8GMLBZ:qڙ8O igF`w#`uu྘w#!iL1`1D/)7lx (b@]\a^c{$$E qɚBm*ZJ@5]B'}aTt58;Ӑ@H` {j v%Beͣ4(ZOB_yZݑ(+ECtc,?(؏o6nR{Chf) ЅJHNc2\'G䄽c .H*(FeV5eF%X=ז{=K#hJ`\Y?}7!)E7Hogcv$UbG)UrFx; %vGhe[x97(ǨX+ HRF_:gDyF9 Ƌ3>q9DH6bDPvxrq۸RYѻ}7z&j*T1N]I~1ӤK;PIR*r6گ7ϸ^6R.*;ʣbˌF4WX;X0r|:]teWYsr)O}QX2}K|O+ル2:$oSiAZǛx&E0G?,vhkxmϳw+;-vBa& <"@y6a4яh&ቴK#ދwQuFZ\5yᱩ=#AWF2yi7K K_7/0)eͅd*F-&[۷dv^bIYjA㳴Xoܸ>bvȩ:VFֲPD ̝JCyVBe[&ѮcT~c2r`j]SSe} D5(e&;ÅwUcVeXg:7+B4A#&x "d-jh*Q`WTiXrGA&r#֖6c/{3!lי:35Φao0p,b^ ߢ( @~ zGOԬ37R$EG!1[>bw,iWw9ծ okjYqqх!?v=^m/i3F:a "cD (v"̭HAp8tqf>X͜{eVn%|~#yu0]%z=T·ɿt;B}AU+ O=?I__8Y ;-eU:G?F`uؤgZn 0UGf"SMk b0vCD dW3FڷMX?>cRIgZ $1`?R]`4yFo 1ΚhmS1O0\4_t.?n.sgB7Ɣ~s *chWnGb!#N"8fZo-^˓\Un3cK#􄁐#~ճJz'{pR;/9> !!jnB.gl)8E/ԑ #REJy# Y'\bc00Z868hV64?uGdNftaw\Hp>-L[>t-xVɜXKڼP*M՝ȴ~=/ eDheank8ſrPP wx4?DdbK7@Vm@Vx9AG}Ŵ-p+MlǿЖ^Xv醉=\L[ZձBHdxe6hk`Q)!+TD*[`~=qZtګsS|ʭ|Fk1PWyU !S턔*ޗ|-&G>/+f <&5oYlMJQ5wxchg3uUcsyLRAD) &1ufy@V[엎q]*?4E~N/ AqaKdg՞S5:WsJci+}±h2ͥ+d\j8嗃 Y IGY"э^ $qX\ ߳#"Z2l{ϿrY".(Ӧʮnjt`7A;V!Ê͸׽Օ79~4e0P]$"T aB(sQ85%KH0P_EbG]['bGv0F g/0G2<VZ{* \kS!3D-{U{!z`D k-ї~R>UWByD.VYbu 1׊)aBd+!@^υ;[s]<qt*X.N]%&=.WA'pɺ9:Ld"g;Vni8LnrǴd Z&統_]J.-R;RŮbs 1l0*Ɇɬ>}~bYCRu&QqYxF?d9Iblc~:<}Btx x5s;>~fm1vwdzYpHn 7%OlL617Ax! |OEqWOS'">Ix~`S_s72 by a4qdhzsm]n{4>$WȂRKf1P\yCTQQ^Fhzk l }Ŗ<v[EaA \5y ˆ. 
r bWfZ# s eS"W4/a)(OZ=O~&i'rSvd)f+Uh1 yN Z`m'Cŵ.ܕf彰%}5tz@yiάLY]+.ŇY+tSl} ƫEpul) zr_QuNN.3䚲yL~^Ra`8pK&[$nsDOUq+EYM:gqd5jG'ɨD Ƶ:P-jNu_/"mҏ }gqGaٴ\`#>4,@ :\Ȼ jl*%ɨ5}jfW(r zEmƇ% pv[IdF"b 7˛54`GF5H2{=*(7L/?X57IwAQP[]%d]RpUWwDŽCDf> stream xڍP-Sx'whq+N A[^܊Kq;]Ztw2dw=_&ީٙ N,@Vv΁FKvͣj`;<$ANϜӳ lr<@^vv;;ߎvIc@ oAh%Nu xm29M!%c'KsESc)_)^ Y:9 BY-D`'K* rt~ P65+-@ Ӡfgj<6`S 19oA?t`u8 +? #X6 [iEV'7'f1췣 9lclGi15u;AY`3N|R3 ;[[ ?I#\k+odٞMvpILY|fj=#7<=y 7 5vAޞ67Bf`S'  A'3 2?߿# ,? ?ffq+fUVbk,<n dr;;c_}+19ݲ_xׂ0;ݳrA=;7Yw#ig?t؍m6y<+y wZ?WW dv_6A,beCn w`'S?U'{l;;(9lKfj>Kyqp؟ >o1X!vN!vh/&ؤxylJ >'M`S3=4~bjg< of/[ >g|.g|96A???!6 sX_ 'QAۿC; >w2uv|N^=%2Eag*hURԕes ,EkCe[_B37XV >GS6!WJlm :8>TљcK]׈bfAxܠz!:@p=}WFn42%'\`Xzkxt Lt0\z&g{wBip'C`4zy% tXMAU5U1Yφ\,'Svi/yc6 n ! {|dҫ4RH)k]f?|Fٹ@ek6ƥ73K6bO΋QCڿ0!n$"x^5=xÓLGPDt gdStMOZŗ>7߳9uku܁+X=M[peMmYn7NLg`IrhKumݹPH(s@&³5E+eKdA ԒJ9!``RƺZN2?s֌(mžx@C9|[rĺ(J 3ug:74pt.Y;`h4J_i $iaV4X7ެS;ݵIRU/sG_m5I:[_K:*d<⊣i9!!wѱX8;F5,|2۰> %۷}f@J (ԏ#NŸWC2 ZHWnھ~CBDJn.h2jCL-螠~X!H^ M59 i/-M8B .(] qTr_y&=7rN)WC~zE_~Yd`^xgr]▜-F'hA!FTOFi_pѶ'ҟ^,7oDC5!ǁ%F=|f g{0!`˭v&l̙%&:Wr څdU[8H-K#оrisR28 "2̷џ5%A-/|UFdGmF"7޻Bb^hj-~hWsV ^F{#6zNHg.02kMb Nx. !z(R3v!I@>&6KQŪtYn y@2ؓ0ySkBU̺]'PyK _-y`պ|Y %ZxG$ǫ\-D(VjQĖM\;z"j): ګ0$g3v'>r`m.C+}oUxt՚/=0x#f.` XlL&4߹x+XG˱[,1LWSUaœCR* [=?pYsLqpgOy?z7;dݳ2 OoAhi z+$g|įqjɚy_ъkϾ.p2란-_s'u~bye/3b"H&C_)yau: -KmZ._H֋tgѼԕd'5@ "$::ҬpZ5/#1tHmͽzs\A ;RF9FF2#t^_C^jIQw[Q톉u8K_( L +~Nx7=626NԘLס$ l8X9uȶh5O/$^/ M}GӌC?;"bK[/!;:6BCk, ]]x7N -Od(eiG#}p;N!}ZnАȻKpζ\M\[_횏f(5w| Wy@ى9rяvC,<rEԪ)$o)VO}Ju+akzJ~ ~Β^IV¥-}T$W1X>`< m}׸] _{4)4?lw:Ѣꈎsr{SO{p;CGJ''ۗQ71U3 )fzD}a>dw:i立 ZY=.w]EV@? %W/:wqA\EXqS%Y\NK ȍ eL_- ie0r+<pR3T@D~@S986{ v]*ow>q ',~pȴoֶM8D&rQ8)-]h'd9P,FxU#GI'8L;0 0CeiޯS'FD_̏^8л oe -k.'gr''}Gsq&@dlߺ̫{ :ZģĪL//zOʫ#ҭc g $HN}Ԓ@s gmS"5+韆R0k[cKv} X}̘Fg?9וAn\ 1{\ Q[LZ m9nh#&kQqlxC7T}#: ʊ61 Qʐ IdЦXfl&Nj2+ီvY|)Uxgq*3'a9Q03ey$n ^>nijNj51oSDk(TFt6Brjc`fѰOH0~YaVAbXV,\Ή3bJF' e~GKjބ?P{˂q_v!>JOQvy o&ߩqOp?|zc'Ϛoҝ!(g?Ѝ>u>M Xpᵁg/X=4g]fCf%,ף^ĚŹn+ eVp5M4t~u~p|Q)C_PNG}_*͹*4QNVk.ɴM\"~'Ժ6a0X[GveW%cc r-ּ;Gsޗ"7 or1@qtN]~ROM\ FGd!HA9|S?c D l,iVMङ$Q1}yL*S}ǀyUI!41&6}/I3cq^jްj\'}!`V$l3rA'3Fcc0VJoת00<գ95˜,r&Ȩ4P&lԑP% xTۏFoD]߿CU9K?&MI$|~ORA9Xu+.h,ZA#RfMIefUQvn6׷QОjc]O%ß\(Uet:\cWƞ =ML-8 m΃~+7a.>BfyoiԒi}X.:_{eSPZᄖymcq_tM X15Z_uSO4{Ls 5*qDJyKϷ3_!rf]h SbvF"%wRDIk~z]ή]K--T{F`h%5谧ٖ̋ި5.JF/s! }2zǗxfuB4wYCrBԧ'WgQAgwJa;_e+2,c|0UoSe񷌈!l{cQ d!BKǫQ6_76RǷݎfK])'З{?U;][`,!:3ٗV:wda<:m!kmP.Kc! Aj~BfUkdIJKE2SUC{ZI:Sul A]VeR%_fE%rPgHF8C/شN>OdۑkWמlky.EG-,4 4b%_ye7Cs?'WW z5©{((sBxr&Y.OYq?؁H Głl|3]Yk L1G]!}̂ĪDIUT@n$ӽ"3tד,g1-0 K6~lA66CCˁ$o,{$UMԇP!1@Wa56ˡs2Sg"o.x! TߣCWf@a LpP|c!x.9 81⎅8S#r܋+ݛll}r+Xz<9^}nTv~Csq\x5}c+hpbҝ9'@sOe~H$!yEy<"=Ҧ&Yjڂ;M$ */L8R ?dIIUA|Ӷc6^&M֧?Å$2թBj8}c`89 aU=קiE']-Z1m24G{ +B٢y5fe/^{!fYH6\_]4#Es,ìϡHB. 9 =Uk'c unFJ/4yZٝ"-hLZIsZS(\|(aE"f$1fZOm 10V9yPfߺ;$ݤ\9emGgk]fV/$G {?}yϕ;%_0#]TUA}NFr?ȇ=-.?[#]t؆~t- ciE^40mԊtŌPF纓<((l`bJ(khy4wQVd*^ЪR_pN-TN4`vILui\gZ/ ꥮ|%Ct9f)fBtǢE 'MV\ZL|SfjÍϗ. 
sΦ0[rFq4z_Fڀ+EӋj2I:ŁmBd/}0jsҔHInXe/h^1HEسe+Eް?*1G gynU(8"qsgŊl[G6Mzt LA d8u?H {Iq6|];cG1,Vg,=ʒ9_rFdyŽt,Hs\Watm83_LT孌 \BAo:͎⍯:裋:D5b%W8גN) Zea׹NKiogny:v53 V)oUاg,EZf Mul~O[NB& e93YN6L\}iOs4hyQ&!l48鍋<|3V!!/#p]Ru"q%cLJ`58Y2271 ε w[wD}Hh!4߁xU~G+Ձ6Əoa7De?IKbcHh{U,ñfP#SХ V~sv utV #5qԜ$u#?,}Ϝ^mB9RU` PҌ0OgƃԇgZW LoeZm0)E20m&Ζp*$Y8|QUG7 sn'[mG=X!( y艕^]vX7vgfMc}3c.!2'C>((HbYn@.hXcRF|ߜt!GGQv oQ 74NKdEE^ H"jp'ʘe~0hc`g+n soO4qJ[M:bh7"d6w &̐QȊ X lYys5eb$97G㺅֤/{@)yì~ u/{Y9SfYWlgǿ ݡH$xGhrVF[k_x#F.h Nrfخ LCL\nHz-HVHj<1f 7nK#͖l+0"e*͋Php#M^6hҭÔ@hYI= k JCZxSxI\CS c)ّ}Ud Ҭ)hc\xU=Y{T<7JEK-kʁ] BΠ^V#b|(ޒ>*a_jsԘBJ@ uEJܹc#1?8nKf,QxfÝ~ٷ@gea2, K&O4/){αH~?¨LKw#(2Ibr&.ZvMۈAȲѿb#fzV^w= 5B2CܕS9?4ĭ#5@rO‘5CWmixɌV.%}F8=Tۑ~>j a6g7x'Ћ ?}ˡh"&u`5݁U1OݜW HacjKȞV.׎dggRuvJaOhV6H4gtu/o`?}(*8g&BCPuj.IQt͡ĵ/>,vI^_1[XU'7}]ϫt(ZgBhPSI A>,S߿)E}Xх^sR!dNp-f!}5k f(YWk*meߑb'j7+lThy5ѻZZPDPw@1.v+M9Fe "_;G/ױR /C'k W2)W07|a !aJha.1$CBM̲G6w 7Li5/1d%}ixٷ)V,LC ,%`Qj@DBK(޶L޻.5Sp8[`ja*S.DDzXXGN9Ku;La,p |>_}KkdE 1ˍabD/ĵ:(jjf'@=4Նp7(2 M+Tvo1:HK(햗|qU4AuNY9>z%}N:tV5ol~#ڃWY0^-'Kk endstream endobj 611 0 obj << /Length1 2299 /Length2 16353 /Length3 0 /Length 17717 /Filter /FlateDecode >> stream xڌp% Gut;m۶c6:I:۶m۶rE{dkε+$J4ƶ@Q['ZzN2=D ;,*ֆB@G @ ``d`㤧0s'ց lbn HaIlM͜>Gt5 cd8 dkdtr rn3'';N::WWWZkGZ[S^ j@tp 5[-, @_%['W `enqHq1:>N(IH6 W502l`ddkmg`nnc 01Diܜ6X9~[~]@T@`iͭHGEll6N'l4;ݿ/?/vt*6@ |`LNzzzv&Ft32ew;N=l&2&?.@3E cs#'!h/qn-cO:flkc'+U:m4F&& ,6&ѥ zPKcn?cMBoob[~ ͭ1N; c 67T ŕ;[_.ؘZ探n@cys'#g_f(oha?2x=?FocwD#[㿖``K1I,,Om4=:Z[:o_W/1ENЉD F>8N?@'_)}p*AJ3NР}_ <G#ss#s#g?; (n_3XX[9毋3d0|4_C;L&R>ԛ/?>BL?2SG̀6~Te7%afH?)#b?>l&Zotjgf0|tS_F|;~(9s2s6>JvrG?G\?2~d~л#/|#gv88< n@#9[# W1i] EV'Du;.m[Ǎ5PM ^/zqͰ }սx04{^^$Y9=bnսE#!s {R_hUgH bB:AS\!Md>fqcDѱ[ edS Is޳ /jg'=-HWvSN-i Lz+Ebj:n8]%};*cy,*WWQݽP0*c(YwϽE[[Ӫq-!---zUP{o4Hܛ=\BJni=7־b3gXTD$ܴ:6JTfmrfCN A%QeZ˽7vgWWZw< CFq%`lsERDe͢'2zEDTUO<3?B |p#do"Y4薶5rrC{A1pDFk fEK|b>ځLB24* |*3UL* VhxG)s[5SA29ݩ>6PUZF*7}NR4Eȁ,=ip;gA:4,vf/Z5fpZ~ͺacO,mda$ē^Ëx?]yC` u՝ ڊfj'~}ūhϗb58KLR>!1Vd"hFQWmϢWCVd޹k/fXA%KaBk43x, FkwR-ZNSCIKj2nR# 36(`~G9. )zLG`B> := 'wgg^RYu'cl S`N +ӁL؇}QY=mdHp_ᇟdU%ȗD#UW=Fns%>cG63]^VfS;vDz$!Ex=Fŵ[Y@K/EV,A+\aNhFD{C緑x•ݣ˼vgn˯rd1UoJ{Jt2vIj=o{R?覸ST]3ͼLNjd!r)j\,3TYCCa| gUF͔w%RQyTs^$[B啃Ft_x{g>5x(ț`X6Y s? AjHc"Η͋Oȓ 5䉥d֩Oxx33D6e9e<#Y37h.fu1ԣn}E1hR?kd^=[,ʖ؍뗖lpTBINl%Í8)O% w re6B@EvoBv)+Hۈvr[G_}GYO_ۅ#@k04gOН=}R%'= "ǒ:1iwAuyavcۍZlzH롾oeBf8ljO$~G 8`݄e<__9'vi|8cLF e{CӚԆ8D{e8E9!O:2(@Y;썍rR;O) `K qhsG}bz2Y#EUQtY)_כoe٨X( Z' \30|Jz,3Jr Y~^1l=~rNuZL^,׍v} o Pj)ҽ-B-;K̢::\{dﯵIU=cpKEr>e'B/rbrm׈ I*j(S2dDOkԾ..?A- 3S&4"'~Wl?S(w wgFN:N'hdxODV{T/ NʰNJ^ƽ`kƺJ\ JRe+;+6r^\zՊ o+ ƷcXlHt=$9':X{وr$N@8#nP0s|lekeYl6W8M&czW1H,> MƤ]$ '" >DӠvGZ&p$0nF$|nA s"ro cVcԷﰌBб qrԨNj ϠtdkW,X"vkSY{"E Q2eKt Sßд䜰[Q`$vE7%nKGtҷXmCjoJW^r\[:h a~<&u%dч^o@GH''ct!PY1Q߅@c$ތq1. 
hĶY@MQ*/3+X:$Six:({f=MĸK%?bO)o|IDiEMZLm!}w-07$La1/l]ڳ6!_k"ǯR_DK-K>WjhOnRGU2;X(3IQtݹU$yz9^s iL$)̀*S3J؛n?Љ{hbT?1PZ'<9E\j_iR?,OeVq#vj)rRwyt75;r ɛ*q"a]ONb ;}3VÝa 'u Ct}ҏm/8؍dž, }A`+F=p%K "el@wVg0'/,Pglj7Rt4odr)4tx4q M$k i^v2-w-BlX<蓆SsYìxH[5Q{t_h"MKM1hB\f/tXPZdAvJ)ONGL(|JBH- Y O)},f4;3r6d/Ee_DfBȸނ0!Cd}}܂;G1C?)$BW1tY3uj.ۂO5y׾O,bHzcq'SY+z⛳V8-c@ƚ@&gҥ5gWNM0] )zkH@Iiz6䥴uo;aL/d[T@/[LѶsCD4gؑmh3XY'dlF{;~F^uh&#A60y ynj0ߩA-(Hn$T5>5`y!ㆹ=/uM!t(JӾ<c{fJӧL2-IN#"FXﵜLBB\rHC.ܾ|<3ߧ<ўd#Hlbf^T٠ 6<* v+e84|))Nw9_iX ~< r40kcr ֟00.u(ph.[575ku'W\jxB}r?`f b0: w*Yig0i:)1$b<C"R5{wʰŠ?IAaaͲn,*{_U tʂ0Dͥ]f%1z6D,$iEL+$4PíT+-87Ɯq893@F|;1S/aR.b515ib#PNGIfKt 2bt=6 ukHimixj;zѸ:Bڰw@H+,b]'np"Z%⸅%e޿[@hO\S@];y UJS#gc@^#ъ1>#0J]ՅuhͿWPԢT:zډ"3$X^dvoe/0hT[W{{@+EN7:-M'~OYHHw@]%W"o/fA*ɚ0|KCQ!Jq![>UcڟZġٌ:YNAfmDADQIRͭI,4 2_oLG' 5a;",yS Mk+{̉vV`;?[5,[YțKHya6*SBMȴCjeWY< nY‡U5r;B=:A>[57I´]^<,>}Aӡ- c-4DDe=uD<р{-JNinJ:|o]wKSh0-RPxW/ӏLK*= F;3|N%uo) F*Lr3Z-P0Y IZFC.$[p Hӊk<2Ua+e ^ 9]gj3, YuRR3c.j^sgIpU 9r1^|bk>J,>e.;X@<)B2Bݘ7#Zf\<`^PǩGzE|6L3 -y,rJOa B&BfFǫpN \ĖU1ia-Ckνj: Ԫ؉h(*QOB:YhBm Gj?K[3xΫnK?D ;aMrVDN\V` @XmiO؛WG {F_*~.2 :b}MO&}OzUM$(\7AoBQ}7'zc}貥Ml٤I^G rW=dm~D* ?y[½Ե: 'llX? OeJr;*^~;T'&[Lt#:ϥ?Q Bh xx):a6#bY,3n|q%bbB.BH룘[QqNXClz8mB=Y/ ]5ѺA*j썐xb5qm‘_";FYEp>M,!}EbCObx&Jϰjqi\zu2롘`@\̱gF=u G t hWd'nKԛ(oS.mbJV&`K2,>5_j#G?M(˽ʤb0K;Ӹ_\ײ&[(AEJBQL./00,?~+5Wّ |/ ?K$G8껸 >1ၟOUrk mEѼR32L_z^:i?L/+ԏb.9>{d(^ NN?XQ\qrfJ3=J-纅n3Xk72m-ǥ*F語ȶ2,bWؔ1Vx!hJI.$/Ǿ ߭sf.(T{/%Vʬf/h|p',Q$S^jp6ŢnYVM+UCEO+ $zULh#/X:)SgA'3m}WGM]C$16" &ot8{sX2ק~ɚO}kQd9sH^㾔~+B H ή DPvD)T7I38MH!Vj}a1gf|c2a; /zqMa_0ncY;nc$ DߢJGGT#%wVmqbz٤NlNvWsqD%[*\<}Uݵ_}C (>&)]$Y2 usNR"ʀt! %P->@R*Kj&,6+=~.OsB.$t/jg+(IO£T?BqñPNWYfNVʥH{|H ; Z6֝ߒ{vӲfTq)O~3co.jży}\sUƮmNLgk?l4,+[{c+PVqGlMY8p{ h3ch' Y,uTw`QXݭ}i:lʾ!Hȵlv5ߋo9 >Fxc_pIqߕ" C5u(^:<*XT>ptˍ 1G5Ǥ׎qv.اr_"y+d!k) kt=7y71|:\^׼CʻDgΐ=v>[1VH p|2msЭtg*0Y"5Y`VrV^N B-zuW'"=Y'SX "  ǗnC_ۅz _ɑ*Q+'Áľ7G IB'\l)q2A>] zs9eI>c)aU.:tl=4T8ų X|h0%-_;"8j#MeF.!p0S'bׂFq?a& z.1~>9)oN0#n;JbO*R`Dhly_vE'9o\G"|Mi8-KBD3꨼fTj5>:ÊOSC/TU#mv6UZ^.qFosP~9jd&dC U u!<3ؗ5m]#T$sb}WV3quC @:a)>C#;|U|OR Op=~QSuH]ZsrΩ_wC\Y$-SAos,*"|GC,{ԈΔ L>Q 2ۧ89HtҺ): C* tϷ ztypl̤ep5G\"R;/5-!&UU>ƵTq2#)nF_7U&~h(o1%O|g h*RT< mv/q-oU u<(KUdcL&< v2L{1~3{L[7kw`ln r#kcxTLŴ^Q@qhry6p݅}e@'Y5[Sd>dGhʗ4Պ'QOغ{vdb__=qU΁Lr5 {4L@"P[Hwa%e0vkᅬW-dR.ICgkשzdUN_:2"m49AKğ|?~rzڔl)4{&4pgBYE?^R:Ƥ/(Pq x>:1Գg6ZJD{jI*˹hZG!pE24vnq*`7UQاN;KUq V^q)hE갋9mz ,%> stream xڍT XLoGBdojRfm]u3][w4J,RB*J EVZTC n0(J a0W.y\By0VBi4LF%T.4OHaA ܸbhp4* `@ 2.!y"!R:Z d2 *GB0ҟPq !"ۻPq9N\$( "/o 쭖.1|H+Cl·Ab%la L<\!!-}c(bONc 9Ϗַwpd!hc@ XRB_[$a|!!~V'ܐ`&`% QD4o﹃#Z[rbFL10aSs3s1"@@.}o8h0rC B@'WM<ޟ8Wώ"Q\q"JqB(Orh@.ƹF{X=`ύ>`@%pt1B\<Av~_>1MLø$:$ ajC~Aq")H}Gjnh>׀eh  dIИbih_LS@I1p?Èپ!͏\)2StQ] XNv7vӂlY:*q}t oges&׼ fO=B[K -`V"e;c!|Lϻ2#)#M 2.6dgԆEް:r#:ź˒}3웺 &5w(CӛG52r^W ܲ9F2;2\WRsXKݑ}vd8r)=ߙE WrVMV1Uc<0*h-L"cC8sfTt>CyqtA۬ݜ4w9WV=޲@c|}_*6b e;.؞^v% ?&w(AfY{+Z.H+X̥Uie95+Dhh$ԤL`сb6?56ae4ziwꆫRgZͩo=X[0UJי1?un ¼qYg-ݐQ2CuTBpbz0+A> Gd^!e9~{bw~Z&{ģg)w~q:F$9`RrwAZcfΪZ{r ^]LNԷd pp&3_Z\ZP;4;|./ɠۭFvU{, Wqmߚ U2i۶{Lry5FsKRɚ7f&հ!B}e_q>c "dKGYI}-M/b<9x״n56ch^h-=ӘvNBT.&^I-l6}>5g:U0 hƒ-;nsUZ_ו>҅޾O9n]{IrNqݟMz;YrxGݮt+[wOzhE0c5/a~|b#N?&/^U?#IO<}X wjqߢ &\EO. 
np1o=4ir8˂im᳻nϡn' <#6:ļXwƻ42"Whәڦ5~?6Q 9vi+W/}c]a0I K-zT.!p弨 ꋂ,ƪ\iVB] gM1Ѿ䉒[*?X}I[[CYih]Y# NlK5#wQ6w52txC+΂{W#Zy PM}gySC^VDiݔ75j;=-Yʲ-q㺚]EfZ,#Hu &-J|nӒs_4=Ǫ}ys$O &EW&P,݇~m]٪)Uso~\ۮb R ujާȬvZaHOf|Q2(KOyǎ)nAN{ O>26z/ zidό Y]L3K>NPt\W!)G}!7ꃅ6$)='%Q:52(V]ߜ BWT ԴIi/@e2i2xc~M!AyoGP9Qƭj7gt{}7׹ݨ 2vn"<zSulīvzgY=yZ&".hqjGKxsEӔGVLTW4VU&r*.Hև瀵 TOZI+@ 搫Y'cӨQ3x@D{U1[ ^Z|tR9-YE?ymv(d_knsoR endstream endobj 615 0 obj << /Length1 1635 /Length2 7040 /Length3 0 /Length 7867 /Filter /FlateDecode >> stream xڭTu\ZUJCeeXJ %Q.׽wyf}}Yxdm`E8 #`k$gΞ<` <@",,r` '# @  $$$YrpGOg=/?L 09Q  [ 6QT+i0ڮPa.`-0\xd]@#yH{!n# 윁0  u!GgC@ wA!CUmyſDk@`!r=E!0] 8B!pu7lt]\hο過P??QpCmyj 0l۸:s;9 ;zl|pCIMe#Gw%S+B@$?:C@@+ zGj/t qQxm!=}8?~  sI!Wa6oA?>7OXU@{:]HnO7pGP$|M?D5g_߿,(@pˣlퟎ0A?Ol>aLHCP˛t 8W;RW%YV֏>kuʹ1EeL0qt-qmYEyMn Zߢӎ 9c]p32t%k#G")cMܾ@_ Z:5Wv,S o.[Yڍ?SfSmٮG{\d3WDR-n^cquH0Jg8ǩ8v~DILݱќ/!~v(`11I<6 sQtI8QNvkvνf^ 0lYF3Fեqi={ףW8^$-0QS'LU\iu)99=ªpItyoaP;kT68s8Ǡe˦ǦqWOƖx;=.Ɨ*ODgb?i&rZk)"(Kvٲ5aXcg4 i=uIu0qP%M#Xf,xADw4ezO^'60d'. zA3EUAqF+׶n"*)R"9 R.0 ͣĭf-/=?Kx-/t^䚻^={MBӷlA ?} ?ϋʏt5޺_v 2R\;ݫq̩Glzg48,lQn f"6P禼yqBhBn)_p͐L'9%C)ea$"̞dx޴ VoUxRqAKDh> A݊7ҟbq1vtryNRaбpfr]Ⳟ +!j'R gw)N1BXo">n{dnF_cSk%jxS)OoʨQ_̎{S)%(p:ݺ >}acOB_xkmh)ս W ?~'[1tCAu !C"D|5咕rlY_Sx7I | #X#:ĸ[i+33Nt'B[mI}S-{fl o!Cj:RxP[J׮ETf{n_0Q+P?0J/QOhi oȊ  qY1v8 MOaT۾V㓻?o'OcKզAILC=?pN=j_KQ ؞$7g|`b:VhqχdӌrrOj㪃av*.{V8ԽHqR/D^L"Ҭᥗ;3E^16/n`QwaYuD}y'GʋK&rbZ&>gҜMSs`뷚O(^$^i,/oMuFdw莝EK,zOXycJfuhJei=3`_f6LHDhVsqgғBkڇ|+7&\5af4l<2^nJ0i"ٲ^8*fasx2z-Y-T,_,uZXØc%a3x  !1L߮ʩPz:S-|{^\X:4xqY=Ԑ&OzAԱΝYdx3# w]H^o Q}1+RN1<`D^amB4#:7;ەCpH]ڞaVON–;"Ƭt92b&:$#%:KXWBOY6yPX׈x> ޮg<5\A@$76.ɝl1Kq;w֙Gv>8OC }'8%8 н݅"=\:H9 N=yt?6FDpի)Vq^h4zAjZ<fOe 4cSgVh2k}rcfc.V퐘Fnb^e(y2:9vp~8#Ţa:!m=d)5H;obl-x| +g>"jv7$Rn~ivO#ZdanMpl2_k^<h#-Wh_ ]EBH==?0d#$>IL$%U6bADzs=sIHiIBmAhf>ǔp'\ Nf?r IPm|j@묱n\沈;`(I;Ŧv-lbg;kh yYU~2ٰ NVNjyey~G .ep`V,4h 3c[wgf. z#;h7{W!f1|z;->W9D<8~@m 501gJvp[^vWillELZPvPa&;(EnL\?09oB-'mHWlN,1l;%eWby}Dbӑa*Uj^AO[ƿ2PK&/3fK /&890P_c,Z%f!FK|v]Y~ѠԂ@znl"G`;]PK>;U{OzJGW#>quز "n ߐѱlMø@nSf7H@-ͶG>eQ {EjR _XnWV.; >&탹hwKoϐkG|\(?[Y2:JY&+ctw7mh{Y bs$H_ T_=j܉̴8GZ]#K-ϰG tK s8F\fR1΃O /U:(AܣRE&QYN|Y됛G fof%Tc|3c7ݽZPsSy32Kqt偱*KݵbW\q%$[(MvIe pXa\vBY/0+ZT.'?)SȜJ }{![fV;x#Ny;_ǥ|YDm{nIԻ+EGѫ>5 a@~F)"e3>VEJa=T=7#\tdb{0T?l蘼`m}t#Mzǝ~H1 H=O|#tKl(["s̸wM#b9֩>rgKOy2u#!Hx֮TDdt`7v\OTC4[rMދK &"$PTh (c.7^ay#"ў S6x52.!qK6G+J*$ǭ}U#'eĴRT[LfE] l W}%O3njCk mم9z(~r{s#9b1AO9r(Ocu?䮡N=-;gOdD`;6_)P;ߣsU}Yjdb23'c2^Y6k:.}De<Tlwy֡E!CW<$teRFv.47igO:-7*^a΋AHa $Ga2|؁2 #J ]lPHG4X] VSfHJn?K$* I#ކ6+oGmx]1@xtoUEFۣs[E᝽.Pݣڏr}Lpg.IB7Q Z: }(2Tx|8(xuLr[c%tBH|ڰ+ADez$I:m,4c׍f"'e'B@k 読MmŐɈm-U}M=[4YP2X>1'µ siW,qr*ƂR>ڕe]:>] 'SϣJm25`t s FTg2‡@b?RNM3 ER_?|]Q]:y0猠݋r)e `rn#S =f5JoPOa3>`&֊&ޯhNvle>\|(N'aoiG%(9|<N?{e|,7#뙣x{e\ɛ"{ohMީ W)+{\7sCBk9j[«dྦྷN6 jq ~o9U_/``H0!O_~޴c&2mzɍ3VH15a!g#D3qG ںeaOE7&jjRGbuZZ;&R8 !n?t"9A@wl4'Ue:W_|"7wkm fFm/l-yf 9VH8v'f"^RtI J: endstream endobj 517 0 obj << /Type /ObjStm /N 100 /First 932 /Length 5992 /Filter /FlateDecode >> stream x\YSK~WLLkpL\sx ? 
@>8r~/$u K{g&kˬ/nd-)]@u][Q+G]QE +0:Jfh>o ehcq455;Tl[5U--lT Zhט'5KTAaD9JJwG{Bkx ZB} ]*)9ZԠO}$e銨06/L >8Mm&8ZT'9VTTa2khqThu&Ş1 ΄D&8: q.*t k"xSD nXK_c"/byS`,Ǟm*hMZq55贔i 9iAé6✜5ef"hh,锍'Qqt2DlbQZs8Mk#AXZ\D<@ Dsd]%.B,ԑ葠ǃ="F}k ǟhD|FЙIT$AHCF#H`jшkiN\P]8O C5t5p81ԍK"nDcZu_T뗗Z1GkSzVmiMIڕV *u ovT d07{drϪ[9~>qy|ѻ,'d8Ɠik~a.iOdaMԇڔU$:$F^}{/˽GWNQw6.mҴ=RNP?ۻ7-'@P)u_i!A/5 l3:46nu ʕ?VD^Q{ .Ɏ^^LPR}}eMXT_ʴdWeR}dZU(,՗`/ ,HkR(\-՗4%ծ4nUjVx Hy6yA0rsO Zu9 : g;3E~b=M0.h hOUT}R|jiڤ/'cGG1ܴ?6 VeA ya$,7)tg.n|wO֗4iJM.=.dϽ^geMթ Hy?֔Hoۘ/S/hx;3!5+Q3| 3Kxқ'\ a7?/dX3B񑷤mIFսp{Ru72_.u` vZ iR iH.SGwGH2N/K~>5R4ZwgEv{)f>I 3w4.EzvA6ib='gj׼:OƜ`hxLg2}1shy&Ӂc>2l#-RVS gA 1d|Ln[cN4 ڗXZ ڠA@UL@I+zvYe<-_j]sQN>Ӻ~Ӡ*]:bIPpPT)~h]FJaWzpW9T,jBd EAN4EyI A7P%-k5u@/l)ߧl٥3!.8ep> X(fQqTRbځ#.5TkxA&z 5Egh5/n H朕I j #h!Dv.=L NHX W $yx`MlY7~p E.ӶKG1lYf 8/e0P))+ecfɞFj /Q;)~,,S,#rdx.$/59W@% _fNmNm 0%uQEo_b-jN Ya&7!L<)Ή4SOd~*_ ғ˔6ZD|mrIfW߳ eiㄘU3%vՂqzm?:,l䏏״+ѦcP> >Z똋i㼴k3emF='~k3 ܲ*`?w:RnąӬ]{17:}g.>mGd8W}lw:.l]Gdi7Gxy]=N&xoz*HI|p~yz/hI-U ڿwVWՓjڭ^Uj:UsuT:quֿ.auUq5-oh!e8֫7.8Ydx({ 3G%ݪWzT].Ր>€q+1du5{GO]υu=+;[ܘiZadX%V"7B@5޾@c/u My`vMJ=fw08K(}/U.CMÊi[{|!}p+d ܁h •jv;wX=-sٟyaL߹D@'a#~-ػ)VN M*\. w߶Y ^ndzva: )&qF-yiSz=vw Sx1}~#8}z䏤o|4HZM[{$)j%?{݁NzQ4^Qt?{Z 7!N&uc~;ab&Iַ -BML\LUȅu.=wD"jH\!9jU<7X}"YR}t Ao}GИu_S/pk 򱹼jM/> endobj 617 0 obj << /Type /ObjStm /N 85 /First 743 /Length 3377 /Filter /FlateDecode >> stream xڵZ[S8~ϯ<45eyjjB! -wׄqLϰ~ϑdGٝGsӘ$`TEUB$; BŁܞ}q0~.EX[tyV,}R@g,^d~`!7i“xa|3ӠK8p|3ψc.p>t<V|;Ͽ}!'U+w6|)wzyp؞WW]gw%c=ٮVlIdߡLF6vrNT;X~~rpO|fxCɍh']~x"q:9F/|+d+p>y[b%6ܡ}8 4oPn>^?#|w ??~k.^ wȻH/q[><^cɹG3j᧳G)>qnNǗ'8WC.oO>]xM{+o`? [L;Jgi=lZM?4h1g.#^Er.; ؀-II7f_Jg8Rn89c5] 41T+1Y!n 0{r8 +kl7aeu5 =/gRFx˂S9GG`S#vQy(*VQE=P͌YPӿ@(Ո-j&YMuPj"VcjܢFjGMU)gdpͫ4v6|tÁ wW6æ+Z})=#aQ{XH3C@Nlk\]jnM[&U7à$1b@7'ykN6ja>.v5PwrYvt{5ҐXj'R1(ڋtE*6&Ҭ9!TjuN=]Um tcV5EAٚEy0v'eR{AlֹweУ{0WP GA ̒nN,_HtQɼVn֦!+l:'/,!,򼼇- EM6׿<C>KKcNNwuboyNLUuY%<[}d=}>[/)h`",RsYB֛lfnpSGuS㽣e=$o! +1t65N08bpKK_Yy1KOUESԦQ9&q_R,i:{*Re|OMt# I-m54q[~0jLo/722%RV|^@yeOiQ<ٛ*:W!dE{1-\[ҫyYH?ee]sI-=~Ƨɗw?{O_pѤho4/y6M^R:j*e6n=B kZ]Ws(^U)rtUKTSSWW.%3ɦl@IV(peb iTBV1Ͼ&Y 섹EM_-!Izw8ĵCXo1 >N_?A/|O Ʌ.!&(#?1G ⮢I1[6PsmY[}'V' n6FvC7NDw #Kfg}2OvCHgR.=I97Nd#~^L&׸ϲ'#'׿#>S/IsCxsuޟF^fw?Q?C`0GY,q3"Mq/6i6?a-.1~Pg;nLN0|Bbr ҿƓLU< Hg8j7lmX\͂T-\ lΒ9*,=[7l]g֫~d>%Qʶ::bgJwsLPUƸk$k!X6GKROUA"msorYm;n,ƨ xHc)4.WW6BsZNJk&:Z70]ؼ3w 'l`+Ӏ1i歄㑮 5 "[eF{m "*a$B6 pR+WSB4zZp+?hoϋ`C!'dgrA wԩJ~g!bIO-j=(7`U{G73zC koeПTb e4:rխEJu]<Ǔ CEa:KsN1޺m>#m:1>+d ] /Length 1554 /Filter /FlateDecode >> stream x%IlVU~PhЁ@-{-ghD[ƅC 1 X^ݘc1qaؙPI ߟ/}(RQ,zc3:RI,)B + 2;:qC7k"v"֎ ;M8n1(!vXn)(#6AT["ւ!A^og^b͸`?Ěp*bA#n58HL4րZbĎցwĎ;Ď#X#z\%L1MhRqT5EQ URM,jRMӧpp>p@?C b0 F(`L)0 f,`,K2PVۢj-zmxG XeT[RW&`5]eGyn5ꮂM5%a  :Bq :.C7,zmRnI!$  pHI91N IEO>>TJ{hqM&`Ekv&a2< ` N̶fm: <b4sάeMImVQ\`Px.YK`ʬ^FM5? 
s f#hU1-f?ШQŴx\cT{if7d!=/]m-)=J#BOp<s9MA9]f74zS(b=(֣X_nvC=UJ.sOq<*أb?$4>Ig="TbOŠ$ꑣ$@*5n< mf_~o^̧3Þ)kuCkSM;C\2kUZ;@5Pu=~)GGwF.:.;qYLԀ}fOsrs-fh08ظ@w7"6qp'@B{"np=s<.+K!K~"+g_5l邟eX񱬋}Yc~Vָxy2E&-S)}'k2KwdXVYE֜e)k2e-+yY $벹~dEY˺bdXvE֪eZ[-V 9O}BjP'> eOC b0 h҉ܪ1Ni Hz2Xw">m endstream endobj startxref 440145 %%EOF forecast/inst/doc/JSS2008.R0000644000176200001440000001634413617722445014721 0ustar liggesusers## ----load_forecast, echo=FALSE, message=FALSE--------------------------------- library('forecast') ## ----load_expsmooth, echo=FALSE, message=FALSE, eval=FALSE-------------------- # library('expsmooth') ## ----expsmooth_datsets, echo=FALSE, message=FALSE----------------------------- bonds <- structure(c(5.83, 6.06, 6.58, 7.09, 7.31, 7.23, 7.43, 7.37, 7.6, 7.89, 8.12, 7.96, 7.93, 7.61, 7.33, 7.18, 6.74, 6.27, 6.38, 6.6, 6.3, 6.13, 6.02, 5.79, 5.73, 5.89, 6.37, 6.62, 6.85, 7.03, 6.99, 6.75, 6.95, 6.64, 6.3, 6.4, 6.69, 6.52, 6.8, 7.01, 6.82, 6.6, 6.32, 6.4, 6.11, 5.82, 5.87, 5.89, 5.63, 5.65, 5.73, 5.72, 5.73, 5.58, 5.53, 5.41, 4.87, 4.58, 4.89, 4.69, 4.78, 4.99, 5.23, 5.18, 5.54, 5.9, 5.8, 5.94, 5.91, 6.1, 6.03, 6.26, 6.66, 6.52, 6.26, 6, 6.42, 6.1, 6.04, 5.83, 5.8, 5.74, 5.72, 5.23, 5.14, 5.1, 4.89, 5.13, 5.37, 5.26, 5.23, 4.97, 4.76, 4.55, 4.61, 5.07, 5, 4.9, 5.28, 5.21, 5.15, 4.9, 4.62, 4.24, 3.88, 3.91, 4.04, 4.03, 4.02, 3.9, 3.79, 3.94, 3.56, 3.32, 3.93, 4.44, 4.29, 4.27, 4.29, 4.26, 4.13, 4.06, 3.81, 4.32, 4.7), .Tsp = c(1994, 2004.33333333333, 12), class = "ts") usnetelec <- structure(c(296.1, 334.1, 375.3, 403.8, 447, 476.3, 550.3, 603.9, 634.6, 648.5, 713.4, 759.2, 797.1, 857.9, 920, 987.2, 1058.4, 1147.5, 1217.8, 1332.8, 1445.5, 1535.1, 1615.9, 1753, 1864.1, 1870.3, 1920.8, 2040.9, 2127.4, 2209.4, 2250.7, 2289.6, 2298, 2244.4, 2313.4, 2419.5, 2473, 2490.5, 2575.3, 2707.4, 2967.3, 3038, 3073.8, 3083.9, 3197.2, 3247.5, 3353.5, 3444.2, 3492.2, 3620.3, 3694.8, 3802.1, 3736.6, 3858.5, 3848), .Tsp = c(1949, 2003, 1), class = "ts") ukcars <- structure(c(330.371, 371.051, 270.67, 343.88, 358.491, 362.822, 261.281, 240.355, 325.382, 316.7, 171.153, 257.217, 298.127, 251.464, 181.555, 192.598, 245.652, 245.526, 225.261, 238.211, 257.385, 228.461, 175.371, 226.462, 266.15, 287.251, 225.883, 265.313, 272.759, 234.134, 196.462, 205.551, 291.283, 284.422, 221.571, 250.697, 253.757, 267.016, 220.388, 277.801, 283.233, 302.072, 259.72, 297.658, 306.129, 322.106, 256.723, 341.877, 356.004, 361.54, 270.433, 311.105, 326.688, 327.059, 274.257, 367.606, 346.163, 348.211, 250.008, 292.518, 343.318, 343.429, 275.386, 329.747, 364.521, 378.448, 300.798, 331.757, 362.536, 389.133, 323.322, 391.832, 421.646, 416.823, 311.713, 381.902, 422.982, 427.722, 376.85, 458.58, 436.225, 441.487, 369.566, 450.723, 462.442, 468.232, 403.636, 413.948, 460.496, 448.932, 407.787, 469.408, 494.311, 433.24, 335.106, 378.795, 387.1, 372.395, 335.79, 397.08, 449.755, 402.252, 391.847, 385.89, 424.325, 433.28, 391.213, 408.74, 445.458, 428.202, 379.048, 394.042, 432.796), .Tsp = c(1977, 2005, 4), class = "ts") visitors <- structure(c(75.7, 75.4, 83.1, 82.9, 77.3, 105.7, 121.9, 150, 98, 118, 129.5, 110.6, 91.7, 94.8, 109.5, 105.1, 95, 130.3, 156.7, 190.1, 139.7, 147.8, 145.2, 132.7, 120.7, 116.5, 142, 140.4, 128, 165.7, 183.1, 222.8, 161.3, 180.4, 185.2, 160.5, 157.1, 163.8, 203.3, 196.9, 179.6, 207.3, 208, 245.8, 168.9, 191.1, 180, 160.1, 136.6, 142.7, 175.4, 161.4, 149.9, 174.1, 192.7, 247.4, 176.2, 192.8, 189.1, 181.1, 149.9, 157.3, 185.3, 178.2, 162.7, 190.6, 198.6, 253.1, 177.4, 190.6, 189.2, 168, 161.4, 172.2, 208.3, 199.3, 
197.4, 216, 223.9, 266.8, 196.1, 238.2, 217.8, 203.8, 175.2, 176.9, 219.3, 199.1, 190, 229.3, 255, 302.4, 242.8, 245.5, 257.9, 226.3, 213.4, 204.6, 244.6, 239.9, 224, 267.2, 285.9, 344, 250.5, 304.3, 307.4, 255.1, 214.9, 230.9, 282.5, 265.4, 254, 301.6, 311, 384, 303.8, 319.1, 313.5, 294.2, 244.8, 261.4, 329.7, 304.9, 268.6, 320.7, 342.9, 422.3, 317.2, 392.7, 365.6, 333.2, 261.5, 306.9, 358.2, 329.2, 309.2, 350.4, 375.6, 465.2, 342.9, 408, 390.9, 325.9, 289.1, 308.2, 397.4, 330.4, 330.9, 366.5, 379.5, 448.3, 346.2, 353.6, 338.6, 341.1, 283.4, 304.2, 372.3, 323.7, 323.9, 354.8, 367.9, 457.6, 351, 398.6, 389, 334.1, 298.1, 317.1, 388.5, 355.6, 353.1, 397, 416.7, 460.8, 360.8, 434.6, 411.9, 405.6, 319.3, 347.9, 429, 372.9, 403, 426.5, 459.9, 559.9, 416.6, 429.2, 428.7, 405.4, 330.2, 370, 446.9, 384.6, 366.3, 378.5, 376.2, 523.2, 379.3, 437.2, 446.5, 360.3, 329.9, 339.4, 418.2, 371.9, 358.6, 428.9, 437, 534, 396.6, 427.5, 392.5, 321.5, 260.9, 308.3, 415.5, 362.2, 385.6, 435.3, 473.3, 566.6, 420.2, 454.8, 432.3, 402.8, 341.3, 367.3, 472, 405.8, 395.6, 449.9, 479.9, 593.1, 462.4, 501.6, 504.7, 409.5), .Tsp = c(1985.33333333333, 2005.25, 12), class = "ts") ## ----etsexamples, fig.height=7, fig.width=9, echo=FALSE, fig.cap="Four time series showing point forecasts and 80\\% \\& 95\\% prediction intervals obtained using exponential smoothing state space models."---- par(mfrow = c(2,2)) mod1 <- ets(bonds) mod2 <- ets(usnetelec) mod3 <- ets(ukcars) mod4 <- ets(visitors) plot(forecast(mod1), main="(a) US 10-year bonds yield", xlab="Year", ylab="Percentage per annum") plot(forecast(mod2), main="(b) US net electricity generation", xlab="Year", ylab="Billion kwh") plot(forecast(mod3), main="(c) UK passenger motor vehicle production", xlab="Year", ylab="Thousands of cars") plot(forecast(mod4), main="(d) Overseas visitors to Australia", xlab="Year", ylab="Thousands of people") ## ----etsnames, echo=FALSE----------------------------------------------------- etsnames <- c(mod1$method, mod2$method, mod3$method, mod4$method) etsnames <- gsub("Ad","A\\\\damped",etsnames) ## ----ets-usnetelec, echo=TRUE------------------------------------------------- etsfit <- ets(usnetelec) ## ----ets-usnetelec-print,echo=TRUE-------------------------------------------- etsfit ## ----ets-usnetelec-accuracy,eval=TRUE,echo=TRUE------------------------------- accuracy(etsfit) ## ----ets-usnetelec-fcast, fig.height=5, fig.width=8, message=FALSE, warning=FALSE, include=FALSE, output=FALSE---- fcast <- forecast(etsfit) plot(fcast) ## ----ets-usnetelec-fcast-print,eval=TRUE,echo=TRUE---------------------------- fcast ## ----ets-usnetelec-newdata,eval=FALSE,echo=TRUE------------------------------- # fit <- ets(usnetelec[1:45]) # test <- ets(usnetelec[46:55], model = fit) # accuracy(test) ## ----ets-usnetelec-fcast-accuracy,eval=FALSE,echo=TRUE------------------------ # accuracy(forecast(fit,10), usnetelec[46:55]) ## ----arimaexamples, fig.height=7, fig.width=9, echo=FALSE, fig.cap="Four time series showing point forecasts and 80\\% \\& 95\\% prediction intervals obtained using ARIMA models."---- mod1 <- auto.arima(bonds, seasonal=FALSE, approximation=FALSE) mod2 <- auto.arima(usnetelec) mod3 <- auto.arima(ukcars) mod4 <- auto.arima(visitors) par(mfrow = c(2,2)) plot(forecast(mod1), main="(a) US 10-year bonds yield", xlab="Year", ylab="Percentage per annum") plot(forecast(mod2), main="(b) US net electricity generation", xlab="Year", ylab="Billion kwh") plot(forecast(mod3), main="(c) UK passenger motor vehicle production", xlab="Year", 
ylab="Thousands of cars") plot(forecast(mod4), main="(d) Overseas visitors to Australia", xlab="Year", ylab="Thousands of people") ## ----arima-auto-fcast,eval=TRUE,echo=TRUE,fig.show="hide"--------------------- arimafit <- auto.arima(usnetelec) fcast <- forecast(arimafit) plot(fcast) ## ----arimanames, echo=FALSE--------------------------------------------------- # Convert character strings to latex arimanames <- c(as.character(mod1), as.character(mod2), as.character(mod3), as.character(mod4)) arimanames <- gsub("\\[([0-9]*)\\]", "$_{\\1}$", arimanames) ## ----arimafcastsummary, echo=TRUE, message=FALSE, warning=FALSE, as.is=TRUE---- summary(fcast) forecast/inst/doc/JSS2008.Rmd0000644000176200001440000017307413553673010015236 0ustar liggesusers--- author: - name: Rob J Hyndman affiliation: Monash University address: > Department of Econometrics \& Business Statistics Monash University Clayton VIC 3800, Australia email: Rob.Hyndman@monash.edu url: https://robjhyndman.com - name: Yeasmin Khandakar affiliation: Monash University address: > Department of Econometrics \& Business Statistics Monash University Clayton VIC 3800, Australia title: formatted: "Automatic Time Series Forecasting:\\newline the \\pkg{forecast} Package for \\proglang{R}" # If you use tex in the formatted title, also supply version without plain: "Automatic Time Series Forecasting: the forecast Package for R" # For running headers, if needed short: "\\pkg{forecast}: Automatic Time Series Forecasting" abstract: > This vignette to the \proglang{R} package \pkg{forecast} is an updated version of @HK2008, published in the *Journal of Statistical Software*. Automatic forecasts of large numbers of univariate time series are often needed in business and other contexts. We describe two automatic forecasting algorithms that have been implemented in the \pkg{forecast} package for \proglang{R}. The first is based on innovations state space models that underly exponential smoothing methods. The second is a step-wise algorithm for forecasting with ARIMA models. The algorithms are applicable to both seasonal and non-seasonal data, and are compared and illustrated using four real time series. 
We also briefly describe some of the other functionality available in the \pkg{forecast} package.} keywords: # at least one keyword must be supplied formatted: [ARIMA models, automatic forecasting, exponential smoothing, prediction intervals, state space models, time series, "\\proglang{R}"] plain: [ARIMA models, automatic forecasting, exponential smoothing, prediction intervals, state space models, time series, R] preamble: > \usepackage{amsmath,rotating,bm,fancyvrb,paralist,thumbpdf} \Volume{27} \Issue{3} \Month{July} \Year{2008} \Submitdate{2007-05-29} \Acceptdate{2008-03-22} \def\damped{$_{\mbox{\footnotesize d}}$} \let\var=\VAR \def\R{\proglang{R}} \def\dampfactor{\phi_h} \raggedbottom bibliography: JSS-paper.bib vignette: > %\VignetteIndexEntry{Automatic Time Series Forecasting: the forecast Package for R (Hyndman & Khandakar, JSS 2008)} %\VignetteEngine{knitr::rmarkdown} %\VignetteEncoding{UTF-8} documentclass: jss output: if (rmarkdown::pandoc_version() >= 2) rticles::jss_article else rmarkdown::html_vignette fig_width: 7 fig_height: 6 fig_caption: true --- ```{r load_forecast, echo=FALSE, message=FALSE} library('forecast') ``` ```{r load_expsmooth, echo=FALSE, message=FALSE, eval=FALSE} library('expsmooth') ``` ```{r expsmooth_datsets, echo=FALSE, message=FALSE} bonds <- structure(c(5.83, 6.06, 6.58, 7.09, 7.31, 7.23, 7.43, 7.37, 7.6, 7.89, 8.12, 7.96, 7.93, 7.61, 7.33, 7.18, 6.74, 6.27, 6.38, 6.6, 6.3, 6.13, 6.02, 5.79, 5.73, 5.89, 6.37, 6.62, 6.85, 7.03, 6.99, 6.75, 6.95, 6.64, 6.3, 6.4, 6.69, 6.52, 6.8, 7.01, 6.82, 6.6, 6.32, 6.4, 6.11, 5.82, 5.87, 5.89, 5.63, 5.65, 5.73, 5.72, 5.73, 5.58, 5.53, 5.41, 4.87, 4.58, 4.89, 4.69, 4.78, 4.99, 5.23, 5.18, 5.54, 5.9, 5.8, 5.94, 5.91, 6.1, 6.03, 6.26, 6.66, 6.52, 6.26, 6, 6.42, 6.1, 6.04, 5.83, 5.8, 5.74, 5.72, 5.23, 5.14, 5.1, 4.89, 5.13, 5.37, 5.26, 5.23, 4.97, 4.76, 4.55, 4.61, 5.07, 5, 4.9, 5.28, 5.21, 5.15, 4.9, 4.62, 4.24, 3.88, 3.91, 4.04, 4.03, 4.02, 3.9, 3.79, 3.94, 3.56, 3.32, 3.93, 4.44, 4.29, 4.27, 4.29, 4.26, 4.13, 4.06, 3.81, 4.32, 4.7), .Tsp = c(1994, 2004.33333333333, 12), class = "ts") usnetelec <- structure(c(296.1, 334.1, 375.3, 403.8, 447, 476.3, 550.3, 603.9, 634.6, 648.5, 713.4, 759.2, 797.1, 857.9, 920, 987.2, 1058.4, 1147.5, 1217.8, 1332.8, 1445.5, 1535.1, 1615.9, 1753, 1864.1, 1870.3, 1920.8, 2040.9, 2127.4, 2209.4, 2250.7, 2289.6, 2298, 2244.4, 2313.4, 2419.5, 2473, 2490.5, 2575.3, 2707.4, 2967.3, 3038, 3073.8, 3083.9, 3197.2, 3247.5, 3353.5, 3444.2, 3492.2, 3620.3, 3694.8, 3802.1, 3736.6, 3858.5, 3848), .Tsp = c(1949, 2003, 1), class = "ts") ukcars <- structure(c(330.371, 371.051, 270.67, 343.88, 358.491, 362.822, 261.281, 240.355, 325.382, 316.7, 171.153, 257.217, 298.127, 251.464, 181.555, 192.598, 245.652, 245.526, 225.261, 238.211, 257.385, 228.461, 175.371, 226.462, 266.15, 287.251, 225.883, 265.313, 272.759, 234.134, 196.462, 205.551, 291.283, 284.422, 221.571, 250.697, 253.757, 267.016, 220.388, 277.801, 283.233, 302.072, 259.72, 297.658, 306.129, 322.106, 256.723, 341.877, 356.004, 361.54, 270.433, 311.105, 326.688, 327.059, 274.257, 367.606, 346.163, 348.211, 250.008, 292.518, 343.318, 343.429, 275.386, 329.747, 364.521, 378.448, 300.798, 331.757, 362.536, 389.133, 323.322, 391.832, 421.646, 416.823, 311.713, 381.902, 422.982, 427.722, 376.85, 458.58, 436.225, 441.487, 369.566, 450.723, 462.442, 468.232, 403.636, 413.948, 460.496, 448.932, 407.787, 469.408, 494.311, 433.24, 335.106, 378.795, 387.1, 372.395, 335.79, 397.08, 449.755, 402.252, 391.847, 385.89, 424.325, 433.28, 391.213, 408.74, 445.458, 
428.202, 379.048, 394.042, 432.796), .Tsp = c(1977, 2005, 4), class = "ts") visitors <- structure(c(75.7, 75.4, 83.1, 82.9, 77.3, 105.7, 121.9, 150, 98, 118, 129.5, 110.6, 91.7, 94.8, 109.5, 105.1, 95, 130.3, 156.7, 190.1, 139.7, 147.8, 145.2, 132.7, 120.7, 116.5, 142, 140.4, 128, 165.7, 183.1, 222.8, 161.3, 180.4, 185.2, 160.5, 157.1, 163.8, 203.3, 196.9, 179.6, 207.3, 208, 245.8, 168.9, 191.1, 180, 160.1, 136.6, 142.7, 175.4, 161.4, 149.9, 174.1, 192.7, 247.4, 176.2, 192.8, 189.1, 181.1, 149.9, 157.3, 185.3, 178.2, 162.7, 190.6, 198.6, 253.1, 177.4, 190.6, 189.2, 168, 161.4, 172.2, 208.3, 199.3, 197.4, 216, 223.9, 266.8, 196.1, 238.2, 217.8, 203.8, 175.2, 176.9, 219.3, 199.1, 190, 229.3, 255, 302.4, 242.8, 245.5, 257.9, 226.3, 213.4, 204.6, 244.6, 239.9, 224, 267.2, 285.9, 344, 250.5, 304.3, 307.4, 255.1, 214.9, 230.9, 282.5, 265.4, 254, 301.6, 311, 384, 303.8, 319.1, 313.5, 294.2, 244.8, 261.4, 329.7, 304.9, 268.6, 320.7, 342.9, 422.3, 317.2, 392.7, 365.6, 333.2, 261.5, 306.9, 358.2, 329.2, 309.2, 350.4, 375.6, 465.2, 342.9, 408, 390.9, 325.9, 289.1, 308.2, 397.4, 330.4, 330.9, 366.5, 379.5, 448.3, 346.2, 353.6, 338.6, 341.1, 283.4, 304.2, 372.3, 323.7, 323.9, 354.8, 367.9, 457.6, 351, 398.6, 389, 334.1, 298.1, 317.1, 388.5, 355.6, 353.1, 397, 416.7, 460.8, 360.8, 434.6, 411.9, 405.6, 319.3, 347.9, 429, 372.9, 403, 426.5, 459.9, 559.9, 416.6, 429.2, 428.7, 405.4, 330.2, 370, 446.9, 384.6, 366.3, 378.5, 376.2, 523.2, 379.3, 437.2, 446.5, 360.3, 329.9, 339.4, 418.2, 371.9, 358.6, 428.9, 437, 534, 396.6, 427.5, 392.5, 321.5, 260.9, 308.3, 415.5, 362.2, 385.6, 435.3, 473.3, 566.6, 420.2, 454.8, 432.3, 402.8, 341.3, 367.3, 472, 405.8, 395.6, 449.9, 479.9, 593.1, 462.4, 501.6, 504.7, 409.5), .Tsp = c(1985.33333333333, 2005.25, 12), class = "ts") ``` # Introduction Automatic forecasts of large numbers of univariate time series are often needed in business. It is common to have over one thousand product lines that need forecasting at least monthly. Even when a smaller number of forecasts are required, there may be nobody suitably trained in the use of time series models to produce them. In these circumstances, an automatic forecasting algorithm is an essential tool. Automatic forecasting algorithms must determine an appropriate time series model, estimate the parameters and compute the forecasts. They must be robust to unusual time series patterns, and applicable to large numbers of series without user intervention. The most popular automatic forecasting algorithms are based on either exponential smoothing or ARIMA models. In this article, we discuss the implementation of two automatic univariate forecasting methods in the \pkg{forecast} package for \proglang{R}. We also briefly describe some univariate forecasting methods that are part of the \pkg{forecast} package. The \pkg{forecast} package for the \proglang{R} system for statistical computing [@R] is available from the Comprehensive \proglang{R} Archive Network at \url{https://CRAN.R-project.org/package=forecast}. Version `r packageVersion('forecast')` of the package was used for this paper. The \pkg{forecast} package contains functions for univariate forecasting and a few examples of real time series data. For more extensive testing of forecasting methods, the \pkg{fma} package contains the 90 data sets from @MWH3, the \pkg{expsmooth} package contains 24 data sets from @expsmooth08, and the \pkg{Mcomp} package contains the 1001 time series from the M-competition [@Mcomp82] and the 3003 time series from the M3-competition [@M3comp00]. 
The \pkg{forecast} package implements automatic forecasting using exponential smoothing, ARIMA models, the Theta method [@AN00], cubic splines [@HKPB05], as well as other common forecasting methods. In this article, we primarily discuss the exponential smoothing approach (in Section \ref{sec:expsmooth}) and the ARIMA modelling approach (in Section \ref{sec:arima}) to automatic forecasting. In Section \ref{sec:package}, we describe the implementation of these methods in the \pkg{forecast} package, along with other features of the package. # Exponential smoothing {#sec:expsmooth} Although exponential smoothing methods have been around since the 1950s, a modelling framework incorporating procedures for model selection was not developed until relatively recently. @OKS97, @HKSG02 and @HKOS05 have shown that all exponential smoothing methods (including non-linear methods) are optimal forecasts from innovations state space models. Exponential smoothing methods were originally classified by Pegels' (1969)\nocite{Pegels69} taxonomy. This was later extended by @Gardner85, modified by @HKSG02, and extended again by @Taylor03a, giving a total of fifteen methods seen in the following table. \begin{table}[!hbt] \begin{center}\vspace{0.2cm} \begin{tabular}{|ll|ccc|} \hline & &\multicolumn{3}{c|}{Seasonal Component} \\ \multicolumn{2}{|c|}{Trend}& N & A & M\\ \multicolumn{2}{|c|}{Component} & (None) & (Additive) & (Multiplicative)\\ \cline{3-5} &&&\\[-0.3cm] N & (None) & N,N & N,A & N,M\\ &&&&\\[-0.3cm] A & (Additive) & A,N & A,A & A,M\\ &&&&\\[-0.3cm] A\damped & (Additive damped) & A\damped,N & A\damped,A & A\damped,M\\ &&&&\\[-0.3cm] M & (Multiplicative) & M,N & M,A & M,M\\ &&&&\\[-0.3cm] M\damped & (Multiplicative damped) & M\damped,N & M\damped,A & M\damped,M\\ \hline \end{tabular}\vspace{0.2cm} \end{center} \caption{The fifteen exponential smoothing methods.} \end{table} Some of these methods are better known under other names. For example, cell (N,N) describes the simple exponential smoothing (or SES) method, cell (A,N) describes Holt's linear method, and cell (A\damped,N) describes the damped trend method. The additive Holt-Winters' method is given by cell (A,A) and the multiplicative Holt-Winters' method is given by cell (A,M). The other cells correspond to less commonly used but analogous methods. ## Point forecasts for all methods We denote the observed time series by $y_1,y_2,\dots,y_n$. A forecast of $y_{t+h}$ based on all of the data up to time $t$ is denoted by $\hat{y}_{t+h|t}$. To illustrate the method, we give the point forecasts and updating equations for method (A,A), the Holt-Winters' additive method: \begin{subequations}\label{eq:AMmethod}\vspace{-15pt} \begin{align} \mbox{Level:}\quad &\ell_t = \alpha (y_t - s_{t-m}) + (1-\alpha)(\ell_{t-1} + b_{t-1})\hspace*{1cm} \label{eq:3-44a}\\ \mbox{Growth:}\quad &b_t = \beta^*(\ell_t - \ell_{t-1}) + (1-\beta^*)b_{t-1} \label{eq:3-45a}\\ \mbox{Seasonal:}\quad &s_t = \gamma(y_t - \ell_{t-1} -b_{t-1}) + (1-\gamma)s_{t-m}\label{eq:3-46a}\\ \mbox{Forecast:}\quad &\hat{y}_{t+h|t} = \ell_t + b_th +s_{t-m+h_m^+}. \label{eq:3-47a} \end{align} \end{subequations} where $m$ is the length of seasonality (e.g., the number of months or quarters in a year), $\ell_t$ represents the level of the series, $b_t$ denotes the growth, $s_t$ is the seasonal component, $\hat{y}_{t+h|t}$ is the forecast for $h$ periods ahead, and $h_m^+ = \big[(h-1) \mbox{ mod } m\big] + 1$. 
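To make the recursions concrete, the following minimal \proglang{R} sketch implements method \eqref{eq:AMmethod} directly. The helper name `hw_additive` is ours for illustration only and is not a function in the package, and the smoothing parameters and initial states are assumed to be supplied, whereas in practice they are estimated from the data (as discussed below; cf. the `hw()` function in the package, which handles estimation automatically).

```{r hw-additive-sketch, eval=FALSE, echo=TRUE}
# Minimal sketch of the Holt-Winters' additive recursions above.
# hw_additive is a hypothetical helper, not a forecast package function.
# alpha, beta (= beta*), gamma and the initial states l0, b0, s0 are
# assumed given; in practice they are estimated from the data.
hw_additive <- function(y, m, alpha, beta, gamma, l0, b0, s0, h = 2 * m) {
  n <- length(y)
  l <- b <- numeric(n)
  s <- c(s0, numeric(n))  # s[t] stores s_{t-m}, so s0 = (s_{1-m}, ..., s_0)
  lprev <- l0; bprev <- b0
  for (t in seq_len(n)) {
    l[t] <- alpha * (y[t] - s[t]) + (1 - alpha) * (lprev + bprev)    # level
    b[t] <- beta * (l[t] - lprev) + (1 - beta) * bprev               # growth
    s[t + m] <- gamma * (y[t] - lprev - bprev) + (1 - gamma) * s[t]  # seasonal
    lprev <- l[t]; bprev <- b[t]
  }
  hm <- ((seq_len(h) - 1) %% m) + 1     # h_m^+ = [(h-1) mod m] + 1
  l[n] + seq_len(h) * b[n] + s[n + hm]  # forecasts for horizons 1..h
}
```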
To use method \eqref{eq:AMmethod}, we need values for the initial states $\ell_0$, $b_0$ and $s_{1-m},\dots,s_0$, and for the smoothing parameters $\alpha$, $\beta^*$ and $\gamma$. All of these will be estimated from the observed data. Equation \eqref{eq:3-46a} is slightly different from the usual Holt-Winters equations such as those in @MWH3 or @BOK05. These authors replace \eqref{eq:3-46a} with $$ s_t = \gamma^*(y_t - \ell_{t}) + (1-\gamma^*)s_{t-m}. $$ If $\ell_t$ is substituted using \eqref{eq:3-44a}, we obtain $$s_t = \gamma^*(1-\alpha)(y_t - \ell_{t-1}-b_{t-1}) + \{1-\gamma^*(1-\alpha)\}s_{t-m}. $$ Thus, we obtain identical forecasts using this approach by replacing $\gamma$ in \eqref{eq:3-46a} with $\gamma^*(1-\alpha)$. The modification given in \eqref{eq:3-46a} was proposed by @OKS97 to make the state space formulation simpler. It is equivalent to Archibald's (1990)\nocite{Archibald90} variation of the Holt-Winters' method. \begin{sidewaystable} \begin{small} \begin{center} \begin{tabular}{|c|lll|} \hline & \multicolumn{3}{c|}{Seasonal} \\ {Trend} & \multicolumn{1}{c}{N} & \multicolumn{1}{c}{A} & \multicolumn{1}{c|}{M}\\ \cline{2-4} & $\ell_t = \alpha y_t + (1-\alpha) \ell_{t-1}$ & $\ell_t = \alpha (y_t - s_{t-m}) + (1-\alpha) \ell_{t-1}$ & $\ell_t = \alpha (y_t / s_{t-m}) + (1-\alpha) \ell_{t-1}$\\ {N} & & $s_t = \gamma (y_t - \ell_{t-1}) + (1-\gamma) s_{t-m}$ & $s_t = \gamma (y_t / \ell_{t-1}) + (1-\gamma) s_{t-m}$ \\ & $\hat{y}_{t+h|t} = \ell_t$ & $\hat{y}_{t+h|t} = \ell_t + s_{t-m+h_m^+}$ & $\hat{y}_{t+h|t}= \ell_ts_{t-m+h_m^+}$ \\ \hline & $\ell_t = \alpha y_t + (1-\alpha) (\ell_{t-1}+b_{t-1})$ & $\ell_t = \alpha (y_t - s_{t-m}) + (1-\alpha) (\ell_{t-1}+b_{t-1})$ & $\ell_t = \alpha (y_t / s_{t-m}) + (1-\alpha) (\ell_{t-1}+b_{t-1})$\\ {A} & $b_t = \beta^* (\ell_t-\ell_{t-1}) + (1-\beta^*) b_{t-1}$ & $b_t = \beta^* (\ell_t-\ell_{t-1}) + (1-\beta^*) b_{t-1}$ & $b_t = \beta^* (\ell_t-\ell_{t-1}) + (1-\beta^*) b_{t-1}$\\ & & $s_t = \gamma (y_t - \ell_{t-1}-b_{t-1}) + (1-\gamma) s_{t-m}$ & $s_t = \gamma (y_t / (\ell_{t-1}+b_{t-1})) + (1-\gamma) s_{t-m}$\\ & $\hat{y}_{t+h|t} = \ell_t+hb_t$ & $\hat{y}_{t+h|t} = \ell_t +hb_t +s_{t-m+h_m^+}$ & $\hat{y}_{t+h|t}= (\ell_t+hb_t)s_{t-m+h_m^+}$ \\ \hline & $\ell_t = \alpha y_t + (1-\alpha) (\ell_{t-1}+\phi b_{t-1})$ & $\ell_t = \alpha (y_t - s_{t-m}) + (1-\alpha) (\ell_{t-1}+\phi b_{t-1})$ & $\ell_t = \alpha (y_t / s_{t-m}) + (1-\alpha) (\ell_{t-1}+\phi b_{t-1})$\\ {A\damped } & $b_t = \beta^* (\ell_t-\ell_{t-1}) + (1-\beta^*) \phi b_{t-1}$ & $b_t = \beta^* (\ell_t-\ell_{t-1}) + (1-\beta^*) \phi b_{t-1}$ & $b_t = \beta^* (\ell_t-\ell_{t-1}) + (1-\beta^*) \phi b_{t-1}$\\ & & $s_t = \gamma (y_t - \ell_{t-1}-\phi b_{t-1}) + (1-\gamma) s_{t-m}$ & $s_t = \gamma (y_t / (\ell_{t-1}+\phi b_{t-1})) + (1-\gamma) s_{t-m}$\\ & $\hat{y}_{t+h|t} = \ell_t+\dampfactor b_t$ & $\hat{y}_{t+h|t} = \ell_t+\dampfactor b_t+s_{t-m+h_m^+}$ & $\hat{y}_{t+h|t}= (\ell_t+\dampfactor b_t)s_{t-m+h_m^+}$ \\ \hline & $\ell_t = \alpha y_t + (1-\alpha) \ell_{t-1}b_{t-1}$ & $\ell_t = \alpha (y_t - s_{t-m}) + (1-\alpha) \ell_{t-1}b_{t-1}$ & $\ell_t = \alpha (y_t / s_{t-m}) + (1-\alpha) \ell_{t-1}b_{t-1}$\\ {M} & $b_t = \beta^* (\ell_t/\ell_{t-1}) + (1-\beta^*) b_{t-1}$ & $b_t = \beta^* (\ell_t/\ell_{t-1}) + (1-\beta^*) b_{t-1}$ & $b_t = \beta^* (\ell_t/\ell_{t-1}) + (1-\beta^*) b_{t-1}$\\ & & $s_t = \gamma (y_t - \ell_{t-1}b_{t-1}) + (1-\gamma) s_{t-m}$ & $s_t = \gamma (y_t / (\ell_{t-1}b_{t-1})) + (1-\gamma) s_{t-m}$\\ & $\hat{y}_{t+h|t} = \ell_tb_t^h$ & $\hat{y}_{t+h|t} =
\ell_tb_t^h + s_{t-m+h_m^+}$ & $\hat{y}_{t+h|t}= \ell_tb_t^hs_{t-m+h_m^+}$ \\ \hline & $\ell_t = \alpha y_t + (1-\alpha) \ell_{t-1}b^\phi_{t-1}$ & $\ell_t = \alpha (y_t - s_{t-m}) + (1-\alpha)\ell_{t-1}b^\phi_{t-1}$ & $\ell_t = \alpha (y_t / s_{t-m}) + (1-\alpha)\ell_{t-1}b^\phi_{t-1}$\\ {M\damped } & $b_t = \beta^* (\ell_t/\ell_{t-1}) + (1-\beta^*) b^\phi_{t-1}$ & $b_t = \beta^* (\ell_t/\ell_{t-1}) + (1-\beta^*) b^\phi_{t-1}$ & $b_t = \beta^* (\ell_t/\ell_{t-1}) + (1-\beta^*) b^\phi_{t-1}$\\ & & $s_t = \gamma (y_t - \ell_{t-1}b^\phi_{t-1}) + (1-\gamma) s_{t-m}$ & $s_t = \gamma (y_t / (\ell_{t-1}b^\phi_{t-1})) + (1-\gamma) s_{t-m}$\\ & $\hat{y}_{t+h|t} = \ell_tb_t^{\phi_h}$ & $\hat{y}_{t+h|t} = \ell_tb_t^{\phi_h} + s_{t-m+h_m^+}$ & $\hat{y}_{t+h|t}= \ell_tb_t^{\phi_h}s_{t-m+h_m^+}$ \\ \hline \end{tabular} \end{center} \end{small} \caption{Formulae for recursive calculations and point forecasts. In each case, $\ell_t$ denotes the series level at time $t$, $b_t$ denotes the slope at time $t$, $s_t$ denotes the seasonal component of the series at time $t$, and $m$ denotes the number of seasons in a year; $\alpha$, $\beta^*$, $\gamma$ and $\phi$ are constants, $\phi_h = \phi+\phi^2+\dots+\phi^{h}$ and $h_m^+ = \big[(h-1) \mbox{ mod } m\big] + 1$.}\label{table:pegels} \end{sidewaystable} Table \ref{table:pegels} gives recursive formulae for computing point forecasts $h$ periods ahead for all of the exponential smoothing methods. Some interesting special cases can be obtained by setting the smoothing parameters to extreme values. For example, if $\alpha=0$, the level is constant over time; if $\beta^*=0$, the slope is constant over time; and if $\gamma=0$, the seasonal pattern is constant over time. At the other extreme, naïve forecasts (i.e., $\hat{y}_{t+h|t}=y_t$ for all $h$) are obtained using the (N,N) method with $\alpha=1$. Finally, the additive and multiplicative trend methods are special cases of their damped counterparts obtained by letting $\phi=1$. ## Innovations state space models {#sec:statespace} For each exponential smoothing method in Table \ref{table:pegels}, @expsmooth08 describe two possible innovations state space models, one corresponding to a model with additive errors and the other to a model with multiplicative errors. If the same parameter values are used, these two models give equivalent point forecasts, although different prediction intervals. Thus there are 30 potential models described in this classification. Historically, the nature of the error component has often been ignored, because the distinction between additive and multiplicative errors makes no difference to point forecasts. We are careful to distinguish exponential smoothing \emph{methods} from the underlying state space \emph{models}. An exponential smoothing method is an algorithm for producing point forecasts only. The underlying stochastic state space model gives the same point forecasts, but also provides a framework for computing prediction intervals and other properties. To distinguish the models with additive and multiplicative errors, we add an extra letter to the front of the method notation. The triplet (E,T,S) refers to the three components: error, trend and seasonality. So the model ETS(A,A,N) has additive errors, additive trend and no seasonality---in other words, this is Holt's linear method with additive errors. Similarly, ETS(M,M\damped,M) refers to a model with multiplicative errors, a damped multiplicative trend and multiplicative seasonality. 
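In the \pkg{forecast} package (described in Section \ref{sec:package}), this triplet maps directly onto the three-letter \code{model} argument of \code{ets()}. The calls below are illustrative only; the data sets are arbitrary choices.

```{r ets-triplet-sketch, eval=FALSE, echo=TRUE}
library(forecast)
ets(usnetelec, model = "AAN")                # ETS(A,A,N): Holt's linear method with additive errors
ets(ukcars, model = "AAA", damped = FALSE)   # ETS(A,A,A): additive Holt-Winters' method with additive errors
ets(visitors, model = "MAM", damped = TRUE)  # ETS(M,Ad,M)
ets(usnetelec, model = "ZZZ")                # all three components selected automatically
```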
The notation ETS($\cdot$,$\cdot$,$\cdot$) helps in remembering the order in which the components are specified.

Once a model is specified, we can study the probability distribution of future values of the series and find, for example, the conditional mean of a future observation given knowledge of the past. We denote this as $\mu_{t+h|t} = \E(y_{t+h} \mid \bm{x}_t)$, where $\bm{x}_t$ contains the unobserved components such as $\ell_t$, $b_t$ and $s_t$. For $h=1$ we use $\mu_t\equiv\mu_{t+1|t}$ as a shorthand notation. For many models, these conditional means will be identical to the point forecasts given in Table \ref{table:pegels}, so that $\mu_{t+h|t}=\hat{y}_{t+h|t}$. However, for other models (those with multiplicative trend or multiplicative seasonality), the conditional mean and the point forecast will differ slightly for $h\ge 2$.

We illustrate these ideas using the damped trend method of @GM85.

\subsubsection{Additive error model: ETS(A,A$_d$,N)}

Let $\mu_t = \hat{y}_t = \ell_{t-1}+\phi b_{t-1}$ denote the one-step forecast of $y_{t}$ assuming that we know the values of all parameters. Also, let $\varepsilon_t = y_t - \mu_t$ denote the one-step forecast error at time $t$. From the equations in Table \ref{table:pegels}, we find that\vspace*{-15pt}
\begin{align}
\label{ss1} y_t &= \ell_{t-1} + \phi b_{t-1} + \varepsilon_t\\
\ell_t &= \ell_{t-1} + \phi b_{t-1} + \alpha \varepsilon_t \label{ss2}\\
b_t &= \phi b_{t-1} + \beta^*(\ell_t - \ell_{t-1}- \phi b_{t-1}) = \phi b_{t-1} + \alpha\beta^*\varepsilon_t. \label{ss3}
\end{align}
We simplify the last expression by setting $\beta=\alpha\beta^*$. The three equations above constitute a state space model underlying the damped Holt's method. Note that it is an \emph{innovations} state space model [@AM79;@Aoki87] because the same error term appears in each equation. We can write it in standard state space notation by defining the state vector as $\bm{x}_t = (\ell_t,b_t)'$ and expressing \eqref{ss1}--\eqref{ss3} as
\begin{subequations}\vspace*{-15pt}
\begin{align}
y_t &= \left[ 1 ~~ \phi \right] \bm{x}_{t-1} + \varepsilon_t\label{obseq}\\
\bm{x}_t &= \left[\begin{array}{ll} 1 & \phi\\ 0 & \phi \end{array}\right]\bm{x}_{t-1} + \left[\begin{array}{l} \alpha\\ \beta \end{array}\right]\varepsilon_t.\label{stateeq}
\end{align}
\end{subequations}
The model is fully specified once we state the distribution of the error term $\varepsilon_t$. Usually we assume that these are independent and identically distributed, following a normal distribution with mean 0 and variance $\sigma^2$, which we write as $\varepsilon_t \sim\mbox{NID}(0, \sigma^2)$.

\subsubsection{Multiplicative error model: ETS(M,A$_d$,N)}

A model with multiplicative error can be derived similarly, by first setting $\varepsilon_t = (y_t-\mu_t)/\mu_t$, so that $\varepsilon_t$ is the relative error. Then, following a similar approach to that for additive errors, we find\vspace*{-15pt}
\begin{align*}
y_t &= (\ell_{t-1} + \phi b_{t-1})(1 + \varepsilon_t)\\
\ell_t &= (\ell_{t-1} + \phi b_{t-1})(1 + \alpha \varepsilon_t)\\
b_t &= \phi b_{t-1} + \beta(\ell_{t-1}+\phi b_{t-1})\varepsilon_t,
\end{align*}
or\vspace*{-15pt}
\begin{align*}
y_t &= \left[ 1 ~~ \phi \right] \bm{x}_{t-1}(1 + \varepsilon_t)\\
\bm{x}_t &= \left[\begin{array}{ll} 1 & \phi \\ 0 & \phi \end{array}\right]\bm{x}_{t-1} + \left[ 1 ~~ \phi \right] \bm{x}_{t-1} \left[\begin{array}{l} \alpha\\ \beta \end{array}\right]\varepsilon_t.
\end{align*}
Again we assume that $\varepsilon_t \sim \mbox{NID}(0,\sigma^2)$.
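The innovations structure is easy to see in simulation: a single draw of $\varepsilon_t$ drives the observation equation and both state updates. The following sketch generates one realization of the multiplicative error model above, using illustrative (not estimated) parameter values.

```{r ets-madn-sim-sketch, eval=FALSE, echo=TRUE}
set.seed(123)
n <- 100; alpha <- 0.5; beta <- 0.1; phi <- 0.9; sigma <- 0.02  # illustrative values
l <- 100; b <- 2                         # initial states l_0 and b_0
y <- numeric(n)
for (t in seq_len(n)) {
  e    <- rnorm(1, mean = 0, sd = sigma) # the single innovation epsilon_t
  mu   <- l + phi * b                    # one-step forecast mu_t
  y[t] <- mu * (1 + e)                   # observation equation
  l    <- mu * (1 + alpha * e)           # level update (same e)
  b    <- phi * b + beta * mu * e        # growth update (same e)
}
y <- ts(y)
```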
Of course, this is a nonlinear state space model, which is usually considered difficult to handle in estimating and forecasting. However, that is one of the many advantages of the innovations form of state space models --- we can still compute forecasts, the likelihood and prediction intervals for this nonlinear model with no more effort than is required for the additive error model. ## State space models for all exponential smoothing methods {#sec:ssmodels} There are similar state space models for all 30 exponential smoothing variations. The general model involves a state vector $\bm{x}_t = (\ell_t, b_t$, $s_t, s_{t-1}, \dots, s_{t-m+1})'$ and state space equations of the form \begin{subequations}\label{eq:ss}\vspace*{-15pt} \begin{align} y_t &= w(\bm{x}_{t-1}) + r(\bm{x}_{t-1})\varepsilon_t \label{eq:ss1}\\ \bm{x}_t &= f(\bm{x}_{t-1}) + g(\bm{x}_{t-1})\varepsilon_t \label{eq:ss2} \end{align} \end{subequations} where $\{\varepsilon_t\}$ is a Gaussian white noise process with mean zero and variance $\sigma^2$, and $\mu_t = w(\bm{x}_{t-1})$. The model with additive errors has $r(\bm{x}_{t-1})=1$, so that $y_t = \mu_{t} + \varepsilon_t$. The model with multiplicative errors has $r(\bm{x}_{t-1})=\mu_t$, so that $y_t = \mu_{t}(1 + \varepsilon_t)$. Thus, $\varepsilon_t = (y_t - \mu_t)/\mu_t$ is the relative error for the multiplicative model. The models are not unique. Clearly, any value of $r(\bm{x}_{t-1})$ will lead to identical point forecasts for $y_t$. All of the methods in Table \ref{table:pegels} can be written in the form \eqref{eq:ss1} and \eqref{eq:ss2}. The specific form for each model is given in @expsmooth08. Some of the combinations of trend, seasonality and error can occasionally lead to numerical difficulties; specifically, any model equation that requires division by a state component could involve division by zero. This is a problem for models with additive errors and either multiplicative trend or multiplicative seasonality, as well as for the model with multiplicative errors, multiplicative trend and additive seasonality. These models should therefore be used with caution. The multiplicative error models are useful when the data are strictly positive, but are not numerically stable when the data contain zeros or negative values. So when the time series is not strictly positive, only the six fully additive models may be applied. The point forecasts given in Table \ref{table:pegels} are easily obtained from these models by iterating equations \eqref{eq:ss1} and \eqref{eq:ss2} for $t=n+1, n+2,\dots,n+h$, setting $\varepsilon_{n+j}=0$ for $j=1,\dots,h$. In most cases (notable exceptions being models with multiplicative seasonality or multiplicative trend for $h\ge2$), the point forecasts can be shown to be equal to $\mu_{t+h|t} = \E(y_{t+h} \mid \bm{x}_t)$, the conditional expectation of the corresponding state space model. The models also provide a means of obtaining prediction intervals. In the case of the linear models, where the forecast distributions are normal, we can derive the conditional variance $v_{t+h|t} = \var(y_{t+h} \mid \bm{x}_t)$ and obtain prediction intervals accordingly. This approach also works for many of the nonlinear models. Detailed derivations of the results for many models are given in @HKOS05. A more direct approach that works for all of the models is to simply simulate many future sample paths conditional on the last estimate of the state vector, $\bm{x}_t$. Then prediction intervals can be obtained from the percentiles of the simulated sample paths. 
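For example, using the \code{simulate()} method for \code{ets} objects (see Section \ref{sec:package}), simulation-based intervals can be computed along the following lines; the horizon and number of paths here are arbitrary.

```{r sim-intervals-sketch, eval=FALSE, echo=TRUE}
library(forecast)
fit <- ets(usnetelec)
# 5000 future sample paths of length 10, conditional on the final state estimate
paths <- replicate(5000, simulate(fit, nsim = 10, future = TRUE))
# 95% prediction intervals from the percentiles of the simulated paths
t(apply(paths, 1, quantile, probs = c(0.025, 0.975)))
```

Setting \code{bootstrap = TRUE} in \code{simulate()} resamples the fitted errors rather than drawing normally distributed innovations.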
Point forecasts can also be obtained in this way by taking the average of the simulated values at each future time period. An advantage of this approach is that we generate an estimate of the complete predictive distribution, which is especially useful in applications such as inventory planning, where expected costs depend on the whole distribution. ## Estimation {#sec:estimation} In order to use these models for forecasting, we need to know the values of $\bm{x}_0$ and the parameters $\alpha$, $\beta$, $\gamma$ and $\phi$. It is easy to compute the likelihood of the innovations state space model \eqref{eq:ss}, and so obtain maximum likelihood estimates. @OKS97 show that\vspace*{-15pt} \begin{equation}\label{likelihood} L^*(\bm\theta,\bm{x}_0) = n\log\Big(\sum_{t=1}^n \varepsilon^2_t\Big) + 2\sum_{t=1}^n \log|r(\bm{x}_{t-1})| \end{equation} is equal to twice the negative logarithm of the likelihood function (with constant terms eliminated), conditional on the parameters $\bm\theta = (\alpha,\beta,\gamma,\phi)'$ and the initial states $\bm{x}_0 = (\ell_0,b_0,s_0,s_{-1},\dots,s_{-m+1})'$, where $n$ is the number of observations. This is easily computed by simply using the recursive equations in Table \ref{table:pegels}. Unlike state space models with multiple sources of error, we do not need to use the Kalman filter to compute the likelihood. The parameters $\bm\theta$ and the initial states $\bm{x}_0$ can be estimated by minimizing $L^*$. Most implementations of exponential smoothing use an ad hoc heuristic scheme to estimate $\bm{x}_0$. However, with modern computers, there is no reason why we cannot estimate $\bm{x}_0$ along with $\bm\theta$, and the resulting forecasts are often substantially better when we do. We constrain the initial states $\bm{x}_0$ so that the seasonal indices add to zero for additive seasonality, and add to $m$ for multiplicative seasonality. There have been several suggestions for restricting the parameter space for $\alpha$, $\beta$ and $\gamma$. The traditional approach is to ensure that the various equations can be interpreted as weighted averages, thus requiring $\alpha$, $\beta^*=\beta/\alpha$, $\gamma^*=\gamma/(1-\alpha)$ and $\phi$ to all lie within $(0,1)$. This suggests $$0<\alpha<1,\qquad 0<\beta<\alpha,\qquad 0<\gamma < 1-\alpha,\qquad\mbox{and}\qquad 0<\phi<1. $$ However, @HAA08 show that these restrictions are usually stricter than necessary (although in a few cases they are not restrictive enough). ## Model selection Forecast accuracy measures such as mean squared error (MSE) can be used for selecting a model for a given set of data, provided the errors are computed from data in a hold-out set and not from the same data as were used for model estimation. However, there are often too few out-of-sample errors to draw reliable conclusions. Consequently, a penalized method based on the in-sample fit is usually better. One such approach uses a penalized likelihood such as Akaike's Information Criterion: $$\mbox{AIC} = L^*(\hat{\bm\theta},\hat{\bm{x}}_0) + 2q, $$ where $q$ is the number of parameters in $\bm\theta$ plus the number of free states in $\bm{x}_0$, and $\hat{\bm\theta}$ and $\hat{\bm{x}}_0$ denote the estimates of $\bm\theta$ and $\bm{x}_0$. We select the model that minimizes the AIC amongst all of the models that are appropriate for the data. The AIC also provides a method for selecting between the additive and multiplicative error models. 
The point forecasts from the two models are identical so that standard forecast accuracy measures such as the MSE or mean absolute percentage error (MAPE) are unable to select between the error types. The AIC is able to select between the error types because it is based on likelihood rather than one-step forecasts. Obviously, other model selection criteria (such as the BIC) could also be used in a similar manner. ## Automatic forecasting {#sec:algorithm} We combine the preceding ideas to obtain a robust and widely applicable automatic forecasting algorithm. The steps involved are summarized below. \begin{compactenum} \item For each series, apply all models that are appropriate, optimizing the parameters (both smoothing parameters and the initial state variable) of the model in each case. \item Select the best of the models according to the AIC. \item Produce point forecasts using the best model (with optimized parameters) for as many steps ahead as required. \item Obtain prediction intervals for the best model either using the analytical results of Hyndman, Koehler, et al. (2005), or by simulating future sample paths for $\{y_{n+1},\dots,y_{n+h}\}$ and finding the $\alpha/2$ and $1-\alpha/2$ percentiles of the simulated data at each forecasting horizon. If simulation is used, the sample paths may be generated using the normal distribution for errors (parametric bootstrap) or using the resampled errors (ordinary bootstrap). \end{compactenum} @HKSG02 applied this automatic forecasting strategy to the M-competition data [@Mcomp82] and the IJF-M3 competition data [@M3comp00] using a restricted set of exponential smoothing models, and demonstrated that the methodology is particularly good at short term forecasts (up to about 6 periods ahead), and especially for seasonal short-term series (beating all other methods in the competitions for these series). # ARIMA models {#sec:arima} A common obstacle for many people in using Autoregressive Integrated Moving Average (ARIMA) models for forecasting is that the order selection process is usually considered subjective and difficult to apply. But it does not have to be. There have been several attempts to automate ARIMA modelling in the last 25 years. @HR82 proposed a method to identify the order of an ARMA model for a stationary series. In their method the innovations can be obtained by fitting a long autoregressive model to the data, and then the likelihood of potential models is computed via a series of standard regressions. They established the asymptotic properties of the procedure under very general conditions. @Gomez98 extended the Hannan-Rissanen identification method to include multiplicative seasonal ARIMA model identification. @TRAMOSEATS98 implemented this automatic identification procedure in the software \pkg{TRAMO} and \pkg{SEATS}. For a given series, the algorithm attempts to find the model with the minimum BIC. @Liu89 proposed a method for identification of seasonal ARIMA models using a filtering method and certain heuristic rules; this algorithm is used in the \pkg{SCA-Expert} software. Another approach is described by @MP00a whose algorithm for univariate ARIMA models also allows intervention analysis. It is implemented in the software package ``Time Series Expert'' (\pkg{TSE-AX}). Other algorithms are in use in commercial software, although they are not documented in the public domain literature. 
In particular, \pkg{Forecast Pro} [@ForecastPro00] is well-known for its excellent automatic ARIMA algorithm which was used in the M3-forecasting competition [@M3comp00]. Another proprietary algorithm is implemented in \pkg{Autobox} [@Reilly00]. @OL96 provide an early review of some of the commercial software that implement automatic ARIMA forecasting.

## Choosing the model order using unit root tests and the AIC

A non-seasonal ARIMA($p,d,q$) process is given by
$$
\phi(B)(1-B)^dy_{t} = c + \theta(B)\varepsilon_t
$$
where $\{\varepsilon_t\}$ is a white noise process with mean zero and variance $\sigma^2$, $B$ is the backshift operator, and $\phi(z)$ and $\theta(z)$ are polynomials of order $p$ and $q$ respectively. To ensure causality and invertibility, it is assumed that $\phi(z)$ and $\theta(z)$ have no roots for $|z|<1$ [@BDbook91]. If $c\ne0$, there is an implied polynomial of order $d$ in the forecast function.

The seasonal ARIMA$(p,d,q)(P,D,Q)_m$ process is given by
$$
\Phi(B^m)\phi(B)(1-B^{m})^D(1-B)^dy_{t} = c + \Theta(B^m)\theta(B)\varepsilon_t
$$
where $\Phi(z)$ and $\Theta(z)$ are polynomials of orders $P$ and $Q$ respectively, each containing no roots inside the unit circle. If $c\ne0$, there is an implied polynomial of order $d+D$ in the forecast function.

The main task in automatic ARIMA forecasting is selecting an appropriate model order, that is, the values of $p$, $q$, $P$, $Q$, $D$ and $d$. If $d$ and $D$ are known, we can select the orders $p$, $q$, $P$ and $Q$ via an information criterion such as the AIC:
$$\mbox{AIC} = -2\log(L) + 2(p+q+P+Q+k)$$
where $k=1$ if $c\ne0$ and 0 otherwise, and $L$ is the maximized likelihood of the model fitted to the \emph{differenced} data $(1-B^m)^D(1-B)^dy_t$. The likelihood of the full model for $y_t$ is not actually defined and so the values of the AIC for different levels of differencing are not comparable.

One solution to this difficulty is the ``diffuse prior'' approach which is outlined in @DKbook01 and implemented in the \code{arima()} function [@Ripley:2002] in \R. In this approach, the initial values of the time series (before the observed values) are assumed to have mean zero and a large variance. However, choosing $d$ and $D$ by minimizing the AIC using this approach tends to lead to over-differencing. For forecasting purposes, we believe it is better to make as few differences as possible because over-differencing harms forecasts [@SY94] and widens prediction intervals. [Although, see @Hendry97 for a contrary view.]

Consequently, we need some other approach to choose $d$ and $D$. We prefer unit-root tests. However, most unit-root tests are based on a null hypothesis that a unit root exists, which biases results towards more differences rather than fewer differences. For example, variations on the Dickey-Fuller test [@DF81] all assume there is a unit root at lag 1, and the HEGY test of @HEGY90 is based on a null hypothesis that there is a seasonal unit root. Instead, we prefer unit-root tests based on a null hypothesis of no unit-root.

For non-seasonal data, we consider ARIMA($p,d,q$) models where $d$ is selected based on successive KPSS unit-root tests [@KPSS92]. That is, we test the data for a unit root; if the test result is significant, we test the differenced data for a unit root; and so on. We stop this procedure when we obtain our first insignificant result.

For seasonal data, we consider ARIMA$(p,d,q)(P,D,Q)_m$ models where $m$ is the seasonal frequency and $D=0$ or $D=1$ depending on an extended Canova-Hansen test [@CH95].
Canova and Hansen only provide critical values for $2<m<13$. Let $C_m$ be the critical value for seasonal period $m$. We plotted $C_m$ against $m$ for values of $m$ up to 365 and noted that they fit the line $C_m = 0.269 m^{0.928}$ almost exactly. So for $m>12$, we use this simple expression to obtain the critical value.

We note in passing that the null hypothesis for the Canova-Hansen test is not an ARIMA model as it includes seasonal dummy terms. It is a test for whether the seasonal pattern changes sufficiently over time to warrant a seasonal unit root, or whether a stable seasonal pattern modelled using fixed dummy variables is more appropriate. Nevertheless, we have found that the test is still useful for choosing $D$ in a strictly ARIMA framework (i.e., without seasonal dummy variables). If a stable seasonal pattern is selected (i.e., the null hypothesis is not rejected), the seasonality is effectively handled by stationary seasonal AR and MA terms.

After $D$ is selected, we choose $d$ by applying successive KPSS unit-root tests to the seasonally differenced data (if $D=1$) or the original data (if $D=0$). Once $d$ (and possibly $D$) are selected, we proceed to select the values of $p$, $q$, $P$ and $Q$ by minimizing the AIC. We allow $c\ne0$ for models where $d+D < 2$.

## A step-wise procedure for traversing the model space

Suppose we have seasonal data and we consider ARIMA$(p,d,q)(P,D,Q)_m$ models where $p$ and $q$ can take values from 0 to 3, and $P$ and $Q$ can take values from 0 to 1. When $c=0$ there is a total of 288 possible models, and when $c\ne 0$ there is a total of 192 possible models, giving 480 models altogether. If the values of $p$, $d$, $q$, $P$, $D$ and $Q$ are allowed to range more widely, the number of possible models increases rapidly. Consequently, it is often not feasible to simply fit every potential model and choose the one with the lowest AIC. Instead, we need a way of traversing the space of models efficiently in order to arrive at the model with the lowest AIC value. We propose a step-wise algorithm as follows.

\begin{description}
\item[Step 1:] We try four possible models to start with.
\begin{itemize}
\item ARIMA($2,d,2$) if $m=1$ and ARIMA($2,d,2)(1,D,1)$ if $m>1$.
\item ARIMA($0,d,0$) if $m=1$ and ARIMA($0,d,0)(0,D,0)$ if $m>1$.
\item ARIMA($1,d,0$) if $m=1$ and ARIMA($1,d,0)(1,D,0)$ if $m>1$.
\item ARIMA($0,d,1$) if $m=1$ and ARIMA($0,d,1)(0,D,1)$ if $m>1$.
\end{itemize}
If $d+D \le 1$, these models are fitted with $c\ne0$. Otherwise, we set $c=0$. Of these four models, we select the one with the smallest AIC value. This is called the ``current'' model and is denoted by ARIMA($p,d,q$) if $m=1$ or ARIMA($p,d,q)(P,D,Q)_m$ if $m>1$.
\item[Step 2:] We consider up to seventeen variations on the current model:
\begin{itemize}
\item where one of $p$, $q$, $P$ and $Q$ is allowed to vary by $\pm1$ from the current model;
\item where $p$ and $q$ both vary by $\pm1$ from the current model;
\item where $P$ and $Q$ both vary by $\pm1$ from the current model;
\item where the constant $c$ is included if the current model has $c=0$ or excluded if the current model has $c\ne0$.
\end{itemize}
Whenever a model with lower AIC is found, it becomes the new ``current'' model and the procedure is repeated. This process finishes when we cannot find a model close to the current model with lower AIC.
\end{description}

There are several constraints on the fitted models to avoid problems with convergence or near unit-roots. The constraints are outlined below.
\begin{compactitem}\itemsep=8pt
\item The values of $p$ and $q$ are not allowed to exceed specified upper bounds (with default values of 5 in each case).
\item The values of $P$ and $Q$ are not allowed to exceed specified upper bounds (with default values of 2 in each case).
\item We reject any model which is ``close'' to non-invertible or non-causal. Specifically, we compute the roots of $\phi(B)\Phi(B)$ and $\theta(B)\Theta(B)$. If either has a root that is smaller than 1.001 in absolute value, the model is rejected.
\item If there are any errors arising in the non-linear optimization routine used for estimation, the model is rejected. The rationale here is that any model that is difficult to fit is probably not a good model for the data.
\end{compactitem}

The algorithm is guaranteed to return a valid model because the model space is finite and at least one of the starting models will be accepted (the model with no AR or MA parameters). The selected model is used to produce forecasts.

## Comparisons with exponential smoothing

There is a widespread myth that ARIMA models are more general than exponential smoothing. This is not true. The two classes of models overlap. The linear exponential smoothing models are all special cases of ARIMA models---the equivalences are discussed in @HAA08. However, the non-linear exponential smoothing models have no equivalent ARIMA counterpart. On the other hand, there are many ARIMA models which have no exponential smoothing counterpart. Thus, the two model classes overlap and are complementary; each has its strengths and weaknesses.

The exponential smoothing state space models are all non-stationary. Models with seasonality or non-damped trend (or both) have two unit roots; all other models---that is, non-seasonal models with either no trend or damped trend---have one unit root. It is possible to define a stationary model with similar characteristics to exponential smoothing, but this is not normally done. The philosophy of exponential smoothing is that the world is non-stationary. So if a stationary model is required, ARIMA models are better.

One advantage of the exponential smoothing models is that they can be non-linear. So time series that exhibit non-linear characteristics, including heteroscedasticity, may be better modelled using exponential smoothing state space models.

For seasonal data, there are many more ARIMA models than the 30 possible models in the exponential smoothing class of Section \ref{sec:expsmooth}. It may be thought that the larger model class is advantageous. However, the results in @HKSG02 show that the exponential smoothing models performed better than the ARIMA models for the seasonal M3 competition data. (For the annual M3 data, the ARIMA models performed better.) In a discussion of these results, @Hyndman01 speculates that the larger model space of ARIMA models actually harms forecasting performance because it introduces additional uncertainty. The smaller exponential smoothing class is sufficiently rich to capture the dynamics of almost all real business and economic time series.

# The forecast package {#sec:package}

The algorithms and modelling frameworks for automatic univariate time series forecasting are implemented in the \pkg{forecast} package in \R. We illustrate the methods using the following four real time series shown in Figure \ref{fig:etsexamples}.

\begin{compactitem}
\item Figure \ref{fig:etsexamples}(a) shows 125 monthly US government bond yields (percent per annum) from January 1994 to May 2004.
\item Figure \ref{fig:etsexamples}(b) displays 55 observations of annual US net electricity generation (billion kwh) for 1949 through 2003. \item Figure \ref{fig:etsexamples}(c) presents 113 quarterly observations of passenger motor vehicle production in the U.K. (thousands of cars) for the first quarter of 1977 through the first quarter of 2005. \item Figure \ref{fig:etsexamples}(d) shows 240 monthly observations of the number of short term overseas visitors to Australia from May 1985 to April 2005. \end{compactitem} ```{r etsexamples, fig.height=7, fig.width=9, echo=FALSE, fig.cap="Four time series showing point forecasts and 80\\% \\& 95\\% prediction intervals obtained using exponential smoothing state space models."} par(mfrow = c(2,2)) mod1 <- ets(bonds) mod2 <- ets(usnetelec) mod3 <- ets(ukcars) mod4 <- ets(visitors) plot(forecast(mod1), main="(a) US 10-year bonds yield", xlab="Year", ylab="Percentage per annum") plot(forecast(mod2), main="(b) US net electricity generation", xlab="Year", ylab="Billion kwh") plot(forecast(mod3), main="(c) UK passenger motor vehicle production", xlab="Year", ylab="Thousands of cars") plot(forecast(mod4), main="(d) Overseas visitors to Australia", xlab="Year", ylab="Thousands of people") ``` ```{r etsnames, echo=FALSE} etsnames <- c(mod1$method, mod2$method, mod3$method, mod4$method) etsnames <- gsub("Ad","A\\\\damped",etsnames) ``` ## Implementation of the automatic exponential smoothing algorithm The innovations state space modelling framework described in Section \ref{sec:expsmooth} is implemented via the \code{ets()} function in the \pkg{forecast} package. (The default settings of \code{ets()} do not allow models with multiplicative trend, but they can be included using \code{allow.multiplicative.trend=TRUE}.) 
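Other arguments of \code{ets()} restrict or extend the model space that is searched. The calls below are illustrative:

```{r ets-options-sketch, eval=FALSE, echo=TRUE}
library(forecast)
ets(usnetelec)                                     # automatic model selection
ets(usnetelec, additive.only = TRUE)               # restrict the search to additive components
ets(usnetelec, allow.multiplicative.trend = TRUE)  # admit M and Md trends as well
ets(usnetelec, model = "AAN", damped = TRUE)       # fit one fully specified model: ETS(A,Ad,N)
```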
The models chosen via the algorithm for the four data sets were: \begin{compactitem} \item `r etsnames[1]` for monthly US 10-year bonds yield\\ ($\alpha=`r format(coef(mod1)['alpha'], digits=4, nsmall=4)`$, $\beta=`r format(coef(mod1)['beta'], digits=4, nsmall=4)`$, $\phi=`r format(coef(mod1)['phi'], digits=4, nsmall=4)`$, $\ell_0 = `r format(coef(mod1)['l'], digits=4, nsmall=4)`$, $b_0=`r format(coef(mod1)['b'], digits=4, nsmall=4)`$); \item `r etsnames[2]` for annual US net electricity generation\\ ($\alpha=`r format(coef(mod2)['alpha'], digits=4, nsmall=4)`$, $\beta=`r format(coef(mod2)['beta'], digits=4, nsmall=4)`$, $\ell_0 = `r format(coef(mod2)['l'], digits=4, nsmall=4)`$, $b_0=`r format(coef(mod2)['b'], digits=4, nsmall=4)`$); \item `r etsnames[3]` for quarterly UK motor vehicle production\\ ($\alpha=`r format(coef(mod3)['alpha'], digits=4, nsmall=4)`$, $\gamma=`r format(coef(mod3)['gamma'], digits=4, nsmall=4)`$, $\ell_0 = `r format(coef(mod3)['l'], digits=4, nsmall=4)`$, $s_{-3}=`r format(-sum(coef(mod3)[c('s0','s1','s2')]), digits=4, nsmall=4)`$, $s_{-2}=`r format(coef(mod3)['s2'], digits=4, nsmall=4)`$, $s_{-1}=`r format(coef(mod3)['s1'], digits=4, nsmall=4)`$, $s_0=`r format(coef(mod3)['s0'], digits=4, nsmall=4)`$); \item `r etsnames[4]` for monthly Australian overseas visitors\\ ($\alpha=`r format(coef(mod4)['alpha'], digits=4, nsmall=4)`$, $\beta=`r format(coef(mod4)['beta'], digits=2, nsmall=4)`$, $\gamma=`r format(coef(mod4)['gamma'], digits=4, nsmall=4)`$, $\ell_0 = `r format(coef(mod4)['l'], digits=4, nsmall=4)`$, $b_0 = `r format(coef(mod4)['b'], digits=4, nsmall=4)`$, $s_{-11}=`r format(12-sum(tail(coef(mod4),11)), digits=4, nsmall=4)`$, $s_{-10}=`r format(coef(mod4)['s10'], digits=4, nsmall=4)`$, $s_{-9}=`r format(coef(mod4)['s9'], digits=4, nsmall=4)`$, $s_{-8}=`r format(coef(mod4)['s8'], digits=4, nsmall=4)`$, $s_{-7}=`r format(coef(mod4)['s7'], digits=4, nsmall=4)`$, $s_{-6}=`r format(coef(mod4)['s6'], digits=4, nsmall=4)`$, $s_{-5}=`r format(coef(mod4)['s5'], digits=4, nsmall=4)`$, $s_{-4}=`r format(coef(mod4)['s4'], digits=4, nsmall=4)`$, $s_{-3}=`r format(coef(mod4)['s3'], digits=4, nsmall=4)`$, $s_{-2}=`r format(coef(mod4)['s2'], digits=4, nsmall=4)`$, $s_{-1}=`r format(coef(mod4)['s1'], digits=4, nsmall=4)`$, $s_0=`r format(coef(mod4)['s0'], digits=4, nsmall=4)`$). \end{compactitem} Although there is a lot of computation involved, it can be handled remarkably quickly on modern computers. Each of the forecasts shown in Figure \ref{fig:etsexamples} took no more than a few seconds on a standard PC. The US electricity generation series took the longest as there are no analytical prediction intervals available for the ETS(M,M\damped,N) model. Consequently, the prediction intervals for this series were computed using simulation of 5000 future sample paths. To apply the algorithm to the US net electricity generation time series \code{usnetelec}, we use the following command. ```{r ets-usnetelec, echo=TRUE} etsfit <- ets(usnetelec) ``` The object \code{etsfit} is of class ``\code{ets}'' and contains all of the necessary information about the fitted model including model parameters, the value of the state vector $\bm{x}_t$ for all $t$, residuals and so on. Printing the \code{etsfit} object shows the main items of interest. ```{r ets-usnetelec-print,echo=TRUE} etsfit ``` Some goodness-of-fit measures [defined in @HK06] are obtained using \code{accuracy()}. 
```{r ets-usnetelec-accuracy,eval=TRUE,echo=TRUE} accuracy(etsfit) ``` There are also \code{coef()}, \code{plot()}, \code{summary()}, \code{residuals()}, \code{fitted()} and \code{simulate()} methods for objects of class ``\code{ets}''. The \code{plot()} function shows time plots of the original time series along with the extracted components (level, growth and seasonal). The \code{forecast()} function computes the required forecasts which are then plotted as in Figure \ref{fig:etsexamples}(b). ```{r ets-usnetelec-fcast, fig.height=5, fig.width=8, message=FALSE, warning=FALSE, include=FALSE, output=FALSE} fcast <- forecast(etsfit) plot(fcast) ``` Printing the \code{fcast} object gives a table showing the prediction intervals. ```{r ets-usnetelec-fcast-print,eval=TRUE,echo=TRUE} fcast ``` The \code{ets()} function also provides the useful feature of applying a fitted model to a new data set. For example, we could withhold 10 observations from the \code{usnetelec} data set when fitting, then compute the one-step forecast errors for the out-of-sample data. ```{r ets-usnetelec-newdata,eval=FALSE,echo=TRUE} fit <- ets(usnetelec[1:45]) test <- ets(usnetelec[46:55], model = fit) accuracy(test) ``` We can also look at the measures of forecast accuracy where the forecasts are based on only the fitting data. ```{r ets-usnetelec-fcast-accuracy,eval=FALSE,echo=TRUE} accuracy(forecast(fit,10), usnetelec[46:55]) ``` ## The HoltWinters() function There is another implementation of exponential smoothing in \R\ via the \code{HoltWinters()} function [@Meyer:2002] in the \pkg{stats} package. It implements only the (N,N), (A,N), (A,A) and (A,M) methods. The initial states $\bm{x}_0$ are fixed using a heuristic algorithm. Because of the way the initial states are estimated, a full three years of seasonal data are required to implement the seasonal forecasts using \code{HoltWinters()}. (See @shortseasonal for the minimal sample size required.) The smoothing parameters are optimized by minimizing the average squared prediction errors, which is equivalent to minimizing \eqref{likelihood} in the case of additive errors. There is a \code{predict()} method for the resulting object which can produce point forecasts and prediction intervals. Although it is nowhere documented, it appears that the prediction intervals produced by \code{predict()} for an object of class \code{HoltWinters} are based on an equivalent ARIMA model in the case of the (N,N), (A,N) and (A,A) methods, assuming additive errors. These prediction intervals are equivalent to the prediction intervals that arise from the (A,N,N), (A,A,N) and (A,A,A) state space models. For the (A,M) method, the prediction interval provided by \code{predict()} appears to be based on @CY91 which is an approximation to the true prediction interval arising from the (A,A,M) model. Prediction intervals with multiplicative errors are not possible using the \code{HoltWinters()} function. 
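The correspondence can be checked numerically. In the sketch below (the data set is an arbitrary choice), both functions fit the additive Holt-Winters' method, but the estimates will usually differ slightly because \code{HoltWinters()} fixes the initial states heuristically and minimizes the squared one-step prediction errors, whereas \code{ets()} estimates the initial states and the smoothing parameters jointly by maximum likelihood.

```{r hw-vs-ets-sketch, eval=FALSE, echo=TRUE}
library(forecast)
hwfit  <- HoltWinters(ukcars)                        # the (A,A) method
etsfit <- ets(ukcars, model = "AAA", damped = FALSE) # the ETS(A,A,A) model
c(hw = hwfit$alpha, ets = coef(etsfit)["alpha"])     # similar, but not identical
```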
## Implementation of the automatic ARIMA algorithm

```{r arimaexamples, fig.height=7, fig.width=9, echo=FALSE, fig.cap="Four time series showing point forecasts and 80\\% \\& 95\\% prediction intervals obtained using ARIMA models."}
mod1 <- auto.arima(bonds, seasonal=FALSE, approximation=FALSE)
mod2 <- auto.arima(usnetelec)
mod3 <- auto.arima(ukcars)
mod4 <- auto.arima(visitors)
par(mfrow = c(2,2))
plot(forecast(mod1), main="(a) US 10-year bonds yield", xlab="Year", ylab="Percentage per annum")
plot(forecast(mod2), main="(b) US net electricity generation", xlab="Year", ylab="Billion kwh")
plot(forecast(mod3), main="(c) UK passenger motor vehicle production", xlab="Year", ylab="Thousands of cars")
plot(forecast(mod4), main="(d) Overseas visitors to Australia", xlab="Year", ylab="Thousands of people")
```

The algorithm of Section \ref{sec:arima} is applied to the same four time series. Unlike the exponential smoothing algorithm, the ARIMA class of models assumes homoscedasticity, which is not always appropriate. Consequently, transformations are sometimes necessary. For these four time series, we model the raw data for series (a)--(c), but the logged data for series (d). The prediction intervals are back-transformed with the point forecasts to preserve the probability coverage.

To apply this algorithm to the US net electricity generation time series \code{usnetelec}, we use the following commands.

```{r arima-auto-fcast,eval=TRUE,echo=TRUE,fig.show="hide"}
arimafit <- auto.arima(usnetelec)
fcast <- forecast(arimafit)
plot(fcast)
```

```{r arimanames, echo=FALSE}
# Convert character strings to latex
arimanames <- c(as.character(mod1), as.character(mod2), as.character(mod3), as.character(mod4))
arimanames <- gsub("\\[([0-9]*)\\]", "$_{\\1}$", arimanames)
```

The function \code{auto.arima()} implements the algorithm of Section \ref{sec:arima} and returns an object of class \code{Arima}. The resulting forecasts are shown in Figure \ref{fig:arimaexamples}. The fitted models are as follows:
\begin{compactitem}
\item `r arimanames[1]` for monthly US 10-year bonds yield\\ ($\theta_1= `r format(coef(mod1)['ma1'], digits=4, nsmall=4)`$);
\item `r arimanames[2]` for annual US net electricity generation\\ ($\phi_1= `r format(coef(mod2)['ar1'], digits=4, nsmall=4)`$; $\phi_2= `r format(coef(mod2)['ar2'], digits=4, nsmall=4)`$; $\theta_1= `r format(coef(mod2)['ma1'], digits=4, nsmall=4)`$; $\theta_2= `r format(coef(mod2)['ma2'], digits=4, nsmall=4)`$; $c= `r format(coef(mod2)['drift'], digits=4, nsmall=4)`$);
\item `r arimanames[3]` for quarterly UK motor vehicle production\\ ($\phi_1= `r format(coef(mod3)['ar1'], digits=4, nsmall=4)`$; $\phi_2= `r format(coef(mod3)['ar2'], digits=4, nsmall=4)`$; $\Phi_1= `r format(coef(mod3)['sar1'], digits=4, nsmall=4)`$; $\Phi_2= `r format(coef(mod3)['sar2'], digits=4, nsmall=4)`$);
\item `r arimanames[4]` for monthly Australian overseas visitors\\ ($\phi_1= `r format(coef(mod4)['ar1'], digits=4, nsmall=4)`$; $\theta_1= `r format(coef(mod4)['ma1'], digits=4, nsmall=4)`$; $\Theta_1= `r format(coef(mod4)['sma1'], digits=4, nsmall=4)`$; $\Theta_2= `r format(coef(mod4)['sma2'], digits=4, nsmall=4)`$; $c= `r format(coef(mod4)['drift'], digits=4, nsmall=4)`$).
\end{compactitem}

Note that the \R\ parameterization has $\theta(B) = (1 + \theta_1B + \dots + \theta_qB^q)$ and $\phi(B) = (1 - \phi_1B - \dots - \phi_pB^p)$, and similarly for the seasonal terms.

A summary of the forecasts is available, part of which is shown below.
``` Forecast method: ARIMA(2,1,2) with drift Series: usnetelec Coefficients: ar1 ar2 ma1 ma2 drift -1.3032 -0.4332 1.5284 0.8340 66.1585 s.e. 0.2122 0.2084 0.1417 0.1185 7.5595 sigma^2 estimated as 2262: log likelihood=-283.34 AIC=578.67 AICc=580.46 BIC=590.61 Error measures: ME RMSE MAE MPE MAPE MASE ACF1 Training set 0.046402 44.894 32.333 -0.61771 2.1012 0.45813 0.022492 Forecasts: Point Forecast Lo 80 Hi 80 Lo 95 Hi 95 2004 3968.957 3908.002 4029.912 3875.734 4062.180 2005 3970.350 3873.950 4066.751 3822.919 4117.782 2006 4097.171 3971.114 4223.228 3904.383 4289.959 2007 4112.332 3969.691 4254.973 3894.182 4330.482 2008 4218.671 4053.751 4383.591 3966.448 4470.894 2009 4254.559 4076.108 4433.010 3981.641 4527.476 2010 4342.760 4147.088 4538.431 4043.505 4642.014 2011 4393.306 4185.211 4601.401 4075.052 4711.560 2012 4470.261 4248.068 4692.455 4130.446 4810.077 2013 4529.113 4295.305 4762.920 4171.535 4886.690 ``` The training set error measures for the two models are very similar. Note that the information criteria are not comparable. The \pkg{forecast} package also contains the function \code{Arima()} which is largely a wrapper to the \code{arima()} function in the \pkg{stats} package. The \code{Arima()} function in the \pkg{forecast} package makes it easier to include a drift term when $d+D=1$. (Setting \code{include.mean=TRUE} in the \code{arima()} function from the \pkg{stats} package will only work when $d+D=0$.) It also provides the facility for fitting an existing ARIMA model to a new data set (as was demonstrated for the \code{ets()} function earlier). One-step forecasts for ARIMA models are now available via a \code{fitted()} function. We also provide a new function \code{arima.errors()} which returns the original time series after adjusting for regression variables. If there are no regression variables in the ARIMA model, then the errors will be identical to the original series. If there are regression variables in the ARIMA model, then the errors will be equal to the original series minus the effect of the regression variables, but leaving in the serial correlation that is modelled with the AR and MA terms. In contrast, \code{residuals()} provides true residuals, removing the AR and MA terms as well. The generic functions \code{summary()}, \code{print()}, \code{fitted()} and \code{forecast()} apply to models obtained from either the \code{Arima()} or \code{arima()} functions. ## The forecast() function The \code{forecast()} function is generic and has S3 methods for a wide range of time series models. It computes point forecasts and prediction intervals from the time series model. Methods exist for models fitted using \code{ets()}, \code{auto.arima()}, \code{Arima()}, \code{arima()}, \code{ar()}, \code{HoltWinters()} and \texttt{StructTS()}. There is also a method for a \code{ts} object. If a time series object is passed as the first argument to \code{forecast()}, the function will produce forecasts based on the exponential smoothing algorithm of Section \ref{sec:expsmooth}. In most cases, there is an existing \code{predict()} function which is intended to do much the same thing. Unfortunately, the resulting objects from the \code{predict()} function contain different information in each case and so it is not possible to build generic functions (such as \code{plot()} and \code{summary()}) for the results. So, instead, \code{forecast()} acts as a wrapper to \code{predict()}, and packages the information obtained in a common format (the \code{forecast} class). 
We also define a default \code{predict()} method which is used when no existing \code{predict()} function exists, and calls the relevant \code{forecast()} function. Thus, \code{predict()} methods parallel \code{forecast()} methods, but the latter provide consistent output that is more usable.

\subsection[The forecast class]{The \code{forecast} class}

The output from the \code{forecast()} function is an object of class ``\code{forecast}'' and includes at least the following information:
\begin{compactitem}
\item the original series;
\item point forecasts;
\item prediction intervals of specified coverage;
\item the forecasting method used and information about the fitted model;
\item residuals from the fitted model;
\item one-step forecasts from the fitted model for the period of the observed data.
\end{compactitem}

There are \code{print()}, \code{plot()} and \code{summary()} methods for the ``\code{forecast}'' class. Figures \ref{fig:etsexamples} and \ref{fig:arimaexamples} were produced using the \code{plot()} method. The prediction intervals are, by default, computed for 80\% and 95\% coverage, although other values are possible if requested. Fan charts [@Wallis99] are possible using the combination \verb|plot(forecast(model.object, fan = TRUE))|.

## Other functions {#sec:other}

We now briefly describe some of the other features of the \pkg{forecast} package. Each of the following functions produces an object of class ``\code{forecast}''.

\code{croston()} : implements the method of @Croston72 for intermittent demand forecasting. In this method, the time series is decomposed into two separate sequences: the non-zero values and the time intervals between non-zero values. These are then independently forecast using simple exponential smoothing and the forecasts of the original series are obtained as ratios of the two sets of forecasts. No prediction intervals are provided because there is no underlying stochastic model [@SH05].

\code{thetaf()} : provides forecasts from the Theta method [@AN00]. @HB03 showed that these were equivalent to a special case of simple exponential smoothing with drift.

\code{splinef()} : gives cubic-spline forecasts, based on fitting a cubic spline to the historical data and extrapolating it linearly. The details of this method, and the associated prediction intervals, are discussed in @HKPB05.

\code{meanf()} : returns forecasts based on the historical mean.

\code{rwf()} : gives ``naïve'' forecasts equal to the most recent observation assuming a random walk model. This function also allows forecasting using a random walk with drift.

In addition, there are some new plotting functions for time series.

\code{tsdisplay()} : provides a time plot along with an ACF and PACF.

\code{seasonplot()} : produces a seasonal plot as described in @MWH3.
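Each of these functions is called in the same way, and each of the forecasting functions returns an object that the \code{forecast}-class methods understand. For example (the data sets and horizons here are arbitrary):

```{r other-functions-sketch, eval=FALSE, echo=TRUE}
library(forecast)
meanf(usnetelec, h = 10)              # historical mean
rwf(usnetelec, h = 10, drift = TRUE)  # random walk with drift
thetaf(usnetelec, h = 10)             # Theta method
splinef(usnetelec, h = 10)            # cubic spline forecasts
tsdisplay(ukcars)                     # time plot with ACF and PACF
```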
\newpage

# Bibliography