forecast/NAMESPACE

# Generated by roxygen2: do not edit by hand

S3method("[",msts)
S3method(accuracy,default)
S3method(accuracy,mforecast)
S3method(as.Date,timeDate)
S3method(as.character,Arima)
S3method(as.character,bats)
S3method(as.character,ets)
S3method(as.character,tbats)
S3method(as.data.frame,forecast)
S3method(as.data.frame,mforecast)
S3method(as.ts,forecast)
S3method(autolayer,forecast)
S3method(autolayer,mforecast)
S3method(autolayer,msts)
S3method(autolayer,mts)
S3method(autolayer,ts)
S3method(autoplot,Arima)
S3method(autoplot,StructTS)
S3method(autoplot,acf)
S3method(autoplot,ar)
S3method(autoplot,bats)
S3method(autoplot,decomposed.ts)
S3method(autoplot,ets)
S3method(autoplot,forecast)
S3method(autoplot,mforecast)
S3method(autoplot,mpacf)
S3method(autoplot,mstl)
S3method(autoplot,msts)
S3method(autoplot,mts)
S3method(autoplot,seas)
S3method(autoplot,splineforecast)
S3method(autoplot,stl)
S3method(autoplot,tbats)
S3method(autoplot,ts)
S3method(coef,ets)
S3method(fitted,ARFIMA)
S3method(fitted,Arima)
S3method(fitted,ar)
S3method(fitted,bats)
S3method(fitted,ets)
S3method(fitted,forecast_ARIMA)
S3method(fitted,lagwalk)
S3method(fitted,modelAR)
S3method(fitted,nnetar)
S3method(fitted,tbats)
S3method(fitted,tslm)
S3method(forecast,Arima)
S3method(forecast,HoltWinters)
S3method(forecast,StructTS)
S3method(forecast,ar)
S3method(forecast,baggedModel)
S3method(forecast,bats)
S3method(forecast,default)
S3method(forecast,ets)
S3method(forecast,forecast)
S3method(forecast,forecast_ARIMA)
S3method(forecast,fracdiff)
S3method(forecast,lagwalk)
S3method(forecast,lm)
S3method(forecast,mlm)
S3method(forecast,modelAR)
S3method(forecast,mstl)
S3method(forecast,mts)
S3method(forecast,nnetar)
S3method(forecast,stl)
S3method(forecast,stlm)
S3method(forecast,tbats)
S3method(forecast,ts)
S3method(forecast,varest)
S3method(fortify,ts)
S3method(getResponse,Arima)
S3method(getResponse,ar)
S3method(getResponse,baggedModel)
S3method(getResponse,bats)
S3method(getResponse,default)
S3method(getResponse,fracdiff)
S3method(getResponse,lm)
S3method(getResponse,mforecast)
S3method(getResponse,tbats)
S3method(head,ts)
S3method(logLik,ets)
S3method(nobs,ets)
S3method(plot,Arima)
S3method(plot,ar)
S3method(plot,bats)
S3method(plot,ets)
S3method(plot,forecast)
S3method(plot,mforecast)
S3method(plot,splineforecast)
S3method(plot,tbats)
S3method(predict,default)
S3method(print,CVar)
S3method(print,OCSBtest)
S3method(print,baggedModel)
S3method(print,bats)
S3method(print,ets)
S3method(print,forecast)
S3method(print,forecast_ARIMA)
S3method(print,lagwalk)
S3method(print,mforecast)
S3method(print,modelAR)
S3method(print,msts)
S3method(print,nnetar)
S3method(print,nnetarmodels)
S3method(print,summary.Arima)
S3method(print,summary.ets)
S3method(print,summary.forecast)
S3method(print,summary.mforecast)
S3method(print,tbats)
S3method(residuals,ARFIMA)
S3method(residuals,Arima)
S3method(residuals,ar)
S3method(residuals,bats)
S3method(residuals,ets)
S3method(residuals,forecast)
S3method(residuals,forecast_ARIMA)
S3method(residuals,nnetar)
S3method(residuals,stlm)
S3method(residuals,tbats)
S3method(residuals,tslm)
S3method(seasadj,decomposed.ts)
S3method(seasadj,mstl)
S3method(seasadj,seas)
S3method(seasadj,stl)
S3method(seasadj,tbats)
S3method(simulate,Arima)
S3method(simulate,ar)
S3method(simulate,ets)
S3method(simulate,fracdiff)
S3method(simulate,lagwalk)
S3method(simulate,modelAR)
S3method(simulate,nnetar)
S3method(subset,msts)
S3method(subset,ts)
S3method(summary,Arima)
S3method(summary,ets)
S3method(summary,forecast)
S3method(summary,mforecast)
S3method(summary,tslm)
S3method(tail,ts)
S3method(window,msts)
export("%>%")
export(Acf)
export(Arima)
export(BoxCox)
export(BoxCox.lambda)
export(CV)
export(CVar)
export(Ccf)
export(GeomForecast)
export(InvBoxCox)
export(Pacf)
export(StatForecast)
export(accuracy)
export(arfima)
export(arima.errors)
export(arimaorder)
export(auto.arima)
export(autolayer)
export(autoplot)
export(baggedETS)
export(baggedModel)
export(bats)
export(bizdays)
export(bld.mbb.bootstrap)
export(checkresiduals)
export(croston)
export(dm.test)
export(dshw)
export(easter)
export(ets)
export(findfrequency)
export(forecast)
export(forecast.ets)
export(fourier)
export(fourierf)
export(geom_forecast)
export(getResponse)
export(ggAcf)
export(ggCcf)
export(ggPacf)
export(gghistogram)
export(gglagchull)
export(gglagplot)
export(ggmonthplot)
export(ggseasonplot)
export(ggsubseriesplot)
export(ggtaperedacf)
export(ggtaperedpacf)
export(ggtsdisplay)
export(holt)
export(hw)
export(is.Arima)
export(is.acf)
export(is.baggedModel)
export(is.bats)
export(is.constant)
export(is.ets)
export(is.forecast)
export(is.mforecast)
export(is.modelAR)
export(is.nnetar)
export(is.nnetarmodels)
export(is.splineforecast)
export(is.stlm)
export(ma)
export(meanf)
export(modelAR)
export(monthdays)
export(mstl)
export(msts)
export(na.interp)
export(naive)
export(ndiffs)
export(nnetar)
export(nsdiffs)
export(ocsb.test)
export(remainder)
export(rwf)
export(seasadj)
export(seasonal)
export(seasonaldummy)
export(seasonaldummyf)
export(seasonplot)
export(ses)
export(sindexf)
export(snaive)
export(splinef)
export(stlf)
export(stlm)
export(taperedacf)
export(taperedpacf)
export(tbats)
export(tbats.components)
export(thetaf)
export(trendcycle)
export(tsCV)
export(tsclean)
export(tsdisplay)
export(tslm)
export(tsoutliers)
import(Rcpp)
import(parallel)
importFrom(colorspace,sequential_hcl)
importFrom(fracdiff,diffseries)
importFrom(fracdiff,fracdiff)
importFrom(fracdiff,fracdiff.sim)
importFrom(ggplot2,autoplot)
importFrom(ggplot2,fortify)
importFrom(grDevices,gray)
importFrom(grDevices,heat.colors)
importFrom(grDevices,nclass.FD)
importFrom(grDevices,palette)
importFrom(graphics,abline)
importFrom(graphics,axis)
importFrom(graphics,grid)
importFrom(graphics,hist)
importFrom(graphics,layout)
importFrom(graphics,lines)
importFrom(graphics,mtext)
importFrom(graphics,par)
importFrom(graphics,plot)
importFrom(graphics,points)
importFrom(graphics,polygon)
importFrom(graphics,text)
importFrom(graphics,title)
importFrom(lmtest,bgtest)
importFrom(magrittr,"%>%")
importFrom(nnet,nnet)
importFrom(stats,"tsp<-")
importFrom(stats,AIC)
importFrom(stats,BIC)
importFrom(stats,Box.test)
importFrom(stats,acf)
importFrom(stats,aggregate)
importFrom(stats,approx)
importFrom(stats,ar)
importFrom(stats,arima)
importFrom(stats,arima.sim)
importFrom(stats,as.formula)
importFrom(stats,as.ts)
importFrom(stats,complete.cases)
importFrom(stats,cycle)
importFrom(stats,decompose)
importFrom(stats,diffinv)
importFrom(stats,end)
importFrom(stats,extractAIC)
importFrom(stats,filter)
importFrom(stats,fitted)
importFrom(stats,formula)
importFrom(stats,frequency)
importFrom(stats,hatvalues)
importFrom(stats,is.mts)
importFrom(stats,is.ts)
importFrom(stats,ksmooth)
importFrom(stats,lm)
importFrom(stats,loess)
importFrom(stats,logLik)
importFrom(stats,lsfit)
importFrom(stats,median)
importFrom(stats,model.frame)
importFrom(stats,na.contiguous)
importFrom(stats,na.exclude)
importFrom(stats,na.omit)
importFrom(stats,na.pass)
importFrom(stats,napredict)
importFrom(stats,nobs)
importFrom(stats,optim)
importFrom(stats,optimize)
importFrom(stats,pf)
importFrom(stats,plot.ts)
importFrom(stats,poly)
importFrom(stats,predict)
importFrom(stats,pt)
importFrom(stats,qnorm)
importFrom(stats,qt)
importFrom(stats,quantile)
importFrom(stats,reformulate)
importFrom(stats,residuals)
importFrom(stats,rnorm)
importFrom(stats,runif)
importFrom(stats,sd)
importFrom(stats,simulate)
importFrom(stats,smooth.spline)
importFrom(stats,spec.ar)
importFrom(stats,start)
importFrom(stats,stl)
importFrom(stats,supsmu)
importFrom(stats,terms)
importFrom(stats,time)
importFrom(stats,ts)
importFrom(stats,tsdiag)
importFrom(stats,tsp)
importFrom(stats,var)
importFrom(stats,window)
importFrom(timeDate,Easter)
importFrom(timeDate,as.Date.timeDate)
importFrom(timeDate,as.timeDate)
importFrom(timeDate,difftimeDate)
importFrom(timeDate,isBizday)
importFrom(tseries,adf.test)
importFrom(tseries,kpss.test)
importFrom(tseries,pp.test)
importFrom(urca,ur.df)
importFrom(urca,ur.kpss)
importFrom(urca,ur.pp)
importFrom(utils,head)
importFrom(utils,head.matrix)
importFrom(utils,methods)
importFrom(utils,packageVersion)
importFrom(utils,tail)
importFrom(utils,tail.matrix)
importFrom(zoo,as.Date)
importFrom(zoo,as.yearqtr)
importFrom(zoo,rollmean)
useDynLib(forecast, .registration = TRUE)

forecast/README.md

forecast
======================

![R build status](https://github.com/robjhyndman/forecast/workflows/R-CMD-check/badge.svg)
[![CRAN_Status_Badge](https://www.r-pkg.org/badges/version/forecast)](https://cran.r-project.org/package=forecast)
[![cran checks](https://cranchecks.info/badges/worst/forecast)](https://cran.r-project.org/web/checks/check_results_forecast.html)
[![Lifecycle: retired](https://img.shields.io/badge/lifecycle-retired-orange.svg)](https://lifecycle.r-lib.org/articles/stages.html#retired)
[![Downloads](https://cranlogs.r-pkg.org/badges/forecast)](https://cran.r-project.org/package=forecast)
[![Licence](https://img.shields.io/badge/licence-GPL--3-blue.svg)](https://www.gnu.org/licenses/gpl-3.0.en.html)

The R package *forecast* provides methods and tools for displaying and analysing univariate time series forecasts, including exponential smoothing via state space models and automatic ARIMA modelling.

This package is now retired in favour of the [fable](http://fable.tidyverts.org/) package. The forecast package will remain in its current state and will be maintained with bug fixes only. For the latest features and development, we recommend forecasting with the fable package.

## Installation

You can install the **stable** version from [CRAN](https://cran.r-project.org/package=forecast).
```s
install.packages('forecast', dependencies = TRUE)
```

You can install the **development** version from [GitHub](https://github.com/robjhyndman/forecast).

```s
# install.packages("remotes")
remotes::install_github("robjhyndman/forecast")
```

## Usage

```s
library(forecast)
library(ggplot2)

# ETS forecasts
USAccDeaths %>%
  ets() %>%
  forecast() %>%
  autoplot()

# Automatic ARIMA forecasts
WWWusage %>%
  auto.arima() %>%
  forecast(h=20) %>%
  autoplot()

# ARFIMA forecasts
library(fracdiff)
x <- fracdiff.sim(100, ma=-.4, d=.3)$series
arfima(x) %>%
  forecast(h=30) %>%
  autoplot()

# Forecasting with STL
USAccDeaths %>%
  stlm(modelfunction=ar) %>%
  forecast(h=36) %>%
  autoplot()

AirPassengers %>%
  stlf(lambda=0) %>%
  autoplot()

USAccDeaths %>%
  stl(s.window='periodic') %>%
  forecast() %>%
  autoplot()

# TBATS forecasts
USAccDeaths %>%
  tbats() %>%
  forecast() %>%
  autoplot()

taylor %>%
  tbats() %>%
  forecast() %>%
  autoplot()
```

## For more information

* Get started in forecasting with the online textbook at http://OTexts.org/fpp2/
* Read the Hyndsight blog at https://robjhyndman.com/hyndsight/
* Ask forecasting questions on http://stats.stackexchange.com/tags/forecasting
* Ask R questions on http://stackoverflow.com/tags/forecasting+r
* Join the International Institute of Forecasters: http://forecasters.org/

## License

This package is free and open source software, licensed under GPL-3.
forecast/data/gold.rda [binary R data file; contents omitted]
forecast/data/woolyrnq.rda [binary R data file; contents omitted]
forecast/data/taylor.rda [binary R data file; contents omitted]
forecast/data/gas.rda [binary R data file; contents omitted]
forecast/data/wineind.rda [binary R data file; contents omitted]

forecast/man/forecast.lm.Rd

% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/lm.R
\name{forecast.lm}
\alias{forecast.lm}
\title{Forecast a linear model with possible time series components}
\usage{
\method{forecast}{lm}(
  object,
  newdata,
  h = 10,
  level = c(80, 95),
  fan = FALSE,
  lambda = object$lambda,
  biasadj = NULL,
  ts = TRUE,
  ...
)
}
\arguments{
\item{object}{Object of class "lm", usually the result of a call to \code{\link[stats]{lm}} or \code{\link{tslm}}.}

\item{newdata}{An optional data frame in which to look for variables with which to predict. If omitted, it is assumed that the only variables are trend and season, and \code{h} forecasts are produced.}

\item{h}{Number of periods for forecasting. Ignored if \code{newdata} is present.}

\item{level}{Confidence level for prediction intervals.}

\item{fan}{If \code{TRUE}, level is set to seq(51,99,by=3). This is suitable for fan plots.}

\item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, the data are transformed before the model is estimated.}

\item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.}

\item{ts}{If \code{TRUE}, the forecasts will be treated as time series provided the original data is a time series; the \code{newdata} will be interpreted as related to the subsequent time periods. If \code{FALSE}, any time series attributes of the original data will be ignored.}

\item{...}{Other arguments passed to \code{\link[stats]{predict.lm}()}.}
}
\value{
An object of class "\code{forecast}".

The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals.

The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{forecast.lm}.

An object of class \code{"forecast"} is a list containing at least the following elements:
\item{model}{A list containing information about the fitted model}
\item{method}{The name of the forecasting method as a character string}
\item{mean}{Point forecasts as a time series}
\item{lower}{Lower limits for prediction intervals}
\item{upper}{Upper limits for prediction intervals}
\item{level}{The confidence values associated with the prediction intervals}
\item{x}{The historical data for the response variable.}
\item{residuals}{Residuals from the fitted model. That is x minus fitted values.}
\item{fitted}{Fitted values}
}
\description{
\code{forecast.lm} is used to predict linear models, especially those involving trend and seasonality components.
}
\details{
\code{forecast.lm} is largely a wrapper for \code{\link[stats]{predict.lm}()} except that it allows variables "trend" and "season" which are created on the fly from the time series characteristics of the data. Also, the output is reformatted into a \code{forecast} object.
}
\examples{
y <- ts(rnorm(120,0,3) + 1:120 + 20*sin(2*pi*(1:120)/12), frequency=12)
fit <- tslm(y ~ trend + season)
plot(forecast(fit, h=20))
}
\seealso{
\code{\link{tslm}}, \code{\link[stats]{lm}}.
}
\author{
Rob J Hyndman
}
\keyword{stats}
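Beyond the trend-and-season example above, `newdata` can also carry exogenous regressors. The following is a minimal sketch, not from the package documentation; the predictor name `ddays` and the simulated data are hypothetical.

```s
# Hypothetical sketch: forecasting a tslm fit that uses an extra regressor.
library(forecast)
set.seed(1)
ddays <- ts(rnorm(120), frequency = 12)                  # hypothetical predictor
y <- ts(1:120 + 5 * ddays + rnorm(120), frequency = 12)  # simulated response
fit <- tslm(y ~ trend + season + ddays)
# Future values of the regressor are supplied via newdata;
# the forecast horizon is taken from the number of rows.
fc <- forecast(fit, newdata = data.frame(ddays = rep(0, 12)))
plot(fc)
```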
forecast/man/ggmonthplot.Rd

% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/ggplot.R
\name{ggmonthplot}
\alias{ggmonthplot}
\alias{ggsubseriesplot}
\title{Create a seasonal subseries ggplot}
\usage{
ggmonthplot(x, labels = NULL, times = time(x), phase = cycle(x), ...)

ggsubseriesplot(x, labels = NULL, times = time(x), phase = cycle(x), ...)
}
\arguments{
\item{x}{a time series object (type \code{ts}).}

\item{labels}{A vector of labels to use for each 'season'}

\item{times}{A vector of times for each observation}

\item{phase}{A vector of seasonal components}

\item{\dots}{Not used (for consistency with monthplot)}
}
\value{
Returns an object of class \code{ggplot}.
}
\description{
Plots a subseries plot using ggplot. Each season is plotted as a separate mini time series. The blue lines represent the mean of the observations within each season.
}
\details{
The \code{ggmonthplot} function is simply a wrapper for \code{ggsubseriesplot}, provided as a convenience for users familiar with \code{\link[stats]{monthplot}}.
}
\examples{
ggsubseriesplot(AirPassengers)
ggsubseriesplot(woolyrnq)
}
\seealso{
\code{\link[stats]{monthplot}}
}
\author{
Mitchell O'Hara-Wild
}

forecast/man/bizdays.Rd

% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/calendar.R
\name{bizdays}
\alias{bizdays}
\title{Number of trading days in each season}
\usage{
bizdays(x, FinCenter = c("New York", "London", "NERC", "Tokyo", "Zurich"))
}
\arguments{
\item{x}{Monthly or quarterly time series}

\item{FinCenter}{Major financial center.}
}
\value{
Time series
}
\description{
Returns the number of trading days in each month or quarter of the observed time period in a major financial center.
}
\details{
Useful for trading-day length adjustments. For more on how "business days" are defined, please refer to \code{\link[timeDate]{isBizday}}.
}
\examples{
x <- ts(rnorm(30), start = c(2013, 2), frequency = 12)
bizdays(x, FinCenter = "New York")
}
\seealso{
\code{\link[forecast]{monthdays}}
}
\author{
Earo Wang
}
\keyword{ts}
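As a hedged illustration of the trading-day adjustment use case described above (not taken from the package documentation), the counts returned by `bizdays` can be passed as a regressor to `auto.arima`; the series here is simulated.

```s
# Hypothetical sketch: trading-day counts as an ARIMA regressor.
library(forecast)
set.seed(1)
x <- ts(rnorm(60), start = c(2015, 1), frequency = 12)  # simulated monthly series
fit <- auto.arima(x, xreg = bizdays(x, FinCenter = "New York"))
# For forecasting, build trading-day counts for the future period
# from a dummy series covering that period.
future <- ts(rep(0, 12), start = c(2020, 1), frequency = 12)
fc <- forecast(fit, xreg = bizdays(future, FinCenter = "New York"))
```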
An object of class \code{"forecast"} is a list containing at least the following elements: \item{model}{A copy of the \code{bats} object} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{residuals}{Residuals from the fitted model.} \item{fitted}{Fitted values (one-step forecasts)} } \description{ Forecasts \code{h} steps ahead with a BATS model. Prediction intervals are also produced. } \examples{ \dontrun{ fit <- bats(USAccDeaths) plot(forecast(fit)) taylor.fit <- bats(taylor) plot(forecast(taylor.fit)) } } \references{ De Livera, A.M., Hyndman, R.J., & Snyder, R. D. (2011), Forecasting time series with complex seasonal patterns using exponential smoothing, \emph{Journal of the American Statistical Association}, \bold{106}(496), 1513-1527. } \seealso{ \code{\link{bats}}, \code{\link{tbats}},\code{\link{forecast.ets}}. } \author{ Slava Razbash and Rob J Hyndman } \keyword{ts} forecast/man/forecast.HoltWinters.Rd0000644000176200001440000000557414003673410017222 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/forecast2.R \name{forecast.HoltWinters} \alias{forecast.HoltWinters} \title{Forecasting using Holt-Winters objects} \usage{ \method{forecast}{HoltWinters}( object, h = ifelse(frequency(object$x) > 1, 2 * frequency(object$x), 10), level = c(80, 95), fan = FALSE, lambda = NULL, biasadj = NULL, ... ) } \arguments{ \item{object}{An object of class "\code{HoltWinters}". Usually the result of a call to \code{\link[stats]{HoltWinters}}.} \item{h}{Number of periods for forecasting} \item{level}{Confidence level for prediction intervals.} \item{fan}{If TRUE, level is set to seq(51,99,by=3). This is suitable for fan plots.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{...}{Other arguments.} } \value{ An object of class "\code{forecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{forecast.HoltWinters}. 
An object of class \code{"forecast"} is a list containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{residuals}{Residuals from the fitted model.} \item{fitted}{Fitted values (one-step forecasts)} } \description{ Returns forecasts and other information for univariate Holt-Winters time series models. } \details{ This function calls \code{\link[stats]{predict.HoltWinters}} and constructs an object of class "\code{forecast}" from the results. It is included for completeness, but the \code{\link{ets}} is recommended for use instead of \code{\link[stats]{HoltWinters}}. } \examples{ fit <- HoltWinters(WWWusage,gamma=FALSE) plot(forecast(fit)) } \seealso{ \code{\link[stats]{predict.HoltWinters}}, \code{\link[stats]{HoltWinters}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/bats.Rd0000644000176200001440000000713214003673410014054 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/bats.R \name{bats} \alias{bats} \alias{as.character.bats} \alias{print.bats} \title{BATS model (Exponential smoothing state space model with Box-Cox transformation, ARMA errors, Trend and Seasonal components)} \usage{ bats( y, use.box.cox = NULL, use.trend = NULL, use.damped.trend = NULL, seasonal.periods = NULL, use.arma.errors = TRUE, use.parallel = length(y) > 1000, num.cores = 2, bc.lower = 0, bc.upper = 1, biasadj = FALSE, model = NULL, ... ) } \arguments{ \item{y}{The time series to be forecast. Can be \code{numeric}, \code{msts} or \code{ts}. Only univariate time series are supported.} \item{use.box.cox}{\code{TRUE/FALSE} indicates whether to use the Box-Cox transformation or not. If \code{NULL} then both are tried and the best fit is selected by AIC.} \item{use.trend}{\code{TRUE/FALSE} indicates whether to include a trend or not. If \code{NULL} then both are tried and the best fit is selected by AIC.} \item{use.damped.trend}{\code{TRUE/FALSE} indicates whether to include a damping parameter in the trend or not. If \code{NULL} then both are tried and the best fit is selected by AIC.} \item{seasonal.periods}{If \code{y} is a numeric then seasonal periods can be specified with this parameter.} \item{use.arma.errors}{\code{TRUE/FALSE} indicates whether to include ARMA errors or not. If \code{TRUE} the best fit is selected by AIC. If \code{FALSE} then the selection algorithm does not consider ARMA errors.} \item{use.parallel}{\code{TRUE/FALSE} indicates whether or not to use parallel processing.} \item{num.cores}{The number of parallel processes to be used if using parallel processing. If \code{NULL} then the number of logical cores is detected and all available cores are used.} \item{bc.lower}{The lower limit (inclusive) for the Box-Cox transformation.} \item{bc.upper}{The upper limit (inclusive) for the Box-Cox transformation.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If TRUE, point forecasts and fitted values are mean forecast. 
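A brief sketch, not from the package documentation, contrasting the two interfaces mentioned in the details above; `model = "AAA"` is an assumption chosen to mirror additive Holt-Winters.

```s
# Both stats::HoltWinters() and ets() fits can be passed to forecast(),
# but ets() is the recommended interface.
library(forecast)
fc.hw  <- forecast(HoltWinters(USAccDeaths), h = 24)
fc.ets <- forecast(ets(USAccDeaths, model = "AAA"), h = 24)  # additive Holt-Winters
plot(fc.hw)
plot(fc.ets)
```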
forecast/man/bats.Rd

% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/bats.R
\name{bats}
\alias{bats}
\alias{as.character.bats}
\alias{print.bats}
\title{BATS model (Exponential smoothing state space model with Box-Cox transformation, ARMA errors, Trend and Seasonal components)}
\usage{
bats(
  y,
  use.box.cox = NULL,
  use.trend = NULL,
  use.damped.trend = NULL,
  seasonal.periods = NULL,
  use.arma.errors = TRUE,
  use.parallel = length(y) > 1000,
  num.cores = 2,
  bc.lower = 0,
  bc.upper = 1,
  biasadj = FALSE,
  model = NULL,
  ...
)
}
\arguments{
\item{y}{The time series to be forecast. Can be \code{numeric}, \code{msts} or \code{ts}. Only univariate time series are supported.}

\item{use.box.cox}{\code{TRUE/FALSE} indicates whether to use the Box-Cox transformation or not. If \code{NULL} then both are tried and the best fit is selected by AIC.}

\item{use.trend}{\code{TRUE/FALSE} indicates whether to include a trend or not. If \code{NULL} then both are tried and the best fit is selected by AIC.}

\item{use.damped.trend}{\code{TRUE/FALSE} indicates whether to include a damping parameter in the trend or not. If \code{NULL} then both are tried and the best fit is selected by AIC.}

\item{seasonal.periods}{If \code{y} is a numeric then seasonal periods can be specified with this parameter.}

\item{use.arma.errors}{\code{TRUE/FALSE} indicates whether to include ARMA errors or not. If \code{TRUE} the best fit is selected by AIC. If \code{FALSE} then the selection algorithm does not consider ARMA errors.}

\item{use.parallel}{\code{TRUE/FALSE} indicates whether or not to use parallel processing.}

\item{num.cores}{The number of parallel processes to be used if using parallel processing. If \code{NULL} then the number of logical cores is detected and all available cores are used.}

\item{bc.lower}{The lower limit (inclusive) for the Box-Cox transformation.}

\item{bc.upper}{The upper limit (inclusive) for the Box-Cox transformation.}

\item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If TRUE, point forecasts and fitted values are mean forecasts. Otherwise, these points can be considered the median of the forecast densities.}

\item{model}{Output from a previous call to \code{bats}. If model is passed, this same model is fitted to \code{y} without re-estimating any parameters.}

\item{...}{Additional arguments to be passed to \code{auto.arima} when choosing an ARMA(p, q) model for the errors. (Note that xreg will be ignored, as will any arguments concerning seasonality and differencing, but arguments controlling the values of p and q will be used.)}
}
\value{
An object of class "\code{bats}". The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{bats} and associated functions. The fitted model is designated BATS(omega, p,q, phi, m1,...mJ) where omega is the Box-Cox parameter and phi is the damping parameter; the error is modelled as an ARMA(p,q) process and m1,...,mJ list the seasonal periods used in the model.
}
\description{
Fits a BATS model applied to \code{y}, as described in De Livera, Hyndman & Snyder (2011). Parallel processing is used by default to speed up the computations.
}
\examples{
\dontrun{
fit <- bats(USAccDeaths)
plot(forecast(fit))

taylor.fit <- bats(taylor)
plot(forecast(taylor.fit))
}
}
\references{
De Livera, A.M., Hyndman, R.J., & Snyder, R. D. (2011), Forecasting time series with complex seasonal patterns using exponential smoothing, \emph{Journal of the American Statistical Association}, \bold{106}(496), 1513-1527.
}
\author{
Slava Razbash and Rob J Hyndman
}
\keyword{ts}

forecast/man/easter.Rd

% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/calendar.R
\name{easter}
\alias{easter}
\title{Easter holidays in each season}
\usage{
easter(x, easter.mon = FALSE)
}
\arguments{
\item{x}{Monthly or quarterly time series}

\item{easter.mon}{If TRUE, the length of Easter holidays includes Easter Monday.}
}
\value{
Time series
}
\description{
Returns a vector of 0's and 1's, or fractional results if Easter spans March and April in the observed time period. Easter is defined as the days from Good Friday to Easter Sunday inclusively, plus optionally Easter Monday if \code{easter.mon=TRUE}.
}
\details{
Useful for adjusting calendar effects.
}
\examples{
easter(wineind, easter.mon = TRUE)
}
\author{
Earo Wang
}
\keyword{ts}
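A hedged sketch, not from the package documentation, of the calendar-adjustment use case: the Easter dummy returned above can enter a regression model as an ordinary regressor.

```s
# Hypothetical sketch: Easter effect as a regressor in tslm().
library(forecast)
easter.dummy <- easter(wineind, easter.mon = TRUE)
fit <- tslm(wineind ~ trend + season + easter.dummy)
summary(fit)
```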
forecast/man/woolyrnq.Rd

% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/data.R
\docType{data}
\name{woolyrnq}
\alias{woolyrnq}
\title{Quarterly production of woollen yarn in Australia}
\format{
Time series data
}
\source{
Time Series Data Library. \url{https://pkg.yangzhuoranyang.com/tsdl/}
}
\usage{
woolyrnq
}
\description{
Quarterly production of woollen yarn in Australia: tonnes. Mar 1965 -- Sep 1994.
}
\examples{
tsdisplay(woolyrnq)
}
\keyword{datasets}

forecast/man/plot.bats.Rd

% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/bats.R, R/ggplot.R, R/tbats.R
\name{plot.bats}
\alias{plot.bats}
\alias{autoplot.tbats}
\alias{autoplot.bats}
\alias{plot.tbats}
\title{Plot components from BATS model}
\usage{
\method{plot}{bats}(x, main = "Decomposition by BATS model", ...)

\method{autoplot}{tbats}(object, range.bars = FALSE, ...)

\method{autoplot}{bats}(object, range.bars = FALSE, ...)

\method{plot}{tbats}(x, main = "Decomposition by TBATS model", ...)
}
\arguments{
\item{x}{Object of class \dQuote{bats/tbats}.}

\item{main}{Main title for plot.}

\item{...}{Other plotting parameters passed to \code{\link[graphics]{par}}.}

\item{object}{Object of class \dQuote{bats/tbats}.}

\item{range.bars}{Logical indicating if each plot should have a bar at its right side representing relative size. If NULL, automatic selection takes place.}
}
\value{
None. Function produces a plot
}
\description{
Produces a plot of the level, slope and seasonal components from a BATS or TBATS model. The plotted components are Box-Cox transformed using the estimated transformation parameter.
}
\examples{
\dontrun{
fit <- tbats(USAccDeaths)
plot(fit)
autoplot(fit, range.bars = TRUE)}
}
\seealso{
\code{\link{bats}},\code{\link{tbats}}
}
\author{
Rob J Hyndman
}
\keyword{hplot}

forecast/man/na.interp.Rd

% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/clean.R
\name{na.interp}
\alias{na.interp}
\title{Interpolate missing values in a time series}
\usage{
na.interp(
  x,
  lambda = NULL,
  linear = (frequency(x) <= 1 | sum(!is.na(x)) <= 2 * frequency(x))
)
}
\arguments{
\item{x}{time series}

\item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, the data are transformed before the model is estimated.}

\item{linear}{Should a linear interpolation be used.}
}
\value{
Time series
}
\description{
By default, uses linear interpolation for non-seasonal series. For seasonal series, a robust STL decomposition is first computed. Then a linear interpolation is applied to the seasonally adjusted data, and the seasonal component is added back.
}
\details{
A more general and flexible approach is available using \code{na.approx} in the \code{zoo} package.
}
\examples{
data(gold)
plot(na.interp(gold))
}
\seealso{
\code{\link[forecast]{tsoutliers}}
}
\author{
Rob J Hyndman
}
\keyword{ts}

forecast/man/is.forecast.Rd

% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/forecast.R, R/mforecast.R, R/spline.R
\name{is.forecast}
\alias{is.forecast}
\alias{is.mforecast}
\alias{is.splineforecast}
\title{Is an object a particular forecast type?}
\usage{
is.forecast(x)

is.mforecast(x)

is.splineforecast(x)
}
\arguments{
\item{x}{object to be tested}
}
\description{
Returns true if the forecast object is of a particular type
}

forecast/man/tsoutliers.Rd

% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/clean.R
\name{tsoutliers}
\alias{tsoutliers}
\title{Identify and replace outliers in a time series}
\usage{
tsoutliers(x, iterate = 2, lambda = NULL)
}
\arguments{
\item{x}{time series}

\item{iterate}{the number of iterations required}

\item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, the data are transformed before the model is estimated.}
}
\value{
\item{index}{Indicating the index of outlier(s)}
\item{replacement}{Suggested numeric values to replace identified outliers}
}
\description{
Uses supsmu for non-seasonal series and a periodic stl decomposition for seasonal series to identify outliers and estimate their replacements.
}
\examples{
data(gold)
tsoutliers(gold)
}
\references{
Hyndman (2021) "Detecting time series outliers" \url{https://robjhyndman.com/hyndsight/tsoutliers/}.
}
\seealso{
\code{\link[forecast]{na.interp}}, \code{\link[forecast]{tsclean}}
}
\author{
Rob J Hyndman
}
\keyword{ts}
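A small sketch, not from the package documentation, that injects an artificial spike into the `gold` series to show how the identification described above behaves; the index and value are arbitrary.

```s
# Hypothetical sketch: plant an outlier and check that tsoutliers() finds it.
library(forecast)
y <- gold
y[500] <- 900          # arbitrary spike well above the series level
tsoutliers(y)          # 'index' should include 500
# tsclean() both replaces outliers and interpolates missing values
plot(tsclean(y))
```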
forecast/man/simulate.ets.Rd

% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/simulate.R
\name{simulate.ets}
\alias{simulate.ets}
\alias{simulate.Arima}
\alias{simulate.ar}
\alias{simulate.lagwalk}
\alias{simulate.fracdiff}
\alias{simulate.nnetar}
\alias{simulate.modelAR}
\title{Simulation from a time series model}
\usage{
\method{simulate}{ets}(
  object,
  nsim = length(object$x),
  seed = NULL,
  future = TRUE,
  bootstrap = FALSE,
  innov = NULL,
  ...
)

\method{simulate}{Arima}(
  object,
  nsim = length(object$x),
  seed = NULL,
  xreg = NULL,
  future = TRUE,
  bootstrap = FALSE,
  innov = NULL,
  lambda = object$lambda,
  ...
)

\method{simulate}{ar}(
  object,
  nsim = object$n.used,
  seed = NULL,
  future = TRUE,
  bootstrap = FALSE,
  innov = NULL,
  ...
)

\method{simulate}{lagwalk}(
  object,
  nsim = length(object$x),
  seed = NULL,
  future = TRUE,
  bootstrap = FALSE,
  innov = NULL,
  lambda = object$lambda,
  ...
)

\method{simulate}{fracdiff}(
  object,
  nsim = object$n,
  seed = NULL,
  future = TRUE,
  bootstrap = FALSE,
  innov = NULL,
  ...
)

\method{simulate}{nnetar}(
  object,
  nsim = length(object$x),
  seed = NULL,
  xreg = NULL,
  future = TRUE,
  bootstrap = FALSE,
  innov = NULL,
  lambda = object$lambda,
  ...
)

\method{simulate}{modelAR}(
  object,
  nsim = length(object$x),
  seed = NULL,
  xreg = NULL,
  future = TRUE,
  bootstrap = FALSE,
  innov = NULL,
  lambda = object$lambda,
  ...
)
}
\arguments{
\item{object}{An object of class "\code{ets}", "\code{Arima}", "\code{ar}" or "\code{nnetar}".}

\item{nsim}{Number of periods for the simulated series. Ignored if either \code{xreg} or \code{innov} are not \code{NULL}. Otherwise the default is the length of series used to train model (or 100 if no data found).}

\item{seed}{Either \code{NULL} or an integer that will be used in a call to \code{\link[base]{set.seed}} before simulating the time series. The default, \code{NULL}, will not change the random generator state.}

\item{future}{Produce sample paths that are future to and conditional on the data in \code{object}. Otherwise simulate unconditionally.}

\item{bootstrap}{Do simulation using resampled errors rather than normally distributed errors or errors provided as \code{innov}.}

\item{innov}{A vector of innovations to use as the error series. Ignored if \code{bootstrap==TRUE}. If not \code{NULL}, the value of \code{nsim} is set to length of \code{innov}.}

\item{...}{Other arguments, not currently used.}

\item{xreg}{New values of \code{xreg} to be used for forecasting. The value of \code{nsim} is set to the number of rows of \code{xreg} if it is not \code{NULL}.}

\item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, the data are transformed before the model is estimated.}
}
\value{
An object of class "\code{ts}".
}
\description{
Returns a time series based on the model object \code{object}.
}
\details{
With \code{simulate.Arima()}, the \code{object} should be produced by \code{\link{Arima}} or \code{\link{auto.arima}}, rather than \code{\link[stats]{arima}}. By default, the error series is assumed normally distributed and generated using \code{\link[stats]{rnorm}}. If \code{innov} is present, it is used instead. If \code{bootstrap=TRUE} and \code{innov=NULL}, the residuals are resampled instead.

When \code{future=TRUE}, the sample paths are conditional on the data. When \code{future=FALSE} and the model is stationary, the sample paths do not depend on the data at all. When \code{future=FALSE} and the model is non-stationary, the location of the sample paths is arbitrary, so they all start at the value of the first observation.
}
\examples{
fit <- ets(USAccDeaths)
plot(USAccDeaths, xlim = c(1973, 1982))
lines(simulate(fit, 36), col = "red")
}
\seealso{
\code{\link{ets}}, \code{\link{Arima}}, \code{\link{auto.arima}}, \code{\link{ar}}, \code{\link{arfima}}, \code{\link{nnetar}}.
}
\author{
Rob J Hyndman
}
\keyword{ts}
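A minimal sketch extending the example above to the bootstrap option described in the details: several future sample paths drawn with resampled residuals instead of Gaussian errors.

```s
# Ten bootstrapped future sample paths from a fitted ETS model.
library(forecast)
fit <- ets(USAccDeaths)
plot(USAccDeaths, xlim = c(1973, 1982))
for (i in 1:10)
  lines(simulate(fit, nsim = 36, future = TRUE, bootstrap = TRUE),
        col = rgb(1, 0, 0, 0.3))
```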
forecast/man/wineind.Rd

% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/data.R
\docType{data}
\name{wineind}
\alias{wineind}
\title{Australian total wine sales}
\format{
Time series data
}
\source{
Time Series Data Library. \url{https://pkg.yangzhuoranyang.com/tsdl/}
}
\usage{
wineind
}
\description{
Australian total wine sales by wine makers in bottles <= 1 litre. Jan 1980 -- Aug 1994.
}
\examples{
tsdisplay(wineind)
}
\keyword{datasets}

forecast/man/autoplot.ts.Rd

% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/ggplot.R
\name{autolayer.mts}
\alias{autolayer.mts}
\alias{autolayer.msts}
\alias{autolayer.ts}
\alias{autoplot.ts}
\alias{autoplot.mts}
\alias{autoplot.msts}
\alias{fortify.ts}
\title{Automatically create a ggplot for time series objects}
\usage{
\method{autolayer}{mts}(object, colour = TRUE, series = NULL, ...)

\method{autolayer}{msts}(object, series = NULL, ...)

\method{autolayer}{ts}(object, colour = TRUE, series = NULL, ...)

\method{autoplot}{ts}(
  object,
  series = NULL,
  xlab = "Time",
  ylab = deparse(substitute(object)),
  main = NULL,
  ...
)

\method{autoplot}{mts}(
  object,
  colour = TRUE,
  facets = FALSE,
  xlab = "Time",
  ylab = deparse(substitute(object)),
  main = NULL,
  ...
)

\method{autoplot}{msts}(object, ...)

\method{fortify}{ts}(model, data, ...)
}
\arguments{
\item{object}{Object of class \dQuote{\code{ts}} or \dQuote{\code{mts}}.}

\item{colour}{If TRUE, the time series will be assigned a colour aesthetic}

\item{series}{Identifies the time series with a colour, which integrates well with the functionality of \link{geom_forecast}.}

\item{...}{Other plotting parameters to affect the plot.}

\item{xlab}{X-axis label.}

\item{ylab}{Y-axis label.}

\item{main}{Main title.}

\item{facets}{If TRUE, multiple time series will be faceted (and unless specified, colour is set to FALSE). If FALSE, each series will be assigned a colour.}

\item{model}{Object of class \dQuote{\code{ts}} to be converted to \dQuote{\code{data.frame}}.}

\item{data}{Not used (required for \link{fortify} method)}
}
\value{
None. Function produces a ggplot graph.
}
\description{
\code{autoplot} takes an object of type \code{ts} or \code{mts} and creates a ggplot object suitable for usage with \code{stat_forecast}.
}
\details{
\code{fortify.ts} takes a \code{ts} object and converts it into a data frame (for usage with ggplot2).
}
\examples{
library(ggplot2)
autoplot(USAccDeaths)

lungDeaths <- cbind(mdeaths, fdeaths)
autoplot(lungDeaths)
autoplot(lungDeaths, facets=TRUE)
}
\seealso{
\code{\link[stats]{plot.ts}}, \code{\link[ggplot2]{fortify}}
}
\author{
Mitchell O'Hara-Wild
}

forecast/man/plot.mforecast.Rd

% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/ggplot.R, R/mforecast.R
\name{autoplot.mforecast}
\alias{autoplot.mforecast}
\alias{autolayer.mforecast}
\alias{plot.mforecast}
\title{Multivariate forecast plot}
\usage{
\method{autoplot}{mforecast}(object, PI = TRUE, facets = TRUE, colour = FALSE, ...)

\method{autolayer}{mforecast}(object, series = NULL, PI = TRUE, ...)

\method{plot}{mforecast}(x, main = paste("Forecasts from", unique(x$method)), xlab = "time", ...)
}
\arguments{
\item{object}{Multivariate forecast object of class \code{mforecast}. Used for ggplot graphics (S3 method consistency).}

\item{PI}{If \code{FALSE}, confidence intervals will not be plotted, giving only the forecast line.}

\item{facets}{If TRUE, multiple time series will be faceted. If FALSE, each series will be assigned a colour.}

\item{colour}{If TRUE, the time series will be assigned a colour aesthetic}

\item{\dots}{additional arguments to each individual \code{plot}.}

\item{series}{Matches an unidentified forecast layer with a coloured object on the plot.}

\item{x}{Multivariate forecast object of class \code{mforecast}.}

\item{main}{Main title. Default is the forecast method. For autoplot, specify a vector of titles for each plot.}

\item{xlab}{X-axis label. For autoplot, specify a vector of labels for each plot.}
}
\description{
Plots historical data with multivariate forecasts and prediction intervals.
}
\details{
\code{autoplot} will produce an equivalent plot as a ggplot object.
}
\examples{
library(ggplot2)

lungDeaths <- cbind(mdeaths, fdeaths)
fit <- tslm(lungDeaths ~ trend + season)
fcast <- forecast(fit, h=10)
plot(fcast)
autoplot(fcast)

carPower <- as.matrix(mtcars[,c("qsec","hp")])
carmpg <- mtcars[,"mpg"]
fit <- lm(carPower ~ carmpg)
fcast <- forecast(fit, newdata=data.frame(carmpg=30))
plot(fcast, xlab="Year")
autoplot(fcast, xlab=rep("Year",2))
}
\references{
Hyndman and Athanasopoulos (2018) \emph{Forecasting: principles and practice}, 2nd edition, OTexts: Melbourne, Australia. \url{https://otexts.com/fpp2/}
}
\seealso{
\code{\link[forecast]{plot.forecast}}, \code{\link[stats]{plot.ts}}
}
\author{
Mitchell O'Hara-Wild
}
\keyword{ts}

forecast/man/splinef.Rd

% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/spline.R
\name{splinef}
\alias{splinef}
\title{Cubic Spline Forecast}
\usage{
splinef(
  y,
  h = 10,
  level = c(80, 95),
  fan = FALSE,
  lambda = NULL,
  biasadj = FALSE,
  method = c("gcv", "mle"),
  x = y
)
}
\arguments{
\item{y}{a numeric vector or time series of class \code{ts}}

\item{h}{Number of periods for forecasting}

\item{level}{Confidence level for prediction intervals.}

\item{fan}{If TRUE, level is set to seq(51,99,by=3). This is suitable for fan plots.}

\item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, the data are transformed before the model is estimated.}

\item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations.
If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.}

\item{method}{Method for selecting the smoothing parameter. If \code{method="gcv"}, the generalized cross-validation method from \code{\link[stats]{smooth.spline}} is used. If \code{method="mle"}, the maximum likelihood method from Hyndman et al (2002) is used.}

\item{x}{Deprecated. Included for backwards compatibility.}
}
\value{
An object of class "\code{forecast}".

The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals.

The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{splinef}.

An object of class \code{"forecast"} containing the following elements:
\item{model}{A list containing information about the fitted model}
\item{method}{The name of the forecasting method as a character string}
\item{mean}{Point forecasts as a time series}
\item{lower}{Lower limits for prediction intervals}
\item{upper}{Upper limits for prediction intervals}
\item{level}{The confidence values associated with the prediction intervals}
\item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).}
\item{onestepf}{One-step forecasts from the fitted model.}
\item{fitted}{Smooth estimates of the fitted trend using all data.}
\item{residuals}{Residuals from the fitted model. That is x minus one-step forecasts.}
}
\description{
Returns local linear forecasts and prediction intervals using cubic smoothing splines.
}
\details{
The cubic smoothing spline model is equivalent to an ARIMA(0,2,2) model but with a restricted parameter space. The advantage of the spline model over the full ARIMA model is that it provides a smooth historical trend as well as a linear forecast function. Hyndman, King, Pitrun, and Billah (2002) show that the forecast performance of the method is hardly affected by the restricted parameter space.
}
\examples{
fcast <- splinef(uspop,h=5)
plot(fcast)
summary(fcast)
}
\references{
Hyndman, King, Pitrun and Billah (2005) Local linear forecasts using cubic smoothing splines. \emph{Australian and New Zealand Journal of Statistics}, \bold{47}(1), 87-99. \url{https://robjhyndman.com/publications/splinefcast/}.
}
\seealso{
\code{\link[stats]{smooth.spline}}, \code{\link[stats]{arima}}, \code{\link{holt}}.
}
\author{
Rob J Hyndman
}
\keyword{ts}
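A hedged sketch, not from the package documentation, of the point made in the details: the spline forecast function is linear, like Holt's method, but the fitted historical trend is smooth.

```s
# Compare the cubic spline forecast with Holt's linear method.
library(forecast)
fc.spline <- splinef(uspop, h = 5)
fc.holt   <- holt(uspop, h = 5)
plot(fc.spline)
lines(fc.holt$mean, col = "red")   # both forecast functions are linear
```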
If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{...}{Other arguments.} } \value{ An object of class "\code{forecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{forecast.StructTS}. An object of class \code{"forecast"} is a list containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} \item{fitted}{Fitted values (one-step forecasts)} } \description{ Returns forecasts and other information for univariate structural time series models. } \details{ This function calls \code{predict.StructTS} and constructs an object of class "\code{forecast}" from the results. } \examples{ fit <- StructTS(WWWusage,"level") plot(forecast(fit)) } \seealso{ \code{\link[stats]{StructTS}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/tsCV.Rd0000644000176200001440000000515614003673410014006 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/tscv.R \name{tsCV} \alias{tsCV} \title{Time series cross-validation} \usage{ tsCV(y, forecastfunction, h = 1, window = NULL, xreg = NULL, initial = 0, ...) } \arguments{ \item{y}{Univariate time series} \item{forecastfunction}{Function to return an object of class \code{forecast}. Its first argument must be a univariate time series, and it must have an argument \code{h} for the forecast horizon. If exogenous predictors are used, then it must also have \code{xreg} and \code{newxreg} arguments corresponding to the training and test periods.} \item{h}{Forecast horizon} \item{window}{Length of the rolling window, if NULL, a rolling window will not be used.} \item{xreg}{Exogeneous predictor variables passed to the forecast function if required.} \item{initial}{Initial period of the time series where no cross-validation is performed.} \item{...}{Other arguments are passed to \code{forecastfunction}.} } \value{ Numerical time series object containing the forecast errors as a vector (if h=1) and a matrix otherwise. The time index corresponds to the last period of the training data. The columns correspond to the forecast horizons. } \description{ \code{tsCV} computes the forecast errors obtained by applying \code{forecastfunction} to subsets of the time series \code{y} using a rolling forecast origin. } \details{ Let \code{y} contain the time series \eqn{y_1,\dots,y_T}{y[1:T]}. 
Then \code{forecastfunction} is applied successively to the time series \eqn{y_1,\dots,y_t}{y[1:t]}, for \eqn{t=1,\dots,T-h}, making predictions \eqn{\hat{y}_{t+h|t}}{f[t+h]}. The errors are given by \eqn{e_{t+h} = y_{t+h}-\hat{y}_{t+h|t}}{e[t+h] = y[t+h]-f[t+h]}. If h=1, these are returned as a vector, \eqn{e_1,\dots,e_T}{e[1:T]}. For h>1, they are returned as a matrix with the hth column containing errors for forecast horizon h. The first few errors may be missing as it may not be possible to apply \code{forecastfunction} to very short time series.
}
\examples{
#Fit an AR(2) model to each rolling origin subset
far2 <- function(x, h){forecast(Arima(x, order=c(2,0,0)), h=h)}
e <- tsCV(lynx, far2, h=1)

#Fit the same model with a rolling window of length 30
e <- tsCV(lynx, far2, h=1, window=30)

#Example with exogenous predictors
far2_xreg <- function(x, h, xreg, newxreg) {
  forecast(Arima(x, order=c(2,0,0), xreg=xreg), xreg=newxreg)
}

y <- ts(rnorm(50))
xreg <- matrix(rnorm(100),ncol=2)
e <- tsCV(y, far2_xreg, h=3, xreg=xreg)
}
\seealso{
\link{CV}, \link{CVar}, \link{residuals.Arima}, \url{https://robjhyndman.com/hyndsight/tscv/}.
}
\author{
Rob J Hyndman
}
\keyword{ts}
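A short sketch extending the examples above to summarise the error matrix described in the details: since the hth column holds the horizon-h errors, one RMSE per horizon follows from a column-wise mean.

```s
# Summarise the tsCV() error matrix: one RMSE per forecast horizon.
library(forecast)
far2 <- function(x, h){forecast(Arima(x, order = c(2, 0, 0)), h = h)}
e <- tsCV(lynx, far2, h = 5)
sqrt(colMeans(e^2, na.rm = TRUE))   # RMSE for horizons 1 to 5
```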
} \description{ \code{fourier} returns a matrix containing terms from a Fourier series, up to order \code{K}, suitable for use in \code{\link{Arima}}, \code{\link{auto.arima}}, or \code{\link{tslm}}. } \details{ \code{fourierf} is deprecated; instead, use the \code{h} argument in \code{fourier}. The period of the Fourier terms is determined from the time series characteristics of \code{x}. When \code{h} is missing, the length of \code{x} also determines the number of rows for the matrix returned by \code{fourier}. Otherwise, the value of \code{h} determines the number of rows for the matrix returned by \code{fourier}, typically used for forecasting. The values within \code{x} are not used. Typical use would omit \code{h} when generating Fourier terms for training a model and include \code{h} when generating Fourier terms for forecasting. When \code{x} is a \code{ts} object, the value of \code{K} should be an integer and specifies the number of sine and cosine terms to return. Thus, the matrix returned has \code{2*K} columns. When \code{x} is an \code{msts} object, then \code{K} should be a vector of integers specifying the number of sine and cosine terms for each of the seasonal periods. Then the matrix returned will have \code{2*sum(K)} columns. } \examples{ library(ggplot2) # Using Fourier series for a "ts" object # K is chosen to minimize the AICc deaths.model <- auto.arima(USAccDeaths, xreg=fourier(USAccDeaths,K=5), seasonal=FALSE) deaths.fcast <- forecast(deaths.model, xreg=fourier(USAccDeaths, K=5, h=36)) autoplot(deaths.fcast) + xlab("Year") # Using Fourier series for a "msts" object taylor.lm <- tslm(taylor ~ fourier(taylor, K = c(3, 3))) taylor.fcast <- forecast(taylor.lm, data.frame(fourier(taylor, K = c(3, 3), h = 270))) autoplot(taylor.fcast) } \seealso{ \code{\link{seasonaldummy}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/getResponse.Rd0000644000176200001440000000261014003673410015415 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/getResponse.R \name{getResponse} \alias{getResponse} \alias{getResponse.default} \alias{getResponse.lm} \alias{getResponse.Arima} \alias{getResponse.fracdiff} \alias{getResponse.ar} \alias{getResponse.tbats} \alias{getResponse.bats} \alias{getResponse.mforecast} \alias{getResponse.baggedModel} \title{Get response variable from time series model.} \usage{ getResponse(object, ...) \method{getResponse}{default}(object, ...) \method{getResponse}{lm}(object, ...) \method{getResponse}{Arima}(object, ...) \method{getResponse}{fracdiff}(object, ...) \method{getResponse}{ar}(object, ...) \method{getResponse}{tbats}(object, ...) \method{getResponse}{bats}(object, ...) \method{getResponse}{mforecast}(object, ...) \method{getResponse}{baggedModel}(object, ...) } \arguments{ \item{object}{a time series model or forecast object.} \item{...}{Additional arguments that are ignored.} } \value{ A numerical vector or a time series object of class \code{ts}. } \description{ \code{getResponse} is a generic function for extracting the historical data from a time series model (including \code{Arima}, \code{ets}, \code{ar}, \code{fracdiff}), a linear model of class \code{lm}, or a forecast object. The function invokes particular \emph{methods} which depend on the class of the first argument.
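For example, \code{getResponse(Arima(WWWusage, order = c(1, 1, 1)))} returns the original \code{WWWusage} series used in fitting the model.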
} \author{ Rob J Hyndman } \keyword{ts} forecast/man/ocsb.test.Rd0000644000176200001440000000342514003673410015030 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/unitRoot.R \name{ocsb.test} \alias{ocsb.test} \alias{print.OCSBtest} \title{Osborn, Chui, Smith, and Birchenhall Test for Seasonal Unit Roots} \usage{ ocsb.test(x, lag.method = c("fixed", "AIC", "BIC", "AICc"), maxlag = 0) } \arguments{ \item{x}{a univariate seasonal time series.} \item{lag.method}{a character specifying the lag order selection method.} \item{maxlag}{the maximum lag order to be considered by \code{lag.method}.} } \value{ ocsb.test returns a list of class "OCSBtest" with the following components: * statistics the values of the test statistics. * pvalues the p-values for each test statistic. * method a character string describing the type of test. * data.name a character string giving the name of the data. * fitted.model the fitted regression model. } \description{ An implementation of the Osborn, Chui, Smith, and Birchenhall (OCSB) test. } \details{ The regression equation may include lags of the dependent variable. When lag.method = "fixed", the lag order is fixed to maxlag; otherwise, maxlag is the maximum number of lags considered in a lag selection procedure that minimises the lag.method criterion, which can be AIC, BIC, or the corrected AIC (AICc), obtained as AIC + (2k(k+1))/(n-k-1), where k is the number of parameters and n is the number of available observations in the model. Critical values for the test are based on simulations, which have been smoothed to produce critical values for all seasonal periods. } \examples{ ocsb.test(AirPassengers) } \references{ Osborn DR, Chui APL, Smith J, and Birchenhall CR (1988) "Seasonality and the order of integration for consumption", \emph{Oxford Bulletin of Economics and Statistics} \bold{50}(4):361-377. } \seealso{ \code{\link{nsdiffs}} } forecast/man/dm.test.Rd0000644000176200001440000000513714003673410014504 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/DM2.R \name{dm.test} \alias{dm.test} \title{Diebold-Mariano test for predictive accuracy} \usage{ dm.test( e1, e2, alternative = c("two.sided", "less", "greater"), h = 1, power = 2 ) } \arguments{ \item{e1}{Forecast errors from method 1.} \item{e2}{Forecast errors from method 2.} \item{alternative}{a character string specifying the alternative hypothesis, must be one of \code{"two.sided"} (default), \code{"greater"} or \code{"less"}. You can specify just the initial letter.} \item{h}{The forecast horizon used in calculating \code{e1} and \code{e2}.} \item{power}{The power used in the loss function. Usually 1 or 2.} } \value{ A list with class \code{"htest"} containing the following components: \item{statistic}{the value of the DM-statistic.} \item{parameter}{the forecast horizon and loss function power used in the test.} \item{alternative}{a character string describing the alternative hypothesis.} \item{p.value}{the p-value for the test.} \item{method}{a character string with the value "Diebold-Mariano Test".} \item{data.name}{a character vector giving the names of the two error series.} } \description{ The Diebold-Mariano test compares the forecast accuracy of two forecast methods. } \details{ This function implements the modified test proposed by Harvey, Leybourne and Newbold (1997). The null hypothesis is that the two methods have the same forecast accuracy.
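The test is based on the loss differential \eqn{d_t = |e_{1,t}|^p - |e_{2,t}|^p}{d[t] = |e1[t]|^p - |e2[t]|^p}, where \eqn{p} is given by the \code{power} argument; the DM statistic is the sample mean of \eqn{d_t}{d[t]} divided by an estimate of its standard deviation that allows for autocorrelation in \eqn{d_t}{d[t]} up to lag \eqn{h-1}.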
For \code{alternative="less"}, the alternative hypothesis is that method 2 is less accurate than method 1. For \code{alternative="greater"}, the alternative hypothesis is that method 2 is more accurate than method 1. For \code{alternative="two.sided"}, the alternative hypothesis is that method 1 and method 2 have different levels of accuracy. } \examples{ # Test on in-sample one-step forecasts f1 <- ets(WWWusage) f2 <- auto.arima(WWWusage) accuracy(f1) accuracy(f2) dm.test(residuals(f1),residuals(f2),h=1) # Test on out-of-sample one-step forecasts f1 <- ets(WWWusage[1:80]) f2 <- auto.arima(WWWusage[1:80]) f1.out <- ets(WWWusage[81:100],model=f1) f2.out <- Arima(WWWusage[81:100],model=f2) accuracy(f1.out) accuracy(f2.out) dm.test(residuals(f1.out),residuals(f2.out),h=1) } \references{ Diebold, F.X. and Mariano, R.S. (1995) Comparing predictive accuracy. \emph{Journal of Business and Economic Statistics}, \bold{13}, 253-263. Harvey, D., Leybourne, S., & Newbold, P. (1997). Testing the equality of prediction mean squared errors. \emph{International Journal of Forecasting}, \bold{13}(2), 281-291. } \author{ George Athanasopoulos } \keyword{htest} \keyword{ts} forecast/man/ets.Rd0000644000176200001440000001333714003673410013722 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ets.R \name{ets} \alias{ets} \alias{print.ets} \alias{summary.ets} \alias{as.character.ets} \alias{coef.ets} \alias{tsdiag.ets} \title{Exponential smoothing state space model} \usage{ ets( y, model = "ZZZ", damped = NULL, alpha = NULL, beta = NULL, gamma = NULL, phi = NULL, additive.only = FALSE, lambda = NULL, biasadj = FALSE, lower = c(rep(1e-04, 3), 0.8), upper = c(rep(0.9999, 3), 0.98), opt.crit = c("lik", "amse", "mse", "sigma", "mae"), nmse = 3, bounds = c("both", "usual", "admissible"), ic = c("aicc", "aic", "bic"), restrict = TRUE, allow.multiplicative.trend = FALSE, use.initial.values = FALSE, na.action = c("na.contiguous", "na.interp", "na.fail"), ... ) } \arguments{ \item{y}{a numeric vector or time series of class \code{ts}} \item{model}{Usually a three-character string identifying the method using the framework terminology of Hyndman et al. (2002) and Hyndman et al. (2008). The first letter denotes the error type ("A", "M" or "Z"); the second letter denotes the trend type ("N","A","M" or "Z"); and the third letter denotes the season type ("N","A","M" or "Z"). In all cases, "N"=none, "A"=additive, "M"=multiplicative and "Z"=automatically selected. So, for example, "ANN" is simple exponential smoothing with additive errors, "MAM" is multiplicative Holt-Winters' method with multiplicative errors, and so on. It is also possible for the model to be of class \code{"ets"}, and equal to the output from a previous call to \code{ets}. In this case, the same model is fitted to \code{y} without re-estimating any smoothing parameters. See also the \code{use.initial.values} argument.} \item{damped}{If TRUE, use a damped trend (either additive or multiplicative). If NULL, both damped and non-damped trends will be tried and the best model (according to the information criterion \code{ic}) returned.} \item{alpha}{Value of alpha. If NULL, it is estimated.} \item{beta}{Value of beta. If NULL, it is estimated.} \item{gamma}{Value of gamma. If NULL, it is estimated.} \item{phi}{Value of phi. If NULL, it is estimated.} \item{additive.only}{If TRUE, will only consider additive models. Default is FALSE.} \item{lambda}{Box-Cox transformation parameter.
If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, the data are transformed before the model is estimated. When \code{lambda} is specified, \code{additive.only} is set to \code{TRUE}.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{lower}{Lower bounds for the parameters (alpha, beta, gamma, phi)} \item{upper}{Upper bounds for the parameters (alpha, beta, gamma, phi)} \item{opt.crit}{Optimization criterion. One of "mse" (Mean Square Error), "amse" (Average MSE over first \code{nmse} forecast horizons), "sigma" (Standard deviation of residuals), "mae" (Mean of absolute residuals), or "lik" (Log-likelihood, the default).} \item{nmse}{Number of steps for average multistep MSE (1<=\code{nmse}<=30).} \item{bounds}{Type of parameter space to impose: \code{"usual"} indicates all parameters must lie between specified lower and upper bounds; \code{"admissible"} indicates parameters must lie in the admissible space; \code{"both"} (default) takes the intersection of these regions.} \item{ic}{Information criterion to be used in model selection.} \item{restrict}{If \code{TRUE} (default), models with infinite variance will not be allowed.} \item{allow.multiplicative.trend}{If \code{TRUE}, models with multiplicative trend are allowed when searching for a model. Otherwise, the model space excludes them. This argument is ignored if a multiplicative trend model is explicitly requested (e.g., using \code{model="MMN"}).} \item{use.initial.values}{If \code{TRUE} and \code{model} is of class \code{"ets"}, then the initial values in the model are also not re-estimated.} \item{na.action}{A function which indicates what should happen when the data contains NA values. By default, the largest contiguous portion of the time-series will be used.} \item{...}{Other undocumented arguments.} } \value{ An object of class "\code{ets}". The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{ets} and associated functions. } \description{ Returns an ets model applied to \code{y}. } \details{ Based on the classification of methods as described in Hyndman et al. (2008). The methodology is fully automatic. The only required argument for ets is the time series. The model is chosen automatically if not specified. This methodology performed extremely well on the M3-competition data (see Hyndman et al., 2002, below). } \examples{ fit <- ets(USAccDeaths) plot(forecast(fit)) } \references{ Hyndman, R.J., Koehler, A.B., Snyder, R.D., and Grose, S. (2002) "A state space framework for automatic forecasting using exponential smoothing methods", \emph{International J. Forecasting}, \bold{18}(3), 439--454. Hyndman, R.J., Akram, Md., and Archibald, B. (2008) "The admissible parameter space for exponential smoothing models". \emph{Annals of the Institute of Statistical Mathematics}, \bold{60}(2), 407--426. Hyndman, R.J., Koehler, A.B., Ord, J.K., and Snyder, R.D. (2008) \emph{Forecasting with exponential smoothing: the state space approach}, Springer-Verlag. \url{http://www.exponentialsmoothing.net}. } \seealso{ \code{\link[stats]{HoltWinters}}, \code{\link{rwf}}, \code{\link{Arima}}.
} \author{ Rob J Hyndman } \keyword{ts} forecast/man/forecast.Rd0000644000176200001440000001135414003673410014732 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/forecast.R \name{forecast} \alias{forecast} \alias{print.forecast} \alias{summary.forecast} \alias{as.data.frame.forecast} \alias{as.ts.forecast} \alias{forecast.default} \alias{forecast.ts} \title{Forecasting time series} \usage{ forecast(object, ...) \method{forecast}{default}(object, ...) \method{forecast}{ts}( object, h = ifelse(frequency(object) > 1, 2 * frequency(object), 10), level = c(80, 95), fan = FALSE, robust = FALSE, lambda = NULL, biasadj = FALSE, find.frequency = FALSE, allow.multiplicative.trend = FALSE, model = NULL, ... ) } \arguments{ \item{object}{a time series or time series model for which forecasts are required} \item{...}{Additional arguments affecting the forecasts produced. If \code{model=NULL}, \code{forecast.ts} passes these to \code{\link{ets}} or \code{\link{stlf}} depending on the frequency of the time series. If \code{model} is not \code{NULL}, the arguments are passed to the relevant modelling function.} \item{h}{Number of periods for forecasting} \item{level}{Confidence level for prediction intervals.} \item{fan}{If TRUE, \code{level} is set to \code{seq(51,99,by=3)}. This is suitable for fan plots.} \item{robust}{If TRUE, the function is robust to missing values and outliers in \code{object}. This argument is only valid when \code{object} is of class \code{ts}.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, the data are transformed before the model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{find.frequency}{If TRUE, the function determines the appropriate period, if the data is of unknown period.} \item{allow.multiplicative.trend}{If TRUE, then ETS models with multiplicative trends are allowed. Otherwise, only additive or no trend ETS models are permitted.} \item{model}{An object describing a time series model; e.g., one of class \code{ets}, \code{Arima}, \code{bats}, \code{tbats}, or \code{nnetar}.} } \value{ An object of class "\code{forecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract various useful features of the value returned by \code{forecast$model}. An object of class \code{"forecast"} is a list usually containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{residuals}{Residuals from the fitted model.
For models with additive errors, the residuals will be x minus the fitted values.} \item{fitted}{Fitted values (one-step forecasts)} } \description{ \code{forecast} is a generic function for forecasting from time series or time series models. The function invokes particular \emph{methods} which depend on the class of the first argument. } \details{ For example, the function \code{\link{forecast.Arima}} makes forecasts based on the results produced by \code{\link[stats]{arima}}. If \code{model=NULL}, the function \code{\link{forecast.ts}} makes forecasts using \code{\link{ets}} models (if the data are non-seasonal or the seasonal period is 12 or less) or \code{\link{stlf}} (if the seasonal period is 13 or more). If \code{model} is not \code{NULL}, \code{forecast.ts} will apply the \code{model} to the \code{object} time series, and then generate forecasts accordingly. } \examples{ WWWusage \%>\% forecast \%>\% plot fit <- ets(window(WWWusage, end=60)) fc <- forecast(WWWusage, model=fit) } \seealso{ Other functions which return objects of class \code{"forecast"} are \code{\link{forecast.ets}}, \code{\link{forecast.Arima}}, \code{\link{forecast.HoltWinters}}, \code{\link{forecast.StructTS}}, \code{\link{meanf}}, \code{\link{rwf}}, \code{\link{splinef}}, \code{\link{thetaf}}, \code{\link{croston}}, \code{\link{ses}}, \code{\link{holt}}, \code{\link{hw}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/msts.Rd0000644000176200001440000000276414003673410014117 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/msts.R \name{msts} \alias{msts} \alias{print.msts} \alias{window.msts} \alias{`[.msts`} \title{Multi-Seasonal Time Series} \usage{ msts(data, seasonal.periods, ts.frequency = floor(max(seasonal.periods)), ...) } \arguments{ \item{data}{A numeric vector, ts object, matrix or data frame. It is intended that the time series data is univariate; otherwise it is treated the same as ts().} \item{seasonal.periods}{A vector of the seasonal periods of the msts.} \item{ts.frequency}{The seasonal period that should be used as frequency of the underlying ts object. The default value is \code{max(seasonal.periods)}.} \item{...}{Arguments to be passed to the underlying call to \code{ts()}. For example \code{start=c(1987,5)}.} } \value{ An object of class \code{c("msts", "ts")}. If there is only one seasonal period (i.e., \code{length(seasonal.periods)==1}), then the object is of class \code{"ts"}. } \description{ msts is an S3 class for multi-seasonal time series objects, intended to be used for models that support multiple seasonal periods. The msts class inherits from the ts class and has an additional "msts" attribute which contains the vector of seasonal periods. All methods that work on a ts class should also work on an msts class. } \examples{ x <- msts(taylor, seasonal.periods=c(2*24,2*24*7,2*24*365), start=2000+22/52) y <- msts(USAccDeaths, seasonal.periods=12, start=1949) } \author{ Slava Razbash and Rob J Hyndman } \keyword{ts} forecast/man/baggedModel.Rd0000644000176200001440000000445614003673410015323 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/baggedModel.R \name{baggedModel} \alias{baggedModel} \alias{print.baggedModel} \alias{baggedETS} \title{Forecasting using a bagged model} \usage{ baggedModel(y, bootstrapped_series = bld.mbb.bootstrap(y, 100), fn = ets, ...) baggedETS(y, bootstrapped_series = bld.mbb.bootstrap(y, 100), ...)
} \arguments{ \item{y}{A numeric vector or time series of class \code{ts}.} \item{bootstrapped_series}{bootstrapped versions of \code{y}.} \item{fn}{the forecast function to use. Default is \code{\link{ets}}.} \item{\dots}{Other arguments passed to the forecast function.} } \value{ Returns an object of class "\code{baggedModel}". The function \code{print} is used to obtain and print a summary of the results. \item{models}{A list containing the fitted ensemble models.} \item{method}{The function for producing a forecastable model.} \item{y}{The original time series.} \item{bootstrapped_series}{The bootstrapped series.} \item{modelargs}{The arguments passed through to \code{fn}.} \item{fitted}{Fitted values (one-step forecasts). The mean of the fitted values is calculated over the ensemble.} \item{residuals}{Original values minus fitted values.} } \description{ The bagged model forecasting method. } \details{ This function implements the bagged model forecasting method described in Bergmeir et al. (2016). By default, the \code{\link{ets}} function is applied to all bootstrapped series. Base models other than \code{\link{ets}} can be given by the parameter \code{fn}. Using the default parameters, the function \code{\link{bld.mbb.bootstrap}} is used to calculate the bootstrapped series with the Box-Cox and Loess-based decomposition (BLD) bootstrap. The function \code{\link{forecast.baggedModel}} can then be used to calculate forecasts. \code{baggedETS} is a wrapper for \code{baggedModel}, setting \code{fn} to "ets". This function is included for backwards compatibility only, and may be deprecated in the future. } \examples{ fit <- baggedModel(WWWusage) fcast <- forecast(fit) plot(fcast) } \references{ Bergmeir, C., R. J. Hyndman, and J. M. Benitez (2016). Bagging Exponential Smoothing Methods using STL Decomposition and Box-Cox Transformation. International Journal of Forecasting 32, 303-312. } \author{ Christoph Bergmeir, Fotios Petropoulos } \keyword{ts} forecast/man/forecast.nnetar.Rd0000644000176200001440000001023414003673410016214 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/nnetar.R \name{forecast.nnetar} \alias{forecast.nnetar} \title{Forecasting using neural network models} \usage{ \method{forecast}{nnetar}( object, h = ifelse(object$m > 1, 2 * object$m, 10), PI = FALSE, level = c(80, 95), fan = FALSE, xreg = NULL, lambda = object$lambda, bootstrap = FALSE, npaths = 1000, innov = NULL, ... ) } \arguments{ \item{object}{An object of class "\code{nnetar}" resulting from a call to \code{\link{nnetar}}.} \item{h}{Number of periods for forecasting. If \code{xreg} is used, \code{h} is ignored and the number of forecast periods is set to the number of rows of \code{xreg}.} \item{PI}{If TRUE, prediction intervals are produced, otherwise only point forecasts are calculated. If \code{PI} is FALSE, then \code{level}, \code{fan}, \code{bootstrap} and \code{npaths} are all ignored.} \item{level}{Confidence level for prediction intervals.} \item{fan}{If \code{TRUE}, level is set to \code{seq(51,99,by=3)}. This is suitable for fan plots.} \item{xreg}{Future values of external regressor variables.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL.
Otherwise, the data are transformed before the model is estimated.} \item{bootstrap}{If \code{TRUE}, then prediction intervals are computed using simulations with resampled residuals rather than normally distributed errors. Ignored if \code{innov} is not \code{NULL}.} \item{npaths}{Number of sample paths used in computing simulated prediction intervals.} \item{innov}{Values to use as innovations for prediction intervals. Must be a matrix with \code{h} rows and \code{npaths} columns (vectors are coerced into a matrix). If present, \code{bootstrap} is ignored.} \item{...}{Additional arguments passed to \code{\link{simulate.nnetar}}} } \value{ An object of class "\code{forecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{forecast.nnetar}. An object of class "\code{forecast}" is a list containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{xreg}{The external regressors used in fitting (if given).} \item{residuals}{Residuals from the fitted model. That is, x minus the fitted values.} \item{fitted}{Fitted values (one-step forecasts)} \item{...}{Other arguments} } \description{ Returns forecasts and other information for univariate neural network models. } \details{ Prediction intervals are calculated through simulations and can be slow. Note that if the network is too complex and overfits the data, the residuals can be arbitrarily small; if used for prediction interval calculations, they could lead to misleadingly small values. It is possible to use out-of-sample residuals to ameliorate this; see the examples. } \examples{ ## Fit & forecast model fit <- nnetar(USAccDeaths, size=2) fcast <- forecast(fit, h=20) plot(fcast) \dontrun{ ## Include prediction intervals in forecast fcast2 <- forecast(fit, h=20, PI=TRUE, npaths=100) plot(fcast2) ## Set up out-of-sample innovations using cross-validation fit_cv <- CVar(USAccDeaths, size=2) res_sd <- sd(fit_cv$residuals, na.rm=TRUE) myinnovs <- rnorm(20*100, mean=0, sd=res_sd) ## Forecast using new innovations fcast3 <- forecast(fit, h=20, PI=TRUE, npaths=100, innov=myinnovs) plot(fcast3) } } \seealso{ \code{\link{nnetar}}. } \author{ Rob J Hyndman and Gabriel Caceres } \keyword{ts} forecast/man/seasonplot.Rd0000644000176200001440000000411514003673410015310 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ggplot.R, R/graph.R \name{ggseasonplot} \alias{ggseasonplot} \alias{seasonplot} \title{Seasonal plot} \usage{ ggseasonplot( x, season.labels = NULL, year.labels = FALSE, year.labels.left = FALSE, type = NULL, col = NULL, continuous = FALSE, polar = FALSE, labelgap = 0.04, ... ) seasonplot( x, s, season.labels = NULL, year.labels = FALSE, year.labels.left = FALSE, type = "o", main, xlab = NULL, ylab = "", col = 1, labelgap = 0.1, ...
) } \arguments{ \item{x}{a numeric vector or time series of class \code{ts}.} \item{season.labels}{Labels for each season in the "year"} \item{year.labels}{Logical flag indicating whether labels for each year of data should be plotted on the right.} \item{year.labels.left}{Logical flag indicating whether labels for each year of data should be plotted on the left.} \item{type}{plot type (as for \code{\link[graphics]{plot}}). Not yet supported for ggseasonplot.} \item{col}{Colour} \item{continuous}{Should the colour scheme for years be continuous or discrete?} \item{polar}{Plot the graph on seasonal coordinates} \item{labelgap}{Distance between year labels and plotted lines} \item{\dots}{additional arguments to \code{\link[graphics]{plot}}.} \item{s}{seasonal frequency of x} \item{main}{Main title.} \item{xlab}{X-axis label.} \item{ylab}{Y-axis label.} } \value{ None. } \description{ Plots a seasonal plot as described in Hyndman and Athanasopoulos (2018, chapter 2). This is like a time plot except that the data are plotted against the seasons in separate years. } \examples{ ggseasonplot(AirPassengers, col=rainbow(12), year.labels=TRUE) ggseasonplot(AirPassengers, year.labels=TRUE, continuous=TRUE) seasonplot(AirPassengers, col=rainbow(12), year.labels=TRUE) } \references{ Hyndman and Athanasopoulos (2018) \emph{Forecasting: principles and practice}, 2nd edition, OTexts: Melbourne, Australia. \url{https://otexts.com/fpp2/} } \seealso{ \code{\link[stats]{monthplot}} } \author{ Rob J Hyndman & Mitchell O'Hara-Wild } \keyword{ts} forecast/man/ma.Rd0000644000176200001440000000262014003673410013515 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/season.R \name{ma} \alias{ma} \title{Moving-average smoothing} \usage{ ma(x, order, centre = TRUE) } \arguments{ \item{x}{Univariate time series} \item{order}{Order of moving average smoother} \item{centre}{If TRUE, then the moving average is centred for even orders.} } \value{ Numerical time series object containing the simple moving average smoothed values. } \description{ \code{ma} computes a simple moving average smoother of a given time series. } \details{ The moving average smoother averages the nearest \code{order} periods of each observation. As neighbouring observations of a time series are likely to be similar in value, averaging eliminates some of the randomness in the data, leaving a smooth trend-cycle component. \deqn{\hat{T}_{t} = \frac{1}{m} \sum_{j=-k}^k y_{t+j}}{T[t]=1/m(y[t-k]+y[t-k+1]+\ldots+y[t]+\ldots+y[t+k-1]+y[t+k])} where \eqn{m} is the \code{order} and \eqn{k=\frac{m-1}{2}}{k=(m-1)/2}. When an even \code{order} is specified, the observations averaged will include one more observation from the future than the past (k is rounded up). If centre is TRUE, the values from two moving averages (where k is rounded up and down, respectively) are averaged, centring the moving average. } \examples{ plot(wineind) sm <- ma(wineind,order=12) lines(sm,col="red") } \seealso{ \code{\link[stats]{decompose}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/gglagplot.Rd0000644000176200001440000000370514003673410015105 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ggplot.R \name{gglagplot} \alias{gglagplot} \alias{gglagchull} \title{Time series lag ggplots} \usage{ gglagplot( x, lags = ifelse(frequency(x) > 9, 16, 9), set.lags = 1:lags, diag = TRUE, diag.col = "gray", do.lines = TRUE, colour = TRUE, continuous = frequency(x) > 12, labels = FALSE, seasonal = TRUE, ...
) gglagchull( x, lags = ifelse(frequency(x) > 1, min(12, frequency(x)), 4), set.lags = 1:lags, diag = TRUE, diag.col = "gray", ... ) } \arguments{ \item{x}{a time series object (type \code{ts}).} \item{lags}{number of lag plots desired; see the \code{set.lags} argument.} \item{set.lags}{vector of positive integers specifying which lags to use.} \item{diag}{logical indicating if the x=y diagonal should be drawn.} \item{diag.col}{colour to be used for the diagonal if \code{diag} is TRUE.} \item{do.lines}{if TRUE, lines will be drawn, otherwise points will be drawn.} \item{colour}{logical indicating if lines should be coloured.} \item{continuous}{Should the colour scheme for years be continuous or discrete?} \item{labels}{logical indicating if labels should be used.} \item{seasonal}{Should the line colour be based on seasonal characteristics (TRUE), or sequential (FALSE).} \item{\dots}{Not used (for consistency with lag.plot)} } \value{ None. } \description{ Plots a lag plot using ggplot. } \details{ \dQuote{gglagplot} will plot time series against lagged versions of themselves. This helps visualise 'auto-dependence' even when auto-correlations vanish. \dQuote{gglagchull} will overlay convex hulls of the lags on a single plot. This helps visualise the change in 'auto-dependence' as lags increase. } \examples{ gglagplot(woolyrnq) gglagplot(woolyrnq,seasonal=FALSE) lungDeaths <- cbind(mdeaths, fdeaths) gglagplot(lungDeaths, lags=2) gglagchull(lungDeaths, lags=6) gglagchull(woolyrnq) } \seealso{ \code{\link[stats]{lag.plot}} } \author{ Mitchell O'Hara-Wild } forecast/man/Acf.Rd0000644000176200001440000000747114003673410013622 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/acf.R \name{Acf} \alias{Acf} \alias{Pacf} \alias{Ccf} \alias{taperedacf} \alias{taperedpacf} \title{(Partial) Autocorrelation and Cross-Correlation Function Estimation} \usage{ Acf( x, lag.max = NULL, type = c("correlation", "covariance", "partial"), plot = TRUE, na.action = na.contiguous, demean = TRUE, ... ) Pacf( x, lag.max = NULL, plot = TRUE, na.action = na.contiguous, demean = TRUE, ... ) Ccf( x, y, lag.max = NULL, type = c("correlation", "covariance"), plot = TRUE, na.action = na.contiguous, ... ) taperedacf( x, lag.max = NULL, type = c("correlation", "partial"), plot = TRUE, calc.ci = TRUE, level = 95, nsim = 100, ... ) taperedpacf(x, ...) } \arguments{ \item{x}{a univariate or multivariate (not Ccf) numeric time series object or a numeric vector or matrix.} \item{lag.max}{maximum lag at which to calculate the acf. Default is \eqn{10\log_{10}(N/m)}{10*log10(N/m)} where \eqn{N} is the number of observations and \eqn{m} the number of series. Will be automatically limited to one less than the number of observations in the series.} \item{type}{character string giving the type of acf to be computed. Allowed values are \dQuote{\code{correlation}} (the default), \dQuote{\code{covariance}} or \dQuote{\code{partial}}.} \item{plot}{logical. If \code{TRUE} (the default) the resulting acf, pacf or ccf is plotted.} \item{na.action}{function to handle missing values. Default is \code{\link[stats]{na.contiguous}}.
Useful alternatives are \code{\link[stats]{na.pass}} and \code{\link{na.interp}}.} \item{demean}{Should covariances be about the sample means?} \item{...}{Additional arguments passed to the plotting function.} \item{y}{a univariate numeric time series object or a numeric vector.} \item{calc.ci}{If \code{TRUE}, confidence intervals for the ACF/PACF estimates are calculated.} \item{level}{Percentage level used for the confidence intervals.} \item{nsim}{The number of bootstrap samples used in estimating the confidence intervals.} } \value{ The \code{Acf}, \code{Pacf} and \code{Ccf} functions return objects of class "acf" as described in \code{\link[stats]{acf}} from the stats package. The \code{taperedacf} and \code{taperedpacf} functions return objects of class "mpacf". } \description{ The function \code{Acf} computes (and by default plots) an estimate of the autocorrelation function of a (possibly multivariate) time series. Function \code{Pacf} computes (and by default plots) an estimate of the partial autocorrelation function of a (possibly multivariate) time series. Function \code{Ccf} computes the cross-correlation or cross-covariance of two univariate series. } \details{ The functions improve the \code{\link[stats]{acf}}, \code{\link[stats]{pacf}} and \code{\link[stats]{ccf}} functions. The main differences are that \code{Acf} does not plot a spike at lag 0 when \code{type=="correlation"} (which is redundant) and the horizontal axes show lags in time units rather than seasonal units. The tapered versions implement the ACF and PACF estimates and plots described in Hyndman (2015), based on the banded and tapered estimates of autocovariance proposed by McMurry and Politis (2010). } \examples{ Acf(wineind) Pacf(wineind) \dontrun{ taperedacf(wineind, nsim=50) taperedpacf(wineind, nsim=50) } } \references{ Hyndman, R.J. (2015). Discussion of ``High-dimensional autocovariance matrices and optimal linear prediction''. \emph{Electronic Journal of Statistics}, 9, 792-796. McMurry, T. L., & Politis, D. N. (2010). Banded and tapered estimates for autocovariance matrices and the linear process bootstrap. \emph{Journal of Time Series Analysis}, 31(6), 471-482. } \seealso{ \code{\link[stats]{acf}}, \code{\link[stats]{pacf}}, \code{\link[stats]{ccf}}, \code{\link{tsdisplay}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/seasonal.Rd0000644000176200001440000000205414003673410014726 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/components.R \name{seasonal} \alias{seasonal} \alias{trendcycle} \alias{remainder} \title{Extract components from a time series decomposition} \usage{ seasonal(object) trendcycle(object) remainder(object) } \arguments{ \item{object}{Object created by \code{\link[stats]{decompose}}, \code{\link[stats]{stl}} or \code{\link{tbats}}.} } \value{ Univariate time series. } \description{ Returns a univariate time series equal to either a seasonal component, trend-cycle component or remainder component from a time series decomposition. } \examples{ plot(USAccDeaths) fit <- stl(USAccDeaths, s.window="periodic") lines(trendcycle(fit),col="red") library(ggplot2) autoplot(cbind( Data=USAccDeaths, Seasonal=seasonal(fit), Trend=trendcycle(fit), Remainder=remainder(fit)), facets=TRUE) + ylab("") + xlab("Year") } \seealso{ \code{\link[stats]{stl}}, \code{\link[stats]{decompose}}, \code{\link{tbats}}, \code{\link{seasadj}}. 
} \author{ Rob J Hyndman } \keyword{ts} forecast/man/CVar.Rd0000644000176200001440000000404214003673410013753 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/tscv.R \name{CVar} \alias{CVar} \alias{print.CVar} \title{k-fold Cross-Validation applied to an autoregressive model} \usage{ CVar( y, k = 10, FUN = nnetar, cvtrace = FALSE, blocked = FALSE, LBlags = 24, ... ) } \arguments{ \item{y}{Univariate time series} \item{k}{Number of folds to use for cross-validation.} \item{FUN}{Function to fit an autoregressive model. Currently, it only works with the \code{\link{nnetar}} function.} \item{cvtrace}{Provide progress information.} \item{blocked}{Choose folds randomly or as blocks?} \item{LBlags}{Lags for the Ljung-Box test; defaults to 24. For yearly series it can be set to 20.} \item{...}{Other arguments are passed to \code{FUN}.} } \value{ A list containing information about the model and accuracy for each fold, plus other summary information computed across folds. } \description{ \code{CVar} computes the errors obtained by applying an autoregressive modelling function to subsets of the time series \code{y} using k-fold cross-validation as described in Bergmeir, Hyndman and Koo (2018). It also applies a Ljung-Box test to the residuals. If this test is significant (see returned pvalue), there is serial correlation in the residuals and the model can be considered to be underfitting the data. In this case, the cross-validated errors can underestimate the generalization error and should not be used. } \examples{ modelcv <- CVar(lynx, k=5, lambda=0.15) print(modelcv) print(modelcv$fold1) library(ggplot2) autoplot(lynx, series="Data") + autolayer(modelcv$testfit, series="Fits") + autolayer(modelcv$residuals, series="Residuals") ggAcf(modelcv$residuals) } \references{ Bergmeir, C., Hyndman, R.J., Koo, B. (2018) A note on the validity of cross-validation for evaluating time series prediction. \emph{Computational Statistics & Data Analysis}, \bold{120}, 70-83. \url{https://robjhyndman.com/publications/cv-time-series/}. } \seealso{ \link{CV}, \link{tsCV}. } \author{ Gabriel Caceres and Rob J Hyndman } \keyword{ts} forecast/man/plot.ets.Rd0000644000176200001440000000207514003673410014674 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ets.R, R/ggplot.R \name{plot.ets} \alias{plot.ets} \alias{autoplot.ets} \title{Plot components from ETS model} \usage{ \method{plot}{ets}(x, ...) \method{autoplot}{ets}(object, range.bars = NULL, ...) } \arguments{ \item{x}{Object of class \dQuote{ets}.} \item{...}{Other plotting parameters to affect the plot.} \item{object}{Object of class \dQuote{ets}. Used for ggplot graphics (S3 method consistency).} \item{range.bars}{Logical indicating if each plot should have a bar at its right side representing relative size. If NULL, automatic selection takes place.} } \value{ None. Function produces a plot } \description{ Produces a plot of the level, slope and seasonal components from an ETS model. } \details{ \code{autoplot} will produce an equivalent plot as a ggplot object.
} \examples{ fit <- ets(USAccDeaths) plot(fit) plot(fit,plot.type="single",ylab="",col=1:3) library(ggplot2) autoplot(fit) } \seealso{ \code{\link{ets}} } \author{ Rob J Hyndman & Mitchell O'Hara-Wild } \keyword{hplot} forecast/man/arima.errors.Rd0000644000176200001440000000153114003673410015524 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/arima.R \name{arima.errors} \alias{arima.errors} \title{Errors from a regression model with ARIMA errors} \usage{ arima.errors(object) } \arguments{ \item{object}{An object containing a time series model of class \code{Arima}.} } \value{ A \code{ts} object } \description{ Returns time series of the regression residuals from a fitted ARIMA model. } \details{ This is a deprecated function which is identical to \code{\link{residuals.Arima}(object, type="regression")}. Regression residuals are equal to the original data minus the effect of any regression variables. If there are no regression variables, the errors will be identical to the original series (possibly adjusted to have zero mean). } \seealso{ \code{\link{residuals.Arima}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/gghistogram.Rd0000644000176200001440000000171514003673410015437 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ggplot.R \name{gghistogram} \alias{gghistogram} \title{Histogram with optional normal and kernel density functions} \usage{ gghistogram( x, add.normal = FALSE, add.kde = FALSE, add.rug = TRUE, bins, boundary = 0 ) } \arguments{ \item{x}{a numerical vector.} \item{add.normal}{Add a normal density function for comparison} \item{add.kde}{Add a kernel density estimate for comparison} \item{add.rug}{Add a rug plot on the horizontal axis} \item{bins}{The number of bins to use for the histogram. Selected by default using the Friedman-Diaconis rule given by \code{\link[grDevices]{nclass.FD}}.} \item{boundary}{A boundary between two bins.} } \value{ None. } \description{ Plots a histogram and density estimates using ggplot. } \examples{ gghistogram(lynx, add.kde=TRUE) } \seealso{ \code{\link[graphics]{hist}}, \code{\link[ggplot2]{geom_histogram}} } \author{ Rob J Hyndman } forecast/man/arimaorder.Rd0000644000176200001440000000211114003673410015244 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/arima.R \name{arimaorder} \alias{arimaorder} \title{Return the order of an ARIMA or ARFIMA model} \usage{ arimaorder(object) } \arguments{ \item{object}{An object of class \dQuote{\code{Arima}}, \dQuote{\code{ar}} or \dQuote{\code{fracdiff}}. Usually the result of a call to \code{\link[stats]{arima}}, \code{\link{Arima}}, \code{\link{auto.arima}}, \code{\link[stats]{ar}}, \code{\link{arfima}} or \code{\link[fracdiff]{fracdiff}}.} } \value{ A numerical vector giving the values \eqn{p}, \eqn{d} and \eqn{q} of the ARIMA or ARFIMA model. For a seasonal ARIMA model, the returned vector contains the values \eqn{p}, \eqn{d}, \eqn{q}, \eqn{P}, \eqn{D}, \eqn{Q} and \eqn{m}, where \eqn{m} is the period of seasonality. } \description{ Returns the order of a univariate ARIMA or ARFIMA model. } \examples{ WWWusage \%>\% auto.arima \%>\% arimaorder } \seealso{ \code{\link[stats]{ar}}, \code{\link{auto.arima}}, \code{\link{Arima}}, \code{\link[stats]{arima}}, \code{\link{arfima}}.
} \author{ Rob J Hyndman } \keyword{ts} forecast/man/tsclean.Rd0000644000176200001440000000235214165140171014555 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/clean.R \name{tsclean} \alias{tsclean} \title{Identify and replace outliers and missing values in a time series} \usage{ tsclean(x, replace.missing = TRUE, iterate = 2, lambda = NULL) } \arguments{ \item{x}{time series} \item{replace.missing}{If TRUE, it not only replaces outliers, but also interpolates missing values} \item{iterate}{the number of iterations required} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, the data are transformed before the model is estimated.} } \value{ Time series } \description{ Uses supsmu for non-seasonal series and a robust STL decomposition for seasonal series. To estimate missing values and outlier replacements, linear interpolation is used on the (possibly seasonally adjusted) series. } \examples{ cleangold <- tsclean(gold) } \references{ Hyndman (2021) "Detecting time series outliers" \url{https://robjhyndman.com/hyndsight/tsoutliers/}. } \seealso{ \code{\link[forecast]{na.interp}}, \code{\link[forecast]{tsoutliers}}, \code{\link[stats]{supsmu}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/autolayer.Rd0000644000176200001440000000134614003673410015131 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ggplot.R \name{autolayer} \alias{autolayer} \title{Create a ggplot layer appropriate to a particular data type} \usage{ autolayer(object, ...) } \arguments{ \item{object}{an object, whose class will determine the behaviour of autolayer} \item{...}{other arguments passed to specific methods} } \value{ a ggplot layer } \description{ \code{autolayer()} uses ggplot2 to draw a particular layer for an object of a particular class in a single command. This defines the S3 generic that other classes and packages can extend. } \seealso{ \code{\link[ggplot2:autoplot]{autoplot()}}, \code{\link[ggplot2:ggplot]{ggplot()}} and \code{\link[ggplot2:fortify]{fortify()}} } forecast/man/croston.Rd0000644000176200001440000000511114003673410014605 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/forecast2.R \name{croston} \alias{croston} \title{Forecasts for intermittent demand using Croston's method} \usage{ croston(y, h = 10, alpha = 0.1, x = y) } \arguments{ \item{y}{a numeric vector or time series of class \code{ts}} \item{h}{Number of periods for forecasting.} \item{alpha}{Value of alpha. Default value is 0.1.} \item{x}{Deprecated. Included for backwards compatibility.} } \value{ An object of class \code{"forecast"} is a list containing at least the following elements: \item{model}{A list containing information about the fitted model. The first element gives the model used for non-zero demands. The second element gives the model used for times between non-zero demands. Both elements are of class \code{forecast}.} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{residuals}{Residuals from the fitted model.
That is, y minus the fitted values.} \item{fitted}{Fitted values (one-step forecasts)} The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{croston} and associated functions. } \description{ Returns forecasts and other information for Croston's forecasts applied to y. } \details{ Based on Croston's (1972) method for intermittent demand forecasting, also described in Shenstone and Hyndman (2005). Croston's method involves using simple exponential smoothing (SES) on the non-zero elements of the time series and a separate application of SES to the times between non-zero elements of the time series. The smoothing parameters of the two applications of SES are assumed to be equal and are denoted by \code{alpha}. Note that prediction intervals are not computed as Croston's method has no underlying stochastic model. } \examples{ y <- rpois(20,lambda=.3) fcast <- croston(y) plot(fcast) } \references{ Croston, J. (1972) "Forecasting and stock control for intermittent demands", \emph{Operational Research Quarterly}, \bold{23}(3), 289-303. Shenstone, L., and Hyndman, R.J. (2005) "Stochastic models underlying Croston's method for intermittent demand forecasting". \emph{Journal of Forecasting}, \bold{24}, 389-402. } \seealso{ \code{\link{ses}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/is.ets.Rd0000644000176200001440000000120614003673410014324 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/acf.R, R/arima.R, R/baggedModel.R, R/bats.R, % R/ets.R, R/modelAR.R, R/mstl.R, R/nnetar.R \name{is.acf} \alias{is.acf} \alias{is.Arima} \alias{is.baggedModel} \alias{is.bats} \alias{is.ets} \alias{is.modelAR} \alias{is.stlm} \alias{is.nnetar} \alias{is.nnetarmodels} \title{Is an object a particular model type?} \usage{ is.acf(x) is.Arima(x) is.baggedModel(x) is.bats(x) is.ets(x) is.modelAR(x) is.stlm(x) is.nnetar(x) is.nnetarmodels(x) } \arguments{ \item{x}{object to be tested} } \description{ Returns true if the model object is of a particular type } forecast/man/ndiffs.Rd0000644000176200001440000000523114003673410014372 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/unitRoot.R \name{ndiffs} \alias{ndiffs} \title{Number of differences required for a stationary series} \usage{ ndiffs( x, alpha = 0.05, test = c("kpss", "adf", "pp"), type = c("level", "trend"), max.d = 2, ... ) } \arguments{ \item{x}{A univariate time series} \item{alpha}{Level of the test; possible values range from 0.01 to 0.1.} \item{test}{Type of unit root test to use} \item{type}{Specification of the deterministic component in the regression} \item{max.d}{Maximum number of non-seasonal differences allowed} \item{...}{Additional arguments to be passed on to the unit root test} } \value{ An integer indicating the number of differences required for stationarity. } \description{ Functions to estimate the number of differences required to make a given time series stationary. \code{ndiffs} estimates the number of first differences necessary. } \details{ \code{ndiffs} uses a unit root test to determine the number of differences required for time series \code{x} to be made stationary. If \code{test="kpss"}, the KPSS test is used with the null hypothesis that \code{x} has a stationary root against a unit-root alternative.
Then the test returns the least number of differences required to pass the test at the level \code{alpha}. If \code{test="adf"}, the Augmented Dickey-Fuller test is used and if \code{test="pp"} the Phillips-Perron test is used. In both of these cases, the null hypothesis is that \code{x} has a unit root against a stationary root alternative. Then the test returns the least number of differences required to fail the test at the level \code{alpha}. } \examples{ ndiffs(WWWusage) ndiffs(diff(log(AirPassengers), 12)) } \references{ Dickey DA and Fuller WA (1979), "Distribution of the Estimators for Autoregressive Time Series with a Unit Root", \emph{Journal of the American Statistical Association} \bold{74}:427-431. Kwiatkowski D, Phillips PCB, Schmidt P and Shin Y (1992) "Testing the Null Hypothesis of Stationarity against the Alternative of a Unit Root", \emph{Journal of Econometrics} \bold{54}:159-178. Osborn, D.R. (1990) "A survey of seasonality in UK macroeconomic variables", \emph{International Journal of Forecasting}, \bold{6}:327-336. Phillips, P.C.B. and Perron, P. (1988) "Testing for a unit root in time series regression", \emph{Biometrika}, \bold{75}(2), 335-346. Said E and Dickey DA (1984), "Testing for Unit Roots in Autoregressive Moving Average Models of Unknown Order", \emph{Biometrika} \bold{71}:599-607. } \seealso{ \code{\link{auto.arima}} and \code{\link{nsdiffs}} } \author{ Rob J Hyndman, Slava Razbash & Mitchell O'Hara-Wild } \keyword{ts} forecast/man/tsdisplay.Rd0000644000176200001440000000465114003673410015142 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ggplot.R, R/graph.R \name{ggtsdisplay} \alias{ggtsdisplay} \alias{tsdisplay} \title{Time series display} \usage{ ggtsdisplay( x, plot.type = c("partial", "histogram", "scatter", "spectrum"), points = TRUE, smooth = FALSE, lag.max, na.action = na.contiguous, theme = NULL, ... ) tsdisplay( x, plot.type = c("partial", "histogram", "scatter", "spectrum"), points = TRUE, ci.type = c("white", "ma"), lag.max, na.action = na.contiguous, main = NULL, xlab = "", ylab = "", pch = 1, cex = 0.5, ... ) } \arguments{ \item{x}{a numeric vector or time series of class \code{ts}.} \item{plot.type}{type of plot to include in lower right corner.} \item{points}{logical flag indicating whether to show the individual points or not in the time plot.} \item{smooth}{logical flag indicating whether to show a smooth loess curve superimposed on the time plot.} \item{lag.max}{the maximum lag to plot for the acf and pacf. A suitable value is selected by default if the argument is missing.} \item{na.action}{function to handle missing values in acf, pacf and spectrum calculations. The default is \code{\link[stats]{na.contiguous}}. Useful alternatives are \code{\link[stats]{na.pass}} and \code{\link{na.interp}}.} \item{theme}{Adds a ggplot element to each plot, typically a theme.} \item{\dots}{additional arguments to \code{\link[stats]{acf}}.} \item{ci.type}{type of confidence limits for ACF that is passed to \code{\link[stats]{acf}}. Should the confidence limits assume a white noise input or for lag \eqn{k} an MA(\eqn{k-1}) input?} \item{main}{Main title.} \item{xlab}{X-axis label.} \item{ylab}{Y-axis label.} \item{pch}{Plotting character.} \item{cex}{Character size.} } \value{ None. } \description{ Plots a time series along with its acf and either its pacf, lagged scatterplot or spectrum. } \details{ \code{ggtsdisplay} will produce the equivalent plot using ggplot graphics.
} \examples{ library(ggplot2) ggtsdisplay(USAccDeaths, plot.type="scatter", theme=theme_bw()) tsdisplay(diff(WWWusage)) ggtsdisplay(USAccDeaths, plot.type="scatter") } \references{ Hyndman and Athanasopoulos (2018) \emph{Forecasting: principles and practice}, 2nd edition, OTexts: Melbourne, Australia. \url{https://otexts.com/fpp2/} } \seealso{ \code{\link[stats]{plot.ts}}, \code{\link{Acf}}, \code{\link[stats]{spec.ar}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/ses.Rd0000644000176200001440000001120014003673410013710 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/HoltWintersNew.R \name{ses} \alias{ses} \alias{holt} \alias{hw} \title{Exponential smoothing forecasts} \usage{ ses( y, h = 10, level = c(80, 95), fan = FALSE, initial = c("optimal", "simple"), alpha = NULL, lambda = NULL, biasadj = FALSE, x = y, ... ) holt( y, h = 10, damped = FALSE, level = c(80, 95), fan = FALSE, initial = c("optimal", "simple"), exponential = FALSE, alpha = NULL, beta = NULL, phi = NULL, lambda = NULL, biasadj = FALSE, x = y, ... ) hw( y, h = 2 * frequency(x), seasonal = c("additive", "multiplicative"), damped = FALSE, level = c(80, 95), fan = FALSE, initial = c("optimal", "simple"), exponential = FALSE, alpha = NULL, beta = NULL, gamma = NULL, phi = NULL, lambda = NULL, biasadj = FALSE, x = y, ... ) } \arguments{ \item{y}{a numeric vector or time series of class \code{ts}} \item{h}{Number of periods for forecasting.} \item{level}{Confidence level for prediction intervals.} \item{fan}{If TRUE, level is set to seq(51,99,by=3). This is suitable for fan plots.} \item{initial}{Method used for selecting initial state values. If \code{optimal}, the initial values are optimized along with the smoothing parameters using \code{\link{ets}}. If \code{simple}, the initial values are set to values obtained using simple calculations on the first few observations. See Hyndman & Athanasopoulos (2018) for details.} \item{alpha}{Value of smoothing parameter for the level. If \code{NULL}, it will be estimated.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, the data are transformed before the model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{x}{Deprecated. Included for backwards compatibility.} \item{...}{Other arguments passed to \code{forecast.ets}.} \item{damped}{If TRUE, use a damped trend.} \item{exponential}{If TRUE, an exponential trend is fitted. Otherwise, the trend is (locally) linear.} \item{beta}{Value of smoothing parameter for the trend. If \code{NULL}, it will be estimated.} \item{phi}{Value of damping parameter if \code{damped=TRUE}. If \code{NULL}, it will be estimated.} \item{seasonal}{Type of seasonality in \code{hw} model. "additive" or "multiplicative"} \item{gamma}{Value of smoothing parameter for the seasonal component. If \code{NULL}, it will be estimated.} } \value{ An object of class "\code{forecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals.
The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{ets} and associated functions. An object of class \code{"forecast"} is a list containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{residuals}{Residuals from the fitted model.} \item{fitted}{Fitted values (one-step forecasts)} } \description{ Returns forecasts and other information for exponential smoothing forecasts applied to \code{y}. } \details{ ses, holt and hw are simply convenient wrapper functions for \code{forecast(ets(...))}. } \examples{ fcast <- holt(airmiles) plot(fcast) deaths.fcast <- hw(USAccDeaths,h=48) plot(deaths.fcast) } \references{ Hyndman, R.J., Koehler, A.B., Ord, J.K., Snyder, R.D. (2008) \emph{Forecasting with exponential smoothing: the state space approach}, Springer-Verlag: New York. \url{http://www.exponentialsmoothing.net}. Hyndman and Athanasopoulos (2018) \emph{Forecasting: principles and practice}, 2nd edition, OTexts: Melbourne, Australia. \url{https://otexts.com/fpp2/} } \seealso{ \code{\link{ets}}, \code{\link[stats]{HoltWinters}}, \code{\link{rwf}}, \code{\link[stats]{arima}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/forecast.mts.Rd0000644000176200001440000000711314003673410015532 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/mforecast.R \name{forecast.mts} \alias{forecast.mts} \alias{mforecast} \alias{print.mforecast} \alias{summary.mforecast} \alias{as.data.frame.mforecast} \title{Forecasting time series} \usage{ \method{forecast}{mts}( object, h = ifelse(frequency(object) > 1, 2 * frequency(object), 10), level = c(80, 95), fan = FALSE, robust = FALSE, lambda = NULL, biasadj = FALSE, find.frequency = FALSE, allow.multiplicative.trend = FALSE, ... ) } \arguments{ \item{object}{a multivariate time series or multivariate time series model for which forecasts are required} \item{h}{Number of periods for forecasting} \item{level}{Confidence level for prediction intervals.} \item{fan}{If TRUE, \code{level} is set to \code{seq(51,99,by=3)}. This is suitable for fan plots.} \item{robust}{If TRUE, the function is robust to missing values and outliers in \code{object}. This argument is only valid when \code{object} is of class \code{mts}.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{find.frequency}{If TRUE, the function determines the appropriate period, if the data is of unknown period.} \item{allow.multiplicative.trend}{If TRUE, then ETS models with multiplicative trends are allowed. 
Otherwise, only additive or no trend ETS models are permitted.} \item{...}{Additional arguments affecting the forecasts produced.} } \value{ An object of class "\code{mforecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the multivariate forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract various useful features of the value returned by \code{forecast$model}. An object of class \code{"mforecast"} is a list usually containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{residuals}{Residuals from the fitted model. For models with additive errors, the residuals will be x minus the fitted values.} \item{fitted}{Fitted values (one-step forecasts)} } \description{ \code{mforecast} is a class of objects for forecasting from multivariate time series or multivariate time series models. The function invokes particular \emph{methods} which depend on the class of the first argument. } \details{ For example, the function \code{\link{forecast.mlm}} makes multivariate forecasts based on the results produced by \code{\link{tslm}}. } \seealso{ Other functions which return objects of class \code{"mforecast"} are \code{\link{forecast.mlm}}, \code{forecast.varest}. } \author{ Rob J Hyndman & Mitchell O'Hara-Wild } forecast/man/modelAR.Rd0000644000176200001440000000733214003673410014450 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/modelAR.R \name{modelAR} \alias{modelAR} \alias{print.modelAR} \title{Time Series Forecasts with a user-defined model} \usage{ modelAR( y, p, P = 1, FUN, predict.FUN, xreg = NULL, lambda = NULL, model = NULL, subset = NULL, scale.inputs = FALSE, x = y, ... ) } \arguments{ \item{y}{A numeric vector or time series of class \code{ts}.} \item{p}{Embedding dimension for non-seasonal time series. Number of non-seasonal lags used as inputs. For non-seasonal time series, the default is the optimal number of lags (according to the AIC) for a linear AR(p) model. For seasonal time series, the same method is used but applied to seasonally adjusted data (from an stl decomposition).} \item{P}{Number of seasonal lags used as inputs.} \item{FUN}{Function used for model fitting. Must accept arguments \code{x} and \code{y} for the predictors and response, respectively (\code{formula} objects are not currently supported).} \item{predict.FUN}{Prediction function used to apply \code{FUN} to new data. Must accept an object returned by \code{FUN} as its first argument, and a data frame or matrix of new data for its second argument. Additionally, it should return fitted values when new data is omitted.} \item{xreg}{Optionally, a vector or matrix of external regressors, which must have the same number of rows as \code{y}. Must be numeric.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL.
Otherwise, data transformed before model is estimated.} \item{model}{Output from a previous call to \code{modelAR}. If model is passed, this same model is fitted to \code{y} without re-estimating any parameters.} \item{subset}{Optional vector specifying a subset of observations to be used in the fit. Can be an integer index vector or a logical vector the same length as \code{y}. All observations are used by default.} \item{scale.inputs}{If TRUE, inputs are scaled by subtracting the column means and dividing by their respective standard deviations. If \code{lambda} is not \code{NULL}, scaling is applied after Box-Cox transformation.} \item{x}{Deprecated. Included for backwards compatibility.} \item{\dots}{Other arguments passed to \code{FUN} for \code{modelAR}.} } \value{ Returns an object of class "\code{modelAR}". The function \code{summary} is used to obtain and print a summary of the results. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{modelAR}. \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{x}{The original time series.} \item{xreg}{The external regressors used in fitting (if given).} \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} \item{fitted}{Fitted values (one-step forecasts)} \item{...}{Other arguments} } \description{ Experimental function to forecast univariate time series with a user-defined model } \details{ This is an experimental function and only recommended for advanced users. The selected model is fitted with lagged values of \code{y} as inputs. The inputs are for lags 1 to \code{p}, and lags \code{m} to \code{mP} where \code{m=frequency(y)}. If \code{xreg} is provided, its columns are also used as inputs. If there are missing values in \code{y} or \code{xreg}, the corresponding rows (and any others which depend on them as lags) are omitted from the fit. The model is trained for one-step forecasting. Multi-step forecasts are computed recursively. } \author{ Rob J Hyndman and Gabriel Caceres } \keyword{ts} forecast/man/taylor.Rd0000644000176200001440000000125114003673410014431 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/data.R \docType{data} \name{taylor} \alias{taylor} \title{Half-hourly electricity demand} \format{ Time series data } \source{ James W Taylor } \usage{ taylor } \description{ Half-hourly electricity demand in England and Wales from Monday 5 June 2000 to Sunday 27 August 2000. Discussed in Taylor (2003), and kindly provided by James W Taylor. Units: Megawatts } \examples{ plot(taylor) } \references{ Taylor, J.W. (2003) Short-term electricity demand forecasting using double seasonal exponential smoothing. \emph{Journal of the Operational Research Society}, \bold{54}, 799-805. } \keyword{datasets} forecast/man/seasonaldummy.Rd0000644000176200001440000000313714003673410016005 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/season.R \name{seasonaldummy} \alias{seasonaldummy} \alias{seasonaldummyf} \title{Seasonal dummy variables} \usage{ seasonaldummy(x, h = NULL) seasonaldummyf(x, h) } \arguments{ \item{x}{Seasonal time series: a \code{ts} or a \code{msts} object} \item{h}{Number of periods ahead to forecast (optional)} } \value{ Numerical matrix.
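Each row corresponds to one period of \code{x} (or to one of the \code{h} future periods when \code{h} is supplied) and each column to one season, so a series of frequency \eqn{m} yields \eqn{m-1} columns because the last season is used as the control. A quick sketch of the structure: \preformatted{month <- seasonaldummy(ldeaths)  # monthly series, so 11 dummy columns
dim(month)
head(month, 2)}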
} \description{ \code{seasonaldummy} returns a matrix of dummy variables suitable for use in \code{\link{Arima}}, \code{\link{auto.arima}} or \code{\link{tslm}}. The last season is omitted and used as the control. } \details{ \code{seasonaldummyf} is deprecated, instead use the \code{h} argument in \code{seasonaldummy}. The number of dummy variables is determined from the time series characteristics of \code{x}. When \code{h} is missing, the number of rows of the matrix returned by \code{seasonaldummy} equals the length of \code{x}. Otherwise, \code{h} determines the number of rows (one per forecast period), which is how the function is typically used for forecasting. The values within \code{x} are not used. } \examples{ plot(ldeaths) # Using seasonal dummy variables month <- seasonaldummy(ldeaths) deaths.lm <- tslm(ldeaths ~ month) tsdisplay(residuals(deaths.lm)) ldeaths.fcast <- forecast(deaths.lm, data.frame(month=I(seasonaldummy(ldeaths,36)))) plot(ldeaths.fcast) # A simpler approach to seasonal dummy variables deaths.lm <- tslm(ldeaths ~ season) ldeaths.fcast <- forecast(deaths.lm, h=36) plot(ldeaths.fcast) } \seealso{ \code{\link{fourier}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/forecast.modelAR.Rd0000644000176200001440000000705214003673410016254 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/modelAR.R \name{forecast.modelAR} \alias{forecast.modelAR} \title{Forecasting using user-defined model} \usage{ \method{forecast}{modelAR}( object, h = ifelse(object$m > 1, 2 * object$m, 10), PI = FALSE, level = c(80, 95), fan = FALSE, xreg = NULL, lambda = object$lambda, bootstrap = FALSE, npaths = 1000, innov = NULL, ... ) } \arguments{ \item{object}{An object of class "\code{modelAR}" resulting from a call to \code{\link{modelAR}}.} \item{h}{Number of periods for forecasting. If \code{xreg} is used, \code{h} is ignored and the number of forecast periods is set to the number of rows of \code{xreg}.} \item{PI}{If TRUE, prediction intervals are produced, otherwise only point forecasts are calculated. If \code{PI} is FALSE, then \code{level}, \code{fan}, \code{bootstrap} and \code{npaths} are all ignored.} \item{level}{Confidence level for prediction intervals.} \item{fan}{If \code{TRUE}, level is set to \code{seq(51,99,by=3)}. This is suitable for fan plots.} \item{xreg}{Future values of external regressor variables.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{bootstrap}{If \code{TRUE}, then prediction intervals are computed using simulations with resampled residuals rather than normally distributed errors. Ignored if \code{innov} is not \code{NULL}.} \item{npaths}{Number of sample paths used in computing simulated prediction intervals.} \item{innov}{Values to use as innovations for prediction intervals. Must be a matrix with \code{h} rows and \code{npaths} columns (vectors are coerced into a matrix). If present, \code{bootstrap} is ignored.} \item{...}{Additional arguments passed to \code{\link{simulate.modelAR}}} } \value{ An object of class "\code{forecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals.
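As an end-to-end illustration of the \code{FUN}/\code{predict.FUN} contract, here is a minimal sketch using a least-squares autoregression (the wrapper names \code{fit_ls} and \code{predict_ls} are hypothetical, not part of the package): \preformatted{## fitting function: must accept x (a matrix of lagged predictors) and y
fit_ls <- function(x, y, ...) {
  x <- as.matrix(x)
  structure(list(coef = qr.solve(cbind(1, x), y), x = x), class = "lsAR")
}
## prediction function: must return fitted values when newdata is omitted
predict_ls <- function(object, newdata, ...) {
  if (missing(newdata)) newdata <- object$x
  drop(cbind(1, rbind(newdata)) \%*\% object$coef)  # rbind() coerces a vector to one row
}
fit <- modelAR(WWWusage, p = 2, FUN = fit_ls, predict.FUN = predict_ls)
plot(forecast(fit, h = 10))}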
The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{forecast.modelAR}. An object of class "\code{forecast}" is a list containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{xreg}{The external regressors used in fitting (if given).} \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} \item{fitted}{Fitted values (one-step forecasts)} \item{...}{Other arguments} } \description{ Returns forecasts and other information for user-defined models. } \details{ Prediction intervals are calculated through simulations and can be slow. Note that if the model is too complex and overfits the data, the residuals can be arbitrarily small; if used for prediction interval calculations, they could lead to misleadingly small values. } \seealso{ \code{\link{nnetar}}. } \author{ Rob J Hyndman and Gabriel Caceres } \keyword{ts} forecast/man/accuracy.Rd0000644000176200001440000000646614003673410014726 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/errors.R \name{accuracy} \alias{accuracy} \alias{accuracy.default} \title{Accuracy measures for a forecast model} \usage{ accuracy(object, ...) \method{accuracy}{default}(object, x, test = NULL, d = NULL, D = NULL, f = NULL, ...) } \arguments{ \item{object}{An object of class \dQuote{\code{forecast}}, or a numerical vector containing forecasts. It will also work with \code{Arima}, \code{ets} and \code{lm} objects if \code{x} is omitted -- in which case training set accuracy measures are returned.} \item{...}{Additional arguments depending on the specific method.} \item{x}{An optional numerical vector containing actual values of the same length as object, or a time series overlapping with the times of \code{f}.} \item{test}{Indicator of which elements of \code{x} and \code{f} to test. If \code{test} is \code{NULL}, all elements are used. Otherwise test is a numeric vector containing the indices of the elements to use in the test.} \item{d}{An integer indicating the number of lag-1 differences to be used for the denominator in MASE calculation. Default value is 1 for non-seasonal series and 0 for seasonal series.} \item{D}{An integer indicating the number of seasonal differences to be used for the denominator in MASE calculation. Default value is 0 for non-seasonal series and 1 for seasonal series.} \item{f}{Deprecated. Please use \code{object} instead.} } \value{ Matrix giving forecast accuracy measures. } \description{ Returns a range of summary measures of the forecast accuracy. If \code{x} is provided, the function measures test set forecast accuracy based on \code{x-f}. If \code{x} is not provided, the function only produces training set accuracy measures of the forecasts based on \code{f["x"]-fitted(f)}. All measures are defined and discussed in Hyndman and Koehler (2006).
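For a test set, the basic measures are simple functions of the forecast errors, as the following hand-computed check sketches (\code{accuracy} itself also returns further measures such as MASE): \preformatted{fc <- meanf(EuStockMarkets[1:200, 1], h = 50)
e <- EuStockMarkets[201:250, 1] - fc$mean   # test-set errors, x - f
c(ME = mean(e), RMSE = sqrt(mean(e^2)), MAE = mean(abs(e)))
accuracy(fc, EuStockMarkets[201:250, 1])    # matching ME, RMSE and MAE columns}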
} \details{ The measures calculated are: \itemize{ \item ME: Mean Error \item RMSE: Root Mean Squared Error \item MAE: Mean Absolute Error \item MPE: Mean Percentage Error \item MAPE: Mean Absolute Percentage Error \item MASE: Mean Absolute Scaled Error \item ACF1: Autocorrelation of errors at lag 1. } By default, the MASE calculation is scaled using MAE of training set naive forecasts for non-seasonal time series, training set seasonal naive forecasts for seasonal time series and training set mean forecasts for non-time series data. If \code{f} is a numerical vector rather than a \code{forecast} object, the MASE will not be returned as the training data will not be available. See Hyndman and Koehler (2006) and Hyndman and Athanasopoulos (2018, Section 3.4) for further details. } \examples{ fit1 <- rwf(EuStockMarkets[1:200, 1], h = 100) fit2 <- meanf(EuStockMarkets[1:200, 1], h = 100) accuracy(fit1) accuracy(fit2) accuracy(fit1, EuStockMarkets[201:300, 1]) accuracy(fit2, EuStockMarkets[201:300, 1]) plot(fit1) lines(EuStockMarkets[1:300, 1]) } \references{ Hyndman, R.J. and Koehler, A.B. (2006) "Another look at measures of forecast accuracy". \emph{International Journal of Forecasting}, \bold{22}(4), 679-688. Hyndman, R.J. and Athanasopoulos, G. (2018) "Forecasting: principles and practice", 2nd ed., OTexts, Melbourne, Australia. Section 3.4 "Evaluating forecast accuracy". \url{https://otexts.com/fpp2/accuracy.html}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/figures/0000755000176200001440000000000014003673410014275 5ustar liggesusersforecast/man/figures/logo.png0000644000176200001440000001365414003673410015754 0ustar liggesusers[binary PNG image data for the package logo omitted] forecast/man/forecast.ets.Rd0000644000176200001440000000652414003673410015527 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/etsforecast.R \name{forecast.ets} \alias{forecast.ets} \title{Forecasting using ETS models} \usage{ \method{forecast}{ets}( object, h = ifelse(object$m > 1, 2 * object$m, 10), level = c(80, 95), fan = FALSE, simulate = FALSE, bootstrap = FALSE, npaths = 5000, PI = TRUE, lambda = object$lambda, biasadj = NULL, ... ) } \arguments{ \item{object}{An object of class "\code{ets}". Usually the result of a call to \code{\link{ets}}.} \item{h}{Number of periods for forecasting} \item{level}{Confidence level for prediction intervals.} \item{fan}{If TRUE, level is set to seq(51,99,by=3). This is suitable for fan plots.} \item{simulate}{If TRUE, prediction intervals are produced by simulation rather than using analytic formulae. Errors are assumed to be normally distributed.} \item{bootstrap}{If TRUE, then prediction intervals are produced by simulation using resampled errors (rather than normally distributed errors).} \item{npaths}{Number of sample paths used in computing simulated prediction intervals.} \item{PI}{If TRUE, prediction intervals are produced, otherwise only point forecasts are calculated. If \code{PI} is FALSE, then \code{level}, \code{fan}, \code{simulate}, \code{bootstrap} and \code{npaths} are all ignored.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts.
If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{...}{Other arguments.} } \value{ An object of class "\code{forecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{forecast.ets}. An object of class \code{"forecast"} is a list containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{residuals}{Residuals from the fitted model. For models with additive errors, the residuals are x - fitted values. For models with multiplicative errors, the residuals are equal to x /(fitted values) - 1.} \item{fitted}{Fitted values (one-step forecasts)} } \description{ Returns forecasts and other information for univariate ETS models. } \examples{ fit <- ets(USAccDeaths) plot(forecast(fit,h=48)) } \seealso{ \code{\link{ets}}, \code{\link{ses}}, \code{\link{holt}}, \code{\link{hw}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/CV.Rd0000644000176200001440000000135314003673410013432 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/lm.R \name{CV} \alias{CV} \title{Cross-validation statistic} \usage{ CV(obj) } \arguments{ \item{obj}{output from \code{\link[stats]{lm}} or \code{\link{tslm}}} } \value{ Numerical vector containing CV, AIC, AICc, BIC and AdjR2 values. } \description{ Computes the leave-one-out cross-validation statistic (the mean of PRESS -- prediction residual sum of squares), AIC, corrected AIC, BIC and adjusted R^2 values for a linear model. } \examples{ y <- ts(rnorm(120,0,3) + 20*sin(2*pi*(1:120)/12), frequency=12) fit1 <- tslm(y ~ trend + season) fit2 <- tslm(y ~ season) CV(fit1) CV(fit2) } \seealso{ \code{\link[stats]{AIC}} } \author{ Rob J Hyndman } \keyword{models} forecast/man/forecast.baggedModel.Rd0000644000176200001440000000456614003673410017132 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/baggedModel.R \name{forecast.baggedModel} \alias{forecast.baggedModel} \title{Forecasting using a bagged model} \usage{ \method{forecast}{baggedModel}( object, h = ifelse(frequency(object$y) > 1, 2 * frequency(object$y), 10), ... ) } \arguments{ \item{object}{An object of class "\code{baggedModel}" resulting from a call to \code{\link{baggedModel}}.} \item{h}{Number of periods for forecasting.} \item{...}{Other arguments, passed on to the \code{\link{forecast}} function of the original method} } \value{ An object of class "\code{forecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. 
An object of class "\code{forecast}" is a list containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{xreg}{The external regressors used in fitting (if given).} \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} \item{fitted}{Fitted values (one-step forecasts)} } \description{ Returns forecasts and other information for bagged models. } \details{ Intervals are calculated as min and max values over the point forecasts from the models in the ensemble. I.e., the intervals are not prediction intervals, but give an indication of how different the forecasts within the ensemble are. } \examples{ fit <- baggedModel(WWWusage) fcast <- forecast(fit) plot(fcast) \dontrun{ fit2 <- baggedModel(WWWusage, fn="auto.arima") fcast2 <- forecast(fit2) plot(fcast2) accuracy(fcast2)} } \references{ Bergmeir, C., R. J. Hyndman, and J. M. Benitez (2016). Bagging Exponential Smoothing Methods using STL Decomposition and Box-Cox Transformation. International Journal of Forecasting 32, 303-312. } \seealso{ \code{\link{baggedModel}}. } \author{ Christoph Bergmeir, Fotios Petropoulos } \keyword{ts} forecast/man/reexports.Rd0000644000176200001440000000075214003673410015157 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/forecast-package.R, R/ggplot.R \docType{import} \name{reexports} \alias{reexports} \alias{\%>\%} \alias{autoplot} \title{Objects exported from other packages} \keyword{internal} \description{ These objects are imported from other packages. Follow the links below to see their documentation. \describe{ \item{ggplot2}{\code{\link[ggplot2]{autoplot}}} \item{magrittr}{\code{\link[magrittr:pipe]{\%>\%}}} }} forecast/man/findfrequency.Rd0000644000176200001440000000224514003673410015765 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/forecast.R \name{findfrequency} \alias{findfrequency} \title{Find dominant frequency of a time series} \usage{ findfrequency(x) } \arguments{ \item{x}{a numeric vector or time series of class \code{ts}} } \value{ an integer value } \description{ \code{findfrequency} returns the period of the dominant frequency of a time series. For seasonal data, it will return the seasonal period. For cyclic data, it will return the average cycle length. } \details{ The dominant frequency is determined from a spectral analysis of the time series. First, a linear trend is removed, then the spectral density function is estimated from the best fitting autoregressive model (based on the AIC). If there is a large (possibly local) maximum in the spectral density function at frequency \eqn{f}, then the function will return the period \eqn{1/f} (rounded to the nearest integer). If no such dominant frequency can be found, the function will return 1. 
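A simplified sketch of this procedure, using \code{\link{tslm}} for the detrending and \code{\link[stats]{spec.ar}} for the spectral density (a simplification of the actual implementation): \preformatted{detrended <- residuals(tslm(USAccDeaths ~ trend))  # remove a linear trend
sp <- spec.ar(c(na.contiguous(detrended)), plot = FALSE)
round(1/sp$freq[which.max(sp$spec)])               # period of the dominant peak}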
} \examples{ findfrequency(USAccDeaths) # Monthly data findfrequency(taylor) # Half-hourly data findfrequency(lynx) # Annual data } \author{ Rob J Hyndman } \keyword{ts} forecast/man/nnetar.Rd0000644000176200001440000001073514003673410014415 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/nnetar.R \name{nnetar} \alias{nnetar} \alias{print.nnetar} \alias{print.nnetarmodels} \title{Neural Network Time Series Forecasts} \usage{ nnetar( y, p, P = 1, size, repeats = 20, xreg = NULL, lambda = NULL, model = NULL, subset = NULL, scale.inputs = TRUE, x = y, ... ) } \arguments{ \item{y}{A numeric vector or time series of class \code{ts}.} \item{p}{Embedding dimension for non-seasonal time series. Number of non-seasonal lags used as inputs. For non-seasonal time series, the default is the optimal number of lags (according to the AIC) for a linear AR(p) model. For seasonal time series, the same method is used but applied to seasonally adjusted data (from an stl decomposition).} \item{P}{Number of seasonal lags used as inputs.} \item{size}{Number of nodes in the hidden layer. Default is half of the number of input nodes (including external regressors, if given) plus 1.} \item{repeats}{Number of networks to fit with different random starting weights. These are then averaged when producing forecasts.} \item{xreg}{Optionally, a vector or matrix of external regressors, which must have the same number of rows as \code{y}. Must be numeric.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{model}{Output from a previous call to \code{nnetar}. If model is passed, this same model is fitted to \code{y} without re-estimating any parameters.} \item{subset}{Optional vector specifying a subset of observations to be used in the fit. Can be an integer index vector or a logical vector the same length as \code{y}. All observations are used by default.} \item{scale.inputs}{If TRUE, inputs are scaled by subtracting the column means and dividing by their respective standard deviations. If \code{lambda} is not \code{NULL}, scaling is applied after Box-Cox transformation.} \item{x}{Deprecated. Included for backwards compatibility.} \item{\dots}{Other arguments passed to \code{\link[nnet]{nnet}} for \code{nnetar}.} } \value{ Returns an object of class "\code{nnetar}". The function \code{summary} is used to obtain and print a summary of the results. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{nnetar}. \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{x}{The original time series.} \item{xreg}{The external regressors used in fitting (if given).} \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} \item{fitted}{Fitted values (one-step forecasts)} \item{...}{Other arguments} } \description{ Feed-forward neural networks with a single hidden layer and lagged inputs for forecasting univariate time series. } \details{ A feed-forward neural network is fitted with lagged values of \code{y} as inputs and a single hidden layer with \code{size} nodes. The inputs are for lags 1 to \code{p}, and lags \code{m} to \code{mP} where \code{m=frequency(y)}. 
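For instance, with monthly data (\eqn{m=12}), setting \code{p=3} and \code{P=1} gives the inputs \eqn{y_{t-1}, y_{t-2}, y_{t-3}} and \eqn{y_{t-12}}: \preformatted{fit <- nnetar(USAccDeaths, p = 3, P = 1, size = 2)
fit   # prints the NNAR(3,1,2)[12] specification}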
If \code{xreg} is provided, its columns are also used as inputs. If there are missing values in \code{y} or \code{xreg}, the corresponding rows (and any others which depend on them as lags) are omitted from the fit. A total of \code{repeats} networks are fitted, each with random starting weights. These are then averaged when computing forecasts. The network is trained for one-step forecasting. Multi-step forecasts are computed recursively. For non-seasonal data, the fitted model is denoted as an NNAR(p,k) model, where k is the number of hidden nodes. This is analogous to an AR(p) model but with nonlinear functions. For seasonal data, the fitted model is called an NNAR(p,P,k)[m] model, which is analogous to an ARIMA(p,0,0)(P,0,0)[m] model but with nonlinear functions. } \examples{ fit <- nnetar(lynx) fcast <- forecast(fit) plot(fcast) ## Arguments can be passed to nnet() fit <- nnetar(lynx, decay=0.5, maxit=150) plot(forecast(fit)) lines(lynx) ## Fit model to first 100 years of lynx data fit <- nnetar(window(lynx,end=1920), decay=0.5, maxit=150) plot(forecast(fit,h=14)) lines(lynx) ## Apply fitted model to later data, including all optional arguments fit2 <- nnetar(window(lynx,start=1921), model=fit) } \author{ Rob J Hyndman and Gabriel Caceres } \keyword{ts} forecast/man/autoplot.acf.Rd0000644000176200001440000000553614003673410015530 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ggplot.R \name{autoplot.acf} \alias{autoplot.acf} \alias{ggAcf} \alias{ggPacf} \alias{ggCcf} \alias{autoplot.mpacf} \alias{ggtaperedacf} \alias{ggtaperedpacf} \title{ggplot (Partial) Autocorrelation and Cross-Correlation Function Estimation and Plotting} \usage{ \method{autoplot}{acf}(object, ci = 0.95, ...) ggAcf( x, lag.max = NULL, type = c("correlation", "covariance", "partial"), plot = TRUE, na.action = na.contiguous, demean = TRUE, ... ) ggPacf( x, lag.max = NULL, plot = TRUE, na.action = na.contiguous, demean = TRUE, ... ) ggCcf( x, y, lag.max = NULL, type = c("correlation", "covariance"), plot = TRUE, na.action = na.contiguous, ... ) \method{autoplot}{mpacf}(object, ...) ggtaperedacf( x, lag.max = NULL, type = c("correlation", "partial"), plot = TRUE, calc.ci = TRUE, level = 95, nsim = 100, ... ) ggtaperedpacf(x, ...) } \arguments{ \item{object}{Object of class \dQuote{\code{acf}}.} \item{ci}{coverage probability for confidence interval. Plotting of the confidence interval is suppressed if ci is zero or negative.} \item{...}{Other plotting parameters to affect the plot.} \item{x}{a univariate or multivariate (not Ccf) numeric time series object or a numeric vector or matrix.} \item{lag.max}{maximum lag at which to calculate the acf.} \item{type}{character string giving the type of acf to be computed. Allowed values are "\code{correlation}" (the default), \dQuote{\code{covariance}} or \dQuote{\code{partial}}.} \item{plot}{logical. If \code{TRUE} (the default) the resulting ACF, PACF or CCF is plotted.} \item{na.action}{function to handle missing values. Default is \code{\link[stats]{na.contiguous}}. 
Useful alternatives are \code{\link[stats]{na.pass}} and \code{\link{na.interp}}.} \item{demean}{Should covariances be about the sample means?} \item{y}{a univariate numeric time series object or a numeric vector.} \item{calc.ci}{If \code{TRUE}, confidence intervals for the ACF/PACF estimates are calculated.} \item{level}{Percentage level used for the confidence intervals.} \item{nsim}{The number of bootstrap samples used in estimating the confidence intervals.} } \value{ A ggplot object. } \description{ Produces a ggplot object equivalent to the plots produced by the Acf, Pacf, Ccf, taperedacf and taperedpacf functions. } \details{ If \code{autoplot} is given an \code{acf} or \code{mpacf} object, then an appropriate ggplot object will be created. } \examples{ library(ggplot2) ggAcf(wineind) wineind \%>\% Acf(plot=FALSE) \%>\% autoplot \dontrun{ wineind \%>\% taperedacf(plot=FALSE) \%>\% autoplot ggtaperedacf(wineind) ggtaperedpacf(wineind)} ggCcf(mdeaths, fdeaths) } \seealso{ \code{\link[stats]{plot.acf}}, \code{\link{Acf}}, \code{\link[stats]{acf}}, \code{\link{taperedacf}} } \author{ Mitchell O'Hara-Wild } forecast/man/is.constant.Rd0000644000176200001440000000046014003673410015363 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/newarima2.R \name{is.constant} \alias{is.constant} \title{Is an object constant?} \usage{ is.constant(x) } \arguments{ \item{x}{object to be tested} } \description{ Returns \code{TRUE} if the object's numerical values do not vary. } forecast/man/naive.Rd0000644000176200001440000000777214003673410014237 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/naive.R \name{rwf} \alias{rwf} \alias{naive} \alias{print.naive} \alias{snaive} \title{Naive and Random Walk Forecasts} \usage{ rwf( y, h = 10, drift = FALSE, level = c(80, 95), fan = FALSE, lambda = NULL, biasadj = FALSE, ..., x = y ) naive( y, h = 10, level = c(80, 95), fan = FALSE, lambda = NULL, biasadj = FALSE, ..., x = y ) snaive( y, h = 2 * frequency(x), level = c(80, 95), fan = FALSE, lambda = NULL, biasadj = FALSE, ..., x = y ) } \arguments{ \item{y}{a numeric vector or time series of class \code{ts}} \item{h}{Number of periods for forecasting} \item{drift}{Logical flag. If TRUE, fits a random walk with drift model.} \item{level}{Confidence levels for prediction intervals.} \item{fan}{If TRUE, level is set to seq(51,99,by=3). This is suitable for fan plots.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{...}{Additional arguments affecting the forecasts produced. If \code{model=NULL}, \code{forecast.ts} passes these to \code{\link{ets}} or \code{\link{stlf}} depending on the frequency of the time series. If \code{model} is not \code{NULL}, the arguments are passed to the relevant modelling function.} \item{x}{Deprecated. Included for backwards compatibility.} } \value{ An object of class "\code{forecast}".
The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{naive} or \code{snaive}. An object of class \code{"forecast"} is a list containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} \item{fitted}{Fitted values (one-step forecasts)} } \description{ \code{rwf()} returns forecasts and prediction intervals for a random walk with drift model applied to \code{y}. This is equivalent to an ARIMA(0,1,0) model with an optional drift coefficient. \code{naive()} is simply a wrapper to \code{rwf()}. \code{snaive()} returns forecasts and prediction intervals from an ARIMA(0,0,0)(0,1,0)m model where m is the seasonal period. } \details{ The random walk with drift model is \deqn{Y_t=c + Y_{t-1} + Z_t}{Y[t]=c + Y[t-1] + Z[t]} where \eqn{Z_t}{Z[t]} is a normal iid error. Forecasts are given by \deqn{Y_n(h)=ch+Y_n}{Y[n+h]=ch+Y[n]}. If there is no drift (as in \code{naive}), the drift parameter c=0. Forecast standard errors allow for uncertainty in estimating the drift parameter (unlike the corresponding forecasts obtained by fitting an ARIMA model directly). The seasonal naive model is \deqn{Y_t= Y_{t-m} + Z_t}{Y[t]=Y[t-m] + Z[t]} where \eqn{Z_t}{Z[t]} is a normal iid error. } \examples{ gold.fcast <- rwf(gold[1:60], h=50) plot(gold.fcast) plot(naive(gold,h=50),include=200) plot(snaive(wineind)) } \seealso{ \code{\link{Arima}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/monthdays.Rd0000644000176200001440000000134614003673410015132 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/season.R \name{monthdays} \alias{monthdays} \title{Number of days in each season} \usage{ monthdays(x) } \arguments{ \item{x}{time series} } \value{ Time series } \description{ Returns the number of days in each month or quarter of the observed time period. } \details{ Useful for month-length adjustments. } \examples{ par(mfrow=c(2,1)) plot(ldeaths,xlab="Year",ylab="deaths", main="Monthly deaths from lung disease (UK)") ldeaths.adj <- ldeaths/monthdays(ldeaths)*365.25/12 plot(ldeaths.adj,xlab="Year",ylab="deaths", main="Adjusted monthly deaths from lung disease (UK)") } \seealso{ \code{\link[forecast]{bizdays}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/gas.Rd0000644000176200001440000000060414003673410013672 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/data.R \docType{data} \name{gas} \alias{gas} \title{Australian monthly gas production} \format{ Time series data } \source{ Australian Bureau of Statistics. } \usage{ gas } \description{ Australian monthly gas production: 1956--1995.
} \examples{ plot(gas) seasonplot(gas) tsdisplay(gas) } \keyword{datasets} forecast/man/tslm.Rd0000644000176200001440000000433114003673410014100 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/lm.R \name{tslm} \alias{tslm} \title{Fit a linear model with time series components} \usage{ tslm(formula, data, subset, lambda = NULL, biasadj = FALSE, ...) } \arguments{ \item{formula}{an object of class "formula" (or one that can be coerced to that class): a symbolic description of the model to be fitted.} \item{data}{an optional data frame, list or environment (or object coercible by as.data.frame to a data frame) containing the variables in the model. If not found in data, the variables are taken from environment(formula), typically the environment from which lm is called.} \item{subset}{an optional subset containing rows of data to keep. For best results, pass a logical vector of rows to keep. Also supports \code{\link[base]{subset}()} functions.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{...}{Other arguments passed to \code{\link[stats]{lm}()}} } \value{ Returns an object of class "lm". } \description{ \code{tslm} is used to fit linear models to time series including trend and seasonality components. } \details{ \code{tslm} is largely a wrapper for \code{\link[stats]{lm}()} except that it allows variables "trend" and "season" which are created on the fly from the time series characteristics of the data. The variable "trend" is a simple time trend and "season" is a factor indicating the season (e.g., the month or the quarter depending on the frequency of the data). } \examples{ y <- ts(rnorm(120,0,3) + 1:120 + 20*sin(2*pi*(1:120)/12), frequency=12) fit <- tslm(y ~ trend + season) plot(forecast(fit, h=20)) } \seealso{ \code{\link{forecast.lm}}, \code{\link[stats]{lm}}. } \author{ Mitchell O'Hara-Wild and Rob J Hyndman } \keyword{stats} forecast/man/forecast.mlm.Rd0000644000176200001440000000710014003673410015510 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/mforecast.R \name{forecast.mlm} \alias{forecast.mlm} \title{Forecast a multiple linear model with possible time series components} \usage{ \method{forecast}{mlm}( object, newdata, h = 10, level = c(80, 95), fan = FALSE, lambda = object$lambda, biasadj = NULL, ts = TRUE, ... ) } \arguments{ \item{object}{Object of class "mlm", usually the result of a call to \code{\link[stats]{lm}} or \code{\link{tslm}}.} \item{newdata}{An optional data frame in which to look for variables with which to predict. If omitted, it is assumed that the only variables are trend and season, and \code{h} forecasts are produced.} \item{h}{Number of periods for forecasting. Ignored if \code{newdata} present.} \item{level}{Confidence level for prediction intervals.} \item{fan}{If \code{TRUE}, level is set to seq(51,99,by=3). This is suitable for fan plots.} \item{lambda}{Box-Cox transformation parameter. 
If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{ts}{If \code{TRUE}, the forecasts will be treated as time series provided the original data is a time series; the \code{newdata} will be interpreted as related to the subsequent time periods. If \code{FALSE}, any time series attributes of the original data will be ignored.} \item{...}{Other arguments passed to \code{\link[forecast]{forecast.lm}()}.} } \value{ An object of class "\code{mforecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{forecast.lm}. An object of class \code{"mforecast"} is a list containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a multivariate time series} \item{lower}{Lower limits for prediction intervals of each series} \item{upper}{Upper limits for prediction intervals of each series} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The historical data for the response variable.} \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} \item{fitted}{Fitted values} } \description{ \code{forecast.mlm} is used to predict multiple linear models, especially those involving trend and seasonality components. } \details{ \code{forecast.mlm} is largely a wrapper for \code{\link[forecast]{forecast.lm}()} except that it allows forecasts to be generated on multiple series. Also, the output is reformatted into a \code{mforecast} object. } \examples{ lungDeaths <- cbind(mdeaths, fdeaths) fit <- tslm(lungDeaths ~ trend + season) fcast <- forecast(fit, h=10) carPower <- as.matrix(mtcars[,c("qsec","hp")]) carmpg <- mtcars[,"mpg"] fit <- lm(carPower ~ carmpg) fcast <- forecast(fit, newdata=data.frame(carmpg=30)) } \seealso{ \code{\link{tslm}}, \code{\link{forecast.lm}}, \code{\link[stats]{lm}}. } \author{ Mitchell O'Hara-Wild } forecast/man/bld.mbb.bootstrap.Rd0000644000176200001440000000234514003673410016440 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/bootstrap.R \name{bld.mbb.bootstrap} \alias{bld.mbb.bootstrap} \title{Box-Cox and Loess-based decomposition bootstrap.} \usage{ bld.mbb.bootstrap(x, num, block_size = NULL) } \arguments{ \item{x}{Original time series.} \item{num}{Number of bootstrapped versions to generate.} \item{block_size}{Block size for the moving block bootstrap.} } \value{ A list with bootstrapped versions of the series. The first series in the list is the original series. } \description{ Generates bootstrapped versions of a time series using the Box-Cox and Loess-based decomposition bootstrap. } \details{ The procedure is described in Bergmeir et al. 
A Box-Cox transformation is applied, the series is decomposed using STL (or loess for non-seasonal time series), and the remainder is bootstrapped using a moving block bootstrap. } \examples{ bootstrapped_series <- bld.mbb.bootstrap(WWWusage, 100) } \references{ Bergmeir, C., R. J. Hyndman, and J. M. Benitez (2016). Bagging Exponential Smoothing Methods using STL Decomposition and Box-Cox Transformation. International Journal of Forecasting 32, 303-312. } \seealso{ \code{\link{baggedETS}}. } \author{ Christoph Bergmeir, Fotios Petropoulos } \keyword{ts} forecast/man/autoplot.seas.Rd0000644000176200001440000000350714003673410015726 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ggplot.R, R/mstl.R \name{autoplot.decomposed.ts} \alias{autoplot.decomposed.ts} \alias{autoplot.stl} \alias{autoplot.StructTS} \alias{autoplot.seas} \alias{autoplot.mstl} \title{Plot time series decomposition components using ggplot} \usage{ \method{autoplot}{decomposed.ts}(object, labels = NULL, range.bars = NULL, ...) \method{autoplot}{stl}(object, labels = NULL, range.bars = TRUE, ...) \method{autoplot}{StructTS}(object, labels = NULL, range.bars = TRUE, ...) \method{autoplot}{seas}(object, labels = NULL, range.bars = NULL, ...) \method{autoplot}{mstl}(object, ...) } \arguments{ \item{object}{Object of class \dQuote{\code{seas}}, \dQuote{\code{stl}}, or \dQuote{\code{decomposed.ts}}.} \item{labels}{Labels to replace \dQuote{seasonal}, \dQuote{trend}, and \dQuote{remainder}.} \item{range.bars}{Logical indicating if each plot should have a bar at its right side representing relative size. If \code{NULL}, automatic selection takes place.} \item{...}{Other plotting parameters to affect the plot.} } \value{ Returns an object of class \code{ggplot}. } \description{ Produces a ggplot object of seasonally decomposed time series for objects of class \dQuote{\code{stl}} (created with \code{\link[stats]{stl}}), class \dQuote{\code{seas}} (created with \code{\link[seasonal]{seas}}), or class \dQuote{\code{decomposed.ts}} (created with \code{\link[stats]{decompose}}). } \examples{ library(ggplot2) co2 \%>\% decompose() \%>\% autoplot() nottem \%>\% stl(s.window = "periodic") \%>\% autoplot() \dontrun{ library(seasonal) seas(USAccDeaths) \%>\% autoplot() } } \seealso{ \code{\link[seasonal]{seas}}, \code{\link[stats]{stl}}, \code{\link[stats]{decompose}}, \code{\link[stats]{StructTS}}, \code{\link[stats]{plot.stl}}. } \author{ Mitchell O'Hara-Wild } forecast/man/dshw.Rd0000644000176200001440000001004214003673410014062 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/dshw.r \name{dshw} \alias{dshw} \title{Double-Seasonal Holt-Winters Forecasting} \usage{ dshw( y, period1 = NULL, period2 = NULL, h = 2 * max(period1, period2), alpha = NULL, beta = NULL, gamma = NULL, omega = NULL, phi = NULL, lambda = NULL, biasadj = FALSE, armethod = TRUE, model = NULL ) } \arguments{ \item{y}{Either an \code{\link{msts}} object with two seasonal periods or a numeric vector.} \item{period1}{Period of the shorter seasonal period. Only used if \code{y} is not an \code{\link{msts}} object.} \item{period2}{Period of the longer seasonal period. Only used if \code{y} is not an \code{\link{msts}} object.} \item{h}{Number of periods for forecasting.} \item{alpha}{Smoothing parameter for the level. If \code{NULL}, the parameter is estimated using least squares.} \item{beta}{Smoothing parameter for the slope.
If \code{NULL}, the parameter is estimated using least squares.} \item{gamma}{Smoothing parameter for the first seasonal period. If \code{NULL}, the parameter is estimated using least squares.} \item{omega}{Smoothing parameter for the second seasonal period. If \code{NULL}, the parameter is estimated using least squares.} \item{phi}{Autoregressive parameter. If \code{NULL}, the parameter is estimated using least squares.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{armethod}{If TRUE, the forecasts are adjusted using an AR(1) model for the errors.} \item{model}{If specified, an existing model is applied to a new data set.} } \value{ An object of class "\code{forecast}" which is a list that includes the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{x}{The original time series.} \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} \item{fitted}{Fitted values (one-step forecasts)} The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{dshw}. } \description{ Returns forecasts using Taylor's (2003) Double-Seasonal Holt-Winters method. } \details{ Taylor's (2003) double-seasonal Holt-Winters method uses additive trend and multiplicative seasonality, where there are two seasonal components which are multiplied together. For example, with a series of half-hourly data, one would set \code{period1=48} for the daily period and \code{period2=336} for the weekly period. The smoothing parameter notation used here is different from that in Taylor (2003); instead it matches that used in Hyndman et al. (2008) and that used for the \code{\link{ets}} function. } \examples{ \dontrun{ fcast <- dshw(taylor) plot(fcast) t <- seq(0,5,by=1/20) x <- exp(sin(2*pi*t) + cos(2*pi*t*4) + rnorm(length(t),0,.1)) fit <- dshw(x,20,5) plot(fit) } } \references{ Taylor, J.W. (2003) Short-term electricity demand forecasting using double seasonal exponential smoothing. \emph{Journal of the Operational Research Society}, \bold{54}, 799-805. Hyndman, R.J., Koehler, A.B., Ord, J.K., and Snyder, R.D. (2008) \emph{Forecasting with exponential smoothing: the state space approach}, Springer-Verlag. \url{http://www.exponentialsmoothing.net}. } \seealso{ \code{\link[stats]{HoltWinters}}, \code{\link{ets}}.
} \author{ Rob J Hyndman } \keyword{ts} forecast/man/Arima.Rd0000644000176200001440000001067214003673410014157 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/arima.R \name{Arima} \alias{Arima} \alias{print.ARIMA} \alias{summary.Arima} \alias{as.character.Arima} \title{Fit ARIMA model to univariate time series} \usage{ Arima( y, order = c(0, 0, 0), seasonal = c(0, 0, 0), xreg = NULL, include.mean = TRUE, include.drift = FALSE, include.constant, lambda = model$lambda, biasadj = FALSE, method = c("CSS-ML", "ML", "CSS"), model = NULL, x = y, ... ) } \arguments{ \item{y}{a univariate time series of class \code{ts}.} \item{order}{A specification of the non-seasonal part of the ARIMA model: the three components (p, d, q) are the AR order, the degree of differencing, and the MA order.} \item{seasonal}{A specification of the seasonal part of the ARIMA model, plus the period (which defaults to frequency(y)). This should be a list with components order and period, but a specification of just a numeric vector of length 3 will be turned into a suitable list with the specification as the order.} \item{xreg}{Optionally, a numerical vector or matrix of external regressors, which must have the same number of rows as y. It should not be a data frame.} \item{include.mean}{Should the ARIMA model include a mean term? The default is \code{TRUE} for undifferenced series, \code{FALSE} for differenced ones (where a mean would affect neither the fit nor the predictions).} \item{include.drift}{Should the ARIMA model include a linear drift term? (i.e., a linear regression with ARIMA errors is fitted.) The default is \code{FALSE}.} \item{include.constant}{If \code{TRUE}, then \code{include.mean} is set to be \code{TRUE} for undifferenced series and \code{include.drift} is set to be \code{TRUE} for differenced series. Note that if there is more than one difference taken, no constant is included regardless of the value of this argument. This is deliberate as otherwise quadratic and higher order polynomial trends would be induced.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{method}{Fitting method: maximum likelihood or minimization of the conditional sum-of-squares. The default (unless there are missing values) is to use conditional-sum-of-squares to find starting values, then maximum likelihood.} \item{model}{Output from a previous call to \code{Arima}. If model is passed, this same model is fitted to \code{y} without re-estimating any parameters.} \item{x}{Deprecated. Included for backwards compatibility.} \item{...}{Additional arguments to be passed to \code{\link[stats]{arima}}.} } \value{ See the \code{\link[stats]{arima}} function in the stats package. The additional objects returned are \item{x}{The time series data} \item{xreg}{The regressors used in fitting (when relevant).} \item{sigma2}{The bias adjusted MLE of the innovations variance.} } \description{ Largely a wrapper for the \code{\link[stats]{arima}} function in the stats package.
The main difference is that this function allows a drift term. It is also possible to take an ARIMA model from a previous call to \code{Arima} and re-apply it to the data \code{y}. } \details{ See the \code{\link[stats]{arima}} function in the stats package. } \examples{ library(ggplot2) WWWusage \%>\% Arima(order=c(3,1,0)) \%>\% forecast(h=20) \%>\% autoplot # Fit model to first few years of AirPassengers data air.model <- Arima(window(AirPassengers,end=1956+11/12),order=c(0,1,1), seasonal=list(order=c(0,1,1),period=12),lambda=0) plot(forecast(air.model,h=48)) lines(AirPassengers) # Apply fitted model to later data air.model2 <- Arima(window(AirPassengers,start=1957),model=air.model) # Forecast accuracy measures on the log scale. # in-sample one-step forecasts. accuracy(air.model) # out-of-sample one-step forecasts. accuracy(air.model2) # out-of-sample multi-step forecasts accuracy(forecast(air.model,h=48,lambda=NULL), log(window(AirPassengers,start=1957))) } \seealso{ \code{\link{auto.arima}}, \code{\link{forecast.Arima}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/plot.forecast.Rd0000644000176200001440000000704014003673410015704 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/forecast.R, R/ggplot.R, R/spline.R \name{plot.forecast} \alias{plot.forecast} \alias{autoplot.forecast} \alias{autoplot.splineforecast} \alias{autolayer.forecast} \alias{plot.splineforecast} \title{Forecast plot} \usage{ \method{plot}{forecast}( x, include, PI = TRUE, showgap = TRUE, shaded = TRUE, shadebars = (length(x$mean) < 5), shadecols = NULL, col = 1, fcol = 4, pi.col = 1, pi.lty = 2, ylim = NULL, main = NULL, xlab = "", ylab = "", type = "l", flty = 1, flwd = 2, ... ) \method{autoplot}{forecast}( object, include, PI = TRUE, shadecols = c("#596DD5", "#D5DBFF"), fcol = "#0000AA", flwd = 0.5, ... ) \method{autoplot}{splineforecast}(object, PI = TRUE, ...) \method{autolayer}{forecast}(object, series = NULL, PI = TRUE, showgap = TRUE, ...) \method{plot}{splineforecast}(x, fitcol = 2, type = "o", pch = 19, ...) } \arguments{ \item{x}{Forecast object produced by \code{\link{forecast}}.} \item{include}{number of values from time series to include in plot. Default is all values.} \item{PI}{Logical flag indicating whether to plot prediction intervals.} \item{showgap}{If \code{showgap=FALSE}, the gap between the historical observations and the forecasts is removed.} \item{shaded}{Logical flag indicating whether prediction intervals should be shaded (\code{TRUE}) or lines (\code{FALSE})} \item{shadebars}{Logical flag indicating if prediction intervals should be plotted as shaded bars (if \code{TRUE}) or a shaded polygon (if \code{FALSE}). Ignored if \code{shaded=FALSE}. Bars are plotted by default if there are fewer than five forecast horizons.} \item{shadecols}{Colors for shaded prediction intervals. To get default colors used prior to v3.26, set \code{shadecols="oldstyle"}.} \item{col}{Colour for the data line.} \item{fcol}{Colour for the forecast line.} \item{pi.col}{If \code{shaded=FALSE} and \code{PI=TRUE}, the prediction intervals are plotted in this colour.} \item{pi.lty}{If \code{shaded=FALSE} and \code{PI=TRUE}, the prediction intervals are plotted using this line type.} \item{ylim}{Limits on y-axis.} \item{main}{Main title.} \item{xlab}{X-axis label.} \item{ylab}{Y-axis label.} \item{type}{1-character string giving the type of plot desired. 
As for \code{\link[graphics]{plot.default}}.} \item{flty}{Line type for the forecast line.} \item{flwd}{Line width for the forecast line.} \item{...}{Other plotting parameters to affect the plot.} \item{object}{Forecast object produced by \code{\link{forecast}}. Used for ggplot graphics (S3 method consistency).} \item{series}{Matches an unidentified forecast layer with a coloured object on the plot.} \item{fitcol}{Line colour for fitted values.} \item{pch}{Plotting character (if \code{type=="p"} or \code{type=="o"}).} } \value{ None. } \description{ Plots historical data with forecasts and prediction intervals. } \details{ \code{autoplot} will produce a ggplot object. } \examples{ library(ggplot2) wine.fit <- hw(wineind,h=48) plot(wine.fit) autoplot(wine.fit) fit <- tslm(wineind ~ fourier(wineind,4)) fcast <- forecast(fit, newdata=data.frame(fourier(wineind,4,20))) autoplot(fcast) fcast <- splinef(airmiles,h=5) plot(fcast) autoplot(fcast) } \references{ Hyndman and Athanasopoulos (2018) \emph{Forecasting: principles and practice}, 2nd edition, OTexts: Melbourne, Australia. \url{https://otexts.com/fpp2/} } \seealso{ \code{\link[stats]{plot.ts}} } \author{ Rob J Hyndman & Mitchell O'Hara-Wild } \keyword{ts} forecast/man/arfima.Rd0000644000176200001440000000636214003673410014366 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/arfima.R \name{arfima} \alias{arfima} \title{Fit a fractionally differenced ARFIMA model} \usage{ arfima( y, drange = c(0, 0.5), estim = c("mle", "ls"), model = NULL, lambda = NULL, biasadj = FALSE, x = y, ... ) } \arguments{ \item{y}{a univariate time series (numeric vector).} \item{drange}{Allowable values of d to be considered. Default of \code{c(0,0.5)} ensures a stationary model is returned.} \item{estim}{If \code{estim=="ls"}, then the ARMA parameters are calculated using the Haslett-Raftery algorithm. If \code{estim=="mle"}, then the ARMA parameters are calculated using full MLE via the \code{\link[stats]{arima}} function.} \item{model}{Output from a previous call to \code{arfima}. If model is passed, this same model is fitted to y without re-estimating any parameters.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{x}{Deprecated. Included for backwards compatibility.} \item{\dots}{Other arguments passed to \code{\link{auto.arima}} when selecting p and q.} } \value{ A list object of S3 class \code{"fracdiff"}, which is described in the \code{\link[fracdiff]{fracdiff}} documentation. A few additional objects are added to the list including \code{x} (the original time series), and the \code{residuals} and \code{fitted} values. } \description{ An ARFIMA(p,d,q) model is selected and estimated automatically using the Hyndman-Khandakar (2008) algorithm to select p and q and the Haslett and Raftery (1989) algorithm to estimate the parameters including d.
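As a minimal usage sketch, mirroring the examples below and in \code{\link{forecast.fracdiff}} (the simulated series is illustrative only):

library(fracdiff)
x <- fracdiff.sim(100, ma = -0.4, d = 0.3)$series # simulate a long-memory series
fit <- arfima(x) # p, d and q selected and estimated automatically
plot(forecast(fit, h = 30)) # forecasts computed via forecast.fracdiff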
} \details{ This function combines \code{\link[fracdiff]{fracdiff}} and \code{\link{auto.arima}} to automatically select and estimate an ARFIMA model. The fractional differencing parameter is chosen first assuming an ARFIMA(2,d,0) model. Then the data are fractionally differenced using the estimated d and an ARMA model is selected for the resulting time series using \code{\link{auto.arima}}. Finally, the full ARFIMA(p,d,q) model is re-estimated using \code{\link[fracdiff]{fracdiff}}. If \code{estim=="mle"}, the ARMA coefficients are refined using \code{\link[stats]{arima}}. } \examples{ library(fracdiff) x <- fracdiff.sim( 100, ma=-.4, d=.3)$series fit <- arfima(x) tsdisplay(residuals(fit)) } \references{ J. Haslett and A. E. Raftery (1989) Space-time Modelling with Long-memory Dependence: Assessing Ireland's Wind Power Resource (with discussion); \emph{Applied Statistics} \bold{38}, 1-50. Hyndman, R.J. and Khandakar, Y. (2008) "Automatic time series forecasting: The forecast package for R", \emph{Journal of Statistical Software}, \bold{26}(3). } \seealso{ \code{\link[fracdiff]{fracdiff}}, \code{\link{auto.arima}}, \code{\link{forecast.fracdiff}}. } \author{ Rob J Hyndman and Farah Yasmeen } \keyword{ts} forecast/man/forecast-package.Rd0000644000176200001440000000113114003673410016313 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/forecast.R \docType{package} \name{forecast-package} \alias{forecast-package} \title{Forecasting Functions for Time Series and Linear Models} \description{ Methods and tools for displaying and analysing univariate time series forecasts including exponential smoothing via state space models and automatic ARIMA modelling. } \details{ \tabular{ll}{ Package: \tab forecast\cr Type: \tab Package\cr License: \tab GPL3\cr LazyLoad: \tab yes\cr } } \author{ Rob J Hyndman Maintainer: Rob.Hyndman@monash.edu } \keyword{package} forecast/man/meanf.Rd0000644000176200001440000000552114003673410014211 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/forecast2.R \name{meanf} \alias{meanf} \title{Mean Forecast} \usage{ meanf( y, h = 10, level = c(80, 95), fan = FALSE, lambda = NULL, biasadj = FALSE, bootstrap = FALSE, npaths = 5000, x = y ) } \arguments{ \item{y}{a numeric vector or time series of class \code{ts}} \item{h}{Number of periods for forecasting} \item{level}{Confidence levels for prediction intervals.} \item{fan}{If TRUE, level is set to seq(51,99,by=3). This is suitable for fan plots.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{bootstrap}{If TRUE, use a bootstrap method to compute prediction intervals. Otherwise, assume a normal distribution.} \item{npaths}{Number of bootstrapped sample paths to use if \code{bootstrap==TRUE}.} \item{x}{Deprecated. Included for backwards compatibility.} } \value{ An object of class "\code{forecast}". 
The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{meanf}. An object of class \code{"forecast"} is a list containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} \item{fitted}{Fitted values (one-step forecasts)} } \description{ Returns forecasts and prediction intervals for an iid model applied to y. } \details{ The iid model is \deqn{Y_t=\mu + Z_t}{Y[t]=mu + Z[t]} where \eqn{Z_t}{Z[t]} is a normal iid error. Forecasts are given by \deqn{Y_n(h)=\mu}{Y[n+h]=mu} where \eqn{\mu}{mu} is estimated by the sample mean. } \examples{ nile.fcast <- meanf(Nile, h=10) plot(nile.fcast) } \seealso{ \code{\link{rwf}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/nsdiffs.Rd0000644000176200001440000000527514003673410014565 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/unitRoot.R \name{nsdiffs} \alias{nsdiffs} \title{Number of differences required for a seasonally stationary series} \usage{ nsdiffs( x, alpha = 0.05, m = frequency(x), test = c("seas", "ocsb", "hegy", "ch"), max.D = 1, ... ) } \arguments{ \item{x}{A univariate time series} \item{alpha}{Level of the test, possible values range from 0.01 to 0.1.} \item{m}{Deprecated. Length of seasonal period} \item{test}{Type of unit root test to use} \item{max.D}{Maximum number of seasonal differences allowed} \item{...}{Additional arguments to be passed on to the unit root test} } \value{ An integer indicating the number of differences required for stationarity. } \description{ Functions to estimate the number of differences required to make a given time series stationary. \code{nsdiffs} estimates the number of seasonal differences necessary. } \details{ \code{nsdiffs} uses seasonal unit root tests to determine the number of seasonal differences required for time series \code{x} to be made stationary (possibly with some lag-one differencing as well). Several different tests are available: \itemize{ \item If \code{test="seas"} (default), a measure of seasonal strength is used, where differencing is selected if the seasonal strength (Wang, Smith & Hyndman, 2006) exceeds 0.64 (based on minimizing MASE when forecasting using auto.arima on M3 and M4 data). \item If \code{test="ch"}, the Canova-Hansen (1995) test is used (with null hypothesis of deterministic seasonality) \item If \code{test="hegy"}, the Hylleberg, Engle, Granger & Yoo (1990) test is used. \item If \code{test="ocsb"}, the Osborn-Chui-Smith-Birchenhall (1988) test is used (with null hypothesis that a seasonal unit root exists). } } \examples{ nsdiffs(AirPassengers) } \references{ Wang, X, Smith, KA, Hyndman, RJ (2006) "Characteristic-based clustering for time series data", \emph{Data Mining and Knowledge Discovery}, \bold{13}(3), 335-364. 
Osborn DR, Chui APL, Smith J, and Birchenhall CR (1988) "Seasonality and the order of integration for consumption", \emph{Oxford Bulletin of Economics and Statistics} \bold{50}(4):361-377. Canova F and Hansen BE (1995) "Are Seasonal Patterns Constant over Time? A Test for Seasonal Stability", \emph{Journal of Business and Economic Statistics} \bold{13}(3):237-252. Hylleberg S, Engle R, Granger C and Yoo B (1990) "Seasonal integration and cointegration.", \emph{Journal of Econometrics} \bold{44}(1), pp. 215-238. } \seealso{ \code{\link{auto.arima}}, \code{\link{ndiffs}}, \code{\link{ocsb.test}}, \code{\link[uroot]{hegy.test}}, and \code{\link[uroot]{ch.test}} } \author{ Rob J Hyndman, Slava Razbash and Mitchell O'Hara-Wild } forecast/man/BoxCox.Rd0000644000176200001440000000364414003673410014331 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/forecast2.R \name{BoxCox} \alias{BoxCox} \alias{InvBoxCox} \title{Box Cox Transformation} \usage{ BoxCox(x, lambda) InvBoxCox(x, lambda, biasadj = FALSE, fvar = NULL) } \arguments{ \item{x}{a numeric vector or time series of class \code{ts}.} \item{lambda}{transformation parameter. If \code{lambda = "auto"}, then the transformation parameter lambda is chosen using BoxCox.lambda (with a lower bound of -0.9)} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{fvar}{Optional parameter required if biasadj=TRUE. Can either be the forecast variance, or a list containing the interval \code{level}, and the corresponding \code{upper} and \code{lower} intervals.} } \value{ a numeric vector of the same length as x. } \description{ BoxCox() returns a transformation of the input variable using a Box-Cox transformation. InvBoxCox() reverses the transformation. } \details{ The Box-Cox transformation (as given by Bickel & Doksum 1981) is given by \deqn{f_\lambda(x) =sign(x)(|x|^\lambda - 1)/\lambda}{f(x;lambda)=sign(x)(|x|^lambda - 1)/lambda} if \eqn{\lambda\ne0}{lambda is not equal to 0}. For \eqn{\lambda=0}{lambda=0}, \deqn{f_0(x)=\log(x)}{f(x;0)=log(x)}. } \examples{ lambda <- BoxCox.lambda(lynx) lynx.fit <- ar(BoxCox(lynx,lambda)) plot(forecast(lynx.fit,h=20,lambda=lambda)) } \references{ Box, G. E. P. and Cox, D. R. (1964) An analysis of transformations. \emph{JRSS B} \bold{26} 211--246. Bickel, P. J. and Doksum K. A. (1981) An Analysis of Transformations Revisited. \emph{JASA} \bold{76} 296-311. } \seealso{ \code{\link{BoxCox.lambda}} } \author{ Rob J Hyndman & Mitchell O'Hara-Wild } \keyword{ts} forecast/man/tbats.Rd0000644000176200001440000000740514003673410014243 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/tbats.R \name{tbats} \alias{tbats} \alias{as.character.tbats} \alias{print.tbats} \title{TBATS model (Exponential smoothing state space model with Box-Cox transformation, ARMA errors, Trend and Seasonal components)} \usage{ tbats( y, use.box.cox = NULL, use.trend = NULL, use.damped.trend = NULL, seasonal.periods = NULL, use.arma.errors = TRUE, use.parallel = length(y) > 1000, num.cores = 2, bc.lower = 0, bc.upper = 1, biasadj = FALSE, model = NULL, ... ) } \arguments{ \item{y}{The time series to be forecast. Can be \code{numeric}, \code{msts} or \code{ts}. 
Only univariate time series are supported.} \item{use.box.cox}{\code{TRUE/FALSE} indicates whether to use the Box-Cox transformation or not. If \code{NULL} then both are tried and the best fit is selected by AIC.} \item{use.trend}{\code{TRUE/FALSE} indicates whether to include a trend or not. If \code{NULL} then both are tried and the best fit is selected by AIC.} \item{use.damped.trend}{\code{TRUE/FALSE} indicates whether to include a damping parameter in the trend or not. If \code{NULL} then both are tried and the best fit is selected by AIC.} \item{seasonal.periods}{If \code{y} is \code{numeric} then seasonal periods can be specified with this parameter.} \item{use.arma.errors}{\code{TRUE/FALSE} indicates whether to include ARMA errors or not. If \code{TRUE} the best fit is selected by AIC. If \code{FALSE} then the selection algorithm does not consider ARMA errors.} \item{use.parallel}{\code{TRUE/FALSE} indicates whether or not to use parallel processing.} \item{num.cores}{The number of parallel processes to be used if using parallel processing. If \code{NULL} then the number of logical cores is detected and all available cores are used.} \item{bc.lower}{The lower limit (inclusive) for the Box-Cox transformation.} \item{bc.upper}{The upper limit (inclusive) for the Box-Cox transformation.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If TRUE, point forecasts and fitted values are mean forecasts. Otherwise, these points can be considered the median of the forecast densities.} \item{model}{Output from a previous call to \code{tbats}. If model is passed, this same model is fitted to \code{y} without re-estimating any parameters.} \item{...}{Additional arguments to be passed to \code{auto.arima} when choosing an ARMA(p, q) model for the errors. (Note that xreg will be ignored, as will any arguments concerning seasonality and differencing, but arguments controlling the values of p and q will be used.)} } \value{ An object with class \code{c("tbats", "bats")}. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{bats} and associated functions. The fitted model is designated TBATS(omega, p, q, phi, <m1,k1>,...,<mJ,kJ>), where omega is the Box-Cox parameter, phi is the damping parameter, the error is modelled as an ARMA(p,q) process, m1,...,mJ list the seasonal periods used in the model, and k1,...,kJ are the corresponding numbers of Fourier terms used for each seasonality. } \description{ Fits a TBATS model applied to \code{y}, as described in De Livera, Hyndman & Snyder (2011). Parallel processing is used by default to speed up the computations. } \examples{ \dontrun{ fit <- tbats(USAccDeaths) plot(forecast(fit)) taylor.fit <- tbats(taylor) plot(forecast(taylor.fit))} } \references{ De Livera, A.M., Hyndman, R.J., & Snyder, R. D. (2011), Forecasting time series with complex seasonal patterns using exponential smoothing, \emph{Journal of the American Statistical Association}, \bold{106}(496), 1513-1527. } \seealso{ \code{\link{tbats.components}}.
} \author{ Slava Razbash and Rob J Hyndman } \keyword{ts} forecast/man/sindexf.Rd0000644000176200001440000000164614003673410014567 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/season.R \name{sindexf} \alias{sindexf} \title{Forecast seasonal index} \usage{ sindexf(object, h) } \arguments{ \item{object}{Output from \code{\link[stats]{decompose}} or \link[stats]{stl}.} \item{h}{Number of periods ahead to forecast} } \value{ Time series } \description{ Returns vector containing the seasonal index for \code{h} future periods. If the seasonal index is non-periodic, it uses the last values of the index. } \examples{ uk.stl <- stl(UKDriverDeaths,"periodic") uk.sa <- seasadj(uk.stl) uk.fcast <- holt(uk.sa,36) seasf <- sindexf(uk.stl,36) uk.fcast$mean <- uk.fcast$mean + seasf uk.fcast$lower <- uk.fcast$lower + cbind(seasf,seasf) uk.fcast$upper <- uk.fcast$upper + cbind(seasf,seasf) uk.fcast$x <- UKDriverDeaths plot(uk.fcast,main="Forecasts from Holt's method with seasonal adjustment") } \author{ Rob J Hyndman } \keyword{ts} forecast/man/forecast.stl.Rd0000644000176200001440000001636514053102646015545 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/mstl.R \name{forecast.stl} \alias{forecast.stl} \alias{stlm} \alias{forecast.stlm} \alias{stlf} \title{Forecasting using stl objects} \usage{ \method{forecast}{stl}( object, method = c("ets", "arima", "naive", "rwdrift"), etsmodel = "ZZN", forecastfunction = NULL, h = frequency(object$time.series) * 2, level = c(80, 95), fan = FALSE, lambda = NULL, biasadj = NULL, xreg = NULL, newxreg = NULL, allow.multiplicative.trend = FALSE, ... ) stlm( y, s.window = 7 + 4 * seq(6), robust = FALSE, method = c("ets", "arima"), modelfunction = NULL, model = NULL, etsmodel = "ZZN", lambda = NULL, biasadj = FALSE, xreg = NULL, allow.multiplicative.trend = FALSE, x = y, ... ) \method{forecast}{stlm}( object, h = 2 * object$m, level = c(80, 95), fan = FALSE, lambda = object$lambda, biasadj = NULL, newxreg = NULL, allow.multiplicative.trend = FALSE, ... ) stlf( y, h = frequency(x) * 2, s.window = 7 + 4 * seq(6), t.window = NULL, robust = FALSE, lambda = NULL, biasadj = FALSE, x = y, ... ) } \arguments{ \item{object}{An object of class \code{stl} or \code{stlm}. Usually the result of a call to \code{\link[stats]{stl}} or \code{stlm}.} \item{method}{Method to use for forecasting the seasonally adjusted series.} \item{etsmodel}{The ets model specification passed to \code{\link[forecast]{ets}}. By default it allows any non-seasonal model. If \code{method!="ets"}, this argument is ignored.} \item{forecastfunction}{An alternative way of specifying the function for forecasting the seasonally adjusted series. If \code{forecastfunction} is not \code{NULL}, then \code{method} is ignored. Otherwise \code{method} is used to specify the forecasting method to be used.} \item{h}{Number of periods for forecasting.} \item{level}{Confidence level for prediction intervals.} \item{fan}{If \code{TRUE}, level is set to seq(51,99,by=3). This is suitable for fan plots.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. 
If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{xreg}{Historical regressors to be used in \code{\link[forecast]{auto.arima}()} when \code{method=="arima"}.} \item{newxreg}{Future regressors to be used in \code{\link[forecast]{forecast.Arima}()}.} \item{allow.multiplicative.trend}{If TRUE, then ETS models with multiplicative trends are allowed. Otherwise, only additive or no trend ETS models are permitted.} \item{...}{Other arguments passed to \code{forecast.stl}, \code{modelfunction} or \code{forecastfunction}.} \item{y}{A univariate numeric time series of class \code{ts}} \item{s.window}{Either the character string ``periodic'' or the span (in lags) of the loess window for seasonal extraction.} \item{robust}{If \code{TRUE}, robust fitting will be used in the loess procedure within \code{\link[stats]{stl}}.} \item{modelfunction}{An alternative way of specifying the function for modelling the seasonally adjusted series. If \code{modelfunction} is not \code{NULL}, then \code{method} is ignored. Otherwise \code{method} is used to specify the time series model to be used.} \item{model}{Output from a previous call to \code{stlm}. If a \code{stlm} model is passed, this same model is fitted to y without re-estimating any parameters.} \item{x}{Deprecated. Included for backwards compatibility.} \item{t.window}{A number to control the smoothness of the trend. See \code{\link[stats]{stl}} for details.} } \value{ \code{stlm} returns an object of class \code{stlm}. The other functions return objects of class \code{forecast}. There are many methods for working with \code{\link{forecast}} objects including \code{summary} to obtain and print a summary of the results, while \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features. } \description{ Forecasts of STL objects are obtained by applying a non-seasonal forecasting method to the seasonally adjusted data and re-seasonalizing using the last year of the seasonal component. } \details{ \code{stlm} takes a time series \code{y}, applies an STL decomposition, and models the seasonally adjusted data using the model passed as \code{modelfunction} or specified using \code{method}. It returns an object that includes the original STL decomposition and a time series model fitted to the seasonally adjusted data. This object can then be passed to \code{forecast.stlm} for forecasting. \code{forecast.stlm} forecasts the seasonally adjusted data, then re-seasonalizes the results by adding back the last year of the estimated seasonal component. \code{stlf} combines \code{stlm} and \code{forecast.stlm}. It takes a \code{ts} argument, applies an STL decomposition, models the seasonally adjusted data, reseasonalizes, and returns the forecasts. However, it allows more general forecasting methods to be specified via \code{forecastfunction}. \code{forecast.stl} is similar to \code{stlf} except that it takes the STL decomposition as the first argument, instead of the time series. Note that the prediction intervals ignore the uncertainty associated with the seasonal component: they are computed from the prediction intervals of the seasonally adjusted series, which are then reseasonalized using the last year of the seasonal component.
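As an illustration of this pipeline (a minimal sketch using the functions documented here; the horizon is arbitrary):

fit <- stlm(USAccDeaths, method = "arima") # STL, then ARIMA on the seasonally adjusted series
fc <- forecast(fit, h = 24) # re-seasonalized automatically by forecast.stlm
plot(fc)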
The time series model for the seasonally adjusted data can be specified in \code{stlm} using either \code{method} or \code{modelfunction}. The \code{method} argument provides a shorthand way of specifying \code{modelfunction} for a few special cases. More generally, \code{modelfunction} can be any function whose first argument is a \code{ts} object and which returns an object that can be passed to \code{\link{forecast}}. For example, \code{modelfunction=ar} uses the \code{\link{ar}} function for modelling the seasonally adjusted series. The forecasting method for the seasonally adjusted data can be specified in \code{stlf} and \code{forecast.stl} using either \code{method} or \code{forecastfunction}. The \code{method} argument provides a shorthand way of specifying \code{forecastfunction} for a few special cases. More generally, \code{forecastfunction} can be any function whose first argument is a \code{ts} object, with additional arguments \code{h} and \code{level}, and which returns an object of class \code{\link{forecast}}. For example, \code{forecastfunction=thetaf} uses the \code{\link{thetaf}} function for forecasting the seasonally adjusted series. } \examples{ tsmod <- stlm(USAccDeaths, modelfunction = ar) plot(forecast(tsmod, h = 36)) decomp <- stl(USAccDeaths, s.window = "periodic") plot(forecast(decomp)) plot(stlf(AirPassengers, lambda = 0)) } \seealso{ \code{\link[stats]{stl}}, \code{\link{forecast.ets}}, \code{\link{forecast.Arima}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/checkresiduals.Rd0000644000176200001440000000355614003673410016122 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/checkresiduals.R \name{checkresiduals} \alias{checkresiduals} \title{Check that residuals from a time series model look like white noise} \usage{ checkresiduals(object, lag, df = NULL, test, plot = TRUE, ...) } \arguments{ \item{object}{Either a time series model, a forecast object, or a time series (assumed to be residuals).} \item{lag}{Number of lags to use in the Ljung-Box or Breusch-Godfrey test. If missing, it is set to \code{min(10,n/5)} for non-seasonal data, and \code{min(2m, n/5)} for seasonal data, where \code{n} is the length of the series, and \code{m} is the seasonal period of the data. It is further constrained to be at least \code{df+3} where \code{df} is the degrees of freedom of the model. This ensures there are at least 3 degrees of freedom used in the chi-squared test.} \item{df}{Number of degrees of freedom for fitted model, required for the Ljung-Box or Breusch-Godfrey test. Ignored if the degrees of freedom can be extracted from \code{object}.} \item{test}{Test to use for serial correlation. By default, if \code{object} is of class \code{lm}, then \code{test="BG"}. Otherwise, \code{test="LB"}. Setting \code{test=FALSE} will prevent the test results being printed.} \item{plot}{Logical. If \code{TRUE}, will produce the plot.} \item{...}{Other arguments are passed to \code{\link{ggtsdisplay}}.} } \value{ None } \description{ If \code{plot=TRUE}, produces a time plot of the residuals, the corresponding ACF, and a histogram. If the degrees of freedom for the model can be determined and \code{test} is not \code{FALSE}, the output from either a Ljung-Box test or Breusch-Godfrey test is printed.
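For example, a minimal sketch of the Breusch-Godfrey branch (as noted under \code{test}, \code{test="BG"} is selected automatically for \code{lm}-type models such as those returned by \code{tslm}):

fit <- tslm(wineind ~ trend + season) # a regression model of class lm
checkresiduals(fit) # Breusch-Godfrey test used by default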
} \examples{ fit <- ets(WWWusage) checkresiduals(fit) } \seealso{ \code{\link{ggtsdisplay}}, \code{\link[stats]{Box.test}}, \code{\link[lmtest]{bgtest}} } \author{ Rob J Hyndman } forecast/man/thetaf.Rd0000644000176200001440000000546414003673410014404 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/theta.R \name{thetaf} \alias{thetaf} \title{Theta method forecast} \usage{ thetaf( y, h = ifelse(frequency(y) > 1, 2 * frequency(y), 10), level = c(80, 95), fan = FALSE, x = y ) } \arguments{ \item{y}{a numeric vector or time series of class \code{ts}} \item{h}{Number of periods for forecasting} \item{level}{Confidence levels for prediction intervals.} \item{fan}{If TRUE, level is set to seq(51,99,by=3). This is suitable for fan plots.} \item{x}{Deprecated. Included for backwards compatibility.} } \value{ An object of class "\code{forecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{thetaf}. An object of class \code{"forecast"} is a list containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} \item{fitted}{Fitted values (one-step forecasts)} } \description{ Returns forecasts and prediction intervals for a theta method forecast. } \details{ The theta method of Assimakopoulos and Nikolopoulos (2000) is equivalent to simple exponential smoothing with drift. This is demonstrated in Hyndman and Billah (2003). The series is tested for seasonality using the test outlined in A&N. If deemed seasonal, the series is seasonally adjusted using a classical multiplicative decomposition before applying the theta method. The resulting forecasts are then reseasonalized. Prediction intervals are computed using the underlying state space model. More general theta methods are available in the \code{\link[forecTheta]{forecTheta}} package. } \examples{ nile.fcast <- thetaf(Nile) plot(nile.fcast) } \references{ Assimakopoulos, V. and Nikolopoulos, K. (2000). The theta model: a decomposition approach to forecasting. \emph{International Journal of Forecasting} \bold{16}, 521-530. Hyndman, R.J., and Billah, B. (2003) Unmasking the Theta method. \emph{International J. Forecasting}, \bold{19}, 287-290.
} \seealso{ \code{\link[stats]{arima}}, \code{\link{meanf}}, \code{\link{rwf}}, \code{\link{ses}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/fitted.Arima.Rd0000644000176200001440000000325514003673410015434 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/arfima.R, R/arima.R, R/bats.R, R/ets.R, % R/modelAR.R, R/nnetar.R, R/tbats.R \name{fitted.ARFIMA} \alias{fitted.ARFIMA} \alias{fitted.Arima} \alias{fitted.forecast_ARIMA} \alias{fitted.ar} \alias{fitted.bats} \alias{fitted.ets} \alias{fitted.modelAR} \alias{fitted.nnetar} \alias{fitted.tbats} \title{h-step in-sample forecasts for time series models.} \usage{ \method{fitted}{ARFIMA}(object, h = 1, ...) \method{fitted}{Arima}(object, h = 1, ...) \method{fitted}{ar}(object, ...) \method{fitted}{bats}(object, h = 1, ...) \method{fitted}{ets}(object, h = 1, ...) \method{fitted}{modelAR}(object, h = 1, ...) \method{fitted}{nnetar}(object, h = 1, ...) \method{fitted}{tbats}(object, h = 1, ...) } \arguments{ \item{object}{An object of class "\code{Arima}", "\code{bats}", "\code{tbats}", "\code{ets}" or "\code{nnetar}".} \item{h}{The number of steps to forecast ahead.} \item{...}{Other arguments.} } \value{ A time series of the h-step forecasts. } \description{ Returns h-step forecasts for the data used in fitting the model. } \examples{ fit <- ets(WWWusage) plot(WWWusage) lines(fitted(fit), col='red') lines(fitted(fit, h=2), col='green') lines(fitted(fit, h=3), col='blue') legend("topleft", legend=paste("h =",1:3), col=2:4, lty=1) } \seealso{ \code{\link{forecast.Arima}}, \code{\link{forecast.bats}}, \code{\link{forecast.tbats}}, \code{\link{forecast.ets}}, \code{\link{forecast.nnetar}}, \code{\link{residuals.Arima}}, \code{\link{residuals.bats}}, \code{\link{residuals.tbats}}, \code{\link{residuals.ets}}, \code{\link{residuals.nnetar}}. } \author{ Rob J Hyndman & Mitchell O'Hara-Wild } \keyword{ts} forecast/man/auto.arima.Rd0000644000176200001440000001462214055364777015211 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/newarima2.R \name{auto.arima} \alias{auto.arima} \title{Fit best ARIMA model to univariate time series} \usage{ auto.arima( y, d = NA, D = NA, max.p = 5, max.q = 5, max.P = 2, max.Q = 2, max.order = 5, max.d = 2, max.D = 1, start.p = 2, start.q = 2, start.P = 1, start.Q = 1, stationary = FALSE, seasonal = TRUE, ic = c("aicc", "aic", "bic"), stepwise = TRUE, nmodels = 94, trace = FALSE, approximation = (length(x) > 150 | frequency(x) > 12), method = NULL, truncate = NULL, xreg = NULL, test = c("kpss", "adf", "pp"), test.args = list(), seasonal.test = c("seas", "ocsb", "hegy", "ch"), seasonal.test.args = list(), allowdrift = TRUE, allowmean = TRUE, lambda = NULL, biasadj = FALSE, parallel = FALSE, num.cores = 2, x = y, ... ) } \arguments{ \item{y}{a univariate time series} \item{d}{Order of first-differencing. If missing, will choose a value based on \code{test}.} \item{D}{Order of seasonal-differencing. 
If missing, will choose a value based on \code{seasonal.test}.} \item{max.p}{Maximum value of p} \item{max.q}{Maximum value of q} \item{max.P}{Maximum value of P} \item{max.Q}{Maximum value of Q} \item{max.order}{Maximum value of p+q+P+Q if model selection is not stepwise.} \item{max.d}{Maximum number of non-seasonal differences} \item{max.D}{Maximum number of seasonal differences} \item{start.p}{Starting value of p in stepwise procedure.} \item{start.q}{Starting value of q in stepwise procedure.} \item{start.P}{Starting value of P in stepwise procedure.} \item{start.Q}{Starting value of Q in stepwise procedure.} \item{stationary}{If \code{TRUE}, restricts search to stationary models.} \item{seasonal}{If \code{FALSE}, restricts search to non-seasonal models.} \item{ic}{Information criterion to be used in model selection.} \item{stepwise}{If \code{TRUE}, will do stepwise selection (faster). Otherwise, it searches over all models. Non-stepwise selection can be very slow, especially for seasonal models.} \item{nmodels}{Maximum number of models considered in the stepwise search.} \item{trace}{If \code{TRUE}, the list of ARIMA models considered will be reported.} \item{approximation}{If \code{TRUE}, estimation is via conditional sums of squares and the information criteria used for model selection are approximated. The final model is still computed using maximum likelihood estimation. Approximation should be used for long time series or a high seasonal period to avoid excessive computation times.} \item{method}{fitting method: maximum likelihood or minimization of the conditional sum-of-squares. The default (unless there are missing values) is to use conditional-sum-of-squares to find starting values, then maximum likelihood. Can be abbreviated.} \item{truncate}{An integer value indicating how many observations to use in model selection. The last \code{truncate} values of the series are used to select a model when \code{truncate} is not \code{NULL} and \code{approximation=TRUE}. All observations are used if either \code{truncate=NULL} or \code{approximation=FALSE}.} \item{xreg}{Optionally, a numerical vector or matrix of external regressors, which must have the same number of rows as \code{y}. (It should not be a data frame.)} \item{test}{Type of unit root test to use. See \code{\link{ndiffs}} for details.} \item{test.args}{Additional arguments to be passed to the unit root test.} \item{seasonal.test}{This determines which method is used to select the number of seasonal differences. The default method is to use a measure of seasonal strength computed from an STL decomposition. Other possibilities involve seasonal unit root tests.} \item{seasonal.test.args}{Additional arguments to be passed to the seasonal unit root test. See \code{\link{nsdiffs}} for details.} \item{allowdrift}{If \code{TRUE}, models with drift terms are considered.} \item{allowmean}{If \code{TRUE}, models with a non-zero mean are considered.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts.
If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{parallel}{If \code{TRUE} and \code{stepwise = FALSE}, then the specification search is done in parallel. This can give a significant speedup on multicore machines.} \item{num.cores}{Allows the user to specify the number of parallel processes to be used if \code{parallel = TRUE} and \code{stepwise = FALSE}. If \code{NULL}, then the number of logical cores is automatically detected and all available cores are used.} \item{x}{Deprecated. Included for backwards compatibility.} \item{...}{Additional arguments to be passed to \code{\link[stats]{arima}}.} } \value{ Same as for \code{\link{Arima}} } \description{ Returns best ARIMA model according to either AIC, AICc or BIC value. The function conducts a search over possible models within the order constraints provided. } \details{ The default arguments are designed for rapid estimation of models for many time series. If you are analysing just one time series, and can afford to take some more time, it is recommended that you set \code{stepwise=FALSE} and \code{approximation=FALSE}. Non-stepwise selection can be slow, especially for seasonal data. The stepwise algorithm outlined in Hyndman & Khandakar (2008) is used except that the default method for selecting seasonal differences is now based on an estimate of seasonal strength (Wang, Smith & Hyndman, 2006) rather than the Canova-Hansen test. There are also some other minor variations to the algorithm described in Hyndman and Khandakar (2008). } \examples{ fit <- auto.arima(WWWusage) plot(forecast(fit,h=20)) } \references{ Hyndman, RJ and Khandakar, Y (2008) "Automatic time series forecasting: The forecast package for R", \emph{Journal of Statistical Software}, \bold{26}(3). Wang, X, Smith, KA, Hyndman, RJ (2006) "Characteristic-based clustering for time series data", \emph{Data Mining and Knowledge Discovery}, \bold{13}(3), 335-364. } \seealso{ \code{\link{Arima}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/geom_forecast.Rd0000644000176200001440000001055214003673410015740 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ggplot.R \docType{data} \name{StatForecast} \alias{StatForecast} \alias{GeomForecast} \alias{geom_forecast} \title{Forecast plot} \format{ An object of class \code{StatForecast} (inherits from \code{Stat}, \code{ggproto}, \code{gg}) of length 3. An object of class \code{GeomForecast} (inherits from \code{Geom}, \code{ggproto}, \code{gg}) of length 7. } \usage{ StatForecast GeomForecast geom_forecast( mapping = NULL, data = NULL, stat = "forecast", position = "identity", na.rm = FALSE, show.legend = NA, inherit.aes = TRUE, PI = TRUE, showgap = TRUE, series = NULL, ... ) } \arguments{ \item{mapping}{Set of aesthetic mappings created by \code{\link{aes}} or \code{\link{aes_}}. If specified and \code{inherit.aes = TRUE} (the default), it is combined with the default mapping at the top level of the plot. You must supply \code{mapping} if there is no plot mapping.} \item{data}{The data to be displayed in this layer. There are three options: If \code{NULL}, the default, the data is inherited from the plot data as specified in the call to \code{\link{ggplot}}. A \code{data.frame}, or other object, will override the plot data. All objects will be fortified to produce a data frame. See \code{\link{fortify}} for which variables will be created. A \code{function} will be called with a single argument, the plot data.
The return value must be a \code{data.frame}, and will be used as the layer data.} \item{stat}{The stat object to use to calculate the data.} \item{position}{Position adjustment, either as a string, or the result of a call to a position adjustment function.} \item{na.rm}{If \code{FALSE} (the default), removes missing values with a warning. If \code{TRUE}, silently removes missing values.} \item{show.legend}{logical. Should this layer be included in the legends? \code{NA}, the default, includes if any aesthetics are mapped. \code{FALSE} never includes, and \code{TRUE} always includes.} \item{inherit.aes}{If \code{FALSE}, overrides the default aesthetics, rather than combining with them. This is most useful for helper functions that define both data and aesthetics and shouldn't inherit behaviour from the default plot specification, e.g. \code{\link{borders}}.} \item{PI}{If \code{FALSE}, confidence intervals will not be plotted, giving only the forecast line.} \item{showgap}{If \code{showgap=FALSE}, the gap between the historical observations and the forecasts is removed.} \item{series}{Matches an unidentified forecast layer with a coloured object on the plot.} \item{...}{Additional arguments for \code{\link{forecast.ts}}; other arguments are passed on to \code{\link{layer}}. These are often aesthetics, used to set an aesthetic to a fixed value, like \code{color = "red"} or \code{alpha = .5}. They may also be parameters to the paired geom/stat.} } \value{ A layer for a ggplot graph. } \description{ Generates forecasts from \code{forecast.ts} and adds them to the plot. Forecasts can be modified by passing forecast-specific arguments, as described above. } \details{ Multivariate forecasting is supported by having each time series in a different group. You can also pass \code{geom_forecast} a \code{forecast} object to add it to the plot. The aesthetics required for the forecasting to work include forecast observations on the y axis, and the \code{time} of the observations on the x axis. Refer to the examples below. To automatically set up aesthetics, use \code{autoplot}.
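For instance, a minimal sketch (here \code{h} and \code{level} are simply passed through to \code{\link{forecast.ts}}; the values are arbitrary):

library(ggplot2)
autoplot(USAccDeaths) + geom_forecast(h = 36, level = 95)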
} \examples{ \dontrun{ library(ggplot2) autoplot(USAccDeaths) + geom_forecast() lungDeaths <- cbind(mdeaths, fdeaths) autoplot(lungDeaths) + geom_forecast() # Using fortify.ts p <- ggplot(aes(x=x, y=y), data=USAccDeaths) p <- p + geom_line() p + geom_forecast() # Without fortify.ts data <- data.frame(USAccDeaths=as.numeric(USAccDeaths), time=as.numeric(time(USAccDeaths))) p <- ggplot(aes(x=time, y=USAccDeaths), data=data) p <- p + geom_line() p + geom_forecast() p + geom_forecast(h=60) p <- ggplot(aes(x=time, y=USAccDeaths), data=data) p + geom_forecast(level=c(70,98)) p + geom_forecast(level=c(70,98),colour="lightblue") #Add forecasts to multivariate series with colour groups lungDeaths <- cbind(mdeaths, fdeaths) autoplot(lungDeaths) + geom_forecast(forecast(mdeaths), series="mdeaths") } } \seealso{ \code{\link{forecast}}, \code{\link[ggplot2]{ggproto}} } \author{ Mitchell O'Hara-Wild } \keyword{datasets} forecast/man/residuals.forecast.Rd0000644000176200001440000000610714003673410016724 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/residuals.R \name{residuals.forecast} \alias{residuals.forecast} \alias{residuals.ar} \alias{residuals.Arima} \alias{residuals.forecast_ARIMA} \alias{residuals.bats} \alias{residuals.tbats} \alias{residuals.ets} \alias{residuals.ARFIMA} \alias{residuals.nnetar} \alias{residuals.stlm} \alias{residuals.tslm} \title{Residuals for various time series models} \usage{ \method{residuals}{forecast}(object, type = c("innovation", "response"), ...) \method{residuals}{ar}(object, type = c("innovation", "response"), ...) \method{residuals}{Arima}(object, type = c("innovation", "response", "regression"), h = 1, ...) \method{residuals}{bats}(object, type = c("innovation", "response"), h = 1, ...) \method{residuals}{tbats}(object, type = c("innovation", "response"), h = 1, ...) \method{residuals}{ets}(object, type = c("innovation", "response"), h = 1, ...) \method{residuals}{ARFIMA}(object, type = c("innovation", "response"), ...) \method{residuals}{nnetar}(object, type = c("innovation", "response"), h = 1, ...) \method{residuals}{stlm}(object, type = c("innovation", "response"), ...) \method{residuals}{tslm}(object, type = c("innovation", "response", "deviance"), ...) } \arguments{ \item{object}{An object containing a time series model of class \code{ar}, \code{Arima}, \code{bats}, \code{ets}, \code{arfima}, \code{nnetar} or \code{stlm}. If \code{object} is of class \code{forecast}, then the function will return \code{object$residuals} if it exists, otherwise it returns the differences between the observations and their fitted values.} \item{type}{Type of residual.} \item{...}{Other arguments not used.} \item{h}{If \code{type='response'}, then the fitted values are computed for \code{h}-step forecasts.} } \value{ A \code{ts} object. } \description{ Returns time series of residuals from a fitted model. } \details{ Innovation residuals correspond to the white noise process that drives the evolution of the time series model. Response residuals are the difference between the observations and the fitted values (equivalent to \code{h}-step forecasts). For functions with no \code{h} argument, \code{h=1}. For homoscedastic models, the innovation residuals and the response residuals for \code{h=1} are identical. Regression residuals are available for regression models with ARIMA errors, and are equal to the original data minus the effect of the regression variables. 
If there are no regression variables, the errors will be identical to the original series (possibly adjusted to have zero mean). \code{arima.errors} is a deprecated function which is identical to \code{residuals.Arima(object, type="regression")}. For \code{nnetar} objects, when \code{type="innovation"} and \code{lambda} is used, a matrix of time-series consisting of the residuals from each of the fitted neural networks is returned. } \examples{ fit <- Arima(lynx,order=c(4,0,0), lambda=0.5) plot(residuals(fit)) plot(residuals(fit, type='response')) } \seealso{ \code{\link{fitted.Arima}}, \code{\link{checkresiduals}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/forecast.Arima.Rd0000644000176200001440000001064414003673410015763 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/arfima.R, R/arima.R \name{forecast.fracdiff} \alias{forecast.fracdiff} \alias{forecast.Arima} \alias{forecast.forecast_ARIMA} \alias{forecast.ar} \title{Forecasting using ARIMA or ARFIMA models} \usage{ \method{forecast}{fracdiff}( object, h = 10, level = c(80, 95), fan = FALSE, lambda = object$lambda, biasadj = NULL, ... ) \method{forecast}{Arima}( object, h = ifelse(object$arma[5] > 1, 2 * object$arma[5], 10), level = c(80, 95), fan = FALSE, xreg = NULL, lambda = object$lambda, bootstrap = FALSE, npaths = 5000, biasadj = NULL, ... ) \method{forecast}{ar}( object, h = 10, level = c(80, 95), fan = FALSE, lambda = NULL, bootstrap = FALSE, npaths = 5000, biasadj = FALSE, ... ) } \arguments{ \item{object}{An object of class "\code{Arima}", "\code{ar}" or "\code{fracdiff}". Usually the result of a call to \code{\link[stats]{arima}}, \code{\link{auto.arima}}, \code{\link[stats]{ar}}, \code{\link{arfima}} or \code{\link[fracdiff]{fracdiff}}.} \item{h}{Number of periods for forecasting. If \code{xreg} is used, \code{h} is ignored and the number of forecast periods is set to the number of rows of \code{xreg}.} \item{level}{Confidence level for prediction intervals.} \item{fan}{If \code{TRUE}, level is set to \code{seq(51,99,by=3)}. This is suitable for fan plots.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{...}{Other arguments.} \item{xreg}{Future values of any regression variables (for class \code{Arima} objects only). A numerical vector or matrix of external regressors; it should not be a data frame.} \item{bootstrap}{If \code{TRUE}, then prediction intervals are computed using simulation with resampled errors.} \item{npaths}{Number of sample paths used in computing simulated prediction intervals when \code{bootstrap=TRUE}.} } \value{ An object of class "\code{forecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{forecast.Arima}.
An object of class "\code{forecast}" is a list containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} \item{fitted}{Fitted values (one-step forecasts)} } \description{ Returns forecasts and other information for univariate ARIMA models. } \details{ For \code{Arima} or \code{ar} objects, the function calls \code{\link[stats]{predict.Arima}} or \code{\link[stats]{predict.ar}} and constructs an object of class "\code{forecast}" from the results. For \code{fracdiff} objects, the calculations are all done within \code{\link{forecast.fracdiff}} using the equations given by Peiris and Perera (1988). } \examples{ fit <- Arima(WWWusage,c(3,1,0)) plot(forecast(fit)) library(fracdiff) x <- fracdiff.sim( 100, ma=-.4, d=.3)$series fit <- arfima(x) plot(forecast(fit,h=30)) } \references{ Peiris, M. & Perera, B. (1988), On prediction with fractionally differenced ARIMA models, \emph{Journal of Time Series Analysis}, \bold{9}(3), 215-220. } \seealso{ \code{\link[stats]{predict.Arima}}, \code{\link[stats]{predict.ar}}, \code{\link{auto.arima}}, \code{\link{Arima}}, \code{\link[stats]{arima}}, \code{\link[stats]{ar}}, \code{\link{arfima}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/tbats.components.Rd0000644000176200001440000000236214003673410016424 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/tbats.R \name{tbats.components} \alias{tbats.components} \title{Extract components of a TBATS model} \usage{ tbats.components(x) } \arguments{ \item{x}{A tbats object created by \code{\link{tbats}}.} } \value{ A multiple time series (\code{mts}) object. The first series is the observed time series. The second series is the trend component of the fitted model. Series three onwards are the seasonal components of the fitted model with one time series for each of the seasonal components. All components are transformed using estimated Box-Cox parameter. } \description{ Extract the level, slope and seasonal components of a TBATS model. The extracted components are Box-Cox transformed using the estimated transformation parameter. } \examples{ \dontrun{ fit <- tbats(USAccDeaths, use.parallel=FALSE) components <- tbats.components(fit) plot(components)} } \references{ De Livera, A.M., Hyndman, R.J., & Snyder, R. D. (2011), Forecasting time series with complex seasonal patterns using exponential smoothing, \emph{Journal of the American Statistical Association}, \bold{106}(496), 1513-1527. } \seealso{ \code{\link{tbats}}. } \author{ Slava Razbash and Rob J Hyndman } \keyword{ts} forecast/man/gold.Rd0000644000176200001440000000052514003673410014047 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/data.R \docType{data} \name{gold} \alias{gold} \title{Daily morning gold prices} \format{ Time series data } \usage{ gold } \description{ Daily morning gold prices in US dollars. 1 January 1985 -- 31 March 1989. 
} \examples{ tsdisplay(gold) } \keyword{datasets} forecast/man/subset.ts.Rd0000644000176200001440000000404514003673410015055 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/subset.R \name{subset.ts} \alias{subset.ts} \alias{subset.msts} \title{Subsetting a time series} \usage{ \method{subset}{ts}( x, subset = NULL, month = NULL, quarter = NULL, season = NULL, start = NULL, end = NULL, ... ) \method{subset}{msts}(x, subset = NULL, start = NULL, end = NULL, ...) } \arguments{ \item{x}{a univariate time series to be subsetted} \item{subset}{optional logical expression indicating elements to keep; missing values are taken as false. \code{subset} must be the same length as \code{x}.} \item{month}{Numeric or character vector of months to retain. Partial matching on month names used.} \item{quarter}{Numeric or character vector of quarters to retain.} \item{season}{Numeric vector of seasons to retain.} \item{start}{Index of start of contiguous subset.} \item{end}{Index of end of contiguous subset.} \item{...}{Other arguments, unused.} } \value{ If \code{subset} is used, a numeric vector is returned with no ts attributes. If \code{start} and/or \code{end} are used, a ts object is returned consisting of x[start:end], with the appropriate time series attributes retained. Otherwise, a ts object is returned with frequency equal to the length of \code{month}, \code{quarter} or \code{season}. } \description{ Various types of subsetting of a time series. Allows subsetting by index values (unlike \code{\link[stats]{window}}). Also allows extraction of the values of a specific season or subset of seasons in each year. For example, to extract all values for the month of May from a time series. } \details{ If character values for months are used, either upper or lower case may be used, and partial unambiguous names are acceptable. Possible character values for quarters are \code{"Q1"}, \code{"Q2"}, \code{"Q3"}, and \code{"Q4"}. } \examples{ plot(subset(gas,month="November")) subset(woolyrnq,quarter=3) subset(USAccDeaths, start=49) } \seealso{ \code{\link[base]{subset}}, \code{\link[stats]{window}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/mstl.Rd0000644000176200001440000000316714053102733014106 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/mstl.R \name{mstl} \alias{mstl} \title{Multiple seasonal decomposition} \usage{ mstl(x, lambda = NULL, iterate = 2, s.window = 7 + 4 * seq(6), ...) } \arguments{ \item{x}{Univariate time series of class \code{msts} or \code{ts}.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{iterate}{Number of iterations to use to refine the seasonal component.} \item{s.window}{Seasonal windows to be used in the decompositions. If scalar, the same value is used for all seasonal components. Otherwise, it should be a vector of the same length as the number of seasonal components (or longer).} \item{...}{Other arguments are passed to \code{\link[stats]{stl}}.} } \description{ Decompose a time series into seasonal, trend and remainder components. Seasonal components are estimated iteratively using STL. Multiple seasonal periods are allowed. The trend component is computed for the last iteration of STL. Non-seasonal time series are decomposed into trend and remainder only. 
In this case, \code{\link[stats]{supsmu}} is used to estimate the trend. Optionally, the time series may be Box-Cox transformed before decomposition. Unlike \code{\link[stats]{stl}}, \code{mstl} is completely automated. } \examples{ library(ggplot2) mstl(taylor) \%>\% autoplot() mstl(AirPassengers, lambda = "auto") \%>\% autoplot() } \seealso{ \code{\link[stats]{stl}}, \code{\link[stats]{supsmu}} } forecast/man/BoxCox.lambda.Rd0000644000176200001440000000305614003673410015545 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/guerrero.R \name{BoxCox.lambda} \alias{BoxCox.lambda} \title{Automatic selection of Box Cox transformation parameter} \usage{ BoxCox.lambda(x, method = c("guerrero", "loglik"), lower = -1, upper = 2) } \arguments{ \item{x}{a numeric vector or time series of class \code{ts}} \item{method}{Choose method to be used in calculating lambda.} \item{lower}{Lower limit for possible lambda values.} \item{upper}{Upper limit for possible lambda values.} } \value{ a number indicating the Box-Cox transformation parameter. } \description{ If \code{method=="guerrero"}, Guerrero's (1993) method is used, where lambda minimizes the coefficient of variation for subseries of \code{x}. } \details{ If \code{method=="loglik"}, the value of lambda is chosen to maximize the profile log likelihood of a linear model fitted to \code{x}. For non-seasonal data, a linear time trend is fitted while for seasonal data, a linear time trend with seasonal dummy variables is used. } \examples{ lambda <- BoxCox.lambda(AirPassengers,lower=0) air.fit <- Arima(AirPassengers, order=c(0,1,1), seasonal=list(order=c(0,1,1),period=12), lambda=lambda) plot(forecast(air.fit)) } \references{ Box, G. E. P. and Cox, D. R. (1964) An analysis of transformations. \emph{JRSS B} \bold{26} 211--246. Guerrero, V.M. (1993) Time-series analysis supported by power transformations. \emph{Journal of Forecasting}, \bold{12}, 37--48. } \seealso{ \code{\link{BoxCox}} } \author{ Leanne Chhay and Rob J Hyndman } \keyword{ts} forecast/man/seasadj.Rd0000644000176200001440000000177514003673410014544 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/seasadj.R \name{seasadj} \alias{seasadj} \alias{seasadj.stl} \alias{seasadj.mstl} \alias{seasadj.decomposed.ts} \alias{seasadj.tbats} \alias{seasadj.seas} \title{Seasonal adjustment} \usage{ seasadj(object, ...) \method{seasadj}{stl}(object, ...) \method{seasadj}{mstl}(object, ...) \method{seasadj}{decomposed.ts}(object, ...) \method{seasadj}{tbats}(object, ...) \method{seasadj}{seas}(object, ...) } \arguments{ \item{object}{Object created by \code{\link[stats]{decompose}}, \code{\link[stats]{stl}} or \code{\link{tbats}}.} \item{...}{Other arguments not currently used.} } \value{ Univariate time series. } \description{ Returns seasonally adjusted data constructed by removing the seasonal component. } \examples{ plot(AirPassengers) lines(seasadj(decompose(AirPassengers,"multiplicative")),col=4) } \seealso{ \code{\link[stats]{stl}}, \code{\link[stats]{decompose}}, \code{\link{tbats}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/DESCRIPTION0000644000176200001440000000524714166733351013607 0ustar liggesusersPackage: forecast Version: 8.16 Title: Forecasting Functions for Time Series and Linear Models Description: Methods and tools for displaying and analysing univariate time series forecasts including exponential smoothing via state space models and automatic ARIMA modelling. 
Depends: R (>= 3.0.2), Imports: colorspace, fracdiff, ggplot2 (>= 2.2.1), graphics, lmtest, magrittr, nnet, parallel, Rcpp (>= 0.11.0), stats, timeDate, tseries, urca, zoo Suggests: forecTheta, knitr, methods, rmarkdown, rticles, seasonal, testthat, uroot LinkingTo: Rcpp (>= 0.11.0), RcppArmadillo (>= 0.2.35) LazyData: yes ByteCompile: TRUE Authors@R: c( person("Rob", "Hyndman", email="Rob.Hyndman@monash.edu", role=c("aut", "cre", "cph"), comment = c(ORCID = "0000-0002-2140-5352")), person("George", "Athanasopoulos", role='aut'), person("Christoph", "Bergmeir", role="aut", comment = c(ORCID = "0000-0002-3665-9021")), person("Gabriel", "Caceres", role="aut"), person("Leanne", "Chhay", role='aut'), person("Mitchell", "O'Hara-Wild", role="aut", comment=c(ORCID = "0000-0001-6729-7695")), person("Fotios", "Petropoulos", role='aut', comment=c(ORCID = "0000-0003-3039-4955")), person("Slava", "Razbash", role="aut"), person("Earo", "Wang", role="aut"), person("Farah", "Yasmeen", role='aut', comment=c(ORCID="0000-0002-1479-5401")), person("R Core Team", role=c('ctb','cph')), person("Ross", "Ihaka", role=c('ctb','cph')), person("Daniel", "Reid", role='ctb'), person("David", "Shaub", role='ctb'), person("Yuan", "Tang", role='ctb', comment = c(ORCID = "0000-0001-5243-233X")), person("Zhenyu","Zhou", role='ctb') ) BugReports: https://github.com/robjhyndman/forecast/issues License: GPL-3 URL: https://pkg.robjhyndman.com/forecast/, https://github.com/robjhyndman/forecast VignetteBuilder: knitr Encoding: UTF-8 RoxygenNote: 7.1.2 NeedsCompilation: yes Packaged: 2022-01-10 03:36:24 UTC; hyndman Author: Rob Hyndman [aut, cre, cph] (<https://orcid.org/0000-0002-2140-5352>), George Athanasopoulos [aut], Christoph Bergmeir [aut] (<https://orcid.org/0000-0002-3665-9021>), Gabriel Caceres [aut], Leanne Chhay [aut], Mitchell O'Hara-Wild [aut] (<https://orcid.org/0000-0001-6729-7695>), Fotios Petropoulos [aut] (<https://orcid.org/0000-0003-3039-4955>), Slava Razbash [aut], Earo Wang [aut], Farah Yasmeen [aut] (<https://orcid.org/0000-0002-1479-5401>), R Core Team [ctb, cph], Ross Ihaka [ctb, cph], Daniel Reid [ctb], David Shaub [ctb], Yuan Tang [ctb] (<https://orcid.org/0000-0001-5243-233X>), Zhenyu Zhou [ctb] Maintainer: Rob Hyndman <Rob.Hyndman@monash.edu> Repository: CRAN Date/Publication: 2022-01-10 04:32:41 UTC forecast/build/0000755000176200001440000000000014166724666013201 5ustar liggesusersforecast/build/vignette.rds0000644000176200001440000000041614166724666015541 0ustar liggesusers[binary RDS payload omitted]
forecast/tests/0000755000176200001440000000000014003673410013220 5ustar liggesusersforecast/tests/testthat/0000755000176200001440000000000014166733351015073 5ustar liggesusersforecast/tests/testthat/test-tslm.R0000644000176200001440000001204714133712615017150 0ustar liggesusers# A unit test for tslm function if (require(testthat)) { context("Tests on building model in tslm") mv_y <- ts(cbind(rnorm(120, 0, 3) + 1:120 + 20 * sin(2 * pi * (1:120) / 12), rnorm(120, 3, 7) + 1:120 + 16 * sin(2 * pi * (1:120 + 6) / 12)), frequency = 12) mv_x <- ts(cbind(rnorm(120, 0, 8) + (1:120) / 2 + 42 * sin(2 * pi * (1:120) / 12), rnorm(120, 3, 7) + (1:120) * -1 + 20 * sin(2 * pi * (1:120 + 6) / 12)), frequency = 12) v_y <- ts(rnorm(120, 0, 8) + (1:120) / 2 + 12 * sin(2 * pi * (1:120) / 12), frequency = 12) v_x <- ts(rnorm(120, 0, 1) + (1:120) * (-1) + 28 * sin(2 * pi * (1:120) / 12), frequency = 12) data <- datamat(mv_y, mv_x, v_y, v_x, fourier(v_y, 3)) test_that("tests on model building with univariate time series", { fit1 <- tslm(v_y ~ trend + season, data = data) fit2 <- tslm(v_y ~ trend + season, data = data, lambda = 2, biasadj = FALSE) fit3 <- tslm(v_y ~ trend + season, data = data, lambda = 2, biasadj = TRUE) expect_false(identical(fit2$fitted.values, fit3$fitted.values)) fit2 <- tslm(v_y ~ trend + season, data = data.frame(trend = rnorm(120))) expect_false(identical(fit1$model, fit2$model)) fit2 <- tslm(v_y ~ trend + season) expect_that(names(fit1), equals(names(fit2))) expect_that(fit1$model, equals(fit2$model)) expect_that(fit1$coefficients, equals(fit2$coefficients)) fit1 <- tslm(USAccDeaths ~ trend + season, data = USAccDeaths) fit2 <- tslm(USAccDeaths ~ trend + season) expect_that(names(fit1), equals(names(fit2))) expect_that(fit1$model, equals(fit2$model)) expect_that(fit1$coefficients, equals(fit2$coefficients)) expect_warning(fit3 <- tslm( USAccDeaths ~ trend + season, data = USAccDeaths, subset = time(USAccDeaths) %% 1 < 0.1 )) fit <- tslm(USAccDeaths ~ trend + season + trend * season, data = USAccDeaths) expect_true("trend:season" %in% attr(fit$terms, "term.labels")) }) test_that("tslm parity with lm", { fit1 <- tslm(v_y ~ v_x + fourier(v_y, 3), data = data.frame(v_y = v_y)) fit2 <- lm(v_y ~ v_x + fourier(v_y, 3), data = data.frame(v_y = v_y)) expect_equal(fit1$coefficients, fit2$coefficients) expect_equal(fit1$model, fit2$model) }) test_that("tests on subsetting data", { a <- mv_y[, 1] expect_warning(fit1 <- tslm(mv_y ~ trend, subset = a < 20), "Subset has been assumed contiguous") expect_error(fit2 <- tslm(mv_y ~ trend, subset = subset(mv_y, mv_y[, 1] < 20))) expect_warning(tslm(v_y ~ trend + season + trend * season, subset = v_y < 100), "Subset has been assumed contiguous") }) test_that("tests on model building with multivariate time series", { fit1 <- tslm(mv_y ~ trend + season) fit2 <- tslm(mv_y ~ trend + season, lambda = 0.5) expect_false(identical(fit1$coefficients, fit2$coefficients)) fit3 <- tslm(mv_y ~ trend + season, lambda = 0.5, biasadj = TRUE) expect_false(identical(fit2$fitted.values, fit3$fitted.values)) fit2 <- tslm(mv_y ~ trend + season, data = data) expect_that(names(fit1), equals(names(fit2))) expect_that(fit1$model, equals(fit2$model)) expect_that(fit1$coefficients, equals(fit2$coefficients)) expect_warning(fit3 <- tslm(mv_y ~ trend + season, subset = mv_y[, 1] < 1), "Subset has been assumed contiguous") expect_warning(fit4 <- tslm(mv_y ~ trend + season, data = data, subset = mv_y[, 1] < 1), "Subset has been assumed contiguous")
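    # fit3 and fit4 use the same non-contiguous subset, once with and once
    # without data=, so the comparisons that follow should agree on names,
    # model frame and coefficients.
    # A further check one might add here (sketch, not in the original suite;
    # it assumes multivariate fits forecast to an mforecast whose $forecast
    # component is a list with one element per response series):
    # fc <- forecast(fit1, h = 12); expect_length(fc$forecast, 2)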
expect_that(names(fit3), equals(names(fit4))) expect_that(fit3$model, equals(fit4$model)) expect_that(fit3$coefficients, equals(fit4$coefficients)) }) test_that("tests with bad input", { expect_error(tslm(mpg ~ cyl, data = mtcars), "Not time series data") expect_error(tslm(tmp2 ~ trend + season + trend * season, subset = subset(tmp2, month = "January")), "Non-seasonal data cannot be modelled using a seasonal factor") }) test_that("forecast.lm", { fit1 <- tslm(v_y ~ trend + season, lambda = 2, biasadj = FALSE) fit2 <- tslm(v_y ~ trend + season, lambda = 2, biasadj = TRUE) fcast1 <- forecast(fit1, h = 60, biasadj = FALSE) fcast2 <- forecast(fit2, h = 60, biasadj = TRUE) expect_false(identical(fcast1$mean, fcast2$mean)) fred <- tslm(ldeaths ~ trend + season, lambda = 0) fc <- forecast(fred) }) test_that("Unusual usage", { expect_silent(fit1 <- tslm(v_y ~ trend + v_x + I(v_x ^ 2) + fourier(v_x, 3))) # forecast(fit1, newdata=data.frame(v_x=ts(1:2,freq=12))) # tslm(v_y ~ trend + I(v_x) + I(v_x^2) + fourier(v_x, 3), data=data) # tslm(v_y ~ trend + season + I(v_x) + I(v_x^2) + fourier(ts(season, freq=12), 3)) # fit2 <- tslm(v_y ~ trend + season + I(v_x)*fourier(v_x,3)) # forecast(fit2, newdata=data.frame(v_x=ts(1:2,freq=12))) # tslm(v_y ~ trend + season + I(v_x)*fourier(v_x,3),data=data) }) test_that("Missing values", { USMissingDeaths <- USAccDeaths USMissingDeaths[c(1, 44, 72)] <- NA timetrend <- 1:72 fit <- tslm(USMissingDeaths ~ season + timetrend) expect_equal(sum(is.na(residuals(fit))), 3) fc <- forecast(fit, newdata = data.frame(timetrend = 73)) expect_length(fc$mean, 1) }) } forecast/tests/testthat/test-nnetar.R0000644000176200001440000001317114003673410017452 0ustar liggesusers# A unit test for nnetar.R if (require(testthat)) { context("Testing nnetar") test_that("Tests for nnetar", { oilnnet <- nnetar(airmiles, lambda = 0.15) woolyrnqnnet <- nnetar(woolyrnq, repeats = 10) expect_output(print(woolyrnqnnet), regexp = "Series: woolyrnq") expect_true(length(forecast(oilnnet)$mean) == 10) expect_true(length(forecast(woolyrnqnnet)$mean) == 2 * frequency(woolyrnq)) # # Test with single-column xreg (which might be a vector) uscnnet <- nnetar(woolyrnq, xreg = 1:length(woolyrnq)) expect_true(all(dim(uscnnet$xreg) == c(119, 1))) expect_true(length(forecast(uscnnet, xreg = 120:130)$mean) == 11) # Test default size with and without xreg uscnnet <- nnetar(woolyrnq, p = 2, P = 2) expect_output( print(uscnnet), regexp = "NNAR(2,2,2)", fixed = TRUE ) expect_output( print(uscnnet), regexp = "4-2-1 network", fixed = TRUE ) expect_true(uscnnet$size == 2) uscnnet <- nnetar(woolyrnq, p = 2, P = 2, xreg = 1:119, repeats = 10) expect_output( print(uscnnet), regexp = "NNAR(2,2,3)", fixed = TRUE ) expect_output( print(uscnnet), regexp = "5-3-1 network", fixed = TRUE ) expect_true(uscnnet$size == 3) # Test P=0 when m>1 uscnnet <- nnetar(woolyrnq, p = 4, P = 0) expect_true(uscnnet$size == 2) expect_output(print(uscnnet), regexp = "NNAR(4,2)", fixed = TRUE) # Test overlapping p & P uscnnet <- nnetar(woolyrnq, p = 4, P = 2) expect_true(uscnnet$size == 3) expect_output( print(uscnnet), regexp = "NNAR(4,2,3)", fixed = TRUE ) expect_output( print(uscnnet), regexp = "5-3-1 network", fixed = TRUE ) # Test with multiple-column xreg creditnnet <- nnetar( wineind, xreg = cbind(bizdays(wineind), fourier(wineind, 1)) ) expect_warning(forecast(creditnnet, h = 2, xreg = matrix(2, 2, 3))$mean, "different column names") %>% expect_length(2L) # Test if h doesn't match xreg expect_warning(forecast(creditnnet, h = 5, xreg = matrix(2,
2, 3))$mean, "different column names") %>% expect_length(2L) # Test that P is ignored if m=1 expect_warning(creditnnet <- nnetar(WWWusage, p = 2, P = 4, xreg = 1:length(WWWusage))) expect_output( print(creditnnet), regexp = "NNAR(2,2)", fixed = TRUE ) # Test fixed size creditnnet <- nnetar(WWWusage, p = 1, P = 1, xreg = 1:length(WWWusage), size = 12) expect_true(uscnnet$size == 3) expect_output(print(creditnnet), regexp = "NNAR(1,12)", fixed = TRUE) # Test passing arguments to nnet expect_warning(creditnnet <- nnetar( WWWusage, p = 2, P = 4, xreg = 1:length(WWWusage), decay = 0.1 )) expect_output( print(creditnnet), regexp = "decay=0.1", fixed = TRUE ) ## Test output format correct oilnnet <- nnetar(airmiles, p = 1, size = 0, skip = TRUE, Wts = c(0, 1), maxit = 0, repeats = 10) expect_true(all.equal(oilnnet$fitted[-1], airmiles[-length(airmiles)])) ## Test output format correct when NAs present oilna <- airmiles oilna[12] <- NA suppressWarnings(oilnnet <- nnetar(oilna, p = 1, size = 0, skip = TRUE, Wts = c(0, 1), maxit = 0)) expect_true(all.equal(oilnnet$fitted[-c(1, 12, 13)], oilna[-c(11, 12, length(oilna))])) ## Test model argument fit1 <- nnetar( WWWusage, xreg = 1:length(WWWusage), lambda = 2, decay = 0.5, maxit = 25, repeats = 7 ) fit2 <- nnetar(WWWusage, xreg = 1:length(WWWusage), model = fit1) # Check some model parameters expect_true(identical(fit1$p, fit2$p)) expect_true(identical(fit1$lambda, fit2$lambda)) expect_true(identical(fit1$nnetargs, fit2$nnetargs)) # Check fitted values are all the same expect_true(identical(fitted(fit1), fitted(fit2))) # Check residuals all the same expect_true(identical(residuals(fit1), residuals(fit2))) # Check number of neural nets expect_true(identical(length(fit1$model), length(fit2$model))) # Check neural network weights all the same expect_true(identical(fit1$model[[1]]$wts, fit2$model[[1]]$wts)) expect_true(identical(fit1$model[[7]]$wts, fit2$model[[7]]$wts)) # Check subset argument oilnnet <- nnetar(airmiles, subset = 11:20) expect_true(identical(which(!is.na(fitted(oilnnet))), 11:20)) oilnnet <- nnetar(airmiles, subset = c(rep(F, 10), rep(T, 10), rep(F, length(airmiles) - 20))) expect_true(identical(which(!is.na(fitted(oilnnet))), 11:20)) ## Check short and constant data expect_warning(nnetfit <- nnetar(rep(1, 10), p=2, P=0, size=1, repeats=1, lambda = 0.1), "Constant data") expect_true(nnetfit$p == 1) expect_true(is.null(nnetfit$lambda)) expect_true(is.null(nnetfit$scalex)) expect_error(nnetfit <- nnetar(rnorm(2), p=1, P=0, size=1, repeats=1), "Not enough data") expect_silent(nnetfit <- nnetar(rnorm(3), p=1, P=0, size=1, repeats=1)) expect_true(nnetfit$p == 1) expect_silent(nnetfit <- nnetar(rnorm(3), p=2, P=0, size=1, repeats=1)) expect_true(nnetfit$p == 2) expect_warning(nnetfit <- nnetar(rnorm(3), p=3, P=0, size=1, repeats=1), "short series") expect_true(nnetfit$p == 2) expect_warning(nnetfit <- nnetar(rnorm(3), p=4, P=0, size=1, repeats=1), "short series") expect_true(nnetfit$p == 2) expect_warning(nnetfit <- nnetar(rnorm(10), xreg=rep(1, 10), p=2, P=0, size=1, repeats=1, lambda = 0.1), "Constant xreg") expect_true(is.null(nnetfit$scalexreg)) expect_warning(nnetfit <- nnetar(rnorm(3), xreg=matrix(c(1, 2, 3, 1, 1, 1), ncol=2), p=1, P=0, size=1, repeats=1, lambda = 0.1), "Constant xreg") expect_true(is.null(nnetfit$scalexreg)) }) } forecast/tests/testthat/test-ets.R0000644000176200001440000000426614053117140016761 0ustar liggesusers# A unit test for ets function if (require(testthat)) { context("Tests on input") test_that("tests for some 
arguments in ets", { fit <- ets(wineind, model = "ZZM") comp <- paste0(fit$components[1:3], collapse = "") expect_that(comp, equals("MAM")) }) test_that("tests for some arguments in ets", { fit <- ets(wineind, model = "MAM", alpha = 0.1611) expect_that(as.numeric(fit$par["alpha"]), equals(0.1611)) }) test_that("refit ets model to new data", { fit <- ets(wineind, model = "MAM", alpha = 0.1611) parnames <- c("alpha", "beta", "gamma") par <- fit$par[parnames] expect_identical(ets(wineind, model = fit, alpha = 0.1611)$par[parnames], par) expect_identical(ets(wineind, model = fit, alpha = 0.1611, beta = NA)$par[parnames], par) expect_identical(ets(wineind, model = fit, alpha = 0.1611, gamma = NA)$par[parnames], par) expect_identical(ets(wineind, model = fit, alpha = 0.1611, phi = NA)$par[parnames], par) expect_identical(ets(wineind, model = fit, alpha = 0.1611, use.initial.values = TRUE)$par, fit$par) }) test_that("class methods for ets work", { fit <- ets(wineind, model = "MAM", alpha = 0.1611) expect_output(print(summary(fit)), "Smoothing parameters") expect_equal(length(coef(fit)), 16L) expect_lt(abs(logLik(fit) + 1802.9586023), 1e-5) plot(fit) }) test_that("test ets() for errors", { expect_warning(ets(taylor)) fit1 <- ets(airmiles, lambda = 0.15, biasadj = FALSE) expect_gt(fit1$par["alpha"], 0.95) fit2 <- ets(airmiles, lambda = 0.15, biasadj = TRUE) expect_lt(fit2$par["beta"], 1e-3) expect_false(identical(fit1$fitted, fit2$fitted)) expect_error(ets(taylor, model = "ZZA")) }) test_that("forecast.ets()", { fit <- ets(airmiles, lambda = 0.15, biasadj = TRUE) fcast1 <- forecast(fit, PI = FALSE) expect_true(is.null(fcast1$upper) & is.null(fcast1$lower)) fcast1 <- forecast(fit, biasadj = FALSE) fcast2 <- forecast(fit, biasadj = TRUE) expect_false(identical(fcast1$mean, fcast2$mean)) fcast <- forecast(fit, simulate = TRUE) expect_true(!is.null(fcast$upper) & !is.null(fcast$lower)) expect_true(all(fcast$upper > fcast$lower)) }) } forecast/tests/testthat/test-calendar.R0000644000176200001440000000134514003673410017734 0ustar liggesusers# A unit test for calendar.R if (require(testthat)) { context("Testing calendar functions") test_that("Tests for bizdays()", { expect_error(bizdays(1:20)) b1 <- bizdays(woolyrnq, FinCenter = "New York") b2 <- bizdays(woolyrnq, FinCenter = "London") b3 <- bizdays(woolyrnq, FinCenter = "Zurich") expect_equal(sum(abs(b1 - b2)), 109L) expect_equal(sum(abs(b1 - b3)), 144L) expect_equal(sum(abs(b2 - b3)), 117L) b1 <- bizdays(gas, FinCenter = "NERC") b2 <- bizdays(gas, FinCenter = "Tokyo") expect_equal(sum(abs(b1 - b2)), 244L) }) test_that("Tests for easter()", { expect_true(length(easter(woolyrnq)) == length(woolyrnq)) expect_true(length(easter(wineind)) == length(wineind)) }) } forecast/tests/testthat/test-spline.R0000644000176200001440000000115614003673410017455 0ustar liggesusers# A unit test for spline.R if (require(testthat)) { context("Testing splinef()") test_that("Tests for splinef()", { plot.splineforecast(splinef(airmiles)) fit1 <- splinef(woolyrnq, lambda = 0.2, biasadj = FALSE) fit2 <- splinef(woolyrnq, lambda = 0.2, biasadj = TRUE) expect_false(identical(fit1$mean, fit2$mean)) splinef(woolyrnq, method = "mle") splinef(WWWusage, method = "mle") expect_true(all(as.numeric(splinef(WWWusage, fan = TRUE)$mean) == as.numeric(splinef(WWWusage)$mean))) expect_error(splinef(woolyrnq, level = 110)) expect_error(splinef(woolyrnq, level = -10)) }) } forecast/tests/testthat/test-forecast.R0000644000176200001440000000306614053117140017771 0ustar liggesusers# A unit test for 
forecast.R if (require(testthat)) { context("Test forecast.R") test_that("tests for findfrequency()", { expect_true(frequency(airmiles) == findfrequency(as.numeric(airmiles))) expect_false(frequency(wineind) == findfrequency(as.numeric(wineind))) expect_true(frequency(woolyrnq) == findfrequency(as.numeric(woolyrnq))) expect_true(frequency(gas) == findfrequency(as.numeric(gas))) }) test_that("tests forecast.ts()", { fc1 <- as.numeric(forecast(as.numeric(airmiles), find.frequency = TRUE)$mean) fc2 <- as.numeric(forecast(airmiles)$mean) expect_true(all(fc1 == fc2)) }) test_that("tests summary.forecast() and forecast.forecast()", { WWWusageforecast <- forecast(WWWusage) expect_output(print(summary(WWWusageforecast)), regexp = "Forecast method:") expect_true(all(predict(WWWusageforecast)$mean == forecast(WWWusageforecast)$mean)) }) # test_that("tests plot.forecast()", { # # Fit several types of models for plotting # batsmod <- bats(woolyrnq) # nnetmod <- nnetar(woolyrnq) # tslmmod <- tslm(woolyrnq ~ trend + season) # nnetfc<- forecast(nnetmod) # batsfc <- forecast(batsmod) # tslmfc <- forecast(tslmmod) # skip_on_travis() # # Plot the forecasts # expect_that(plot(nnetfc), not(throws_error())) # expect_that(plot(batsfc), not(throws_error())) # expect_that(plot(batsfc, shaded = FALSE), not(throws_error())) # expect_that(plot(tslmfc, PI = FALSE), not(throws_error())) # expect_that(plot(forecast(tslmmod, h = 0)), not(throws_error())) # }) } forecast/tests/testthat/test-season.R0000644000176200001440000000721714133703625017465 0ustar liggesusers# A unit test for na.interp() and tsclean() if (require(testthat)) { test_that("tests for monthdays", { expect_error(monthdays(rnorm(10))) expect_error(monthdays(rnorm(10))) expect_true(all(monthdays(ts(rep(100, 12), frequency = 12)) == c(31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31))) expect_true(all(monthdays(ts(rep(1, 4), frequency = 4)) == c(90, 91, 92, 92))) # Test leapyears expect_true(monthdays(ts(rep(1, 48), frequency = 12))[38] == 29) expect_true(monthdays(ts(rep(1, 16), frequency = 4))[13] == 91) }) test_that("tests for seasonaldummy", { expect_error(seasonaldummy(1)) testseries <- ts(rep(1:7, 5), frequency = 7) dummymat <- seasonaldummy(testseries) expect_true(length(testseries) == nrow(dummymat)) expect_true(ncol(dummymat) == 6) expect_true(all(seasonaldummy(wineind)[1:11, ] == diag(11))) }) test_that("tests for seasonaldummyf", { expect_error(seasonaldummy(1)) expect_warning(dummymat <- seasonaldummyf(wineind, 4), "deprecated") expect_true(nrow(dummymat) == 4) expect_true(ncol(dummymat) == 11) }) test_that("tests for fourier", { expect_error(fourier(1)) testseries <- ts(rep(1:7, 5), frequency = 7) fouriermat <- fourier(testseries, 3) expect_true(length(testseries) == nrow(fouriermat)) expect_true(ncol(fouriermat) == 6) expect_true(all(grep("-7", colnames(fouriermat)))) }) test_that("tests for fourierf", { expect_warning(fouriermat <- fourierf(wineind, 4, 10), "deprecated") expect_true(nrow(fouriermat) == 10) expect_true(ncol(fouriermat) == 8) }) test_that("tests for stlm", { expect_warning(stlm(ts(rep(5, 24), frequency = 4), etsmodel = "ZZZ")) }) test_that("tests for forecast.stlm", { expect_error(forecast.stlm(stlm(wineind), newxreg = matrix(rep(1, 24), ncol = 2))) stlmfit1 <- stlm(woolyrnq, method = "ets") stlmfit2 <- stlm(woolyrnq, method = "arima", approximation = FALSE) fcfit1 <- forecast(stlmfit1) fcfit2 <- forecast(stlmfit1, fan = TRUE) expect_true(all(fcfit2$level == seq(from = 51, to = 99, by = 3))) fcstlmfit3 <- forecast(stlmfit2) 
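    # A constant series should survive the STL decomposition unchanged, so
    # the two checks below expect flat forecasts at the constant (to rounding
    # error), both with and without a Box-Cox transformation.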
expect_true(all(round(forecast(stlm(ts(rep(100, 120), frequency = 12)))$mean, 10) == 100)) expect_true(all(round(forecast(stlm(ts(rep(100, 120), frequency = 12), lambda = 1))$mean, 10) == 100)) }) test_that("tests for stlf", { expect_true(all(forecast(stlm(wineind))$mean == stlf(wineind)$mean)) expect_true(all(forecast(stlm(wineind, lambda = .5))$mean == stlf(wineind, lambda = .5)$mean)) fit1 <- stlf(wineind, lambda = .2, biasadj = FALSE) fit2 <- stlf(wineind, lambda = .2, biasadj = TRUE) expect_false(identical(fit1$mean, fit2$mean)) # Constant series should not error series <- ts(rep(950, 20), frequency = 4) constantForecast <- expect_error(stlf(series), NA) # Small eps expect_true(all(abs(constantForecast$mean - mean(series)) < 10^-8)) y <- ts(rep(1:7, 3), frequency = 7) expect_equal(c(stlf(y)$mean), rep(1:7, 2)) }) test_that("tests for ma", { testseries <- ts(1:20, frequency = 4) expect_true(frequency(ma(testseries, order = 4)) == frequency(testseries)) maseries <- ma(testseries, order = 3) expect_true(identical(which(is.na(maseries)), c(1L, 20L))) expect_true(all(abs(maseries[2:19] - 2:19) < 1e-14)) maseries <- ma(testseries, order = 2, centre = FALSE) expect_true(identical(which(is.na(maseries)), 20L)) expect_true(all(abs(maseries[1:19] - 1:19 - 0.5) < 1e-14)) maseries <- ma(testseries, order = 2, centre = TRUE) expect_true(identical(which(is.na(maseries)), c(1L, 20L))) expect_true(all(abs(maseries[2:19] - 2:19) < 1e-14)) }) } forecast/tests/testthat/test-ggplot.R0000644000176200001440000000533214003673410017457 0ustar liggesusers# A unit test for ggplot support if (require(testthat)) { context("forecast ggplot tests") test_that("tests for autoplot/gg functions", { library(ggplot2) lungDeaths <- cbind(mdeaths, fdeaths) ggAcf(wineind) autoplot(Acf(wineind)) expect_identical(ggAcf(wineind, plot = FALSE)$acf, acf(wineind, plot = FALSE, lag.max = 24)$acf) ggPacf(wineind) autoplot(Pacf(wineind)) expect_identical(ggPacf(wineind, plot = FALSE)$acf, acf(wineind, plot = FALSE, type = "partial", lag.max = 24)$acf) ggCcf(mdeaths, fdeaths) autoplot(Ccf(mdeaths, fdeaths)) expect_identical(ggCcf(mdeaths, fdeaths, plot = FALSE)$acf, ccf(mdeaths, fdeaths, plot = FALSE, type = "correlation", lag.max = 24)$acf) arimafit <- Arima(USAccDeaths, order = c(1, 1, 1), seasonal = c(1, 1, 1)) autoplot(arimafit) autoplot(arimafit, type = "ma") autoplot(arimafit, type = "ar") arfit <- ar(USAccDeaths) autoplot(arfit) decomposefit <- decompose(USAccDeaths) autoplot(decomposefit) etsfit <- ets(USAccDeaths, model = "ANA") autoplot(etsfit) structfit <- StructTS(USAccDeaths) autoplot(structfit) stlfit <- stl(USAccDeaths, s.window = "periodic") autoplot(stlfit) # seasfit <- seasonal::seas(USAccDeaths) # autoplot(seasfit) etsfcast <- forecast(etsfit) autoplot(etsfcast) autoplot(etsfcast, PI = FALSE) lmfit <- lm(mpg ~ disp, data = mtcars) lmfcast <- forecast(lmfit, newdata = data.frame(disp = 214)) autoplot(lmfcast) mfcast <- forecast(lungDeaths) autoplot(mfcast) ggtsdisplay(USAccDeaths, plot.type = "spectrum") ggtsdisplay(USAccDeaths, plot.type = "partial") ggtsdisplay(USAccDeaths, plot.type = "histogram") ggtsdisplay(USAccDeaths, plot.type = "scatter", theme = ggplot2::theme_bw()) gglagplot(woolyrnq, lags = 2) gglagplot(lungDeaths, lags = 2) gglagplot(WWWusage, do.lines = FALSE, colour = FALSE, labels = TRUE) gglagchull(woolyrnq, lags = 4) ggmonthplot(woolyrnq) ggseasonplot(woolyrnq, year.labels = TRUE, year.labels.left = TRUE) ggseasonplot(USAccDeaths, polar = TRUE, col = 1:5, continuous = TRUE) splinefit <- 
splinef(airmiles, h = 5) autoplot(splinefit) autoplot(USAccDeaths) autoplot(lungDeaths) autoplot(lungDeaths, facets = TRUE) autoplot(USAccDeaths) + geom_forecast() autoplot(USAccDeaths) + autolayer(etsfcast, series = "ETS") autoplot(lungDeaths) + geom_forecast() autoplot(lungDeaths) + autolayer(mfcast, series = c("mdeaths", "fdeaths")) autoplot(lungDeaths) + autolayer(mfcast) autoplot(lungDeaths) + autolayer(mfcast, series = TRUE) autoplot(lungDeaths, facets = TRUE) + geom_forecast() gghistogram(USAccDeaths, add.kde = TRUE) }) } forecast/tests/testthat/test-refit.R0000644000176200001440000001225514003673410017276 0ustar liggesusers# A unit test for re-fitting models if (require(testthat)) { context("Re-fitting models") test_that("tests for re-fitting models", { # arima fit <- Arima(mdeaths, c(1, 0, 0), c(2, 0, 0), include.mean = FALSE, include.drift = TRUE) refit <- Arima(fdeaths, model = fit) expect_true(identical(fit$coef, refit$coef)) expect_false(identical(fit$x, refit$x)) expect_false(identical(fit$fitted, refit$fitted)) expect_false(identical(fit$residuals, refit$residuals)) refit_same <- Arima(mdeaths, model = fit) expect_true(identical(fit$coef, refit_same$coef)) expect_true(identical(fit$x, refit_same$x)) expect_true(all.equal(fit$fitted, refit_same$fitted)) expect_true(all.equal(fit$residuals, refit_same$residuals)) # arfima fit <- arfima(mdeaths) refit <- arfima(fdeaths, model = fit) expect_true(identical(fit$ar, refit$ar)) expect_true(identical(fit$ma, refit$ma)) expect_false(identical(fit$x, refit$x)) expect_false(identical(fit$fitted, refit$fitted)) expect_false(identical(fit$residuals, refit$residuals)) refit_same <- arfima(mdeaths, model = fit) expect_true(identical(fit$ar, refit_same$ar)) expect_true(identical(fit$ma, refit_same$ma)) expect_true(identical(fit$x, refit_same$x)) expect_true(identical(fit$fitted, refit_same$fitted)) expect_true(identical(fit$residuals, refit_same$residuals)) # dshw fit <- dshw(mdeaths, period1 = 4, period2 = 12) refit <- dshw(fdeaths, model = fit) expect_false(identical(fit$x, refit$x)) expect_false(identical(fit$fitted, refit$fitted)) expect_false(identical(fit$residuals, refit$residuals)) refit_same <- dshw(mdeaths, model = fit) expect_true(identical(fit$model, refit_same$model)) expect_true(identical(fit$x, refit_same$x)) expect_true(identical(fit$fitted, refit_same$fitted)) expect_true(identical(fit$residuals, refit_same$residuals)) # ets fit <- ets(mdeaths) refit <- ets(fdeaths, model = fit, use.initial.values = TRUE) expect_true(identical(fit$fit, refit$fit)) expect_false(identical(fit$x, refit$x)) expect_false(identical(fit$fitted, refit$fitted)) expect_false(identical(fit$residuals, refit$residuals)) refit_same <- ets(mdeaths, model = fit, use.initial.values = TRUE) expect_true(identical(fit$x, refit_same$x)) expect_true(identical(fit$fitted, refit_same$fitted)) expect_true(identical(residuals(fit), residuals(refit_same))) # stlm fit <- stlm(mdeaths) refit <- stlm(fdeaths, model = fit) expect_true(identical(fit$model$par, refit$model$par)) expect_false(identical(fit$x, refit$x)) expect_false(identical(fit$fitted, refit$fitted)) expect_false(identical(fit$residuals, refit$residuals)) refit_same <- stlm(mdeaths, model = fit) expect_true(identical(fit$model$par, refit_same$model$par)) expect_true(identical(fit$x, refit_same$x)) expect_true(identical(fit$fitted, refit_same$fitted)) expect_true(identical(fit$residuals, refit_same$residuals)) # bats fit <- bats(mdeaths) refit <- bats(fdeaths, model = fit) 
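    # As with the models above, model= reuses the estimated parameters on the
    # new series: the parameters should be identical while the data-dependent
    # slots (x, fitted values, residuals) should differ.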
expect_true(identical(fit$parameters, refit$parameters)) expect_false(identical(fit$x, refit$x)) expect_false(identical(fit$fitted.values, refit$fitted.values)) expect_false(identical(residuals(fit), residuals(refit))) refit_same <- bats(mdeaths, model = fit) expect_true(identical(fit$model$par, refit_same$model$par)) expect_true(identical(fit$x, refit_same$x)) expect_true(identical(fit$fitted.values, refit_same$fitted.values)) expect_true(identical(residuals(fit), residuals(refit_same))) # tbats fit <- tbats(mdeaths) refit <- tbats(fdeaths, model = fit) expect_true(identical(fit$parameters, refit$parameters)) expect_false(identical(fit$x, refit$x)) expect_false(identical(fit$fitted.values, refit$fitted.values)) expect_false(identical(residuals(fit), residuals(refit))) refit_same <- tbats(mdeaths, model = fit) expect_true(identical(fit$model$par, refit_same$model$par)) expect_true(identical(fit$x, refit_same$x)) expect_true(identical(fit$fitted.values, refit_same$fitted.values)) expect_true(identical(residuals(fit), residuals(refit_same))) # nnetar fit <- nnetar(mdeaths) refit <- nnetar(fdeaths, model = fit) expect_false(identical(fit$x, refit$x)) expect_false(identical(fit$fitted, refit$fitted)) expect_false(identical(fit$residuals, refit$residuals)) refit_same <- nnetar(mdeaths, model = fit) expect_true(identical(fit$x, refit_same$x)) expect_true(identical(fit$fitted, refit_same$fitted)) expect_true(identical(residuals(fit), residuals(refit_same))) # forecast.ts fit <- forecast(mdeaths) refit <- forecast(fdeaths, model = fit, use.initial.values = TRUE) expect_false(identical(fit$x, refit$x)) expect_false(identical(fit$fitted, refit$fitted)) expect_false(identical(fit$residuals, refit$residuals)) refit_same <- forecast(mdeaths, model = fit, use.initial.values = TRUE) expect_true(identical(fit$x, refit_same$x)) expect_true(identical(fit$fitted, refit_same$fitted)) expect_true(identical(residuals(fit), residuals(refit_same))) }) } forecast/tests/testthat/test-newarima2.R0000644000176200001440000000411214053117140020041 0ustar liggesusers# A unit test functions in newarima2.R if (require(testthat)) { test_that("test auto.arima() and associated methods", { expect_warning(auto.arima(rep(1, 100), stepwise = TRUE, parallel = TRUE)) set.seed(345) testseries1 <- ts(rnorm(100) + 1:100, frequency = 0.1) xregmat <- matrix(runif(300), ncol = 3) expect_true(frequency(forecast(auto.arima(testseries1))) == 1) fit1 <- auto.arima(testseries1, xreg = xregmat, allowdrift = FALSE) expect_true(all(xregmat == fit1$xreg)) testseries2 <- ts(rep(100, 120), frequency = 12) xregmat <- matrix(runif(240), ncol = 2) expect_output(print(auto.arima(testseries2, xreg = xregmat)), regexp = "Series: testseries2") expect_output(print(summary(auto.arima(testseries2, xreg = xregmat, approximation = TRUE, stepwise = FALSE))), regexp = "Series: testseries2") expect_output(print(auto.arima(ts(testseries2, frequency = 4), approximation = TRUE, trace = TRUE)), regexp = "ARIMA") fit1 <- auto.arima(testseries1, stepwise = FALSE, lambda = 2, biasadj = FALSE) fit2 <- auto.arima(testseries1, stepwise = FALSE, lambda = 2, biasadj = TRUE) expect_false(identical(fit1$fitted, fit2$fitted)) }) test_that("test parallel = TRUE and stepwise = FALSE for auto.arima()", { skip_if(identical(Sys.getenv("GITHUB_ACTIONS"), "true")) expect_equal(auto.arima(WWWusage, parallel = TRUE, stepwise = FALSE)$arma, c(3L, 0L, 0L, 0L, 1L, 1L, 0L)) }) test_that("tests for ndiffs()", { expect_true(ndiffs(AirPassengers, test = "kpss") == 1) 
expect_true(ndiffs(AirPassengers, test = "adf") == 1) expect_true(ndiffs(AirPassengers, test = "pp") == 1) }) test_that("tests for nsdiffs()", { expect_true(nsdiffs(AirPassengers, test = "seas") == 1) expect_true(nsdiffs(AirPassengers, test = "ocsb") == 1) expect_error(nsdiffs(airmiles)) expect_true(nsdiffs(rep(1, 100)) == 0) expect_warning(nsdiffs(ts(rnorm(10), frequency = 0.1))) skip_if_not_installed("uroot") expect_true(nsdiffs(AirPassengers, test = "hegy") == 1) expect_true(nsdiffs(AirPassengers, test = "ch") == 0) }) } forecast/tests/testthat/test-wrangle.R0000644000176200001440000000233514003673410017622 0ustar liggesusers# A unit test for wrangling functions if (require(testthat)) { context("Tests joining data.frames") mv_y <- ts(cbind(rnorm(120, 0, 3) + 1:120 + 20 * sin(2 * pi * (1:120) / 12), rnorm(120, 3, 7) + 1:120 + 16 * sin(2 * pi * (1:120 + 6) / 12)), frequency = 12) mv_x <- ts(cbind(rnorm(120, 0, 8) + (1:120) / 2 + 42 * sin(2 * pi * (1:120) / 12), rnorm(120, 3, 7) + (1:120) * -1 + 20 * sin(2 * pi * (1:120 + 6) / 12)), frequency = 12) v_y <- ts(rnorm(120, 0, 8) + (1:120) / 2 + 12 * sin(2 * pi * (1:120) / 12), frequency = 12) v_x <- ts(rnorm(120, 0, 1) + (1:120) * (-1) + 28 * sin(2 * pi * (1:120) / 12), frequency = 12) test_that("tests on retaining matrix attributes", { data <- datamat(mv_y, mv_x, v_y, v_x) expect_true(is.ts(data[, 1])) expect_true(identical(tsp(data[, 1]), tsp(data[, 2]))) expect_true(NCOL(data) == 8) expect_true(NCOL(data[, 1]) == 2) expect_true("matrix" %in% class(data[, 1])) expect_true(class(data) == "data.frame") }) test_that("flatten data.frames", { mvdata <- datamat(mv_y, mv_x) vdata <- datamat(v_y, v_x) data <- datamat(mvdata, vdata, flatten = TRUE) expect_true(class(data) == "data.frame") expect_true(!"data.frame" %in% class(data[, 1])) }) } forecast/tests/testthat/test-acf.R0000644000176200001440000000075514003673410016720 0ustar liggesusers# A unit test for Acf() function if (require(testthat)) { test_that("tests for acf", { out <- Acf(wineind, lag.max = 10, type = "partial", plot = FALSE) expect_that(length(out$lag), equals(10)) expect_that(out$acf, equals(Pacf(wineind, lag.max = 10, plot = FALSE)$acf)) expect_equal(dim(Acf(wineind, lag.max = 10, type = "correlation", plot = FALSE)$acf), c(11L, 1L, 1L)) expect_equal(Acf(wineind, lag.max = 10, type = "correlation", plot = TRUE)$acf[1, 1, 1], 1) }) } forecast/tests/testthat/test-modelAR.R0000644000176200001440000002373314133711705017517 0ustar liggesusers# A unit test for modelAR.R if (require(testthat)) { context("Testing modelAR") test_that("Tests for modelAR", { ## Set up functions to match 'nnetar' behavior avnnet2 <- function(x, y, repeats = repeats, linout = TRUE, trace = FALSE, ...) { mods <- list() for (i in 1:repeats) { mods[[i]] <- nnet::nnet(x, y, linout = linout, trace = trace, ...) 
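      # each repeat fits an independently initialised network;
      # predict.avnnet2 below averages the individual predictions, mirroring
      # the ensemble behaviour of nnetar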
} return(structure(mods, class = "nnetarmodels")) } ## predict.avnnet2 <- function(model, newdata = NULL) { if (is.null(newdata)) { if (length(predict(model[[1]])) > 1) { rowMeans(sapply(model, predict)) } else { mean(sapply(model, predict)) } } else { if (NCOL(newdata) >= 2 & NROW(newdata) >= 2) { rowMeans(sapply(model, predict, newdata = newdata)) } else { mean(sapply(model, predict, newdata = newdata)) } } } ## compare residuals to 'nnetar' expect_silent({ set.seed(123) nnetar_model <- nnetar(lynx[1:100], p = 2, P = 1, size = 3, repeats = 20) set.seed(123) modelAR_model <- modelAR(lynx[1:100], FUN = avnnet2, predict.FUN = predict.avnnet2, p = 2, P = 1, scale.inputs = TRUE, size = 3, repeats = 20) res1 <- residuals(nnetar_model) res2 <- residuals(modelAR_model) }) expect_true(identical(res1, res2)) ## check re-fitting old model and compare to 'nnetar' expect_silent({ nnetar_model2 <- nnetar(lynx[101:114], model = nnetar_model) modelAR_model2 <- modelAR(lynx[101:114], FUN = avnnet2, predict.FUN = predict.avnnet2, model = modelAR_model) res1 <- residuals(nnetar_model2) res2 <- residuals(modelAR_model2) }) expect_true(identical(res1, res2)) ## compare forecasts with 'nnetar' expect_silent({ f1 <- forecast(nnetar_model)$mean f2 <- forecast(modelAR_model)$mean }) expect_true(identical(f1, f2)) ## test lambda and compare to 'nnetar' expect_silent({ set.seed(123) oilnnet_nnetar <- nnetar(airmiles, lambda = 0.15, size = 1, repeats = 20) set.seed(123) oilnnet_modelAR <- modelAR(airmiles, FUN = avnnet2, predict.FUN = predict.avnnet2, scale.inputs = TRUE, lambda = 0.15, size = 1, repeats = 20) }) expect_true(identical(residuals(oilnnet_nnetar, type = "response"), residuals(oilnnet_modelAR, type = "response"))) expect_true(length(forecast(oilnnet_modelAR)$mean) == 10) ## check print input name expect_silent(woolyrnqnnet <- modelAR(woolyrnq, FUN = avnnet2, predict.FUN = predict.avnnet2, scale.inputs = TRUE, p = 1, P = 0, size = 8, repeats = 10)) expect_output(print(woolyrnqnnet), regexp = "Series: woolyrnq") ## check default forecast length expect_true(length(forecast(woolyrnqnnet)$mean) == 2 * frequency(woolyrnq)) # # Test with single-column xreg (which might be a vector) expect_silent({ set.seed(123) woolyrnqnnet <- modelAR(woolyrnq, FUN = avnnet2, predict.FUN = predict.avnnet2, scale.inputs = TRUE, xreg = 1:length(woolyrnq), p = 2, P = 2, size = 4, repeats = 10) set.seed(123) woolyrnqnnet2 <- nnetar(woolyrnq, xreg = 1:length(woolyrnq), p = 2, P = 2, size = 4, repeats = 10) }) expect_true(all(dim(woolyrnqnnet$xreg) == c(119, 1))) expect_true(length(forecast(woolyrnqnnet, xreg = 120:130)$mean) == 11) expect_true(identical(forecast(woolyrnqnnet, xreg = 120:130)$mean, forecast(woolyrnqnnet2, xreg = 120:130)$mean)) ## Test with multiple-column xreg set.seed(123) winennet <- modelAR(wineind, FUN = avnnet2, predict.FUN = predict.avnnet2, scale.inputs = TRUE, xreg = cbind(bizdays(wineind), fourier(wineind, 1)), p = 2, P = 1, size = 4, repeats = 10) set.seed(123) winennet2 <- nnetar( wineind, xreg = cbind(bizdays(wineind), fourier(wineind, 1)), p = 2, P = 1, size = 4, repeats = 10 ) expect_true(length(forecast(winennet, h = 2, xreg = matrix(2, 2, 3))$mean) == 2L) ## Test if h matches xreg expect_true(length(forecast(winennet, h = 5, xreg = matrix(2, 2, 3))$mean) == 2L) expect_warning(forecast(winennet2, xreg = matrix(2, 2, 3))$mean, "different column names") %>% expect_equal(forecast(winennet, xreg = matrix(2, 2, 3))$mean) ## Test that P is ignored if m=1 expect_warning(wwwnnet <- modelAR(WWWusage, FUN = 
avnnet2, predict.FUN = predict.avnnet2, scale.inputs = TRUE, xreg = 1:length(WWWusage), p = 2, P = 4, size = 3, repeats = 10)) ## Test passing arguments to nnet expect_silent({ set.seed(123) wwwnnet <- modelAR(WWWusage, FUN = avnnet2, predict.FUN = predict.avnnet2, scale.inputs = TRUE, xreg = 1:length(WWWusage), p = 2, P = 0, size = 3, decay = 0.1, repeats = 10) set.seed(123) wwwnnet2 <- nnetar(WWWusage, size = 3, p = 2, P = 0, xreg = 1:length(WWWusage), decay = 0.1, repeats = 10) }) expect_true(identical( forecast(wwwnnet, h = 2, xreg = (length(WWWusage) + 1):(length(WWWusage) + 5))$mean, forecast(wwwnnet2, h = 2, xreg = (length(WWWusage) + 1):(length(WWWusage) + 5))$mean )) ## Test output format correct when NAs present airna <- airmiles airna[12] <- NA expect_warning(airnnet <- modelAR(airna, FUN = avnnet2, predict.FUN = predict.avnnet2, scale.inputs = TRUE, p = 1, size = 0, skip = TRUE, Wts = c(0, 1), maxit = 0, repeats = 5)) expect_equal(airnnet$fitted[-c(1, 12, 13)], airna[-c(11, 12, length(airna))]) ## Test model argument expect_silent({ set.seed(123) fit1 <- modelAR( WWWusage, FUN = avnnet2, predict.FUN = predict.avnnet2, scale.inputs = TRUE, xreg = 1:length(WWWusage), p = 3, size = 2, lambda = 2, decay = 0.5, maxit = 25, repeats = 7 ) fit2 <- modelAR(WWWusage, xreg = 1:length(WWWusage), model = fit1) set.seed(123) fit3 <- nnetar(WWWusage, xreg = 1:length(WWWusage), p = 3, size = 2, lambda = 2, decay = 0.5, maxit = 25, repeats = 7) }) # Check some model parameters expect_true(identical(fit1$p, fit2$p)) expect_true(identical(fit1$lambda, fit2$lambda)) expect_true(identical(fit1$modelargs, fit2$modelargs)) # Check fitted values are all the same expect_true(identical(fitted(fit1), fitted(fit2))) expect_true(identical(fitted(fit1, h = 2), fitted(fit2, h = 2))) # Check residuals all the same expect_true(identical(residuals(fit1), residuals(fit2))) # Check number of neural nets expect_true(identical(length(fit1$model), length(fit2$model))) # Check neural network weights all the same expect_true(identical(fit1$model[[1]]$wts, fit2$model[[1]]$wts)) expect_true(identical(fit1$model[[7]]$wts, fit2$model[[7]]$wts)) ## compare results with 'nnetar' expect_true(identical(fitted(fit1), fitted(fit3))) expect_true(identical(fitted(fit1, h = 3), fitted(fit3, h = 3))) expect_true(identical(residuals(fit1, type = "response"), residuals(fit3, type = "response"))) ## Check subset argument using indices expect_silent({ set.seed(123) airnnet <- modelAR(airmiles, , FUN = avnnet2, predict.FUN = predict.avnnet2, scale.inputs = TRUE, subset = 11:20, p = 1, size = 1, repeats = 10) set.seed(123) airnnet2 <- nnetar(airmiles, , subset = 11:20, p = 1, size = 1, repeats = 10) }) expect_true(identical(which(!is.na(fitted(airnnet))), 11:20)) expect_true(identical(fitted(airnnet), fitted(airnnet2))) expect_true(identical(forecast(airnnet, h = 5)$mean, forecast(airnnet2, h = 5)$mean)) ## Check subset argument using logical vector expect_silent({ set.seed(123) airnnet <- modelAR(airmiles, FUN = avnnet2, predict.FUN = predict.avnnet2, scale.inputs = TRUE, subset = c(rep(F, 10), rep(T, 10), rep(F, length(airmiles) - 20)), p = 1, size = 1, repeats = 10) set.seed(123) airnnet2 <- nnetar(airmiles, , subset = c(rep(F, 10), rep(T, 10), rep(F, length(airmiles) - 20)), p = 1, size = 1, repeats = 10) }) expect_true(identical(which(!is.na(fitted(airnnet))), 11:20)) expect_true(identical(fitted(airnnet), fitted(airnnet2))) expect_true(identical(forecast(airnnet, h = 5)$mean, forecast(airnnet2, h = 5)$mean)) ## compare prediction 
intervals with 'nnetar' expect_silent({ set.seed(456) f1 <- forecast(airnnet, h = 5, PI = TRUE, npaths = 100) set.seed(456) f2 <- forecast(airnnet2, h = 5, PI = TRUE, npaths = 100) }) expect_true(identical(f1$upper, f2$upper)) expect_true(identical(f1$lower, f2$lower)) ## Check short and constant data expect_warning(nnetfit <- modelAR(rep(1, 10), FUN = avnnet2, predict.FUN = predict.avnnet2, p = 2, P = 0, size = 1, repeats = 1, lambda = 0.1), "Constant data") expect_true(nnetfit$p == 1) expect_true(is.null(nnetfit$lambda)) expect_true(is.null(nnetfit$scalex)) expect_error(nnetfit <- modelAR(rnorm(2), FUN = avnnet2, predict.FUN = predict.avnnet2, p = 1, P = 0, size = 1, repeats = 1), "Not enough data") expect_silent(nnetfit <- modelAR(rnorm(3), FUN = avnnet2, predict.FUN = predict.avnnet2, p = 1, P = 0, size = 1, repeats = 1)) expect_true(nnetfit$p == 1) expect_silent(nnetfit <- modelAR(rnorm(3), FUN = avnnet2, predict.FUN = predict.avnnet2, p = 2, P = 0, size = 1, repeats = 1)) expect_true(nnetfit$p == 2) expect_warning(nnetfit <- modelAR(rnorm(3), FUN = avnnet2, predict.FUN = predict.avnnet2, p = 3, P = 0, size = 1, repeats = 1), "short series") expect_true(nnetfit$p == 2) expect_warning(nnetfit <- modelAR(rnorm(3), FUN = avnnet2, predict.FUN = predict.avnnet2, p = 4, P = 0, size = 1, repeats = 1), "short series") expect_true(nnetfit$p == 2) expect_warning(nnetfit <- modelAR(rnorm(10), FUN = avnnet2, predict.FUN = predict.avnnet2, xreg = rep(1, 10), p = 2, P = 0, size = 1, repeats = 1, lambda = 0.1), "Constant xreg") expect_true(is.null(nnetfit$scalexreg)) expect_warning(nnetfit <- modelAR(rnorm(3), FUN = avnnet2, predict.FUN = predict.avnnet2, xreg = matrix(c(1, 2, 3, 1, 1, 1), ncol = 2), p = 1, P = 0, size = 1, repeats = 1, lambda = 0.1), "Constant xreg") expect_true(is.null(nnetfit$scalexreg)) }) } forecast/tests/testthat/test-hfitted.R0000644000176200001440000000252714003673410017615 0ustar liggesusers# A unit test for h-step fits if (require(testthat)) { context("Tests for h-step fits with hfitted") test_that("variance test on h-step fits", { mod1 <- ets(WWWusage, model = "AAN", damped = TRUE) h1 <- fitted(mod1, h = 1) h2 <- fitted(mod1, h = 2) j <- !is.na(h1) & !is.na(h2) expect_lt(var(diff(h1[j])), var(diff(h2[j]))) # hfitted automatic function selection h2_1 <- hfitted(mod1, h = 2) expect_true(identical(h2, h2_1)) mod2 <- Arima(WWWusage, order = c(1, 1, 1)) h1 <- fitted(mod2, h = 1) h2 <- fitted(mod2, h = 2) j <- !is.na(h1) & !is.na(h2) expect_lt(var(diff(h1[j])), var(diff(h2[j]))) mod3 <- arfima(WWWusage) h1 <- fitted(mod3, h = 1) h2 <- fitted(mod3, h = 2) j <- !is.na(h1) & !is.na(h2) expect_lt(var(diff(h1[j])), var(diff(h2[j]))) # mod3 <- tbats(WWWusage) # h1 <- fitted(mod3, h=1) # h2 <- fitted(mod3, h=2) # j <- !is.na(h1) & !is.na(h2) # expect_lt(var(diff(h1[j])), var(diff(h2[j]))) # # mod4 <- bats(WWWusage) # h1 <- fitted(mod4, h=1) # h2 <- fitted(mod4, h=2) # j <- !is.na(h1) & !is.na(h2) # expect_lt(var(diff(h1[j])), var(diff(h2[j]))) mod5 <- nnetar(WWWusage) h1 <- fitted(mod5, h = 1) h2 <- fitted(mod5, h = 2) j <- !is.na(h1) & !is.na(h2) expect_lt(var(diff(h1[j])), var(diff(h2[j]))) }) } forecast/tests/testthat/test-msts.R0000644000176200001440000000045014003673410017145 0ustar liggesusers# A unit test for msts.R if (require(testthat)) { context("Test msts.R") test_that("tests for msts() and print.msts()", { x <- msts(taylor, seasonal.periods = c(48, 336), ts.frequency = 48, start = 2000 + 22 / 52) expect_output(print(x), regexp = "Multi-Seasonal Time Series") }) } 
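# A possible extra check (sketch, not part of the original file; it assumes
# the seasonal periods passed to msts() are stored in the object's "msts"
# attribute):
# x <- msts(taylor, seasonal.periods = c(48, 336), ts.frequency = 48)
# stopifnot(all(attr(x, "msts") == c(48, 336)))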
forecast/tests/testthat/test-accuracy.R0000644000176200001440000000315014003673410017751 0ustar liggesusers# A unit test for accuracy() function if (require(testthat)) { context("Tests on input") test_that("tests for a non-forecast object", { expect_that(accuracy(USAccDeaths), throws_error()) }) context("Tests on output") test_that("tests for dimension", { train <- window(USAccDeaths, start = c(1973, 1), end = c(1976, 12)) test <- window(USAccDeaths, start = c(1977, 1)) fcasts <- forecast(train, h = 6) expect_that(dim(accuracy(fcasts)), equals(c(1, 7))) expect_that(dim(accuracy(fcasts, test)), equals(c(2, 8))) expect_false( all(dim(accuracy(fcasts, test, test = 1:2)) == dim(accuracy(fcasts, test))) ) expect_that(accuracy(fcasts, test = 1:length(train)), equals(accuracy(fcasts))) }) test_that("tests for accuracy", { # Test arima fitarima <- Arima(USAccDeaths, order = c(0, 1, 1), seasonal = c(0, 1, 1)) accuracyarima <- accuracy(fitarima)[1, "RMSE"] accuracyarimasim <- accuracy(Arima(simulate(fitarima, seed = 123), order = c(0, 1, 0), seasonal = c(0, 0, 1)))[1, "RMSE"] expect_lt(accuracyarima, accuracyarimasim) # Test ets fitets <- ets(AirPassengers, model = "MAM", damped = TRUE) accuracyets <- accuracy(fitets)[1, "RMSE"] accuracyetssim <- accuracy(ets(simulate(fitets, seed = 123), model = "MAM", damped = TRUE))[1, "RMSE"] expect_lt(accuracyets, accuracyetssim) # Test lm month <- factor(rep(1:12, 14)) fitlm <- lm(wineind[1:168] ~ month) accuracylm <- accuracy(fitlm)[1, "RMSE"] accuracylmsim <- accuracy(lm(simulate(fitlm, seed = 123)[, 1] ~ month))[1, "RMSE"] expect_gt(accuracylm, accuracylmsim) }) } forecast/tests/testthat/test-arfima.R0000644000176200001440000000230714053117140017417 0ustar liggesusers# A unit test for arfima.R if (require(testthat)) { arfima1 <- arfima(WWWusage, estim = "mle") arfima2 <- arfima(WWWusage, estim = "ls") arfimabc <- arfima(WWWusage, estim = "mle", lambda = 0.75, biasadj = FALSE) arfimabc2 <- arfima(WWWusage, estim = "mle", lambda = 0.75, biasadj = TRUE) test_that("test accuracy(), fitted(), and residuals().", { expect_true(all(arimaorder(arfima1) == arimaorder(arfima2))) fitarfima <- fitted(arfima1) residarfima <- residuals(arfima2) expect_true(length(fitarfima) == length(residarfima)) expect_true(all(getResponse(arfima1) == WWWusage)) expect_false(identical(arfimabc$fitted, arfimabc2$fitted)) expect_error(accuracy(arfima1), NA) expect_equal(mean(residuals(arfima1)), accuracy(arfima1)[, "ME"]) }) test_that("test forecast.fracdiff()", { expect_true(all(forecast(arfima1, fan = TRUE)$mean == forecast(arfima1, fan = FALSE)$mean)) expect_error(forecast(arfimabc, level = -10)) expect_error(forecast(arfimabc, level = 110)) expect_false(identical(forecast(arfimabc, biasadj = FALSE), forecast(arfimabc, biasadj = TRUE))) expect_output(print(summary(forecast(arfimabc))), regexp = "Forecast method: ARFIMA") }) } forecast/tests/testthat/test-boxcox.R0000644000176200001440000000422614003673410017466 0ustar liggesusers# A unit test for boxcox transformations if (require(testthat)) { context("Tests for BoxCox") test_that("tests for biasadj automatically set based on model fit", { # lm fit <- tslm(USAccDeaths ~ trend, lambda = 0.5, biasadj = TRUE) expect_true(all.equal(forecast(fit), forecast(fit, biasadj = TRUE))) # HoltWintersZZ fit <- ses(USAccDeaths, initial = "simple", lambda = 0.5, biasadj = TRUE) expect_true(all.equal(forecast(fit), forecast(fit, biasadj = TRUE))) # arfima x <- fracdiff::fracdiff.sim(100, ma = -.4, d = .3)$series fit <- arfima(x) 
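    # this arfima fit uses no lambda, so an explicit biasadj = TRUE should
    # make no difference to the forecasts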
expect_true(all.equal(forecast(fit), forecast(fit, biasadj=TRUE))) #arima fit1 <- Arima(USAccDeaths, order = c(0,1,1), seasonal = c(0,1,1), lambda = 0.5, biasadj = TRUE) fit2 <- auto.arima(USAccDeaths, max.p=0, max.d=1, max.q=1, max.P=0, max.D=1, max.Q=1, lambda = 0.5, biasadj = TRUE) expect_true(all.equal(forecast(fit1), forecast(fit1, biasadj=TRUE))) expect_true(all.equal(forecast(fit2), forecast(fit2, biasadj=TRUE))) expect_true(all.equal(forecast(fit1)$mean, forecast(fit2)$mean)) # ets fit <- ets(USAccDeaths, model = "ANA", lambda = 0.5, biasadj = TRUE) expect_true(all.equal(forecast(fit), forecast(fit, biasadj = TRUE))) # bats # fit <- bats(USAccDeaths, use.box.cox = TRUE, biasadj = TRUE) # expect_true(all.equal(forecast(fit), forecast(fit, biasadj=TRUE))) # tbats # fit <- tbats(USAccDeaths, use.box.cox = TRUE, biasadj = TRUE) # expect_true(all.equal(forecast(fit), forecast(fit, biasadj=TRUE))) }) test_that("tests for automatic lambda selection in BoxCox transformation", { lambda_auto <- BoxCox.lambda(USAccDeaths) # lm fit <- tslm(USAccDeaths ~ trend, lambda = "auto", biasadj = TRUE) expect_equal(as.numeric(fit$lambda), lambda_auto, tolerance=1e-5) # ets fit <- ets(USAccDeaths, model = "ANA", lambda = "auto", biasadj = TRUE) expect_equal(as.numeric(fit$lambda), lambda_auto, tolerance=1e-5) # arima fit <- Arima(USAccDeaths, order = c(0,1,1), seasonal = c(0,1,1), lambda = "auto", biasadj = TRUE) expect_equal(as.numeric(fit$lambda), lambda_auto, tolerance=1e-5) }) } forecast/tests/testthat/test-arima.R0000644000176200001440000001172514003673410017257 0ustar liggesusers# A unit test for Arima() function if (require(testthat)) { context("Tests on input") test_that("tests for a non-ts object", { set.seed(123) abc <- rnorm(50, 5, 1) fit <- Arima(abc, order = c(2, 0, 1)) expect_that(fit$arma, equals(c(2, 1, 0, 0, 1, 0, 0))) }) test_that("tests for a ts with the seasonal component", { fit <- Arima(wineind, order = c(1, 1, 1), seasonal = c(0, 1, 1)) expect_that(fit$arma, equals(c(1, 1, 0, 1, 12, 1, 1))) }) test_that("tests for ARIMA errors", { fit <- Arima(wineind, order = c(1, 1, 1), seasonal = c(0, 1, 1)) expect_that(residuals(fit, type = "regression"), equals(wineind)) }) test_that("tests for arimaorder", { for (ar in 1:5) { for (i in 0:1) { for (ma in 1:5) { fitarima <- Arima(lynx, order = c(ar, i, ma), method = "ML", include.constant = TRUE, lambda = 0.5) arextracted <- fitarima$arma[1] iextracted <- fitarima$arma[6] maextracted <- fitarima$arma[2] expect_true(all(arimaorder(fitarima) == c(arextracted, iextracted, maextracted))) expect_true(all(names(arimaorder(fitarima)) == c("p", "d", "q"))) expect_true(arimaorder(fitarima)["p"] == ar) expect_true(arimaorder(fitarima)["d"] == i) expect_true(arimaorder(fitarima)["q"] == ma) } } } # Test ar arMod <- ar(lynx, order.max = 2) expect_true(arimaorder(arMod)["p"] == 2) expect_true(arimaorder(arMod)["d"] == 0) expect_true(arimaorder(arMod)["q"] == 0) expect_true(all(names(arimaorder(arMod)) == c("p", "d", "q"))) # Test SARIMA sarimaMod <- Arima(wineind, order = c(1, 1, 2), seasonal=c(0, 1,1)) expect_true(all(names(arimaorder(sarimaMod)) == c("p", "d", "q", "P", "D", "Q", "Frequency"))) expect_true(arimaorder(sarimaMod)["p"] == 1) expect_true(arimaorder(sarimaMod)["d"] == 1) expect_true(arimaorder(sarimaMod)["q"] == 2) expect_true(arimaorder(sarimaMod)["P"] == 0) expect_true(arimaorder(sarimaMod)["D"] == 1) expect_true(arimaorder(sarimaMod)["Q"] == 1) expect_true(arimaorder(sarimaMod)["Frequency"] == frequency(wineind)) # Test fracdiff set.seed(4) 
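    # fracdiff estimates a fractional differencing parameter, so
    # arimaorder() should report a d somewhere in [0, 1] rather than an
    # integer count of differences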
forecast/tests/testthat/test-thetaf.R0000644000176200001440000000075714003673410017444 0ustar liggesusers
# A unit test for thetaf.R
if (require(testthat)) {
  test_that("test thetaf()", {
    thetafc <- thetaf(WWWusage)$mean
    expect_true(all(thetafc == thetaf(WWWusage, fan = TRUE)$mean))
    expect_error(thetaf(WWWusage, level = -10))
    expect_error(thetaf(WWWusage, level = 110))
    # Constant series should not error
    series <- ts(rep(950, 20), frequency = 4)
    constantForecast <- expect_error(thetaf(series), NA)
    expect_true(is.constant(round(constantForecast$mean, 12)))
  })
}
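# A minimal sketch of the thetaf() behaviour pinned down by the test above,
# assuming the forecast package is attached. The test asserts that fan = TRUE
# leaves the point forecasts unchanged; the expectation here is that it only
# alters the prediction-interval levels.
library(forecast)
fc <- thetaf(WWWusage, h = 10)
fan <- thetaf(WWWusage, h = 10, fan = TRUE)
all(fc$mean == fan$mean)  # TRUE: same theta point forecasts either way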
forecast/tests/testthat/test-clean.R0000644000176200001440000000256614053104600017246 0ustar liggesusers
# A unit test for na.interp() and tsclean()
if (require(testthat)) {
  test_that("tests for na.interp", {
    # Test nonseasonal interpolation
    expect_true(all(na.interp(c(1, 2, 3, NA, 5, 6, 7)) == 1:7))
    # Test for identical on series without NAs
    expect_true(all(na.interp(wineind) == wineind))
    # Test seasonal interpolation
    testseries <- ts(rep(1:7, 5), frequency = 7)
    testseries[c(1, 3, 11, 17)] <- NA
    expect_true(sum(abs(na.interp(testseries) - rep(1:7, 5))) < 1e-14)
    # Test length of output
    expect_true(length(testseries) == length(na.interp(testseries)))
  })

  test_that("tests for tsclean", {
    # Test for no NAs
    expect_false(any(is.na(tsclean(gold))))
    # Test for removing outliers in seasonal series
    testseries <- ts(rep(1:7, 5), frequency = 7)
    testseries[c(2, 4, 14)] <- 0
    expect_true(sum(abs(tsclean(testseries) - rep(1:7, 5))) < 1e-14)
    # Test for NAs left with replace.missing = FALSE argument
    testseries[c(2, 4, 14)] <- NA
    expect_true(any(is.na(tsclean(testseries, replace.missing = FALSE))))
    # Test for outliers in a series
    expect_equal(sum(abs(wineind - tsclean(wineind)) > 1e-6), 1)
    # Test for identical on series without NAs or outliers
    expect_true(identical(USAccDeaths, tsclean(USAccDeaths)))
    # Test length of output
    expect_true(length(tsclean(testseries)) == length(testseries))
  })
}
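# A minimal sketch of the cleaning functions exercised above, assuming the
# forecast package is attached: na.interp() fills missing values (seasonally
# when the series has frequency > 1), and tsclean() additionally replaces
# outliers, filling NAs too unless replace.missing = FALSE.
library(forecast)
x <- ts(rep(1:7, 5), frequency = 7)
x[c(3, 11)] <- NA                             # knock out two observations
sum(abs(na.interp(x) - rep(1:7, 5))) < 1e-14  # seasonal pattern restored
x[15] <- 100                                  # plant an outlier
tsclean(x)[15]                                # outlier replaced; NAs filled too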
forecast/tests/testthat/Rplots.pdf0000644000176200001440000064137514133712356017055 0ustar liggesusers
[binary PDF stream omitted; Title: "R Graphics Output", Producer: R 4.1.1, Created: 2021-10-20; plot pages written while the test suite ran]
RxUFD+y&XQ5KCyb{&u[me[ýgNc9ub FD+YwArMLMdb7c<\IbW%MWd#=˝(1X>8 +I͆!{9-S.ERUEZ$2%,"# 3㷰zƔAXlO5cJnmxI4Kzn [aQ_?|I;VTTT&HRwnrEfVyh>.dK]`D6󼵱Lld`97u̕cK)Խ/ Qw@q*Ӕ1b FDcFˑ5OssN 3\S7/y=xI.\dVT@$a&bQ/UA rZ.2p>%@}d"M"v{+^#5qqT@$|k(zdm9kggSFGKzn[rٕ騯ER$X]X@x *wkSUcE)sbf,\ԝEmm[AO礯{9#8]`D{0t$&)L-{ tےˮJ?y{+^;vao"ţ ~$gqcM듮 ycQ uלme[EeqŸ\#E#앴3,Nb,) 왺Ey ;z`W%%ѯFtDa/ͨSZ*su41ūٶKx FӾ/ʰ͑|tE?'RakGǣT?'kE֝kI68iWڽ/&r`He&Bչl亶%UenB&ūJ.]ފ7XxIxX8*[1>T`Dcx7'eŠn^-N➱ȤynmSnފ [] [Px bώ -gXQQ(J7)^Ͷ%]nvoK}oZ$?zw9ѡX$(Nb3a2uqBt۔ۮ2IKĘPdT |srYXS3 8-䏺% ۶v ٽ/)\^9@s\n̔֜[$ufddM,]Swۖ\v ڽ/SypRDB_{\`DqM9W%EݼZI㫻cRaW%1~. }q* #Iʛ}^c\(A,nQN-%v{+^O؊ē>xI|We9k:hܶۖͅ\v ٽ/^z.&ɒj8݉ ;S^8Oe$Gl a*)]u)e%ч'$g,WrƼJb,)\pS ]ފO>9඄CqFFDqlT>Jsq)BU{ nnR%)®voK"l _gOU`γ*@b9kj`3EԌe5ܶKxI G19ا"a.0" I+*0&%4[YDkqے.aW%qK:mVCWKR|vXN9s+t39[Ig]=7ʶ%]®voKvKӳHXwNI5@#TO/פzFͩ>y1P<y1ۖpun`W%u-^a9-b# r͒8IbEu@&c+~EޛmK.gѽ/?y"Fb<eo1rYWXSW0g "lIy7}vٽ/܅A0`Q+r'1TfK&p_tgl[rEfV${ɏjT]`D=8Md< aE͓4oU92me[st&>Wxg#!AcIY 97{`um[.K]ފ`fF3Q{OFmʱFwXSmo3c .9ss۔.b7%p%~p.`dD9R)%uRW5gl`wI9:P.aW%f4{-LuȍXJʕr'1T!IO+yHܶ岋xIiLfq(($)GĒn^-lnRmKnKފ3ۊS. i# Ɗ92؊]|ۖvٽ/00׹2瑨/$ugcM;7Lí}ѽyN ]®voK>\O-(  LkY,^W۔<@Q]/}]&~>`Ѧܶx 6B-/&=#[|u%E-|l 'Q7)ܖٽ/>u-5:%׸qe$ I9$VUs$̺Ijm92v{+^fۊGw\# xS$>rG{,϶%]?Kx488Ǩ">ZSlN9kjdN\mC[SNv{+^2%qJ>@Qg)gqcMsg8/r-v{+^#uk񒘱!T")X<+YƊj9B'u(ۖ\v ڽ/,Y`&\o&Rϣ*@YO~'eȌ- [wql[r%jV$zQ!I ±HqFDy)oIY"DцW2uXɶ%]nvKX`ܣ"~g̻!m8*N"dRy!9C nRfZLi+ƉL,$dѐ $‰ ̠@KkLjSRGjZ$V%17}oQQy,3"/7Ĥø#xz8ۤTں/ 2s$d&pXFDQXS- z2uYmSnފSZF"8ۜfYԗL9k 'a+էnRmEfcb]< ',.r/̶R U$rEEEXu!տ5ImbCK(I)-ROiJ~("4,hyE{Z``]W$c+ݑuE"вTr 1Tqx[7)\-AUOV]㏍`>k<Ĥ.~4> endobj 56 0 obj << /Length 2612 /Filter /FlateDecode >> stream xYɎ]5ݿ2Yp&/ C"w\㩪|3+{8|v-9ѢK-%ݷox\wߛ_w?]{.://-ٻގy|^_F;js9?܀b'1`xNq眏݈GK9::#dy+6\QmI>K14gEXiF?SאQ l(/Hlķc?| 5Й/a'1ԣ#àɺ5Ț߽8nk]&DlGp};]?}G TXNr`:(~KdAt6~w'9Kw΅1 m[7y h2ԑl vTXfO~s?2 :?a<1l ܍zA9M̡*=L[PTLo6T&4Dg(Gcxn 8؂ؙ]ˀԷaR:mSO[e.MdӤFvJG%Z8tx1("`ԑDODYU* UlDo㠳"amߐ*rYDs3$D6KO@` 8*@O*۹*̉C5#`Z(0x Q.tMU* 'ztBUҕ}TبUtK#րmL G A2vTvH'JWi-dzYe ]ŧJm\~t4{K8k3|@1$#vV7O1t m--SQUM0;6)s2U:NϽ%q,An3yF)*JҦk(@d-#R Zɠŷɕ^ֱYi{ 6*t@uIShLۖڂ:%.]Oxf$#W"7^{[ǂft-Rv"wdYS1"%/tm-Ҳb˲-HzoiX%0gw-BNDR8B#z[ q+;2]Elxn4mG Yӷʕu,H?cV«c棋pg-ˈ"UĶ ؘ..ۂ62ݝ[2H\]s6P\@n`#:/j! 
forecast/tests/testthat/test-bats.R0000644000176200001440000000141614003673410017113 0ustar liggesusers# A unit test for bats function
if (require(testthat)) {
  context("Tests on input and output")
  test_that("tests for a non-ts object", {
    set.seed(123)
    abc <- rnorm(50, 5, 1)
    fit <- bats(abc, use.box.cox = TRUE, use.parallel = FALSE)
    expect_false(fit$lambda == 0)
    expect_output(print(fit), "Seed States")
    expect_equal(length(residuals(fit)), 50L)
    plot(fit)
    expect_equal(bats(1, use.box.cox = TRUE, use.parallel = FALSE)$AIC, -Inf)
    expect_equal(bats(-1, use.box.cox = TRUE, use.parallel = FALSE)$AIC, -Inf)
  })

  test_that("Test parallel of bats", {
    abc <- rnorm(50, 5, 1)
    skip_on_cran()
    skip_if(identical(Sys.getenv("GITHUB_ACTIONS"), "true"))
    expect_gt(bats(abc, use.box.cox = TRUE, use.parallel = TRUE)$lambda, 0.999)
  })
}
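# Illustrative sketch (not run as part of this suite): a minimal example of the
# interface these tests exercise. `USAccDeaths` is just an example series, and
# the call assumes the forecast package is attached.
#   fit <- bats(USAccDeaths, use.parallel = FALSE)
#   fc <- forecast(fit, h = 24)   # two years of monthly forecasts
#   plot(fc)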
forecast/tests/testthat/test-graph.R0000644000176200001440000000116714133710207017266 0ustar liggesusers# A unit test for graph.R
if (require(testthat)) {
  context("Testing graph")
  test_that("Tests for seasonplot()", {
    expect_error(seasonplot(airmiles))
    seasonplot(ts(gold, frequency = 7))
    seasonplot(woolyrnq)
    seasonplot(wineind)
    seasonplot(wineind, year.labels = TRUE)
    seasonplot(wineind, year.labels.left = TRUE)
    # seasonplot(taylor)
  })

  test_that("Tests for tsdisplay()", {
    expect_silent(tsdisplay(airmiles, ci.type = "ma"))
    expect_silent(tsdisplay(1:20))
    expect_silent(tsdisplay(airmiles, plot.type = "scatter"))
    expect_silent(tsdisplay(airmiles, plot.type = "spectrum"))
  })
}
forecast/tests/testthat/test-dshw.R0000644000176200001440000000207514003673410017131 0ustar liggesusers# A unit test for dshw function
if (require(testthat)) {
  context("Tests on dshw()")
  test_that("Test dshw()", {
    # Test negative values and period1 and period2 not specified
    set.seed(345)
    expect_error(dshw(-10:10))
    expect_error(dshw(abs(rnorm(100))))
    # Test fits with period1 and period2 swapped
    set.seed(5555)
    t <- seq(0, 1, by = 0.1)
    x <- exp(sin(2 * pi * t) + cos(2 * pi * t * 4) + rnorm(length(t), 0, 0.1))
    fit1 <- dshw(x, period1 = 4, period2 = 2)$mean
    fit2 <- dshw(x, period1 = 2, period2 = 4)$mean
    expect_true(all(fit1 == fit2))
    # Test fits with lambda specified and armethod = FALSE
    y <- x + 1
    fit3 <- dshw(y, period1 = 2, period2 = 4, lambda = 2, biasadj = FALSE)
    fit4 <- dshw(y, period1 = 2, period2 = 4, lambda = 2, biasadj = TRUE)
    expect_false(identical(fit3$mean, fit4$mean))
    fit5 <- dshw(x, period1 = 2, period2 = 4, armethod = FALSE)
    # Test fits with inappropriate periods specified
    expect_error(dshw(x, period1 = 2, period2 = 2))
    expect_error(dshw(x, period1 = 2, period2 = 4.1))
  })
}
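# Illustrative sketch (not run as part of this suite): dshw() fits two nested
# seasonal periods, e.g. the daily and weekly cycles of half-hourly data. With
# the `taylor` series shipped in the forecast package, a typical call would be
#   fc <- dshw(taylor, period1 = 48, period2 = 336, h = 48)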
forecast/tests/testthat/test-tbats.R0000644000176200001440000000500514133712547017306 0ustar liggesusers# A unit test for tbats function
if (require(testthat)) {
  context("Tests on tbats() functions")
  test_that("Test simple cases for tbats", {
    expect_error(tbats(data.frame(x1 = 1, x2 = 2), use.parallel = FALSE))
    expect_warning(tbats(c(1:5, NA, 7:9), use.parallel = FALSE))
    expect_true(all(forecast(tbats(rep(1, 100), use.parallel = FALSE))$mean == 1))
  })

  test_that("Test tbats() and forecasts", {
    # Fit tbats models
    tbatsfit1 <- tbats(subset(wineind, end = 50), use.parallel = FALSE)
    tbatsfit2 <- tbats(WWWusage, use.parallel = FALSE)
    tbatsfit3 <- tbats(as.numeric(woolyrnq), seasonal.periods = frequency(woolyrnq), use.parallel = FALSE)
    tbatsfit4 <- tbats(airmiles, use.box.cox = FALSE, use.parallel = FALSE)
    # Test tbats.components
    tbats.components(tbatsfit1)
    tbats.components(tbatsfit2)
    tbats.components(tbatsfit3)
    tbats.components(tbatsfit4)
    # Test accuracy.tbats() function
    expect_output(print(accuracy(tbatsfit1)), regexp = "ME")
    expect_output(print(accuracy(tbatsfit2)), regexp = "ME")
    expect_output(print(accuracy(tbatsfit3)), regexp = "ME")
    expect_output(print(accuracy(tbatsfit4)), regexp = "ME")
    # Test summary.tbats()
    expect_output(print(summary(tbatsfit1)), regexp = "Length")
    expect_output(print(summary(tbatsfit2)), regexp = "Length")
    expect_output(print(summary(tbatsfit3)), regexp = "Length")
    expect_output(print(summary(tbatsfit4)), regexp = "Length")
    # Test fitted length
    expect_true(length(fitted(tbatsfit1)) == 50)
    expect_true(length(fitted(tbatsfit2)) == length(WWWusage))
    expect_true(length(fitted(tbatsfit3)) == length(woolyrnq))
    expect_true(length(fitted(tbatsfit4)) == length(airmiles))
    # Test length of forecast
    expect_true(length(forecast(tbatsfit1)$mean) == 2 * frequency(wineind))
    expect_true(length(forecast(tbatsfit2)$mean) == 10)
    expect_true(length(forecast(tbatsfit3)$mean) == 2 * frequency(woolyrnq))
    expect_true(length(forecast(tbatsfit4)$mean) == 10)
    # Test inappropriate levels
    expect_error(forecast(tbatsfit1, level = -10))
    expect_error(forecast(tbatsfit1, level = 110))
    # Test forecasts with fan = TRUE
    expect_true(all(forecast(tbatsfit1, fan = TRUE)$mean == forecast(tbatsfit1)$mean))
  })

  #test_that("Test tbats() with parallel", {
  #  # Tests will not run on Travis in parallel
  #  expect_output(print(tbats(woolyrnq, num.cores = 1)), regexp = "TBATS")
  #  expect_output(print(tbats(elecsales, num.cores = 1, use.trend = FALSE)), regexp = "BATS")
  #})
}
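# Illustrative sketch (not run as part of this suite): tbats() also handles
# multiple and non-integer seasonal periods supplied via msts(), e.g.
#   y <- msts(taylor, seasonal.periods = c(48, 336))
#   fit <- tbats(y, use.parallel = FALSE)
#   fc <- forecast(fit, h = 48)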
forecast/tests/testthat/test-forecast2.R0000644000176200001440000000742014053117140020051 0ustar liggesusers# A unit test for forecast2.R
if (require(testthat)) {
  context("Test forecast2.R")
  test_that("test meanf()", {
    meanfc <- mean(wineind)
    expect_true(all(meanf(wineind)$mean == meanfc))
    bcforecast <- meanf(wineind, lambda = -0.5)$mean
    expect_true(max(bcforecast) == min(bcforecast))
    expect_true(all(meanf(wineind, fan = TRUE)$mean == meanfc))
    expect_error(meanf(wineind, level = -10))
    expect_error(meanf(wineind, level = 110))
    # Constant series should not error
    series <- ts(rep(950, 20), frequency = 4)
    constantForecast <- expect_error(rwf(series), NA)
    expect_true(is.constant(constantForecast$mean))
  })

  test_that("test rwf()", {
    rwfc <- rwf(airmiles)$mean
    expect_true(all(rwfc == naive(airmiles)$mean))
    expect_true(all(rwfc < rwf(airmiles, drift = TRUE)$mean))
    expect_true(all(rwf(airmiles, fan = TRUE)$mean == rwfc))
    expect_true(length(rwf(airmiles, lambda = 0.15)$mean) == 10)
    expect_false(identical(rwf(airmiles, lambda = 0.15, biasadj = FALSE)$mean,
                           rwf(airmiles, lambda = 0.15, biasadj = TRUE)$mean))
    # Constant series should not error
    series <- ts(rep(950, 20), frequency = 4)
    constantForecast <- expect_error(rwf(series), NA)
    expect_true(is.constant(constantForecast$mean))
  })

  test_that("test forecast.HoltWinters()", {
    hwmod <- stats::HoltWinters(UKgas)
    expect_true(all(forecast(hwmod, fan = TRUE)$mean == forecast(hwmod)$mean))
    expect_error(forecast(hwmod, level = -10))
    expect_error(forecast(hwmod, level = 110))
    # Forecasts transformed manually with Box-Cox should match
    # forecasts when lambda is passed as an argument
    hwmodbc <- stats::HoltWinters(BoxCox(UKgas, lambda = 0.25))
    hwfc <- forecast(hwmodbc, lambda = 0.25, biasadj = FALSE)$mean
    hwfc2 <- forecast(hwmodbc, lambda = 0.25, biasadj = TRUE)$mean
    hwbcfc <- InvBoxCox(forecast(hwmodbc)$mean, lambda = 0.25)
    expect_true(all(hwfc == hwbcfc))
    expect_false(identical(hwfc, hwfc2))
  })

  test_that("test for forecast.StructTS()", {
    structtsmod <- stats::StructTS(wineind)
    fc1 <- forecast(structtsmod)$mean
    expect_true(all(fc1 == forecast(structtsmod, fan = TRUE)$mean))
    expect_error(forecast(structtsmod, level = -10))
    expect_error(forecast(structtsmod, level = 110))
    # Forecasts transformed manually with Box-Cox should match
    # forecasts when lambda is passed as an argument
    bcseries <- BoxCox(woolyrnq, lambda = 0.19)
    fc2 <- InvBoxCox(forecast(stats::StructTS(bcseries))$mean, lambda = 0.19)
    fc3 <- forecast(stats::StructTS(bcseries), lambda = 0.19, biasadj = FALSE)$mean
    fc4 <- forecast(stats::StructTS(bcseries), lambda = 0.19, biasadj = TRUE)$mean
    expect_true(all(fc2 == fc3))
    expect_false(identical(fc3, fc4))
  })

  test_that("test croston()", {
    set.seed(1234)
    expect_error(croston(rnorm(100)))
    expect_true(all(croston(rep(0, 100))$mean == 0))
  })

  test_that("test holt()", {
    expect_output(print(summary(holt(wineind))), regexp = "Forecast method: Holt's method")
    expect_output(print(summary(holt(wineind, damped = TRUE))), regexp = "Forecast method: Damped Holt's method")
  })

  test_that("test hw()", {
    expect_output(print(summary(hw(wineind))), regexp = "Forecast method: Holt-Winters' additive method")
  })

  test_that("test naive() and snaive()", {
    # WWWusage has frequency = 1, so naive and snaive should match
    expect_true(all(snaive(WWWusage, h = 10)$mean == naive(WWWusage)$mean))
    expect_true(all(snaive(WWWusage, h = 10)$upper == naive(WWWusage)$upper))
    expect_true(all(snaive(WWWusage, h = 10)$lower == naive(WWWusage)$lower))
    # Constant series should not error
    series <- ts(rep(950, 20), frequency = 4)
    constantForecast <- expect_error(snaive(series), NA)
    expect_true(is.constant(constantForecast$mean))
  })
}
forecast/tests/testthat/test-armaroots.R0000644000176200001440000000051014003673410020165 0ustar liggesusers# A unit test for armaroots.R
if (require(testthat)) {
  context("Testing armaroots")
  test_that("Tests for plot.Arima()", {
    arimafit <- Arima(lynx, c(2, 0, 2), include.mean = FALSE)
    plot(arimafit)
    plot(arimafit, type = "ma")
    plot(arimafit, type = "ar")
    expect_warning(plot(Arima(lynx, c(0, 1, 0))))
  })
}
forecast/tests/testthat/test-subset.R0000644000176200001440000000410014003673410017460 0ustar liggesusers# A unit test for subset function
if (require(testthat)) {
  context("Tests on input")
  mtsobj <- ts(matrix(rnorm(200), ncol = 2), frequency = 4)
  test_that("tests specifying correct argument", {
    sub <- subset(wineind, month = "September")
    expect_that(length(sub), equals(tsp(sub)[2] - tsp(sub)[1] + 1))
    expect_that(round(sum(sub)), equals(338985))
    sub2 <- subset(wineind, month = "SEPT")
    expect_identical(sub, sub2)
    sub2 <- subset(wineind, month = 9)
    expect_identical(sub, sub2)
    sub2 <- subset(wineind, season = 9)
    expect_identical(sub, sub2)
    sub <- subset(woolyrnq, quarter = 1)
    expect_that(length(sub), equals(tsp(sub)[2] - tsp(sub)[1] + 1))
    expect_that(sum(sub), equals(153142))
    sub2 <- subset(woolyrnq, season = 1)
    expect_identical(sub, sub2)
    sub <- subset(wineind, subset = wineind < 25000)
    expect_that(round(sum(sub)), equals(1948985))
    expect_that(length(sub), equals(91))
    sub <- subset(mtsobj, c(1, 1, rep(0, 98)) == 1)
    expect_that(ncol(sub), equals(2))
    expect_that(nrow(sub), equals(2))
    sub <- subset(mtsobj, quarter = 1)
    expect_that(ncol(sub), equals(2))
    expect_that(nrow(sub), equals(25))
  })
  test_that("tests specifying wrong argument", {
    expect_error(subset(wineind, quarter = 1), "Data is not quarterly")
    expect_error(subset(woolyrnq, month = "January"), "Data is not monthly")
  })
  test_that("test for bad input", {
    expect_error(subset.ts(mtcars, quarter = 1), "Data must be seasonal")
    expect_error(subset(wineind, subset = c(1, 2)), "subset must be the same length as x")
    expect_error(subset(mtsobj, mtsobj < .5), "subset must be a vector of rows to keep")
    expect_error(subset(wineind, month = "Jaan"), "No recognizable months")
    expect_error(subset(wineind, season = 1:14), "Seasons must be between 1 and 12")
    expect_error(subset(wineind, month = 1:14), "Months must be between 1 and 12")
    expect_error(subset(woolyrnq, quarter = "qq1"), "No recognizable quarters")
    expect_error(subset(woolyrnq, quarter = 1:6), "Quarters must be between 1 and 4")
  })
}
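# Illustrative sketch (not run as part of this suite): subset.ts() selects
# seasons by name or number, mirroring the assertions above, e.g.
#   subset(wineind, month = "September")   # all September observations
#   subset(woolyrnq, quarter = 1)          # all first quarters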
forecast/tests/testthat/test-mforecast.R0000644000176200001440000000467314133710255020160 0ustar liggesusers# A unit test for mforecast.R
if (require(testthat)) {
  context("Test mforecast.R")
  mv_y <- ts(cbind(rnorm(120, 0, 3) + 1:120 + 20 * sin(2 * pi * (1:120) / 12),
                   rnorm(120, 3, 7) + 1:120 + 16 * sin(2 * pi * (1:120 + 6) / 12)), frequency = 12)
  mv_x <- ts(cbind(rnorm(120, 0, 8) + (1:120) / 2 + 42 * sin(2 * pi * (1:120) / 12),
                   rnorm(120, 3, 7) + (1:120) * -1 + 20 * sin(2 * pi * (1:120 + 6) / 12)), frequency = 12)
  v_y <- ts(rnorm(120, 0, 8) + (1:120) / 2 + 12 * sin(2 * pi * (1:120) / 12), frequency = 12)
  v_x <- ts(rnorm(120, 0, 1) + (1:120) * (-1) + 28 * sin(2 * pi * (1:120) / 12), frequency = 12)

  test_that("tests for is.mforecast()", {
    fit <- lm(mv_y ~ v_x)
    fcast <- forecast(fit, newdata = data.frame(v_x = 30))
    expect_true(is.mforecast(fcast))
    fit <- lm(v_y ~ v_x)
    fcast <- forecast(fit, newdata = data.frame(v_x = 30))
    expect_false(is.mforecast(fcast))
  })

  test_that("tests for mlmsplit()", {
    fit <- lm(mv_y ~ v_x)
    fit1 <- mlmsplit(fit, index = 1)
    fit2 <- mlmsplit(fit, index = 2)
    fit3 <- lm(mv_y[, 1] ~ v_x)
    fit4 <- lm(mv_y[, 2] ~ v_x)
    expect_identical(fit1$coefficients, fit3$coefficients)
    expect_identical(fit2$coefficients, fit4$coefficients)
    expect_identical(fit1$rank, fit3$rank)
    expect_identical(fit2$rank, fit4$rank)
    expect_equal(fit1$fitted.values, fit3$fitted.values)
    expect_equal(fit2$fitted.values, fit4$fitted.values)
    expect_error(mlmsplit(fit), "Must select lm")
  })

  test_that("tests for forecast.mlm()", {
    fit <- lm(mv_y ~ v_x)
    fcast <- forecast(fit, newdata = data.frame(v_x = 30))
    fit2 <- lm(mv_y[, 1] ~ v_x)
    fcast2 <- forecast(fit2, newdata = data.frame(v_x = 30))
    expect_equal(fcast$forecast[[1]]$residuals, fcast2$residuals)
  })

  test_that("tests for forecast.mts()", {
    lungDeaths <- cbind(mdeaths, fdeaths)
    fcast_b <- forecast(lungDeaths)
    fcast_m <- forecast(mdeaths)
    fcast_f <- forecast(fdeaths)
    expect_true(all.equal(fcast_b$forecast[[1]]$mean, fcast_m$mean))
    expect_true(all.equal(fcast_b$forecast[[2]]$mean, fcast_f$mean))
  })

  test_that("tests for print.mforecast()", {
    fit <- lm(mv_y ~ v_x)
    fcast <- forecast(fit, newdata = data.frame(v_x = 30))
    expect_output(print(fcast), "Series 1")
    expect_output(print(fcast), "Series 2")
  })

  test_that("tests for plot.mforecast()", {
    fit <- lm(mv_y ~ v_x)
    fcast <- forecast(fit, newdata = data.frame(v_x = 30))
    expect_silent(plot(fcast))
  })
}
forecast/tests/testthat.R0000644000176200001440000000011714003673410015202 0ustar liggesusersSys.setenv("R_TESTS" = "")
if (require(testthat)) {
  test_check("forecast")
}
forecast/src/0000755000176200001440000000000014166724667012662 5ustar liggesusersforecast/src/etscalc.c0000644000176200001440000001710314003673410014433 0ustar liggesusers#include <math.h>

#define NONE 0
#define ADD 1
#define MULT 2
#define DAMPED 1
#define TOL 1.0e-10
#define HUGEN 1.0e10
#define NA -99999.0

// Functions called by R
void etscalc(double *, int *, double *, int *, int *, int *, int *,
    double *, double *, double *, double *, double *, double *, double *, int *);
void etssimulate(double *, int *, int *, int *, int *,
    double *, double *, double *, double *, int *, double *, double *);
void etsforecast(double *, int *, int *, int *, double *, int *, double *);

// Internal functions
void forecast(double, double, double *, int, int, int, double, double *, int);
void update(double *, double *, double *, double *, double *, double *,
    int, int, int, double, double, double, double, double);

// ******************************************************************
void etscalc(double *y, int *n, double *x, int *m, int *error, int *trend, int *season,
    double *alpha, double *beta, double *gamma, double *phi, double *e, double *lik, double *amse, int *nmse)
{
    int i, j, nstates;
    double oldl, l, oldb, b, olds[24], s[24], f[30], lik2, tmp, denom[30];

    if((*m > 24) & (*season > NONE))
        return;
    else if(*m < 1)
        *m = 1;

    if(*nmse > 30)
        *nmse = 30;

    nstates = (*m)*(*season>NONE) + 1 + (*trend>NONE);

    // Copy initial state components
    l = x[0];
    if(*trend > NONE)
        b = x[1];
    if(*season > NONE)
    {
        for(j=0; j<(*m); j++)
            s[j] = x[(*trend>NONE)+j+1];
    }

    *lik = 0.0;
    lik2 = 0.0;
    for(j=0; j<(*nmse); j++)
    {
        amse[j] = 0.0;
        denom[j] = 0.0;
    }

    for (i=0; i<(*n); i++)
    {
        // COPY PREVIOUS STATE
        oldl = l;
        if(*trend > NONE)
            oldb = b;
        if(*season > NONE)
        {
            for(j=0; j<(*m); j++)
                olds[j] = s[j];
        }

        // ONE STEP FORECAST
        forecast(oldl, oldb, olds, *m, *trend, *season, *phi, f, *nmse);
        if(fabs(f[0]-NA) < TOL)
        {
            *lik = NA;
            return;
        }
        if(*error == ADD)
            e[i] = y[i] - f[0];
        else
            e[i] = (y[i] - f[0])/f[0];
        for(j=0; j<(*nmse); j++)
        {
            if(i+j<(*n))
            {
                denom[j] += 1.0;
                tmp = y[i+j]-f[j];
                amse[j] = (amse[j] * (denom[j]-1.0) + (tmp*tmp)) / denom[j];
            }
        }

        // UPDATE STATE
        update(&oldl, &l, &oldb, &b, olds, s, *m, *trend, *season, *alpha, *beta, *gamma, *phi, y[i]);

        // STORE NEW STATE
        x[nstates*(i+1)] = l;
        if(*trend > NONE)
            x[nstates*(i+1)+1] = b;
        if(*season > NONE)
        {
            for(j=0; j<(*m); j++)
                x[(*trend>NONE)+nstates*(i+1)+j+1] = s[j];
        }
        *lik = *lik + e[i]*e[i];
        lik2 += log(fabs(f[0]));
    }
    *lik = (*n) * log(*lik);
    if(*error == MULT)
        *lik += 2*lik2;
}

// *********************************************************************************

void etssimulate(double *x, int *m, int *error, int *trend, int *season,
    double *alpha, double *beta, double *gamma, double *phi, int *h, double *y, double *e)
{
    int i, j, nstates;
    double oldl, l, oldb, b, olds[24], s[24], f[10];

    if((*m > 24) & (*season > NONE))
        return;
    else if(*m < 1)
        *m = 1;

    nstates = (*m)*(*season>NONE) + 1 + (*trend>NONE);

    // Copy initial state components
    l = x[0];
    if(*trend > NONE)
        b = x[1];
    if(*season > NONE)
    {
        for(j=0; j<(*m); j++)
            s[j] = x[(*trend>NONE)+j+1];
    }

    for (i=0; i<(*h); i++)
    {
        // COPY PREVIOUS STATE
        oldl = l;
        if(*trend > NONE)
            oldb = b;
        if(*season > NONE)
        {
            for(j=0; j<(*m); j++)
                olds[j] = s[j];
        }

        // ONE STEP FORECAST
        forecast(oldl, oldb, olds, *m, *trend, *season, *phi, f, 1);
        if(fabs(f[0]-NA) < TOL)
        {
            y[0] = NA;
            return;
        }
        if(*error == ADD)
            y[i] = f[0] + e[i];
        else
            y[i] = f[0]*(1.0+e[i]);

        // UPDATE STATE
        update(&oldl, &l, &oldb, &b, olds, s, *m, *trend, *season, *alpha, *beta, *gamma, *phi, y[i]);
    }
}

// *********************************************************************************

void etsforecast(double *x, int *m, int *trend, int *season, double *phi, int *h, double *f)
{
    int j;
    double l, b, s[24];

    if((*m > 24) & (*season > NONE))
        return;
    else if(*m < 1)
        *m = 1;

    // Copy initial state components
    l = x[0];
    b = 0.0;
    if(*trend > NONE)
        b = x[1];
    if(*season > NONE)
    {
        for(j=0; j<(*m); j++)
            s[j] = x[(*trend>NONE)+j+1];
    }

    // Compute forecasts
    forecast(l, b, s, *m, *trend, *season, *phi, f, *h);
}

// *****************************************************************

void forecast(double l, double b, double *s, int m, int trend, int season, double phi, double *f, int h)
{
    int i, j;
    double phistar;

    phistar = phi;

    // FORECASTS
    for(i=0; i < h; i++)
    {
        if(trend == NONE)
            f[i] = l;
        else if(trend == ADD)
            f[i] = l + phistar*b;
        else if(b < 0)
            f[i] = NA;
        else
            f[i] = l * pow(b, phistar);
        j = m - 1 - i;
        while(j < 0)
            j += m;
        if(season == ADD)
            f[i] += s[j];
        else if(season == MULT)
            f[i] *= s[j];
        if(i < h-1)
        {
            if(fabs(phi-1.0) < TOL)
                phistar = phistar + 1.0;
            else
                phistar = phistar + pow(phi, (double)(i+1));
        }
    }
}

// *****************************************************************

void update(double *oldl, double *l, double *oldb, double *b, double *olds, double *s,
    int m, int trend, int season, double alpha, double beta, double gamma, double phi, double y)
{
    int j;
    double q, phib, p, r, t;

    // NEW LEVEL
    if(trend == NONE)
    {
        q = *oldl;                  // l[t-1]
        phib = 0;
    }
    else if(trend == ADD)
    {
        phib = phi*(*oldb);
        q = *oldl + phib;           // l[t-1] + phi*b[t-1]
    }
    else if(fabs(phi-1.0) < TOL)
    {
        phib = *oldb;
        q = *oldl * (*oldb);        // l[t-1] * b[t-1]
    }
    else
    {
        phib = pow(*oldb, phi);
        q = *oldl * phib;           // l[t-1] * b[t-1]^phi
    }
    if(season == NONE)
        p = y;
    else if(season == ADD)
        p = y - olds[m-1];          // y[t] - s[t-m]
    else
    {
        if(fabs(olds[m-1]) < TOL)
            p = HUGEN;
        else
            p = y / olds[m-1];      // y[t] / s[t-m]
    }
    *l = q + alpha*(p - q);         // l[t] = q + alpha*(p - q)

    // NEW GROWTH
    if(trend > NONE)
    {
        if(trend == ADD)
            r = (*l) - (*oldl);     // l[t]-l[t-1]
        else //if(trend==MULT)
        {
            if(fabs(*oldl) < TOL)
                r = HUGEN;
            else
                r = (*l)/(*oldl);   // l[t]/l[t-1]
        }
        *b = phib + (beta/alpha)*(r - phib);  // b[t] = phi*b[t-1] + beta*(r - phi*b[t-1])
                                              // b[t] = b[t-1]^phi + beta*(r - b[t-1]^phi)
    }

    // NEW SEASON
    if(season > NONE)
    {
        if(season == ADD)
            t = y - q;
        else //if(season==MULT)
        {
            if(fabs(q) < TOL)
                t = HUGEN;
            else
                t = y / q;
        }
        s[0] = olds[m-1] + gamma*(t - olds[m-1]);  // s[t] = s[t-m] + gamma*(t - s[t-m])
        for(j=1; j<m; j++)
            s[j] = olds[(j-1)];                    // s[t] = s[t-1]
    }
}
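/* Worked sketch (documentation only, not part of the original source): for the
 * additive-error, additive-trend, additive-seasonal case, forecast() and
 * update() above implement the recursions
 *     yhat[t] = l[t-1] + phi*b[t-1] + s[t-m]
 *     e[t]    = y[t] - yhat[t]
 *     l[t]    = l[t-1] + phi*b[t-1] + alpha*e[t]
 *     b[t]    = phi*b[t-1] + beta*e[t]
 *     s[t]    = s[t-m] + gamma*e[t]
 * The multiplicative variants replace these sums by the corresponding products
 * and ratios, as in the branches of update(). */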
forecast/src/etsTargetFunctionWrapper.cpp0ustar liggesusers#include <Rcpp.h>
#include <R.h>

#include <R_ext/Applic.h>	// For R's Nelder-Mead solver

#include <iostream>
#include <iomanip>

#include "etsTargetFunction.h"

// This function initializes all the parameters, constructs an
// object of type EtsTargetFunction and adds an external pointer
// to this object with name "ets.xptr"
// to the environment submitted as p_rho
//
RcppExport SEXP etsTargetFunctionInit(SEXP p_y, SEXP p_nstate, SEXP p_errortype,
		SEXP p_trendtype, SEXP p_seasontype, SEXP p_damped,
		SEXP p_lower, SEXP p_upper, SEXP p_opt_crit, SEXP p_nmse, SEXP p_bounds, SEXP p_m,
		SEXP p_optAlpha, SEXP p_optBeta, SEXP p_optGamma, SEXP p_optPhi,
		SEXP p_givenAlpha, SEXP p_givenBeta, SEXP p_givenGamma, SEXP p_givenPhi,
		SEXP p_alpha, SEXP p_beta, SEXP p_gamma, SEXP p_phi, SEXP p_rho)
{
	BEGIN_RCPP;

	EtsTargetFunction* sp = new EtsTargetFunction();

	std::vector<double> y = Rcpp::as< std::vector<double> >(p_y);

	int nstate = Rcpp::as<int>(p_nstate);

	int errortype = Rcpp::as<int>(p_errortype);
	int trendtype = Rcpp::as<int>(p_trendtype);
	int seasontype = Rcpp::as<int>(p_seasontype);

	bool damped = Rcpp::as<bool>(p_damped);

	std::vector<double> lower = Rcpp::as< std::vector<double> >(p_lower);
	std::vector<double> upper = Rcpp::as< std::vector<double> >(p_upper);

	std::string opt_crit = Rcpp::as<std::string>(p_opt_crit);
	int nmse = Rcpp::as<int>(p_nmse);

	std::string bounds = Rcpp::as< std::string >(p_bounds);
	int m = Rcpp::as<int>(p_m);

	bool optAlpha = Rcpp::as<bool>(p_optAlpha);
	bool optBeta = Rcpp::as<bool>(p_optBeta);
	bool optGamma = Rcpp::as<bool>(p_optGamma);
	bool optPhi = Rcpp::as<bool>(p_optPhi);

	bool givenAlpha = Rcpp::as<bool>(p_givenAlpha);
	bool givenBeta = Rcpp::as<bool>(p_givenBeta);
	bool givenGamma = Rcpp::as<bool>(p_givenGamma);
	bool givenPhi = Rcpp::as<bool>(p_givenPhi);

	double alpha = Rcpp::as<double>(p_alpha);
	double beta = Rcpp::as<double>(p_beta);
	double gamma = Rcpp::as<double>(p_gamma);
	double phi = Rcpp::as<double>(p_phi);

	sp->init(y, nstate, errortype, trendtype, seasontype, damped, lower, upper,
			opt_crit, nmse, bounds, m,
			optAlpha, optBeta, optGamma, optPhi,
			givenAlpha, givenBeta, givenGamma, givenPhi,
			alpha, beta, gamma, phi);

	Rcpp::Environment e(p_rho);
	e["ets.xptr"] = Rcpp::XPtr<EtsTargetFunction>( sp, true );

	return Rcpp::wrap(e);

	END_RCPP;
}

// RcppExport double targetFunctionRmalschains(SEXP p_par, SEXP p_env)
// {
//	Rcpp::NumericVector par(p_par);
//
//	Rcpp::Environment e(p_env);
//	Rcpp::XPtr<EtsTargetFunction> sp(e.get("ets.xptr"));
//
//	sp->eval(par.begin(), par.size());
//
//	//return Rcpp::wrap(sp->getObjVal());
//	return sp->getObjVal();
// }

// RcppExport SEXP etsGetTargetFunctionRmalschainsPtr() {
//	typedef double (*funcPtr)(SEXP, SEXP);
//	return (Rcpp::XPtr<funcPtr>(new funcPtr(&targetFunctionRmalschains)));
// }

/*
RcppExport SEXP targetFunctionRdonlp2(SEXP p_var, SEXP p_env)
{
	Rcpp::Environment e(p_env);
	Rcpp::XPtr<EtsTargetFunction> sp(e.get("ets.xptr"));

	Rcpp::NumericVector var(p_var);

	int mode = var[0];
	int fun_id = var[1];

	sp->eval(var.begin()+2, var.size()-2);

	if(mode == 0) {
		if(fun_id == 0) {
			return Rcpp::wrap(sp->getObjVal());
		} else {
			return Rcpp::wrap(0);
			//return Rcpp::wrap(sp->restrictions[fun_id-1]);
		}
	} else if(mode==1) {
		// error("Gradients are not implemented, exiting.");
	};
	return R_NilValue;
}

RcppExport SEXP etsGetTargetFunctionRdonlp2Ptr() {
	typedef SEXP (*funcPtr)(SEXP, SEXP);
	return (Rcpp::XPtr<funcPtr>(new funcPtr(&targetFunctionRdonlp2)));
}
*/

double targetFunctionEtsNelderMead(int n, double *par, void *ex)
{
	EtsTargetFunction* sp = (EtsTargetFunction*) ex;

	sp->eval(par, n);
	return sp->getObjVal();
}

RcppExport SEXP etsNelderMead(SEXP p_var, SEXP p_env, SEXP p_abstol,
		SEXP p_intol, SEXP p_alpha, SEXP p_beta, SEXP p_gamma,
		SEXP p_trace, SEXP p_maxit)
{
	double abstol = Rcpp::as<double>(p_abstol);
	double intol = Rcpp::as<double>(p_intol);
	double alpha = Rcpp::as<double>(p_alpha);
	double beta = Rcpp::as<double>(p_beta);
	double gamma = Rcpp::as<double>(p_gamma);

	int trace = Rcpp::as<int>(p_trace);
	int maxit = Rcpp::as<int>(p_maxit);

	int fncount = 0, fail = 0;
	double Fmin = 0.0;

	Rcpp::NumericVector dpar(p_var);
	Rcpp::NumericVector opar(dpar.size());

	Rcpp::Environment e(p_env);
	Rcpp::XPtr<EtsTargetFunction> sp(e.get("ets.xptr"));

	double (*funcPtr)(int n, double *par, void *ex) = targetFunctionEtsNelderMead;

	nmmin(dpar.size(), dpar.begin(), opar.begin(), &Fmin, funcPtr,
			&fail, abstol, intol, sp, alpha, beta, gamma, trace, &fncount, maxit);

	return Rcpp::List::create(Rcpp::Named("value") = Fmin,
			Rcpp::Named("par") = opar,
			Rcpp::Named("fail") = fail,
			Rcpp::Named("fncount") = fncount);
}
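// Usage sketch (illustrative only; the argument values are examples, not the
// exact calls made by the package's R code): from R these entry points are
// driven roughly as
//   env <- new.env()
//   .Call("etsTargetFunctionInit", y, nstate, errortype, trendtype, seasontype,
//         damped, lower, upper, opt.crit, nmse, bounds, m,
//         opt.alpha, opt.beta, opt.gamma, opt.phi,
//         given.alpha, given.beta, given.gamma, given.phi,
//         alpha, beta, gamma, phi, env)
//   .Call("etsNelderMead", par, env, -Inf, 1e-10, 1.0, 0.5, 2.0, 0L, 2000L)
// with the second call returning list(value, par, fail, fncount) as built above.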
forecast/src/makeTBATSMatrices.cpp0000644000176200001440000000563414003673410016564 0ustar liggesusers#include "calcBATS.h"

using namespace Rcpp;

SEXP makeTBATSWMatrix(SEXP smallPhi_s, SEXP kVector_s, SEXP arCoefs_s, SEXP maCoefs_s, SEXP tau_s) {
	BEGIN_RCPP

	double *smallPhi, *arCoefs, *maCoefs;
	int *kVector, *tau;
	int adjustPhi = 0;
	R_len_t numSeasonal = 0, numCols = 1, p = 0, q = 0;

	if(!Rf_isNull(smallPhi_s)) {
		smallPhi = REAL(smallPhi_s);
		adjustPhi = 1;
		numCols = numCols + 1;
	}
	if(!Rf_isNull(kVector_s)) {
		tau = &INTEGER(tau_s)[0];
		kVector = INTEGER(kVector_s);
		numSeasonal = LENGTH(kVector_s);
		numCols = numCols + *tau;
	}
	if(!Rf_isNull(arCoefs_s)) {
		arCoefs = REAL(arCoefs_s);
		p = LENGTH(arCoefs_s);
		numCols = numCols + p;
	}
	if(!Rf_isNull(maCoefs_s)) {
		maCoefs = REAL(maCoefs_s);
		q = LENGTH(maCoefs_s);
		numCols = numCols + q;
	}

	NumericMatrix wTranspose_r(1, numCols);
	arma::mat wTranspose(wTranspose_r.begin(), wTranspose_r.nrow(), wTranspose_r.ncol(), false);

	if(!Rf_isNull(kVector_s)) {
		wTranspose.zeros();
		int position = adjustPhi;
		for(R_len_t s = 0; s < numSeasonal; s++) {
			//wTranspose.submat(0,(position+1), 0, (position + kVector[s])) = arma::ones(1, kVector[s]);
			for(int j = (position+1); j <= (position + kVector[s]); j++) {
				wTranspose(0,j) = 1;
			}
			position = position + (2 * kVector[s]);
		}
	}

	wTranspose(0,0) = 1;
	if(adjustPhi == 1) {
		wTranspose(0,1) = *smallPhi;
	}
	if(!Rf_isNull(arCoefs_s)) {
		for(R_len_t i = 1; i <= p; i++) {
			wTranspose(0,(adjustPhi + *tau + i)) = arCoefs[(i-1)];
		}
	}
	if(!Rf_isNull(maCoefs_s)) {
		for(R_len_t i = 1; i <= q; i++) {
			wTranspose(0,(adjustPhi + *tau + p + i)) = maCoefs[(i-1)];
		}
	}

	arma::mat w = arma::trans(wTranspose);

	smallPhi = 0;
	arCoefs = 0;
	maCoefs = 0;
	kVector = 0;

	return List::create(
			Named("w") = w,
			Named("w.transpose") = wTranspose
	);

	END_RCPP
}

SEXP makeCIMatrix(SEXP k_s, SEXP m_s) {
	BEGIN_RCPP

	double pi = arma::datum::pi;
	double lambda, *m;
	int *k;
	k = &INTEGER(k_s)[0];
	m = &REAL(m_s)[0];

	NumericMatrix C(*k, *k);
	for(int j = 1; j <= *k; j++) {
		lambda = (2 * pi * j) / *m;
		C((j-1),(j-1)) = std::cos(lambda);
	}
	return wrap(C);

	END_RCPP
}

SEXP makeSIMatrix(SEXP k_s, SEXP m_s) {
	BEGIN_RCPP

	double pi = arma::datum::pi;
	double lambda, *m;
	int *k;
	k = &INTEGER(k_s)[0];
	m = &REAL(m_s)[0];

	NumericMatrix S(*k, *k);
	for(int j = 1; j <= *k; j++) {
		lambda = (2 * pi * j) / *m;
		S((j-1),(j-1)) = std::sin(lambda);
	}
	return wrap(S);

	END_RCPP
}

SEXP makeAIMatrix(SEXP C_s, SEXP S_s, SEXP k_s) {
	int *k;
	k = &INTEGER(k_s)[0];

	NumericMatrix C_r(C_s);
	NumericMatrix S_r(S_s);

	arma::mat C(C_r.begin(), C_r.nrow(), C_r.ncol(), false);
	arma::mat S(S_r.begin(), S_r.nrow(), S_r.ncol(), false);

	arma::mat A((*k * 2), (*k * 2));
	A.submat(0, 0, (*k -1), (*k -1)) = C;
	A.submat(0, *k, (*k -1), ((*k *2) -1)) = S;
	A.submat(*k, 0, ((*k *2) -1), (*k -1)) = (-1 * S);
	A.submat(*k, *k, ((*k *2) -1), ((*k *2) -1)) = C;

	return wrap(A);
}
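// Documentation sketch (not part of the original comments): makeCIMatrix() and
// makeSIMatrix() build diag(cos(lambda_j)) and diag(sin(lambda_j)) with
// lambda_j = 2*pi*j/m, and makeAIMatrix() assembles the 2k x 2k block
//     A = [  C  S ]
//         [ -S  C ]
// which rotates the k pairs of trigonometric seasonal states each period, as
// in the TBATS formulation of De Livera, Hyndman & Snyder (2011).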
forecast/src/calcBATS.cpp0000644000176200001440000002171414003673410014732 0ustar liggesusers#include "calcBATS.h"

using namespace Rcpp;

SEXP calcBATS(SEXP ys, SEXP yHats, SEXP wTransposes, SEXP Fs, SEXP xs, SEXP gs, SEXP es) {
	BEGIN_RCPP

	NumericMatrix yr(ys);
	NumericMatrix yHatr(yHats);
	NumericMatrix wTransposer(wTransposes);
	NumericMatrix Fr(Fs);
	NumericMatrix xr(xs);
	NumericMatrix gr(gs);
	NumericMatrix er(es);

	int t;

	arma::mat y(yr.begin(), yr.nrow(), yr.ncol(), false);
	arma::mat yHat(yHatr.begin(), yHatr.nrow(), yHatr.ncol(), false);
	arma::mat wTranspose(wTransposer.begin(), wTransposer.nrow(), wTransposer.ncol(), false);
	arma::mat F(Fr.begin(), Fr.nrow(), Fr.ncol(), false);
	arma::mat x(xr.begin(), xr.nrow(), xr.ncol(), false);
	arma::mat g(gr.begin(), gr.nrow(), gr.ncol(), false);
	arma::mat e(er.begin(), er.nrow(), er.ncol(), false);

	for(t = 1; t < yr.ncol(); t++) {
		yHat.col(t) = wTranspose * x.col((t-1));
		e(0,t) = y(0, t) - yHat(0, t);
		x.col(t) = F * x.col((t-1)) + g * e(0,t);
	}

	return List::create(
			Named("y.hat") = yHat,
			Named("e") = e,
			Named("x") = x
	);

	END_RCPP
}
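// Documentation sketch (not part of the original comments): calcBATS() above
// and calcBATSFaster() below both run the linear innovations state-space filter
//     yhat[t] = w' x[t-1],   e[t] = y[t] - yhat[t],   x[t] = F x[t-1] + g e[t];
// the "faster" variant avoids full matrix products by exploiting the known
// sparsity pattern of w, F and g.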
SEXP calcBATSFaster(SEXP ys, SEXP yHats, SEXP wTransposes, SEXP Fs, SEXP xs, SEXP gs, SEXP es,
		SEXP xNought_s, SEXP sPeriods_s, SEXP betaV, SEXP tau_s, SEXP p_s, SEXP q_s) {
	BEGIN_RCPP

	NumericMatrix yr(ys);
	NumericMatrix yHatr(yHats);
	NumericMatrix wTransposer(wTransposes);
	NumericMatrix Fr(Fs);
	NumericMatrix xr(xs);
	NumericMatrix gr(gs);
	NumericMatrix er(es);
	NumericMatrix xNought_r(xNought_s);
	//IntegerVector sPeriodsR(sPeriods);

	int adjBeta, previousS, lengthArma, *tau, *p, *q, *sPeriods;
	R_len_t lengthSeasonal;

	tau = &INTEGER(tau_s)[0];
	p = &INTEGER(p_s)[0];
	q = &INTEGER(q_s)[0];
	lengthArma = *p + *q;

	if(!Rf_isNull(sPeriods_s)) {
		sPeriods = INTEGER(sPeriods_s);
		lengthSeasonal = LENGTH(sPeriods_s);
	}

	if(!Rf_isNull(betaV)) {
		adjBeta = 1;
	} else {
		adjBeta = 0;
	}

	arma::mat y(yr.begin(), yr.nrow(), yr.ncol(), false);
	arma::mat yHat(yHatr.begin(), yHatr.nrow(), yHatr.ncol(), false);
	arma::mat wTranspose(wTransposer.begin(), wTransposer.nrow(), wTransposer.ncol(), false);
	arma::mat F(Fr.begin(), Fr.nrow(), Fr.ncol(), false);
	arma::mat x(xr.begin(), xr.nrow(), xr.ncol(), false);
	arma::mat g(gr.begin(), gr.nrow(), gr.ncol(), false);
	arma::mat e(er.begin(), er.nrow(), er.ncol(), false);
	arma::mat xNought(xNought_r.begin(), xNought_r.nrow(), xNought_r.ncol(), false);

	if(!Rf_isNull(sPeriods_s)) {
		//One
		//Rprintf("one-1\n");
		yHat.col(0) = wTranspose.cols(0, adjBeta) * xNought.rows(0, adjBeta);
		//Rprintf("one-2\n");
		previousS = 0;
		for(R_len_t i = 0; i < lengthSeasonal; i++) {
			//Rprintf("one-3\n");
			yHat(0,0) = yHat(0,0) + xNought((previousS + sPeriods[i] + adjBeta), 0);
			previousS += sPeriods[i];
		}
		if(lengthArma > 0) {
			//Rprintf("bg-1");
			yHat.col(0) = yHat(0,0) + wTranspose.cols((*tau + adjBeta + 1), (xNought.n_rows-1)) * xNought.rows((*tau + adjBeta + 1), (xNought.n_rows-1));
		}
		//Two
		e(0,0) = y(0, 0) - yHat(0, 0);
		//Three
		//Rprintf("three-5\n");
		x.submat(0, 0, adjBeta, 0) = F.submat(0,0,adjBeta,adjBeta) * xNought.rows(0,adjBeta);
		if(lengthArma > 0) {
			//Rprintf("bg-2");
			x.submat(0, 0, adjBeta, 0) += F.submat(0,(adjBeta+ *tau + 1),adjBeta,(F.n_cols - 1)) * xNought.rows((adjBeta+ *tau + 1),(F.n_cols - 1));
		}
		previousS = 0;
		for(R_len_t i = 0; i < lengthSeasonal; i++) {
			//Rprintf("three-7\n");
			x((adjBeta+previousS+1),0) = xNought((adjBeta+previousS+sPeriods[i]),0);
			if(lengthArma > 0) {
				//Rprintf("bg-3");
				x.submat((adjBeta+previousS+1),0, (adjBeta+previousS+1),0) = x.submat((adjBeta+previousS+1),0, (adjBeta+previousS+1),0) +
						F.submat((adjBeta + previousS + 1), (adjBeta+*tau+1), (adjBeta + previousS + 1), (F.n_cols-1)) * xNought.rows((adjBeta + *tau +1), (F.n_cols-1));
			}
			//Rprintf("three-9\n");
			x.submat((adjBeta + previousS + 2), 0, (adjBeta + previousS + sPeriods[i]), 0) = xNought.rows((adjBeta + previousS + 1), (adjBeta + previousS + sPeriods[i] -1));
			previousS += sPeriods[i];
		}
		if(*p > 0) {
			//Rprintf("bg-4");
			x.submat((adjBeta+ *tau + 1),0,(adjBeta+ *tau + 1),0) = F.submat((adjBeta + *tau +1), (adjBeta + *tau +1), (adjBeta + *tau + 1), (F.n_cols-1)) * xNought.rows((adjBeta+*tau+1), (F.n_cols-1));
			//Rprintf("bg-5");
			////error is HERE!!!
			if(*p > 1) {
				x.submat((adjBeta + *tau + 2),0,(adjBeta + *tau + *p),0) = xNought.rows((adjBeta + *tau + 1),(adjBeta + *tau + *p-1));
			}
		}
		if(*q > 0) {
			//Rprintf("three-12\n");
			x((adjBeta+ *tau + *p + 1),0) = 0;
			if(*q > 1) {
				//Rprintf("three-13\n");
				x.submat((adjBeta+ *tau + *p + 2), 0, (adjBeta + *tau + *p + *q), 0) = xNought.rows((adjBeta + *tau + *p + 1),(adjBeta + *tau + *p + *q - 1));
			}
		}
		///Temporary fix!
		//x.col(0) += g * e(0,0);
		//End
		///////////
		x(0,0) += g(0,0) * e(0,0);
		if(adjBeta == 1) {
			x(1,0) += g(1,0) * e(0,0);
		}
		previousS = 0;
		for(R_len_t i = 0; i < lengthSeasonal; i++) {
			x((adjBeta+previousS+1),0) += g((adjBeta+previousS+1),0) * e(0,0);
			previousS += sPeriods[i];
		}
		if(*p > 0) {
			x((adjBeta + *tau + 1),0) += e(0,0);
			if(*q > 0) {
				x((adjBeta + *tau + *p + 1),0) += e(0,0);
			}
		} else if(*q > 0) {
			x((adjBeta + *tau + 1),0) += e(0,0);
		}
		/////////////////////////////////
		for(int t = 1; t < yr.ncol(); t++) {
			//Rprintf("point-x\n");
			//One
			yHat.col(t) = wTranspose.cols(0, adjBeta) * x.submat(0, (t-1), adjBeta, (t-1));
			previousS = 0;
			for(R_len_t i = 0; i < lengthSeasonal; i++) {
				//mod here
				//Rprintf("point-xx\n");
				yHat(0,t) += x((previousS + sPeriods[i] + adjBeta), (t-1));
				previousS += sPeriods[i];
			}
			if(lengthArma > 0) {
				//Rprintf("bg-6");
				yHat.col(t) += wTranspose.cols((*tau + adjBeta + 1), (xNought.n_rows-1)) * x.submat((*tau + adjBeta + 1), (t-1), (x.n_rows-1), (t-1));
			}
			//Two
			//Rprintf("point-x4\n");
			e(0,t) = y(0, t) - yHat(0, t);
			//Three
			//Rprintf("point-x5\n");
			x.submat(0, t, adjBeta, t) = F.submat(0,0,adjBeta,adjBeta) * x.submat(0, (t-1), adjBeta, (t-1));
			if(lengthArma > 0) {
				//Rprintf("bg-7");
				x.submat(0, t, adjBeta, t) += F.submat(0,(adjBeta+ *tau + 1),adjBeta,(F.n_cols - 1)) * x.submat((adjBeta+ *tau + 1), (t-1), (F.n_cols - 1), (t-1));
			}
			previousS = 0;
			for(R_len_t i = 0; i < lengthSeasonal; i++) {
				//Rprintf("point-x7\n");
				x((adjBeta+previousS+1),t) = x((adjBeta+previousS+sPeriods[i]),(t-1));
				if(lengthArma > 0) {
					//Rprintf("bg-8");
					x.submat((adjBeta+previousS+1),t, (adjBeta+previousS+1),t) += F.submat((adjBeta + previousS + 1), (adjBeta+*tau+1), (adjBeta + previousS + 1), (F.n_cols-1)) * x.submat((adjBeta + *tau +1), (t-1), (F.n_cols-1), (t-1));
				}
				//Rprintf("Three-L-9\n");
				x.submat((adjBeta + previousS + 2), t, (adjBeta + previousS + sPeriods[i]), t) = x.submat((adjBeta + previousS + 1), (t-1), (adjBeta + previousS + sPeriods[i] -1), (t-1));
				previousS += sPeriods[i];
			}
			/*
			if(lengthArma > 0) {
				x.submat((adjBeta+ *tau + 1),t, (x.n_rows-1),t) = F.submat((adjBeta+ *tau + 1), (adjBeta+ *tau + 1), (F.n_rows - 1), (F.n_rows - 1)) * x.submat((adjBeta+ *tau + 1),(t-1), (x.n_rows-1),(t-1));
			}
			*/
			if(*p > 0) {
				//Rprintf("bg-9");
				x.submat((adjBeta+ *tau + 1),t, (adjBeta+ *tau + 1),t) = F.submat((adjBeta + *tau +1), (adjBeta + *tau +1), (adjBeta + *tau + 1), (F.n_cols-1)) * x.submat((adjBeta+*tau+1), (t-1), (F.n_cols-1), (t-1));
				if(*p > 1) {
					x.submat((adjBeta + *tau + 2),t,(adjBeta + *tau + *p),t) = x.submat((adjBeta + *tau + 1), (t-1), (adjBeta + *tau + *p -1), (t-1));
				}
			}
			if(*q > 0) {
				x((adjBeta+ *tau + *p + 1),t) = 0;
				if(*q > 1) {
					x.submat((adjBeta+ *tau + *p + 2), t, (adjBeta + *tau + *p + *q), t) = x.submat((adjBeta + *tau + *p + 1), (t-1), (adjBeta + *tau + *p + *q - 1), (t-1));
				}
			}
			///Temporary fix!
			//x.col(t) += g * e(0,t);
			//End
			///////////
			x(0,t) += g(0,0) * e(0,t);
			if(adjBeta == 1) {
				x(1,t) += g(1,0) * e(0,t);
			}
			previousS = 0;
			for(R_len_t i = 0; i < lengthSeasonal; i++) {
				x((adjBeta+previousS+1),t) += g((adjBeta+previousS+1),0) * e(0,t);
				previousS += sPeriods[i];
			}
			if(*p > 0) {
				x((adjBeta + *tau + 1),t) += e(0,t);
				if(*q > 0) {
					x((adjBeta + *tau + *p + 1),t) += e(0,t);
				}
			} else if(*q > 0) {
				x((adjBeta + *tau + 1),t) += e(0,t);
			}
			/////////////////////////////////
		}
	} else {
		yHat.col(0) = wTranspose * xNought;
		e(0,0) = y(0, 0) - yHat(0, 0);
		x.col(0) = F * xNought + g * e(0,0);
		for(int t = 1; t < yr.ncol(); t++) {
			yHat.col(t) = wTranspose * x.col((t-1));
			e(0,t) = y(0, t) - yHat(0, t);
			x.col(t) = F * x.col((t-1)) + g * e(0,t);
		}
	}

	return R_NilValue;

	END_RCPP
}
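// Documentation sketch (not part of the original comments): calcWTilda() below
// fills row t with wTilda[t, ] = wTilda[t-1, ] * D, so successive rows hold
// w' D, w' D^2, ..., the weights that express the filtered values as a linear
// combination of the initial state and past observations.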
SEXP calcWTilda(SEXP wTildaTransposes, SEXP Ds) {
	BEGIN_RCPP

	NumericMatrix wTildaTransposer(wTildaTransposes);
	NumericMatrix Dr(Ds);

	int t;

	arma::mat wTildaTranspose(wTildaTransposer.begin(), wTildaTransposer.nrow(), wTildaTransposer.ncol(), false);
	arma::mat D(Dr.begin(), Dr.nrow(), Dr.ncol(), false);

	for(t = 1; t < wTildaTransposer.nrow(); t++) {
		wTildaTranspose.row(t) = wTildaTranspose.row((t-1)) * D;
	}

	return wTildaTransposer;

	END_RCPP
}
forecast/src/calcBATS.h0000644000176200001440000000464014003673410014376 0ustar liggesusers#ifndef _forecast_CALCBATS
#define _forecast_CALCBATS

/////////////////////////////////////
// if unable to compile, please comment these lines
// #define __GXX_EXPERIMENTAL_CXX0X__ 1
// #ifndef HAVE_ERRNO_T
// typedef int errno_t;
// #endif
// #if __WORDSIZE == 64
// # ifndef __intptr_t_defined
// typedef long int intptr_t;
// # define __intptr_t_defined
// # endif
// typedef unsigned long int uintptr_t;
// #else
// # ifndef __intptr_t_defined
// typedef int intptr_t;
// # define __intptr_t_defined
// # endif
// typedef unsigned int uintptr_t;
// #endif
// #include
// #include
// #include
// #include
// #include
// if unable to compile, please comment these lines
/////////////////////////////////////

#include <RcppArmadillo.h>
#include <Rcpp.h>

RcppExport SEXP calcBATS(SEXP ys, SEXP yHats, SEXP wTransposes, SEXP Fs, SEXP xs, SEXP gs, SEXP es);

RcppExport SEXP calcBATSFaster(SEXP ys, SEXP yHats, SEXP wTransposes, SEXP Fs, SEXP xs, SEXP gs, SEXP es,
		SEXP xNought_s, SEXP sPeriods_s, SEXP betaV, SEXP tau_s, SEXP p_s, SEXP q_s);

RcppExport SEXP calcWTilda(SEXP wTildaTransposes, SEXP Ds);

RcppExport SEXP makeBATSWMatrix(SEXP smallPhi_s, SEXP sPeriods_s, SEXP arCoefs_s, SEXP maCoefs_s);

RcppExport SEXP makeBATSGMatrix(SEXP alpha_s, SEXP beta_s, SEXP gammaVector_s, SEXP seasonalPeriods_s, SEXP p_s, SEXP q_s);

RcppExport SEXP updateFMatrix(SEXP F_s, SEXP smallPhi_s, SEXP alpha_s, SEXP beta_s, SEXP gammaBold_s, SEXP ar_s, SEXP ma_s, SEXP tau_s);

RcppExport SEXP updateWtransposeMatrix(SEXP wTranspose_s, SEXP smallPhi_s, SEXP tau_s, SEXP arCoefs_s, SEXP maCoefs_s, SEXP p_s, SEXP q_s);

RcppExport SEXP updateGMatrix(SEXP g_s, SEXP gammaBold_s, SEXP alpha_s, SEXP beta_s, SEXP gammaVector_s, SEXP seasonalPeriods_s);

//TBATS Functions
RcppExport SEXP makeTBATSWMatrix(SEXP smallPhi_s, SEXP kVector_s, SEXP arCoefs_s, SEXP maCoefs_s, SEXP tau_s);

RcppExport SEXP makeCIMatrix(SEXP k_s, SEXP m_s);

RcppExport SEXP makeSIMatrix(SEXP k_s, SEXP m_s);

RcppExport SEXP makeAIMatrix(SEXP C_s, SEXP S_s, SEXP k_s);

RcppExport SEXP updateTBATSGammaBold(SEXP gammaBold_s, SEXP kVector_s, SEXP gammaOne_s, SEXP gammaTwo_s);

RcppExport SEXP updateTBATSGMatrix(SEXP g_s, SEXP gammaBold_s, SEXP alpha_s, SEXP beta_s);

RcppExport SEXP calcTBATSFaster(SEXP ys, SEXP yHats, SEXP wTransposes, SEXP Fs, SEXP xs, SEXP gs, SEXP es, SEXP xNought_s);

#endif
forecast/src/Makevars0000644000176200001440000000072014026463266014353 0ustar liggesusers## Also, OpenMP support in Armadillo prefers C++11 support. However, for wider
## availability of the package we do not yet enforce this here. It is however
## recommended for client packages to set it.
##
## And with R 3.4.0, and RcppArmadillo 0.7.960.*, we turn C++11 on as OpenMP
## support within Armadillo prefers / requires it
CXX_STD = CXX11
PKG_CXXFLAGS = $(SHLIB_OPENMP_CXXFLAGS)
PKG_LIBS = $(SHLIB_OPENMP_CXXFLAGS) $(LAPACK_LIBS) $(BLAS_LIBS) $(FLIBS)
forecast/src/Makevars.win0000644000176200001440000000072014026463257015147 0ustar liggesusers## Also, OpenMP support in Armadillo prefers C++11 support. However, for wider
## availability of the package we do not yet enforce this here. It is however
## recommended for client packages to set it.
##
## And with R 3.4.0, and RcppArmadillo 0.7.960.*, we turn C++11 on as OpenMP
## support within Armadillo prefers / requires it
CXX_STD = CXX11
PKG_CXXFLAGS = $(SHLIB_OPENMP_CXXFLAGS)
PKG_LIBS = $(SHLIB_OPENMP_CXXFLAGS) $(LAPACK_LIBS) $(BLAS_LIBS) $(FLIBS)
forecast/src/updateTBATSMatrices.cpp0000644000176200001440000000252614003673410017126 0ustar liggesusers#include "calcBATS.h"

using namespace Rcpp;

SEXP updateTBATSGammaBold(SEXP gammaBold_s, SEXP kVector_s, SEXP gammaOne_s, SEXP gammaTwo_s) {
	BEGIN_RCPP

	NumericMatrix gammaBold(gammaBold_s);
	IntegerVector kVector(kVector_s);
	NumericVector gammaOne(gammaOne_s);
	NumericVector gammaTwo(gammaTwo_s);

	int endPos = 0;
	int numK = kVector.size();
	for(int i = 0; i < numK; i++) {
		for(int j = endPos; j < (kVector(i) + endPos); j++) {
			gammaBold(0,j) = gammaOne(i);
		}
		for(int j = (kVector(i) + endPos); j < ((2*kVector(i)) + endPos); j++) {
			gammaBold(0,j) = gammaTwo(i);
		}
		endPos += 2 * kVector(i);
	}

	return R_NilValue;

	END_RCPP
}

SEXP updateTBATSGMatrix(SEXP g_s, SEXP gammaBold_s, SEXP alpha_s, SEXP beta_s) {
	BEGIN_RCPP

	int adjBeta = 0;
	double *gammaVector;
	NumericMatrix g_r(g_s);
	//Rprintf("one\n");
	g_r(0,0) = REAL(alpha_s)[0];
	//Rprintf("two\n");
	if(!Rf_isNull(beta_s)) {
		//Rprintf("three\n");
		g_r(1,0) = REAL(beta_s)[0];
		adjBeta = 1;
	}
	//Rprintf("four\n");
	if(!Rf_isNull(gammaBold_s)) {
		NumericMatrix gammaBold_r(gammaBold_s);
		arma::mat gammaBold(gammaBold_r.begin(), gammaBold_r.nrow(), gammaBold_r.ncol(), false);
		arma::mat g(g_r.begin(), g_r.nrow(), g_r.ncol(), false);
		g.submat((adjBeta+1), 0, (adjBeta+gammaBold.n_cols), 0) = trans(gammaBold);
	}
	//Rprintf("five\n");

	return R_NilValue;

	END_RCPP
}
forecast/src/registerDynamicSymbol.c0000644000176200001440000000034514003673410017332 0ustar liggesusers// Registering Dynamic Symbols
#include <R.h>
#include <Rinternals.h>
#include <R_ext/Rdynload.h>

void R_init_markovchain(DllInfo* info) {
  R_registerRoutines(info, NULL, NULL, NULL, NULL);
  R_useDynamicSymbols(info, TRUE);
}
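/* Documentation sketch (not part of the original comments): cpolyroot() below
 * is the classical three-stage Jenkins-Traub algorithm. Given the real and
 * imaginary parts of the coefficients of
 *     p(z) = a_0 z^n + a_1 z^(n-1) + ... + a_n
 * (in order of decreasing powers), it returns the n complex roots; callers can
 * then, for example, check whether all roots lie outside the unit circle. */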
forecast/src/etspolyroot.c0000644000176200001440000003533314003673410015423 0ustar liggesusers/* Formerly src/appl/cpoly.c:
 *
 *  Copyright (C) 1997-1998 Ross Ihaka
 *  Copyright (C) 1999-2001 R Core Team
 *
 *	cpoly finds the zeros of a complex polynomial.
 *
 *	On Entry
 *
 *	opr, opi      -	 double precision vectors of real and
 *			 imaginary parts of the coefficients in
 *			 order of decreasing powers.
 *
 *	degree	      -	 int degree of polynomial.
 *
 *	On Return
 *
 *	zeror, zeroi  -	 output double precision vectors of
 *			 real and imaginary parts of the zeros.
 *
 *	fail	      -	 output int parameter, true only if
 *			 leading coefficient is zero or if cpoly
 *			 has found fewer than degree zeros.
 *
 *	The program has been written to reduce the chance of overflow
 *	occurring. If it does occur, there is still a possibility that
 *	the zerofinder will work provided the overflowed quantity is
 *	replaced by a large number.
 *
 *	This is a C translation of the following.
 *
 *	TOMS Algorithm 419
 *	Jenkins and Traub.
 *	Comm. ACM 15 (1972) 97-99.
 *
 *	Ross Ihaka
 *	February 1997
 */

#include <R_ext/Arith.h> /* for declaration of hypot */
#include <R_ext/Memory.h> /* for declaration of R_alloc */

#include <float.h> /* for FLT_RADIX */

#include <Rmath.h> /* for R_pow_di */

static void calct(Rboolean *);
static Rboolean fxshft(int, double *, double *);
static Rboolean vrshft(int, double *, double *);
static void nexth(Rboolean);
static void noshft(int);

static void polyev(int, double, double,
		   double *, double *, double *, double *, double *, double *);
static double errev(int, double *, double *, double, double, double, double);
static double cpoly_cauchy(int, double *, double *);
static double cpoly_scale(int, double *, double, double, double, double);
static void cdivid(double, double, double, double, double *, double *);

/* Global Variables (too many!) */

static int nn;
static double *pr, *pi, *hr, *hi, *qpr, *qpi, *qhr, *qhi, *shr, *shi;
static double sr, si;
static double tr, ti;
static double pvr, pvi;

static const double eta = DBL_EPSILON;
static const double are = /* eta = */DBL_EPSILON;
static const double mre = 2. * M_SQRT2 * /* eta, i.e. */DBL_EPSILON;
static const double infin = DBL_MAX;

void cpolyroot(double *opr, double *opi, int *degree,
	       double *zeror, double *zeroi, Rboolean *fail)
{
    static const double smalno = DBL_MIN;
    static const double base = (double)FLT_RADIX;
    static int d_n, i, i1, i2;
    static double zi, zr, xx, yy;
    static double bnd, xxx;
    Rboolean conv;
    int d1;
    double *tmp;
    static const double cosr =/* cos 94 */ -0.06975647374412529990;
    static const double sinr =/* sin 94 */ 0.99756405025982424767;

    xx = M_SQRT1_2;/* 1/sqrt(2) = 0.707.... */
    yy = -xx;
    *fail = FALSE;

    nn = *degree;
    d1 = nn - 1;

    /* algorithm fails if the leading coefficient is zero. */
*/ for (i=0 ; i < nn ; i++) shr[i] = hypot(pr[i], pi[i]); bnd = cpoly_cauchy(nn, shr, shi); /* outer loop to control 2 major passes */ /* with different sequences of shifts */ for (i1 = 1; i1 <= 2; i1++) { /* first stage calculation, no shift */ noshft(5); /* inner loop to select a shift */ for (i2 = 1; i2 <= 9; i2++) { /* shift is chosen with modulus bnd */ /* and amplitude rotated by 94 degrees */ /* from the previous shift */ xxx= cosr * xx - sinr * yy; yy = sinr * xx + cosr * yy; xx = xxx; sr = bnd * xx; si = bnd * yy; /* second stage calculation, fixed shift */ conv = fxshft(i2 * 10, &zr, &zi); if (conv) goto L10; } } /* the zerofinder has failed on two major passes */ /* return empty handed */ *fail = TRUE; return; /* the second stage jumps directly to the third stage iteration. * if successful, the zero is stored and the polynomial deflated. */ L10: d_n = d1+2 - nn; zeror[d_n] = zr; zeroi[d_n] = zi; --nn; for (i=0; i < nn ; i++) { pr[i] = qpr[i]; pi[i] = qpi[i]; } }/*while*/ /* calculate the final zero and return */ cdivid(-pr[1], -pi[1], pr[0], pi[0], &zeror[d1], &zeroi[d1]); return; } /* Computes the derivative polynomial as the initial * polynomial and computes l1 no-shift h polynomials. */ static void noshft(int l1) { int i, j, jj, n = nn - 1, nm1 = n - 1; double t1, t2, xni; for (i=0; i < n; i++) { xni = (double)(nn - i - 1); hr[i] = xni * pr[i] / n; hi[i] = xni * pi[i] / n; } for (jj = 1; jj <= l1; jj++) { if (hypot(hr[n-1], hi[n-1]) <= eta * 10.0 * hypot(pr[n-1], pi[n-1])) { /* If the constant term is essentially zero, */ /* shift h coefficients. */ for (i = 1; i <= nm1; i++) { j = nn - i; hr[j-1] = hr[j-2]; hi[j-1] = hi[j-2]; } hr[0] = 0.; hi[0] = 0.; } else { cdivid(-pr[nn-1], -pi[nn-1], hr[n-1], hi[n-1], &tr, &ti); for (i = 1; i <= nm1; i++) { j = nn - i; t1 = hr[j-2]; t2 = hi[j-2]; hr[j-1] = tr * t1 - ti * t2 + pr[j-1]; hi[j-1] = tr * t2 + ti * t1 + pi[j-1]; } hr[0] = pr[0]; hi[0] = pi[0]; } } } /* Computes l2 fixed-shift h polynomials and tests for convergence. * initiates a variable-shift iteration and returns with the * approximate zero if successful. */ static Rboolean fxshft(int l2, double *zr, double *zi) { /* l2 - limit of fixed shift steps * zr,zi - approximate zero if convergence (result TRUE) * * Return value indicates convergence of stage 3 iteration * * Uses global (sr,si), nn, pr[], pi[], .. (all args of polyev() !) */ Rboolean pasd, bool, test; static double svsi, svsr; static int i, j, n; static double oti, otr; n = nn - 1; /* evaluate p at s. */ polyev(nn, sr, si, pr, pi, qpr, qpi, &pvr, &pvi); test = TRUE; pasd = FALSE; /* calculate first t = -p(s)/h(s). */ calct(&bool); /* main loop for one second stage step. */ for (j=1; j<=l2; j++) { otr = tr; oti = ti; /* compute next h polynomial and new t. */ nexth(bool); calct(&bool); *zr = sr + tr; *zi = si + ti; /* test for convergence unless stage 3 has */ /* failed once or this is the last h polynomial. */ if (!bool && test && j != l2) { if (hypot(tr - otr, ti - oti) >= hypot(*zr, *zi) * 0.5) { pasd = FALSE; } else if (! pasd) { pasd = TRUE; } else { /* the weak convergence test has been */ /* passed twice, start the third stage */ /* iteration, after saving the current */ /* h polynomial and shift. */ for (i = 0; i < n; i++) { shr[i] = hr[i]; shi[i] = hi[i]; } svsr = sr; svsi = si; if (vrshft(10, zr, zi)) { return TRUE; } /* the iteration failed to converge. */ /* turn off testing and restore */ /* h, s, pv and t. 
*/ test = FALSE; for (i=1 ; i<=n ; i++) { hr[i-1] = shr[i-1]; hi[i-1] = shi[i-1]; } sr = svsr; si = svsi; polyev(nn, sr, si, pr, pi, qpr, qpi, &pvr, &pvi); calct(&bool); } } } /* attempt an iteration with final h polynomial */ /* from second stage. */ return(vrshft(10, zr, zi)); } /* carries out the third stage iteration. */ static Rboolean vrshft(int l3, double *zr, double *zi) { /* l3 - limit of steps in stage 3. * zr,zi - on entry contains the initial iterate; * if the iteration converges it contains * the final iterate on exit. * Returns TRUE if iteration converges * * Assign and uses GLOBAL sr, si */ Rboolean bool, b; static int i, j; static double r1, r2, mp, ms, tp, relstp; static double omp; b = FALSE; sr = *zr; si = *zi; /* main loop for stage three */ for (i = 1; i <= l3; i++) { /* evaluate p at s and test for convergence. */ polyev(nn, sr, si, pr, pi, qpr, qpi, &pvr, &pvi); mp = hypot(pvr, pvi); ms = hypot(sr, si); if (mp <= 20. * errev(nn, qpr, qpi, ms, mp, /*are=*/eta, mre)) { goto L_conv; } /* polynomial value is smaller in value than */ /* a bound on the error in evaluating p, */ /* terminate the iteration. */ if (i != 1) { if (!b && mp >= omp && relstp < .05) { /* iteration has stalled. probably a */ /* cluster of zeros. do 5 fixed shift */ /* steps into the cluster to force */ /* one zero to dominate. */ tp = relstp; b = TRUE; if (relstp < eta) tp = eta; r1 = sqrt(tp); r2 = sr * (r1 + 1.) - si * r1; si = sr * r1 + si * (r1 + 1.); sr = r2; polyev(nn, sr, si, pr, pi, qpr, qpi, &pvr, &pvi); for (j = 1; j <= 5; ++j) { calct(&bool); nexth(bool); } omp = infin; goto L10; } else { /* exit if polynomial value */ /* increases significantly. */ if (mp * .1 > omp) return FALSE; } } omp = mp; /* calculate next iterate. */ L10: calct(&bool); nexth(bool); calct(&bool); if (!bool) { relstp = hypot(tr, ti) / hypot(sr, si); sr += tr; si += ti; } } return FALSE; L_conv: *zr = sr; *zi = si; return TRUE; } static void calct(Rboolean *bool) { /* computes t = -p(s)/h(s). * bool - logical, set true if h(s) is essentially zero. */ int n = nn - 1; double hvi, hvr; /* evaluate h(s). */ polyev(n, sr, si, hr, hi, qhr, qhi, &hvr, &hvi); *bool = hypot(hvr, hvi) <= are * 10. * hypot(hr[n-1], hi[n-1]); if (!*bool) { cdivid(-pvr, -pvi, hvr, hvi, &tr, &ti); } else { tr = 0.; ti = 0.; } } static void nexth(Rboolean bool) { /* calculates the next shifted h polynomial. * bool : if TRUE h(s) is essentially zero */ int j, n = nn - 1; double t1, t2; if (!bool) { for (j=1; j < n; j++) { t1 = qhr[j - 1]; t2 = qhi[j - 1]; hr[j] = tr * t1 - ti * t2 + qpr[j]; hi[j] = tr * t2 + ti * t1 + qpi[j]; } hr[0] = qpr[0]; hi[0] = qpi[0]; } else { /* if h(s) is zero replace h with qh. */ for (j=1; j < n; j++) { hr[j] = qhr[j-1]; hi[j] = qhi[j-1]; } hr[0] = 0.; hi[0] = 0.; } } /*--------------------- Independent Complex Polynomial Utilities ----------*/ static void polyev(int n, double s_r, double s_i, double *p_r, double *p_i, double *q_r, double *q_i, double *v_r, double *v_i) { /* evaluates a polynomial p at s by the horner recurrence * placing the partial sums in q and the computed value in v_. */ int i; double t; q_r[0] = p_r[0]; q_i[0] = p_i[0]; *v_r = q_r[0]; *v_i = q_i[0]; for (i = 1; i < n; i++) { t = *v_r * s_r - *v_i * s_i + p_r[i]; q_i[i] = *v_i = *v_r * s_i + *v_i * s_r + p_i[i]; q_r[i] = *v_r = t; } } static double errev(int n, double *qr, double *qi, double ms, double mp, double a_re, double m_re) { /* bounds the error in evaluating the polynomial by the horner * recurrence. 
* * qr,qi - the partial sum vectors * ms - modulus of the point * mp - modulus of polynomial value * a_re,m_re - error bounds on complex addition and multiplication */ double e; int i; e = hypot(qr[0], qi[0]) * m_re / (a_re + m_re); for (i=0; i < n; i++) e = e*ms + hypot(qr[i], qi[i]); return e * (a_re + m_re) - mp * m_re; } static double cpoly_cauchy(int n, double *pot, double *q) { /* Computes a lower bound on the moduli of the zeros of a polynomial * pot[1:nn] is the modulus of the coefficients. */ double f, x, delf, dx, xm; int i, n1 = n - 1; pot[n1] = -pot[n1]; /* compute upper estimate of bound. */ x = exp((log(-pot[n1]) - log(pot[0])) / (double) n1); /* if newton step at the origin is better, use it. */ if (pot[n1-1] != 0.) { xm = -pot[n1] / pot[n1-1]; if (xm < x) x = xm; } /* chop the interval (0,x) unitl f le 0. */ for(;;) { xm = x * 0.1; f = pot[0]; for (i = 1; i < n; i++) f = f * xm + pot[i]; if (f <= 0.0) { break; } x = xm; } dx = x; /* do Newton iteration until x converges to two decimal places. */ while (fabs(dx / x) > 0.005) { q[0] = pot[0]; for(i = 1; i < n; i++) q[i] = q[i-1] * x + pot[i]; f = q[n1]; delf = q[0]; for(i = 1; i < n1; i++) delf = delf * x + q[i]; dx = f / delf; x -= dx; } return x; } static double cpoly_scale(int n, double *pot, double eps, double BIG, double small, double base) { /* Returns a scale factor to multiply the coefficients of the polynomial. * The scaling is done to avoid overflow and to avoid * undetected underflow interfering with the convergence criterion. * The factor is a power of the base. * pot[1:n] : modulus of coefficients of p * eps,BIG, * small,base - constants describing the floating point arithmetic. */ int i, ell; double x, high, sc, lo, min_, max_; /* find largest and smallest moduli of coefficients. */ high = sqrt(BIG); lo = small / eps; max_ = 0.; min_ = BIG; for (i = 0; i < n; i++) { x = pot[i]; if (x > max_) max_ = x; if (x != 0. && x < min_) min_ = x; } /* scale only if there are very large or very small components. */ if (min_ < lo || max_ > high) { x = lo / min_; if (x <= 1.) sc = 1. / (sqrt(max_) * sqrt(min_)); else { sc = x; if (BIG / sc > max_) sc = 1.0; } ell = (int) (log(sc) / log(base) + 0.5); return R_pow_di(base, ell); } else return 1.0; } static void cdivid(double ar, double ai, double br, double bi, double *cr, double *ci) { /* complex division c = a/b, i.e., (cr +i*ci) = (ar +i*ai) / (br +i*bi), avoiding overflow. */ double d, r; if (br == 0. && bi == 0.) { /* division by zero, c = infinity. 
*/ *cr = *ci = R_PosInf; } else if (fabs(br) >= fabs(bi)) { r = bi / br; d = br + r * bi; *cr = (ar + ai * r) / d; *ci = (ai - ar * r) / d; } else { r = br / bi; d = bi + r * br; *cr = (ar * r + ai) / d; *ci = (ai * r - ar) / d; } } /* static double cpoly_cmod(double *r, double *i) * --> replaced by hypot() everywhere */ forecast/src/calcTBATS.cpp0000644000176200001440000000231114003673410015046 0ustar liggesusers#include "calcBATS.h" using namespace Rcpp ; SEXP calcTBATSFaster(SEXP ys, SEXP yHats, SEXP wTransposes, SEXP Fs, SEXP xs, SEXP gs, SEXP es, SEXP xNought_s) { BEGIN_RCPP NumericMatrix yr(ys); NumericMatrix yHatr(yHats); NumericMatrix wTransposer(wTransposes); NumericMatrix Fr(Fs); NumericMatrix xr(xs); NumericMatrix gr(gs); NumericMatrix er(es); NumericMatrix xNought_r(xNought_s); arma::mat y(yr.begin(), yr.nrow(), yr.ncol(), false); arma::mat yHat(yHatr.begin(), yHatr.nrow(), yHatr.ncol(), false); arma::mat wTranspose(wTransposer.begin(), wTransposer.nrow(), wTransposer.ncol(), false); arma::mat F(Fr.begin(), Fr.nrow(), Fr.ncol(), false); arma::mat x(xr.begin(), xr.nrow(), xr.ncol(), false); arma::mat g(gr.begin(), gr.nrow(), gr.ncol(), false); arma::mat e(er.begin(), er.nrow(), er.ncol(), false); arma::mat xNought(xNought_r.begin(), xNought_r.nrow(), xNought_r.ncol(), false); yHat.col(0) = wTranspose * xNought; e(0,0) = y(0, 0) - yHat(0, 0); x.col(0) = F * xNought + g * e(0,0); for(int t = 1; t < yr.ncol(); t++) { yHat.col(t) = wTranspose * x.col((t-1)); e(0,t) = y(0, t) - yHat(0, t); x.col(t) = F * x.col((t-1)) + g * e(0,t); } return R_NilValue; END_RCPP } forecast/src/etsTargetFunction.h0000644000176200001440000000267114003673410016474 0ustar liggesusers#include #include extern "C" { void etscalc(double *, int *, double *, int *, int *, int *, int *, double *, double *, double *, double *, double *, double *, double *, int *); void cpolyroot(double *opr, double *opi, int *degree, double *zeror, double *zeroi, Rboolean *fail); } class EtsTargetFunction { public: void eval(const double* p_var, int p_var_length); void init(std::vector & p_y, int p_nstate, int p_errortype, int p_trendtype, int p_seasontype, bool p_damped, std::vector & p_lower, std::vector & p_upper, std::string p_opt_crit, int p_nmse, std::string p_bounds, int p_m, bool p_optAlpha, bool p_optBeta, bool p_optGamma, bool p_optPhi, bool p_givenAlpha, bool p_givenBeta, bool p_givenGamma, bool p_givenPhi, double alpha, double beta, double gamma, double phi); double getObjVal() { return(objval); }; private: bool check_params(); bool admissible(); std::vector par; std::vector y; int nstate; int errortype; int trendtype; int seasontype; bool damped; std::vector par_noopt; std::vector lower; std::vector upper; std::string opt_crit; int nmse; std::string bounds; int m; int n; std::vector state; double alpha, beta, gamma, phi; std::vector e; std::vector amse; double lik, objval; bool optAlpha, optBeta, optGamma, optPhi, givenAlpha, givenBeta, givenGamma, givenPhi; }; forecast/src/makeBATSMatrices.cpp0000644000176200001440000001165214003673410016435 0ustar liggesusers#include "calcBATS.h" using namespace Rcpp ; SEXP makeBATSWMatrix(SEXP smallPhi_s, SEXP sPeriods_s, SEXP arCoefs_s, SEXP maCoefs_s) { BEGIN_RCPP double *smallPhi, *arCoefs, *maCoefs; int *seasonalPeriods; int adjustPhi = 0; R_len_t numCols = 1, numSeasonal = 0, p = 0, q = 0; int lengthSeasonal = 0; if(!Rf_isNull(smallPhi_s)) { smallPhi = REAL(smallPhi_s); adjustPhi = 1; numCols = numCols + 1; } if(!Rf_isNull(sPeriods_s)) { seasonalPeriods = INTEGER(sPeriods_s); 
numSeasonal = LENGTH(sPeriods_s); for(R_len_t s = 0; s < numSeasonal; s++) { lengthSeasonal = lengthSeasonal + seasonalPeriods[s]; } numCols = numCols + lengthSeasonal; } else { lengthSeasonal = 0; } if(!Rf_isNull(arCoefs_s)) { arCoefs = REAL(arCoefs_s); p = LENGTH(arCoefs_s); numCols = numCols + p; } if(!Rf_isNull(maCoefs_s)) { maCoefs = REAL(maCoefs_s); q = LENGTH(maCoefs_s); numCols = numCols + q; } NumericMatrix wTranspose_r(1, numCols); arma::mat wTranspose(wTranspose_r.begin(), wTranspose_r.nrow(), wTranspose_r.ncol(), false); if(!Rf_isNull(sPeriods_s)) { wTranspose.zeros(); int position = adjustPhi; for(R_len_t s = 0; s < numSeasonal; s++) { position = position + seasonalPeriods[s]; wTranspose(0,position) = 1; } } wTranspose(0,0) = 1; if(adjustPhi == 1) { wTranspose(0,1) = *smallPhi; } if(!Rf_isNull(arCoefs_s)) { for(R_len_t i = 1; i <= p; i++) { wTranspose(0,(adjustPhi + lengthSeasonal +i)) = arCoefs[(i-1)]; } } if(!Rf_isNull(maCoefs_s)) { for(R_len_t i = 1; i <= q; i++) { wTranspose(0,(adjustPhi + lengthSeasonal + p + i)) = maCoefs[(i-1)]; } } arma::mat w = arma::trans(wTranspose); smallPhi = 0; arCoefs = 0; maCoefs = 0; seasonalPeriods = 0; return List::create( Named("w") = w, Named("w.transpose") = wTranspose ); END_RCPP } SEXP makeBATSGMatrix(SEXP alpha_s, SEXP beta_s, SEXP gammaVector_s, SEXP seasonalPeriods_s, SEXP p_s, SEXP q_s) { BEGIN_RCPP double *gammaVector; int *seasonalPeriods, *p, *q; int numCols, gammaLength = 0; int adjustBeta = 0; p = INTEGER(p_s); q = INTEGER(q_s); numCols = 1 + *p + *q; if(!Rf_isNull(beta_s)) { numCols = numCols + 1; adjustBeta = 1; } //Find the length of the gamma/seasonal bit if((!Rf_isNull(gammaVector_s))&&(!Rf_isNull(seasonalPeriods_s))) { gammaVector = REAL(gammaVector_s); seasonalPeriods = INTEGER(seasonalPeriods_s); for(R_len_t i = 0; i < LENGTH(seasonalPeriods_s); i++) { gammaLength = gammaLength + seasonalPeriods[i]; } numCols = numCols + gammaLength; } else { gammaLength = 0; } NumericMatrix gTranspose_r(1, numCols); arma::mat gTranspose(gTranspose_r.begin(), gTranspose_r.nrow(), gTranspose_r.ncol(), false); gTranspose.zeros(); gTranspose(0,0) = REAL(alpha_s)[0]; if(!Rf_isNull(beta_s)) { gTranspose(0,1) = REAL(beta_s)[0]; } //Copy the gamma/seasonal bits if((!Rf_isNull(gammaVector_s))&&(!Rf_isNull(seasonalPeriods_s))) { int position = adjustBeta + 1; gTranspose(0, position) = gammaVector[0]; if(LENGTH(gammaVector_s) > 1) { for(R_len_t s = 0; s < (LENGTH(seasonalPeriods_s)-1); s++) { position = position + seasonalPeriods[s]; gTranspose(0, position) = gammaVector[(s+1)]; } } } if(*p != 0) { gTranspose(0, (adjustBeta+gammaLength+1)) = 1; } if(*q != 0) { gTranspose(0, (adjustBeta+gammaLength+ *p +1)) = 1; } arma::mat g(arma::trans(gTranspose)); seasonalPeriods = 0; p = 0; q = 0; gammaVector = 0; if((!Rf_isNull(gammaVector_s))&&(!Rf_isNull(seasonalPeriods_s))) { arma::mat gammaBold = gTranspose.cols((1+adjustBeta), (adjustBeta+gammaLength)); return List::create( Named("g") = g, Named("g.transpose") = gTranspose, Named("gamma.bold.matrix") = gammaBold ); } else { return List::create( Named("g") = g, Named("g.transpose") = gTranspose, Named("gamma.bold.matrix") = R_NilValue ); } END_RCPP } /* SEXP makeFMatrix(SEXP alpha_s, SEXP beta_s, SEXP smallPhi_s, SEXP seasonalPeriods_s, SEXP gammaBoldMatrix_s, SEXP arCoefs_s, SEXP maCoefs_s) { BEGIN_RCPP NumericMatrix alpha_r(alpha_s); if(!Rf_isNull(beta_s)) { NumericMatrix beta_r(beta_s); bool indBeta = true; } else { bool indBeta = false; } if(!Rf_isNull(smallPhi_s)) { NumericMatrix 
smallPhi_r(smallPhi_s); bool indSmallPhi = true; } else { bool indSmallPhi = false; } if(!Rf_isNull(seasonalPeriods_s)) { NumericMatrix seasonalPeriods_r(seasonalPeriods_s); bool indSeasonalPeriods = true; } else { bool indSeasonalPeriods = false; } if(!Rf_isNull(gammaBoldMatrix_s)) { NumericMatrix gammaBoldMatrix_r(gammaBoldMatrix_s); bool indGammaBoldMatrix = true; } else { bool indGammaBoldMatrix = false; } if(!Rf_isNull(arCoefs_s)) { NumericMatrix arCoefs_r(arCoefs_s); bool indArCoefs = true; } else { bool indArCoefs = false; } if(!Rf_isNull(maCoefs_s)) { NumericMatrix maCoefs_r(maCoefs_s); bool indMaCoefs = true; } else { bool indMaCoefs = false; } arma::mat END_RCPP } */ forecast/src/updateMatrices.cpp0000644000176200001440000001127114003673410016325 0ustar liggesusers/* * updateMatrices.cpp * * Created on: 03/11/2011 * Author: srazbash */ #include "calcBATS.h" using namespace Rcpp ; SEXP updateFMatrix(SEXP F_s, SEXP smallPhi_s, SEXP alpha_s, SEXP beta_s, SEXP gammaBold_s, SEXP ar_s, SEXP ma_s, SEXP tau_s) { BEGIN_RCPP NumericMatrix F_r(F_s); arma::mat F(F_r.begin(), F_r.nrow(), F_r.ncol(), false); double *beta, *alpha = &REAL(alpha_s)[0]; int *tau, p, q, betaAdjust; int zero = 0; if(!Rf_isNull(gammaBold_s)) { tau = &INTEGER(tau_s)[0]; } else { tau = &zero; } if(!Rf_isNull(beta_s)) { beta = &REAL(beta_s)[0]; double *smallPhi = &REAL(smallPhi_s)[0]; F(0,1) = *smallPhi; F(1,1) = *smallPhi; betaAdjust = 1; } else { betaAdjust = 0; } if(!Rf_isNull(ar_s)) { //Rprintf("before arma::mat ar\n"); NumericMatrix ar_r(ar_s); arma::mat ar(ar_r.begin(), ar_r.nrow(), ar_r.ncol(), false); //Rprintf("after arma::mat ar\n"); p = ar.n_cols; //Rprintf("line-a-before\n"); F.submat(0,(betaAdjust+ *tau+1),0,(betaAdjust+ *tau+p)) = *alpha * ar; //Rprintf("line-a-after\n"); if(betaAdjust == 1) { //Rprintf("line-b-before\n"); F.submat(1,(betaAdjust+ *tau+1),1,(betaAdjust+ *tau+p)) = *beta * ar; //Rprintf("line-b-after\n"); } if(*tau > 0) { //Rprintf("la\n"); NumericMatrix gammaBold_r(gammaBold_s); //Rprintf("la-2\n"); arma::mat gammaBold(gammaBold_r.begin(), gammaBold_r.nrow(), gammaBold_r.ncol(), false); //Rprintf("la-3\n"); //arma::mat gammaBold = as(gammaBold_s); arma::mat B = trans(gammaBold) * ar; //Rprintf("line-c-before\n"); F.submat((1+betaAdjust),(betaAdjust+ *tau+1), (betaAdjust+ *tau), (betaAdjust+ *tau+p)) = B; //Rprintf("line-c-after\n"); } //Rprintf("line-d-before\n"); F.submat((betaAdjust+ *tau+1),(betaAdjust+ *tau+1),(betaAdjust+ *tau+1),(betaAdjust+ *tau+p)) = ar; //Rprintf("line-d-after\n"); } else { p = 0; } if(!Rf_isNull(ma_s)) { NumericMatrix ma_r(ma_s); arma::mat ma(ma_r.begin(), ma_r.nrow(), ma_r.ncol(), false); q = ma.n_cols; //Rprintf("one-before\n"); F.submat(0,(betaAdjust+ *tau+p+1),0,(betaAdjust+ *tau+p+q)) = *alpha * ma; //Rprintf("one-after\n"); if(betaAdjust == 1) { //Rprintf("two-before\n"); F.submat(1,(betaAdjust+ *tau+p+1),1,(betaAdjust+ *tau+p+q)) = *beta * ma; ///Rprintf("two-after\n"); } if(*tau > 0) { //arma::mat gammaBold = as(gammaBold_s); NumericMatrix gammaBold_r(gammaBold_s); arma::mat gammaBold(gammaBold_r.begin(), gammaBold_r.nrow(), gammaBold_r.ncol(), false); arma::mat C = trans(gammaBold) * ma; //Rprintf("three-before\n"); F.submat((1+betaAdjust),(betaAdjust+ *tau+p+1), (betaAdjust+ *tau), (betaAdjust+ *tau+p+q)) = C; //Rprintf("three-after\n"); } if(!Rf_isNull(ar_s)) { //Rprintf("four-before\n"); F.submat((betaAdjust+ *tau+1), (betaAdjust+ *tau+p+1), (betaAdjust+ *tau+1), (betaAdjust+ *tau+p+q)) = ma; //Rprintf("four-after\n"); } } else { q = 0; } return 
R_NilValue; END_RCPP } SEXP updateWtransposeMatrix(SEXP wTranspose_s, SEXP smallPhi_s, SEXP tau_s, SEXP arCoefs_s, SEXP maCoefs_s, SEXP p_s, SEXP q_s) { BEGIN_RCPP NumericMatrix wTranspose(wTranspose_s); double *arCoefs, *maCoefs; int *p, *q, *tau, adjBeta = 0; p = &INTEGER(p_s)[0]; q = &INTEGER(q_s)[0]; tau = &INTEGER(tau_s)[0]; if(!Rf_isNull(smallPhi_s)) { adjBeta = 1; wTranspose(0,1) = REAL(smallPhi_s)[0]; } if(*p > 0) { arCoefs = REAL(arCoefs_s); for(int i = 1; i <= *p; i++) { wTranspose(0,(adjBeta + *tau + i)) = arCoefs[(i - 1)]; } if(*q > 0) { maCoefs = REAL(maCoefs_s); for(int i = 1; i <= *q; i++) { wTranspose(0,(adjBeta + *tau + *p + i)) = maCoefs[(i - 1)]; } } } else if(*q > 0) { maCoefs = REAL(maCoefs_s); for(int i = 1; i <= *q; i++) { wTranspose(0,(adjBeta + *tau + i)) = maCoefs[(i - 1)]; } } return R_NilValue; END_RCPP } SEXP updateGMatrix(SEXP g_s, SEXP gammaBold_s, SEXP alpha_s, SEXP beta_s, SEXP gammaVector_s, SEXP seasonalPeriods_s) { BEGIN_RCPP int adjBeta = 0, *seasonalPeriods; double *gammaVector; NumericMatrix g(g_s); g(0,0) = REAL(alpha_s)[0]; if(!Rf_isNull(beta_s)) { g(1,0) = REAL(beta_s)[0]; adjBeta = 1; } if((!Rf_isNull(gammaVector_s))&&(!Rf_isNull(seasonalPeriods_s))) { NumericMatrix gammaBold(gammaBold_s); seasonalPeriods = INTEGER(seasonalPeriods_s); gammaVector = REAL(gammaVector_s); int position = adjBeta + 1; int bPos = 0; gammaBold(0,bPos) = gammaVector[0]; g(position, 0) = gammaVector[0]; if(LENGTH(gammaVector_s) > 1) { for(R_len_t s = 0; s < (LENGTH(seasonalPeriods_s)-1); s++) { position = position + seasonalPeriods[s]; bPos = bPos + seasonalPeriods[s]; g(position, 0) = gammaVector[(s+1)]; } } } return R_NilValue; END_RCPP } forecast/src/etsTargetFunction.cpp0000644000176200001440000001550014003673410017022 0ustar liggesusers#include #include //for isnan, math.h is needed //#include #include "etsTargetFunction.h" #include void EtsTargetFunction::init(std::vector & p_y, int p_nstate, int p_errortype, int p_trendtype, int p_seasontype, bool p_damped, std::vector & p_lower, std::vector & p_upper, std::string p_opt_crit, int p_nmse, std::string p_bounds, int p_m, bool p_optAlpha, bool p_optBeta, bool p_optGamma, bool p_optPhi, bool p_givenAlpha, bool p_givenBeta, bool p_givenGamma, bool p_givenPhi, double alpha, double beta, double gamma, double phi) { this->y = p_y; this->n = this->y.size(); this->nstate = p_nstate; this->errortype = p_errortype; this->trendtype = p_trendtype; this->seasontype = p_seasontype; this->damped = p_damped; this->lower = p_lower; this->upper = p_upper; this->opt_crit = p_opt_crit; this->nmse = p_nmse; this->bounds = p_bounds; this->m = p_m; this->optAlpha = p_optAlpha; this->optBeta = p_optBeta; this->optGamma = p_optGamma; this->optPhi = p_optPhi; this->givenAlpha = p_givenAlpha; this->givenBeta = p_givenBeta; this->givenGamma = p_givenGamma; this->givenPhi = p_givenPhi; /* Rprintf("optAlpha: %d\n", optAlpha); Rprintf("optBeta: %d\n", optBeta); Rprintf("optGamma: %d\n", optGamma); Rprintf("optPhi: %d\n", optPhi); Rprintf("givenAlpha: %d\n", givenAlpha); Rprintf("givenBeta: %d\n", givenBeta); Rprintf("givenGamma: %d\n", givenGamma); Rprintf("givenPhi: %d\n", givenPhi); */ this->alpha = alpha; this->beta = beta; this->gamma = gamma; this->phi = phi; this->lik = 0; this->objval = 0; // for(int i=0; i < 10; i++) this->amse.push_back(0); // for(int i=0; i < n; i++) this->e.push_back(0); this->amse.resize(30, 0); this->e.resize(n, 0); } void EtsTargetFunction::eval(const double* p_par, int p_par_length) { bool equal=true; // ---------show 
params---------- // Rprintf("par: "); // for(int j=0;j < p_par_length;j++) { // Rprintf("%f ", p_par[j]); // } // Rprintf(" objval: %f\n", this->objval); //Rprintf("\n"); // ---------show params---------- // Check if the parameter configuration has changed, if not, just return. if(p_par_length != this->par.size()) { equal=false; } else { for(int j=0;j < p_par_length;j++) { if(p_par[j] != this->par[j]) { equal=false; break; } } } if(equal) return; this->par.clear(); for(int j=0;j < p_par_length;j++) { this->par.push_back(p_par[j]); } int j=0; if(optAlpha) this->alpha = par[j++]; if(optBeta) this->beta = par[j++]; if(optGamma) this->gamma = par[j++]; if(optPhi) this->phi = par[j++]; if(!this->check_params()) { this->objval = R_PosInf; return; } this->state.clear(); for(int i=par.size()-nstate; i < par.size(); i++) { this->state.push_back(par[i]); } // Add extra state if(seasontype!=0) {//"N"=0, "M"=2 //init.state <- c(init.state, m*(seasontype==2) - sum(init.state[(2+(trendtype!=0)):nstate])) double sum=0; for(int i=(1+((trendtype!=0) ? 1 : 0));iobjval = R_PosInf; return; } // seas.states <- init.state[-(1:(1+(trendtype!=0)))] //if(min(seas.states) < 0) // return(1e8) }; int p = state.size(); for(int i=0; i <= p*this->y.size(); i++) state.push_back(0); etscalc(&this->y[0], &this->n, &this->state[0], &this->m, &this->errortype, &this->trendtype, &this->seasontype, &this->alpha, &this->beta, &this->gamma, &this->phi, &this->e[0], &this->lik, &this->amse[0], &this->nmse); // Avoid perfect fits if (this->lik < -1e10) this->lik = -1e10; // isnan() is a C99 function //if (isnan(this->lik)) this->lik = 1e8; if (ISNAN(this->lik)) this->lik = R_PosInf; if(fabs(this->lik+99999) < 1e-7) this->lik = R_PosInf; if(this->opt_crit=="lik") this->objval = this->lik; else if(this->opt_crit=="mse") this->objval = this->amse[0]; else if(this->opt_crit=="amse") { //return(mean(e$amse[1:nmse])) double mean=0; for(int i=0;i < this->nmse;i++) { mean+=amse[i]/this->nmse; } this->objval=mean; } else if(this->opt_crit=="sigma") { //return(mean(e$e^2)) double mean=0; int ne=e.size(); for(int i=0;iobjval=mean; } else if(this->opt_crit=="mae") { //return(mean(abs(e$e))) double mean=0; int ne=e.size(); for(int i=0;iobjval=mean; } } bool EtsTargetFunction::check_params() { if(bounds != "admissible") { if(optAlpha) { if(alpha < lower[0] || alpha > upper[0]) return(false); } if(optBeta) { if(beta < lower[1] || beta > alpha || beta > upper[1]) return(false); } if(optPhi) { if(phi < lower[3] || phi > upper[3]) return(false); } if(optGamma) { if(gamma < lower[2] || gamma > 1-alpha || gamma > upper[2]) return(false); } } if(bounds != "usual") { if(!admissible()) return(false); } return(TRUE); } bool EtsTargetFunction::admissible() { if(phi < 0 || phi > 1+1e-8) return(false); //If gamma was set by the user or it is optimized, the bounds need to be enforced if(!optGamma && !givenGamma) { if(alpha < 1-1/phi || alpha > 1+1/phi) return(false); if(optBeta || givenBeta) { if(beta < alpha * (phi-1) || beta > (1+phi)*(2-alpha)) return(false); } } else if(m > 1) //Seasonal model { if(!optBeta && !givenBeta) beta = 0; //max(1-1/phi-alpha,0) double d = 1-1/phi-alpha; if(gamma < ((d > 0) ? d : 0) || gamma > 1+1/phi-alpha) return(false); if(alpha < 1-1/phi-gamma*(1-m+phi+phi*m)/(2*phi*m)) return(false); if(beta < -(1-phi)*(gamma/m+alpha)) return(false); // End of easy tests. 
Now use characteristic equation std::vector opr; opr.push_back(1); opr.push_back(alpha+beta-phi); for(int i=0;i opi; opi.resize(opr.size(),0); std::vector zeror(degree); std::vector zeroi(degree); Rboolean fail; cpolyroot(&opr[0], &opi[0], °ree, &zeror[0], &zeroi[0], &fail); double max = 0; for(int i=0;imax) max = abs_val; } //Rprintf("maxpolyroot: %f\n", max); if(max > 1+1e-10) return(false); // P <- c(phi*(1-alpha-gamma),alpha+beta-alpha*phi+gamma-1,rep(alpha+beta-alpha*phi,m-2),(alpha+beta-phi),1) // roots <- polyroot(P) // if(max(abs(roots)) > 1+1e-10) return(false); } //Passed all tests return(true); } forecast/vignettes/0000755000176200001440000000000014166724666014112 5ustar liggesusersforecast/vignettes/jsslogo.jpg0000644000176200001440000005221314003673410016253 0ustar liggesusersJFIFC    $.' ",#(7),01444'9=82<.342C  2!!22222222222222222222222222222222222222222222222222" X*,QY0)-e+s+YJƕ.u2j!Qk-fg!W4^VT\ʕs1GBJ ¥p%%ZKPM-$(JsbJNXDY`V\A KdJaؔ,0MD L"rLMldQ)J@K$W)2hW?Ͽj6C(@jD*jR iSf"LJՅVXh 0aQr,6AX$BVXA¶6MXtE1$*Kyqq!X4@*e LQ !ȹ4#HHQ  HYaWyyތgˢpmcĞ5u˦{/=kiG6j!"!_0;6|<;ϥ8I,+V\rzӏ};q{G Im4@Y)A&f_+=K,նD5ԸNfnjzypOJYQoDԻeQb>-RZeWwq:8og '}Cy= k<`LQcIhY7edQZշ6Wi]JGW:DZ[Oܻ<=rmEԪ? yԬȃ%e_QOeBF.cMY \?ZrCf{g? DX!8,^Yt1 q1 dúVIK\-űnM; Pjs z{9oA01˛`+ ܷLp@'xgDa_qOL;]Cܠ'[Vn>TD͊rtGS]1}ܨ4v `E]6)4 2w`QE@lK icDvj1#b5rU^vNcn rRoXp(lc$̫RJiz952kۿsLnG_nT%gWeH}Y3r[m3)R [60M988 i &s sc/1svdt"n'DGaꇠ9ro5!ȏD7%! 01@A"2QP?#*TR *TR # 4BJ(PB *TB *TRJ v cH 4uIsq_$Ev8DrAd-23|?T?I24h"Ǜzc#L|u?g% !01@APQ"2?$I$I$IbĖ$I$I$I$,Xbŋ,Xbŋ,I$IbĖ,8bŋ3 03 .\.\r,\bŋõ,'IHRܯdOp'TS!APR(`s7NQb.zGt38#7G zmAW'j\F*7q9Wjo2!1 A"02PQaq@B`Rbpr?+BfQzBoЛFPpҿTB*%l\}27.~00䕈}D"=ڇpDҀ+ij[pq?%U%4qf72[#xiE B hCa@Wwd70\K-J5uq-U_{ğF7a@E3-]^=GvRB#3p+ YW~Wa#Mm "Nі."hBl4GRf}OqOr8P>$|!>(k-n\?9bRlL-"_ aEƝUZ.MN"I\+E%USZ-Da$f#"z5WWlH&1vdHPcq RhAppQL{# NEƸӫJ g_DH"$*~Z[!lWAGj++++bVUX+~ZSa+e碸W V:'3 Ak(rϤ##ߐEY hNoԨH{!6;ꦃЈ[4J~0>"=9Gzs&cAh[%pCeeiP'c%o-2y+N@=VHUVT*q.%b]Z e Q6G%7X/ uW 8ʦow[+io1SJ ˅p.fF{Ax Y7t"(y,BɊTiYZG‹B,nJ/X"U2|?k.­+{~ۃA[S8 y uE;V>W;GhN.KU}/?N?= :">}!.˶X|frsb]NZdD&$O(2MCŊ&T0dJrpO&UX?Z"f!ȘiCYUl>lrBxSYXv"2PW#e# hyR Iy +xMdzwUeԧ:T.T]PMn-C(=tԾF((,H__;*[U<(ByK~*!1A Qaq0@P?!B ^GN fBD4h-[+z=zzOS=tz=:އ}/[§xcJFB{IEA@""tLiHDBh]Н$L`..h"Ү7-35HJCa:Ce&HBiKNM"jBDiN.1h%]D hٹBBXBt! ]Mi4G]cd]$ J{tz.%z.aK0`eo'-^&}ae1\shٍY5BZ9-f/ f[]SҍŦRL6Z 㫓2n#N-qGNc>om6l&c^0 jوp Nt qނjt\!Gҙ"mm>Gitl%.#MX0>y>"2DK"Bd4QR b+琢.l4pg2ۆrcb&Qɳ*'Qrn&]6r,gy iPRwdd9<X'֡4UOYBI2.IOrۑ9\AEG،8tǸmJp?R ?pFpՃ5vjh-fy))p]]73;E]b+7 䳱ї`٩Ļ n9p65r&cKMoIʒ v#DoHOpGeH{ 6>½;"""K2hJ_xz R6D19$;WX7h7b{a c JixSt0{l>Q}Dnc,#`JNcgv6 )n4^RoBa`&{ Ȣv<8ݖW,qO"iq3+ -?Mi mAo 썣L!8)nt|gf_})OrITH[pY7ß{W3[7&O}^î!F (IY/속+Lk6V &a'W,"xһF- q m8;8Yigᾆy~cm{;!Gqx)@?O0[!Y ʼn S; ЛUৗbײxtB ;6W٘=d'A{kLՐș G7{ ̃6r:{"po=Oc<ɞzYewh];Sy:ٔrZfLǯ$!4OJ{= mRR,2RxNbm1]7Mx`Kâ~M^$hwG$Z/~ ^tRn3(mz_~ Rztz)zoNQ /])F]nKJRMҢ÷R+ZzOSrRJRKSQ]礥)K(D2/mM%ʄ~]M#> *v4CEϹۃH7Tڃ!({zomݘ{4s/ţQÆlRs [h<WTb!(8 G7dfp*^^dQ蟏FdcMgȾtzZHff i8/;{phVe=&A:%0XA.h&+6'q{ld~4VI2_-bcyM3l˶H[zH< **V% +X jg9HkFY3Vo>Cis?|7T$]m<7&\[юy LJO~CwGp1M;F avޕ\f!%M&&[; D]n`1 M EKy̱4IFe YB=|=h]Ɣ1xCLb3GoÚ3%¾>SA@==O#cwCSck6A1Mu)*J; F22UXYTk-oݦtE9e;N\y)w䗐E+i2-D VnM [åH&<.E91.@n3SΈ>5o/ 1i9 )/`]<#@e b-}&XHB7 %R0|v\שL+C*6F7y3c5G@ⳀeگZƔvJE/>XK&/([ŭdZѫxQ^dO׌k0ZrpN|Ri%SbwNe;+xD[aGo+S>Wpυ_XyA&YƧè(WTbYE7I7$1u']A]LADr_N.#$A}YM?/|=9Y̲ <}Ԙ0 I^KxLÍ=!n7$;=)LS|׌uִR =ae;.sQIz>MwB !f!{˱=͍ӵL{DU՘ 1䒵kܯKE890]ˏM׀l{3 bJ 8]t0sü]{~hM0ül ,)/Or-\,׬p,҃ ʈ>"-X qɍ|z 7 0 ! 10AQqa@?#dx!! FB2%ܲsBXbBH$$X5̸#`G+*..j!6@ֈ!;D|!x#$v6t(%6G ~ u$%)K" ;4XHw2 eBc2CRM+(f:}a~ !X.C?'ூMHK> @5C4B\RsbF%? 
4(qb3KQsJ>,6QV./ R4%P2H/F \ڿPCHOKׅ*%0ѱHddH*(HM m6l6b&8bk Yxj o6RluPl m#H8:ɗcm A#a}+ycBhc$_ Wgy!#.4=1<1RSlCV϶lKmk6?:N$BLA HuF:eCv;s/E# !1 0QA@aq?iQ$AA$&I$ £֌IOHO#ɲZER,QyPBQFv1˚R.S+Š 9 ;%ZH664cĭu6R[fҗXԴ4$7BLطIZ.-u68`KWҾ-QHI.Um?6,kBKQymҔWWx=Ҋ&!`fLh76!\M/Bᴑw;! "#DCK:_lm$6(E1:R:HkYn,”ĊA$xLV?xLBa^%983pSʫ@""-f0TBYqv" dE'5jZc{'pCAz8Qjg*4okubay\E:b"Į2Mo2 %b* z *P-`, 9jTQaH%dPQ,"ε.b~%RQb ]W#ܫ ,r\/ĶٝR-m` ) 5T}Ow}%tq ah+!]*%rΈzlJ-mqs=k-wR• PJ2:Ԡ =}KRmy&2xcǟQ8b-VÈ<@ܩ1LZܫlOQVtef~Xwrټ ` =CtaqopJwl"1YDERŞRH&D^+2ڭAZ`,ɛYtsW2䗥.RgIa70Vo Gn`Ѧ`2gP.S(0Vkw yki`G)H7 pbS=7(91IWdqf FVys1H&X<RWHs4J!Mh _XޘT.frb*DNmP+,0XjU`c)ܧ]B b Sl=17YY)YAlb PLY^~sf ?x`f2RerD5*0N̈́ uSjΊ=">[.gQ9gT?ysCpNoeAE#n\һ-E]喎92YԦ1KOqF^"Zb/D(tFټRÃ&?Xq,LKɠ_D{# ~EȏQH{-E&8jRnGQ(GJ7 oCV%H̾-NҸHĵN&ʕ^aF kT<1um>[S9Fc0@9e.Hlh RnRZ*Zc/΂| Y8  (;k ٭ψ=}%`k^=ƶOM st^:=x"PT;pl5Gpy7Dl6ˁ\Y_by/=-_Ț;>:G}v`ё#R mUObepeXANeb̅0ԲCxP912fߏ-V9p6jOE0 JYؕ RUQ=1`BvbP%0~۾uY@~rYE.0 ˔D-FƦ>ex24bE6YN UDV̥O/Y%[0f3B]s*>ȯIhg  QW5gͥ>j,x)K.8EiG>~^c@9+5Yl.(4s,X%q(B:.})ƚ}W;e5:dLw*r.*f#),] YBZ8KөUuj{KP+ aJWRp]^40Pi< 6'x-@[j̀1_:X76;e k:Doļ=],%_x ̭Ū)ɬc9 8n Eob 2@C7 ":%1o/xBGvp"mIOw=,9=KZ7jZV,/7Rd @sJha*w R])Q y+pna[{ E+ /;*ݜew`hk_4h~#[=nJcpy#51M72wTncMϼs$t:%J8V30YYS솆 V-|:bW0 JSLx]j@s3_jܷ.+>jfJ+1JĢ .` i\nSUjXj"\J;07pCb+4b] /2˗lK/3u/10B<lT^ߤ0B5DqޥoP7WaY{. GsC?O/RPXԸ4>mr2KbYԿ/Aľ]VFjb+y^%x/cx_SnzxD5//r}%1\|ܹĻu-ܸKE6L^qJ6:ƥ\LK[.-_ K\ 2HؽA/ Xĺ*nZ4<ŗ˗xKcpܾYD 8u- ľ|\?pj[,c/9̴̼^w,5i,R|+,JVfJ}=KSq/1m̼ ̫ń| PQqtyBqǤq Uٗc:1ՕuS"fҠ˶\Kcj- B<\JDNDX|*2ԻeˋrMT  uqk2ݘ7<`+$2n0Bk\V}^ pRy̫_qD4L$Yx.=sEBp̦v\|3#-imڔNJbvL/B#xLLj8`ORޥnAr/upKY.[.)|[6V^a8Z4ĹyE#`\ۈba[}"N[Bmfxw73 2;T@h9DLq *fn"BU%$TuPlbf$˹X'1Ǥ5:+)A_.[%ALm&YԮ-i}4e K lc/)`6Ge_8j/J=}(֠1ЇTpco J0l8KfQ'˷>n_3FtulE s2%q^!h~휵Z*&~j1u7`jlq5ϸtM&)p K9MbjT h.-W7[D8-O B^ƊO ̩LC@JhcEi mS/ TEQWG )>} HW~'eh!pG[`.ZK?P&_D((y>R K!(?S QDQ1d5Ej4?7ѵ  ؊Q3@Vs|9HYQ(^0L/`'b œ}=6.,lQ+6G%K)k:eL1C& "6lAŖV-j#uxo28b`KAcSs.ڍu EnT>@4Q}@Q-8#^H7$* cN@7>Qub=)D[wɖe]: ˬY !_2\,e  X' p4yawanw'=A3c @+}XWgsvçqNxG2GiR"6Ǒ7LNkS1L,]ԡudR}F& F5fx7iq娊Z4 ]WG y\f`e[Ʋ{Oډsυ@^_KLތ6F!g *-eH3㈊‹42qS|@T[ĨFgs |ns3)SY]0 @}* siHٱ W\L>[WZ8X.2J[rģ/&KPt#:MpqKf:OˁVJbC1X˾ekkB%7Q`s\J a\ĜfSQaD.G-~X3&2e!XUu\K%#2yrYPixuP,J)lh"yc[VKEl75-5#w[NŪ±8t^Kq.JS^ˊilauqNw5 So3Z,>%>;# .Cb7PElW N  1FSP/xk- !=հUDJ $[h8sEjj(Vd29aVPf!^0!V;!u_mXWf;Yg`@747>8cW[Lj+W(rH h86df5awq<`\\ir k`,{8!r!AVg*2& ͏2?hW&Ut)\ȍ)zHSm100۹HexC(򻎊1xUKCs)UNf';_ c P:|c=c՟:aKV= n12\į8{Ϧ/ ZW ~`R]KĀeLS+l\Dɉ\p%\dE $?:su,ʶ˹%ey5aYu,\UIX9q`fq ^5_0>>L DnPI0 U dX$̿wA8yT&pbp$>/DU7u.L _.ϣ fFe*(0Z/04##^\V*ka؍j)Lwy0UF!s"7I831O\@csS~?u1& c`kVޠ+r  `H^X]Dht keg/) k\!O!S%)+{3 o0_fJrZL08jb~#,ۢ + %f -&n:n?V`D1*|}?oLU#`^DibW$Ĕ(5ys<ͿUyȖS/)x>Z@hsa8~Qm?|3ZNGxP hu0y`ZuASi*FA!eaK<"0 Ꭷ,f0FSy8*xffa?'w38H!{@R; *%..L3Bq;6"omq wQ/ܩ㈗ Ƥ6Xn16]qb]:k_M)%LKmAt8UwzG<]8UT> a%KN.5S)qϬǘ#{#ċRߖ7cZZλ r,P`gpdF6Qʷw3l]7Mcp|LA8{ 7,1kuܩEoq9#-~ w>$'3ġ XnU 33p[2埪a_qM@A%B1'ƽˎRZgoY=EU1,*bq;ly=}Ͷ]gR5SdE*&_Shʜ|;kusTC~7QssN!Je.%k8SOforecast/vignettes/JSS-paper.bib0000644000176200001440000005324014003673410016314 0ustar liggesusers@STRING{advap = {Advances in Applied Probability}} @STRING{amath = {Annals of Mathematics}} @STRING{ams = {The Annals of Mathematical Statistics}} @STRING{amstat = {The American Statistician}} @STRING{annalap = {The Annals of Applied Probability}} @STRING{annalp = {The Annals of Probability}} @STRING{annals = {The Annals of Statistics}} @STRING{anneug = {Annals of Eugenics}} @STRING{anzjs = {Australian \& New Zealand Journal of Statistics}} @STRING{appstat = {Applied Statistics}} @STRING{ausjstat = {Australian Journal of Statistics}} @STRING{bioc = {Biometrics}} @STRING{bioj = {Biometrical Journal}} 
@STRING{biok = {Biometrika}} @STRING{chance = {Chance}} @STRING{cjs = {The Canadian Journal of Statistics}} @STRING{comms = {Communications in Statistics}} @STRING{commscs = {Communications in Statistics: Computation \& Simulation}} @STRING{commstm = {Communications in Statistics: Theory \& Methods}} @STRING{compstat = {Computational Statistics}} @STRING{csda = {Computational Statistics \& Data Analysis}} @STRING{debs = {Department of Econometrics \& Business Statistics, Monash University}} @STRING{ejor = {European Journal of Operational Research}} @STRING{ijf = {International Journal of Forecasting}} @STRING{isr = {International Statistical Review}} @STRING{jap = {Journal of Applied Probability}} @STRING{jas = {Journal of Applied Statistics}} @STRING{jasa = {Journal of the American Statistical Association}} @STRING{jcgs = {Journal of Computational \& Graphical Statistics}} @STRING{je = {Journal of Econometrics}} @STRING{jes = {Journal of Educational Statistics}} @STRING{jf = {Journal of Forecasting}} @STRING{jma = {Journal of Multivariate Analysis}} @STRING{jors = {Journal of the Operational Research Society}} @STRING{jos = {Journal of Official Statistics}} @STRING{jrssa = {Journal of the Royal Statistical Society A}} @STRING{jrssb = {Journal of the Royal Statistical Society B}} @STRING{jscs = {Journal of Statistical Computation \& Simulation}} @STRING{jspi = {Journal of Statistical Planning \& Inference}} @STRING{jtp = {Journal of Theoretical Probability}} @STRING{jtsa = {Journal of Time Series Analysis}} @STRING{mansci = {Management Science}} @STRING{psyka = {Psychometrika}} @STRING{ptrf = {Probability Theory \& Related Fields}} @STRING{sankhya = {Sankhy\={a}}} @STRING{sasj = {South African Statistical Journal}} @STRING{scandjs = {Scandinavian Journal of Statistics: Theory \& Applications}} @STRING{siamjssc = {SIAM Journal of Scientific \& Statistical Computing}} @STRING{jss = {Journal of Statistical Software}} @STRING{spl = {Statistics \& Probability Letters}} @STRING{statmed = {Statistics in Medicine}} @STRING{statsci = {Statistical Science}} @STRING{statsin = {Statistica Sinica}} @STRING{survmeth = {Survey Methodology}} @STRING{tech = {Technometrics}} @STRING{toap = {to appear}} @STRING{tpaa = {Theory of Probability \& its Applications}} @STRING{tstat = {The Statistician}} @BOOK{AM79, title = {Optimal Filtering}, publisher = {Prentice-Hall}, year = {1979}, author = {B. D. O. Anderson and J. B. Moore}, address = {Englewood Cliffs}, } @BOOK{Aoki87, title = {State Space Modeling of Time Series}, publisher = {Springer-Verlag}, year = {1987}, author = {Masanao Aoki}, address = {Berlin}, } @ARTICLE{Archibald90, author = {Blyth C. Archibald}, title = {Parameter Space of the {H}olt-{W}inters' Model}, journal = ijf, year = {1990}, volume = {6}, pages = {199--209}, fileno = {1151}, keywords = {Exponential smoothing; seasonal; coefficient choice; stability; evaluation}, pdf = {Archibald90.pdf}, } @ARTICLE{AN00, author = {V. Assimakopoulos and K. Nikolopoulos}, title = {The Theta Model: A Decomposition Approach to Forecasting}, journal = ijf, year = {2000}, volume = {16}, pages = {521-530}, fileno = {1047}, keywords = {M3-Competition; Time series; Univariate forecasting method}, } @BOOK{BOK05, title = {Forecasting, Time Series and Regression: An Applied Approach}, publisher = {Thomson Brooks/Cole}, year = {2005}, author = {B. L. Bowerman and R. T. O'Connell and Anne B. 
Koehler}, address = {Belmont CA}, } @BOOK{BDbook91, title = {Time Series: Theory and Methods}, publisher = {Springer-Verlag}, year = {1991}, author = {P. J. Brockwell and R. A Davis}, address = {New York}, edition = {2nd}, } @BOOK{BDbook91a, title = {Introduction to Time Series and Forecasting}, publisher = {John Wiley \& Sons}, year = {2002}, edition = {2nd}, author = {P.J. Brockwell and R.A. Davis}, } @ARTICLE{CH95, author = {F. Canova and B. E. Hansen}, title = {Are Seasonal Patterns Constant Over Time? {A} Test for Seasonal Stability}, journal = {Journal of Business and Economic Statistics}, year = {1995}, volume = {13}, pages = {237-252}, file = {CH95.pdf:CH95.pdf:PDF}, pdf = {CH95.pdf}, } @ARTICLE{CY91, author = {Chris Chatfield and Mohammad Yar}, title = {Prediction Intervals for Multiplicative {H}olt-{W}inters}, journal = ijf, year = {1991}, volume = {7}, pages = {31-37}, keywords = {Holt-Winters; Prediction intervals; Exponential smoothing}, } @ARTICLE{Croston72, author = {J. D. Croston}, title = {Forecasting and Stock Control for Intermittent Demands}, journal = {Operational Research Quarterly}, year = {1972}, volume = {23}, pages = {289--304}, number = {3}, pdf = {Croston72.pdf}, } @ARTICLE{DF81, author = {D. A. Dickey and W. A. Fuller}, title = {Likelihood Ratio Statistics for Autoregressive Time Series with a Unit Root}, journal = {Econometrica}, year = {1981}, volume = {49}, pages = {1057-1071}, } @BOOK{DKbook01, title = {Time Series Analysis by State Space Methods}, publisher = {Oxford University Press}, year = {2001}, author = {J Durbin and Siem J Koopman}, address = {Oxford}, } @ARTICLE{Gardner85, author = {Gardner, Jr, Everette S.}, title = {Exponential Smoothing: The State of the Art}, journal = jf, year = {1985}, volume = {4}, pages = {1-28}, keywords = {Bibliography; exponential smoothing; comparative methods; ARIMA; exponential smoothing; control charts; CUSUM; evaluation-forecasting monitoring systems; exponential smoothing; adaptive exponential smoothing-adaptive; coefficient choice; higher-order; review; theory seasonality-estimation; harmonics; tracking signal-methodology; use-inventory control}, } @ARTICLE{GM85, author = {Gardner, Jr, Everette S. and Ed McKenzie}, title = {Forecasting Trends in Time Series}, journal = mansci, year = {1985}, volume = {31}, pages = {1237-1246}, number = {10}, keywords = {Forecasting; time series}, } @TECHREPORT{Gomez98, author = {Victor G\'{o}mez}, title = {Automatic Model Identification in the Presence of Missing Observations and Outliers}, institution = {Ministerio de Econom{\'\i}a y Hacienda, Direcci{\'o}n General de An{\'a}lisis y Programaci{\'o}n Presupuestaria}, year = {1998}, type = {Working paper}, number = {D-98009}, pdf = {Gomez98.pdf}, } @TECHREPORT{TRAMOSEATS98, author = {Victor G\'{o}mez and Agust\'{i}n Maravall}, title = {Programs \pkg{TRAMO} and \pkg{SEATS}, Instructions for the Users}, institution = {Ministerio de Econom{\'\i}a y Hacienda, Direcci{\'o}n General de An{\'a}lisis y Programaci{\'o}n Presupuestaria}, year = {1998}, type = {Working paper}, number = {97001}, month = {June}, edition = {Beta version}, } @ARTICLE{ForecastPro00, author = {Robert L Goodrich}, title = {The \pkg{Forecast Pro} Methodology}, journal = ijf, year = {2000}, volume = {16}, pages = {533-535}, number = {4}, pdf = {ForecastPro00.pdf}, } @ARTICLE{HR82, author = {E. J. Hannan and J. 
Rissanen}, title = {Recursive Estimation of Mixed Autoregressive-Moving Average Order}, journal = biok, year = {1982}, volume = {69}, pages = {81-94}, number = {1}, keywords = {Autoregressive-moving average; best coding; martingale; recursive calculation; strong convergence; vector autoregression}, } @ARTICLE{Hendry97, author = {David F. Hendry}, title = {The Econometrics of Macroeconomic Forecasting}, journal = {The Economic Journal}, year = {1997}, volume = {107}, pages = {1330-1357.}, number = {444}, } @ARTICLE{HEGY90, author = {S. Hylleberg and R. Engle and C. Granger and B. Yoo}, title = {Seasonal Integration and Cointegration}, journal = {Journal of Econometrics}, year = {1990}, volume = {44}, pages = {215-238}, } @ARTICLE{Hyndman01, author = {Rob J Hyndman}, title = {It's Time To Move from `What' To `Why'---Comments on the {M3}-Competition}, journal = ijf, year = {2001}, volume = {17}, pages = {567-570}, number = {4}, keywords = {commentaries on the M3-competition}, } @MANUAL{forecast, title = {\pkg{forecast}: Forecasting Functions for Time Series}, author = {Rob J Hyndman}, year = {2008}, note = {\proglang{R}~package version~1.11}, url = {http://CRAN.R-project.org/package=forecasting}, } @MANUAL{fma, title = {\pkg{fma}: Data Sets from ``{F}orecasting: Methods and Applications'' By {M}akridakis, {W}heelwright \& {H}yndman (1998)}, author = {Rob J Hyndman}, year = {2008}, note = {\proglang{R}~package version~1.11}, url = {http://CRAN.R-project.org/package=forecasting}, } @MANUAL{expsmooth, title = {\pkg{expsmooth}: Data Sets from ``{F}orecasting with Exponential Smoothing'' by Hyndman, Koehler, Ord \& Snyder (2008)}, author = {Rob J Hyndman}, year = {2008}, note = {\proglang{R}~package version~1.11}, url = {http://CRAN.R-project.org/package=forecasting}, } @MANUAL{Mcomp, title = {\pkg{Mcomp}: Data from the {M}-Competitions}, author = {Rob J Hyndman}, year = {2008}, note = {\proglang{R}~package version~1.11}, url = { http://CRAN.R-project.org/package=forecasting}, } @ARTICLE{HAA08, author = {Rob J Hyndman and {Md} Akram and Blyth C Archibald}, title = {The Admissible Parameter Space for Exponential Smoothing Models}, journal = {Annals of the Institute of Statistical Mathematics}, year = {2008}, volume = {60}, number = {2}, pages = {407--426} } @ARTICLE{HB03, author = {Rob J Hyndman and Billah, Baki}, title = {Unmasking the {T}heta Method}, journal = ijf, year = {2003}, volume = {19}, pages = {287-290}, number = {2}, keywords = {Exponential smoothing; forecasting competitions; State space models}, } @ARTICLE{HKPB05, author = {Rob J Hyndman and Maxwell L. 
King and Pitrun, Ivet and Billah, Baki}, title = {Local Linear Forecasts Using Cubic Smoothing Splines}, journal = anzjs, year = {2005}, volume = {47}, pages = {87-99}, number = {1}, keywords = {ARIMA models; Exponential smoothing; Holt's local linear forecasts; Maximum likelihood estimation; non-parametric regression; smoothing splines; state-space model; stochastic trends}, } @ARTICLE{HK06, author = {Rob J Hyndman and Anne B Koehler}, title = {Another Look at Measures of Forecast Accuracy}, journal = ijf, year = {2006}, volume = {22}, pages = {679-688}, issue = {4}, } @ARTICLE{HK2008, author = {Rob J Hyndman and Yeasmin Khandakar}, title = {Automatic Time Series Forecasting: The Forecast Package for R}, journal = jss, year = {2008}, volume = {27}, issue = {3}, } @ARTICLE{HKOS05, author = {Rob J Hyndman and Anne B Koehler and J Keith Ord and Ralph D Snyder}, title = {Prediction Intervals for Exponential Smoothing Using Two New Classes of State Space Models}, journal = {Journal of Forecasting}, year = {2005}, volume = {24}, pages = {17-37}, } @BOOK{expsmooth08, title = {Forecasting with Exponential Smoothing: The State Space Approach}, publisher = {Springer-Verlag}, year = {2008}, author = {Rob J Hyndman and Anne B Koehler and J Keith Ord and Ralph D Snyder}, url = {http://www.exponentialsmoothing.net/}, } @ARTICLE{HKSG02, author = {Rob J Hyndman and Anne B Koehler and Ralph D Snyder and Simone Grose}, title = {A State Space Framework for Automatic Forecasting Using Exponential Smoothing Methods}, journal = ijf, year = {2002}, volume = {18}, pages = {439-454}, number = {3}, keywords = {Prediction intervals; State space models}, } @ARTICLE{shortseasonal, author = {Rob J Hyndman and Andrey V Kostenko}, title = {Minimum Sample Size Requirements for Seasonal Forecasting Models}, journal = {Foresight: The International Journal of Applied Forecasting}, year = {2007}, volume = {6}, pages = {12-15}, } @ARTICLE{KPSS92, author = {Denis Kwiatkowski and Peter C.B. Phillips and Peter Schmidt and Yongcheol Shin}, title = {Testing the Null Hypothesis of Stationarity Against the Alternative of a Unit Root}, journal = je, year = {1992}, volume = {54}, pages = {159-178}, } @ARTICLE{Liu89, author = {L. M. Liu}, title = {Identification of Seasonal {Arima} Models Using a Filtering Method}, journal = commstm, year = {1989}, volume = {18}, pages = {2279-2288}, keywords = {model identification, seasonal time series, ARIMA models, filtering, intermediary models, calendar variation, intervention, transfer function models}, } @ARTICLE{Mcomp82, author = {S. Makridakis and A. Anderson and R. Carbone and R. Fildes and M. Hibon and R. Lewandowski and J. Newton and E. Parzen and R. Winkler}, title = {The Accuracy of Extrapolation (Time Series) Methods: Results of a Forecasting Competition}, journal = jf, year = {1982}, volume = {1}, pages = {111-153}, keywords = {Forecasting; Time series; Evaluation; Accuracy; Comparison; Empirical Study}, } @ARTICLE{Metal82, author = {Spyros Makridakis and A. Anderson and R. Carbone and R. Fildes and M. Hibon and R. Lewandowskiand J. Newton and E. Parzen and R. Winkler}, title = {The Accuracy of Extrapolation (Time Series) Methods: Results of a Forecasting Competition}, journal = jf, year = {1982}, volume = {1}, pages = {111--153}, } @ARTICLE{Metal93, author = {Spyros Makridakis and Chris Chatfield and Mich\'{e}le Hibon and Michael Lawrence and Terence Mills and J. Keith Ord and LeRoy F. 
Simmons}, title = {The {M}2-Competition: A Real-Time Judgmentally Based Forecasting study}, journal = ijf, year = {1993}, volume = {9}, pages = {5--22}, } @ARTICLE{M3comp00, author = {Spyros Makridakis and Michele Hibon}, title = {The {M3}-Competition: Results, Conclusions and Implications}, journal = ijf, year = {2000}, volume = {16}, pages = {451-476}, keywords = {Comparative methods-Time series: Univariate; Forecasting competitions; {M}-competition; Forecasting methods; Forecasting accuracy}, } @BOOK{MWH3, title = {Forecasting: Methods and Applications}, publisher = {John Wiley \& Sons}, year = {1998}, author = {Makridakis, Spyros and Wheelwright, Steven C. and Rob J Hyndman}, pages = {642}, address = {New York}, edition = {3rd}, url = {http://www.robhyndman.info/forecasting/}, } @ARTICLE{MP00a, author = {G. M\'{e}lard and J.-M Pasteels}, title = {Automatic {ARIMA} Modeling Including Intervention, Using Time Series Expert Software}, journal = ijf, year = {2000}, volume = {16}, pages = {497-508}, keywords = {M3-Competition; ARIMA models; Expert systems; Intervention analysis; Outliers}, } @ARTICLE{Meyer:2002, author = {David Meyer}, title = {Naive Time Series Forecasting Methods}, journal = {\proglang{R} News}, year = {2002}, volume = {2}, number = {2}, pages = {7--10}, month = {June}, url = {http://CRAN.R-project.org/doc/Rnews/}, } @ARTICLE{OKS97, author = {J. Keith Ord and Anne B. Koehler and Ralph D. Snyder}, title = {Estimation and Prediction for a Class of Dynamic Nonlinear Statistical Models}, journal = jasa, year = {1997}, volume = {92}, pages = {1621-1629}, keywords = {Forecasting; Holt-Winters; Maximum likelihood estimation; State-space models}, pdf = {OKS97.pdf}, } @ARTICLE{OL96, author = {Keith Ord and Sam Lowe}, title = {Automatic Forecasting}, journal = amstat, year = {1996}, volume = {50}, pages = {88-94}, number = {1}, month = {February}, keywords = {automatic, Forecasting, Autobox, AutocastII, Forecast Pro}, } @ARTICLE{Pegels69, author = {C. Carl Pegels}, title = {Exponential Forecasting: Some New Variations}, journal = mansci, year = {1969}, volume = {15}, pages = {311-315}, number = {5}, } @ARTICLE{Reilly00, author = {Reilly, David}, title = {The \pkg{Autobox} System}, journal = ijf, year = {2000}, volume = {16}, pages = {531-533}, number = {4}, pdf = {Reilly00.pdf}, } @ARTICLE{Ripley:2002, author = {Brian D. Ripley}, title = {Time Series in \proglang{R}~1.5.0}, journal = {\proglang{R} News}, year = {2002}, volume = {2}, number = {2}, pages = {2--7}, month = {June}, url = {http://CRAN.R-project.org/doc/Rnews/}, } @ARTICLE{SH05, author = {Lydia Shenstone and Rob J Hyndman}, title = {Stochastic Models Underlying {C}roston's Method for Intermittent Demand Forecasting}, journal = jf, year = {2005}, volume = {24}, pages = {389-402}, } @ARTICLE{SY94, author = {Jeremy Smith and Sanjay Yadav}, title = {Forecasting Costs Incurred from Unit Differencing Fractionally Integrated Processes}, journal = ijf, year = {1994}, volume = {10}, pages = {507-514}, number = {4}, pdf = {SY94.pdf}, } @ARTICLE{SKHO04, author = {Ralph D Snyder and Anne B Koehler and Rob J Hyndman and J Keith Ord}, title = {Exponential Smoothing Models: Means and Variances for Lead-Time Demand}, journal = ejor, year = {2004}, volume = {158}, pages = {444-455}, number = {2}, } @ARTICLE{Taylor03a, author = {James W. 
Taylor}, title = {Exponential Smoothing with a Damped Multiplicative Trend}, journal = ijf, year = {2003}, volume = {19}, pages = {715-725}, keywords = {Damped trend exponential smoothing, Pegels classification, Multiplicative trend}, } @ARTICLE{Wallis99, author = {Wallis, K. F.}, title = {Asymmetric Density Forecasts of Inflation and the {Bank of England's} Fan Chart}, journal = {National Institute Economic Review}, year = {1999}, volume = {167}, pages = {106-112}, number = {1}, } @Manual{R, title = {\proglang{R}: A Language and Environment for Statistical Computing}, author = {{\proglang{R} Development Core Team}}, organization = {\proglang{R} Foundation for Statistical Computing}, address = {Vienna, Austria}, year = {2008}, note = {{ISBN} 3-900051-07-0}, url = {http://www.R-project.org/}, } forecast/vignettes/JSS2008.Rmd0000644000176200001440000017307314055364445015532 0ustar liggesusers--- author: - name: Rob J Hyndman affiliation: Monash University address: > Department of Econometrics \& Business Statistics Monash University Clayton VIC 3800, Australia email: Rob.Hyndman@monash.edu url: https://robjhyndman.com - name: Yeasmin Khandakar affiliation: Monash University address: > Department of Econometrics \& Business Statistics Monash University Clayton VIC 3800, Australia title: formatted: "Automatic Time Series Forecasting:\\newline the \\pkg{forecast} Package for \\proglang{R}" # If you use tex in the formatted title, also supply version without plain: "Automatic Time Series Forecasting: the forecast Package for R" # For running headers, if needed short: "\\pkg{forecast}: Automatic Time Series Forecasting" abstract: > This vignette to the \proglang{R} package \pkg{forecast} is an updated version of @HK2008, published in the *Journal of Statistical Software*. Automatic forecasts of large numbers of univariate time series are often needed in business and other contexts. We describe two automatic forecasting algorithms that have been implemented in the \pkg{forecast} package for \proglang{R}. The first is based on innovations state space models that underly exponential smoothing methods. The second is a step-wise algorithm for forecasting with ARIMA models. The algorithms are applicable to both seasonal and non-seasonal data, and are compared and illustrated using four real time series. 
We also briefly describe some of the other functionality available in the \pkg{forecast} package.} keywords: # at least one keyword must be supplied formatted: [ARIMA models, automatic forecasting, exponential smoothing, prediction intervals, state space models, time series, "\\proglang{R}"] plain: [ARIMA models, automatic forecasting, exponential smoothing, prediction intervals, state space models, time series, R] preamble: > \usepackage{amsmath,rotating,bm,fancyvrb,paralist,thumbpdf} \Volume{27} \Issue{3} \Month{July} \Year{2008} \Submitdate{2007-05-29} \Acceptdate{2008-03-22} \def\damped{$_{\mbox{\footnotesize d}}$} \let\var=\VAR \def\R{\proglang{R}} \def\dampfactor{\phi_h} \raggedbottom bibliography: JSS-paper.bib vignette: > %\VignetteIndexEntry{Automatic Time Series Forecasting: the forecast Package for R (Hyndman & Khandakar, JSS 2008)} %\VignetteEngine{knitr::rmarkdown} %\VignetteEncoding{UTF-8} documentclass: jss output: if (rmarkdown::pandoc_version() >= 2) rticles::jss_article else rmarkdown::html_vignette fig_width: 7 fig_height: 6 fig_caption: true --- ```{r load_forecast, echo=FALSE, message=FALSE} library('forecast') ``` ```{r load_expsmooth, echo=FALSE, message=FALSE, eval=FALSE} library('expsmooth') ``` ```{r expsmooth_datsets, echo=FALSE, message=FALSE} bonds <- structure(c(5.83, 6.06, 6.58, 7.09, 7.31, 7.23, 7.43, 7.37, 7.6, 7.89, 8.12, 7.96, 7.93, 7.61, 7.33, 7.18, 6.74, 6.27, 6.38, 6.6, 6.3, 6.13, 6.02, 5.79, 5.73, 5.89, 6.37, 6.62, 6.85, 7.03, 6.99, 6.75, 6.95, 6.64, 6.3, 6.4, 6.69, 6.52, 6.8, 7.01, 6.82, 6.6, 6.32, 6.4, 6.11, 5.82, 5.87, 5.89, 5.63, 5.65, 5.73, 5.72, 5.73, 5.58, 5.53, 5.41, 4.87, 4.58, 4.89, 4.69, 4.78, 4.99, 5.23, 5.18, 5.54, 5.9, 5.8, 5.94, 5.91, 6.1, 6.03, 6.26, 6.66, 6.52, 6.26, 6, 6.42, 6.1, 6.04, 5.83, 5.8, 5.74, 5.72, 5.23, 5.14, 5.1, 4.89, 5.13, 5.37, 5.26, 5.23, 4.97, 4.76, 4.55, 4.61, 5.07, 5, 4.9, 5.28, 5.21, 5.15, 4.9, 4.62, 4.24, 3.88, 3.91, 4.04, 4.03, 4.02, 3.9, 3.79, 3.94, 3.56, 3.32, 3.93, 4.44, 4.29, 4.27, 4.29, 4.26, 4.13, 4.06, 3.81, 4.32, 4.7), .Tsp = c(1994, 2004.33333333333, 12), class = "ts") usnetelec <- structure(c(296.1, 334.1, 375.3, 403.8, 447, 476.3, 550.3, 603.9, 634.6, 648.5, 713.4, 759.2, 797.1, 857.9, 920, 987.2, 1058.4, 1147.5, 1217.8, 1332.8, 1445.5, 1535.1, 1615.9, 1753, 1864.1, 1870.3, 1920.8, 2040.9, 2127.4, 2209.4, 2250.7, 2289.6, 2298, 2244.4, 2313.4, 2419.5, 2473, 2490.5, 2575.3, 2707.4, 2967.3, 3038, 3073.8, 3083.9, 3197.2, 3247.5, 3353.5, 3444.2, 3492.2, 3620.3, 3694.8, 3802.1, 3736.6, 3858.5, 3848), .Tsp = c(1949, 2003, 1), class = "ts") ukcars <- structure(c(330.371, 371.051, 270.67, 343.88, 358.491, 362.822, 261.281, 240.355, 325.382, 316.7, 171.153, 257.217, 298.127, 251.464, 181.555, 192.598, 245.652, 245.526, 225.261, 238.211, 257.385, 228.461, 175.371, 226.462, 266.15, 287.251, 225.883, 265.313, 272.759, 234.134, 196.462, 205.551, 291.283, 284.422, 221.571, 250.697, 253.757, 267.016, 220.388, 277.801, 283.233, 302.072, 259.72, 297.658, 306.129, 322.106, 256.723, 341.877, 356.004, 361.54, 270.433, 311.105, 326.688, 327.059, 274.257, 367.606, 346.163, 348.211, 250.008, 292.518, 343.318, 343.429, 275.386, 329.747, 364.521, 378.448, 300.798, 331.757, 362.536, 389.133, 323.322, 391.832, 421.646, 416.823, 311.713, 381.902, 422.982, 427.722, 376.85, 458.58, 436.225, 441.487, 369.566, 450.723, 462.442, 468.232, 403.636, 413.948, 460.496, 448.932, 407.787, 469.408, 494.311, 433.24, 335.106, 378.795, 387.1, 372.395, 335.79, 397.08, 449.755, 402.252, 391.847, 385.89, 424.325, 433.28, 391.213, 408.74, 445.458, 
428.202, 379.048, 394.042, 432.796), .Tsp = c(1977, 2005, 4), class = "ts") visitors <- structure(c(75.7, 75.4, 83.1, 82.9, 77.3, 105.7, 121.9, 150, 98, 118, 129.5, 110.6, 91.7, 94.8, 109.5, 105.1, 95, 130.3, 156.7, 190.1, 139.7, 147.8, 145.2, 132.7, 120.7, 116.5, 142, 140.4, 128, 165.7, 183.1, 222.8, 161.3, 180.4, 185.2, 160.5, 157.1, 163.8, 203.3, 196.9, 179.6, 207.3, 208, 245.8, 168.9, 191.1, 180, 160.1, 136.6, 142.7, 175.4, 161.4, 149.9, 174.1, 192.7, 247.4, 176.2, 192.8, 189.1, 181.1, 149.9, 157.3, 185.3, 178.2, 162.7, 190.6, 198.6, 253.1, 177.4, 190.6, 189.2, 168, 161.4, 172.2, 208.3, 199.3, 197.4, 216, 223.9, 266.8, 196.1, 238.2, 217.8, 203.8, 175.2, 176.9, 219.3, 199.1, 190, 229.3, 255, 302.4, 242.8, 245.5, 257.9, 226.3, 213.4, 204.6, 244.6, 239.9, 224, 267.2, 285.9, 344, 250.5, 304.3, 307.4, 255.1, 214.9, 230.9, 282.5, 265.4, 254, 301.6, 311, 384, 303.8, 319.1, 313.5, 294.2, 244.8, 261.4, 329.7, 304.9, 268.6, 320.7, 342.9, 422.3, 317.2, 392.7, 365.6, 333.2, 261.5, 306.9, 358.2, 329.2, 309.2, 350.4, 375.6, 465.2, 342.9, 408, 390.9, 325.9, 289.1, 308.2, 397.4, 330.4, 330.9, 366.5, 379.5, 448.3, 346.2, 353.6, 338.6, 341.1, 283.4, 304.2, 372.3, 323.7, 323.9, 354.8, 367.9, 457.6, 351, 398.6, 389, 334.1, 298.1, 317.1, 388.5, 355.6, 353.1, 397, 416.7, 460.8, 360.8, 434.6, 411.9, 405.6, 319.3, 347.9, 429, 372.9, 403, 426.5, 459.9, 559.9, 416.6, 429.2, 428.7, 405.4, 330.2, 370, 446.9, 384.6, 366.3, 378.5, 376.2, 523.2, 379.3, 437.2, 446.5, 360.3, 329.9, 339.4, 418.2, 371.9, 358.6, 428.9, 437, 534, 396.6, 427.5, 392.5, 321.5, 260.9, 308.3, 415.5, 362.2, 385.6, 435.3, 473.3, 566.6, 420.2, 454.8, 432.3, 402.8, 341.3, 367.3, 472, 405.8, 395.6, 449.9, 479.9, 593.1, 462.4, 501.6, 504.7, 409.5), .Tsp = c(1985.33333333333, 2005.25, 12), class = "ts") ``` # Introduction Automatic forecasts of large numbers of univariate time series are often needed in business. It is common to have over one thousand product lines that need forecasting at least monthly. Even when a smaller number of forecasts are required, there may be nobody suitably trained in the use of time series models to produce them. In these circumstances, an automatic forecasting algorithm is an essential tool. Automatic forecasting algorithms must determine an appropriate time series model, estimate the parameters and compute the forecasts. They must be robust to unusual time series patterns, and applicable to large numbers of series without user intervention. The most popular automatic forecasting algorithms are based on either exponential smoothing or ARIMA models. In this article, we discuss the implementation of two automatic univariate forecasting methods in the \pkg{forecast} package for \proglang{R}. We also briefly describe some univariate forecasting methods that are part of the \pkg{forecast} package. The \pkg{forecast} package for the \proglang{R} system for statistical computing [@R] is available from the Comprehensive \proglang{R} Archive Network at \url{https://CRAN.R-project.org/package=forecast}. Version `r packageVersion('forecast')` of the package was used for this paper. The \pkg{forecast} package contains functions for univariate forecasting and a few examples of real time series data. For more extensive testing of forecasting methods, the \pkg{fma} package contains the 90 data sets from @MWH3, the \pkg{expsmooth} package contains 24 data sets from @expsmooth08, and the \pkg{Mcomp} package contains the 1001 time series from the M-competition [@Mcomp82] and the 3003 time series from the M3-competition [@M3comp00]. 
The \pkg{forecast} package implements automatic forecasting using exponential smoothing, ARIMA models, the Theta method [@AN00], cubic splines [@HKPB05], as well as other common forecasting methods. In this article, we primarily discuss the exponential smoothing approach (in Section \ref{sec:expsmooth}) and the ARIMA modelling approach (in Section \ref{sec:arima}) to automatic forecasting. In Section \ref{sec:package}, we describe the implementation of these methods in the \pkg{forecast} package, along with other features of the package. # Exponential smoothing {#sec:expsmooth} Although exponential smoothing methods have been around since the 1950s, a modelling framework incorporating procedures for model selection was not developed until relatively recently. @OKS97, @HKSG02 and @HKOS05 have shown that all exponential smoothing methods (including non-linear methods) are optimal forecasts from innovations state space models. Exponential smoothing methods were originally classified by Pegels' (1969)\nocite{Pegels69} taxonomy. This was later extended by @Gardner85, modified by @HKSG02, and extended again by @Taylor03a, giving a total of fifteen methods seen in the following table. \begin{table}[!hbt] \begin{center}\vspace{0.2cm} \begin{tabular}{|ll|ccc|} \hline & &\multicolumn{3}{c|}{Seasonal Component} \\ \multicolumn{2}{|c|}{Trend}& N & A & M\\ \multicolumn{2}{|c|}{Component} & (None) & (Additive) & (Multiplicative)\\ \cline{3-5} &&&\\[-0.3cm] N & (None) & N,N & N,A & N,M\\ &&&&\\[-0.3cm] A & (Additive) & A,N & A,A & A,M\\ &&&&\\[-0.3cm] A\damped & (Additive damped) & A\damped,N & A\damped,A & A\damped,M\\ &&&&\\[-0.3cm] M & (Multiplicative) & M,N & M,A & M,M\\ &&&&\\[-0.3cm] M\damped & (Multiplicative damped) & M\damped,N & M\damped,A & M\damped,M\\ \hline \end{tabular}\vspace{0.2cm} \end{center} \caption{The fifteen exponential smoothing methods.} \end{table} Some of these methods are better known under other names. For example, cell (N,N) describes the simple exponential smoothing (or SES) method, cell (A,N) describes Holt's linear method, and cell (A\damped,N) describes the damped trend method. The additive Holt-Winters' method is given by cell (A,A) and the multiplicative Holt-Winters' method is given by cell (A,M). The other cells correspond to less commonly used but analogous methods. ## Point forecasts for all methods We denote the observed time series by $y_1,y_2,\dots,y_n$. A forecast of $y_{t+h}$ based on all of the data up to time $t$ is denoted by $\hat{y}_{t+h|t}$. To illustrate the method, we give the point forecasts and updating equations for method (A,A), the Holt-Winters' additive method: \begin{subequations}\label{eq:AMmethod}\vspace{-15pt} \begin{align} \mbox{Level:}\quad &\ell_t = \alpha (y_t - s_{t-m}) + (1-\alpha)(\ell_{t-1} + b_{t-1})\hspace*{1cm} \label{eq:3-44a}\\ \mbox{Growth:}\quad &b_t = \beta^*(\ell_t - \ell_{t-1}) + (1-\beta^*)b_{t-1} \label{eq:3-45a}\\ \mbox{Seasonal:}\quad &s_t = \gamma(y_t - \ell_{t-1} -b_{t-1}) + (1-\gamma)s_{t-m}\label{eq:3-46a}\\ \mbox{Forecast:}\quad &\hat{y}_{t+h|t} = \ell_t + b_th +s_{t-m+h_m^+}. \label{eq:3-47a} \end{align} \end{subequations} where $m$ is the length of seasonality (e.g., the number of months or quarters in a year), $\ell_t$ represents the level of the series, $b_t$ denotes the growth, $s_t$ is the seasonal component, $\hat{y}_{t+h|t}$ is the forecast for $h$ periods ahead, and $h_m^+ = \big[(h-1) \mbox{ mod } m\big] + 1$. 
To use method \eqref{eq:AMmethod}, we need values for the initial states $\ell_0$, $b_0$ and $s_{1-m},\dots,s_0$, and for the smoothing parameters $\alpha$, $\beta^*$ and $\gamma$. All of these will be estimated from the observed data.

Equation \eqref{eq:3-46a} is slightly different from the usual Holt-Winters equations such as those in @MWH3 or @BOK05. These authors replace \eqref{eq:3-46a} with
$$ s_t = \gamma^*(y_t - \ell_{t}) + (1-\gamma^*)s_{t-m}. $$
If $\ell_t$ is substituted using \eqref{eq:3-44a}, we obtain
$$s_t = \gamma^*(1-\alpha)(y_t - \ell_{t-1}-b_{t-1}) + \{1-\gamma^*(1-\alpha)\}s_{t-m}. $$
Thus, we obtain identical forecasts using this approach by replacing $\gamma$ in \eqref{eq:3-46a} with $\gamma^*(1-\alpha)$. The modification given in \eqref{eq:3-46a} was proposed by @OKS97 to make the state space formulation simpler. It is equivalent to Archibald's (1990)\nocite{Archibald90} variation of the Holt-Winters' method.

\begin{sidewaystable}
\begin{small}
\begin{center}
\begin{tabular}{|c|lll|}
\hline
& \multicolumn{3}{c|}{Seasonal} \\
{Trend} & \multicolumn{1}{c}{N} & \multicolumn{1}{c}{A} & \multicolumn{1}{c|}{M}\\
\cline{2-4}
& $\ell_t = \alpha y_t + (1-\alpha) \ell_{t-1}$ & $\ell_t = \alpha (y_t - s_{t-m}) + (1-\alpha) \ell_{t-1}$ & $\ell_t = \alpha (y_t / s_{t-m}) + (1-\alpha) \ell_{t-1}$\\
{N} & & $s_t = \gamma (y_t - \ell_{t-1}) + (1-\gamma) s_{t-m}$ & $s_t = \gamma (y_t / \ell_{t-1}) + (1-\gamma) s_{t-m}$ \\
& $\hat{y}_{t+h|t} = \ell_t$ & $\hat{y}_{t+h|t} = \ell_t + s_{t-m+h_m^+}$ & $\hat{y}_{t+h|t}= \ell_ts_{t-m+h_m^+}$ \\
\hline
& $\ell_t = \alpha y_t + (1-\alpha) (\ell_{t-1}+b_{t-1})$ & $\ell_t = \alpha (y_t - s_{t-m}) + (1-\alpha) (\ell_{t-1}+b_{t-1})$ & $\ell_t = \alpha (y_t / s_{t-m}) + (1-\alpha) (\ell_{t-1}+b_{t-1})$\\
{A} & $b_t = \beta^* (\ell_t-\ell_{t-1}) + (1-\beta^*) b_{t-1}$ & $b_t = \beta^* (\ell_t-\ell_{t-1}) + (1-\beta^*) b_{t-1}$ & $b_t = \beta^* (\ell_t-\ell_{t-1}) + (1-\beta^*) b_{t-1}$\\
& & $s_t = \gamma (y_t - \ell_{t-1}-b_{t-1}) + (1-\gamma) s_{t-m}$ & $s_t = \gamma (y_t / (\ell_{t-1}+b_{t-1})) + (1-\gamma) s_{t-m}$\\
& $\hat{y}_{t+h|t} = \ell_t+hb_t$ & $\hat{y}_{t+h|t} = \ell_t +hb_t +s_{t-m+h_m^+}$ & $\hat{y}_{t+h|t}= (\ell_t+hb_t)s_{t-m+h_m^+}$ \\
\hline
& $\ell_t = \alpha y_t + (1-\alpha) (\ell_{t-1}+\phi b_{t-1})$ & $\ell_t = \alpha (y_t - s_{t-m}) + (1-\alpha) (\ell_{t-1}+\phi b_{t-1})$ & $\ell_t = \alpha (y_t / s_{t-m}) + (1-\alpha) (\ell_{t-1}+\phi b_{t-1})$\\
{A\damped } & $b_t = \beta^* (\ell_t-\ell_{t-1}) + (1-\beta^*) \phi b_{t-1}$ & $b_t = \beta^* (\ell_t-\ell_{t-1}) + (1-\beta^*) \phi b_{t-1}$ & $b_t = \beta^* (\ell_t-\ell_{t-1}) + (1-\beta^*) \phi b_{t-1}$\\
& & $s_t = \gamma (y_t - \ell_{t-1}-\phi b_{t-1}) + (1-\gamma) s_{t-m}$ & $s_t = \gamma (y_t / (\ell_{t-1}+\phi b_{t-1})) + (1-\gamma) s_{t-m}$\\
& $\hat{y}_{t+h|t} = \ell_t+\dampfactor b_t$ & $\hat{y}_{t+h|t} = \ell_t+\dampfactor b_t+s_{t-m+h_m^+}$ & $\hat{y}_{t+h|t}= (\ell_t+\dampfactor b_t)s_{t-m+h_m^+}$ \\
\hline
& $\ell_t = \alpha y_t + (1-\alpha) \ell_{t-1}b_{t-1}$ & $\ell_t = \alpha (y_t - s_{t-m}) + (1-\alpha) \ell_{t-1}b_{t-1}$ & $\ell_t = \alpha (y_t / s_{t-m}) + (1-\alpha) \ell_{t-1}b_{t-1}$\\
{M} & $b_t = \beta^* (\ell_t/\ell_{t-1}) + (1-\beta^*) b_{t-1}$ & $b_t = \beta^* (\ell_t/\ell_{t-1}) + (1-\beta^*) b_{t-1}$ & $b_t = \beta^* (\ell_t/\ell_{t-1}) + (1-\beta^*) b_{t-1}$\\
& & $s_t = \gamma (y_t - \ell_{t-1}b_{t-1}) + (1-\gamma) s_{t-m}$ & $s_t = \gamma (y_t / (\ell_{t-1}b_{t-1})) + (1-\gamma) s_{t-m}$\\
& $\hat{y}_{t+h|t} = \ell_tb_t^h$ & $\hat{y}_{t+h|t} =
\ell_tb_t^h + s_{t-m+h_m^+}$ & $\hat{y}_{t+h|t}= \ell_tb_t^hs_{t-m+h_m^+}$ \\ \hline & $\ell_t = \alpha y_t + (1-\alpha) \ell_{t-1}b^\phi_{t-1}$ & $\ell_t = \alpha (y_t - s_{t-m}) + (1-\alpha)\ell_{t-1}b^\phi_{t-1}$ & $\ell_t = \alpha (y_t / s_{t-m}) + (1-\alpha)\ell_{t-1}b^\phi_{t-1}$\\ {M\damped } & $b_t = \beta^* (\ell_t/\ell_{t-1}) + (1-\beta^*) b^\phi_{t-1}$ & $b_t = \beta^* (\ell_t/\ell_{t-1}) + (1-\beta^*) b^\phi_{t-1}$ & $b_t = \beta^* (\ell_t/\ell_{t-1}) + (1-\beta^*) b^\phi_{t-1}$\\ & & $s_t = \gamma (y_t - \ell_{t-1}b^\phi_{t-1}) + (1-\gamma) s_{t-m}$ & $s_t = \gamma (y_t / (\ell_{t-1}b^\phi_{t-1})) + (1-\gamma) s_{t-m}$\\ & $\hat{y}_{t+h|t} = \ell_tb_t^{\phi_h}$ & $\hat{y}_{t+h|t} = \ell_tb_t^{\phi_h} + s_{t-m+h_m^+}$ & $\hat{y}_{t+h|t}= \ell_tb_t^{\phi_h}s_{t-m+h_m^+}$ \\ \hline \end{tabular} \end{center} \end{small} \caption{Formulae for recursive calculations and point forecasts. In each case, $\ell_t$ denotes the series level at time $t$, $b_t$ denotes the slope at time $t$, $s_t$ denotes the seasonal component of the series at time $t$, and $m$ denotes the number of seasons in a year; $\alpha$, $\beta^*$, $\gamma$ and $\phi$ are constants, $\phi_h = \phi+\phi^2+\dots+\phi^{h}$ and $h_m^+ = \big[(h-1) \mbox{ mod } m\big] + 1$.}\label{table:pegels} \end{sidewaystable} Table \ref{table:pegels} gives recursive formulae for computing point forecasts $h$ periods ahead for all of the exponential smoothing methods. Some interesting special cases can be obtained by setting the smoothing parameters to extreme values. For example, if $\alpha=0$, the level is constant over time; if $\beta^*=0$, the slope is constant over time; and if $\gamma=0$, the seasonal pattern is constant over time. At the other extreme, naïve forecasts (i.e., $\hat{y}_{t+h|t}=y_t$ for all $h$) are obtained using the (N,N) method with $\alpha=1$. Finally, the additive and multiplicative trend methods are special cases of their damped counterparts obtained by letting $\phi=1$. ## Innovations state space models {#sec:statespace} For each exponential smoothing method in Table \ref{table:pegels}, @expsmooth08 describe two possible innovations state space models, one corresponding to a model with additive errors and the other to a model with multiplicative errors. If the same parameter values are used, these two models give equivalent point forecasts, although different prediction intervals. Thus there are 30 potential models described in this classification. Historically, the nature of the error component has often been ignored, because the distinction between additive and multiplicative errors makes no difference to point forecasts. We are careful to distinguish exponential smoothing \emph{methods} from the underlying state space \emph{models}. An exponential smoothing method is an algorithm for producing point forecasts only. The underlying stochastic state space model gives the same point forecasts, but also provides a framework for computing prediction intervals and other properties. To distinguish the models with additive and multiplicative errors, we add an extra letter to the front of the method notation. The triplet (E,T,S) refers to the three components: error, trend and seasonality. So the model ETS(A,A,N) has additive errors, additive trend and no seasonality---in other words, this is Holt's linear method with additive errors. Similarly, ETS(M,M\damped,M) refers to a model with multiplicative errors, a damped multiplicative trend and multiplicative seasonality. 
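In the \code{ets()} function of the \pkg{forecast} package (described in Section \ref{sec:package}), a model can be specified directly by this three-letter code via the \code{model} argument. A brief sketch is given below, where \code{y} is a placeholder for any suitable time series.

```{r ets-model-codes, eval=FALSE, echo=TRUE}
# Specifying ETS models by their (E,T,S) triplet; `y` is a placeholder series.
ets(y, model = "AAN")                  # ETS(A,A,N): Holt's linear method with additive errors
ets(y, model = "AAN", damped = TRUE)   # ETS(A,Ad,N): additive damped trend
ets(y, model = "ZZZ")                  # "Z": select each component automatically
```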
The notation ETS($\cdot$,$\cdot$,$\cdot$) helps in remembering the order in which the components are specified.

Once a model is specified, we can study the probability distribution of future values of the series and find, for example, the conditional mean of a future observation given knowledge of the past. We denote this as $\mu_{t+h|t} = \E(y_{t+h} \mid \bm{x}_t)$, where $\bm{x}_t$ contains the unobserved components such as $\ell_t$, $b_t$ and $s_t$. For $h=1$ we use $\mu_t\equiv\mu_{t+1|t}$ as a shorthand notation. For many models, these conditional means will be identical to the point forecasts given in Table \ref{table:pegels}, so that $\mu_{t+h|t}=\hat{y}_{t+h|t}$. However, for other models (those with multiplicative trend or multiplicative seasonality), the conditional mean and the point forecast will differ slightly for $h\ge 2$.

We illustrate these ideas using the damped trend method of @GM85.

\subsubsection{Additive error model: ETS(A,A$_d$,N)}

Let $\mu_t = \hat{y}_t = \ell_{t-1}+\phi b_{t-1}$ denote the one-step forecast of $y_{t}$ assuming that we know the values of all parameters. Also, let $\varepsilon_t = y_t - \mu_t$ denote the one-step forecast error at time $t$. From the equations in Table \ref{table:pegels}, we find that\vspace*{-15pt}
\begin{align}
\label{ss1} y_t &= \ell_{t-1} + \phi b_{t-1} + \varepsilon_t\\
\ell_t &= \ell_{t-1} + \phi b_{t-1} + \alpha \varepsilon_t \label{ss2}\\
b_t &= \phi b_{t-1} + \beta^*(\ell_t - \ell_{t-1}- \phi b_{t-1}) = \phi b_{t-1} + \alpha\beta^*\varepsilon_t. \label{ss3}
\end{align}
We simplify the last expression by setting $\beta=\alpha\beta^*$. The three equations above constitute a state space model underlying the damped Holt's method. Note that it is an \emph{innovations} state space model [@AM79;@Aoki87] because the same error term appears in each equation. We can write it in standard state space notation by defining the state vector as $\bm{x}_t = (\ell_t,b_t)'$ and expressing \eqref{ss1}--\eqref{ss3} as
\begin{subequations}\vspace*{-15pt}
\begin{align}
y_t &= \left[ 1 ~ \phi \right] \bm{x}_{t-1} + \varepsilon_t\label{obseq}\\
\bm{x}_t &= \left[\begin{array}{ll} 1 & \phi\\ 0 & \phi \end{array}\right]\bm{x}_{t-1} + \left[\begin{array}{l} \alpha\\ \beta \end{array}\right]\varepsilon_t.\label{stateeq}
\end{align}
\end{subequations}
The model is fully specified once we state the distribution of the error term $\varepsilon_t$. Usually we assume that these are independent and identically distributed, following a normal distribution with mean 0 and variance $\sigma^2$, which we write as $\varepsilon_t \sim\mbox{NID}(0, \sigma^2)$.

\subsubsection{Multiplicative error model: ETS(M,A$_d$,N)}

A model with multiplicative error can be derived similarly, by first setting $\varepsilon_t = (y_t-\mu_t)/\mu_t$, so that $\varepsilon_t$ is the relative error. Then, following a similar approach to that for additive errors, we find\vspace*{-15pt}
\begin{align*}
y_t &= (\ell_{t-1} + \phi b_{t-1})(1 + \varepsilon_t)\\
\ell_t &= (\ell_{t-1} + \phi b_{t-1})(1 + \alpha \varepsilon_t)\\
b_t &= \phi b_{t-1} + \beta(\ell_{t-1}+\phi b_{t-1})\varepsilon_t,
\end{align*}
or\vspace*{-15pt}
\begin{align*}
y_t &= \left[ 1 ~ \phi \right] \bm{x}_{t-1}(1 + \varepsilon_t)\\
\bm{x}_t &= \left[\begin{array}{ll} 1 & \phi \\ 0 & \phi \end{array}\right]\bm{x}_{t-1} + \left[ 1 ~ \phi \right] \bm{x}_{t-1} \left[\begin{array}{l} \alpha\\ \beta \end{array}\right]\varepsilon_t.
\end{align*}
Again we assume that $\varepsilon_t \sim \mbox{NID}(0,\sigma^2)$.
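When the parameters and initial states are given, the innovations of this model can be recovered by a simple scalar recursion, as in the following sketch. The function name and arguments are ours, not package code; here \code{beta} denotes $\beta=\alpha\beta^*$ as above.

```{r ets-madn-filter, eval=FALSE, echo=TRUE}
# One-step filtering of ETS(M,Ad,N): recover e_t = (y_t - mu_t)/mu_t
# given the parameters and initial states (a sketch, not package code).
ets_madn_filter <- function(y, alpha, beta, phi, l0, b0) {
  e <- numeric(length(y))
  l <- l0; b <- b0
  for (t in seq_along(y)) {
    mu <- l + phi * b                # one-step forecast mu_t
    e[t] <- (y[t] - mu) / mu         # relative error
    b <- phi * b + beta * mu * e[t]  # growth update
    l <- mu * (1 + alpha * e[t])     # level update
  }
  e
}
```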
Of course, this is a nonlinear state space model, which is usually considered difficult to handle in estimating and forecasting. However, that is one of the many advantages of the innovations form of state space models --- we can still compute forecasts, the likelihood and prediction intervals for this nonlinear model with no more effort than is required for the additive error model. ## State space models for all exponential smoothing methods {#sec:ssmodels} There are similar state space models for all 30 exponential smoothing variations. The general model involves a state vector $\bm{x}_t = (\ell_t, b_t$, $s_t, s_{t-1}, \dots, s_{t-m+1})'$ and state space equations of the form \begin{subequations}\label{eq:ss}\vspace*{-15pt} \begin{align} y_t &= w(\bm{x}_{t-1}) + r(\bm{x}_{t-1})\varepsilon_t \label{eq:ss1}\\ \bm{x}_t &= f(\bm{x}_{t-1}) + g(\bm{x}_{t-1})\varepsilon_t \label{eq:ss2} \end{align} \end{subequations} where $\{\varepsilon_t\}$ is a Gaussian white noise process with mean zero and variance $\sigma^2$, and $\mu_t = w(\bm{x}_{t-1})$. The model with additive errors has $r(\bm{x}_{t-1})=1$, so that $y_t = \mu_{t} + \varepsilon_t$. The model with multiplicative errors has $r(\bm{x}_{t-1})=\mu_t$, so that $y_t = \mu_{t}(1 + \varepsilon_t)$. Thus, $\varepsilon_t = (y_t - \mu_t)/\mu_t$ is the relative error for the multiplicative model. The models are not unique. Clearly, any value of $r(\bm{x}_{t-1})$ will lead to identical point forecasts for $y_t$. All of the methods in Table \ref{table:pegels} can be written in the form \eqref{eq:ss1} and \eqref{eq:ss2}. The specific form for each model is given in @expsmooth08. Some of the combinations of trend, seasonality and error can occasionally lead to numerical difficulties; specifically, any model equation that requires division by a state component could involve division by zero. This is a problem for models with additive errors and either multiplicative trend or multiplicative seasonality, as well as for the model with multiplicative errors, multiplicative trend and additive seasonality. These models should therefore be used with caution. The multiplicative error models are useful when the data are strictly positive, but are not numerically stable when the data contain zeros or negative values. So when the time series is not strictly positive, only the six fully additive models may be applied. The point forecasts given in Table \ref{table:pegels} are easily obtained from these models by iterating equations \eqref{eq:ss1} and \eqref{eq:ss2} for $t=n+1, n+2,\dots,n+h$, setting $\varepsilon_{n+j}=0$ for $j=1,\dots,h$. In most cases (notable exceptions being models with multiplicative seasonality or multiplicative trend for $h\ge2$), the point forecasts can be shown to be equal to $\mu_{t+h|t} = \E(y_{t+h} \mid \bm{x}_t)$, the conditional expectation of the corresponding state space model. The models also provide a means of obtaining prediction intervals. In the case of the linear models, where the forecast distributions are normal, we can derive the conditional variance $v_{t+h|t} = \var(y_{t+h} \mid \bm{x}_t)$ and obtain prediction intervals accordingly. This approach also works for many of the nonlinear models. Detailed derivations of the results for many models are given in @HKOS05. A more direct approach that works for all of the models is to simply simulate many future sample paths conditional on the last estimate of the state vector, $\bm{x}_t$. Then prediction intervals can be obtained from the percentiles of the simulated sample paths. 
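For example, using the \code{simulate()} method for fitted \code{ets} objects (see Section \ref{sec:package}), simulation-based intervals can be computed along the following lines; \code{y} is a placeholder series, and the horizon and the number of sample paths are arbitrary choices.

```{r sim-pi-sketch, eval=FALSE, echo=TRUE}
# Prediction intervals from simulated future sample paths of a fitted
# ets model. Setting bootstrap = TRUE would resample past errors
# instead of drawing Gaussian innovations.
fit <- ets(y)
h <- 12
paths <- replicate(5000, simulate(fit, nsim = h, future = TRUE))
lower <- apply(paths, 1, quantile, probs = 0.025)  # 95% interval
upper <- apply(paths, 1, quantile, probs = 0.975)
```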
Point forecasts can also be obtained in this way by taking the average of the simulated values at each future time period. An advantage of this approach is that we generate an estimate of the complete predictive distribution, which is especially useful in applications such as inventory planning, where expected costs depend on the whole distribution. ## Estimation {#sec:estimation} In order to use these models for forecasting, we need to know the values of $\bm{x}_0$ and the parameters $\alpha$, $\beta$, $\gamma$ and $\phi$. It is easy to compute the likelihood of the innovations state space model \eqref{eq:ss}, and so obtain maximum likelihood estimates. @OKS97 show that\vspace*{-15pt} \begin{equation}\label{likelihood} L^*(\bm\theta,\bm{x}_0) = n\log\Big(\sum_{t=1}^n \varepsilon^2_t\Big) + 2\sum_{t=1}^n \log|r(\bm{x}_{t-1})| \end{equation} is equal to twice the negative logarithm of the likelihood function (with constant terms eliminated), conditional on the parameters $\bm\theta = (\alpha,\beta,\gamma,\phi)'$ and the initial states $\bm{x}_0 = (\ell_0,b_0,s_0,s_{-1},\dots,s_{-m+1})'$, where $n$ is the number of observations. This is easily computed by simply using the recursive equations in Table \ref{table:pegels}. Unlike state space models with multiple sources of error, we do not need to use the Kalman filter to compute the likelihood. The parameters $\bm\theta$ and the initial states $\bm{x}_0$ can be estimated by minimizing $L^*$. Most implementations of exponential smoothing use an ad hoc heuristic scheme to estimate $\bm{x}_0$. However, with modern computers, there is no reason why we cannot estimate $\bm{x}_0$ along with $\bm\theta$, and the resulting forecasts are often substantially better when we do. We constrain the initial states $\bm{x}_0$ so that the seasonal indices add to zero for additive seasonality, and add to $m$ for multiplicative seasonality. There have been several suggestions for restricting the parameter space for $\alpha$, $\beta$ and $\gamma$. The traditional approach is to ensure that the various equations can be interpreted as weighted averages, thus requiring $\alpha$, $\beta^*=\beta/\alpha$, $\gamma^*=\gamma/(1-\alpha)$ and $\phi$ to all lie within $(0,1)$. This suggests $$0<\alpha<1,\qquad 0<\beta<\alpha,\qquad 0<\gamma < 1-\alpha,\qquad\mbox{and}\qquad 0<\phi<1. $$ However, @HAA08 show that these restrictions are usually stricter than necessary (although in a few cases they are not restrictive enough). ## Model selection Forecast accuracy measures such as mean squared error (MSE) can be used for selecting a model for a given set of data, provided the errors are computed from data in a hold-out set and not from the same data as were used for model estimation. However, there are often too few out-of-sample errors to draw reliable conclusions. Consequently, a penalized method based on the in-sample fit is usually better. One such approach uses a penalized likelihood such as Akaike's Information Criterion: $$\mbox{AIC} = L^*(\hat{\bm\theta},\hat{\bm{x}}_0) + 2q, $$ where $q$ is the number of parameters in $\bm\theta$ plus the number of free states in $\bm{x}_0$, and $\hat{\bm\theta}$ and $\hat{\bm{x}}_0$ denote the estimates of $\bm\theta$ and $\bm{x}_0$. We select the model that minimizes the AIC amongst all of the models that are appropriate for the data. The AIC also provides a method for selecting between the additive and multiplicative error models. 
The point forecasts from the two models are identical so that standard forecast accuracy measures such as the MSE or mean absolute percentage error (MAPE) are unable to select between the error types. The AIC is able to select between the error types because it is based on likelihood rather than one-step forecasts. Obviously, other model selection criteria (such as the BIC) could also be used in a similar manner. ## Automatic forecasting {#sec:algorithm} We combine the preceding ideas to obtain a robust and widely applicable automatic forecasting algorithm. The steps involved are summarized below. \begin{compactenum} \item For each series, apply all models that are appropriate, optimizing the parameters (both smoothing parameters and the initial state variable) of the model in each case. \item Select the best of the models according to the AIC. \item Produce point forecasts using the best model (with optimized parameters) for as many steps ahead as required. \item Obtain prediction intervals for the best model either using the analytical results of Hyndman, Koehler, et al. (2005), or by simulating future sample paths for $\{y_{n+1},\dots,y_{n+h}\}$ and finding the $\alpha/2$ and $1-\alpha/2$ percentiles of the simulated data at each forecasting horizon. If simulation is used, the sample paths may be generated using the normal distribution for errors (parametric bootstrap) or using the resampled errors (ordinary bootstrap). \end{compactenum} @HKSG02 applied this automatic forecasting strategy to the M-competition data [@Mcomp82] and the IJF-M3 competition data [@M3comp00] using a restricted set of exponential smoothing models, and demonstrated that the methodology is particularly good at short term forecasts (up to about 6 periods ahead), and especially for seasonal short-term series (beating all other methods in the competitions for these series). # ARIMA models {#sec:arima} A common obstacle for many people in using Autoregressive Integrated Moving Average (ARIMA) models for forecasting is that the order selection process is usually considered subjective and difficult to apply. But it does not have to be. There have been several attempts to automate ARIMA modelling in the last 25 years. @HR82 proposed a method to identify the order of an ARMA model for a stationary series. In their method the innovations can be obtained by fitting a long autoregressive model to the data, and then the likelihood of potential models is computed via a series of standard regressions. They established the asymptotic properties of the procedure under very general conditions. @Gomez98 extended the Hannan-Rissanen identification method to include multiplicative seasonal ARIMA model identification. @TRAMOSEATS98 implemented this automatic identification procedure in the software \pkg{TRAMO} and \pkg{SEATS}. For a given series, the algorithm attempts to find the model with the minimum BIC. @Liu89 proposed a method for identification of seasonal ARIMA models using a filtering method and certain heuristic rules; this algorithm is used in the \pkg{SCA-Expert} software. Another approach is described by @MP00a whose algorithm for univariate ARIMA models also allows intervention analysis. It is implemented in the software package ``Time Series Expert'' (\pkg{TSE-AX}). Other algorithms are in use in commercial software, although they are not documented in the public domain literature. 
In particular, \pkg{Forecast Pro} [@ForecastPro00] is well-known for its excellent automatic ARIMA algorithm which was used in the M3-forecasting competition [@M3comp00]. Another proprietary algorithm is implemented in \pkg{Autobox} [@Reilly00]. @OL96 provide an early review of some of the commercial software that implement automatic ARIMA forecasting.

## Choosing the model order using unit root tests and the AIC

A non-seasonal ARIMA($p,d,q$) process is given by
$$ \phi(B)(1-B)^dy_{t} = c + \theta(B)\varepsilon_t $$
where $\{\varepsilon_t\}$ is a white noise process with mean zero and variance $\sigma^2$, $B$ is the backshift operator, and $\phi(z)$ and $\theta(z)$ are polynomials of orders $p$ and $q$ respectively. To ensure causality and invertibility, it is assumed that $\phi(z)$ and $\theta(z)$ have no roots for $|z|<1$ [@BDbook91]. If $c\ne0$, there is an implied polynomial of order $d$ in the forecast function.

The seasonal ARIMA$(p,d,q)(P,D,Q)_m$ process is given by
$$ \Phi(B^m)\phi(B)(1-B^{m})^D(1-B)^dy_{t} = c + \Theta(B^m)\theta(B)\varepsilon_t $$
where $\Phi(z)$ and $\Theta(z)$ are polynomials of orders $P$ and $Q$ respectively, each containing no roots inside the unit circle. If $c\ne0$, there is an implied polynomial of order $d+D$ in the forecast function.

The main task in automatic ARIMA forecasting is selecting an appropriate model order, that is the values $p$, $q$, $P$, $Q$, $D$, $d$. If $d$ and $D$ are known, we can select the orders $p$, $q$, $P$ and $Q$ via an information criterion such as the AIC:
$$\mbox{AIC} = -2\log(L) + 2(p+q+P+Q+k)$$
where $k=1$ if $c\ne0$ and 0 otherwise, and $L$ is the maximized likelihood of the model fitted to the \emph{differenced} data $(1-B^m)^D(1-B)^dy_t$. The likelihood of the full model for $y_t$ is not actually defined and so the AIC values for different levels of differencing are not comparable.

One solution to this difficulty is the ``diffuse prior'' approach which is outlined in @DKbook01 and implemented in the \code{arima()} function [@Ripley:2002] in \R. In this approach, the initial values of the time series (before the observed values) are assumed to have mean zero and a large variance. However, choosing $d$ and $D$ by minimizing the AIC using this approach tends to lead to over-differencing. For forecasting purposes, we believe it is better to make as few differences as possible because over-differencing harms forecasts [@SY94] and widens prediction intervals. [Although, see @Hendry97 for a contrary view.]

Consequently, we need some other approach to choose $d$ and $D$. We prefer unit-root tests. However, most unit-root tests are based on a null hypothesis that a unit root exists which biases results towards more differences rather than fewer differences. For example, variations on the Dickey-Fuller test [@DF81] all assume there is a unit root at lag 1, and the HEGY test of @HEGY90 is based on a null hypothesis that there is a seasonal unit root. Instead, we prefer unit-root tests based on a null hypothesis of no unit-root.

For non-seasonal data, we consider ARIMA($p,d,q$) models where $d$ is selected based on successive KPSS unit-root tests [@KPSS92]. That is, we test the data for a unit root; if the test result is significant, we test the differenced data for a unit root; and so on. We stop this procedure when we obtain our first insignificant result.

For seasonal data, we consider ARIMA$(p,d,q)(P,D,Q)_m$ models where $m$ is the seasonal frequency and $D=0$ or $D=1$ depending on an extended Canova-Hansen test [@CH95].
Canova and Hansen only provide critical values for $2<m<13$. Let $C_m$ be the critical value for seasonal period $m$. We plotted $C_m$ against $m$ for values of $m$ up to 365 and noted that they fit the line $C_m = 0.269 m^{0.928}$ almost exactly. So for $m>12$, we use this simple expression to obtain the critical value.

We note in passing that the null hypothesis for the Canova-Hansen test is not an ARIMA model as it includes seasonal dummy terms. It is a test for whether the seasonal pattern changes sufficiently over time to warrant a seasonal unit root, or whether a stable seasonal pattern modelled using fixed dummy variables is more appropriate. Nevertheless, we have found that the test is still useful for choosing $D$ in a strictly ARIMA framework (i.e., without seasonal dummy variables). If a stable seasonal pattern is selected (i.e., the null hypothesis is not rejected), the seasonality is effectively handled by stationary seasonal AR and MA terms.

After $D$ is selected, we choose $d$ by applying successive KPSS unit-root tests to the seasonally differenced data (if $D=1$) or the original data (if $D=0$). Once $d$ (and possibly $D$) are selected, we proceed to select the values of $p$, $q$, $P$ and $Q$ by minimizing the AIC. We allow $c\ne0$ for models where $d+D < 2$.

## A step-wise procedure for traversing the model space

Suppose we have seasonal data and we consider ARIMA$(p,d,q)(P,D,Q)_m$ models where $p$ and $q$ can take values from 0 to 3, and $P$ and $Q$ can take values from 0 to 1. When $c=0$ there is a total of 288 possible models, and when $c\ne 0$ there is a total of 192 possible models, giving 480 models altogether. If the values of $p$, $d$, $q$, $P$, $D$ and $Q$ are allowed to range more widely, the number of possible models increases rapidly. Consequently, it is often not feasible to simply fit every potential model and choose the one with the lowest AIC. Instead, we need a way of traversing the space of models efficiently in order to arrive at the model with the lowest AIC value. We propose a step-wise algorithm as follows.

\begin{description}
\item[Step 1:] We try four possible models to start with.
\begin{itemize}
\item ARIMA($2,d,2$) if $m=1$ and ARIMA($2,d,2)(1,D,1)$ if $m>1$.
\item ARIMA($0,d,0$) if $m=1$ and ARIMA($0,d,0)(0,D,0)$ if $m>1$.
\item ARIMA($1,d,0$) if $m=1$ and ARIMA($1,d,0)(1,D,0)$ if $m>1$.
\item ARIMA($0,d,1$) if $m=1$ and ARIMA($0,d,1)(0,D,1)$ if $m>1$.
\end{itemize}
If $d+D \le 1$, these models are fitted with $c\ne0$. Otherwise, we set $c=0$. Of these four models, we select the one with the smallest AIC value. This is called the ``current'' model and is denoted by ARIMA($p,d,q$) if $m=1$ or ARIMA($p,d,q)(P,D,Q)_m$ if $m>1$.
\item[Step 2:] We consider up to seventeen variations on the current model:
\begin{itemize}
\item where one of $p$, $q$, $P$ and $Q$ is allowed to vary by $\pm1$ from the current model;
\item where $p$ and $q$ both vary by $\pm1$ from the current model;
\item where $P$ and $Q$ both vary by $\pm1$ from the current model;
\item where the constant $c$ is included if the current model has $c=0$ or excluded if the current model has $c\ne0$.
\end{itemize}
Whenever a model with lower AIC is found, it becomes the new ``current'' model and the procedure is repeated. This process finishes when we cannot find a model close to the current model with lower AIC.
\end{description}

There are several constraints on the fitted models to avoid problems with convergence or near unit-roots. The constraints are outlined below.
\begin{compactitem}\itemsep=8pt
\item The values of $p$ and $q$ are not allowed to exceed specified upper bounds (with default values of 5 in each case).
\item The values of $P$ and $Q$ are not allowed to exceed specified upper bounds (with default values of 2 in each case).
\item We reject any model which is ``close'' to non-invertible or non-causal. Specifically, we compute the roots of $\phi(B)\Phi(B)$ and $\theta(B)\Theta(B)$. If either polynomial has a root that is smaller than 1.001 in absolute value, the model is rejected.
\item If there are any errors arising in the non-linear optimization routine used for estimation, the model is rejected. The rationale here is that any model that is difficult to fit is probably not a good model for the data.
\end{compactitem}

The algorithm is guaranteed to return a valid model because the model space is finite and at least one of the starting models will be accepted (the model with no AR or MA parameters). The selected model is used to produce forecasts.

## Comparisons with exponential smoothing

There is a widespread myth that ARIMA models are more general than exponential smoothing. This is not true. The two classes of models overlap. The linear exponential smoothing models are all special cases of ARIMA models---the equivalences are discussed in @HAA08. However, the non-linear exponential smoothing models have no equivalent ARIMA counterpart. On the other hand, there are many ARIMA models which have no exponential smoothing counterpart. Thus, the two model classes overlap and are complementary; each has its strengths and weaknesses.

The exponential smoothing state space models are all non-stationary. Models with seasonality or non-damped trend (or both) have two unit roots; all other models---that is, non-seasonal models with either no trend or damped trend---have one unit root. It is possible to define a stationary model with similar characteristics to exponential smoothing, but this is not normally done. The philosophy of exponential smoothing is that the world is non-stationary. So if a stationary model is required, ARIMA models are better.

One advantage of the exponential smoothing models is that they can be non-linear. So time series that exhibit non-linear characteristics including heteroscedasticity may be better modelled using exponential smoothing state space models.

For seasonal data, there are many more ARIMA models than the 30 possible models in the exponential smoothing class of Section \ref{sec:expsmooth}. It may be thought that the larger model class is advantageous. However, the results in @HKSG02 show that the exponential smoothing models performed better than the ARIMA models for the seasonal M3 competition data. (For the annual M3 data, the ARIMA models performed better.) In a discussion of these results, @Hyndman01 speculates that the larger model space of ARIMA models actually harms forecasting performance because it introduces additional uncertainty. The smaller exponential smoothing class is sufficiently rich to capture the dynamics of almost all real business and economic time series.

# The forecast package {#sec:package}

The algorithms and modelling frameworks for automatic univariate time series forecasting are implemented in the \pkg{forecast} package in \R. We illustrate the methods using the following four real time series shown in Figure \ref{fig:etsexamples}.
\begin{compactitem}
\item Figure \ref{fig:etsexamples}(a) shows 125 monthly US government bond yields (percent per annum) from January 1994 to May 2004.
\item Figure \ref{fig:etsexamples}(b) displays 55 observations of annual US net electricity generation (billion kwh) for 1949 through 2003.
\item Figure \ref{fig:etsexamples}(c) presents 113 quarterly observations of passenger motor vehicle production in the U.K. (thousands of cars) for the first quarter of 1977 through the first quarter of 2005.
\item Figure \ref{fig:etsexamples}(d) shows 240 monthly observations of the number of short term overseas visitors to Australia from May 1985 to April 2005.
\end{compactitem}

```{r etsexamples, fig.height=7, fig.width=9, echo=FALSE, fig.cap="Four time series showing point forecasts and 80\\% \\& 95\\% prediction intervals obtained using exponential smoothing state space models."}
par(mfrow = c(2,2))
mod1 <- ets(bonds)
mod2 <- ets(usnetelec)
mod3 <- ets(ukcars)
mod4 <- ets(visitors)
plot(forecast(mod1), main="(a) US 10-year bonds yield", xlab="Year", ylab="Percentage per annum")
plot(forecast(mod2), main="(b) US net electricity generation", xlab="Year", ylab="Billion kwh")
plot(forecast(mod3), main="(c) UK passenger motor vehicle production", xlab="Year", ylab="Thousands of cars")
plot(forecast(mod4), main="(d) Overseas visitors to Australia", xlab="Year", ylab="Thousands of people")
```

```{r etsnames, echo=FALSE}
etsnames <- c(mod1$method, mod2$method, mod3$method, mod4$method)
etsnames <- gsub("Ad","A\\\\damped",etsnames)
```

## Implementation of the automatic exponential smoothing algorithm

The innovations state space modelling framework described in Section \ref{sec:expsmooth} is implemented via the \code{ets()} function in the \pkg{forecast} package. (The default settings of \code{ets()} do not allow models with multiplicative trend, but they can be included using \code{allow.multiplicative.trend=TRUE}.)
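For example, a multiplicative trend can be permitted for the electricity series as follows.

```{r ets-multtrend, eval=FALSE, echo=TRUE}
# Allow multiplicative-trend models when selecting an ETS model.
fit <- ets(usnetelec, allow.multiplicative.trend = TRUE)
```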
The models chosen via the algorithm for the four data sets were: \begin{compactitem} \item `r etsnames[1]` for monthly US 10-year bonds yield\\ ($\alpha=`r format(coef(mod1)['alpha'], digits=4, nsmall=4)`$, $\beta=`r format(coef(mod1)['beta'], digits=4, nsmall=4)`$, $\phi=`r format(coef(mod1)['phi'], digits=4, nsmall=4)`$, $\ell_0 = `r format(coef(mod1)['l'], digits=4, nsmall=4)`$, $b_0=`r format(coef(mod1)['b'], digits=4, nsmall=4)`$); \item `r etsnames[2]` for annual US net electricity generation\\ ($\alpha=`r format(coef(mod2)['alpha'], digits=4, nsmall=4)`$, $\beta=`r format(coef(mod2)['beta'], digits=4, nsmall=4)`$, $\ell_0 = `r format(coef(mod2)['l'], digits=4, nsmall=4)`$, $b_0=`r format(coef(mod2)['b'], digits=4, nsmall=4)`$); \item `r etsnames[3]` for quarterly UK motor vehicle production\\ ($\alpha=`r format(coef(mod3)['alpha'], digits=4, nsmall=4)`$, $\gamma=`r format(coef(mod3)['gamma'], digits=4, nsmall=4)`$, $\ell_0 = `r format(coef(mod3)['l'], digits=4, nsmall=4)`$, $s_{-3}=`r format(-sum(coef(mod3)[c('s0','s1','s2')]), digits=4, nsmall=4)`$, $s_{-2}=`r format(coef(mod3)['s2'], digits=4, nsmall=4)`$, $s_{-1}=`r format(coef(mod3)['s1'], digits=4, nsmall=4)`$, $s_0=`r format(coef(mod3)['s0'], digits=4, nsmall=4)`$); \item `r etsnames[4]` for monthly Australian overseas visitors\\ ($\alpha=`r format(coef(mod4)['alpha'], digits=4, nsmall=4)`$, $\beta=`r format(coef(mod4)['beta'], digits=2, nsmall=4)`$, $\gamma=`r format(coef(mod4)['gamma'], digits=4, nsmall=4)`$, $\ell_0 = `r format(coef(mod4)['l'], digits=4, nsmall=4)`$, $b_0 = `r format(coef(mod4)['b'], digits=4, nsmall=4)`$, $s_{-11}=`r format(12-sum(tail(coef(mod4),11)), digits=4, nsmall=4)`$, $s_{-10}=`r format(coef(mod4)['s10'], digits=4, nsmall=4)`$, $s_{-9}=`r format(coef(mod4)['s9'], digits=4, nsmall=4)`$, $s_{-8}=`r format(coef(mod4)['s8'], digits=4, nsmall=4)`$, $s_{-7}=`r format(coef(mod4)['s7'], digits=4, nsmall=4)`$, $s_{-6}=`r format(coef(mod4)['s6'], digits=4, nsmall=4)`$, $s_{-5}=`r format(coef(mod4)['s5'], digits=4, nsmall=4)`$, $s_{-4}=`r format(coef(mod4)['s4'], digits=4, nsmall=4)`$, $s_{-3}=`r format(coef(mod4)['s3'], digits=4, nsmall=4)`$, $s_{-2}=`r format(coef(mod4)['s2'], digits=4, nsmall=4)`$, $s_{-1}=`r format(coef(mod4)['s1'], digits=4, nsmall=4)`$, $s_0=`r format(coef(mod4)['s0'], digits=4, nsmall=4)`$). \end{compactitem} Although there is a lot of computation involved, it can be handled remarkably quickly on modern computers. Each of the forecasts shown in Figure \ref{fig:etsexamples} took no more than a few seconds on a standard PC. The US electricity generation series took the longest as there are no analytical prediction intervals available for the ETS(M,M\damped,N) model. Consequently, the prediction intervals for this series were computed using simulation of 5000 future sample paths. To apply the algorithm to the US net electricity generation time series \code{usnetelec}, we use the following command. ```{r ets-usnetelec, echo=TRUE} etsfit <- ets(usnetelec) ``` The object \code{etsfit} is of class ``\code{ets}'' and contains all of the necessary information about the fitted model including model parameters, the value of the state vector $\bm{x}_t$ for all $t$, residuals and so on. Printing the \code{etsfit} object shows the main items of interest. ```{r ets-usnetelec-print,echo=TRUE} etsfit ``` Some goodness-of-fit measures [defined in @HK06] are obtained using \code{accuracy()}. 
```{r ets-usnetelec-accuracy,eval=TRUE,echo=TRUE}
accuracy(etsfit)
```

There are also \code{coef()}, \code{plot()}, \code{summary()}, \code{residuals()}, \code{fitted()} and \code{simulate()} methods for objects of class ``\code{ets}''. The \code{plot()} function shows time plots of the original time series along with the extracted components (level, growth and seasonal).

The \code{forecast()} function computes the required forecasts which are then plotted as in Figure \ref{fig:etsexamples}(b).

```{r ets-usnetelec-fcast, fig.height=5, fig.width=8, message=FALSE, warning=FALSE, include=FALSE, output=FALSE}
fcast <- forecast(etsfit)
plot(fcast)
```

Printing the \code{fcast} object gives a table showing the prediction intervals.

```{r ets-usnetelec-fcast-print,eval=TRUE,echo=TRUE}
fcast
```

The \code{ets()} function also provides the useful feature of applying a fitted model to a new data set. For example, we could withhold 10 observations from the \code{usnetelec} data set when fitting, then compute the one-step forecast errors for the out-of-sample data.

```{r ets-usnetelec-newdata,eval=FALSE,echo=TRUE}
fit <- ets(usnetelec[1:45])
test <- ets(usnetelec[46:55], model = fit)
accuracy(test)
```

We can also look at the measures of forecast accuracy where the forecasts are based on only the fitting data.

```{r ets-usnetelec-fcast-accuracy,eval=FALSE,echo=TRUE}
accuracy(forecast(fit,10), usnetelec[46:55])
```

## The HoltWinters() function

There is another implementation of exponential smoothing in \R\ via the \code{HoltWinters()} function [@Meyer:2002] in the \pkg{stats} package. It implements only the (N,N), (A,N), (A,A) and (A,M) methods. The initial states $\bm{x}_0$ are fixed using a heuristic algorithm. Because of the way the initial states are estimated, a full three years of seasonal data are required to implement the seasonal forecasts using \code{HoltWinters()}. (See @shortseasonal for the minimal sample size required.) The smoothing parameters are optimized by minimizing the average squared prediction errors, which is equivalent to minimizing \eqref{likelihood} in the case of additive errors.

There is a \code{predict()} method for the resulting object which can produce point forecasts and prediction intervals. Although it is nowhere documented, it appears that the prediction intervals produced by \code{predict()} for an object of class \code{HoltWinters} are based on an equivalent ARIMA model in the case of the (N,N), (A,N) and (A,A) methods, assuming additive errors. These prediction intervals are equivalent to the prediction intervals that arise from the (A,N,N), (A,A,N) and (A,A,A) state space models. For the (A,M) method, the prediction interval provided by \code{predict()} appears to be based on @CY91 which is an approximation to the true prediction interval arising from the (A,A,M) model. Prediction intervals with multiplicative errors are not possible using the \code{HoltWinters()} function.
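The contrast between the two interfaces can be seen in the following sketch, where \code{y} stands for a seasonal time series: \code{predict()} on a \code{HoltWinters} fit returns a matrix of forecasts and interval bounds, whereas the corresponding ETS(A,A,A) state space model yields a \code{forecast} object.

```{r hw-vs-ets-sketch, eval=FALSE, echo=TRUE}
# Heuristic initial states, smoothing parameters chosen by least squares:
hwfit <- HoltWinters(y)
predict(hwfit, n.ahead = 8, prediction.interval = TRUE, level = 0.95)
# Maximum likelihood estimation of parameters and initial states:
etsfit <- ets(y, model = "AAA", damped = FALSE)
forecast(etsfit, h = 8, level = 95)
```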
## Implementation of the automatic ARIMA algorithm

```{r arimaexamples, fig.height=7, fig.width=9, echo=FALSE, fig.cap="Four time series showing point forecasts and 80\\% \\& 95\\% prediction intervals obtained using ARIMA models."}
mod1 <- auto.arima(bonds, seasonal=FALSE, approximation=FALSE)
mod2 <- auto.arima(usnetelec)
mod3 <- auto.arima(ukcars)
mod4 <- auto.arima(visitors)
par(mfrow = c(2,2))
plot(forecast(mod1), main="(a) US 10-year bonds yield", xlab="Year", ylab="Percentage per annum")
plot(forecast(mod2), main="(b) US net electricity generation", xlab="Year", ylab="Billion kwh")
plot(forecast(mod3), main="(c) UK passenger motor vehicle production", xlab="Year", ylab="Thousands of cars")
plot(forecast(mod4), main="(d) Overseas visitors to Australia", xlab="Year", ylab="Thousands of people")
```

The algorithm of Section \ref{sec:arima} is applied to the same four time series. Unlike the exponential smoothing algorithm, the ARIMA class of models assumes homoscedasticity, which is not always appropriate. Consequently, transformations are sometimes necessary. For these four time series, we model the raw data for series (a)--(c), but the logged data for series (d). The prediction intervals are back-transformed with the point forecasts to preserve the probability coverage.

To apply this algorithm to the US net electricity generation time series \code{usnetelec}, we use the following commands.

```{r arima-auto-fcast,eval=TRUE,echo=TRUE,fig.show="hide"}
arimafit <- auto.arima(usnetelec)
fcast <- forecast(arimafit)
plot(fcast)
```

```{r arimanames, echo=FALSE}
# Convert character strings to latex
arimanames <- c(as.character(mod1), as.character(mod2), as.character(mod3), as.character(mod4))
arimanames <- gsub("\\[([0-9]*)\\]", "$_{\\1}$", arimanames)
```

The function \code{auto.arima()} implements the algorithm of Section \ref{sec:arima} and returns an object of class \code{Arima}. The resulting forecasts are shown in Figure \ref{fig:arimaexamples}. The fitted models are as follows:
\begin{compactitem}
\item `r arimanames[1]` for monthly US 10-year bonds yield\\ ($\theta_1= `r format(coef(mod1)['ma1'], digits=4, nsmall=4)`$);
\item `r arimanames[2]` for annual US net electricity generation\\ ($\phi_1= `r format(coef(mod2)['ar1'], digits=4, nsmall=4)`$; $\phi_2= `r format(coef(mod2)['ar2'], digits=4, nsmall=4)`$; $\theta_1= `r format(coef(mod2)['ma1'], digits=4, nsmall=4)`$; $\theta_2= `r format(coef(mod2)['ma2'], digits=4, nsmall=4)`$; $c= `r format(coef(mod2)['drift'], digits=4, nsmall=4)`$);
\item `r arimanames[3]` for quarterly UK motor vehicle production\\ ($\phi_1= `r format(coef(mod3)['ar1'], digits=4, nsmall=4)`$; $\phi_2= `r format(coef(mod3)['ar2'], digits=4, nsmall=4)`$; $\Phi_1= `r format(coef(mod3)['sar1'], digits=4, nsmall=4)`$; $\Phi_2= `r format(coef(mod3)['sar2'], digits=4, nsmall=4)`$);
\item `r arimanames[4]` for monthly Australian overseas visitors\\ ($\phi_1= `r format(coef(mod4)['ar1'], digits=4, nsmall=4)`$; $\theta_1= `r format(coef(mod4)['ma1'], digits=4, nsmall=4)`$; $\Theta_1= `r format(coef(mod4)['sma1'], digits=4, nsmall=4)`$; $\Theta_2= `r format(coef(mod4)['sma2'], digits=4, nsmall=4)`$; $c= `r format(coef(mod4)['drift'], digits=4, nsmall=4)`$).
\end{compactitem}
Note that the \R\ parameterization has $\theta(B) = (1 + \theta_1B + \dots + \theta_qB^q)$ and $\phi(B) = (1 - \phi_1B - \dots - \phi_pB^p)$, and similarly for the seasonal terms. A summary of the forecasts is available, part of which is shown below.
```
Forecast method: ARIMA(2,1,2) with drift

Series: usnetelec

Coefficients:
          ar1      ar2     ma1     ma2    drift
      -1.3032  -0.4332  1.5284  0.8340  66.1585
s.e.   0.2122   0.2084  0.1417  0.1185   7.5595

sigma^2 estimated as 2262:  log likelihood=-283.34
AIC=578.67   AICc=580.46   BIC=590.61

Error measures:
                   ME   RMSE    MAE      MPE   MAPE    MASE     ACF1
Training set 0.046402 44.894 32.333 -0.61771 2.1012 0.45813 0.022492

Forecasts:
     Point Forecast    Lo 80    Hi 80    Lo 95    Hi 95
2004       3968.957 3908.002 4029.912 3875.734 4062.180
2005       3970.350 3873.950 4066.751 3822.919 4117.782
2006       4097.171 3971.114 4223.228 3904.383 4289.959
2007       4112.332 3969.691 4254.973 3894.182 4330.482
2008       4218.671 4053.751 4383.591 3966.448 4470.894
2009       4254.559 4076.108 4433.010 3981.641 4527.476
2010       4342.760 4147.088 4538.431 4043.505 4642.014
2011       4393.306 4185.211 4601.401 4075.052 4711.560
2012       4470.261 4248.068 4692.455 4130.446 4810.077
2013       4529.113 4295.305 4762.920 4171.535 4886.690
```

The training set error measures for the two models are very similar. Note that the information criteria are not comparable.

The \pkg{forecast} package also contains the function \code{Arima()} which is largely a wrapper to the \code{arima()} function in the \pkg{stats} package. The \code{Arima()} function in the \pkg{forecast} package makes it easier to include a drift term when $d+D=1$. (Setting \code{include.mean=TRUE} in the \code{arima()} function from the \pkg{stats} package will only work when $d+D=0$.) It also provides the facility for fitting an existing ARIMA model to a new data set (as was demonstrated for the \code{ets()} function earlier).

One-step forecasts for ARIMA models are now available via a \code{fitted()} function. We also provide a new function \code{arima.errors()} which returns the original time series after adjusting for regression variables. If there are no regression variables in the ARIMA model, then the errors will be identical to the original series. If there are regression variables in the ARIMA model, then the errors will be equal to the original series minus the effect of the regression variables, but leaving in the serial correlation that is modelled with the AR and MA terms. In contrast, \code{residuals()} provides true residuals, removing the AR and MA terms as well.

The generic functions \code{summary()}, \code{print()}, \code{fitted()} and \code{forecast()} apply to models obtained from either the \code{Arima()} or \code{arima()} functions.

## The forecast() function

The \code{forecast()} function is generic and has S3 methods for a wide range of time series models. It computes point forecasts and prediction intervals from the time series model. Methods exist for models fitted using \code{ets()}, \code{auto.arima()}, \code{Arima()}, \code{arima()}, \code{ar()}, \code{HoltWinters()} and \texttt{StructTS()}.

There is also a method for a \code{ts} object. If a time series object is passed as the first argument to \code{forecast()}, the function will produce forecasts based on the exponential smoothing algorithm of Section \ref{sec:expsmooth}.

In most cases, there is an existing \code{predict()} function which is intended to do much the same thing. Unfortunately, the resulting objects from the \code{predict()} function contain different information in each case and so it is not possible to build generic functions (such as \code{plot()} and \code{summary()}) for the results. So, instead, \code{forecast()} acts as a wrapper to \code{predict()}, and packages the information obtained in a common format (the \code{forecast} class).
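For example, each of the following calls returns an object of class ``\code{forecast}'', whatever the class of the fitted model; \code{y} is again a placeholder for a seasonal time series.

```{r forecast-generic-sketch, eval=FALSE, echo=TRUE}
# One generic, several model classes, one common output format.
f1 <- forecast(ar(y), h = 10)
f2 <- forecast(HoltWinters(y), h = 10)
f3 <- forecast(StructTS(y, type = "level"), h = 10)
class(f1)  # "forecast", as for f2 and f3
```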
We also define a default \code{predict()} method which is used when no existing \code{predict()} function exists, and calls the relevant \code{forecast()} function. Thus, \code{predict()} methods parallel \code{forecast()} methods, but the latter provide consistent output that is more usable. \subsection[The forecast class]{The \code{forecast} class} The output from the \code{forecast()} function is an object of class ``\code{forecast}'' and includes at least the following information: \begin{compactitem} \item the original series; \item point forecasts; \item prediction intervals of specified coverage; \item the forecasting method used and information about the fitted model; \item residuals from the fitted model; \item one-step forecasts from the fitted model for the period of the observed data. \end{compactitem} There are \code{print()}, \code{plot()} and \code{summary()} methods for the ``\code{forecast}'' class. Figures \ref{fig:etsexamples} and \ref{fig:arimaexamples} were produced using the \code{plot()} method. The prediction intervals are, by default, computed for 80\% and 95\% coverage, although other values are possible if requested. Fan charts [@Wallis99] are possible using the combination \verb|plot(forecast(model.object, fan = TRUE))|. ## Other functions {#sec:other} We now briefly describe some of the other features of the \pkg{forecast} package. Each of the following functions produces an object of class ``\code{forecast}''. \code{croston()} : implements the method of @Croston72 for intermittent demand forecasting. In this method, the time series is decomposed into two separate sequences: the non-zero values and the time intervals between non-zero values. These are then independently forecast using simple exponential smoothing and the forecasts of the original series are obtained as ratios of the two sets of forecasts. No prediction intervals are provided because there is no underlying stochastic model [@SH05]. \code{theta()} : provides forecasts from the Theta method [@AN00]. @HB03 showed that these were equivalent to a special case of simple exponential smoothing with drift. \code{splinef()} : gives cubic-spline forecasts, based on fitting a cubic spline to the historical data and extrapolating it linearly. The details of this method, and the associated prediction intervals, are discussed in @HKPB05. \code{meanf()} : returns forecasts based on the historical mean. \code{rwf()} : gives ``naïve'' forecasts equal to the most recent observation assuming a random walk model. This function also allows forecasting using a random walk with drift. In addition, there are some new plotting functions for time series. \code{tsdisplay()} : provides a time plot along with an ACF and PACF. \code{seasonplot()} : produces a seasonal plot as described in @MWH3. \newpage # Bibliography forecast/R/0000755000176200001440000000000014163720413012262 5ustar liggesusersforecast/R/residuals.R0000644000176200001440000001377114003673410014406 0ustar liggesusers#' Residuals for various time series models #' #' Returns time series of residuals from a fitted model. #' #' Innovation residuals correspond to the white noise process that drives the #' evolution of the time series model. Response residuals are the difference #' between the observations and the fitted values (equivalent to \code{h}-step #' forecasts). For functions with no \code{h} argument, \code{h=1}. For #' homoscedastic models, the innovation residuals and the response residuals #' for \code{h=1} are identical. 
Regression residuals are available for #' regression models with ARIMA errors, and are equal to the original data #' minus the effect of the regression variables. If there are no regression #' variables, the errors will be identical to the original series (possibly #' adjusted to have zero mean). \code{arima.errors} is a deprecated function #' which is identical to \code{residuals.Arima(object, type="regression")}. #' For \code{nnetar} objects, when \code{type="innovations"} and \code{lambda} is used, a #' matrix of time-series consisting of the residuals from each of the fitted neural networks is returned. #' #' @param object An object containing a time series model of class \code{ar}, #' \code{Arima}, \code{bats}, \code{ets}, \code{arfima}, \code{nnetar} or #' \code{stlm}. #' If \code{object} is of class \code{forecast}, then the function will return #' \code{object$residuals} if it exists, otherwise it returns the differences between #' the observations and their fitted values. #' @param type Type of residual. #' @param h If \code{type='response'}, then the fitted values are computed for #' \code{h}-step forecasts. #' @param ... Other arguments not used. #' @return A \code{ts} object. #' @author Rob J Hyndman #' @seealso \code{\link{fitted.Arima}}, \code{\link{checkresiduals}}. #' @keywords ts #' #' @export residuals.forecast <- function(object, type=c("innovation", "response"), ...) { type <- match.arg(type) if (type == "innovation") { object$residuals } else { getResponse(object) - fitted(object) } } #' @rdname residuals.forecast #' @export residuals.ar <- function(object, type=c("innovation", "response"), ...) { type <- match.arg(type) # innovation and response residuals are the same for AR models object$resid } #' @rdname residuals.forecast #' #' @aliases residuals.forecast_ARIMA #' @examples #' fit <- Arima(lynx,order=c(4,0,0), lambda=0.5) #' #' plot(residuals(fit)) #' plot(residuals(fit, type='response')) #' @export residuals.Arima <- function(object, type=c("innovation", "response", "regression"), h=1, ...) { type <- match.arg(type) if (type == "innovation") { object$residuals } else if (type == "response") { getResponse(object) - fitted(object, h = h) } else { x <- getResponse(object) if (!is.null(object$lambda)) { x <- BoxCox(x, object$lambda) } xreg <- getxreg(object) # Remove intercept if (is.element("intercept", names(object$coef))) { xreg <- cbind(rep(1, length(x)), xreg) } # Return errors if (is.null(xreg)) { return(x) } else { norder <- sum(object$arma[1:4]) return(ts( c(x - xreg %*% as.matrix(object$coef[(norder + 1):length(object$coef)])), frequency = frequency(x), start = start(x) )) } } } #' @export residuals.forecast_ARIMA <- residuals.Arima #' @rdname residuals.forecast #' @export residuals.bats <- function(object, type=c("innovation", "response"), h=1, ...) { type <- match.arg(type) if (type == "innovation") { object$errors } else { getResponse(object) - fitted(object, h = h) } } #' @rdname residuals.forecast #' @export residuals.tbats <- function(object, type=c("innovation", "response"), h=1, ...) { type <- match.arg(type) if (type == "innovation") { object$errors } else { getResponse(object) - fitted(object, h = h) } } #' @rdname residuals.forecast #' @export residuals.ets <- function(object, type=c("innovation", "response"), h=1, ...) 
{ type <- match.arg(type) if (type == "innovation") { object$residuals } else { getResponse(object) - fitted(object, h = h) } } #' @rdname residuals.forecast #' @export residuals.ARFIMA <- function(object, type=c("innovation", "response"), ...) { type <- match.arg(type) if (type == "innovation") { if (!is.null(object$residuals)) { # Object produced by arfima() return(object$residuals) } else # Object produced by fracdiff() { if (is.element("x", names(object))) { x <- object$x } else { x <- eval.parent(parse(text = as.character(object$call)[2])) } if (!is.null(object$lambda)) { x <- BoxCox(x, object$lambda) } y <- fracdiff::diffseries(x - mean(x), d = object$d) fit <- arima(y, order = c(length(object$ar), 0, length(object$ma)), include.mean = FALSE, fixed = c(object$ar, -object$ma)) return(residuals(fit, type = "innovation")) } } else { getResponse(object) - fitted(object) } } #' @rdname residuals.forecast #' @export residuals.nnetar <- function(object, type=c("innovation", "response"), h=1, ...) { type <- match.arg(type) if (type == "innovation" && !is.null(object$lambda)) { res <- matrix(unlist(lapply(object$model, residuals)), ncol = length(object$model)) if (!is.null(object$scalex$scale)) { res <- res * object$scalex$scale } } else { res <- getResponse(object) - fitted(object, h = h) } tspx <- tsp(getResponse(object)) res <- ts(res, frequency = tspx[3L], end = tspx[2L]) return(res) } #' @rdname residuals.forecast #' @export residuals.stlm <- function(object, type=c("innovation", "response"), ...) { type <- match.arg(type) if (type == "innovation") { object$residuals } else { getResponse(object) - fitted(object) } } #' @rdname residuals.forecast #' @export residuals.tslm <- function(object, type=c("innovation", "response", "deviance"), ...) { type <- match.arg(type) if (type == "innovation" || type == "deviance") { object$residuals } else { getResponse(object) - fitted(object) } } forecast/R/adjustSeasonalSeeds.R0000644000176200001440000001231514003673410016350 0ustar liggesusers############################################################################### # TBATS code cutWTBATS <- function(use.beta, w.tilda.transpose, seasonal.periods, p=0, q=0) { mask.vector <- numeric(length(seasonal.periods)) i <- length(seasonal.periods) while (i > 1) { for (j in 1:(i - 1)) { if ((seasonal.periods[i] %% seasonal.periods[j]) == 0) { mask.vector[j] <- 1 } } i <- i - 1 } w.pos.counter <- 1 w.pos <- 1 if (use.beta) { w.pos <- w.pos + 1 } for (s in seasonal.periods) { if (mask.vector[w.pos.counter] == 1) { w.tilda.transpose <- w.tilda.transpose[, -((w.pos + 1):(w.pos + s))] } else if (mask.vector[w.pos.counter] < 0) { # Cut more than one off w.pos <- w.pos + s w.tilda.transpose <- w.tilda.transpose[, -c((w.pos + mask.vector[w.pos.counter] + 1):w.pos)] w.pos <- w.pos + mask.vector[w.pos.counter] } else { w.pos <- w.pos + s w.tilda.transpose <- w.tilda.transpose[, -w.pos] w.pos <- w.pos - 1 } w.pos.counter <- w.pos.counter + 1 } if ((p != 0) | (q != 0)) { end.cut <- ncol(w.tilda.transpose) start.cut <- end.cut - (p + q) + 1 w.tilda.transpose <- w.tilda.transpose[, -c(start.cut:end.cut)] } return(list(matrix = w.tilda.transpose, mask.vector = mask.vector)) } # BATS code below ######### cutW <- function(use.beta, w.tilda.transpose, seasonal.periods, p=0, q=0) { mask.vector <- numeric(length(seasonal.periods)) i <- length(seasonal.periods) while (i > 1) { for (j in 1:(i - 1)) { if ((seasonal.periods[i] %% seasonal.periods[j]) == 0) { mask.vector[j] <- 1 } } i <- i - 1 } if (length(seasonal.periods) > 1) { 
for (s in length(seasonal.periods):2) { for (j in (s - 1):1) { hcf <- findGCD(seasonal.periods[s], seasonal.periods[j]) if (hcf != 1) { if ((mask.vector[s] != 1) && (mask.vector[j] != 1)) { mask.vector[s] <- hcf * -1 } } } } } w.pos.counter <- 1 w.pos <- 1 if (use.beta) { w.pos <- w.pos + 1 } for (s in seasonal.periods) { if (mask.vector[w.pos.counter] == 1) { w.tilda.transpose <- w.tilda.transpose[, -((w.pos + 1):(w.pos + s))] } else if (mask.vector[w.pos.counter] < 0) { # Cut more than one off w.pos <- w.pos + s w.tilda.transpose <- w.tilda.transpose[, -c((w.pos + mask.vector[w.pos.counter] + 1):w.pos)] w.pos <- w.pos + mask.vector[w.pos.counter] } else { w.pos <- w.pos + s w.tilda.transpose <- w.tilda.transpose[, -w.pos] w.pos <- w.pos - 1 } w.pos.counter <- w.pos.counter + 1 } if ((p != 0) | (q != 0)) { end.cut <- ncol(w.tilda.transpose) start.cut <- end.cut - (p + q) + 1 w.tilda.transpose <- w.tilda.transpose[, -c(start.cut:end.cut)] } return(list(matrix = w.tilda.transpose, mask.vector = mask.vector)) } calcSeasonalSeeds <- function(use.beta, coefs, seasonal.periods, mask.vector, p=0, q=0) { x.pos.counter <- 1 sum.k <- 0 if (use.beta) { x.pos <- 2 new.x.nought <- matrix(coefs[1:2], nrow = 2, ncol = 1) } else { x.pos <- 1 new.x.nought <- matrix(coefs[1], nrow = 1, ncol = 1) } x.pos.counter <- 1 for (s in seasonal.periods) { if (mask.vector[x.pos.counter] == 1) { # Make a vector of zeros season <- matrix(0, nrow = s, ncol = 1) new.x.nought <- rbind(new.x.nought, season) } else if (mask.vector[x.pos.counter] < 0) { extract <- coefs[(x.pos + 1):(x.pos + s + mask.vector[x.pos.counter])] # print("extract:") # print(extract) # Find k k <- sum(extract) # update sum.k sum.k <- sum.k + k / s # create the current.periodicity vector current.periodicity <- extract - k / s current.periodicity <- matrix(current.periodicity, nrow = length(current.periodicity), ncol = 1) additional <- matrix(-k / s, nrow = (-1 * mask.vector[x.pos.counter]), ncol = 1) current.periodicity <- rbind(current.periodicity, additional) new.x.nought <- rbind(new.x.nought, current.periodicity) x.pos <- x.pos + s + mask.vector[x.pos.counter] } else { # Find k k <- sum(coefs[(x.pos + 1):(x.pos + s - 1)]) # update sum.k sum.k <- sum.k + k / s # create the current.periodicity vector current.periodicity <- coefs[(x.pos + 1):(x.pos + s - 1)] - k / s current.periodicity <- c(current.periodicity, -k / s) current.periodicity <- matrix(current.periodicity, nrow = length(current.periodicity), ncol = 1) new.x.nought <- rbind(new.x.nought, current.periodicity) x.pos <- x.pos + s - 1 } # Adjust L(t) x.pos.counter <- x.pos.counter + 1 } # print(new.x.nought) # Lastly, get the arma error seed states, if they exist. if ((p != 0) | (q != 0)) { arma.seed.states <- numeric((p + q)) arma.seed.states <- matrix(arma.seed.states, nrow = length(arma.seed.states), ncol = 1) # Final value of x.nought x.nought <- rbind(new.x.nought, arma.seed.states) } else { x.nought <- new.x.nought } return(x.nought) } findGCD <- function(larger, smaller) { remainder <- larger %% smaller if (remainder != 0) { return(findGCD(smaller, remainder)) } else { return(smaller) } } forecast/R/forecastBATS.R0000644000176200001440000001707514133712214014674 0ustar liggesusers#' Forecasting using BATS and TBATS models #' #' Forecasts \code{h} steps ahead with a BATS model. Prediction intervals are #' also produced. #' #' @param object An object of class "\code{bats}". Usually the result of a call #' to \code{\link{bats}}. #' @param h Number of periods for forecasting. 
Default value is twice the #' largest seasonal period (for seasonal data) or ten (for non-seasonal data). #' @param level Confidence level for prediction intervals. #' @param fan If TRUE, level is set to \code{seq(51,99,by=3)}. This is suitable #' for fan plots. #' @param biasadj Use adjusted back-transformed mean for Box-Cox #' transformations. If TRUE, point forecasts and fitted values are mean #' forecasts. Otherwise, these points can be considered the median of the #' forecast densities. #' @param ... Other arguments, currently ignored. #' @return An object of class "\code{forecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{forecast.bats}. #' #' An object of class \code{"forecast"} is a list containing at least the #' following elements: \item{model}{A copy of the \code{bats} object} #' \item{method}{The name of the forecasting method as a character string} #' \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for #' prediction intervals} \item{upper}{Upper limits for prediction intervals} #' \item{level}{The confidence values associated with the prediction intervals} #' \item{x}{The original time series (either \code{object} itself or the time #' series used to create the model stored as \code{object}).} #' \item{residuals}{Residuals from the fitted model.} \item{fitted}{Fitted #' values (one-step forecasts)}
#' @author Slava Razbash and Rob J Hyndman #' @seealso \code{\link{bats}}, \code{\link{tbats}}, \code{\link{forecast.ets}}. #' @references De Livera, A.M., Hyndman, R.J., & Snyder, R.D. (2011), #' Forecasting time series with complex seasonal patterns using exponential #' smoothing, \emph{Journal of the American Statistical Association}, #' \bold{106}(496), 1513-1527. #' @keywords ts #' @examples #' #' \dontrun{ #' fit <- bats(USAccDeaths) #' plot(forecast(fit)) #' #' taylor.fit <- bats(taylor) #' plot(forecast(taylor.fit)) #' } #' #' @export forecast.bats <- function(object, h, level=c(80, 95), fan=FALSE, biasadj=NULL, ...)
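# Sketch of the interval calculation implemented below, in the model's
# linear innovations state space form (a summary of the code, not extra
# behaviour): with observation vector w, transition matrix F and
# persistence vector g, the forecast variance at horizon h is
#   v_h = sigma^2 * (1 + sum_{j=1}^{h-1} c_j^2), where c_j = w' F^(j-1) g,
# and each interval is y_h +/- qnorm((100 - level) / 200) * sqrt(v_h).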
{ # Set up the variables if (any(class(object$y) == "ts")) { ts.frequency <- frequency(object$y) } else { ts.frequency <- ifelse(!is.null(object$seasonal.periods), max(object$seasonal.periods), 1) } if (missing(h)) { if (is.null(object$seasonal.periods)) { h <- ifelse(ts.frequency == 1, 10, 2 * ts.frequency) } else { h <- 2 * max(object$seasonal.periods) } } else if (h <= 0) { stop("Forecast horizon out of bounds") } if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 && max(level) < 1) { level <- 100 * level } else if (min(level) < 0 || max(level) > 99.99) { stop("Confidence limit out of range") } } # Set up the matrices x <- matrix(0, nrow = nrow(object$x), ncol = h) y.forecast <- numeric(h) # w <- makeWMatrix(small.phi=object$damping.parameter, seasonal.periods=object$seasonal.periods, ar.coefs=object$ar.coefficients, ma.coefs=object$ma.coefficients) w <- .Call("makeBATSWMatrix", smallPhi_s = object$damping.parameter, sPeriods_s = object$seasonal.periods, arCoefs_s = object$ar.coefficients, maCoefs_s = object$ma.coefficients, PACKAGE = "forecast") # g <- makeGMatrix(alpha=object$alpha, beta=object$beta, gamma.vector=object$gamma.values, seasonal.periods=object$seasonal.periods, p=length(object$ar.coefficients), q=length(object$ma.coefficients)) g <- .Call("makeBATSGMatrix", object$alpha, object$beta, object$gamma.values, object$seasonal.periods, length(object$ar.coefficients), length(object$ma.coefficients), PACKAGE = "forecast") F <- makeFMatrix(alpha = object$alpha, beta = object$beta, small.phi = object$damping.parameter, seasonal.periods = object$seasonal.periods, gamma.bold.matrix = g$gamma.bold.matrix, ar.coefs = object$ar.coefficients, ma.coefs = object$ma.coefficients) # Do the forecast y.forecast[1] <- w$w.transpose %*% object$x[, ncol(object$x)] x[, 1] <- F %*% object$x[, ncol(object$x)] # + g$g %*% object$errors[length(object$errors)] if (h > 1) { for (t in 2:h) { x[, t] <- F %*% x[, (t - 1)] y.forecast[t] <- w$w.transpose %*% x[, (t - 1)] } } ## Make prediction intervals here lower.bounds <- upper.bounds <- matrix(NA, ncol = length(level), nrow = h) variance.multiplier <- numeric(h) variance.multiplier[1] <- 1 if (h > 1) { for (j in 1:(h - 1)) { if (j == 1) { f.running <- diag(ncol(F)) } else { f.running <- f.running %*% F } c.j <- w$w.transpose %*% f.running %*% g$g variance.multiplier[(j + 1)] <- variance.multiplier[j] + c.j ^ 2 } } variance <- object$variance * variance.multiplier # print(variance) st.dev <- sqrt(variance) for (i in 1:length(level)) { marg.error <- st.dev * abs(qnorm((100 - level[i]) / 200)) lower.bounds[, i] <- y.forecast - marg.error upper.bounds[, i] <- y.forecast + marg.error } # Inv Box Cox transform if required if (!is.null(object$lambda)) { y.forecast <- InvBoxCox(y.forecast, object$lambda, biasadj, list(level = level, upper = upper.bounds, lower = lower.bounds)) lower.bounds <- InvBoxCox(lower.bounds, object$lambda) if (object$lambda < 1) { lower.bounds <- pmax(lower.bounds, 0) } upper.bounds <- InvBoxCox(upper.bounds, object$lambda) } colnames(upper.bounds) <- colnames(lower.bounds) <- paste0(level, "%") forecast.object <- list( model = object, mean = future_msts(object$y, y.forecast), level = level, x = object$y, series = object$series, upper = future_msts(object$y, upper.bounds), lower = future_msts(object$y, lower.bounds), fitted = copy_msts(object$y, object$fitted.values), method = as.character(object), residuals = copy_msts(object$y, object$errors) ) if (is.null(object$series)) { forecast.object$series <- 
deparse(object$call$y) } class(forecast.object) <- "forecast" return(forecast.object) } #' @export as.character.bats <- function(x, ...) { name <- "BATS(" if (!is.null(x$lambda)) { name <- paste(name, round(x$lambda, digits = 3), sep = "") } else { name <- paste(name, "1", sep = "") } name <- paste(name, ", {", sep = "") if (!is.null(x$ar.coefficients)) { name <- paste(name, length(x$ar.coefficients), sep = "") } else { name <- paste(name, "0", sep = "") } name <- paste(name, ",", sep = "") if (!is.null(x$ma.coefficients)) { name <- paste(name, length(x$ma.coefficients), sep = "") } else { name <- paste(name, "0", sep = "") } name <- paste(name, "}, ", sep = "") if (!is.null(x$damping.parameter)) { name <- paste(name, round(x$damping.parameter, digits = 3), sep = "") } else { name <- paste(name, "-", sep = "") } name <- paste(name, ", ", sep = "") if (!is.null(x$seasonal.periods)) { name <- paste(name, "{", sep = "") for (i in x$seasonal.periods) { name <- paste(name, i, sep = "") if (i != x$seasonal.periods[length(x$seasonal.periods)]) { name <- paste(name, ",", sep = "") } else { name <- paste(name, "})", sep = "") } } } else { name <- paste(name, "-)", sep = "") } return(name) } forecast/R/seasadj.R0000644000176200001440000000312014003673410014010 0ustar liggesusers## Generic seasadj functions #' Seasonal adjustment #' #' Returns seasonally adjusted data constructed by removing the seasonal #' component. #' #' #' @param object Object created by \code{\link[stats]{decompose}}, #' \code{\link[stats]{stl}} or \code{\link{tbats}}. #' @param ... Other arguments not currently used. #' @return Univariate time series. #' @author Rob J Hyndman #' @seealso \code{\link[stats]{stl}}, \code{\link[stats]{decompose}}, #' \code{\link{tbats}}. #' @keywords ts #' @examples #' plot(AirPassengers) #' lines(seasadj(decompose(AirPassengers,"multiplicative")),col=4) #' #' @export seasadj <- function(object, ...) UseMethod("seasadj") #' @rdname seasadj #' @export seasadj.stl <- function(object, ...) { return(trendcycle(object) + remainder(object)) } #' @rdname seasadj #' @export seasadj.mstl <- function(object, ...) { return(trendcycle(object) + remainder(object)) } #' @rdname seasadj #' @export seasadj.decomposed.ts <- function(object, ...) { if (object$type == "additive") { return(object$x - object$seasonal) } else { return(object$x / object$seasonal) } } #' @rdname seasadj #' @export seasadj.tbats <- function(object, ...) { return(object$y - seasonal(object)) # comp <- tbats.components(object) # scols <- grep("season",colnames(comp)) # sa <- comp[,"observed"] - rowSums(comp[,scols,drop=FALSE]) # # Back transform if necessary # if (!is.null(object$lambda)) # sa <- InvBoxCox(sa, object$lambda) # return(sa) } #' @rdname seasadj #' @export seasadj.seas <- function(object, ...) { return(seasextract_w_na_action(object, "final")) } forecast/R/HoltWintersNew.R0000644000176200001440000004533714147003516015355 0ustar liggesusers# Modelled on the HoltWinters() function but with more conventional # initialization. # Written by Zhenyu Zhou. 
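# The "more conventional" initialization mentioned above amounts to simple
# startup estimates (a summary of the code below, not additional behaviour):
#   l0 = mean(x[1:m]) # average of the first seasonal cycle
#   b0 = (mean(x[m + (1:m)]) - l0) / m # average one-period trend
#   s0 = x[1:m] - l0 (additive) or x[1:m] / l0 (multiplicative)
# For non-seasonal series, l0 = x[1] and b0 = x[2] - x[1], or x[2] / x[1]
# when the trend is exponential.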
21 October 2012 HoltWintersZZ <- function(x, # smoothing parameters alpha = NULL, # level beta = NULL, # trend gamma = NULL, # seasonal component seasonal = c("additive", "multiplicative"), exponential = FALSE, # exponential phi = NULL, # damp lambda = NULL, # box-cox biasadj = FALSE, # adjusted back-transformed mean for box-cox warnings = TRUE # return optimization warnings ) { x <- as.ts(x) origx <- x seasonal <- match.arg(seasonal) m <- frequency(x) lenx <- length(x) if (!is.null(lambda)) { x <- BoxCox(x, lambda) lambda <- attr(x, "lambda") } if (is.null(phi) || !is.numeric(phi)) { phi <- 1 } if (!is.null(alpha) && !is.numeric(alpha)) { stop("cannot fit models without level ('alpha' must not be 0 or FALSE).") } if (!all(is.null(c(alpha, beta, gamma))) && any(c(alpha, beta, gamma) < 0 | c(alpha, beta, gamma) > 1)) { stop("'alpha', 'beta' and 'gamma' must be within the unit interval.") } if ((is.null(gamma) || gamma > 0)) { if (seasonal == "multiplicative" && any(x <= 0)) { stop("data must be positive for multiplicative Holt-Winters.") } } if (m <= 1) { gamma <- FALSE } ## initialise l0, b0, s0 if (!is.null(gamma) && is.logical(gamma) && !gamma) { seasonal <- "none" l.start <- x[1L] s.start <- 0 if (is.null(beta) || !is.logical(beta) || beta) { if (!exponential) { b.start <- x[2L] - x[1L] } else { b.start <- x[2L] / x[1L] } } } else { ## seasonal Holt-Winters l.start <- mean(x[1:m]) b.start <- (mean(x[m + (1:m)]) - l.start) / m if (seasonal == "additive") { s.start <- x[1:m] - l.start } else { s.start <- x[1:m] / l.start } } # initialise smoothing parameters # lower=c(rep(0.0001,3), 0.8) # upper=c(rep(0.9999,3),0.98) lower <- c(0, 0, 0, 0) upper <- c(1, 1, 1, 1) if (!is.null(beta) && is.logical(beta) && !beta) { trendtype <- "N" } else if (exponential) { trendtype <- "M" } else { trendtype <- "A" } if (seasonal == "none") { seasontype <- "N" } else if (seasonal == "multiplicative") { seasontype <- "M" } else { seasontype <- "A" } ## initialise smoothing parameter optim.start <- initparam( alpha = alpha, beta = beta, gamma = gamma, phi = 1, trendtype = trendtype, seasontype = seasontype, damped = FALSE, lower = lower, upper = upper, m = m ) # if(!is.na(optim.start["alpha"])) # alpha2 <- optim.start["alpha"] # else # alpha2 <- alpha # if(!is.na(optim.start["beta"])) # beta2 <- optim.start["beta"] # else # beta2 <- beta # if(!is.na(optim.start["gamma"])) # gamma2 <- optim.start["gamma"] # else # gamma2 <- gamma # if(!check.param(alpha = alpha2,beta = beta2, gamma = gamma2,phi=1,lower,upper,bounds="haha",m=m)) # { # print(paste("alpha=", alpha2, "beta=",beta2, "gamma=",gamma2)) # stop("Parameters out of range") # } ################################################################################### # optimisation: alpha, beta, gamma, if any of them is null, then optimise them error <- function(p, select) { if (select[1] > 0) { alpha <- p[1L] } if (select[2] > 0) { beta <- p[1L + select[1]] } if (select[3] > 0) { gamma <- p[1L + select[1] + select[2]] } zzhw( x, lenx = lenx, alpha = alpha, beta = beta, gamma = gamma, seasonal = seasonal, m = m, dotrend = (!is.logical(beta) || beta), doseasonal = (!is.logical(gamma) || gamma), exponential = exponential, phi = phi, l.start = l.start, b.start = b.start, s.start = s.start )$SSE } select <- as.numeric(c(is.null(alpha), is.null(beta), is.null(gamma))) if (sum(select) > 0) # There are parameters to optimize { sol <- optim(optim.start, error, method = "L-BFGS-B", lower = lower[select], upper = upper[select], select = select) if (sol$convergence || 
any(sol$par < 0 | sol$par > 1)) { if (sol$convergence > 50) { if (warnings) { warning(gettextf("optimization difficulties: %s", sol$message), domain = NA) } } else { stop("optimization failure") } } if (select[1] > 0) { alpha <- sol$par[1L] } if (select[2] > 0) { beta <- sol$par[1L + select[1]] } if (select[3] > 0) { gamma <- sol$par[1L + select[1] + select[2]] } }
final.fit <- zzhw( x, lenx = lenx, alpha = alpha, beta = beta, gamma = gamma, seasonal = seasonal, m = m, dotrend = (!is.logical(beta) || beta), doseasonal = (!is.logical(gamma) || gamma), exponential = exponential, phi = phi, l.start = l.start, b.start = b.start, s.start = s.start ) tspx <- tsp(x) fitted <- ts(final.fit$fitted, frequency = m, start = tspx[1]) res <- ts(final.fit$residuals, frequency = m, start = tspx[1]) if (!is.null(lambda)) { fitted <- InvBoxCox(fitted, lambda, biasadj, var(final.fit$residuals)) attr(lambda, "biasadj") <- biasadj } states <- matrix(final.fit$level, ncol = 1) colnames(states) <- "l" if (trendtype != "N") { states <- cbind(states, b = final.fit$trend) } if (seasontype != "N") { nr <- nrow(states) nc <- ncol(states) for (i in 1:m) states <- cbind(states, final.fit$season[(m - i) + (1:nr)]) colnames(states)[nc + (1:m)] <- paste("s", 1:m, sep = "") } states <- ts(states, frequency = m, start = tspx[1] - 1 / m)
# Package output as HoltWinters class # structure(list(fitted = fitted, # x = x, # alpha = alpha, # beta = beta, # gamma = gamma, # coefficients = c(a = final.fit$level[lenx], # b = if (!is.logical(beta) || beta) final.fit$trend[lenx], # s = if (!is.logical(gamma) || gamma) final.fit$season[lenx - m + 1L:m]), # seasonal = seasonal, # exponential = exponential, # SSE = final.fit$SSE, # call = match.call(), # level = final.fit$level, # trend = final.fit$trend, # season = final.fit$season, # phi = phi # ), # class = "HoltWinters" # )
# Package output as ets class damped <- (phi < 1.0) if (seasonal == "additive") { # This should not happen components <- c("A", trendtype, seasontype, damped) } else if (seasonal == "multiplicative") { components <- c("M", trendtype, seasontype, damped) } else if (seasonal == "none" && exponential) { components <- c("M", trendtype, seasontype, damped) } else { # if(seasonal=="none" & !exponential) components <- c("A", trendtype, seasontype, damped) } initstate <- states[1, ] param <- alpha names(param) <- "alpha" if (trendtype != "N") { param <- c(param, beta = beta) names(param)[length(param)] <- "beta" } if (seasontype != "N") { param <- c(param, gamma = gamma) names(param)[length(param)] <- "gamma" } if (damped) { param <- c(param, phi = phi) names(param)[length(param)] <- "phi" } if (components[1] == "A") { sigma2 <- mean(res ^ 2) } else { sigma2 <- mean((res / fitted) ^ 2) } structure( list( fitted = fitted, residuals = res, components = components, x = origx, par = c(param, initstate), initstate = initstate, states = states, SSE = final.fit$SSE, sigma2 = sigma2, call = match.call(), m = m, lambda = lambda ), class = "ets" ) }
################################################################################### # filter function zzhw <- function(x, lenx, alpha=NULL, beta=NULL, gamma=NULL, seasonal="additive", m, dotrend=FALSE, doseasonal=FALSE, l.start=NULL, exponential = NULL, phi=NULL, b.start=NULL, s.start=NULL) { if (!isTRUE(exponential)) { exponential <- FALSE } if (is.null(phi) || !is.numeric(phi)) { phi <- 1 } # initialise array of l, b, s level <- trend <- season <- xfit <- residuals <- numeric(lenx) SSE <- 0 if (!dotrend) { beta
<- 0 b.start <- 0 } if (!doseasonal) { gamma <- 0 s.start[1:length(s.start)] <- ifelse(seasonal == "additive", 0, 1) } lastlevel <- level0 <- l.start lasttrend <- trend0 <- b.start season0 <- s.start for (i in 1:lenx) { # definel l(t-1) if (i > 1) { lastlevel <- level[i - 1] } # define b(t-1) if (i > 1) { lasttrend <- trend[i - 1] } # define s(t-m) if (i > m) { lastseason <- season[i - m] } else { lastseason <- season0[i] } if (is.na(lastseason)) { lastseason <- ifelse(seasonal == "additive", 0, 1) } # stop((lastlevel + phi*lasttrend)*lastseason) # forecast for this period i if (seasonal == "additive") { if (!exponential) { xhat <- lastlevel + phi * lasttrend + lastseason } else { xhat <- lastlevel * lasttrend ^ phi + lastseason } } else { if (!exponential) { xhat <- (lastlevel + phi * lasttrend) * lastseason } else { xhat <- lastlevel * lasttrend ^ phi * lastseason } } xfit[i] <- xhat res <- x[i] - xhat residuals[i] <- res SSE <- SSE + res * res # calculate level[i] if (seasonal == "additive") { if (!exponential) { level[i] <- alpha * (x[i] - lastseason) + (1 - alpha) * (lastlevel + phi * lasttrend) } else { level[i] <- alpha * (x[i] - lastseason) + (1 - alpha) * (lastlevel * lasttrend ^ phi) } } else { if (!exponential) { level[i] <- alpha * (x[i] / lastseason) + (1 - alpha) * (lastlevel + phi * lasttrend) } else { level[i] <- alpha * (x[i] / lastseason) + (1 - alpha) * (lastlevel * lasttrend ^ phi) } } # calculate trend[i] if (!exponential) { trend[i] <- beta * (level[i] - lastlevel) + (1 - beta) * phi * lasttrend } else { trend[i] <- beta * (level[i] / lastlevel) + (1 - beta) * lasttrend ^ phi } # calculate season[i] if (seasonal == "additive") { if (!exponential) { season[i] <- gamma * (x[i] - lastlevel - phi * lasttrend) + (1 - gamma) * lastseason } else { season[i] <- gamma * (x[i] - lastlevel * lasttrend ^ phi) + (1 - gamma) * lastseason } } else { if (!exponential) { season[i] <- gamma * (x[i] / (lastlevel + phi * lasttrend)) + (1 - gamma) * lastseason } else { season[i] <- gamma * (x[i] / (lastlevel * lasttrend ^ phi)) + (1 - gamma) * lastseason } } } list( SSE = SSE, fitted = xfit, residuals = residuals, level = c(level0, level), trend = c(trend0, trend), season = c(season0, season), phi = phi ) } #' Exponential smoothing forecasts #' #' Returns forecasts and other information for exponential smoothing forecasts #' applied to \code{y}. #' #' ses, holt and hw are simply convenient wrapper functions for #' \code{forecast(ets(...))}. #' #' @param y a numeric vector or time series of class \code{ts} #' @param h Number of periods for forecasting. #' @param damped If TRUE, use a damped trend. #' @param seasonal Type of seasonality in \code{hw} model. "additive" or #' "multiplicative" #' @param level Confidence level for prediction intervals. #' @param fan If TRUE, level is set to seq(51,99,by=3). This is suitable for #' fan plots. #' @param initial Method used for selecting initial state values. If #' \code{optimal}, the initial values are optimized along with the smoothing #' parameters using \code{\link{ets}}. If \code{simple}, the initial values are #' set to values obtained using simple calculations on the first few #' observations. See Hyndman & Athanasopoulos (2014) for details. #' @param exponential If TRUE, an exponential trend is fitted. Otherwise, the #' trend is (locally) linear. #' @param alpha Value of smoothing parameter for the level. If \code{NULL}, it #' will be estimated. #' @param beta Value of smoothing parameter for the trend. 
If \code{NULL}, it #' will be estimated. #' @param gamma Value of smoothing parameter for the seasonal component. If #' \code{NULL}, it will be estimated. #' @param phi Value of damping parameter if \code{damped=TRUE}. If \code{NULL}, #' it will be estimated. #' @param x Deprecated. Included for backwards compatibility. #' @param ... Other arguments passed to \code{forecast.ets}. #' @inheritParams forecast #' #' @return An object of class "\code{forecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{ets} and associated #' functions. #' #' An object of class \code{"forecast"} is a list containing at least the #' following elements: \item{model}{A list containing information about the #' fitted model} \item{method}{The name of the forecasting method as a #' character string} \item{mean}{Point forecasts as a time series} #' \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper #' limits for prediction intervals} \item{level}{The confidence values #' associated with the prediction intervals} \item{x}{The original time series #' (either \code{object} itself or the time series used to create the model #' stored as \code{object}).} \item{residuals}{Residuals from the fitted #' model.} \item{fitted}{Fitted values (one-step forecasts)} #' @author Rob J Hyndman #' @seealso \code{\link{ets}}, \code{\link[stats]{HoltWinters}}, #' \code{\link{rwf}}, \code{\link[stats]{arima}}. #' @references Hyndman, R.J., Koehler, A.B., Ord, J.K., Snyder, R.D. (2008) #' \emph{Forecasting with exponential smoothing: the state space approach}, #' Springer-Verlag: New York. \url{http://www.exponentialsmoothing.net}. #' #' @references Hyndman and Athanasopoulos (2018) \emph{Forecasting: principles #' and practice}, 2nd edition, OTexts: Melbourne, Australia. #' \url{https://otexts.com/fpp2/} #' @keywords ts #' @examples #' #' fcast <- holt(airmiles) #' plot(fcast) #' deaths.fcast <- hw(USAccDeaths,h=48) #' plot(deaths.fcast) #' #' @export ses <- function(y, h = 10, level = c(80, 95), fan = FALSE, initial=c("optimal", "simple"), alpha=NULL, lambda=NULL, biasadj=FALSE, x=y, ...) { initial <- match.arg(initial) if (initial == "optimal") { fcast <- forecast(ets(x, "ANN", alpha = alpha, opt.crit = "mse", lambda = lambda, biasadj = biasadj), h, level = level, fan = fan, ...) } else { fcast <- forecast(HoltWintersZZ(x, alpha = alpha, beta = FALSE, gamma = FALSE, lambda = lambda, biasadj = biasadj), h, level = level, fan = fan, ...) } fcast$method <- fcast$model$method <- "Simple exponential smoothing" fcast$model$call <- match.call() fcast$series <- deparse(substitute(y)) return(fcast) } #' @rdname ses #' @export holt <- function(y, h = 10, damped = FALSE, level = c(80, 95), fan = FALSE, initial=c("optimal", "simple"), exponential=FALSE, alpha=NULL, beta=NULL, phi=NULL, lambda=NULL, biasadj=FALSE, x=y, ...) { initial <- match.arg(initial) if (length(y) <= 1L) { stop("I need at least two observations to estimate trend.") } if (initial == "optimal" || damped) { if (exponential) { fcast <- forecast(ets(x, "MMN", alpha = alpha, beta = beta, phi = phi, damped = damped, opt.crit = "mse", lambda = lambda, biasadj = biasadj), h, level = level, fan = fan, ...) 
} else { fcast <- forecast(ets(x, "AAN", alpha = alpha, beta = beta, phi = phi, damped = damped, opt.crit = "mse", lambda = lambda, biasadj = biasadj), h, level = level, fan = fan, ...) } } else { fcast <- forecast( HoltWintersZZ(x, alpha = alpha, beta = beta, gamma = FALSE, phi = phi, exponential = exponential, lambda = lambda, biasadj = biasadj), h, level = level, fan = fan, ... ) } if (damped) { fcast$method <- "Damped Holt's method" if (initial == "simple") { warning("Damped Holt's method requires optimal initialization") } } else { fcast$method <- "Holt's method" } if (exponential) { fcast$method <- paste(fcast$method, "with exponential trend") } fcast$model$method <- fcast$method fcast$model$call <- match.call() fcast$series <- deparse(substitute(y)) return(fcast) } #' @rdname ses #' @export hw <- function(y, h = 2 * frequency(x), seasonal = c("additive", "multiplicative"), damped = FALSE, level = c(80, 95), fan = FALSE, initial=c("optimal", "simple"), exponential=FALSE, alpha=NULL, beta=NULL, gamma=NULL, phi=NULL, lambda=NULL, biasadj=FALSE, x=y, ...) { initial <- match.arg(initial) seasonal <- match.arg(seasonal) m <- frequency(x) if (m <= 1L) { stop("The time series should have frequency greater than 1.") } if (length(y) < m + 3) { stop(paste("I need at least", m + 3, "observations to estimate seasonality.")) } if (initial == "optimal" || damped) { if (seasonal == "additive" && exponential) { stop("Forbidden model combination") } else if (seasonal == "additive" && !exponential) { fcast <- forecast(ets(x, "AAA", alpha = alpha, beta = beta, gamma = gamma, phi = phi, damped = damped, opt.crit = "mse", lambda = lambda, biasadj = biasadj), h, level = level, fan = fan, ...) } else if (seasonal != "additive" && exponential) { fcast <- forecast(ets(x, "MMM", alpha = alpha, beta = beta, gamma = gamma, phi = phi, damped = damped, opt.crit = "mse", lambda = lambda, biasadj = biasadj), h, level = level, fan = fan, ...) } else { # if(seasonal!="additive" & !exponential) fcast <- forecast(ets(x, "MAM", alpha = alpha, beta = beta, gamma = gamma, phi = phi, damped = damped, opt.crit = "mse", lambda = lambda, biasadj = biasadj), h, level = level, fan = fan, ...) } } else { fcast <- forecast( HoltWintersZZ(x, alpha = alpha, beta = beta, gamma = gamma, phi = phi, seasonal = seasonal, exponential = exponential, lambda = lambda, biasadj = biasadj), h, level = level, fan = fan, ... ) } if (seasonal == "additive") { fcast$method <- "Holt-Winters' additive method" } else { fcast$method <- "Holt-Winters' multiplicative method" } if (exponential) { fcast$method <- paste(fcast$method, "with exponential trend") } if (damped) { fcast$method <- paste("Damped", fcast$method) if (initial == "simple") { warning("Damped methods require optimal initialization") } } fcast$model$method <- fcast$method fcast$model$call <- match.call() fcast$series <- deparse(substitute(y)) return(fcast) } forecast/R/acf.R0000644000176200001440000003435314003673410013143 0ustar liggesusers# Replacement for the acf() function. #' (Partial) Autocorrelation and Cross-Correlation Function Estimation #' #' The function \code{Acf} computes (and by default plots) an estimate of the #' autocorrelation function of a (possibly multivariate) time series. Function #' \code{Pacf} computes (and by default plots) an estimate of the partial #' autocorrelation function of a (possibly multivariate) time series. Function #' \code{Ccf} computes the cross-correlation or cross-covariance of two #' univariate series. 
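#'
#' For example, \code{Acf(wineind, lag.max = 48)} plots four years of
#' monthly autocorrelations with axis ticks at multiples of 12, and
#' \code{Ccf(mdeaths, fdeaths)} estimates the cross-correlation of the two
#' series at both positive and negative lags.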
#' #' The functions improve the \code{\link[stats]{acf}}, #' \code{\link[stats]{pacf}} and \code{\link[stats]{ccf}} functions. The main #' differences are that \code{Acf} does not plot a spike at lag 0 when #' \code{type=="correlation"} (which is redundant) and the horizontal axes show #' lags in time units rather than seasonal units. #' #' The tapered versions implement the ACF and PACF estimates and plots #' described in Hyndman (2015), based on the banded and tapered estimates of #' autocovariance proposed by McMurry and Politis (2010). #'
#' @param x a univariate or multivariate (not Ccf) numeric time series object #' or a numeric vector or matrix. #' @param y a univariate numeric time series object or a numeric vector. #' @param lag.max maximum lag at which to calculate the acf. Default is #' \code{10*log10(N/m)} where \code{N} is the number of observations and \code{m} the number #' of series. Will be automatically limited to one less than the number of #' observations in the series. #' @param type character string giving the type of acf to be computed. Allowed #' values are \dQuote{\code{correlation}} (the default), #' \dQuote{\code{covariance}} or \dQuote{\code{partial}}. #' @param plot logical. If \code{TRUE} (the default) the resulting acf, pacf or #' ccf is plotted. #' @param na.action function to handle missing values. Default is #' \code{\link[stats]{na.contiguous}}. Useful alternatives are #' \code{\link[stats]{na.pass}} and \code{\link{na.interp}}. #' @param demean Should covariances be about the sample means? #' @param calc.ci If \code{TRUE}, confidence intervals for the ACF/PACF #' estimates are calculated. #' @param level Percentage level used for the confidence intervals. #' @param nsim The number of bootstrap samples used in estimating the #' confidence intervals. #' @param ... Additional arguments passed to the plotting function. #' @return The \code{Acf}, \code{Pacf} and \code{Ccf} functions return objects #' of class "acf" as described in \code{\link[stats]{acf}} from the stats #' package. The \code{taperedacf} and \code{taperedpacf} functions return #' objects of class "mpacf". #' @author Rob J Hyndman #' @seealso \code{\link[stats]{acf}}, \code{\link[stats]{pacf}}, #' \code{\link[stats]{ccf}}, \code{\link{tsdisplay}}
#' @references Hyndman, R.J. (2015). Discussion of ``High-dimensional #' autocovariance matrices and optimal linear prediction''. \emph{Electronic #' Journal of Statistics}, 9, 792-796. #' #' McMurry, T.L., & Politis, D.N. (2010). Banded and tapered estimates for #' autocovariance matrices and the linear process bootstrap. \emph{Journal of #' Time Series Analysis}, 31(6), 471-482. #' @keywords ts #' @examples #' #' Acf(wineind) #' Pacf(wineind) #' \dontrun{ #' taperedacf(wineind, nsim=50) #' taperedpacf(wineind, nsim=50) #' } #' #' @export Acf <- function(x, lag.max = NULL, type = c("correlation", "covariance", "partial"), plot = TRUE, na.action = na.contiguous, demean=TRUE, ...)
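# Worked example of the default lag.max computed below (a sketch): for a
# univariate monthly series of length 144, 10 * (log10(144) - log10(1)) is
# about 21.6, so lag.max = max(floor(21.6), 2 * 12) = 24.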
{ type <- match.arg(type) # Set maximum lag nseries <- NCOL(x) if (is.null(lag.max)) { lag.max <- as.integer(max( floor(10 * (log10(NROW(x)) - log10(nseries))), 2 * frequency(x) )) } acf.out <- stats::acf( x, plot = FALSE, lag.max = lag.max, type = type, na.action = na.action, demean = demean ) acf.out$tsp <- tsp(x) acf.out$periods <- attributes(x)$msts acf.out$series <- deparse(substitute(x)) # Make lags in integer units nlags <- dim(acf.out$lag)[1] if (type == "partial") { acf.out$lag[, , ] <- 1:(nlags) } else { acf.out$lag[, , ] <- 0:(nlags - 1) } # Plot if required if (plot) { plot.out <- acf.out # Hide 0 lag if autocorrelations if (type == "correlation") { for (i in 1:NCOL(x)) { plot.out$lag[1, i, i] <- 1 plot.out$acf[1, i, i] <- 0 } } if (nseries > 1) { plot(plot.out, ...) } else { # Check if there is a ylim input input_list <- as.list(substitute(list(...))) ylimarg <- is.element("ylim", names(input_list)) if (ylimarg) { plot(plot.out, xaxt = "n", ...) } else { ylim <- c(-1, 1) * 3 / sqrt(length(x)) ylim <- range(ylim, plot.out$acf) plot(plot.out, ylim = ylim, xaxt = "n", ...) } # Make nice horizontal axis if (is.element("msts", class(x))) { seasonalaxis(attributes(x)$msts, nlags, type = "acf") } else { seasonalaxis(frequency(x), nlags, type = "acf") } if (type == "covariance") { axis(at = 0, side = 1) } } return(invisible(acf.out)) } else { return(acf.out) } } # Make nice horizontal axis with ticks at seasonal lags # Return tick points if breaks=TRUE seasonalaxis <- function(frequency, nlags, type, plot=TRUE) { # List of unlabelled tick points out2 <- NULL # Check for non-seasonal data if (length(frequency) == 1) { # Compute number of seasonal periods np <- trunc(nlags / frequency) evenfreq <- (frequency %% 2L) == 0L # Defaults for labelled tick points if (type == "acf") { out <- pretty(1:nlags) } else { out <- pretty(-nlags:nlags) } if (frequency == 1) { if (type == "acf" && nlags <= 16) { out <- 1:nlags } else if (type == "ccf" && nlags <= 8) { out <- (-nlags:nlags) } else { if (nlags <= 30 && type == "acf") { out2 <- 1:nlags } else if (nlags <= 15 && type == "ccf") { out2 <- (-nlags:nlags) } if (!is.null(out2)) { out <- pretty(out2) } } } else if (frequency > 1 && ((type == "acf" && np >= 2L) || (type == "ccf" && np >= 1L))) { if (type == "acf" && nlags <= 40) { out <- frequency * (1:np) out2 <- 1:nlags # Add half-years if (nlags <= 30 && evenfreq && np <= 3) { out <- c(out, frequency * ((1:np) - 0.5)) } } else if (type == "ccf" && nlags <= 20) { out <- frequency * (-np:np) out2 <- (-nlags:nlags) # Add half-years if (nlags <= 15 && evenfreq && np <= 3) { out <- c(out, frequency * ((-np:np) + 0.5)) } } else if (np < (12 - 4 * (type == "ccf"))) { out <- frequency * (-np:np) } } } else { # Determine which frequency to show np <- trunc(nlags / frequency) frequency <- frequency[which(np <= 16)] if (length(frequency) > 0L) { frequency <- min(frequency) } else { frequency <- 1 } out <- seasonalaxis(frequency, nlags, type, plot = FALSE) } if (plot) { axis(1, at = out) if (!is.null(out2)) { axis(1, at = out2, tcl = -0.2, labels = FALSE) } } else { return(out) } } #' @rdname Acf #' @export Pacf <- function(x, lag.max=NULL, plot = TRUE, na.action = na.contiguous, demean=TRUE, ...) 
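# Illustration of the seasonalaxis() tick placement defined above (not
# executed): with frequency = 12 and nlags = 24, np = 2 whole seasonal
# periods fit, so labelled ticks go at c(12, 24) plus the half-year points
# c(6, 18), and every lag in 1:24 gets a short unlabelled tick via out2.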
{ object <- Acf( x, lag.max = lag.max, type = "partial", na.action = na.action, demean = demean, plot = FALSE ) object$series <- deparse(substitute(x)) # Plot if required if (plot) { nlags <- dim(object$lag)[1] plot.out <- object # Check if there is a ylim input input_list <- as.list(substitute(list(...))) ylimarg <- is.element("ylim", names(input_list)) if (ylimarg) { plot(plot.out, xaxt = "n", ...) } else { ylim <- c(-1, 1) * 3 / sqrt(length(x)) ylim <- range(ylim, plot.out$acf) plot(plot.out, ylim = ylim, xaxt = "n", ...) } # Make nice horizontal axis if (is.element("msts", class(x))) { seasonalaxis(attributes(x)$msts, nlags, type = "acf") } else { seasonalaxis(frequency(x), nlags, type = "acf") } return(invisible(object)) } else { return(object) } }
#' @rdname Acf #' @export Ccf <- function(x, y, lag.max=NULL, type=c("correlation", "covariance"), plot=TRUE, na.action=na.contiguous, ...) { type <- match.arg(type) if (is.null(lag.max)) { lag.max <- as.integer(max(floor(10 * log10(NROW(x))), 2 * frequency(x))) } ccf.out <- stats::ccf( x, y, plot = FALSE, type = type, lag.max = lag.max, na.action = na.action ) # Make lags in integer units nlags <- (dim(ccf.out$lag)[1] - 1) / 2 ccf.out$lag[, 1, 1] <- -nlags:nlags # Plot if required if (plot) { vnames <- c(deparse(substitute(x))[1L], deparse(substitute(y))[1L]) ccf.out$snames <- paste(vnames, collapse = " & ") plot(ccf.out, ylab = "CCF", xaxt = "n", ...) seasonalaxis(frequency(x), nlags, type = "ccf") return(invisible(ccf.out)) } else { return(ccf.out) } }
kappa <- function(x) { k <- rep(0, length(x)) x <- abs(x) k[x <= 1] <- 1 k[x > 1 & x <= 2] <- 2 - x[x > 1 & x <= 2] return(k) }
# McMurry-Politis estimate of ACF wacf <- function(x, lag.max = length(x) - 1) { n <- length(x) lag.max <- min(lag.max, n - 1) if (lag.max < 0) { stop("'lag.max' must be at least 0") } # Standard estimator acfest <- stats::acf( c(x), lag.max = lag.max, plot = FALSE, na.action = na.contiguous ) acfest$series <- deparse(substitute(x)) # Taper estimates s <- 1:length(acfest$acf[, , 1]) upper <- 2 * sqrt(log(n, 10) / n) ac <- abs(acfest$acf[, , 1]) # Find l: ac < upper for 5 consecutive lags j <- (ac < upper) l <- 0 k <- 1 N <- length(j) - 4 while (l < 1 && k <= N) { if (all(j[k:(k + 4)])) { l <- k } else { k <- k + 1 } } acfest$acf[, , 1] <- acfest$acf[, , 1] * kappa(s / l) # End of Tapering # Now do some shrinkage towards white noise using eigenvalues # Construct covariance matrix gamma <- acfest$acf[, , 1] s <- length(gamma) Gamma <- matrix(1, s, s) d <- row(Gamma) - col(Gamma) for (i in 1:(s - 1)) Gamma[d == i | d == (-i)] <- gamma[i + 1] # Compute eigenvalue decomposition ei <- eigen(Gamma) # Shrink eigenvalues d <- pmax(ei$values, 20 / n) # Construct new covariance matrix Gamma2 <- ei$vectors %*% diag(d) %*% t(ei$vectors) Gamma2 <- Gamma2 / mean(d) # Estimate new ACF d <- row(Gamma2) - col(Gamma2) for (i in 2:s) gamma[i] <- mean(Gamma2[d == (i - 1)]) acfest$acf[, , 1] <- gamma ############### end of shrinkage return(acfest) }
# Find tapered PACF using LD recursions wpacf <- function(x, lag.max=length(x) - 1) { # Compute pacf as usual, just to set up structure out <- Pacf(x, lag.max = lag.max, plot = FALSE) # Compute acf using tapered estimate acvf <- wacf(x, lag.max = lag.max)$acf[, , 1] # Durbin-Levinson recursions # Modified from http://faculty.washington.edu/dbp/s519/R-code/LD-recursions.R p <- length(acvf) - 1 phis <- acvf[2] / acvf[1] pev <- rep(acvf[1], p + 1) pacf <- rep(phis, p) pev[2] <- pev[1] * (1 - phis ^ 2) if (p > 1) { for (k in 2:p) { old.phis <-
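# (Levinson-Durbin step, annotated: each pass extends the order-(k - 1)
# coefficients to order k via the reflection coefficient
#   phi_kk = (gamma_k - sum_j phi_{k-1,j} * gamma_{k-j}) / v_{k-1},
# then updates the earlier coefficients and the prediction error variance
#   v_k = v_{k-1} * (1 - phi_kk^2),
# which is exactly what the next few statements compute.)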
phis phis <- rep(0, k) ## compute kth order pacf (reflection coefficient) phis[k] <- (acvf[k + 1] - sum(old.phis * acvf[k:2])) / pev[k] phis[1:(k - 1)] <- old.phis - phis[k] * rev(old.phis) pacf[k] <- phis[k] pev[k + 1] <- pev[k] * (1 - phis[k] ^ 2) # if(abs(pacf[k]) > 1) # warning("PACF larger than 1 in absolute value") } } out$acf[, , 1] <- pacf return(out) } # Function to produce new style plot of ACF or PACF with CI # x = time series #' @rdname Acf #' @export taperedacf <- function(x, lag.max=NULL, type=c("correlation", "partial"), plot=TRUE, calc.ci=TRUE, level=95, nsim=100, ...) { type <- match.arg(type) if (is.null(lag.max)) { lag.max <- max(floor(20 * log10(length(x))), 4 * frequency(x)) } lag <- min(lag.max, length(x) - 1) if (type == "correlation") { z <- wacf(x, )$acf[2:(lag + 1), , 1] } else { z <- wpacf(x, )$acf[1:lag, , 1] } out <- list(z = z, lag = lag, type = type, x = x) if (calc.ci) { # Get confidence intervals for plots bootsim <- lpb(x, nsim = nsim) s1 <- matrix(0, nrow = lag, ncol = nsim) if (type == "correlation") { for (i in 1:nsim) s1[, i] <- wacf(bootsim[, i])$acf[2:(lag + 1), , 1] } else { for (i in 1:nsim) s1[, i] <- wpacf(bootsim[, i])$acf[1:lag, , 1] } prob <- (100 - level) / 200 out$upper <- apply(s1, 1, quantile, prob = 1 - prob) out$lower <- apply(s1, 1, quantile, prob = prob) } out <- structure(out, class = "mpacf") if (!plot) { return(out) } else { plot(out, ...) return(invisible(out)) } return(out) } #' @rdname Acf #' @export taperedpacf <- function(x, ...) { taperedacf(x, type = "partial", ...) } plot.mpacf <- function(object, xlim=NULL, ylim=NULL, xlab="Lag", ylab="", ...) { lagx <- 1:object$lag if (is.null(xlim)) { xlim <- c(1, object$lag) } if (is.null(ylim)) { ylim <- range(object$z, object$upper, object$lower) } if (ylab == "") { ylab <- ifelse(object$type == "partial", "PACF", "ACF") } plot( lagx, object$z, type = "n", xlim = xlim, ylim = ylim, xlab = xlab, ylab = ylab, xaxt = "n", ... 
) grid(col = gray(.80), nx = NA, ny = NULL, lty = 1) abline(h = 0, col = gray(.4)) if (frequency(object$x) > 1) { axis(1, at = (0:100) * frequency(object$x)) for (i in 1:100) abline(v = (i - 1) * frequency(object$x), lty = 1, col = gray(0.80)) } else { axis(1) grid(col = gray(.80), ny = NA, lty = 1) } if (!is.null(object$lower)) { for (j in 1:object$lag) { polygon( lagx[j] + c(-0.55, 0.55, 0.55, -0.55), c(rep(object$lower[j], 2), rep(object$upper[j], 2)), col = gray(0.60), border = FALSE ) } # polygon(c(lagx,rev(lagx)),c(object$lower,rev(object$upper)),col=gray(.60),border=FALSE) } lines(lagx, object$z, lwd = 1.5) j <- (object$lower < 0 & object$upper > 0) points(lagx[j], object$z[j], pch = 1, cex = 0.5) points(lagx[!j], object$z[!j], pch = 19) } #' @rdname is.ets #' @export is.acf <- function(x) { inherits(x, "acf") } forecast/R/ggplot.R0000644000176200001440000023711614163720413013713 0ustar liggesusers#' @inherit ggplot2::autolayer #' @export autolayer <- function(object, ...){ UseMethod("autolayer") } #' @importFrom ggplot2 autoplot #' @export ggplot2::autoplot ggAddExtras <- function(xlab=NA, ylab=NA, main=NA) { dots <- eval.parent(quote(list(...))) extras <- list() if ("xlab" %in% names(dots) || is.null(xlab) || any(!is.na(xlab))) { if ("xlab" %in% names(dots)) { extras[[length(extras) + 1]] <- ggplot2::xlab(dots$xlab) } else { extras[[length(extras) + 1]] <- ggplot2::xlab(paste0(xlab[!is.na(xlab)], collapse = " ")) } } if ("ylab" %in% names(dots) || is.null(ylab) || any(!is.na(ylab))) { if ("ylab" %in% names(dots)) { extras[[length(extras) + 1]] <- ggplot2::ylab(dots$ylab) } else { extras[[length(extras) + 1]] <- ggplot2::ylab(paste0(ylab[!is.na(ylab)], collapse = " ")) } } if ("main" %in% names(dots) || is.null(main) || any(!is.na(main))) { if ("main" %in% names(dots)) { extras[[length(extras) + 1]] <- ggplot2::ggtitle(dots$main) } else { extras[[length(extras) + 1]] <- ggplot2::ggtitle(paste0(main[!is.na(main)], collapse = " ")) } } if ("xlim" %in% names(dots)) { extras[[length(extras) + 1]] <- ggplot2::xlim(dots$xlim) } if ("ylim" %in% names(dots)) { extras[[length(extras) + 1]] <- ggplot2::ylim(dots$ylim) } return(extras) } ggtsbreaks <- function(x) { # Make x axis contain only whole numbers (e.g., years) return(unique(round(pretty(floor(x[1]):ceiling(x[2]))))) } #' ggplot (Partial) Autocorrelation and Cross-Correlation Function Estimation #' and Plotting #' #' Produces a ggplot object of their equivalent Acf, Pacf, Ccf, taperedacf and #' taperedpacf functions. #' #' If \code{autoplot} is given an \code{acf} or \code{mpacf} object, then an #' appropriate ggplot object will be created. #' #' ggtaperedpacf #' @param object Object of class \dQuote{\code{acf}}. #' @param x a univariate or multivariate (not Ccf) numeric time series object #' or a numeric vector or matrix. #' @param y a univariate numeric time series object or a numeric vector. #' @param ci coverage probability for confidence interval. Plotting of the #' confidence interval is suppressed if ci is zero or negative. #' @param lag.max maximum lag at which to calculate the acf. #' @param type character string giving the type of acf to be computed. Allowed #' values are "\code{correlation}" (the default), \dQuote{\code{covariance}} or #' \dQuote{\code{partial}}. #' @param plot logical. If \code{TRUE} (the default) the resulting ACF, PACF or #' CCF is plotted. #' @param na.action function to handle missing values. Default is #' \code{\link[stats]{na.contiguous}}. 
Useful alternatives are #' \code{\link[stats]{na.pass}} and \code{\link{na.interp}}. #' @param demean Should covariances be about the sample means? #' @param calc.ci If \code{TRUE}, confidence intervals for the ACF/PACF #' estimates are calculated. #' @param level Percentage level used for the confidence intervals. #' @param nsim The number of bootstrap samples used in estimating the #' confidence intervals. #' @param ... Other plotting parameters to affect the plot. #' @return A ggplot object. #' @author Mitchell O'Hara-Wild #' @seealso \code{\link[stats]{plot.acf}}, \code{\link{Acf}}, #' \code{\link[stats]{acf}}, \code{\link{taperedacf}} #' @examples #' #' library(ggplot2) #' ggAcf(wineind) #' wineind %>% Acf(plot=FALSE) %>% autoplot #' \dontrun{ #' wineind %>% taperedacf(plot=FALSE) %>% autoplot #' ggtaperedacf(wineind) #' ggtaperedpacf(wineind)} #' ggCcf(mdeaths, fdeaths) #' #' @export autoplot.acf <- function(object, ci=0.95, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. = FALSE) } else { if (!inherits(object, "acf")) { stop("autoplot.acf requires a acf object, use object=object") } acf <- `dimnames<-`(object$acf, list(NULL, object$snames, object$snames)) lag <- `dimnames<-`(object$lag, list(NULL, object$snames, object$snames)) data <- as.data.frame.table(acf)[-1] data$lag <- as.numeric(lag) if (object$type == "correlation" & is.null(object$ccf)) { data <- data[data$lag != 0, ] } # Initialise ggplot object p <- ggplot2::ggplot( ggplot2::aes_(x = ~lag, xend = ~lag, y = 0, yend = ~Freq), data = data ) p <- p + ggplot2::geom_hline(yintercept = 0) # Add data p <- p + ggplot2::geom_segment(lineend = "butt", ...) # Add ci lines (assuming white noise input) ci <- qnorm((1 + ci) / 2) / sqrt(object$n.used) p <- p + ggplot2::geom_hline(yintercept = c(-ci, ci), colour = "blue", linetype = "dashed") # Add facets if needed if(any(dim(object$acf)[2:3] != c(1,1))){ p <- p + ggplot2::facet_grid( as.formula(paste0(colnames(data)[1:2], collapse = "~")) ) } # Prepare graph labels if (!is.null(object$ccf)) { ylab <- "CCF" ticktype <- "ccf" main <- paste("Series:", object$snames) nlags <- round(dim(object$lag)[1] / 2) } else if (object$type == "partial") { ylab <- "PACF" ticktype <- "acf" main <- paste("Series:", object$series) nlags <- dim(object$lag)[1] } else if (object$type == "correlation") { ylab <- "ACF" ticktype <- "acf" main <- paste("Series:", object$series) nlags <- dim(object$lag)[1] } else { ylab <- NULL } # Add seasonal x-axis # Change ticks to be seasonal and prepare default title if (!is.null(object$tsp)) { freq <- object$tsp[3] } else { freq <- 1 } if (!is.null(object$periods)) { periods <- object$periods periods <- periods[periods != freq] minorbreaks <- periods * seq(-20:20) } else { minorbreaks <- NULL } p <- p + ggplot2::scale_x_continuous(breaks = seasonalaxis( freq, nlags, type = ticktype, plot = FALSE ), minor_breaks = minorbreaks) p <- p + ggAddExtras(ylab = ylab, xlab = "Lag", main = main) return(p) } } #' @rdname autoplot.acf #' @export ggAcf <- function(x, lag.max = NULL, type = c("correlation", "covariance", "partial"), plot = TRUE, na.action = na.contiguous, demean=TRUE, ...) 
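# Two equivalent ways to obtain the same plot (both appear in the examples
# above):
# ggAcf(wineind)
# autoplot(Acf(wineind, plot = FALSE))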
{ object <- Acf(x, lag.max = lag.max, type = type, na.action = na.action, demean = demean, plot = FALSE) object$tsp <- tsp(x) object$periods <- attributes(x)$msts object$series <- deparse(substitute(x)) if (plot) { return(autoplot(object, ...)) } else { return(object) } } #' @rdname autoplot.acf #' @export ggPacf <- function(x, lag.max = NULL, plot = TRUE, na.action = na.contiguous, demean=TRUE, ...) { object <- Acf(x, lag.max = lag.max, type = "partial", na.action = na.action, demean = demean, plot = FALSE) object$tsp <- tsp(x) object$periods <- attributes(x)$msts object$series <- deparse(substitute(x)) if (plot) { return(autoplot(object, ...)) } else { return(object) } } #' @rdname autoplot.acf #' @export ggCcf <- function(x, y, lag.max=NULL, type=c("correlation", "covariance"), plot=TRUE, na.action=na.contiguous, ...) { object <- Ccf(x, y, lag.max = lag.max, type = type, na.action = na.action, plot = FALSE) object$snames <- paste(deparse(substitute(x)), "&", deparse(substitute(y))) object$ccf <- TRUE if (plot) { return(autoplot(object, ...)) } else { return(object) } } #' @rdname autoplot.acf #' @export autoplot.mpacf <- function(object, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. = FALSE) } else { if (!inherits(object, "mpacf")) { stop("autoplot.mpacf requires a mpacf object, use object=object") } if (!is.null(object$lower)) { data <- data.frame(Lag = 1:object$lag, z = object$z, sig = (object$lower < 0 & object$upper > 0)) cidata <- data.frame(Lag = rep(1:object$lag, each = 2) + c(-0.5, 0.5), z = rep(object$z, each = 2), upper = rep(object$upper, each = 2), lower = rep(object$lower, each = 2)) plotpi <- TRUE } else { data <- data.frame(Lag = 1:object$lag, z = object$z) plotpi <- FALSE } # Initialise ggplot object p <- ggplot2::ggplot() p <- p + ggplot2::geom_hline(ggplot2::aes(yintercept = 0), size = 0.2) # Add data if (plotpi) { p <- p + ggplot2::geom_ribbon(ggplot2::aes_(x = ~Lag, ymin = ~lower, ymax = ~upper), data = cidata, fill = "grey50") } p <- p + ggplot2::geom_line(ggplot2::aes_(x = ~Lag, y = ~z), data = data) if (plotpi) { p <- p + ggplot2::geom_point(ggplot2::aes_(x = ~Lag, y = ~z, colour = ~sig), data = data) } # Change ticks to be seasonal freq <- frequency(object$x) msts <- is.element("msts", class(object$x)) # Add seasonal x-axis if (msts) { periods <- attributes(object$x)$msts periods <- periods[periods != freq] minorbreaks <- periods * seq(-20:20) } else { minorbreaks <- NULL } p <- p + ggplot2::scale_x_continuous( breaks = seasonalaxis(frequency(object$x), length(data$Lag), type = "acf", plot = FALSE), minor_breaks = minorbreaks ) if (object$type == "partial") { ylab <- "PACF" } else if (object$type == "correlation") { ylab <- "ACF" } p <- p + ggAddExtras(ylab = ylab) return(p) } } #' @rdname autoplot.acf #' @export ggtaperedacf <- function(x, lag.max=NULL, type=c("correlation", "partial"), plot=TRUE, calc.ci=TRUE, level=95, nsim=100, ...) { cl <- match.call() if (plot) { cl$plot <- FALSE } cl[[1]] <- quote(taperedacf) object <- eval.parent(cl) if (plot) { return(autoplot(object, ...)) } else { return(object) } } #' @rdname autoplot.acf #' @export ggtaperedpacf <- function(x, ...) { ggtaperedacf(x, type = "partial", ...) } #' @rdname plot.Arima #' @export autoplot.Arima <- function(object, type = c("both", "ar", "ma"), ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. 
Install it via install.packages(\"ggplot2\")", call. = FALSE) } else { if (is.Arima(object)) { # Detect type type <- match.arg(type) q <- p <- 0 if (length(object$model$phi) > 0) { test <- abs(object$model$phi) > 1e-09 if (any(test)) { p <- max(which(test)) } } if (length(object$model$theta) > 0) { test <- abs(object$model$theta) > 1e-09 if (any(test)) { q <- max(which(test)) } } if (type == "both") { type <- c("ar", "ma") } } else if (inherits(object, "ar")) { type <- "ar" p <- length(arroots(object)$roots) q <- 0 } else { stop("autoplot.Arima requires an Arima object") } # Remove NULL type type <- intersect(type, c("ar", "ma")[c(p > 0, q > 0)]) # Prepare data arData <- maData <- NULL allRoots <- data.frame(roots = numeric(0), type = character(0)) if ("ar" %in% type && p > 0) { arData <- arroots(object) allRoots <- rbind(allRoots, data.frame(roots = arData$roots, type = arData$type)) } if ("ma" %in% type && q > 0) { maData <- maroots(object) allRoots <- rbind(allRoots, data.frame(roots = maData$roots, type = maData$type)) } allRoots$Real <- Re(1 / allRoots$roots) allRoots$Imaginary <- Im(1 / allRoots$roots) allRoots$UnitCircle <- factor(ifelse((abs(allRoots$roots) > 1), "Within", "Outside")) # Initialise general ggplot object p <- ggplot2::ggplot(ggplot2::aes_(x = ~Real, y = ~Imaginary, colour = ~UnitCircle), data = allRoots) p <- p + ggplot2::coord_fixed(ratio = 1) p <- p + ggplot2::annotate( "path", x = cos(seq(0, 2 * pi, length.out = 100)), y = sin(seq(0, 2 * pi, length.out = 100)) ) p <- p + ggplot2::geom_vline(xintercept = 0) p <- p + ggplot2::geom_hline(yintercept = 0) p <- p + ggAddExtras(xlab = "Real", ylab = "Imaginary") if (NROW(allRoots) == 0) { return(p + ggAddExtras(main = "No AR or MA roots")) } p <- p + ggplot2::geom_point(size = 3) if (length(type) == 1) { p <- p + ggAddExtras(main = paste("Inverse", toupper(type), "roots")) } else { p <- p + ggplot2::facet_wrap(~ type, labeller = function(labels) lapply(labels, function(x) paste("Inverse", as.character(x), "roots"))) } } return(p) } #' @rdname plot.Arima #' @export autoplot.ar <- function(object, ...) { autoplot.Arima(object, ...) } #' @rdname autoplot.seas #' @export autoplot.decomposed.ts <- function(object, labels=NULL, range.bars = NULL, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. 
= FALSE) } else { if (!inherits(object, "decomposed.ts")) { stop("autoplot.decomposed.ts requires a decomposed.ts object") } if (is.null(labels)) { labels <- c("trend", "seasonal", "remainder") } cn <- c("data", labels) data <- data.frame( datetime = rep(time(object$x), 4), y = c(object$x, object$trend, object$seasonal, object$random), parts = factor(rep(cn, each = NROW(object$x)), levels = cn) ) # Initialise ggplot object p <- ggplot2::ggplot(ggplot2::aes_(x = ~datetime, y = ~y), data = data) # Add data int <- as.numeric(object$type == "multiplicative") p <- p + ggplot2::geom_line(ggplot2::aes_(x = ~datetime, y = ~y), data = subset(data, data$parts != cn[4]), na.rm = TRUE) p <- p + ggplot2::geom_segment( ggplot2::aes_(x = ~datetime, xend = ~datetime, y = int, yend = ~y), data = subset(data, data$parts == cn[4]), lineend = "butt", na.rm = TRUE ) p <- p + ggplot2::facet_grid("parts ~ .", scales = "free_y", switch = "y") p <- p + ggplot2::geom_hline(ggplot2::aes_(yintercept = ~y), data = data.frame(y = int, parts = factor(cn[4], levels = cn))) if (is.null(range.bars)) { range.bars <- object$type == "additive" } if (range.bars) { yranges <- vapply(split(data$y, data$parts), function(x) range(x, na.rm = TRUE), numeric(2)) xranges <- range(data$datetime) barmid <- apply(yranges, 2, mean) barlength <- min(apply(yranges, 2, diff)) barwidth <- (1 / 64) * diff(xranges) barpos <- data.frame( left = xranges[2] + barwidth, right = xranges[2] + barwidth * 2, top = barmid + barlength / 2, bottom = barmid - barlength / 2, parts = factor(colnames(yranges), levels = cn), datetime = xranges[2], y = barmid ) p <- p + ggplot2::geom_rect(ggplot2::aes_(xmin = ~left, xmax = ~right, ymax = ~top, ymin = ~bottom), data = barpos, fill = "gray75", colour = "black", size = 1 / 3) } # Add axis labels p <- p + ggAddExtras( main = paste("Decomposition of", object$type, "time series"), xlab = "Time", ylab = "" ) # Make x axis contain only whole numbers (e.g., years) p <- p + ggplot2::scale_x_continuous(breaks = unique(round(pretty(data$datetime)))) return(p) } } #' @rdname plot.ets #' @export autoplot.ets <- function(object, range.bars = NULL, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. 
= FALSE) } else { if (!is.ets(object)) { stop("autoplot.ets requires an ets object, use object=object") } names <- c(y = "observed", l = "level", b = "slope", s1 = "season") data <- cbind(object$x, object$states[, colnames(object$states) %in% names(names)]) cn <- c("y", c(colnames(object$states))) colnames(data) <- cn <- names[stats::na.exclude(match(cn, names(names)))] # Convert to longform data <- data.frame( datetime = rep(time(data), NCOL(data)), y = c(data), parts = factor(rep(cn, each = NROW(data)), levels = cn) ) # Initialise ggplot object p <- ggplot2::ggplot(ggplot2::aes_(x = ~datetime, y = ~y), data = data, ylab = "") # Add data p <- p + ggplot2::geom_line(na.rm = TRUE) p <- p + ggplot2::facet_grid(parts ~ ., scales = "free_y", switch = "y") if (is.null(range.bars)) { range.bars <- is.null(object$lambda) } if (range.bars) { yranges <- vapply(split(data$y, data$parts), function(x) range(x, na.rm = TRUE), numeric(2)) xranges <- range(data$datetime) barmid <- apply(yranges, 2, mean) barlength <- min(apply(yranges, 2, diff)) barwidth <- (1 / 64) * diff(xranges) barpos <- data.frame( left = xranges[2] + barwidth, right = xranges[2] + barwidth * 2, top = barmid + barlength / 2, bottom = barmid - barlength / 2, parts = factor(colnames(yranges), levels = cn), datetime = xranges[2], y = barmid ) p <- p + ggplot2::geom_rect(ggplot2::aes_(xmin = ~left, xmax = ~right, ymax = ~top, ymin = ~bottom), data = barpos, fill = "gray75", colour = "black", size = 1 / 3) } p <- p + ggAddExtras(xlab = NULL, ylab = "", main = paste("Components of", object$method, "method")) return(p) } } #' @rdname plot.bats #' @export autoplot.tbats <- function(object, range.bars = FALSE, ...) { cl <- match.call() cl[[1]] <- quote(autoplot.bats) eval.parent(cl) } #' @rdname plot.bats #' @export autoplot.bats <- function(object, range.bars = FALSE, ...) { data <- tbats.components(object) cn <- colnames(data) # Convert to longform data <- data.frame( datetime = rep(time(data), NCOL(data)), y = c(data), parts = factor(rep(cn, each = NROW(data)), levels = cn) ) # Initialise ggplot object p <- ggplot2::ggplot(ggplot2::aes_(x = ~datetime, y = ~y), data = data, ylab = "") # Add data p <- p + ggplot2::geom_line(na.rm = TRUE) p <- p + ggplot2::facet_grid(parts ~ ., scales = "free_y", switch = "y") if (range.bars) { yranges <- vapply(split(data$y, data$parts), function(x) range(x, na.rm = TRUE), numeric(2)) xranges <- range(data$datetime) barmid <- apply(yranges, 2, mean) barlength <- min(apply(yranges, 2, diff)) barwidth <- (1 / 64) * diff(xranges) barpos <- data.frame( left = xranges[2] + barwidth, right = xranges[2] + barwidth * 2, top = barmid + barlength / 2, bottom = barmid - barlength / 2, parts = factor(colnames(yranges), levels = cn), datetime = xranges[2], y = barmid ) p <- p + ggplot2::geom_rect(ggplot2::aes_(xmin = ~left, xmax = ~right, ymax = ~top, ymin = ~bottom), data = barpos, fill = "gray75", colour = "black", size = 1 / 3) } p <- p + ggAddExtras(xlab = NULL, ylab = "", main = paste("Components of", object$method, "method")) return(p) } #' @rdname plot.forecast #' @export autoplot.forecast <- function(object, include, PI=TRUE, shadecols=c("#596DD5", "#D5DBFF"), fcol="#0000AA", flwd=0.5, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. 
= FALSE) } else { if (!is.forecast(object)) { stop("autoplot.forecast requires a forecast object, use object=object") } if (is.null(object$lower) || is.null(object$upper) || is.null(object$level)) { PI <- FALSE } else if (!is.finite(max(object$upper))) { PI <- FALSE } if (!is.null(object$model$terms) && !is.null(object$model$model)) { # Initialise original dataset mt <- object$model$terms if (!is.null(object$series)) { yvar <- object$series } else { yvar <- deparse(mt[[2]]) } # Perhaps a better way to do this xvar <- attr(mt, "term.labels") vars <- c(yvar = yvar, xvar = xvar) data <- object$model$model colnames(data) <- names(vars)[match(colnames(data), vars)] if (!is.null(object$model$lambda)) { data$yvar <- InvBoxCox(data$yvar, object$model$lambda) } } else { if (!is.null(object$x)) { data <- data.frame(yvar = c(object$x)) } else if (!is.null(object$residuals) && !is.null(object$fitted)) { data <- data.frame(yvar = c(object$residuals + object$fitted)) } else { stop("Could not find data") } if (!is.null(object$series)) { vars <- c(yvar = object$series) } else if (!is.null(object$model$call)) { vars <- c(yvar = deparse(object$model$call$y)) if (vars == "object") { vars <- c(yvar = "y") } } else { vars <- c(yvar = "y") } } # Initialise ggplot object p <- ggplot2::ggplot() # Cross sectional forecasts if (!is.element("ts", class(object$mean))) { if (length(xvar) > 1) { stop("Forecast plot for regression models only available for a single predictor") } if (NCOL(object$newdata) == 1) { # Make sure column has correct name colnames(object$newdata) <- xvar } flwd <- 2 * flwd # Scale for points # Data points p <- p + ggplot2::geom_point(ggplot2::aes_(x = ~xvar, y = ~yvar), data = data) p <- p + ggplot2::labs(y = vars["yvar"], x = vars["xvar"]) # Forecasted intervals if (PI) { levels <- NROW(object$level) interval <- data.frame(xpred = rep(object$newdata[[1]], levels), lower = c(object$lower), upper = c(object$upper), level = rep(object$level, each = NROW(object$newdata[[1]]))) interval <- interval[order(interval$level, decreasing = TRUE), ] # Must be ordered for gg z-index p <- p + ggplot2::geom_linerange(ggplot2::aes_(x = ~xpred, ymin = ~lower, ymax = ~upper, colour = ~level), data = interval, size = flwd) if (length(object$level) <= 5) { p <- p + ggplot2::scale_colour_gradientn(breaks = object$level, colours = shadecols, guide = "legend") } else { p <- p + ggplot2::scale_colour_gradientn(colours = shadecols, guide = "colourbar") } } # Forecasted points predicted <- data.frame(object$newdata, object$mean) colnames(predicted) <- c("xpred", "ypred") p <- p + ggplot2::geom_point(ggplot2::aes_(x = ~xpred, y = ~ypred), data = predicted, color = fcol, size = flwd) # Line of best fit coef <- data.frame(int = 0, m = 0) i <- match("(Intercept)", names(object$model$coefficients)) if (!is.na(i)) { coef$int <- object$model$coefficients[i] if (NROW(object$model$coefficients) == 2) { coef$m <- object$model$coefficients[-i] } } else { if (NROW(object$model$coefficients) == 1) { coef$m <- object$model$coefficients } } p <- p + ggplot2::geom_abline(intercept = coef$int, slope = coef$m) } else { # Time series objects (assumed) if(!missing(shadecols)){ warning( "The `shadecols` argument is deprecated for time series forecasts. Interval shading is now done automatically based on the level and `fcol`.", call.
= FALSE) } # Data points if (!is.null(time(object$x))) { timex <- time(object$x) } else if (!is.null(time(object$model$residuals))) { timex <- time(object$model$residuals) } data <- data.frame(yvar = as.numeric(data$yvar), datetime = as.numeric(timex)) if (!missing(include)) { data <- tail(data, include) } p <- p + ggplot2::scale_x_continuous() p <- p + ggplot2::geom_line(ggplot2::aes_(x = ~datetime, y = ~yvar), data = data) + ggplot2::labs(y = vars["yvar"], x = "Time") # Forecasted intervals p <- p + autolayer(object, PI = PI, colour = fcol, size = flwd) # predicted <- data.frame(xvar = time(object$mean), yvar = object$mean) # colnames(predicted) <- c("datetime", "ypred") # if (PI) { # levels <- NROW(object$level) # interval <- data.frame(datetime = rep(predicted$datetime, levels), lower = c(object$lower), upper = c(object$upper), level = rep(object$level, each = NROW(object$mean))) # interval <- interval[order(interval$level, decreasing = TRUE), ] # Must be ordered for gg z-index # p <- p + ggplot2::geom_ribbon(ggplot2::aes_(x = ~datetime, ymin = ~lower, ymax = ~upper, group = ~-level, fill = ~level), data = interval) # if (min(object$level) < 50) { # scalelimit <- c(1, 99) # } # else { # scalelimit <- c(50, 99) # } # if (length(object$level) <= 5) { # p <- p + ggplot2::scale_fill_gradientn(breaks = object$level, colours = shadecols, limit = scalelimit, guide = "legend") # } # else { # p <- p + ggplot2::scale_fill_gradientn(colours = shadecols, limit = scalelimit) # } # # Negative group is a work around for missing z-index # } # # Forecasted points # p <- p + ggplot2::geom_line(ggplot2::aes_(x = ~datetime, y = ~ypred), data = predicted, color = fcol, size = flwd) } p <- p + ggAddExtras(main = paste("Forecasts from ", object$method, sep = "")) return(p) } } #' @rdname plot.mforecast #' @export autoplot.mforecast <- function(object, PI = TRUE, facets = TRUE, colour = FALSE, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. = FALSE) } else { if (!is.mforecast(object)) { stop("autoplot.mforecast requires a mforecast object, use object=object") } if (is.ts(object$forecast[[1]]$mean)) { # ts forecasts p <- autoplot(getResponse(object), facets = facets, colour = colour) + autolayer(object, ...) if (facets) { p <- p + ggplot2::facet_wrap( ~ series, labeller = function(labels) { if (!is.null(object$method)) { lapply(labels, function(x) paste0(as.character(x), "\n", object$method[as.character(x)])) } else { lapply(labels, function(x) paste0(as.character(x))) } }, ncol = 1, scales = "free_y" ) } p <- p + ggAddExtras(ylab = NULL) return(p) } else { # lm forecasts if (!requireNamespace("grid")) { stop("grid is needed for this function to work. Install it via install.packages(\"grid\")", call. 
= FALSE) } K <- length(object$forecast) if (K < 2) { warning("Expected at least two forecasts to plot, but found fewer.") } # Set up vector arguments if (missing(PI)) { PI <- rep(TRUE, K) } # Set up grid # ncol: Number of columns of plots # nrow: Number of rows needed, calculated from # of cols gridlayout <- matrix(seq(1, K), ncol = 1, nrow = K) grid::grid.newpage() grid::pushViewport(grid::viewport(layout = grid::grid.layout(nrow(gridlayout), ncol(gridlayout)))) for (i in 1:K) { partialfcast <- object$forecast[[i]] partialfcast$model <- mlmsplit(object$model, index = i) matchidx <- as.data.frame(which(gridlayout == i, arr.ind = TRUE)) print( autoplot( structure(partialfcast, class = "forecast"), PI = PI[i], ... ) + ggAddExtras(ylab = names(object$forecast)[i]), vp = grid::viewport( layout.pos.row = matchidx$row, layout.pos.col = matchidx$col ) ) } } } } #' @rdname tsdisplay #' #' @examples #' library(ggplot2) #' ggtsdisplay(USAccDeaths, plot.type="scatter", theme=theme_bw()) #' #' @export ggtsdisplay <- function(x, plot.type=c("partial", "histogram", "scatter", "spectrum"), points=TRUE, smooth=FALSE, lag.max, na.action=na.contiguous, theme=NULL, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. = FALSE) } else if (!requireNamespace("grid", quietly = TRUE)) { stop("grid is needed for this function to work. Install it via install.packages(\"grid\")", call. = FALSE) } else { if (NCOL(x) > 1) { stop("ggtsdisplay is only for univariate time series") } plot.type <- match.arg(plot.type) main <- deparse(substitute(x)) if (!is.ts(x)) { x <- ts(x) } if (missing(lag.max)) { lag.max <- round(min(max(10 * log10(length(x)), 3 * frequency(x)), length(x) / 3)) } dots <- list(...)
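# Layout sketch: the gridlayout matrix defined below is filled column-wise,
# so matrix(c(1, 2, 1, 3), nrow = 2) gives
#   | 1 | 1 |
#   | 2 | 3 |
# i.e. the time series plot (1) spans the top row, with the ACF (2) and the
# chosen third plot (3) side by side underneath.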
if (is.null(dots$xlab)) { dots$xlab <- "" } if (is.null(dots$ylab)) { dots$ylab <- "" } labs <- match(c("xlab", "ylab", "main"), names(dots), nomatch = 0) # Set up grid for plots gridlayout <- matrix(c(1, 2, 1, 3), nrow = 2) grid::grid.newpage() grid::pushViewport(grid::viewport(layout = grid::grid.layout(nrow(gridlayout), ncol(gridlayout)))) # Add ts plot with points matchidx <- as.data.frame(which(gridlayout == 1, arr.ind = TRUE)) tsplot <- do.call(ggplot2::autoplot, c(object = quote(x), dots[labs])) if (points) { tsplot <- tsplot + ggplot2::geom_point(size = 0.5) } if (smooth) { tsplot <- tsplot + ggplot2::geom_smooth(method = "loess", se = FALSE) } if (is.null(tsplot$labels$title)) { # Add title if missing tsplot <- tsplot + ggplot2::ggtitle(main) } if (!is.null(theme)) { tsplot <- tsplot + theme } print( tsplot, vp = grid::viewport( layout.pos.row = matchidx$row, layout.pos.col = matchidx$col ) ) # Prepare Acf plot acfplot <- do.call(ggAcf, c(x = quote(x), lag.max = lag.max, na.action = na.action, dots[-labs])) + ggplot2::ggtitle(NULL) if (!is.null(theme)) { acfplot <- acfplot + theme } # Prepare last plot (variable) if (plot.type == "partial") { lastplot <- ggPacf(x, lag.max = lag.max, na.action = na.action) + ggplot2::ggtitle(NULL) # Match y-axis acfplotrange <- ggplot2::layer_scales(acfplot)$y$range$range pacfplotrange <- ggplot2::layer_scales(lastplot)$y$range$range yrange <- range(c(acfplotrange, pacfplotrange)) acfplot <- acfplot + ggplot2::ylim(yrange) lastplot <- lastplot + ggplot2::ylim(yrange) } else if (plot.type == "histogram") { lastplot <- gghistogram(x, add.normal = TRUE, add.rug = TRUE) + ggplot2::xlab(main) } else if (plot.type == "scatter") { scatterData <- data.frame(y = x[2:NROW(x)], x = x[1:(NROW(x) - 1)]) lastplot <- ggplot2::ggplot(ggplot2::aes_(y = ~y, x = ~x), data = scatterData) + ggplot2::geom_point() + ggplot2::labs(x = expression(Y[t - 1]), y = expression(Y[t])) } else if (plot.type == "spectrum") { specData <- spec.ar(x, plot = FALSE) specData <- data.frame(spectrum = specData$spec, frequency = specData$freq) lastplot <- ggplot2::ggplot(ggplot2::aes_(y = ~spectrum, x = ~frequency), data = specData) + ggplot2::geom_line() + ggplot2::scale_y_log10() } if (!is.null(theme)) { lastplot <- lastplot + theme } # Add ACF plot matchidx <- as.data.frame(which(gridlayout == 2, arr.ind = TRUE)) print( acfplot, vp = grid::viewport( layout.pos.row = matchidx$row, layout.pos.col = matchidx$col ) ) # Add last plot matchidx <- as.data.frame(which(gridlayout == 3, arr.ind = TRUE)) print( lastplot, vp = grid::viewport( layout.pos.row = matchidx$row, layout.pos.col = matchidx$col ) ) } } #' Time series lag ggplots #' #' Plots a lag plot using ggplot. #' #' \dQuote{gglagplot} will plot time series against lagged versions of #' themselves. It helps visualise 'auto-dependence' even when auto-correlations #' vanish. #' #' \dQuote{gglagchull} will layer convex hulls of the lags on a single #' plot. This helps visualise the change in 'auto-dependence' as lags increase. #' #' @param x a time series object (type \code{ts}). #' @param lags number of lag plots desired, see arg set.lags. #' @param set.lags vector of positive integers specifying which lags to use. #' @param diag logical indicating if the x=y diagonal should be drawn. #' @param diag.col color to be used for the diagonal if(diag). #' @param do.lines if TRUE, lines will be drawn, otherwise points will be #' drawn. #' @param colour logical indicating if lines should be coloured.
#' @param continuous Should the colour scheme for years be continuous or #' discrete? #' @param labels logical indicating if labels should be used. #' @param seasonal Should the line colour be based on seasonal characteristics #' (TRUE), or sequential (FALSE). #' @param \dots Not used (for consistency with lag.plot) #' @return A ggplot object. #' @author Mitchell O'Hara-Wild #' @seealso \code{\link[stats]{lag.plot}} #' @examples #' #' gglagplot(woolyrnq) #' gglagplot(woolyrnq,seasonal=FALSE) #' #' lungDeaths <- cbind(mdeaths, fdeaths) #' gglagplot(lungDeaths, lags=2) #' gglagchull(lungDeaths, lags=6) #' #' @export gglagplot <- function(x, lags=ifelse(frequency(x) > 9, 16, 9), set.lags = 1:lags, diag=TRUE, diag.col="gray", do.lines = TRUE, colour = TRUE, continuous = frequency(x) > 12, labels = FALSE, seasonal = TRUE, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. = FALSE) } else { freq <- frequency(x) if (freq > 1) { linecol <- cycle(x) if (freq > 24) { continuous <- TRUE } } else { seasonal <- FALSE continuous <- TRUE } if (!seasonal) { continuous <- TRUE } # Make sure lags is evaluated tmp <- lags x <- as.matrix(x) # Prepare data for plotting n <- NROW(x) data <- data.frame() for (i in 1:NCOL(x)) { for (lagi in set.lags) { sname <- colnames(x)[i] if (is.null(sname)) { sname <- deparse(match.call()$x) } data <- rbind( data, data.frame( lagnum = 1:(n - lagi), freqcur = if(seasonal) linecol[(lagi + 1):n] else (lagi + 1):n, orig = x[(lagi + 1):n, i], lagged = x[1:(n - lagi), i], lagVal = rep(lagi, n - lagi), series = factor(rep(sname, n - lagi)) ) ) } } if (!continuous) { data$freqcur <- factor(data$freqcur) } # Initialise ggplot object p <- ggplot2::ggplot(ggplot2::aes_(x = ~lagged, y = ~orig), data = data) if (diag) { p <- p + ggplot2::geom_abline(colour = diag.col, linetype = "dashed") } if (labels) { linesize <- 0.25 * (2 - do.lines) } else { linesize <- 0.5 * (2 - do.lines) } plottype <- if (do.lines) { ggplot2::geom_path } else { ggplot2::geom_point } if (colour) { p <- p + plottype(ggplot2::aes_(colour = ~freqcur), size = linesize) } else { p <- p + plottype(size = linesize) } if (labels) { p <- p + ggplot2::geom_text(ggplot2::aes_(label = ~lagnum)) } # Ensure all facets are the same size (if extreme values are excluded in lag specification) if (max(set.lags) > NROW(x) / 2) { axissize <- rbind(aggregate(orig ~ series, data = data, min), aggregate(orig ~ series, data = data, max)) axissize <- data.frame(series = rep(axissize$series, length(set.lags)), orig = rep(axissize$orig, length(set.lags)), lagVal = rep(set.lags, each = NCOL(x))) p <- p + ggplot2::geom_blank(ggplot2::aes_(x = ~orig, y = ~orig), data = axissize) } # Facet labellerFn <- function(labels) { if (!is.null(labels$series)) { # Multivariate labels labels$series <- as.character(labels$series) } labels$lagVal <- paste("lag", labels$lagVal) return(labels) } if (NCOL(x) > 1) { p <- p + ggplot2::facet_wrap(~series + lagVal, scales = "free", labeller = labellerFn) } else { p <- p + ggplot2::facet_wrap(~lagVal, labeller = labellerFn) } p <- p + ggplot2::theme(aspect.ratio = 1) if (colour) { if (seasonal) { if (freq == 4L) { title <- "Quarter" } else if (freq == 12L) { title <- "Month" } else if (freq == 7L) { title <- "Day" } else if (freq == 24L) { title <- "Hour" } else { title <- "Season" } } else { title <- "Time" } if (continuous) { p <- p + ggplot2::guides(colour = ggplot2::guide_colourbar(title = title)) } else { p <- p +
ggplot2::guides(colour = ggplot2::guide_legend(title = title)) } } p <- p + ggAddExtras(ylab = NULL, xlab = NULL) return(p) } } #' @rdname gglagplot #' #' @examples #' gglagchull(woolyrnq) #' #' @export gglagchull <- function(x, lags=ifelse(frequency(x) > 1, min(12, frequency(x)), 4), set.lags = 1:lags, diag=TRUE, diag.col="gray", ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. = FALSE) } else { # Make sure lags is evaluated tmp <- lags x <- as.matrix(x) # Prepare data for plotting n <- NROW(x) data <- data.frame() for (i in 1:NCOL(x)) { for (lag in set.lags) { sname <- colnames(x)[i] if (is.null(sname)) { sname <- deparse(substitute(x)) } data <- rbind(data, data.frame(orig = x[(lag + 1):n, i], lagged = x[1:(n - lag), i], lag = rep(lag, n - lag), series = rep(sname, n - lag))[grDevices::chull(x[(lag + 1):n, i], x[1:(n - lag), i]), ]) } } # Initialise ggplot object p <- ggplot2::ggplot(ggplot2::aes_(x = ~orig, y = ~lagged), data = data) if (diag) { p <- p + ggplot2::geom_abline(colour = diag.col, linetype = "dashed") } p <- p + ggplot2::geom_polygon(ggplot2::aes_(group = ~lag, colour = ~lag, fill = ~lag), alpha = 1 / length(set.lags)) p <- p + ggplot2::guides(colour = ggplot2::guide_colourbar(title = "lag")) p <- p + ggplot2::theme(aspect.ratio = 1) # Facet if (NCOL(x) > 1) { p <- p + ggplot2::facet_wrap(~series, scales = "free") } p <- p + ggAddExtras(ylab = "lagged", xlab = "original") return(p) } } #' Create a seasonal subseries ggplot #' #' Plots a subseries plot using ggplot. Each season is plotted as a separate #' mini time series. The blue lines represent the mean of the observations #' within each season. #' #' The \code{ggmonthplot} function is simply a wrapper for #' \code{ggsubseriesplot} as a convenience for users familiar with #' \code{\link[stats]{monthplot}}. #' #' @param x a time series object (type \code{ts}). #' @param labels A vector of labels to use for each 'season' #' @param times A vector of times for each observation #' @param phase A vector of seasonal components #' @param \dots Not used (for consistency with monthplot) #' @return Returns an object of class \code{ggplot}. #' @author Mitchell O'Hara-Wild #' @seealso \code{\link[stats]{monthplot}} #' @examples #' #' ggsubseriesplot(AirPassengers) #' ggsubseriesplot(woolyrnq) #' #' @export ggmonthplot <- function(x, labels = NULL, times = time(x), phase = cycle(x), ...) { ggsubseriesplot(x, labels, times, phase, ...) } #' @rdname ggmonthplot #' @export ggsubseriesplot <- function(x, labels = NULL, times = time(x), phase = cycle(x), ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. 
= FALSE) } else { if (!inherits(x, "ts")) { stop("ggsubseriesplot requires a ts object, use x=object") } if (round(frequency(x)) <= 1) { stop("Data are not seasonal") } if("1" %in% dimnames(table(table(phase)))[[1]]){ stop(paste("Each season requires at least 2 observations.", ifelse(frequency(x)%%1 == 0, "Your series length may be too short for this graphic.", "This may be caused by specifying a time-series with non-integer frequency.") ) ) } data <- data.frame(y = as.numeric(x), year = trunc(time(x)), season = as.numeric(phase)) seasonwidth <- (max(data$year) - min(data$year)) * 1.05 data$time <- data$season + 0.025 + (data$year - min(data$year)) / seasonwidth avgLines <- stats::aggregate(data$y, by = list(data$season), FUN = mean) colnames(avgLines) <- c("season", "avg") data <- merge(data, avgLines, by = "season") # Initialise ggplot object # p <- ggplot2::ggplot(ggplot2::aes_(x=~interaction(year, season), y=~y, group=~season), data=data, na.rm=TRUE) p <- ggplot2::ggplot( ggplot2::aes_(x = ~time, y = ~y, group = ~season), data = data, na.rm = TRUE ) # Remove vertical break lines p <- p + ggplot2::theme(panel.grid.major.x = ggplot2::element_blank()) # Add data p <- p + ggplot2::geom_line() # Add average lines p <- p + ggplot2::geom_line(ggplot2::aes_(y = ~avg), col = "#0000AA") # Create x-axis labels xfreq <- frequency(x) if (!is.null(labels)) { if (xfreq != length(labels)) { stop("The number of labels supplied is not the same as the number of seasons.") } else { xbreaks <- labels; xlab <- "Season" } } else if (xfreq == 4) { xbreaks <- c("Q1", "Q2", "Q3", "Q4") xlab <- "Quarter" } else if (xfreq == 7) { xbreaks <- c( "Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday" ) xlab <- "Day" } else if (xfreq == 12) { xbreaks <- month.abb xlab <- "Month" } else { xbreaks <- 1:frequency(x) xlab <- "Season" } # X-axis p <- p + ggplot2::scale_x_continuous(breaks = 0.5 + (1:xfreq), labels = xbreaks) # Graph labels p <- p + ggAddExtras(ylab = deparse(substitute(x)), xlab = xlab) return(p) } } #' @rdname seasonplot #' #' @param continuous Should the colour scheme for years be continuous or #' discrete? #' @param polar Plot the graph on seasonal coordinates #' #' @examples #' ggseasonplot(AirPassengers, col=rainbow(12), year.labels=TRUE) #' ggseasonplot(AirPassengers, year.labels=TRUE, continuous=TRUE) #' #' @export ggseasonplot <- function(x, season.labels=NULL, year.labels=FALSE, year.labels.left=FALSE, type=NULL, col=NULL, continuous=FALSE, polar=FALSE, labelgap=0.04, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call.
= FALSE) } if (!inherits(x, "ts")) { stop("ggseasonplot requires a ts object, use x=object") } if (!is.null(type)) { message("Plot types are not yet supported for seasonplot()") } # Check data are seasonal and convert to integer seasonality s <- round(frequency(x)) if (s <= 1) { stop("Data are not seasonal") } # Grab name for plot title xname <- deparse(substitute(x)) tspx <- tsp(x) x <- ts(x, start = tspx[1], frequency = s) data <- data.frame( y = as.numeric(x), year = trunc(round(time(x), 8)), cycle = as.numeric(cycle(x)), time = as.numeric((cycle(x) - 1) / s) ) data$year <- if (continuous) { as.numeric(data$year) } else { as.factor(data$year) } if (polar) { startValues <- data[data$cycle == 1, ] if (data$cycle[1] == 1) { startValues <- startValues[-1, ] } startValues$time <- 1 - .Machine$double.eps levels(startValues$year) <- as.numeric(levels(startValues$year)) - 1 data <- rbind(data, startValues) } # Initialise ggplot object p <- ggplot2::ggplot(ggplot2::aes_(x = ~time, y = ~y, group = ~year, colour = ~year), data = data, na.rm = TRUE) # p <- p + ggplot2::scale_x_continuous() # Add data p <- p + ggplot2::geom_line() if (!is.null(col)) { if(is.numeric(col)){ col <- palette()[(col-1)%%(length(palette())) + 1] } if (continuous) { p <- p + ggplot2::scale_color_gradientn(colours = col) } else { ncol <- length(unique(data$year)) if (length(col) == 1) { p <- p + ggplot2::scale_color_manual(guide = "none", values = rep(col, ncol)) } else { p <- p + ggplot2::scale_color_manual(values = rep(col, ceiling(ncol / length(col)))[1:ncol]) } } } if (year.labels) { yrlab <- stats::aggregate(time ~ year, data = data, FUN = max) yrlab <- cbind(yrlab, offset = labelgap) } if (year.labels.left) { yrlabL <- stats::aggregate(time ~ year, data = data, FUN = min) yrlabL <- cbind(yrlabL, offset = -labelgap) if (year.labels) { yrlab <- rbind(yrlab, yrlabL) } } if (year.labels || year.labels.left) { yrlab <- merge(yrlab, data) yrlab$time <- yrlab$time + yrlab$offset p <- p + ggplot2::guides(colour = "none") p <- p + ggplot2::geom_text(ggplot2::aes_(x = ~time, y = ~y, label = ~year), data = yrlab) } # Add seasonal labels if (s == 12) { labs <- month.abb xLab <- "Month" } else if (s == 4) { labs <- paste("Q", 1:4, sep = "") xLab <- "Quarter" } else if (s == 7) { labs <- c("Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat") xLab <- "Day" } else if (s == 52) { labs <- 1:s xLab <- "Week" } else if (s == 24) { labs <- 0:(s - 1) xLab <- "Hour" } else if (s == 48) { labs <- seq(0, 23.5, by = 0.5) xLab <- "Half-hour" } else { labs <- 1:s xLab <- "Season" } if (!is.null(season.labels)) { if (length(season.labels) != length(labs)) { warning(paste0("Provided season.labels have length ", length(season.labels), ", but ", length(labs), " are required. Ignoring season.labels.")) } else { labs <- season.labels } } breaks <- sort(unique(data$time)) if (polar) { breaks <- head(breaks, -1) p <- p + ggplot2::coord_polar() } p <- p + ggplot2::scale_x_continuous(breaks = breaks, minor_breaks = NULL, labels = labs) # Graph title and axes p <- p + ggAddExtras(main = paste("Seasonal plot:", xname), xlab = xLab, ylab = NULL) return(p) } #' @rdname plot.forecast #' @export autoplot.splineforecast <- function(object, PI=TRUE, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call.
= FALSE) } else { p <- autoplot(object$x) + autolayer(object) p <- p + ggplot2::geom_point(size = 2) fit <- data.frame(datetime = as.numeric(time(object$fitted)), y = as.numeric(object$fitted)) p <- p + ggplot2::geom_line(ggplot2::aes_(x = ~datetime, y = ~y), colour = "red", data = fit) p <- p + ggAddExtras(ylab = deparse(object$model$call$x)) if (!is.null(object$series)) { p <- p + ggplot2::ylab(object$series) } return(p) } } #' @rdname autoplot.seas #' @export autoplot.stl <- function(object, labels = NULL, range.bars = TRUE, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. = FALSE) } else { if (!inherits(object, "stl")) { stop("autoplot.stl requires a stl object, use x=object") } # Re-order series as trend, seasonal, remainder object$time.series <- object$time.series[, c("trend", "seasonal", "remainder")] if (is.null(labels)) { labels <- colnames(object$time.series) } data <- object$time.series cn <- c("data", labels) data <- data.frame( datetime = rep(time(data), NCOL(data) + 1), y = c(rowSums(data), data), parts = factor(rep(cn, each = NROW(data)), levels = cn) ) # Initialise ggplot object p <- ggplot2::ggplot(ggplot2::aes_(x = ~datetime, y = ~y), data = data) # Add data # Time series lines p <- p + ggplot2::geom_line(ggplot2::aes_(x = ~datetime, y = ~y), data = subset(data, data$parts != cn[4]), na.rm = TRUE) p <- p + ggplot2::geom_segment( ggplot2::aes_(x = ~datetime, xend = ~datetime, y = 0, yend = ~y), data = subset(data, data$parts == cn[4]), lineend = "butt" ) # Rangebars if (range.bars) { yranges <- vapply(split(data$y, data$parts), function(x) range(x, na.rm = TRUE), numeric(2)) xranges <- range(data$datetime) barmid <- apply(yranges, 2, mean) barlength <- min(apply(yranges, 2, diff)) barwidth <- (1 / 64) * diff(xranges) barpos <- data.frame( left = xranges[2] + barwidth, right = xranges[2] + barwidth * 2, top = barmid + barlength / 2, bottom = barmid - barlength / 2, parts = factor(colnames(yranges), levels = cn), datetime = xranges[2], y = barmid ) p <- p + ggplot2::geom_rect(ggplot2::aes_(xmin = ~left, xmax = ~right, ymax = ~top, ymin = ~bottom), data = barpos, fill = "gray75", colour = "black", size = 1 / 3) } # Remainder p <- p + ggplot2::facet_grid("parts ~ .", scales = "free_y", switch = "y") p <- p + ggplot2::geom_hline(ggplot2::aes_(yintercept = ~y), data = data.frame(y = 0, parts = factor(cn[4], levels = cn))) # Add axis labels p <- p + ggAddExtras(xlab = "Time", ylab = "") # Make x axis contain only whole numbers (e.g., years) p <- p + ggplot2::scale_x_continuous(breaks = unique(round(pretty(data$datetime)))) # ^^ Remove rightmost x axis gap with `expand=c(0.05, 0, 0, 0)` argument when asymmetric `expand` feature is supported # issue: tidyverse/ggplot2#1669 return(p) } } #' @rdname autoplot.seas #' @export autoplot.StructTS <- function(object, labels = NULL, range.bars = TRUE, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call.
= FALSE) } else { if (!inherits(object, "StructTS")) { stop("autoplot.StructTS requires a StructTS object.") } if (is.null(labels)) { labels <- colnames(object$fitted) } data <- object$fitted cn <- c("data", labels) data <- data.frame( datetime = rep(time(data), NCOL(data) + 1), y = c(object$data, data), parts = factor(rep(cn, each = NROW(data)), levels = cn) ) # Initialise ggplot object p <- ggplot2::ggplot(ggplot2::aes_(x = ~datetime, y = ~y), data = data) # Add data p <- p + ggplot2::geom_line(ggplot2::aes_(x = ~datetime, y = ~y), na.rm = TRUE) p <- p + ggplot2::facet_grid("parts ~ .", scales = "free_y", switch = "y") if (range.bars) { yranges <- vapply(split(data$y, data$parts), function(x) range(x, na.rm = TRUE), numeric(2)) xranges <- range(data$datetime) barmid <- apply(yranges, 2, mean) barlength <- min(apply(yranges, 2, diff)) barwidth <- (1 / 64) * diff(xranges) barpos <- data.frame( left = xranges[2] + barwidth, right = xranges[2] + barwidth * 2, top = barmid + barlength / 2, bottom = barmid - barlength / 2, parts = factor(colnames(yranges), levels = cn), datetime = xranges[2], y = barmid ) p <- p + ggplot2::geom_rect(ggplot2::aes_(xmin = ~left, xmax = ~right, ymax = ~top, ymin = ~bottom), data = barpos, fill = "gray75", colour = "black", size = 1 / 3) } # Add axis labels p <- p + ggAddExtras(xlab = "Time", ylab = "") # Make x axis contain only whole numbers (e.g., years) p <- p + ggplot2::scale_x_continuous(breaks = unique(round(pretty(data$datetime)))) return(p) } } #' Plot time series decomposition components using ggplot #' #' Produces a ggplot object of seasonally decomposed time series for objects of #' class \dQuote{\code{stl}} (created with \code{\link[stats]{stl}}), class #' \dQuote{\code{seas}} (created with \code{\link[seasonal]{seas}}), or class #' \dQuote{\code{decomposed.ts}} (created with \code{\link[stats]{decompose}}). #' #' @param object Object of class \dQuote{\code{seas}}, \dQuote{\code{stl}}, or #' \dQuote{\code{decomposed.ts}}. #' @param labels Labels to replace \dQuote{seasonal}, \dQuote{trend}, and #' \dQuote{remainder}. #' @param range.bars Logical indicating if each plot should have a bar at its #' right side representing relative size. If \code{NULL}, automatic selection #' takes place. #' @param ... Other plotting parameters to affect the plot. #' @return Returns an object of class \code{ggplot}. #' @author Mitchell O'Hara-Wild #' @seealso \code{\link[seasonal]{seas}}, \code{\link[stats]{stl}}, #' \code{\link[stats]{decompose}}, \code{\link[stats]{StructTS}}, #' \code{\link[stats]{plot.stl}}. #' @examples #' #' library(ggplot2) #' co2 %>% #' decompose() %>% #' autoplot() #' nottem %>% #' stl(s.window = "periodic") %>% #' autoplot() #' \dontrun{ #' library(seasonal) #' seas(USAccDeaths) %>% autoplot() #' } #' #' @export autoplot.seas <- function(object, labels = NULL, range.bars = NULL, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. = FALSE) } if (!inherits(object, "seas")) { stop("autoplot.seas requires a seas object") } if (is.null(labels)) { if ("seasonal" %in% colnames(object$data)) { labels <- c("trend", "seasonal", "irregular") } else { labels <- c("trend", "irregular") } } data <- cbind(object$x, object$data[, labels]) colnames(data) <- cn <- c("data", labels) data <- data.frame( datetime = rep(time(data), NCOL(data)), y = c(data), parts = factor(rep(cn, each = NROW(data)), levels = cn) ) # Is it additive or multiplicative? 
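# Heuristic sketch (assumed behaviour of seas() output): multiplicative
# seasonal factors average to about 1, so their sum over one year is close
# to the frequency, while additive factors sum to about 0. Comparing the
# first-year sum against 0.5 below therefore selects the baseline `int`
# (1 for multiplicative, 0 for additive) used for the remainder segments
# and the horizontal reference line.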
freq <- frequency(object$data) sum_first_year <- try(sum(seasonal(object)[seq(freq)]), silent=TRUE) if(!inherits(sum_first_year, "try-error")) { int <- as.integer(sum_first_year > 0.5) # Closer to 1 than 0. } else { int <- 0 } # Initialise ggplot object p <- ggplot2::ggplot(ggplot2::aes_(x = ~datetime, y = ~y), data = data) # Add data p <- p + ggplot2::geom_line(ggplot2::aes_(x = ~datetime, y = ~y), data = subset(data, data$parts != tail(cn,1)), na.rm = TRUE) p <- p + ggplot2::geom_segment( ggplot2::aes_(x = ~datetime, xend = ~datetime, y = int, yend = ~y), data = subset(data, data$parts == tail(cn,1)), lineend = "butt" ) p <- p + ggplot2::facet_grid("parts ~ .", scales = "free_y", switch = "y") p <- p + ggplot2::geom_hline(ggplot2::aes_(yintercept = ~y), data = data.frame(y = int, parts = factor(tail(cn,1), levels = cn)) ) # Rangebars if (is.null(range.bars)) { range.bars <- object$spc$transform$`function` == "none" } if (range.bars) { yranges <- vapply(split(data$y, data$parts), function(x) range(x, na.rm = TRUE), numeric(2)) xranges <- range(data$datetime) barmid <- apply(yranges, 2, mean) barlength <- min(apply(yranges, 2, diff)) barwidth <- (1 / 64) * diff(xranges) barpos <- data.frame( left = xranges[2] + barwidth, right = xranges[2] + barwidth * 2, top = barmid + barlength / 2, bottom = barmid - barlength / 2, parts = factor(colnames(yranges), levels = cn), datetime = xranges[2], y = barmid ) p <- p + ggplot2::geom_rect(ggplot2::aes_(xmin = ~left, xmax = ~right, ymax = ~top, ymin = ~bottom), data = barpos, fill = "gray75", colour = "black", size = 1 / 3) } # Add axis labels p <- p + ggAddExtras(xlab = "Time", ylab = "") # Make x axis contain only whole numbers (e.g., years) p <- p + ggplot2::scale_x_continuous(breaks = unique(round(pretty(data$datetime)))) return(p) } #' @rdname autoplot.ts #' @export autolayer.mts <- function(object, colour = TRUE, series = NULL, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. = FALSE) } else { cl <- match.call() cl[[1]] <- quote(autolayer) cl$object <- quote(object[, i]) if (length(series) != NCOL(object)) { if (colour) { message("For a multivariate time series, specify a series name for each time series. Defaulting to column names.") } series <- colnames(object) } out <- list() for (i in 1:NCOL(object)) { cl$series <- series[i] out[[i]] <- eval(cl) } return(out) } } #' @rdname autoplot.ts #' @export autolayer.msts <- function(object, series = NULL, ...) { if (NCOL(object) > 1) { class(object) <- c("mts", "ts", "matrix") } else { if (is.null(series)) { series <- deparse(substitute(object)) } class(object) <- c("ts") } attr(object, "msts") <- NULL autolayer(object, series = series, ...) } #' @rdname autoplot.ts #' @export autolayer.ts <- function(object, colour=TRUE, series=NULL, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call.
= FALSE) } else { tsdata <- data.frame( timeVal = as.numeric(time(object)), series = ifelse(is.null(series), deparse(substitute(object)), series), seriesVal = as.numeric(object) ) if (colour) { ggplot2::geom_line(ggplot2::aes_(x = ~timeVal, y = ~seriesVal, group = ~series, colour = ~series), data = tsdata, ..., inherit.aes = FALSE) } else { ggplot2::geom_line(ggplot2::aes_(x = ~timeVal, y = ~seriesVal, group = ~series), data = tsdata, ..., inherit.aes = FALSE) } } } #' @rdname plot.forecast #' @export autolayer.forecast <- function(object, series = NULL, PI = TRUE, showgap = TRUE, ...) { PI <- PI & !is.null(object$level) data <- forecast2plotdf(object, PI = PI, showgap = showgap) mapping <- ggplot2::aes_(x = ~x, y = ~y) if (!is.null(object$series)) { data[["series"]] <- object$series } if (!is.null(series)) { data[["series"]] <- series mapping$colour <- quote(series) } if (PI) { mapping$level <- quote(level) mapping$ymin <- quote(ymin) mapping$ymax <- quote(ymax) } geom_forecast(mapping = mapping, data = data, stat = "identity", ..., inherit.aes = FALSE) } #' @rdname plot.mforecast #' @export autolayer.mforecast <- function(object, series = NULL, PI = TRUE, ...) { cl <- match.call() cl[[1]] <- quote(autolayer) cl$object <- quote(object$forecast[[i]]) if (!is.null(series)) { if (length(series) != length(object$forecast)) { series <- names(object$forecast) } } out <- list() for (i in 1:length(object$forecast)) { cl$series <- series[i] out[[i]] <- eval(cl) } return(out) } #' Automatically create a ggplot for time series objects #' #' \code{autoplot} takes an object of type \code{ts} or \code{mts} and creates #' a ggplot object suitable for usage with \code{stat_forecast}. #' #' \code{fortify.ts} takes a \code{ts} object and converts it into a data frame #' (for usage with ggplot2). #' #' @param object Object of class \dQuote{\code{ts}} or \dQuote{\code{mts}}. #' @param series Identifies the time series with a colour, which integrates well #' with the functionality of \link{geom_forecast}. #' @param facets If TRUE, multiple time series will be faceted (and unless #' specified, colour is set to FALSE). If FALSE, each series will be assigned a #' colour. #' @param colour If TRUE, the time series will be assigned a colour aesthetic #' @param model Object of class \dQuote{\code{ts}} to be converted to #' \dQuote{\code{data.frame}}. #' @param data Not used (required for \link{fortify} method) #' @param ... Other plotting parameters to affect the plot. #' @inheritParams plot.forecast #' @return None. Function produces a ggplot graph. #' @author Mitchell O'Hara-Wild #' @seealso \code{\link[stats]{plot.ts}}, \code{\link[ggplot2]{fortify}} #' @examples #' #' library(ggplot2) #' autoplot(USAccDeaths) #' #' lungDeaths <- cbind(mdeaths, fdeaths) #' autoplot(lungDeaths) #' autoplot(lungDeaths, facets=TRUE) #' #' @export autoplot.ts <- function(object, series=NULL, xlab = "Time", ylab = deparse(substitute(object)), main = NULL, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. = FALSE) } else { if (!is.ts(object)) { stop("autoplot.ts requires a ts object, use object=object") } # Create data frame with time as a column labelled x # and time series as a column labelled y. 
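# For example (illustrative only), autoplot(AirPassengers) would build
#   y = 112, x = 1949.000
#   y = 118, x = 1949.083
#   ...
# one row per observation, with time expressed as a fractional year.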
data <- data.frame(y = as.numeric(object), x = as.numeric(time(object))) if (!is.null(series)) { data <- transform(data, series = series) } # Initialise ggplot object p <- ggplot2::ggplot(ggplot2::aes_(y = ~y, x = ~x), data = data) # Add data if (!is.null(series)) { p <- p + ggplot2::geom_line(ggplot2::aes_(group = ~series, colour = ~series), na.rm = TRUE, ...) } else { p <- p + ggplot2::geom_line(na.rm = TRUE, ...) } # Add labels p <- p + ggAddExtras(xlab = xlab, ylab = ylab, main = main) # Make x axis contain only whole numbers (e.g., years) p <- p + ggplot2::scale_x_continuous(breaks = ggtsbreaks) return(p) } } #' @rdname autoplot.ts #' @export autoplot.mts <- function(object, colour=TRUE, facets=FALSE, xlab = "Time", ylab = deparse(substitute(object)), main = NULL, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. = FALSE) } else { if (!stats::is.mts(object)) { stop("autoplot.mts requires a mts object, use x=object") } if (NCOL(object) <= 1) { return(autoplot.ts(object, ...)) } cn <- colnames(object) if (is.null(cn)) { cn <- paste("Series", seq_len(NCOL(object))) } data <- data.frame( y = as.numeric(c(object)), x = rep(as.numeric(time(object)), NCOL(object)), series = factor(rep(cn, each = NROW(object)), levels = cn) ) # Initialise ggplot object mapping <- ggplot2::aes_(y = ~y, x = ~x, group = ~series) if (colour && (!facets || !missing(colour))) { mapping$colour <- quote(series) } p <- ggplot2::ggplot(mapping, data = data) p <- p + ggplot2::geom_line(na.rm = TRUE, ...) if (facets) { p <- p + ggplot2::facet_grid(series~., scales = "free_y") } p <- p + ggAddExtras(xlab = xlab, ylab = ylab, main = main) return(p) } } #' @rdname autoplot.ts #' @export autoplot.msts <- function(object, ...) { sname <- deparse(substitute(object)) if (NCOL(object) > 1) { class(object) <- c("mts", "ts", "matrix") } else { class(object) <- c("ts") } attr(object, "msts") <- NULL autoplot(object, ...) + ggAddExtras(ylab = sname) } #' @rdname autoplot.ts #' @export fortify.ts <- function(model, data, ...) { # Use ggfortify version if it is loaded # to prevent cran errors if (exists("ggfreqplot")) { tsp <- attr(model, which = "tsp") dtindex <- time(model) if (any(tsp[3] == c(4, 12))) { dtindex <- zoo::as.Date.yearmon(dtindex) } model <- data.frame(Index = dtindex, Data = as.numeric(model)) return(ggplot2::fortify(model)) } else { model <- cbind(x = as.numeric(time(model)), y = as.numeric(model)) as.data.frame(model) } } forecast2plotdf <- function(model, data=as.data.frame(model), PI=TRUE, showgap=TRUE, ...) { # Time series forecasts if (is.element("ts", class(model$mean))) { xVals <- as.numeric(time(model$mean)) # x axis is time } # Cross-sectional forecasts else if (!is.null(model[["newdata"]])) { xVals <- as.numeric(model[["newdata"]][, 1]) # Only display the first column of newdata, should be generalised. 
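# Illustrative shape of the long-format result built below (assumed values):
# with h = 10 forecasts and level = c(80, 95), `out` has 10 * (2 + 1) = 30
# rows -- one block of rows per interval level plus one block for the point
# forecasts, with NAs filling the columns a block does not use.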
if (NCOL(model[["newdata"]]) > 1) { message("Note: only extracting first column of data") } } else { stop("Could not find forecast x axis") } Hiloc <- grep("Hi ", names(data)) Loloc <- grep("Lo ", names(data)) if (PI && !is.null(model$level)) { # PI if (length(Hiloc) == length(Loloc)) { if (length(Hiloc) > 0) { out <- data.frame( x = rep(xVals, length(Hiloc) + 1), y = c(rep(NA, NROW(data) * (length(Hiloc))), data[, 1]), level = c(as.numeric(rep(gsub("Hi ", "", names(data)[Hiloc]), each = NROW(data))), rep(NA, NROW(data))), ymax = c(unlist(data[, Hiloc]), rep(NA, NROW(data))), ymin = c(unlist(data[, Loloc]), rep(NA, NROW(data))) ) numInterval <- length(model$level) } } else { warning("missing intervals detected, plotting point predictions only") PI <- FALSE } } if (!PI) { # No PI out <- data.frame(x = xVals, y = as.numeric(model$mean), level = rep(NA, NROW(model$mean)), ymax = rep(NA, NROW(model$mean)), ymin = rep(NA, NROW(model$mean))) numInterval <- 0 } if (!showgap) { if (is.null(model$x)) { warning("Removing the gap requires historical data, provide this via model$x. Defaulting showgap to TRUE.") } else { intervalGap <- data.frame( x = rep(time(model$x)[length(model$x)], numInterval + 1), y = c(model$x[length(model$x)], rep(NA, numInterval)), level = c(NA, model$level)[seq_along(1:(numInterval + 1))], ymax = c(NA, rep(model$x[length(model$x)], numInterval)), ymin = c(NA, rep(model$x[length(model$x)], numInterval)) ) out <- rbind(intervalGap, out) } } return(out) } #' @rdname geom_forecast #' @export StatForecast <- ggplot2::ggproto( "StatForecast", ggplot2::Stat, required_aes = c("x", "y"), compute_group = function(data, scales, params, PI=TRUE, showgap=TRUE, series=NULL, h=NULL, level=c(80, 95), fan=FALSE, robust=FALSE, lambda=NULL, find.frequency=FALSE, allow.multiplicative.trend=FALSE, ...) 
{ ## TODO: Rewrite tspx <- recoverTSP(data$x) if (is.null(h)) { h <- ifelse(tspx[3] > 1, 2 * tspx[3], 10) } tsdat <- ts(data = data$y, start = tspx[1], frequency = tspx[3]) fcast <- forecast( tsdat, h = h, level = level, fan = fan, robust = robust, lambda = lambda, find.frequency = find.frequency, allow.multiplicative.trend = allow.multiplicative.trend ) fcast <- forecast2plotdf(fcast, PI = PI, showgap = showgap) # Add ggplot & series information extraInfo <- as.list(data[1, !colnames(data) %in% colnames(fcast)]) extraInfo$`_data` <- quote(fcast) if (!is.null(series)) { if (data$group[1] > length(series)) { message("Recycling series argument, please provide a series name for each time series") } extraInfo[["series"]] <- series[(abs(data$group[1]) - 1) %% length(series) + 1] } do.call("transform", extraInfo) } ) #' @rdname geom_forecast #' @export GeomForecast <- ggplot2::ggproto( "GeomForecast", ggplot2::Geom, # Produces both point forecasts and intervals on graph required_aes = c("x", "y"), optional_aes = c("ymin", "ymax", "level"), default_aes = ggplot2::aes( colour = "blue", fill = "grey60", size = .5, linetype = 1, weight = 1, alpha = 1, level = NA ), draw_key = function(data, params, size) { lwd <- min(data$size, min(size) / 4) # Calculate and set colour linecol <- blendHex(data$col, "gray30", 1) fillcol <- blendHex(data$col, "#CCCCCC", 0.8) grid::grobTree( grid::rectGrob( width = grid::unit(1, "npc") - grid::unit(lwd, "mm"), height = grid::unit(1, "npc") - grid::unit(lwd, "mm"), gp = grid::gpar( col = fillcol, fill = scales::alpha(fillcol, data$alpha), lty = data$linetype, lwd = lwd * ggplot2::.pt, linejoin = "mitre" ) ), grid::linesGrob( x = c(0, 0.4, 0.6, 1), y = c(0.2, 0.6, 0.4, 0.9), gp = grid::gpar( col = linecol, fill = scales::alpha(linecol, data$alpha), lty = data$linetype, lwd = lwd * ggplot2::.pt, linejoin = "mitre" ) ) ) }, handle_na = function(self, data, params) { ## TODO: Consider removing/changing data }, draw_group = function(data, panel_scales, coord) { data <- if (!is.null(data$level)) { split(data, !is.na(data$level)) } else { list(data) } # Draw forecasted points and intervals if (length(data) == 1) { # PI=FALSE ggplot2:::ggname( "geom_forecast", GeomForecastPoint$draw_panel(data[[1]], panel_scales, coord) ) } else { # PI=TRUE ggplot2:::ggname( "geom_forecast", grid::addGrob( GeomForecastInterval$draw_group(data[[2]], panel_scales, coord), GeomForecastPoint$draw_panel(data[[1]], panel_scales, coord) ) ) } } ) GeomForecastPoint <- ggplot2::ggproto( "GeomForecastPoint", GeomForecast, ## Produces only point forecasts required_aes = c("x", "y"), setup_data = function(data, params) { data[!is.na(data$y), ] # Extract only forecast points }, draw_group = function(data, panel_scales, coord) { linecol <- blendHex(data$colour[1], "gray30", 1) # Compute alpha transparency data$alpha <- grDevices::col2rgb(linecol, alpha = TRUE)[4, ] / 255 * data$alpha # Select appropriate Geom and set defaults if (NROW(data) == 0) { # Blank ggplot2::GeomBlank$draw_panel } else if (NROW(data) == 1) { # Point GeomForecastPointGeom <- ggplot2::GeomPoint$draw_panel pointpred <- transform(data, fill = NA, colour = linecol, size = 1, shape = 19, stroke = 0.5) } else { # Line GeomForecastPointGeom <- ggplot2::GeomLine$draw_panel pointpred <- transform(data, fill = NA, colour = linecol) } # Draw forecast points ggplot2:::ggname( "geom_forecast_point", grid::grobTree(GeomForecastPointGeom(pointpred, panel_scales, coord)) ) } ) blendHex <- function(mixcol, seqcol, alpha=1) { requireNamespace("methods") 
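# Sketch of the blend performed below: the hue of `mixcol` is kept, its
# lightness is replaced by the lightness of `seqcol`, and its saturation is
# mixed with weight `alpha` (alpha = 1 keeps mixcol's saturation unchanged).
# For instance, blendHex("#0000AA", "gray30", 1) should yield a blue with
# gray30's lightness; the result is returned as a hex string.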
if (is.na(seqcol)) { return(mixcol) } # transform to hue/lightness/saturation colorspace seqcol <- grDevices::col2rgb(seqcol, alpha = TRUE) mixcol <- grDevices::col2rgb(mixcol, alpha = TRUE) seqcolHLS <- suppressWarnings(methods::coerce(colorspace::RGB(R = seqcol[1, ] / 255, G = seqcol[2, ] / 255, B = seqcol[3, ] / 255), structure(NULL, class = "HLS"))) mixcolHLS <- suppressWarnings(methods::coerce(colorspace::RGB(R = mixcol[1, ] / 255, G = mixcol[2, ] / 255, B = mixcol[3, ] / 255), structure(NULL, class = "HLS"))) # copy luminance mixcolHLS@coords[, "L"] <- seqcolHLS@coords[, "L"] mixcolHLS@coords[, "S"] <- alpha * mixcolHLS@coords[, "S"] + (1 - alpha) * seqcolHLS@coords[, "S"] mixcolHex <- suppressWarnings(methods::coerce(mixcolHLS, structure(NULL, class = "RGB"))) mixcolHex <- colorspace::hex(mixcolHex) mixcolHex <- ggplot2::alpha(mixcolHex, mixcol[4, ] / 255) return(mixcolHex) } GeomForecastInterval <- ggplot2::ggproto( "GeomForecastInterval", GeomForecast, ## Produces only forecast intervals on graph required_aes = c("x", "ymin", "ymax"), setup_data = function(data, params) { data[is.na(data$y), ] # Extract only forecast intervals }, draw_group = function(data, panel_scales, coord) { # If level scale from fabletools is not loaded, convert to colour if(is.numeric(data$level)){ leveldiff <- diff(range(data$level)) if (leveldiff == 0) { leveldiff <- 1 } shadeVal <- (data$level - min(data$level)) / leveldiff * 0.2 + 8 / 15 data$level <- rgb(shadeVal, shadeVal, shadeVal) } intervalGrobList <- lapply( split(data, data$level), FUN = function(x) { # Calculate colour fillcol <- blendHex(x$colour[1], x$level[1], 0.7) # Compute alpha transparency x$alpha <- grDevices::col2rgb(fillcol, alpha = TRUE)[4, ] / 255 * x$alpha # Select appropriate Geom and set defaults if (NROW(x) == 0) { # Blank ggplot2::GeomBlank$draw_panel } else if (NROW(x) == 1) { # Linerange GeomForecastIntervalGeom <- ggplot2::GeomLinerange$draw_panel x <- transform(x, colour = fillcol, fill = NA, size = 1) } else { # Ribbon GeomForecastIntervalGeom <- ggplot2::GeomRibbon$draw_group x <- transform(x, colour = NA, fill = fillcol) } # Create grob return(GeomForecastIntervalGeom(x, panel_scales, coord)) ## Create list pair with average ymin/ymax to order layers } ) # Draw forecast intervals ggplot2:::ggname("geom_forecast_interval", do.call(grid::grobTree, rev(intervalGrobList))) # TODO: Find a reliable method of stacking them correctly } ) #' Forecast plot #' #' Generates forecasts from \code{forecast.ts} and adds them to the plot. #' Forecasts can be modified via sending forecast specific arguments above. #' #' Multivariate forecasting is supported by having each time series on a #' different group. #' #' You can also pass \code{geom_forecast} a \code{forecast} object to add it to #' the plot. #' #' The aesthetics required for the forecasting to work include forecast #' observations on the y axis, and the \code{time} of the observations on the x #' axis. Refer to the examples below. To automatically set up aesthetics, use #' \code{autoplot}. #' #' @param mapping Set of aesthetic mappings created by \code{\link{aes}} or #' \code{\link{aes_}}. If specified and \code{inherit.aes = TRUE} (the #' default), it is combined with the default mapping at the top level of the #' plot. You must supply \code{mapping} if there is no plot mapping. #' @param data The data to be displayed in this layer.
There are three options: #' #' If \code{NULL}, the default, the data is inherited from the plot data as #' specified in the call to \code{\link{ggplot}}. #' #' A \code{data.frame}, or other object, will override the plot data. All #' objects will be fortified to produce a data frame. See \code{\link{fortify}} #' for which variables will be created. #' #' A \code{function} will be called with a single argument, the plot data. The #' return value must be a \code{data.frame}, and will be used as the layer #' data. #' @param stat The stat object used to calculate the data. #' @param position Position adjustment, either as a string, or the result of a #' call to a position adjustment function. #' @param na.rm If \code{FALSE} (the default), removes missing values with a #' warning. If \code{TRUE} silently removes missing values. #' @param show.legend logical. Should this layer be included in the legends? #' \code{NA}, the default, includes if any aesthetics are mapped. \code{FALSE} #' never includes, and \code{TRUE} always includes. #' @param inherit.aes If \code{FALSE}, overrides the default aesthetics, rather #' than combining with them. This is most useful for helper functions that #' define both data and aesthetics and shouldn't inherit behaviour from the #' default plot specification, e.g. \code{\link{borders}}. #' @param PI If \code{FALSE}, prediction intervals will not be plotted, giving #' only the forecast line. #' @param showgap If \code{showgap=FALSE}, the gap between the historical #' observations and the forecasts is removed. #' @param series Matches an unidentified forecast layer with a coloured object #' on the plot. #' @param ... Additional arguments for \code{\link{forecast.ts}}, other #' arguments are passed on to \code{\link{layer}}. These are often aesthetics, #' used to set an aesthetic to a fixed value, like \code{color = "red"} or #' \code{alpha = .5}. They may also be parameters to the paired geom/stat. #' @return A layer for a ggplot graph. #' @author Mitchell O'Hara-Wild #' @seealso \code{\link{forecast}}, \code{\link[ggplot2]{ggproto}} #' @examples #' #' \dontrun{ #' library(ggplot2) #' autoplot(USAccDeaths) + geom_forecast() #' #' lungDeaths <- cbind(mdeaths, fdeaths) #' autoplot(lungDeaths) + geom_forecast() #' #' # Using fortify.ts #' p <- ggplot(aes(x=x, y=y), data=USAccDeaths) #' p <- p + geom_line() #' p + geom_forecast() #' #' # Without fortify.ts #' data <- data.frame(USAccDeaths=as.numeric(USAccDeaths), time=as.numeric(time(USAccDeaths))) #' p <- ggplot(aes(x=time, y=USAccDeaths), data=data) #' p <- p + geom_line() #' p + geom_forecast() #' #' p + geom_forecast(h=60) #' p <- ggplot(aes(x=time, y=USAccDeaths), data=data) #' p + geom_forecast(level=c(70,98)) #' p + geom_forecast(level=c(70,98),colour="lightblue") #' #' #Add forecasts to multivariate series with colour groups #' lungDeaths <- cbind(mdeaths, fdeaths) #' autoplot(lungDeaths) + geom_forecast(forecast(mdeaths), series="mdeaths") #' } #' #' @export geom_forecast <- function(mapping = NULL, data = NULL, stat = "forecast", position = "identity", na.rm = FALSE, show.legend = NA, inherit.aes = TRUE, PI=TRUE, showgap=TRUE, series=NULL, ...)
{ if (is.forecast(mapping) || is.mforecast(mapping)) { warning("Use autolayer instead of geom_forecast to add a forecast layer to your ggplot object.") cl <- match.call() cl[[1]] <- quote(autolayer) names(cl)[names(cl) == "mapping"] <- "object" return(eval.parent(cl)) } if (is.ts(mapping)) { data <- data.frame(y = as.numeric(mapping), x = as.numeric(time(mapping))) mapping <- ggplot2::aes_(y = ~y, x = ~x) } if (stat == "forecast") { paramlist <- list(na.rm = na.rm, PI = PI, showgap = showgap, series = series, ...) if (!is.null(series)) { if (inherits(mapping, "uneval")) { mapping$colour <- quote(..series..) } else { mapping <- ggplot2::aes_(colour = ~..series..) } } } else { paramlist <- list(na.rm = na.rm, ...) } ggplot2::layer( geom = GeomForecast, mapping = mapping, data = data, stat = stat, position = position, show.legend = show.legend, inherit.aes = inherit.aes, params = paramlist ) } # Produce nice histogram with appropriately chosen bin widths # Designed to work with time series data without issuing warnings. #' Histogram with optional normal and kernel density functions #' #' Plots a histogram and density estimates using ggplot. #' #' #' @param x a numerical vector. #' @param add.normal Add a normal density function for comparison #' @param add.kde Add a kernel density estimate for comparison #' @param add.rug Add a rug plot on the horizontal axis #' @param bins The number of bins to use for the histogram. Selected by default #' using the Freedman-Diaconis rule given by \code{\link[grDevices]{nclass.FD}} #' @param boundary A boundary between two bins. #' @return A ggplot object. #' @author Rob J Hyndman #' @seealso \code{\link[graphics]{hist}}, \code{\link[ggplot2]{geom_histogram}} #' @examples #' #' gghistogram(lynx, add.kde=TRUE) #' #' @export gghistogram <- function(x, add.normal=FALSE, add.kde=FALSE, add.rug=TRUE, bins, boundary=0) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. = FALSE) } else { if (missing(bins)) { bins <- min(500, grDevices::nclass.FD(na.exclude(x))) } data <- data.frame(x = as.numeric(c(x))) # Initialise ggplot object and plot histogram binwidth <- (max(x, na.rm = TRUE) - min(x, na.rm = TRUE)) / bins p <- ggplot2::ggplot() + ggplot2::geom_histogram(ggplot2::aes(x), data = data, binwidth = binwidth, boundary = boundary) + ggplot2::xlab(deparse(substitute(x))) # Add normal density estimate if (add.normal || add.kde) { xmin <- min(x, na.rm = TRUE) xmax <- max(x, na.rm = TRUE) if (add.kde) { h <- stats::bw.SJ(x) xmin <- xmin - 3 * h xmax <- xmax + 3 * h } if (add.normal) { xmean <- mean(x, na.rm = TRUE) xsd <- sd(x, na.rm = TRUE) xmin <- min(xmin, xmean - 3 * xsd) xmax <- max(xmax, xmean + 3 * xsd) } xgrid <- seq(xmin, xmax, length.out = 512) if (add.normal) { df <- data.frame(x = xgrid, y = length(x) * binwidth * stats::dnorm(xgrid, xmean, xsd)) p <- p + ggplot2::geom_line(ggplot2::aes(df$x, df$y), col = "#ff8a62") } if (add.kde) { kde <- stats::density(x, bw = h, from = xgrid[1], to = xgrid[512], n = 512) p <- p + ggplot2::geom_line(ggplot2::aes(x = kde$x, y = length(x) * binwidth * kde$y), col = "#67a9ff") } } if (add.rug) { p <- p + ggplot2::geom_rug(ggplot2::aes(x)) } return(p) } } forecast/R/msts.R0000644000176200001440000000730414164752123013403 0ustar liggesusers#' Multi-Seasonal Time Series #' #' msts is an S3 class for multi seasonal time series objects, intended to be #' used for models that support multiple seasonal periods.
The msts class #' inherits from the ts class and has an additional "msts" attribute which #' contains the vector of seasonal periods. All methods that work on a ts #' class should also work on an msts class. #' #' @aliases print.msts window.msts `[.msts` #' #' @param data A numeric vector, ts object, matrix or data frame. It is #' intended that the time series data is univariate; otherwise it is treated the same #' as ts(). #' @param seasonal.periods A vector of the seasonal periods of the msts. #' @param ts.frequency The seasonal period that should be used as frequency of #' the underlying ts object. The default value is \code{floor(max(seasonal.periods))}. #' @param ... Arguments to be passed to the underlying call to \code{ts()}. For #' example \code{start=c(1987,5)}. #' @return An object of class \code{c("msts", "ts")}. If there is only one #' seasonal period (i.e., \code{length(seasonal.periods)==1}), then the object #' is of class \code{"ts"}. #' @author Slava Razbash and Rob J Hyndman #' @keywords ts #' @examples #' #' x <- msts(taylor, seasonal.periods=c(2*24,2*24*7,2*24*365), start=2000+22/52) #' y <- msts(USAccDeaths, seasonal.periods=12, start=1949) #' #' @export msts <- function(data, seasonal.periods, ts.frequency=floor(max(seasonal.periods)), ...) { # if(!is.element(ts.frequency, round(seasonal.periods-0.5+1e-12))) # stop("ts.frequency should be one of the seasonal periods") if (inherits(data, "ts") && frequency(data) == ts.frequency && length(list(...)) == 0) { object <- data } else { object <- ts(data = data, frequency = ts.frequency, ...) } if (length(seasonal.periods) > 1L) { class(object) <- c("msts", "ts") attr(object, "msts") <- sort(seasonal.periods) } return(object) } #' @export print.msts <- function(x, ...) { cat("Multi-Seasonal Time Series:\n") cat("Start: ") cat(start(x)) # cat("\nEnd: ") # cat(x$end) cat("\nSeasonal Periods: ") cat(attr(x, "msts")) cat("\nData:\n") xx <- unclass(x) # handles both univariate and multivariate ts attr(xx, "tsp") <- attr(xx, "msts") <- NULL print(xx) # print(matrix(x, ncol=length(x)), nrow=1) cat("\n") } #' @export window.msts <- function(x, ...) { seasonal.periods <- attr(x, "msts") class(x) <- c("ts") x <- window(x, ...)
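# window() dispatches on the plain "ts" class assigned above and drops the
# msts attributes, so the class and seasonal periods are restored below.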
class(x) <- c("msts", "ts") attr(x, "msts") <- seasonal.periods return(x) } #' @export `[.msts` <- function(x, i, j, drop = TRUE) { y <- NextMethod("[") if(!inherits(y, "ts")) return(y) class(y) <- c("msts", class(y)) attr(y, "msts") <- attr(x, "msts") y } # Copy msts attributes from x to y copy_msts <- function(x, y) { if(NROW(x) > NROW(y)) { # Pad y with initial NAs if(NCOL(y) == 1) { y <- c(rep(NA, NROW(x) - NROW(y)), y) } else { y <- rbind(matrix(NA, ncol=NCOL(y), nrow = NROW(x) - NROW(y)), y) } } else if(NROW(x) != NROW(y)) { stop("x and y should have the same number of observations") } if(NCOL(y) > 1) { class(y) <- c("mts", "ts", "matrix") } else { class(y) <- "ts" } if("msts" %in% class(x)) class(y) <- c("msts", class(y)) attr <- attributes(x) attributes(y)$tsp <- attr$tsp attributes(y)$msts <- attr$msts return(y) } # Copy msts attributes from x to y shifted to forecast period future_msts <- function(x, y) { if(NCOL(y) > 1) { class(y) <- c("mts", "ts", "matrix") } else { class(y) <- "ts" } if("msts" %in% class(x)) class(y) <- c("msts", class(y)) attr <- attributes(x) attr$tsp[1:2] <- attr$tsp[2] + c(1,NROW(y))/attr$tsp[3] attributes(y)$tsp <- attr$tsp attributes(y)$msts <- attr$msts return(y) } forecast/R/subset.R0000644000176200001440000001231614003673410013712 0ustar liggesusers#' Subsetting a time series #' #' Various types of subsetting of a time series. Allows subsetting by index #' values (unlike \code{\link[stats]{window}}). Also allows extraction of the #' values of a specific season or subset of seasons in each year. For example, #' to extract all values for the month of May from a time series. #' #' If character values for months are used, either upper or lower case may be #' used, and partial unambiguous names are acceptable. Possible character #' values for quarters are \code{"Q1"}, \code{"Q2"}, \code{"Q3"}, and #' \code{"Q4"}. #' #' @param x a univariate time series to be subsetted #' @param subset optional logical expression indicating elements to keep; #' missing values are taken as false. \code{subset} must be the same length as #' \code{x}. #' @param month Numeric or character vector of months to retain. Partial #' matching on month names used. #' @param quarter Numeric or character vector of quarters to retain. #' @param season Numeric vector of seasons to retain. #' @param start Index of start of contiguous subset. #' @param end Index of end of contiguous subset. #' @param ... Other arguments, unused. #' @return If \code{subset} is used, a numeric vector is returned with no ts #' attributes. If \code{start} and/or \code{end} are used, a ts object is #' returned consisting of x[start:end], with the appropriate time series #' attributes retained. Otherwise, a ts object is returned with frequency equal #' to the length of \code{month}, \code{quarter} or \code{season}. #' @author Rob J Hyndman #' @seealso \code{\link[base]{subset}}, \code{\link[stats]{window}} #' @keywords ts #' @examples #' plot(subset(gas,month="November")) #' subset(woolyrnq,quarter=3) #' subset(USAccDeaths, start=49) #' #' @export subset.ts <- function(x, subset=NULL, month=NULL, quarter=NULL, season=NULL, start=NULL, end=NULL, ...) 
{ if (!is.null(subset)) { if (NROW(subset) != NROW(x)) { stop("subset must be the same length as x") } if (NCOL(subset) != 1) { stop("subset must be a vector of rows to keep") } if ("mts" %in% class(x)) { return(subset.matrix(x, subset)) } else { return(subset.default(x, subset)) } } else if (!is.null(start) | !is.null(end)) { if (is.null(start)) { start <- 1 } if (is.null(end)) { end <- NROW(x) } if ("mts" %in% class(x)) { xsub <- x[start:end, , drop=FALSE] } else { xsub <- x[start:end] } tspx <- tsp(x) return(ts(xsub, frequency = tspx[3], start = tspx[1L] + (start - 1) / tspx[3L])) } else if (frequency(x) <= 1) { stop("Data must be seasonal") } if (!is.null(month)) { if (frequency(x) != 12) { stop("Data is not monthly") } if (is.character(month)) { season <- pmatch(tolower(month), tolower(month.name), duplicates.ok = TRUE) } else { season <- month } season <- na.omit(season) if (length(season) == 0L) { stop("No recognizable months") } if (min(season) < 1L | max(season) > 12L) { stop("Months must be between 1 and 12") } } else if (!is.null(quarter)) { if (frequency(x) != 4) { stop("Data is not quarterly") } if (is.character(quarter)) { season <- pmatch(tolower(quarter), paste("q", 1:4, sep = ""), duplicates.ok = TRUE) } else { season <- quarter } season <- na.omit(season) if (length(season) == 0L) { stop("No recognizable quarters") } if (min(season) < 1L | max(season) > 4L) { stop("Quarters must be between 1 and 4") } } else if (is.null(season)) { stop("No subset specified") } else if (min(season) < 1L | max(season) > frequency(x)) { stop(paste("Seasons must be between 1 and", frequency(x))) } start <- utils::head(time(x)[is.element(cycle(x), season)], 1) if ("mts" %in% class(x)) { x <- subset.matrix(x, is.element(cycle(x), season)) } else { x <- subset.default(x, is.element(cycle(x), season)) } return(ts(x, frequency = length(season), start = start)) } #' @export #' @importFrom utils head.matrix head.ts <- function(x, n=6L, ...) { attr_x <- attributes(x) attr_x$names <- NULL if (NCOL(x) > 1) { hx <- head.matrix(as.matrix(x), n = n, ...) } else if ((length(x) + n) > 0) { hx <- head(c(x), n = n, ...) } else { return(numeric(0)) } attr_x$tsp[2] <- attr_x$tsp[1] + (NROW(hx) - 1) / attr_x$tsp[3] if (!is.null(dim(x))) { attr_x$dim[1] <- NROW(hx) } attributes(hx) <- attr_x return(hx) } #' @export #' @importFrom utils tail.matrix tail.ts <- function(x, n=6L, ...) { attr_x <- attributes(x) attr_x$names <- NULL if (NCOL(x) > 1) { hx <- tail.matrix(as.matrix(x), n = n, ...) } else if ((length(x) + n) > 0) { hx <- tail(c(x), n = n, ...) } else { return(numeric(0)) } attr_x$tsp[1] <- attr_x$tsp[2] - (NROW(hx) - 1) / attr_x$tsp[3] if (!is.null(dim(x))) { attr_x$dim[1] <- NROW(hx) } attributes(hx) <- attr_x return(hx) } #' @rdname subset.ts #' @export subset.msts <- function(x, subset=NULL, start=NULL, end=NULL, ...) { out <- subset.ts(x, start = start, end = end, ...) tspx <- tsp(out) msts( out, seasonal.periods = attr(x, "msts"), start = tspx[1], ts.frequency = tspx[3] ) } forecast/R/bats.R0000644000176200001440000003730314003673410013341 0ustar liggesusers# Author: srazbash ############################################################################### #' BATS model (Exponential smoothing state space model with Box-Cox #' transformation, ARMA errors, Trend and Seasonal components) #' #' Fits a BATS model applied to \code{y}, as described in De Livera, Hyndman & #' Snyder (2011). Parallel processing is used by default to speed up the #' computations. 
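#' It applies when several candidate models (combinations of the Box-Cox,
#' trend and damping settings) must be compared, and can be switched off by
#' setting \code{use.parallel = FALSE}.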
#'
#' @aliases as.character.bats print.bats
#'
#' @param y The time series to be forecast. Can be \code{numeric}, \code{msts}
#' or \code{ts}. Only univariate time series are supported.
#' @param use.box.cox \code{TRUE/FALSE} indicates whether to use the Box-Cox
#' transformation or not. If \code{NULL} then both are tried and the best fit
#' is selected by AIC.
#' @param use.trend \code{TRUE/FALSE} indicates whether to include a trend or
#' not. If \code{NULL} then both are tried and the best fit is selected by AIC.
#' @param use.damped.trend \code{TRUE/FALSE} indicates whether to include a
#' damping parameter in the trend or not. If \code{NULL} then both are tried
#' and the best fit is selected by AIC.
#' @param seasonal.periods If \code{y} is numeric, then the seasonal periods can
#' be specified with this parameter.
#' @param use.arma.errors \code{TRUE/FALSE} indicates whether to include ARMA
#' errors or not. If \code{TRUE} the best fit is selected by AIC. If
#' \code{FALSE} then the selection algorithm does not consider ARMA errors.
#' @param use.parallel \code{TRUE/FALSE} indicates whether or not to use
#' parallel processing.
#' @param num.cores The number of parallel processes to be used if using
#' parallel processing. If \code{NULL} then the number of logical cores is
#' detected and all available cores are used.
#' @param bc.lower The lower limit (inclusive) for the Box-Cox transformation.
#' @param bc.upper The upper limit (inclusive) for the Box-Cox transformation.
#' @param biasadj Use adjusted back-transformed mean for Box-Cox
#' transformations. If TRUE, point forecasts and fitted values are mean
#' forecasts. Otherwise, these points can be considered the median of the
#' forecast densities.
#' @param model Output from a previous call to \code{bats}. If model is passed,
#' this same model is fitted to \code{y} without re-estimating any parameters.
#' @param ... Additional arguments to be passed to \code{auto.arima} when
#' choosing an ARMA(p, q) model for the errors. (Note that xreg will be ignored,
#' as will any arguments concerning seasonality and differencing, but arguments
#' controlling the values of p and q will be used.)
#' @return An object of class "\code{bats}". The generic accessor functions
#' \code{fitted.values} and \code{residuals} extract useful features of the
#' value returned by \code{bats} and associated functions. The fitted model is
#' designated BATS(omega, p,q, phi, m1,...,mJ) where omega is the Box-Cox
#' parameter and phi is the damping parameter; the error is modelled as an
#' ARMA(p,q) process and m1,...,mJ list the seasonal periods used in the model.
#' @author Slava Razbash and Rob J Hyndman
#' @references De Livera, A.M., Hyndman, R.J., & Snyder, R. D. (2011),
#' Forecasting time series with complex seasonal patterns using exponential
#' smoothing, \emph{Journal of the American Statistical Association},
#' \bold{106}(496), 1513-1527.
#' @keywords ts
#' @examples
#'
#' \dontrun{
#' fit <- bats(USAccDeaths)
#' plot(forecast(fit))
#'
#' taylor.fit <- bats(taylor)
#' plot(forecast(taylor.fit))
#' }
#'
#' @export
bats <- function(y, use.box.cox = NULL, use.trend = NULL, use.damped.trend = NULL,
                 seasonal.periods = NULL, use.arma.errors = TRUE,
                 use.parallel = length(y) > 1000, num.cores = 2,
                 bc.lower = 0, bc.upper = 1, biasadj = FALSE, model = NULL, ...)
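# Selection strategy: each admissible combination of Box-Cox transformation,
# trend and damped trend is fitted (in parallel if requested), ARMA errors are
# optionally added, and the specification with the lowest AIC is returned.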
{
  if (!is.numeric(y) || NCOL(y) > 1) {
    stop("y should be a univariate time series")
  }
  seriesname <- deparse(substitute(y))

  origy <- y
  attr_y <- attributes(origy)

  # Get seasonal periods
  if (is.null(seasonal.periods)) {
    if (any(class(y) == "msts")) {
      seasonal.periods <- attr(y, "msts")
    } else if (class(y) == "ts") {
      seasonal.periods <- frequency(y)
    } else {
      y <- as.ts(y)
      seasonal.periods <- 1
    }
    seasonal.periods <- seasonal.periods[seasonal.periods < length(y)]
    if (length(seasonal.periods) == 0L) {
      seasonal.periods <- 1
    }
  } else {
    # Add ts attributes
    if (!any(class(y) == "ts")) {
      y <- msts(y, seasonal.periods)
    }
  }
  seasonal.periods <- unique(pmax(seasonal.periods, 1))
  if (all(seasonal.periods == 1)) {
    seasonal.periods <- NULL
  }

  ny <- length(y)
  y <- na.contiguous(y)
  if (ny != length(y)) {
    warning("Missing values encountered. Using longest contiguous portion of time series")
    if (!is.null(attr_y$tsp)) {
      attr_y$tsp[1:2] <- range(time(y))
    }
  }

  # Refit model if available
  if (!is.null(model)) {
    refitModel <- try(fitPreviousBATSModel(y, model = model), silent = TRUE)
    return(refitModel)
  }

  # Check for constancy
  if (is.constant(y)) {
    fit <- list(
      y = y, x = matrix(y, nrow = 1, ncol = ny), errors = y * 0,
      fitted.values = y, seed.states = matrix(y[1]), AIC = -Inf,
      likelihood = -Inf, variance = 0, alpha = 0.9999, method = "BATS",
      call = match.call()
    )
    return(structure(fit, class = "bats"))
  }

  # Check for non-positive data
  if (any((y <= 0))) {
    use.box.cox <- FALSE
  }

  if ((!is.null(use.box.cox)) && (!is.null(use.trend)) && (use.parallel)) {
    if (use.trend && (!is.null(use.damped.trend))) {
      # In this case, there is only one alternative.
      use.parallel <- FALSE
    } else if (use.trend == FALSE) {
      # As above, in this case there is only one alternative.
      use.parallel <- FALSE
    }
  }

  if (!is.null(seasonal.periods)) {
    seasonal.mask <- (seasonal.periods == 1)
    seasonal.periods <- seasonal.periods[!seasonal.mask]
  }
  # Check if there is anything to parallelise
  if (is.null(seasonal.periods) && !is.null(use.box.cox) && !is.null(use.trend)) {
    use.parallel <- FALSE
  }

  if (is.null(use.box.cox)) {
    use.box.cox <- c(FALSE, TRUE)
  }
  if (any(use.box.cox)) {
    init.box.cox <- BoxCox.lambda(y, lower = bc.lower, upper = bc.upper)
  } else {
    init.box.cox <- NULL
  }
  if (is.null(use.trend)) {
    use.trend <- c(FALSE, TRUE)
  } else if (use.trend == FALSE) {
    use.damped.trend <- FALSE
  }
  if (is.null(use.damped.trend)) {
    use.damped.trend <- c(FALSE, TRUE)
  }
  y <- as.numeric(y)

  if (use.parallel) {
    # Set up the control array
    control.array <- NULL
    for (box.cox in use.box.cox) {
      for (trend in use.trend) {
        for (damping in use.damped.trend) {
          if (!trend && damping) {
            next
          }
          control.line <- c(box.cox, trend, damping)
          if (!is.null(control.array)) {
            control.array <- rbind(control.array, control.line)
          } else {
            control.array <- control.line
          }
        }
      }
    }
    ## Fit the models
    if (is.null(num.cores)) {
      num.cores <- detectCores(all.tests = FALSE, logical = TRUE)
    }
    clus <- makeCluster(num.cores)
    models.list <- clusterApplyLB(
      clus, c(1:nrow(control.array)), parFilterSpecifics,
      y = y, control.array = control.array, seasonal.periods = seasonal.periods,
      use.arma.errors = use.arma.errors, init.box.cox = init.box.cox,
      bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj, ...
    )
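    # Each worker returns a fitted model, or a list with AIC = Inf for invalid
    # combinations; release the workers before comparing AICs below.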
stopCluster(clus) ## Choose the best model #### Get the AICs aics <- numeric(nrow(control.array)) for (i in 1:nrow(control.array)) { aics[i] <- models.list[[i]]$AIC } best.number <- which.min(aics) best.model <- models.list[[best.number]] } else { best.aic <- Inf best.model <- NULL for (box.cox in use.box.cox) { for (trend in use.trend) { for (damping in use.damped.trend) { current.model <- try( filterSpecifics(y, box.cox = box.cox, trend = trend, damping = damping, seasonal.periods = seasonal.periods, use.arma.errors = use.arma.errors, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj, ... ), silent=TRUE ) if(!("try-error" %in% class(current.model))) { if (current.model$AIC < best.aic) { best.aic <- current.model$AIC best.model <- current.model } } } } } } if(is.null(best.model)) stop("Unable to fit a model") best.model$call <- match.call() if (best.model$optim.return.code != 0) { warning("optim() did not converge.") } attributes(best.model$fitted.values) <- attributes(best.model$errors) <- attr_y best.model$y <- origy best.model$series <- seriesname best.model$method <- "BATS" return(best.model) } filterSpecifics <- function(y, box.cox, trend, damping, seasonal.periods, use.arma.errors, force.seasonality = FALSE, init.box.cox = NULL, bc.lower = 0, bc.upper = 1, biasadj = FALSE, ...) { if (!trend && damping) { return(list(AIC = Inf)) } first.model <- fitSpecificBATS(y, use.box.cox = box.cox, use.beta = trend, use.damping = damping, seasonal.periods = seasonal.periods, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj) if (!is.null(seasonal.periods) && !force.seasonality) { non.seasonal.model <- fitSpecificBATS(y, use.box.cox = box.cox, use.beta = trend, use.damping = damping, seasonal.periods = NULL, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj) if (first.model$AIC > non.seasonal.model$AIC) { seasonal.periods <- NULL first.model <- non.seasonal.model } } if (use.arma.errors) { suppressWarnings(arma <- auto.arima(as.numeric(first.model$errors), d = 0, ...)) p <- arma$arma[1] q <- arma$arma[2] if (p != 0 || q != 0) { # Did auto.arima() find any AR() or MA() coefficients? if (p != 0) { ar.coefs <- numeric(p) } else { ar.coefs <- NULL } if (q != 0) { ma.coefs <- numeric(q) } else { ma.coefs <- NULL } starting.params <- first.model$parameters # printCASE(box.cox, trend, damping, seasonal.periods, ar.coefs, ma.coefs, p, q) second.model <- fitSpecificBATS(y, use.box.cox = box.cox, use.beta = trend, use.damping = damping, seasonal.periods = seasonal.periods, ar.coefs = ar.coefs, ma.coefs = ma.coefs, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj) if (second.model$AIC < first.model$AIC) { return(second.model) } else { return(first.model) } } else { # Else auto.arima() did not find any AR() or MA()coefficients return(first.model) } } else { return(first.model) } } parFilterSpecifics <- function(control.number, control.array, y, seasonal.periods, use.arma.errors, force.seasonality = FALSE, init.box.cox = NULL, bc.lower = 0, bc.upper = 1, biasadj = FALSE, ...) 
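# Worker-side counterpart of filterSpecifics(): the fitting logic is the same,
# but the (box.cox, trend, damping) flags are taken from the row of
# control.array indexed by control.number.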
{ box.cox <- control.array[control.number, 1] trend <- control.array[control.number, 2] damping <- control.array[control.number, 3] if (!trend && damping) { return(list(AIC = Inf)) } first.model <- fitSpecificBATS(y, use.box.cox = box.cox, use.beta = trend, use.damping = damping, seasonal.periods = seasonal.periods, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj) if (!is.null(seasonal.periods) && !force.seasonality) { non.seasonal.model <- fitSpecificBATS(y, use.box.cox = box.cox, use.beta = trend, use.damping = damping, seasonal.periods = NULL, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj) if (first.model$AIC > non.seasonal.model$AIC) { seasonal.periods <- NULL first.model <- non.seasonal.model } } if (use.arma.errors) { suppressWarnings(arma <- auto.arima(as.numeric(first.model$errors), d = 0, ...)) p <- arma$arma[1] q <- arma$arma[2] if (p != 0 || q != 0) { # Did auto.arima() find any AR() or MA() coefficients? if (p != 0) { ar.coefs <- numeric(p) } else { ar.coefs <- NULL } if (q != 0) { ma.coefs <- numeric(q) } else { ma.coefs <- NULL } starting.params <- first.model$parameters # printCASE(box.cox, trend, damping, seasonal.periods, ar.coefs, ma.coefs, p, q) second.model <- fitSpecificBATS(y, use.box.cox = box.cox, use.beta = trend, use.damping = damping, seasonal.periods = seasonal.periods, ar.coefs = ar.coefs, ma.coefs = ma.coefs, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj) if (second.model$AIC < first.model$AIC) { return(second.model) } else { return(first.model) } } else { # Else auto.arima() did not find any AR() or MA()coefficients return(first.model) } } else { return(first.model) } } #' @rdname fitted.Arima #' @export fitted.bats <- function(object, h = 1, ...) { if (h == 1) { return(object$fitted.values) } else { return(hfitted(object = object, h = h, FUN = "bats", ...)) } } #' @export print.bats <- function(x, ...) { cat(as.character(x)) cat("\n") cat("\nCall: ") print(x$call) cat("\nParameters") if (!is.null(x$lambda)) { cat("\n Lambda: ") cat(round(x$lambda, 6)) } cat("\n Alpha: ") cat(x$alpha) if (!is.null(x$beta)) { cat("\n Beta: ") cat(x$beta) cat("\n Damping Parameter: ") cat(round(x$damping.parameter, 6)) } if (!is.null(x$gamma.values)) { cat("\n Gamma Values: ") cat(x$gamma.values) } if (!is.null(x$ar.coefficients)) { cat("\n AR coefficients: ") cat(round(x$ar.coefficients, 6)) } if (!is.null(x$ma.coefficients)) { cat("\n MA coefficients: ") cat(round(x$ma.coefficients, 6)) } cat("\n") cat("\nSeed States:\n") print(x$seed.states) cat("\nSigma: ") cat(sqrt(x$variance)) cat("\nAIC: ") cat(x$AIC) cat("\n") } #' Plot components from BATS model #' #' Produces a plot of the level, slope and seasonal components from a BATS or #' TBATS model. The plotted components are Box-Cox transformed using the estimated transformation parameter. #' #' @param x Object of class \dQuote{bats/tbats}. #' @param object Object of class \dQuote{bats/tbats}. #' @param main Main title for plot. #' @param range.bars Logical indicating if each plot should have a bar at its #' right side representing relative size. If NULL, automatic selection takes #' place. #' @param ... Other plotting parameters passed to \code{\link[graphics]{par}}. #' @return None. Function produces a plot #' @author Rob J Hyndman #' @seealso \code{\link{bats}},\code{\link{tbats}} #' @keywords hplot #' #' @export plot.bats <- function(x, main = "Decomposition by BATS model", ...) 
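# The state matrix x$x has one row per state: the level first, then the slope
# (if the model has a trend), then one block of rows for each seasonal period.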
{
  # Get original data, transform if necessary
  if (!is.null(x$lambda)) {
    y <- BoxCox(x$y, x$lambda)
    lambda <- attr(y, "lambda")
  } else {
    y <- x$y
  }

  # Extract states
  out <- cbind(observed = c(y), level = x$x[1, ])
  if (!is.null(x$beta)) {
    out <- cbind(out, slope = x$x[2, ])
  }
  nonseas <- 2 + !is.null(x$beta) # No. non-seasonal columns in out
  nseas <- length(x$gamma.values) # No. seasonal periods
  if (!is.null(x$gamma.values)) {
    seas.states <- x$x[-(1:(1 + !is.null(x$beta))), ]
    j <- cumsum(c(1, x$seasonal.periods))
    for (i in 1:nseas) {
      out <- cbind(out, season = seas.states[j[i], ])
    }
    if (nseas > 1) {
      colnames(out)[nonseas + 1:nseas] <- paste("season", 1:nseas, sep = "")
    }
  }

  # Add time series characteristics
  out <- ts(out)
  tsp(out) <- tsp(y)

  # Do the plot
  plot.ts(out, main = main, nc = 1, ...)
}

#' @rdname is.ets
#' @export
is.bats <- function(x) {
  inherits(x, "bats")
}
forecast/R/clean.R0000644000176200001440000001633414165140116013474 0ustar liggesusers# Functions to remove outliers and fill missing values in a time series
# Nothing for multiple seasonality yet.

# na.interp fills in missing values
# Uses linear interpolation for non-seasonal series
# Adds seasonality based on a periodic stl decomposition for seasonal series
# Argument lambda allows for Box-Cox transformation

#' Interpolate missing values in a time series
#'
#' By default, uses linear interpolation for non-seasonal series. For seasonal
#' series, a robust STL decomposition is first computed. Then a linear
#' interpolation is applied to the seasonally adjusted data, and the seasonal
#' component is added back.
#'
#' A more general and flexible approach is available using \code{na.approx} in
#' the \code{zoo} package.
#'
#' @param x time series
#' @param linear Should a linear interpolation be used?
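#'   Defaults to \code{TRUE} for non-seasonal series, and for series with no
#'   more than two full seasonal periods of non-missing values.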
#' @inheritParams forecast #' @return Time series #' @author Rob J Hyndman #' @seealso \code{\link[forecast]{tsoutliers}} #' @keywords ts #' @examples #' #' data(gold) #' plot(na.interp(gold)) #' #' @export na.interp <- function(x, lambda=NULL, linear=(frequency(x) <= 1 | sum(!is.na(x)) <= 2 * frequency(x))) { missng <- is.na(x) # Do nothing if no missing values if (sum(missng) == 0L) { return(x) } origx <- x rangex <- range(x, na.rm=TRUE) drangex <- rangex[2L] - rangex[1L] # Convert to ts if (is.null(tsp(x))) { x <- ts(x) } if (length(dim(x)) > 1) { if (NCOL(x) == 1) { x <- x[, 1] } else { stop("The time series is not univariate.") } } # Transform if requested if (!is.null(lambda)) { x <- BoxCox(x, lambda = lambda) lambda <- attr(x, "lambda") } freq <- frequency(x) tspx <- tsp(x) n <- length(x) tt <- 1:n idx <- tt[!missng] if (linear) { # Use linear interpolation x <- ts(approx(idx, x[idx], tt, rule = 2)$y) } else { # Otherwise estimate seasonal component robustly # Then add to linear interpolation of seasonally adjusted series # Fit Fourier series for seasonality and a polynomial for the trend, # just to get something reasonable to start with if ("msts" %in% class(x)) { K <- pmin(trunc(attributes(x)$msts / 2), 20L) } else { K <- min(trunc(freq / 2), 5) } X <- cbind(fourier(x, K), poly(tt, degree = pmin(pmax(trunc(n / 10), 1), 6L))) fit <- lm(x ~ X, na.action = na.exclude) pred <- predict(fit, newdata = data.frame(X)) x[missng] <- pred[missng] # Now re-do it with stl to get better results fit <- mstl(x, robust = TRUE) # Interpolate seasonally adjusted values sa <- seasadj(fit) sa <- approx(idx, sa[idx], 1:n, rule = 2)$y # Replace original missing values seas <- seasonal(fit) if (NCOL(seas) > 1) { seas <- rowSums(seas) } x[missng] <- sa[missng] + seas[missng] } # Backtransform if required if (!is.null(lambda)) { x <- InvBoxCox(x, lambda = lambda) } # Ensure time series characteristics not lost tsp(x) <- tspx # Check stability and use linear interpolation if there is a problem if(!linear & (max(x) > rangex[2L]+0.5*drangex | min(x) < rangex[1L]-0.5*drangex)) return(na.interp(origx, lambda=lambda, linear=TRUE)) else return(x) } # Function to identify outliers and replace them with better values # Missing values replaced as well if replace.missing=TRUE #' Identify and replace outliers and missing values in a time series #' #' Uses supsmu for non-seasonal series and a robust STL decomposition for #' seasonal series. To estimate missing values and outlier replacements, #' linear interpolation is used on the (possibly seasonally adjusted) series #' #' @param x time series #' @param replace.missing If TRUE, it not only replaces outliers, but also #' interpolates missing values #' @param iterate the number of iterations required #' @inheritParams forecast #' @return Time series #' @author Rob J Hyndman #' @references Hyndman (2021) "Detecting time series outliers" \url{https://robjhyndman.com/hyndsight/tsoutliers/}. 
#' @seealso \code{\link[forecast]{na.interp}},
#' \code{\link[forecast]{tsoutliers}}, \code{\link[stats]{supsmu}}
#' @keywords ts
#' @examples
#'
#' cleangold <- tsclean(gold)
#'
#' @export
tsclean <- function(x, replace.missing=TRUE, iterate=2, lambda = NULL) {
  outliers <- tsoutliers(x, iterate = iterate, lambda = lambda)
  x[outliers$index] <- outliers$replacements
  if (replace.missing) {
    x <- na.interp(x, lambda = lambda)
  }
  return(x)
}

# Function to identify time series outliers

#' Identify and replace outliers in a time series
#'
#' Uses supsmu for non-seasonal series and a periodic stl decomposition for
#' seasonal series to identify outliers and estimate their replacements.
#'
#'
#' @param x time series
#' @param iterate the number of iterations required
#' @inheritParams forecast
#' @return \item{index}{The indexes of the identified outliers}
#' \item{replacements}{Suggested numeric values to replace identified outliers}
#' @author Rob J Hyndman
#' @seealso \code{\link[forecast]{na.interp}}, \code{\link[forecast]{tsclean}}
#' @references Hyndman (2021) "Detecting time series outliers" \url{https://robjhyndman.com/hyndsight/tsoutliers/}.
#' @keywords ts
#' @examples
#'
#' data(gold)
#' tsoutliers(gold)
#'
#' @export
tsoutliers <- function(x, iterate=2, lambda=NULL) {
  n <- length(x)
  freq <- frequency(x)

  # Identify and fill missing values
  missng <- is.na(x)
  nmiss <- sum(missng)
  if (nmiss > 0L) {
    xx <- na.interp(x, lambda = lambda)
  } else {
    xx <- x
  }

  # Check if constant
  if (is.constant(xx)) {
    return(list(index = integer(0), replacements = numeric(0)))
  }

  # Transform if requested
  if (!is.null(lambda)) {
    xx <- BoxCox(xx, lambda = lambda)
    lambda <- attr(xx, "lambda")
  }

  # Seasonally adjust data if necessary
  if (freq > 1 && n > 2 * freq) {
    fit <- mstl(xx, robust = TRUE)
    # Check if seasonality is sufficient to warrant adjustment
    rem <- remainder(fit)
    detrend <- xx - trendcycle(fit)
    strength <- 1 - var(rem) / var(detrend)
    if (strength >= 0.6) {
      xx <- seasadj(fit)
    }
  }

  # Use super-smoother on the (seasonally adjusted) data
  tt <- 1:n
  mod <- supsmu(tt, xx)
  resid <- xx - mod$y

  # Make sure missing values are not interpreted as outliers
  if (nmiss > 0L) {
    resid[missng] <- NA
  }

  # Limits of acceptable residuals
  resid.q <- quantile(resid, probs = c(0.25, 0.75), na.rm = TRUE)
  iqr <- diff(resid.q)
  limits <- resid.q + 3 * iqr * c(-1, 1)

  # Find residuals outside limits
  if ((limits[2] - limits[1]) > 1e-14) {
    outliers <- which((resid < limits[1]) | (resid > limits[2]))
  } else {
    outliers <- numeric(0)
  }

  # Replace all missing values including outliers
  x[outliers] <- NA
  x <- na.interp(x, lambda = lambda)

  # Do no more than 2 iterations regardless of the value of iterate
  if (iterate > 1) {
    tmp <- tsoutliers(x, iterate = 1, lambda = lambda)
    if (length(tmp$index) > 0) # Found some more
    {
      outliers <- sort(unique(c(outliers, tmp$index)))
      x[outliers] <- NA
      if (sum(!is.na(x)) == 1L) {
        # Only one non-missing value
        x[is.na(x)] <- x[!is.na(x)]
      } else {
        x <- na.interp(x, lambda = lambda)
      }
    }
  }

  # Return outlier indexes and replacements
  return(list(index = outliers, replacements = x[outliers]))
}
forecast/R/baggedModel.R0000644000176200001440000001707314003673410014604 0ustar liggesusers##
#' Forecasting using a bagged model
#'
#' The bagged model forecasting method.
#'
#' This function implements the bagged model forecasting method described in
#' Bergmeir et al. (2016). By default, the \code{\link{ets}} function is
#' applied to all bootstrapped series. Base models other than \code{\link{ets}}
#' can be given by the parameter \code{fn}.
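#' For example, \code{baggedModel(WWWusage, fn = auto.arima)} fits an ARIMA
#' model to each bootstrapped series.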
Using the default parameters, the function #' \code{\link{bld.mbb.bootstrap}} is used to calculate the bootstrapped series #' with the Box-Cox and Loess-based decomposition (BLD) bootstrap. The function #' \code{\link{forecast.baggedModel}} can then be used to calculate forecasts. #' #' \code{baggedETS} is a wrapper for \code{baggedModel}, setting \code{fn} to "ets". #' This function is included for backwards compatibility only, and may be #' deprecated in the future. #' #' @aliases print.baggedModel #' #' @param y A numeric vector or time series of class \code{ts}. #' @param bootstrapped_series bootstrapped versions of y. #' @param fn the forecast function to use. Default is \code{\link{ets}}. #' @param \dots Other arguments passed to the forecast function. #' @return Returns an object of class "\code{baggedModel}". #' #' The function \code{print} is used to obtain and print a summary of the #' results. #' #' \item{models}{A list containing the fitted ensemble models.} #' \item{method}{The function for producing a forecastable model.} #' \item{y}{The original time series.} #' \item{bootstrapped_series}{The bootstrapped series.} #' \item{modelargs}{The arguments passed through to \code{fn}.} #' \item{fitted}{Fitted values (one-step forecasts). The #' mean of the fitted values is calculated over the ensemble.} #' \item{residuals}{Original values minus fitted values.} #' @author Christoph Bergmeir, Fotios Petropoulos #' @references Bergmeir, C., R. J. Hyndman, and J. M. Benitez (2016). Bagging #' Exponential Smoothing Methods using STL Decomposition and Box-Cox #' Transformation. International Journal of Forecasting 32, 303-312. #' @keywords ts #' @examples #' fit <- baggedModel(WWWusage) #' fcast <- forecast(fit) #' plot(fcast) #' #' @export baggedModel <- function(y, bootstrapped_series=bld.mbb.bootstrap(y, 100), fn=ets, ...) { # Add package info in case forecast not loaded if(!is.function(fn)){ warning(paste0("Using character specification for `fn` is deprecated. Please use `fn = ", match.arg(fn,c("ets", "auto.arima")), "`.")) fn <- utils::getFromNamespace(match.arg(fn,c("ets", "auto.arima")), "forecast") } mod_boot <- lapply(bootstrapped_series, function(x) { mod <- fn(x, ...) }) # Return results out <- list() out$y <- as.ts(y) out$bootstrapped_series <- bootstrapped_series out$models <- mod_boot out$modelargs <- list(...) fitted_boot <- lapply(out$models, fitted) fitted_boot <- as.matrix(as.data.frame(fitted_boot)) out$fitted <- ts(apply(fitted_boot, 1, mean)) tsp(out$fitted) <- tsp(out$y) out$residuals <- out$y - out$fitted out$series <- deparse(substitute(y)) out$method <- "baggedModel" out$call <- match.call() return(structure(out, class = c("baggedModel"))) } #' @rdname baggedModel #' @export baggedETS <- function(y, bootstrapped_series=bld.mbb.bootstrap(y, 100), ...) { out <- baggedModel(y, bootstrapped_series, fn = ets, ...) class(out) <- c("baggedETS", class(out)) out } #' Forecasting using a bagged model #' #' Returns forecasts and other information for bagged models. #' #' Intervals are calculated as min and max values over the point forecasts from #' the models in the ensemble. I.e., the intervals are not prediction #' intervals, but give an indication of how different the forecasts within the #' ensemble are. #' #' @param object An object of class "\code{baggedModel}" resulting from a call to #' \code{\link{baggedModel}}. #' @param h Number of periods for forecasting. #' @param ... 
Other arguments, passed on to the \code{\link{forecast}} function of the original method #' @return An object of class "\code{forecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' An object of class "\code{forecast}" is a list containing at least the #' following elements: #' \item{model}{A list containing information about the fitted model} #' \item{method}{The name of the forecasting method as a character string} #' \item{mean}{Point forecasts as a time series} #' \item{lower}{Lower limits for prediction intervals} #' \item{upper}{Upper limits for prediction intervals} #' \item{level}{The confidence values associated with the prediction intervals} #' \item{x}{The original time series (either \code{object} itself or the #' time series used to create the model stored as \code{object}).} #' \item{xreg}{The external regressors used in fitting (if given).} #' \item{residuals}{Residuals from the fitted model. That #' is x minus fitted values.} #' \item{fitted}{Fitted values (one-step forecasts)} #' @author Christoph Bergmeir, Fotios Petropoulos #' @seealso \code{\link{baggedModel}}. #' @references Bergmeir, C., R. J. Hyndman, and J. M. Benitez (2016). Bagging #' Exponential Smoothing Methods using STL Decomposition and Box-Cox #' Transformation. International Journal of Forecasting 32, 303-312. #' @keywords ts #' @examples #' fit <- baggedModel(WWWusage) #' fcast <- forecast(fit) #' plot(fcast) #' #' \dontrun{ #' fit2 <- baggedModel(WWWusage, fn="auto.arima") #' fcast2 <- forecast(fit2) #' plot(fcast2) #' accuracy(fcast2)} #' #' @export forecast.baggedModel <- function(object, h=ifelse(frequency(object$y) > 1, 2 * frequency(object$y), 10), ...) { out <- list( model = object, series = object$series, x = object$y, method = object$method, fitted = object$fitted, residuals = object$residuals ) # out <- object tspx <- tsp(out$x) forecasts_boot <- lapply(out$model$models, function(mod) { if (inherits(mod, "ets")) { forecast(mod, PI = FALSE, h = h, ...)$mean } else { forecast(mod, h = h, ...)$mean } }) forecasts_boot <- as.matrix(as.data.frame(forecasts_boot)) colnames(forecasts_boot) <- NULL if (!is.null(tspx)) { start.f <- tspx[2] + 1 / frequency(out$x) } else { start.f <- length(out$x) + 1 } # out <- list() out$forecasts_boot <- forecasts_boot # browser() # out$model$models out$mean <- ts(apply(forecasts_boot, 1, mean), frequency = frequency(out$x), start = start.f) out$median <- ts(apply(forecasts_boot, 1, median)) out$lower <- ts(apply(forecasts_boot, 1, min)) out$upper <- ts(apply(forecasts_boot, 1, max)) out$level <- 100 tsp(out$median) <- tsp(out$lower) <- tsp(out$upper) <- tsp(out$mean) class(out) <- "forecast" out } # fitted.baggedModel <- function(object, h=1, accum_func=mean, ...){ # # fitted_boot <- lapply(object$models, fitted, h) # fitted_boot <- as.matrix(as.data.frame(fitted_boot)) # fitted_boot <- apply(fitted_boot, 2, accum_func) # fitted_boot # } # residuals.baggedModel <- function(object, h=1, ...){ # # residuals_boot <- lapply(object$models, residuals, h) # residuals_boot <- as.matrix(as.data.frame(residuals_boot)) # residuals_boot # # #Alternative implementation: # #object$x - fitted(object, h) # } #' @export print.baggedModel <- function(x, digits = max(3, getOption("digits") - 3), ...) 
{ cat("Series:", x$series, "\n") cat("Model: ", x$method, "\n") cat("Call: ") print(x$call) # print(x$model) # cat("\nsigma^2 estimated as ", format(mean(residuals(x)^2,na.rm=TRUE), digits = digits), "\n", sep = "") invisible(x) } #' @rdname is.ets #' @export is.baggedModel <- function(x) { inherits(x, "baggedModel") } forecast/R/nnetar.R0000644000176200001440000005031214133702563013700 0ustar liggesusers# Defaults: # For non-seasonal data, p chosen using AIC from linear AR(p) model # For seasonal data, p chosen using AIC from linear AR(p) model after # seasonally adjusting with STL decomposition, and P=1 # size set to average of number of inputs and number of outputs: (p+P+1)/2 # if xreg is included then size = (p+P+ncol(xreg)+1)/2 #' Neural Network Time Series Forecasts #' #' Feed-forward neural networks with a single hidden layer and lagged inputs #' for forecasting univariate time series. #' #' A feed-forward neural network is fitted with lagged values of \code{y} as #' inputs and a single hidden layer with \code{size} nodes. The inputs are for #' lags 1 to \code{p}, and lags \code{m} to \code{mP} where #' \code{m=frequency(y)}. If \code{xreg} is provided, its columns are also #' used as inputs. If there are missing values in \code{y} or #' \code{xreg}, the corresponding rows (and any others which depend on them as #' lags) are omitted from the fit. A total of \code{repeats} networks are #' fitted, each with random starting weights. These are then averaged when #' computing forecasts. The network is trained for one-step forecasting. #' Multi-step forecasts are computed recursively. #' #' For non-seasonal data, the fitted model is denoted as an NNAR(p,k) model, #' where k is the number of hidden nodes. This is analogous to an AR(p) model #' but with nonlinear functions. For seasonal data, the fitted model is called #' an NNAR(p,P,k)[m] model, which is analogous to an ARIMA(p,0,0)(P,0,0)[m] #' model but with nonlinear functions. #' #' @aliases print.nnetar print.nnetarmodels #' #' @param y A numeric vector or time series of class \code{ts}. #' @param p Embedding dimension for non-seasonal time series. Number of #' non-seasonal lags used as inputs. For non-seasonal time series, the default #' is the optimal number of lags (according to the AIC) for a linear AR(p) #' model. For seasonal time series, the same method is used but applied to #' seasonally adjusted data (from an stl decomposition). #' @param P Number of seasonal lags used as inputs. #' @param size Number of nodes in the hidden layer. Default is half of the #' number of input nodes (including external regressors, if given) plus 1. #' @param repeats Number of networks to fit with different random starting #' weights. These are then averaged when producing forecasts. #' @param xreg Optionally, a vector or matrix of external regressors, which #' must have the same number of rows as \code{y}. Must be numeric. #' @param model Output from a previous call to \code{nnetar}. If model is #' passed, this same model is fitted to \code{y} without re-estimating any #' parameters. #' @param subset Optional vector specifying a subset of observations to be used #' in the fit. Can be an integer index vector or a logical vector the same #' length as \code{y}. All observations are used by default. #' @param scale.inputs If TRUE, inputs are scaled by subtracting the column #' means and dividing by their respective standard deviations. If \code{lambda} #' is not \code{NULL}, scaling is applied after Box-Cox transformation. #' @param x Deprecated. 
Included for backwards compatibility. #' @param \dots Other arguments passed to \code{\link[nnet]{nnet}} for #' \code{nnetar}. #' @inheritParams forecast #' #' @return Returns an object of class "\code{nnetar}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{nnetar}. #' #' \item{model}{A list containing information about the fitted model} #' \item{method}{The name of the forecasting method as a character string} #' \item{x}{The original time series.} #' \item{xreg}{The external regressors used in fitting (if given).} #' \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} #' \item{fitted}{Fitted values (one-step forecasts)} #' \item{...}{Other arguments} #' #' @author Rob J Hyndman and Gabriel Caceres #' @keywords ts #' @examples #' fit <- nnetar(lynx) #' fcast <- forecast(fit) #' plot(fcast) #' #' ## Arguments can be passed to nnet() #' fit <- nnetar(lynx, decay=0.5, maxit=150) #' plot(forecast(fit)) #' lines(lynx) #' #' ## Fit model to first 100 years of lynx data #' fit <- nnetar(window(lynx,end=1920), decay=0.5, maxit=150) #' plot(forecast(fit,h=14)) #' lines(lynx) #' #' ## Apply fitted model to later data, including all optional arguments #' fit2 <- nnetar(window(lynx,start=1921), model=fit) #' #' @export nnetar <- function(y, p, P=1, size, repeats=20, xreg=NULL, lambda=NULL, model=NULL, subset=NULL, scale.inputs=TRUE, x=y, ...) { useoldmodel <- FALSE yname <- deparse(substitute(y)) if (!is.null(model)) { # Use previously fitted model useoldmodel <- TRUE # Check for conflicts between new and old data: # Check model class if (!is.nnetar(model)) { stop("Model must be a nnetar object") } # Check new data m <- max(round(frequency(model$x)), 1L) minlength <- max(c(model$p, model$P * m)) + 1 if (length(x) < minlength) { stop(paste("Series must be at least of length", minlength, "to use fitted model")) } if (tsp(as.ts(x))[3] != m) { warning(paste("Data frequency doesn't match fitted model, coercing to frequency =", m)) x <- ts(x, frequency = m) } # Check xreg if (!is.null(model$xreg)) { if (is.null(xreg)) { stop("No external regressors provided") } if (NCOL(xreg) != NCOL(model$xreg)) { stop("Number of external regressors does not match fitted model") } } # Update parameters with previous model lambda <- model$lambda size <- model$size p <- model$p P <- model$P if (P > 0) { lags <- sort(unique(c(1:p, m * (1:P)))) } else { lags <- 1:p } if (is.null(model$scalex)) { scale.inputs <- FALSE } } else { # when not using an old model if (length(y) < 3) { stop("Not enough data to fit a model") } # Check for constant data in time series constant_data <- is.constant(na.interp(x)) if (constant_data){ warning("Constant data, setting p=1, P=0, lambda=NULL, scale.inputs=FALSE") scale.inputs <- FALSE lambda <- NULL p <- 1 P <- 0 } ## Check for constant data in xreg if (!is.null(xreg)){ constant_xreg <- any(apply(as.matrix(xreg), 2, function(x) is.constant(na.interp(x)))) if (constant_xreg){ warning("Constant xreg column, setting scale.inputs=FALSE") scale.inputs <- FALSE } } } # Check for NAs in x if (any(is.na(x))) { warning("Missing values in x, omitting rows") } # Transform data if (!is.null(lambda)) { xx <- BoxCox(x, lambda) lambda <- attr(xx, "lambda") } else { xx <- x } ## Check whether to use a subset of the data xsub <- rep(TRUE, length(x)) if (is.numeric(subset)) { xsub[-subset] <- FALSE } if 
(is.logical(subset)) { xsub <- subset } # Scale series scalex <- NULL if (scale.inputs) { if (useoldmodel) { scalex <- model$scalex } else { tmpx <- scale(xx[xsub], center = TRUE, scale = TRUE) scalex <- list( center = attr(tmpx, "scaled:center"), scale = attr(tmpx, "scaled:scale") ) } xx <- scale(xx, center = scalex$center, scale = scalex$scale) xx <- xx[, 1] } # Check xreg class & dim xxreg <- NULL scalexreg <- NULL if (!is.null(xreg)) { xxreg <- xreg <- as.matrix(xreg) if (length(x) != NROW(xreg)) { stop("Number of rows in xreg does not match series length") } # Check for NAs in xreg if (any(is.na(xreg))) { warning("Missing values in xreg, omitting rows") } # Scale xreg if (scale.inputs) { if (useoldmodel) { scalexreg <- model$scalexreg } else { tmpx <- scale(xxreg[xsub, ], center = TRUE, scale = TRUE) scalexreg <- list( center = attr(tmpx, "scaled:center"), scale = attr(tmpx, "scaled:scale") ) } xxreg <- scale(xxreg, center = scalexreg$center, scale = scalexreg$scale) } } # Set up lagged matrix n <- length(xx) xx <- as.ts(xx) m <- max(round(frequency(xx)), 1L) if (!useoldmodel) { if (m == 1) { if (missing(p)) { p <- max(length(ar(na.interp(xx))$ar), 1) } if (p >= n) { warning("Reducing number of lagged inputs due to short series") p <- n - 1 } lags <- 1:p if (P > 1) { warning("Non-seasonal data, ignoring seasonal lags") } P <- 0 } else { if (missing(p)) { if (n > 2 * m) { x.sa <- seasadj(mstl(na.interp(xx))) } else { x.sa <- na.interp(xx) } p <- max(length(ar(x.sa)$ar), 1) } if (p >= n) { warning("Reducing number of lagged inputs due to short series") p <- n - 1 } if (P > 0 && n >= m * P + 2) { lags <- sort(unique(c(1:p, m * (1:P)))) } else { lags <- 1:p if (P > 0) { warning("Series too short for seasonal lags") P <- 0 } } } } maxlag <- max(lags) nlag <- length(lags) y <- xx[-(1:maxlag)] lags.X <- matrix(NA_real_, ncol = nlag, nrow = n - maxlag) for (i in 1:nlag) lags.X[, i] <- xx[(maxlag - lags[i] + 1):(n - lags[i])] # Add xreg into lagged matrix lags.X <- cbind(lags.X, xxreg[-(1:maxlag), ]) if (missing(size)) { size <- round((NCOL(lags.X) + 1) / 2) } # Remove missing values if present j <- complete.cases(lags.X, y) ## Remove values not in subset j <- j & xsub[-(1:maxlag)] ## Stop if there's no data to fit (e.g. due to NAs or NaNs) if (NROW(lags.X[j,, drop=FALSE]) == 0) { stop("No data to fit (possibly due to NA or NaN)") } ## Fit average ANN. if (useoldmodel) { fit <- oldmodel_avnnet(lags.X[j, , drop = FALSE], y[j], size = size, model) } else { fit <- avnnet(lags.X[j, , drop=FALSE], y[j], size = size, repeats = repeats, ...) } # Return results out <- list() out$x <- as.ts(x) out$m <- m out$p <- p out$P <- P out$scalex <- scalex out$scalexreg <- scalexreg out$size <- size out$xreg <- xreg out$lambda <- lambda out$subset <- (1:length(x))[xsub] out$model <- fit out$nnetargs <- list(...) 
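  # When refitting a previous model, reuse its original nnet arguments; the
  # fitted values computed below are averages of the one-step predictions
  # across the fitted networks, with scaling and any Box-Cox transform undone.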
if (useoldmodel) { out$nnetargs <- model$nnetargs } if (NROW(lags.X[j,, drop=FALSE]) == 1){ fits <- c(rep(NA_real_, maxlag), mean(sapply(fit, predict))) } else{ fits <- c(rep(NA_real_, maxlag), rowMeans(sapply(fit, predict))) } if (scale.inputs) { fits <- fits * scalex$scale + scalex$center } fits <- ts(fits) if (!is.null(lambda)) { fits <- InvBoxCox(fits, lambda) } out$fitted <- ts(rep(NA_real_, length(out$x))) out$fitted[c(rep(TRUE, maxlag), j)] <- fits out$fitted <- copy_msts(out$x, out$fitted) out$residuals <- out$x - out$fitted out$lags <- lags out$series <- yname out$method <- paste("NNAR(", p, sep = "") if (P > 0) { out$method <- paste(out$method, ",", P, sep = "") } out$method <- paste(out$method, ",", size, ")", sep = "") if (P > 0) { out$method <- paste(out$method, "[", m, "]", sep = "") } out$call <- match.call() return(structure(out, class = c("nnetar"))) } # Aggregate several neural network models avnnet <- function(x, y, repeats, linout=TRUE, trace=FALSE, ...) { mods <- list() for (i in 1:repeats) mods[[i]] <- nnet::nnet(x, y, linout = linout, trace = trace, ...) return(structure(mods, class = "nnetarmodels")) } # Fit old model to new data oldmodel_avnnet <- function(x, y, size, model) { repeats <- length(model$model) args <- list(x = x, y = y, size = size, linout = 1, trace = FALSE) # include additional nnet arguments args <- c(args, model$nnetargs) # set iterations to zero (i.e. weights stay fixed) args$maxit <- 0 mods <- list() for (i in 1:repeats) { args$Wts <- model$model[[i]]$wts mods[[i]] <- do.call(nnet::nnet, args) } return(structure(mods, class = "nnetarmodels")) } #' @export print.nnetarmodels <- function(x, ...) { cat(paste("\nAverage of", length(x), "networks, each of which is\n")) print(x[[1]]) } #' Forecasting using neural network models #' #' Returns forecasts and other information for univariate neural network #' models. #' #' Prediction intervals are calculated through simulations and can be slow. #' Note that if the network is too complex and overfits the data, the residuals #' can be arbitrarily small; if used for prediction interval calculations, they #' could lead to misleadingly small values. It is possible to use out-of-sample #' residuals to ameliorate this, see examples. #' #' @param object An object of class "\code{nnetar}" resulting from a call to #' \code{\link{nnetar}}. #' @param h Number of periods for forecasting. If \code{xreg} is used, \code{h} #' is ignored and the number of forecast periods is set to the number of rows #' of \code{xreg}. #' @param PI If TRUE, prediction intervals are produced, otherwise only point #' forecasts are calculated. If \code{PI} is FALSE, then \code{level}, #' \code{fan}, \code{bootstrap} and \code{npaths} are all ignored. #' @param level Confidence level for prediction intervals. #' @param fan If \code{TRUE}, level is set to \code{seq(51,99,by=3)}. This is #' suitable for fan plots. #' @param xreg Future values of external regressor variables. #' @param bootstrap If \code{TRUE}, then prediction intervals computed using #' simulations with resampled residuals rather than normally distributed #' errors. Ignored if \code{innov} is not \code{NULL}. #' @param npaths Number of sample paths used in computing simulated prediction #' intervals. #' @param innov Values to use as innovations for prediction intervals. Must be #' a matrix with \code{h} rows and \code{npaths} columns (vectors are coerced #' into a matrix). If present, \code{bootstrap} is ignored. #' @param ... 
Additional arguments passed to \code{\link{simulate.nnetar}} #' @inheritParams forecast #' #' @return An object of class "\code{forecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{forecast.nnetar}. #' #' An object of class "\code{forecast}" is a list containing at least the #' following elements: #' \item{model}{A list containing information about the fitted model} #' \item{method}{The name of the forecasting method as a character string} #' \item{mean}{Point forecasts as a time series} #' \item{lower}{Lower limits for prediction intervals} #' \item{upper}{Upper limits for prediction intervals} #' \item{level}{The confidence values associated with the prediction intervals} #' \item{x}{The original time series (either \code{object} itself or the time series #' used to create the model stored as \code{object}).} #' \item{xreg}{The external regressors used in fitting (if given).} #' \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} #' \item{fitted}{Fitted values (one-step forecasts)} #' \item{...}{Other arguments} #' #' @author Rob J Hyndman and Gabriel Caceres #' @seealso \code{\link{nnetar}}. #' @keywords ts #' @examples #' ## Fit & forecast model #' fit <- nnetar(USAccDeaths, size=2) #' fcast <- forecast(fit, h=20) #' plot(fcast) #' #' \dontrun{ #' ## Include prediction intervals in forecast #' fcast2 <- forecast(fit, h=20, PI=TRUE, npaths=100) #' plot(fcast2) #' #' ## Set up out-of-sample innovations using cross-validation #' fit_cv <- CVar(USAccDeaths, size=2) #' res_sd <- sd(fit_cv$residuals, na.rm=TRUE) #' myinnovs <- rnorm(20*100, mean=0, sd=res_sd) #' ## Forecast using new innovations #' fcast3 <- forecast(fit, h=20, PI=TRUE, npaths=100, innov=myinnovs) #' plot(fcast3) #' } #' #' @export forecast.nnetar <- function(object, h=ifelse(object$m > 1, 2 * object$m, 10), PI=FALSE, level=c(80, 95), fan=FALSE, xreg=NULL, lambda=object$lambda, bootstrap=FALSE, npaths=1000, innov=NULL, ...) { # require(nnet) out <- object tspx <- tsp(out$x) # if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 && max(level) < 1) { level <- 100 * level } else if (min(level) < 0 || max(level) > 99.99) { stop("Confidence limit out of range") } } # Check if xreg was used in fitted model if (is.null(object$xreg)) { if (!is.null(xreg)) { warning("External regressors were not used in fitted model, xreg will be ignored") } xreg <- NULL } else { if (is.null(xreg)) { stop("No external regressors provided") } xreg <- as.matrix(xreg) if (NCOL(xreg) != NCOL(object$xreg)) { stop("Number of external regressors does not match fitted model") } if(!identical(colnames(xreg), colnames(object$xreg))){ warning("xreg contains different column names from the xreg used in training. 
Please check that the regressors are in the same order.") } h <- NROW(xreg) } fcast <- numeric(h) xx <- object$x xxreg <- xreg if (!is.null(lambda)) { xx <- BoxCox(xx, lambda) lambda <- attr(xx, "lambda") } # Check and apply scaling of fitted model if (!is.null(object$scalex)) { xx <- scale(xx, center = object$scalex$center, scale = object$scalex$scale) if (!is.null(xreg)) { xxreg <- scale(xreg, center = object$scalexreg$center, scale = object$scalexreg$scale) } } # Get lags used in fitted model lags <- object$lags maxlag <- max(lags) flag <- rev(tail(xx, n = maxlag)) # Iterative 1-step forecast for (i in 1:h) { newdata <- c(flag[lags], xxreg[i, ]) if (any(is.na(newdata))) { stop("I can't forecast when there are missing values near the end of the series.") } fcast[i] <- mean(sapply(object$model, predict, newdata = newdata)) flag <- c(fcast[i], flag[-maxlag]) } # Re-scale point forecasts if (!is.null(object$scalex)) { fcast <- fcast * object$scalex$scale + object$scalex$center } # Add ts properties fcast <- ts(fcast, start = tspx[2] + 1 / tspx[3], frequency = tspx[3]) # Back-transform point forecasts if (!is.null(lambda)) { fcast <- InvBoxCox(fcast, lambda) } # Compute prediction intervals using simulations if (isTRUE(PI)) { nint <- length(level) sim <- matrix(NA, nrow = npaths, ncol = h) if (!is.null(innov)) { if (length(innov) != h * npaths) { stop("Incorrect number of innovations, need h*npaths values") } innov <- matrix(innov, nrow = h, ncol = npaths) bootstrap <- FALSE } for (i in 1:npaths) sim[i, ] <- simulate(object, nsim = h, bootstrap = bootstrap, xreg = xreg, lambda = lambda, innov = innov[, i], ...) lower <- apply(sim, 2, quantile, 0.5 - level / 200, type = 8, na.rm = TRUE) upper <- apply(sim, 2, quantile, 0.5 + level / 200, type = 8, na.rm = TRUE) if (nint > 1L) { lower <- ts(t(lower)) upper <- ts(t(upper)) } else { lower <- ts(matrix(lower, ncol = 1L)) upper <- ts(matrix(upper, ncol = 1L)) } out$lower <- future_msts(out$x, lower) out$upper <- future_msts(out$x, upper) } else { level <- NULL lower <- NULL upper <- NULL } out$mean <- future_msts(out$x, fcast) out$level <- level return(structure(out, class = "forecast")) } #' @rdname fitted.Arima #' @export fitted.nnetar <- function(object, h=1, ...) { if (h == 1) { return(object$fitted) } else { return(hfitted(object = object, h = h, FUN = "nnetar", ...)) } } #' @export print.nnetar <- function(x, digits = max(3, getOption("digits") - 3), ...) 
{ cat("Series:", x$series, "\n") cat("Model: ", x$method, "\n") # cat(" one hidden layer with",x$size,"nodes\n") cat("Call: ") print(x$call) print(x$model) cat( "\nsigma^2 estimated as ", format(mean(residuals(x) ^ 2, na.rm = TRUE), digits = digits), "\n", sep = "" ) invisible(x) } #' @rdname is.ets #' @export is.nnetar <- function(x) { inherits(x, "nnetar") } #' @rdname is.ets #' @export is.nnetarmodels <- function(x) { inherits(x, "nnetarmodels") } # Scale a univariate time series scale.ts <- function(x, center=TRUE, scale=TRUE) { tspx <- tsp(x) x <- as.ts(scale.default(x, center = center, scale = scale)) tsp(x) <- tspx return(x) } forecast/R/components.R0000644000176200001440000000712314003673410014572 0ustar liggesusers# Functions to extract components from time series decomposition # These should match corresponding functions in the seasonal package # providing similar functional for stl, decomposed.ts and tbats objects #' Extract components from a time series decomposition #' #' Returns a univariate time series equal to either a seasonal component, #' trend-cycle component or remainder component from a time series #' decomposition. #' #' @param object Object created by \code{\link[stats]{decompose}}, #' \code{\link[stats]{stl}} or \code{\link{tbats}}. #' @return Univariate time series. #' @author Rob J Hyndman #' @seealso \code{\link[stats]{stl}}, \code{\link[stats]{decompose}}, #' \code{\link{tbats}}, \code{\link{seasadj}}. #' @keywords ts #' @examples #' plot(USAccDeaths) #' fit <- stl(USAccDeaths, s.window="periodic") #' lines(trendcycle(fit),col="red") #' #' library(ggplot2) #' autoplot(cbind( #' Data=USAccDeaths, #' Seasonal=seasonal(fit), #' Trend=trendcycle(fit), #' Remainder=remainder(fit)), #' facets=TRUE) + #' ylab("") + xlab("Year") #' #' @export seasonal <- function(object) { if ("mstl" %in% class(object)) { cols <- grep("Season", colnames(object)) return(object[, cols]) } else if ("stl" %in% class(object)) { return(object$time.series[, "seasonal"]) } else if ("decomposed.ts" %in% class(object)) { return(object$seasonal) } else if ("tbats" %in% class(object)) { comp <- tbats.components(object) scols <- grep("season", colnames(comp)) season <- ts(rowSums(comp[, scols, drop = FALSE])) if (!is.null(object$lambda)) { season <- InvBoxCox(season, object$lambda) } tsp(season) <- tsp(comp) return(season) } else if ("seas" %in% class(object)) { return(object$data[, "seasonal"]) } else { stop("Unknown object type") } } #' @rdname seasonal #' @export trendcycle <- function(object) { if ("mstl" %in% class(object)) { return(object[, "Trend"]) } else if ("stl" %in% class(object)) { return(object$time.series[, "trend"]) } else if ("decomposed.ts" %in% class(object)) { return(object$trend) } # else if("tbats" %in% class(object)) # { # trnd <- tbats.components(object)[,"level"] # if (!is.null(object$lambda)) # trnd <- InvBoxCox(trnd, object$lambda) # return(trnd) # } else if ("seas" %in% class(object)) { return(seasextract_w_na_action(object, "trend")) } else { stop("Unknown object type") } } #' @rdname seasonal #' @export remainder <- function(object) { if ("mstl" %in% class(object)) { return(object[, "Remainder"]) } else if ("stl" %in% class(object)) { return(object$time.series[, "remainder"]) } else if ("decomposed.ts" %in% class(object)) { return(object$random) } # else if("tbats" %in% class(object)) # { # comp <- tbats.components(object) # trnd <- comp[,"level"] # scols <- grep("season",colnames(comp)) # season <- rowSums(comp[,scols,drop=FALSE]) # irreg <- ts(comp[,'observed'] - 
trnd - season) # tsp(irreg) <- tsp(comp) # return(irreg) # } else if ("seas" %in% class(object)) { return(seasextract_w_na_action(object, "irregular")) } else { stop("Unknown object type") } } ## Copied from seasonal:::extract_w_na_action ## Importing is problematic due to issues with ARM processors seasextract_w_na_action <- function(x, name) { if (is.null(x$data)) { return(NULL) } z <- na.omit(x$data[, name]) if (!is.null(x$na.action)) { if (attr(x$na.action, "class") == "exclude") { z <- ts(stats::napredict(x$na.action, z)) tsp(z) <- tsp(x$x) } } z } forecast/R/dshw.r0000644000176200001440000002210414003673410013406 0ustar liggesusers#################################################################### ## Double Seasonal Holt Winters method as per Taylor (2003) ## Periods must be nested. ## y can be an msts object, or periods can be passed explicitly. #################################################################### #' Double-Seasonal Holt-Winters Forecasting #' #' Returns forecasts using Taylor's (2003) Double-Seasonal Holt-Winters method. #' #' Taylor's (2003) double-seasonal Holt-Winters method uses additive trend and #' multiplicative seasonality, where there are two seasonal components which #' are multiplied together. For example, with a series of half-hourly data, one #' would set \code{period1=48} for the daily period and \code{period2=336} for #' the weekly period. The smoothing parameter notation used here is different #' from that in Taylor (2003); instead it matches that used in Hyndman et al #' (2008) and that used for the \code{\link{ets}} function. #' #' @param y Either an \code{\link{msts}} object with two seasonal periods or a #' numeric vector. #' @param period1 Period of the shorter seasonal period. Only used if \code{y} #' is not an \code{\link{msts}} object. #' @param period2 Period of the longer seasonal period. Only used if \code{y} #' is not an \code{\link{msts}} object. #' @param h Number of periods for forecasting. #' @param alpha Smoothing parameter for the level. If \code{NULL}, the #' parameter is estimated using least squares. #' @param beta Smoothing parameter for the slope. If \code{NULL}, the parameter #' is estimated using least squares. #' @param gamma Smoothing parameter for the first seasonal period. If #' \code{NULL}, the parameter is estimated using least squares. #' @param omega Smoothing parameter for the second seasonal period. If #' \code{NULL}, the parameter is estimated using least squares. #' @param phi Autoregressive parameter. If \code{NULL}, the parameter is #' estimated using least squares. #' @param armethod If TRUE, the forecasts are adjusted using an AR(1) model for #' the errors. #' @param model If it's specified, an existing model is applied to a new data #' set. #' @inheritParams forecast #' #' @return An object of class "\code{forecast}" which is a list that includes the #' following elements: #' \item{model}{A list containing information about the fitted model} #' \item{method}{The name of the forecasting method as a character string} #' \item{mean}{Point forecasts as a time series} #' \item{x}{The original time series.} #' \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} #' \item{fitted}{Fitted values (one-step forecasts)} #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts. 
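#'
#' A minimal sketch of the half-hourly case described above (\code{x} is a
#' hypothetical numeric vector of half-hourly observations):
#' \preformatted{
#' y <- msts(x, seasonal.periods = c(48, 336))  # 336 = 7*48, so the periods are nested
#' fc <- dshw(y, h = 96)                        # forecast two days ahead
#' }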
#' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{dshw}. #' #' @author Rob J Hyndman #' @seealso \code{\link[stats]{HoltWinters}}, \code{\link{ets}}. #' @references Taylor, J.W. (2003) Short-term electricity demand forecasting #' using double seasonal exponential smoothing. \emph{Journal of the #' Operational Research Society}, \bold{54}, 799-805. #' #' Hyndman, R.J., Koehler, A.B., Ord, J.K., and Snyder, R.D. (2008) #' \emph{Forecasting with exponential smoothing: the state space approach}, #' Springer-Verlag. \url{http://www.exponentialsmoothing.net}. #' @keywords ts #' @examples #' #' \dontrun{ #' fcast <- dshw(taylor) #' plot(fcast) #' #' t <- seq(0,5,by=1/20) #' x <- exp(sin(2*pi*t) + cos(2*pi*t*4) + rnorm(length(t),0,.1)) #' fit <- dshw(x,20,5) #' plot(fit) #' } #' #' @export dshw <- function(y, period1=NULL, period2=NULL, h=2 * max(period1, period2), alpha=NULL, beta=NULL, gamma=NULL, omega=NULL, phi=NULL, lambda=NULL, biasadj=FALSE, armethod=TRUE, model = NULL) { if (min(y, na.rm = TRUE) <= 0) { stop("dshw not suitable when data contain zeros or negative numbers") } seriesname <- deparse(substitute(y)) if (!is.null(model) && model$method == "DSHW") { period1 <- model$period1 period2 <- model$period2 } else if (inherits(y, "msts") && (length(attr(y, "msts")) == 2)) { period1 <- as.integer(sort(attr(y, "msts"))[1]) period2 <- as.integer(sort(attr(y, "msts"))[2]) } else if (is.null(period1) || is.null(period2)) { stop("Error in dshw(): y must either be an msts object with two seasonal periods OR the seasonal periods should be specified with period1= and period2=") } else { if (period1 > period2) { tmp <- period2 period2 <- period1 period1 <- tmp } } if (any(class(y) != "msts")) { y <- msts(y, c(period1, period2)) } if (length(y) < 2 * max(period2)) { stop("Insufficient data to estimate model") } if (!armethod) { phi <- 0 } if (period1 < 1 || period1 == period2) { stop("Inappropriate periods") } ratio <- period2 / period1 if (ratio - trunc(ratio) > 1e-10) { stop("Seasonal periods are not nested") } if (!is.null(model)) { lambda <- model$model$lambda } if (!is.null(lambda)) { origy <- y y <- BoxCox(y, lambda) lambda <- attr(y, "lambda") } if (!is.null(model)) { pars <- model$model alpha <- pars$alpha beta <- pars$beta gamma <- pars$gamma omega <- pars$omega phi <- pars$phi } else { pars <- rep(NA, 5) if (!is.null(alpha)) { pars[1] <- alpha } if (!is.null(beta)) { pars[2] <- beta } if (!is.null(gamma)) { pars[3] <- gamma } if (!is.null(omega)) { pars[4] <- omega } if (!is.null(phi)) { pars[5] <- phi } } # Estimate parameters if (sum(is.na(pars)) > 0) { pars <- par_dshw(y, period1, period2, pars) alpha <- pars[1] beta <- pars[2] gamma <- pars[3] omega <- pars[4] phi <- pars[5] } ## Allocate space n <- length(y) yhat <- numeric(n) ## Starting values I <- seasindex(y, period1) wstart <- seasindex(y, period2) wstart <- wstart / rep(I, ratio) w <- wstart x <- c(0, diff(y[1:period2])) t <- t.start <- mean(((y[1:period2] - y[(period2 + 1):(2 * period2)]) / period2) + x) / 2 s <- s.start <- (mean(y[1:(2 * period2)]) - (period2 + 0.5) * t) ## In-sample fit for (i in 1:n) { yhat[i] <- (s + t) * I[i] * w[i] snew <- alpha * (y[i] / (I[i] * w[i])) + (1 - alpha) * (s + t) tnew <- beta * (snew - s) + (1 - beta) * t I[i + period1] <- gamma * (y[i] / (snew * w[i])) + (1 - gamma) * I[i] w[i + period2] <- omega * (y[i] / (snew * I[i])) + (1 - omega) * w[i] s <- snew t <- tnew } # Forecasts fcast <- (s + (1:h) * t) * 
rep(I[n + (1:period1)], h / period1 + 1)[1:h] * rep(w[n + (1:period2)], h / period2 + 1)[1:h] fcast <- msts(fcast, c(period1, period2), start = tsp(y)[2] + 1 / tsp(y)[3]) # Calculate MSE and MAPE yhat <- ts(yhat) tsp(yhat) <- tsp(y) yhat <- msts(yhat, c(period1, period2)) e <- y - yhat e <- msts(e, c(period1, period2)) if (armethod) { yhat <- yhat + phi * c(0, e[-n]) fcast <- fcast + phi ^ (1:h) * e[n] e <- y - yhat } mse <- mean(e ^ 2) mape <- mean(abs(e) / y) * 100 end.y <- end(y) if (end.y[2] == frequency(y)) { end.y[1] <- end.y[1] + 1 end.y[2] <- 1 } else { end.y[2] <- end.y[2] + 1 } fcast <- msts(fcast, c(period1, period2)) if (!is.null(lambda)) { y <- origy fcast <- InvBoxCox(fcast, lambda, biasadj, var(e)) attr(lambda, "biasadj") <- biasadj # Does this also need a biasadj backtransform? yhat <- InvBoxCox(yhat, lambda) } return(structure(list( mean = fcast, method = "DSHW", x = y, residuals = e, fitted = yhat, series = seriesname, model = list( mape = mape, mse = mse, alpha = alpha, beta = beta, gamma = gamma, omega = omega, phi = phi, lambda = lambda, l0 = s.start, b0 = t.start, s10 = wstart, s20 = I ), period1 = period1, period2 = period2 ), class = "forecast")) } ### Double Seasonal Holt-Winters smoothing parameter optimization par_dshw <- function(y, period1, period2, pars) { start <- c(0.1, 0.01, 0.001, 0.001, 0.0)[is.na(pars)] out <- optim(start, dshw.mse, y = y, period1 = period1, period2 = period2, pars = pars) pars[is.na(pars)] <- out$par return(pars) } dshw.mse <- function(par, y, period1, period2, pars) { pars[is.na(pars)] <- par if (max(pars) > 0.99 | min(pars) < 0 | pars[5] > .9) { return(Inf) } else { return(dshw(y, period1, period2, h = 1, pars[1], pars[2], pars[3], pars[4], pars[5], armethod = (abs(pars[5]) > 1e-7))$model$mse) } } ### Calculating seasonal indexes seasindex <- function(y, p) { n <- length(y) n2 <- 2 * p shorty <- y[1:n2] average <- numeric(n) simplema <- zoo::rollmean.default(shorty, p) if (identical(p %% 2, 0)) # Even order { centeredma <- zoo::rollmean.default(simplema[1:(n2 - p + 1)], 2) average[p / 2 + 1:p] <- shorty[p / 2 + 1:p] / centeredma[1:p] si <- average[c(p + (1:(p / 2)), (1 + p / 2):p)] } else # Odd order { average[(p - 1) / 2 + 1:p] <- shorty[(p - 1) / 2 + 1:p] / simplema[1:p] si <- average[c(p + (1:((p - 1) / 2)), (1 + (p - 1) / 2):p)] } return(si) } forecast/R/mforecast.R0000644000176200001440000002532314053117547014403 0ustar liggesusers#' @rdname is.forecast #' @export is.mforecast <- function(x) { inherits(x, "mforecast") } mlmsplit <- function(x, index=NULL) { if (is.null(index)) { stop("Must select lm using index=integer(1)") } mfit <- match(c("coefficients", "residuals", "effects", "fitted.values"), names(x), 0L) for (j in mfit) { x[[j]] <- x[[j]][, index] } class(x) <- "lm" y <- attr(x$terms, "response") yName <- make.names(colnames(x$model[[y]])[index]) x$model[[y]] <- x$model[[y]][, index] colnames(x$model)[y] <- yName attr(x$model, "terms") <- terms(reformulate(attr(x$terms, "term.labels"), response = yName), data = x$model) if (!is.null(tsp(x$data[, 1]))) { tspx <- tsp(x$data[, 1]) # Consolidate ts attributes for forecast.lm x$data <- lapply(x$model, function(x) ts(x, start = tspx[1], end = tspx[2], frequency = tspx[3])) class(x$data) <- "data.frame" row.names(x$data) <- 1:max(sapply(x$data, NROW)) } x$terms <- terms(x$model) return(x) } #' Forecast a multiple linear model with possible time series components #' #' \code{forecast.mlm} is used to predict multiple linear models, especially #' those involving trend and 
seasonality components. #' #' \code{forecast.mlm} is largely a wrapper for #' \code{\link[forecast]{forecast.lm}()} except that it allows forecasts to be #' generated on multiple series. Also, the output is reformatted into a #' \code{mforecast} object. #' #' @param object Object of class "mlm", usually the result of a call to #' \code{\link[stats]{lm}} or \code{\link{tslm}}. #' @param newdata An optional data frame in which to look for variables with #' which to predict. If omitted, it is assumed that the only variables are #' trend and season, and \code{h} forecasts are produced. #' @param level Confidence level for prediction intervals. #' @param fan If \code{TRUE}, level is set to seq(51,99,by=3). This is suitable #' for fan plots. #' @param h Number of periods for forecasting. Ignored if \code{newdata} #' present. #' @param ts If \code{TRUE}, the forecasts will be treated as time series #' provided the original data is a time series; the \code{newdata} will be #' interpreted as related to the subsequent time periods. If \code{FALSE}, any #' time series attributes of the original data will be ignored. #' @param ... Other arguments passed to \code{\link[forecast]{forecast.lm}()}. #' @inheritParams forecast #' #' @return An object of class "\code{mforecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{forecast.lm}. #' #' An object of class \code{"mforecast"} is a list containing at least the #' following elements: \item{model}{A list containing information about the #' fitted model} \item{method}{The name of the forecasting method as a #' character string} \item{mean}{Point forecasts as a multivariate time series} #' \item{lower}{Lower limits for prediction intervals of each series} #' \item{upper}{Upper limits for prediction intervals of each series} #' \item{level}{The confidence values associated with the prediction intervals} #' \item{x}{The historical data for the response variable.} #' \item{residuals}{Residuals from the fitted model. That is x minus fitted #' values.} \item{fitted}{Fitted values} #' @author Mitchell O'Hara-Wild #' @seealso \code{\link{tslm}}, \code{\link{forecast.lm}}, #' \code{\link[stats]{lm}}. #' @examples #' #' lungDeaths <- cbind(mdeaths, fdeaths) #' fit <- tslm(lungDeaths ~ trend + season) #' fcast <- forecast(fit, h=10) #' #' carPower <- as.matrix(mtcars[,c("qsec","hp")]) #' carmpg <- mtcars[,"mpg"] #' fit <- lm(carPower ~ carmpg) #' fcast <- forecast(fit, newdata=data.frame(carmpg=30)) #' #' @export forecast.mlm <- function(object, newdata, h=10, level=c(80, 95), fan=FALSE, lambda=object$lambda, biasadj=NULL, ts=TRUE, ...) { out <- list(model = object, forecast = vector("list", NCOL(object$coefficients))) cl <- match.call() cl[[1]] <- quote(forecast.lm) cl$object <- quote(mlmsplit(object, index = i)) for (i in seq_along(out$forecast)) { out$forecast[[i]] <- eval(cl) out$forecast[[i]]$series <- colnames(object$coefficients)[i] } out$method <- rep("Multiple linear regression model", length(out$forecast)) names(out$forecast) <- names(out$method) <- colnames(object$coefficients) return(structure(out, class = "mforecast")) } #' Forecasting time series #' #' \code{mforecast} is a class of objects for forecasting from multivariate #' time series or multivariate time series models. 
The function invokes #' particular \emph{methods} which depend on the class of the first argument. #' #' For example, the function \code{\link{forecast.mlm}} makes multivariate #' forecasts based on the results produced by \code{\link{tslm}}. #' #' @aliases mforecast print.mforecast summary.mforecast as.data.frame.mforecast #' #' @param object a multivariate time series or multivariate time series model #' for which forecasts are required #' @param h Number of periods for forecasting #' @param level Confidence level for prediction intervals. #' @param fan If TRUE, \code{level} is set to \code{seq(51,99,by=3)}. This is #' suitable for fan plots. #' @param robust If TRUE, the function is robust to missing values and outliers #' in \code{object}. This argument is only valid when \code{object} is of class #' \code{mts}. #' @param find.frequency If TRUE, the function determines the appropriate #' period, if the data is of unknown period. #' @param allow.multiplicative.trend If TRUE, then ETS models with #' multiplicative trends are allowed. Otherwise, only additive or no trend ETS #' models are permitted. #' @param ... Additional arguments affecting the forecasts produced. #' @inheritParams forecast #' @return An object of class "\code{mforecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the multivariate #' forecasts and prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract various useful features of the value returned by #' \code{forecast$model}. #' #' An object of class \code{"mforecast"} is a list usually containing at least #' the following elements: \item{model}{A list containing information about the #' fitted model} \item{method}{The name of the forecasting method as a #' character string} \item{mean}{Point forecasts as a time series} #' \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper #' limits for prediction intervals} \item{level}{The confidence values #' associated with the prediction intervals} \item{x}{The original time series #' (either \code{object} itself or the time series used to create the model #' stored as \code{object}).} \item{residuals}{Residuals from the fitted model. #' For models with additive errors, the residuals will be x minus the fitted #' values.} \item{fitted}{Fitted values (one-step forecasts)} #' @author Rob J Hyndman & Mitchell O'Hara-Wild #' @seealso Other functions which return objects of class \code{"mforecast"} #' are \code{\link{forecast.mlm}}, \code{forecast.varest}. #' #' @export forecast.mts <- function(object, h=ifelse(frequency(object) > 1, 2 * frequency(object), 10), level=c(80, 95), fan=FALSE, robust=FALSE, lambda = NULL, biasadj = FALSE, find.frequency = FALSE, allow.multiplicative.trend=FALSE, ...) { out <- list(forecast = vector("list", NCOL(object))) cl <- match.call() cl[[1]] <- quote(forecast.ts) cl$object <- quote(object[, i]) for (i in 1:NCOL(object)) { out$forecast[[i]] <- eval(cl) out$forecast[[i]]$series <- colnames(object)[i] } out$method <- vapply(out$forecast, function(x) x$method, character(1)) names(out$forecast) <- names(out$method) <- colnames(object) return(structure(out, class = "mforecast")) } #' @export print.mforecast <- function(x, ...)
{ lapply(x$forecast, function(x) { cat(paste0(x$series, "\n")) print(x) cat("\n") }) return(invisible()) } #' Multivariate forecast plot #' #' Plots historical data with multivariate forecasts and prediction intervals. #' #' \code{autoplot} will produce an equivalent plot as a ggplot object. #' #' @param x Multivariate forecast object of class \code{mforecast}. #' @param object Multivariate forecast object of class \code{mforecast}. Used #' for ggplot graphics (S3 method consistency). #' @param main Main title. Default is the forecast method. For autoplot, #' specify a vector of titles for each plot. #' @param xlab X-axis label. For autoplot, specify a vector of labels for each #' plot. #' @param PI If \code{FALSE}, confidence intervals will not be plotted, giving #' only the forecast line. #' @param facets If TRUE, multiple time series will be faceted. If FALSE, each #' series will be assigned a colour. #' @param colour If TRUE, the time series will be assigned a colour aesthetic #' @param series Matches an unidentified forecast layer with a coloured object #' on the plot. #' @param \dots additional arguments to each individual \code{plot}. #' @author Mitchell O'Hara-Wild #' @seealso \code{\link[forecast]{plot.forecast}}, \code{\link[stats]{plot.ts}} #' @references Hyndman and Athanasopoulos (2018) \emph{Forecasting: principles #' and practice}, 2nd edition, OTexts: Melbourne, Australia. #' \url{https://otexts.com/fpp2/} #' @keywords ts #' @examples #' library(ggplot2) #' #' lungDeaths <- cbind(mdeaths, fdeaths) #' fit <- tslm(lungDeaths ~ trend + season) #' fcast <- forecast(fit, h=10) #' plot(fcast) #' autoplot(fcast) #' #' carPower <- as.matrix(mtcars[,c("qsec","hp")]) #' carmpg <- mtcars[,"mpg"] #' fit <- lm(carPower ~ carmpg) #' fcast <- forecast(fit, newdata=data.frame(carmpg=30)) #' plot(fcast, xlab="Year") #' autoplot(fcast, xlab=rep("Year",2)) #' #' @export plot.mforecast <- function(x, main=paste("Forecasts from", unique(x$method)), xlab="time", ...) { oldpar <- par(mfrow = c(length(x$forecast), 1), mar = c(0, 5.1, 0, 2.1), oma = c(6, 0, 5, 0)) on.exit(par(oldpar)) for (fcast in x$forecast) { plot(fcast, main = "", xaxt = "n", ylab = fcast$series, ...) } axis(1) mtext(xlab, outer = TRUE, side = 1, line = 3) title(main = main, outer = TRUE) } #' @export summary.mforecast <- function(object, ...) { class(object) <- c("summary.mforecast", class(object)) object } #' @export print.summary.mforecast <- function(x, ...) 
{ cat(paste("\nForecast method:", unique(x$method))) cat(paste("\n\nModel Information:\n")) print(x$model) cat("\nError measures:\n") print(accuracy(x)) if (is.null(x$forecast)) { cat("\n No forecasts\n") } else { cat("\nForecasts:\n") NextMethod() } } forecast/R/forecast-package.R0000644000176200001440000000235614003673410015607 0ustar liggesusers#' @import parallel #' @import Rcpp #' #' @importFrom colorspace sequential_hcl #' @importFrom fracdiff fracdiff diffseries fracdiff.sim #' @importFrom tseries adf.test pp.test kpss.test #' @importFrom zoo rollmean as.Date as.yearqtr #' @importFrom timeDate as.timeDate isBizday difftimeDate Easter as.Date.timeDate #' @importFrom nnet nnet #' @importFrom grDevices gray heat.colors nclass.FD palette #' @importFrom graphics abline axis grid layout lines mtext par plot points polygon text title hist #' @importFrom stats Box.test acf approx ar arima arima.sim as.ts complete.cases cycle decompose diffinv end extractAIC fitted formula frequency window filter na.contiguous spec.ar hatvalues is.ts ksmooth lm lsfit loess median model.frame na.exclude na.omit na.pass optim optimize pf plot.ts poly predict pt qnorm qt quantile residuals rnorm runif sd simulate smooth.spline start stl supsmu terms time ts tsp tsp<- tsdiag var logLik nobs napredict #' @importFrom stats aggregate as.formula is.mts reformulate #' @importFrom utils packageVersion tail head #' @importFrom ggplot2 autoplot fortify #' @importFrom lmtest bgtest #' @importFrom stats supsmu #' @importFrom magrittr %>% #' #' @useDynLib forecast, .registration = TRUE NULL #' @export magrittr::`%>%` forecast/R/etsforecast.R0000644000176200001440000002635114133702473014741 0ustar liggesusers#' Forecasting using ETS models #' #' Returns forecasts and other information for univariate ETS models. #' #' #' @param object An object of class "\code{ets}". Usually the result of a call #' to \code{\link{ets}}. #' @param h Number of periods for forecasting #' @param level Confidence level for prediction intervals. #' @param fan If TRUE, level is set to seq(51,99,by=3). This is suitable for #' fan plots. #' @param simulate If TRUE, prediction intervals are produced by simulation rather #' than using analytic formulae. Errors are assumed to be normally distributed. #' @param bootstrap If TRUE, then prediction intervals are produced by simulation using #' resampled errors (rather than normally distributed errors). #' @param npaths Number of sample paths used in computing simulated prediction #' intervals. #' @param PI If TRUE, prediction intervals are produced, otherwise only point #' forecasts are calculated. If \code{PI} is FALSE, then \code{level}, #' \code{fan}, \code{simulate}, \code{bootstrap} and \code{npaths} are all #' ignored. #' @param ... Other arguments. #' @inheritParams forecast #' @return An object of class "\code{forecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{forecast.ets}. 
#' #' An object of class \code{"forecast"} is a list containing at least the #' following elements: \item{model}{A list containing information about the #' fitted model} \item{method}{The name of the forecasting method as a #' character string} \item{mean}{Point forecasts as a time series} #' \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper #' limits for prediction intervals} \item{level}{The confidence values #' associated with the prediction intervals} \item{x}{The original time series #' (either \code{object} itself or the time series used to create the model #' stored as \code{object}).} \item{residuals}{Residuals from the fitted model. #' For models with additive errors, the residuals are x - fitted values. For #' models with multiplicative errors, the residuals are equal to x /(fitted #' values) - 1.} \item{fitted}{Fitted values (one-step forecasts)} #' @author Rob J Hyndman #' @seealso \code{\link{ets}}, \code{\link{ses}}, \code{\link{holt}}, #' \code{\link{hw}}. #' @keywords ts #' @examples #' fit <- ets(USAccDeaths) #' plot(forecast(fit,h=48)) #' #' @export #' @export forecast.ets forecast.ets <- function(object, h=ifelse(object$m > 1, 2 * object$m, 10), level=c(80, 95), fan=FALSE, simulate=FALSE, bootstrap=FALSE, npaths=5000, PI=TRUE, lambda=object$lambda, biasadj=NULL, ...) { # Check inputs # if(h>2000 | h<=0) if (h <= 0) { stop("Forecast horizon out of bounds") } if (is.null(lambda)) { biasadj <- FALSE } else { if (is.null(biasadj)) { biasadj <- attr(lambda, "biasadj") } if (!is.logical(biasadj)) { warning("biasadj information not found, defaulting to FALSE.") biasadj <- FALSE } } if (!PI && !biasadj) { simulate <- bootstrap <- fan <- FALSE if (!biasadj) { npaths <- 2 } # Just to avoid errors level <- 90 } if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 && max(level) < 1) { level <- 100 * level } else if (min(level) < 0 || max(level) > 99.99) { stop("Confidence limit out of range") } } # Order levels level <- sort(level) n <- length(object$x) damped <- as.logical(object$components[4]) if (bootstrap) { simulate <- TRUE } if (simulate) { f <- pegelsfcast.C(h, object, level = level, bootstrap = bootstrap, npaths = npaths) } else if (object$components[1] == "A" && is.element(object$components[2], c("A", "N")) && is.element(object$components[3], c("N", "A"))) { f <- class1(h, object$states[n + 1, ], object$components[2], object$components[3], damped, object$m, object$sigma2, object$par) } else if (object$components[1] == "M" && is.element(object$components[2], c("A", "N")) && is.element(object$components[3], c("N", "A"))) { f <- class2(h, object$states[n + 1, ], object$components[2], object$components[3], damped, object$m, object$sigma2, object$par) } else if (object$components[1] == "M" && object$components[3] == "M" && object$components[2] != "M") { f <- class3(h, object$states[n + 1, ], object$components[2], object$components[3], damped, object$m, object$sigma2, object$par) } else { f <- pegelsfcast.C(h, object, level = level, bootstrap = bootstrap, npaths = npaths) } tsp.x <- tsp(object$x) if (!is.null(tsp.x)) { start.f <- tsp(object$x)[2] + 1 / object$m } else { start.f <- length(object$x) + 1 } out <- list( model = object, mean = future_msts(object$x, f$mu), level = level, x = object$x ) if (PI || biasadj) { if (!is.null(f$var)) { out$lower <- out$upper <- ts(matrix(NA, ncol = length(level), nrow = h)) colnames(out$lower) <- colnames(out$upper) <- paste(level, "%", sep = "") for (i in 1:length(level)) { marg.error <- sqrt(f$var) * 
abs(qnorm((100 - level[i]) / 200)) out$lower[, i] <- out$mean - marg.error out$upper[, i] <- out$mean + marg.error } out$lower <- copy_msts(out$mean, out$lower) out$upper <- copy_msts(out$mean, out$upper) } else if (!is.null(f$lower)) { out$lower <- copy_msts(out$mean, f$lower) out$upper <- copy_msts(out$mean, f$upper) } else if (PI) { warning("No prediction intervals for this model") } else if (any(biasadj)) { warning("No bias adjustment possible") } } out$fitted <- copy_msts(object$x, fitted(object)) out$method <- object$method if (!is.null(object$series)) { out$series <- object$series } else { out$series <- object$call$y } out$residuals <- copy_msts(object$x, residuals(object)) if (!is.null(lambda)) { # out$x <- InvBoxCox(object$x,lambda) # out$fitted <- InvBoxCox(out$fitted,lambda) out$mean <- InvBoxCox(out$mean, lambda, biasadj, out) if (PI) # PI = TRUE { out$lower <- InvBoxCox(out$lower, lambda) out$upper <- InvBoxCox(out$upper, lambda) } } if (!PI) { out$lower <- out$upper <- out$level <- NULL } return(structure(out, class = "forecast")) } pegelsfcast.C <- function(h, obj, npaths, level, bootstrap) { y.paths <- matrix(NA, nrow = npaths, ncol = h) obj$lambda <- NULL # No need to transform these here as we do it later. for (i in 1:npaths) y.paths[i, ] <- simulate.ets(obj, h, future = TRUE, bootstrap = bootstrap) y.f <- .C( "etsforecast", as.double(obj$states[length(obj$x) + 1, ]), as.integer(obj$m), as.integer(switch(obj$components[2], "N" = 0, "A" = 1, "M" = 2)), as.integer(switch(obj$components[3], "N" = 0, "A" = 1, "M" = 2)), as.double(ifelse(obj$components[4] == "FALSE", 1, obj$par["phi"])), as.integer(h), as.double(numeric(h)), PACKAGE = "forecast" )[[7]] if (abs(y.f[1] + 99999) < 1e-7) { stop("Problem with multiplicative damped trend") } lower <- apply(y.paths, 2, quantile, 0.5 - level / 200, type = 8, na.rm = TRUE) upper <- apply(y.paths, 2, quantile, 0.5 + level / 200, type = 8, na.rm = TRUE) if (length(level) > 1) { lower <- t(lower) upper <- t(upper) } return(list(mu = y.f, lower = lower, upper = upper)) } class1 <- function(h, last.state, trendtype, seasontype, damped, m, sigma2, par) { p <- length(last.state) H <- matrix(c(1, rep(0, p - 1)), nrow = 1) if (seasontype == "A") { H[1, p] <- 1 } if (trendtype == "A") { if (damped) { H[1, 2] <- par["phi"] } else { H[1, 2] <- 1 } } F <- matrix(0, p, p) F[1, 1] <- 1 if (trendtype == "A") { if (damped) { F[1, 2] <- F[2, 2] <- par["phi"] } else { F[1, 2] <- F[2, 2] <- 1 } } if (seasontype == "A") { F[p - m + 1, p] <- 1 F[(p - m + 2):p, (p - m + 1):(p - 1)] <- diag(m - 1) } G <- matrix(0, nrow = p, ncol = 1) G[1, 1] <- par["alpha"] if (trendtype == "A") { G[2, 1] <- par["beta"] } if (seasontype == "A") { G[3, 1] <- par["gamma"] } mu <- numeric(h) Fj <- diag(p) cj <- numeric(h - 1) if (h > 1) { for (i in 1:(h - 1)) { mu[i] <- H %*% Fj %*% last.state cj[i] <- H %*% Fj %*% G Fj <- Fj %*% F } cj2 <- cumsum(cj ^ 2) var <- sigma2 * c(1, 1 + cj2) } else { var <- sigma2 } mu[h] <- H %*% Fj %*% last.state return(list(mu = mu, var = var, cj = cj)) } class2 <- function(h, last.state, trendtype, seasontype, damped, m, sigma2, par) { tmp <- class1(h, last.state, trendtype, seasontype, damped, m, sigma2, par) theta <- numeric(h) theta[1] <- tmp$mu[1] ^ 2 if (h > 1) { for (j in 2:h) theta[j] <- tmp$mu[j] ^ 2 + sigma2 * sum(tmp$cj[1:(j - 1)] ^ 2 * theta[(j - 1):1]) } var <- (1 + sigma2) * theta - tmp$mu ^ 2 return(list(mu = tmp$mu, var = var)) } class3 <- function(h, last.state, trendtype, seasontype, damped, m, sigma2, par) { p <- 
length(last.state) H1 <- matrix(rep(1, 1 + (trendtype != "N")), nrow = 1) H2 <- matrix(c(rep(0, m - 1), 1), nrow = 1) if (trendtype == "N") { F1 <- 1 G1 <- par["alpha"] } else { F1 <- rbind(c(1, 1), c(0, ifelse(damped, par["phi"], 1))) G1 <- rbind(c(par["alpha"], par["alpha"]), c(par["beta"], par["beta"])) } F2 <- rbind(c(rep(0, m - 1), 1), cbind(diag(m - 1), rep(0, m - 1))) G2 <- matrix(0, m, m) G2[1, m] <- par["gamma"] Mh <- matrix(last.state[1:(p - m)]) %*% matrix(last.state[(p - m + 1):p], nrow = 1) Vh <- matrix(0, length(Mh), length(Mh)) H21 <- H2 %x% H1 F21 <- F2 %x% F1 G21 <- G2 %x% G1 K <- (G2 %x% F1) + (F2 %x% G1) mu <- var <- numeric(h) for (i in 1:h) { mu[i] <- H1 %*% Mh %*% t(H2) var[i] <- (1 + sigma2) * H21 %*% Vh %*% t(H21) + sigma2 * mu[i] ^ 2 vecMh <- c(Mh) Vh <- F21 %*% Vh %*% t(F21) + sigma2 * (F21 %*% Vh %*% t(G21) + G21 %*% Vh %*% t(F21) + K %*% (Vh + vecMh %*% t(vecMh)) %*% t(K) + sigma2 * G21 %*% (3 * Vh + 2 * vecMh %*% t(vecMh)) %*% t(G21)) Mh <- F1 %*% Mh %*% t(F2) + G1 %*% Mh %*% t(G2) * sigma2 } return(list(mu = mu, var = var)) } # ses <- function(x,h=10,level=c(80,95),fan=FALSE,...) # { # fcast <- forecast(ets(x,"ANN"),h,level=level,fan=fan,...) # fcast$method <- "Simple exponential smoothing" # fcast$model$call <- match.call() # return(fcast) # } # holt <- function(x,h=10, damped=FALSE, level=c(80,95), fan=FALSE, ...) # { # junk <- forecast(ets(x,"AAN",damped=damped),h,level=level,fan=fan,...) # if(damped) # junk$method <- "Damped Holt's method" # else # junk$method <- "Holt's method" # junk$model$call <- match.call() # return(junk) # } # hw <- function(x,h=2*frequency(x),seasonal="additive",damped=FALSE,level=c(80,95), fan=FALSE, ...) # { # if(seasonal=="additive") # { # junk <- forecast(ets(x,"AAA",damped=damped),h,level=level,fan=fan,...) # junk$method <- "Holt-Winters' additive method" # } # else # { # junk <- forecast(ets(x,"MAM",damped=damped),h,level=level,fan=fan,...) # junk$method <- "Holt-Winters' multiplicative method" # } # junk$model$call <- match.call() # return(junk) # } forecast/R/checkresiduals.R0000644000176200001440000001150614003673410015376 0ustar liggesusers#' Check that residuals from a time series model look like white noise #' #' If \code{plot=TRUE}, produces a time plot of the residuals, the #' corresponding ACF, and a histogram. If the degrees of freedom for the model #' can be determined and \code{test} is not \code{FALSE}, the output from #' either a Ljung-Box test or Breusch-Godfrey test is printed. #' #' #' @param object Either a time series model, a forecast object, or a time #' series (assumed to be residuals). #' @param lag Number of lags to use in the Ljung-Box or Breusch-Godfrey test. #' If missing, it is set to \code{min(10,n/5)} for non-seasonal data, and #' \code{min(2m, n/5)} for seasonal data, where \code{n} is the length of the series, #' and \code{m} is the seasonal period of the data. It is further constrained to be #' at least \code{df+3} where \code{df} is the degrees of freedom of the model. This #' ensures there are at least 3 degrees of freedom used in the chi-squared test. #' @param df Number of degrees of freedom for fitted model, required for the #' Ljung-Box or Breusch-Godfrey test. Ignored if the degrees of freedom can be #' extracted from \code{object}. #' @param test Test to use for serial correlation. By default, if \code{object} #' is of class \code{lm}, then \code{test="BG"}. Otherwise, \code{test="LB"}. #' Setting \code{test=FALSE} will prevent the test results being printed. #' @param plot Logical. 
If \code{TRUE}, will produce the plot. #' @param ... Other arguments are passed to \code{\link{ggtsdisplay}}. #' @return None #' @author Rob J Hyndman #' @seealso \code{\link{ggtsdisplay}}, \code{\link[stats]{Box.test}}, #' \code{\link[lmtest]{bgtest}} #' @examples #' #' fit <- ets(WWWusage) #' checkresiduals(fit) #' #' @export checkresiduals <- function(object, lag, df=NULL, test, plot=TRUE, ...) { showtest <- TRUE if (missing(test)) { if (is.element("lm", class(object))) { test <- "BG" } else { test <- "LB" } showtest <- TRUE } else if (test != FALSE) { test <- match.arg(test, c("LB", "BG")) showtest <- TRUE } else { showtest <- FALSE } # Extract residuals if (is.element("ts", class(object)) | is.element("numeric", class(object))) { residuals <- object object <- list(method = "Missing") } else { residuals <- residuals(object) } if (length(residuals) == 0L) { stop("No residuals found") } if ("ar" %in% class(object)) { method <- paste("AR(", object$order, ")", sep = "") } else if (!is.null(object$method)) { method <- object$method } else if ("HoltWinters" %in% class(object)) { method <- "HoltWinters" } else if ("StructTS" %in% class(object)) { method <- "StructTS" } else { method <- try(as.character(object), silent = TRUE) if ("try-error" %in% class(method)) { method <- "Missing" } else if (length(method) > 1 | base::nchar(method[1]) > 50) { method <- "Missing" } } if (method == "Missing") { main <- "Residuals" } else { main <- paste("Residuals from", method) } if (plot) { suppressWarnings(ggtsdisplay(residuals, plot.type = "histogram", main = main, ...)) } # Check if we have the model if (is.element("forecast", class(object))) { object <- object$model } if (is.null(object) | !showtest) { return(invisible()) } # Seasonality of data freq <- frequency(residuals) # Find model df if(grepl("STL \\+ ", method)){ warning("The fitted degrees of freedom is based on the model used for the seasonally adjusted data.") } df <- modeldf(object) if (missing(lag)) { lag <- ifelse(freq > 1, 2 * freq, 10) lag <- min(lag, round(length(residuals)/5)) lag <- max(df+3, lag) } if (!is.null(df)) { if (test == "BG") { # Do Breusch-Godfrey test BGtest <- lmtest::bgtest(object, order = lag) BGtest$data.name <- main #print(BGtest) return(BGtest) } else { # Do Ljung-Box test LBtest <- Box.test(zoo::na.approx(residuals), fitdf = df, lag = lag, type = "Ljung") LBtest$method <- "Ljung-Box test" LBtest$data.name <- main names(LBtest$statistic) <- "Q*" print(LBtest) cat(paste("Model df: ", df, ". Total lags used: ", lag, "\n\n", sep = "")) return(invisible(LBtest)) } } } modeldf <- function(object, ...){ UseMethod("modeldf") } modeldf.default <- function(object, ...){ warning("Could not find appropriate degrees of freedom for this model.") NULL } modeldf.ets <- function(object, ...){ length(object$par) } modeldf.Arima <- function(object, ...){ length(object$coef) } modeldf.bats <- function(object, ...){ length(object$parameters$vect) + NROW(object$seed.states) } modeldf.lm <- function(object, ...){ length(object$coefficients) } modeldf.lagwalk <- function(object, ...){ as.numeric(object$par$includedrift) } modeldf.meanf <- function(object, ...){ 1 } forecast/R/wrangle.R0000644000176200001440000000320114003673410014035 0ustar liggesuserstoMat <- function(x) { if (NCOL(x) > 1 && !is.matrix(x)) { x <- matrix(x, ncol = NCOL(x)) } return(x) } # Converts arguments into data.frame, whilst retaining mts/ts/matrix properties datamat <- function(..., flatten=TRUE, functions=TRUE) { vars <- list(...) 
if (length(vars) == 0) { return(data.frame()) } if (!is.null(names(vars))) { names(vars)[!nzchar(names(vars))] <- as.character(substitute(list(...))[-1])[!nzchar(names(vars))] } else { names(vars) <- as.character(substitute(list(...))[-1]) } if (flatten) { i <- 1 while (i <= length(vars)) { if (is.data.frame(vars[[i]])) { vars <- c(vars, c(vars[[i]])) # Append data.frame components vars[[i]] <- NULL # Remove data.frame } else if (is.matrix(vars[[i]])) { for (j in 1:NCOL(vars[[i]])) { vars[[length(vars) + 1]] <- vars[[i]][, j] names(vars)[length(vars)] <- make.names(colnames(vars[[i]])[j]) } i <- i + 1 } else { i <- i + 1 } } } class(vars) <- "data.frame" row.names(vars) <- 1:max(sapply(vars, NROW)) # if(is.ts(vars[,1])){ # if(NCOL(vars)>1){ # class(vars) <- c(class(vars),"mts") # } # class(vars) <- c(class(vars),"ts") # tspx <- unique(sapply(vars,tsp), MARGIN = 2) # if(length(tspx)==3){ # attr(vars, "tsp") <- tspx # } # } return(vars) } recoverTSP <- function(times.x) { freq <- sort(unique(round(times.x %% 1, digits = 6))) # The subset cannot increase frequency freq <- length(freq) return(c(min(times.x), min(times.x) + (length(times.x) - 1) / freq, freq)) } forecast/R/simulate.R0000644000176200001440000006623314163717124014247 0ustar liggesusers#' Simulation from a time series model #' #' Returns a time series based on the model object \code{object}. #' #' With \code{simulate.Arima()}, the \code{object} should be produced by #' \code{\link{Arima}} or \code{\link{auto.arima}}, rather than #' \code{\link[stats]{arima}}. By default, the error series is assumed normally #' distributed and generated using \code{\link[stats]{rnorm}}. If \code{innov} #' is present, it is used instead. If \code{bootstrap=TRUE} and #' \code{innov=NULL}, the residuals are resampled instead. #' #' When \code{future=TRUE}, the sample paths are conditional on the data. When #' \code{future=FALSE} and the model is stationary, the sample paths do not #' depend on the data at all. When \code{future=FALSE} and the model is #' non-stationary, the location of the sample paths is arbitrary, so they all #' start at the value of the first observation. #' #' @param object An object of class "\code{ets}", "\code{Arima}", "\code{ar}" #' or "\code{nnetar}". #' @param nsim Number of periods for the simulated series. Ignored if either #' \code{xreg} or \code{innov} are not \code{NULL}. Otherwise the default is #' the length of series used to train model (or 100 if no data found). #' @param seed Either \code{NULL} or an integer that will be used in a call to #' \code{\link[base]{set.seed}} before simulating the time series. The default, #' \code{NULL}, will not change the random generator state. #' @param future Produce sample paths that are future to and conditional on the #' data in \code{object}. Otherwise simulate unconditionally. #' @param bootstrap Do simulation using resampled errors rather than normally #' distributed errors or errors provided as \code{innov}. #' @param innov A vector of innovations to use as the error series. Ignored if #' \code{bootstrap==TRUE}. If not \code{NULL}, the value of \code{nsim} is set #' to length of \code{innov}. #' @param xreg New values of \code{xreg} to be used for forecasting. The value #' of \code{nsim} is set to the number of rows of \code{xreg} if it is not #' \code{NULL}. #' @param ... Other arguments, not currently used. #' @inheritParams forecast #' #' @return An object of class "\code{ts}". 
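#' @section Conditional versus unconditional simulation:
#' A minimal sketch of the two modes described above (object names are
#' illustrative only):
#' \preformatted{
#' fit <- ets(USAccDeaths)
#' sim_future <- simulate(fit, nsim = 36, future = TRUE)   # conditional on the data
#' sim_uncond <- simulate(fit, nsim = 36, future = FALSE)  # unconditional path
#' }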
#' @author Rob J Hyndman #' @seealso \code{\link{ets}}, \code{\link{Arima}}, \code{\link{auto.arima}}, #' \code{\link{ar}}, \code{\link{arfima}}, \code{\link{nnetar}}. #' @keywords ts #' @examples #' fit <- ets(USAccDeaths) #' plot(USAccDeaths, xlim = c(1973, 1982)) #' lines(simulate(fit, 36), col = "red") #' @export simulate.ets <- function(object, nsim = length(object$x), seed = NULL, future = TRUE, bootstrap = FALSE, innov = NULL, ...) { if (is.null(innov)) { if (!exists(".Random.seed", envir = .GlobalEnv, inherits = FALSE)) { runif(1) } if (is.null(seed)) { RNGstate <- get(".Random.seed", envir = .GlobalEnv) } else { R.seed <- get(".Random.seed", envir = .GlobalEnv) set.seed(seed) RNGstate <- structure(seed, kind = as.list(RNGkind())) on.exit(assign(".Random.seed", R.seed, envir = .GlobalEnv)) } } else { nsim <- length(innov) } if (!is.null(object$x)) { if (is.null(tsp(object$x))) { object$x <- ts(object$x, frequency = 1, start = 1) } } else { if (nsim == 0L) { nsim <- 100 } object$x <- ts(10, frequency = object$m, start = 1 / object$m) future <- FALSE } if (future) { initstate <- object$states[length(object$x) + 1, ] } else { # choose a random starting point initstate <- object$states[sample(1:length(object$x), 1), ] } if (bootstrap) { res <- na.omit(c(object$residuals) - mean(object$residuals, na.rm = TRUE)) e <- sample(res, nsim, replace = TRUE) } else if (is.null(innov)) { e <- rnorm(nsim, 0, sqrt(object$sigma2)) } else if (length(innov) == nsim) { e <- innov } else { stop("Length of innov must be equal to nsim") } if (object$components[1] == "M") { e <- pmax(-1, e) } tmp <- ts(.C( "etssimulate", as.double(initstate), as.integer(object$m), as.integer(switch(object$components[1], "A" = 1, "M" = 2 )), as.integer(switch(object$components[2], "N" = 0, "A" = 1, "M" = 2 )), as.integer(switch(object$components[3], "N" = 0, "A" = 1, "M" = 2 )), as.double(object$par["alpha"]), as.double(ifelse(object$components[2] == "N", 0, object$par["beta"])), as.double(ifelse(object$components[3] == "N", 0, object$par["gamma"])), as.double(ifelse(object$components[4] == "FALSE", 1, object$par["phi"])), as.integer(nsim), as.double(numeric(nsim)), as.double(e), PACKAGE = "forecast" )[[11]], frequency = object$m, start = ifelse(future, tsp(object$x)[2] + 1 / tsp(object$x)[3], tsp(object$x)[1])) if (is.na(tmp[1])) { stop("Problem with multiplicative damped trend") } if (!is.null(object$lambda)) { tmp <- InvBoxCox(tmp, object$lambda) } return(tmp) } # Simulate ARIMA model starting with observed data x # Some of this function is borrowed from the arima.sim() function in the stats package. # Note that myarima.sim() does simulation conditional on the values of observed x, whereas # arima.sim() is unconditional on any observed x. myarima.sim <- function(model, n, x, e, ...) 
{ start.innov <- residuals(model) innov <- e data <- x # Remove initial NAs first.nonmiss <- which(!is.na(x))[1] if (first.nonmiss > 1) { tsp.x <- tsp(x) start.x <- tsp.x[1] + (first.nonmiss - 1) / tsp.x[3] x <- window(x, start = start.x) start.innov <- window(start.innov, start = start.x) } model$x <- x n.start <- length(x) x <- ts(c(start.innov, innov), start = 1 - n.start, frequency = model$seasonal.period) flag.noadjust <- FALSE if (is.null(tsp(data))) { data <- ts(data, frequency = 1, start = 1) } if (!is.list(model)) { stop("'model' must be list") } if (n <= 0L) { stop("'n' must be strictly positive") } p <- length(model$ar) q <- length(model$ma) d <- 0 D <- model$seasonal.difference m <- model$seasonal.period if (!is.null(ord <- model$order)) { if (length(ord) != 3L) { stop("'model$order' must be of length 3") } if (p != ord[1L]) { stop("inconsistent specification of 'ar' order") } if (q != ord[3L]) { stop("inconsistent specification of 'ma' order") } d <- ord[2L] if (d != round(d) || d < 0) { stop("number of differences must be a positive integer") } } if (p) { minroots <- min(Mod(polyroot(c(1, -model$ar)))) if (minroots <= 1) { stop("'ar' part of model is not stationary") } } if (length(model$ma)) { # MA filtering x <- stats::filter(x, c(1, model$ma), method = "convolution", sides = 1L) x[seq_along(model$ma)] <- 0 } ## AR "filtering" len.ar <- length(model$ar) if (length(model$ar) && (len.ar <= length(data))) { if ((D != 0) && (d != 0)) { diff.data <- diff(data, lag = 1, differences = d) diff.data <- diff(diff.data, lag = m, differences = D) } else if ((D != 0) && (d == 0)) { diff.data <- diff(data, lag = model$seasonal.period, differences = D) } else if ((D == 0) && (d != 0)) { diff.data <- diff(data, lag = 1, differences = d) } else { diff.data <- data } x.new.innovations <- x[(length(start.innov) + 1):length(x)] x.with.data <- c(diff.data, x.new.innovations) for (i in (length(diff.data) + 1):length(x.with.data)) { lagged.x.values <- x.with.data[(i - len.ar):(i - 1)] ar.coefficients <- model$ar[length(model$ar):1] sum.multiplied.x <- sum((lagged.x.values * ar.coefficients)[abs(ar.coefficients) > .Machine$double.eps]) x.with.data[i] <- x.with.data[i] + sum.multiplied.x } x.end <- x.with.data[(length(diff.data) + 1):length(x.with.data)] x <- ts(x.end, start = 1, frequency = model$seasonal.period) flag.noadjust <- TRUE } else if (length(model$ar)) # but data too short { # AR filtering for all other cases where AR is used. x <- stats::filter(x, model$ar, method = "recursive") } if ((d == 0) && (D == 0) && (flag.noadjust == FALSE)) # Adjust to ensure end matches approximately { # Last 20 diffs if (n.start >= 20) { xdiff <- (model$x - x[1:n.start])[n.start - (19:0)] } else { xdiff <- model$x - x[1:n.start] } # If all same sign, choose last if (all(sign(xdiff) == 1) || all(sign(xdiff) == -1)) { xdiff <- xdiff[length(xdiff)] } else { # choose mean. 
xdiff <- mean(xdiff) } x <- x + xdiff } if ((n.start > 0) && (flag.noadjust == FALSE)) { x <- x[-(1:n.start)] } ######## Undo all differences if ((D > 0) && (d == 0)) { # Seasonal undifferencing, if there is no regular differencing i <- length(data) - D * m + 1 seasonal.xi <- data[i:length(data)] length.s.xi <- length(seasonal.xi) x <- diffinv(x, lag = m, differences = D, xi = seasonal.xi)[-(1:length.s.xi)] } else if ((d > 0) && (D == 0)) { # Regular undifferencing, if there is no seasonal differencing x <- diffinv(x, differences = d, xi = data[length(data) - (d:1) + 1])[-(1:d)] } else if ((d > 0) && (D > 0)) { # Undifferencing for where the differencing is both Seasonal and Non-Seasonal # Regular first delta.four <- diff(data, lag = m, differences = D) regular.xi <- delta.four[(length(delta.four) - D):length(delta.four)] x <- diffinv(x, differences = d, xi = regular.xi[length(regular.xi) - (d:1) + 1])[-(1:d)] # Then seasonal i <- length(data) - D * m + 1 seasonal.xi <- data[i:length(data)] length.s.xi <- length(seasonal.xi) x <- diffinv(x, lag = m, differences = D, xi = seasonal.xi) x <- x[-(1:length.s.xi)] } x <- ts(x[1:n], frequency = frequency(data), start = tsp(data)[2] + 1 / tsp(data)[3]) return(x) } #' @rdname simulate.ets #' @export simulate.Arima <- function(object, nsim = length(object$x), seed = NULL, xreg = NULL, future = TRUE, bootstrap = FALSE, innov = NULL, lambda = object$lambda, ...) { # Error check: if (object$arma[7] < 0) { stop("Value for seasonal difference is < 0. Must be >= 0") } else if ((sum(object$arma[c(3, 4, 7)]) > 0) && (object$arma[5] < 2)) { stop("Invalid value for seasonal period") } # Check if data is included x <- object$x <- getResponse(object) if (is.null(x)) { n <- 0 future <- FALSE if (nsim == 0L) { nsim <- 100 } } else { if (is.null(tsp(x))) { x <- ts(x, frequency = 1, start = 1) } if (!is.null(lambda)) { x <- BoxCox(x, lambda) lambda <- attr(x, "lambda") } n <- length(x) } # Check xreg if (!is.null(xreg)) { xreg <- as.matrix(xreg) nsim <- nrow(xreg) } use.drift <- is.element("drift", names(object$coef)) usexreg <- (!is.null(xreg) | use.drift | !is.null(object$xreg)) xm <- oldxm <- 0 if (use.drift) { # Remove existing drift column if (NCOL(xreg) == 1 && all(diff(xreg) == 1)) { xreg <- NULL } else if (!is.null(colnames(xreg))) { xreg <- xreg[, colnames(xreg) != "drift", drop = FALSE] } # Create new drift column xreg <- cbind(drift = as.matrix(seq(nsim) + n * future), xreg) } # Check xreg has the correct dimensions if (usexreg) { if (is.null(xreg)) { stop("xreg argument missing") } else if (is.null(object$xreg)) { stop("xreg not required") } else if (NCOL(xreg) != NCOL(object$xreg)) { stop("xreg has incorrect dimension.") } } ######## Random Seed Code if (is.null(innov)) { if (!exists(".Random.seed", envir = .GlobalEnv)) { runif(1) } if (is.null(seed)) { RNGstate <- .Random.seed } else { R.seed <- .Random.seed set.seed(seed) RNGstate <- structure(seed, kind = as.list(RNGkind())) on.exit(assign(".Random.seed", R.seed, envir = .GlobalEnv)) } } else { nsim <- length(innov) } ######## End Random seed code # Check for seasonal ARMA components and set flag accordingly. 
This will be used later in myarima.sim() flag.s.arma <- (sum(object$arma[c(3, 4)]) > 0) # Check for Seasonality in ARIMA model if (sum(object$arma[c(3, 4, 7)]) > 0) { # return(simulateSeasonalArima(object, nsim=nsim, seed=seed, xreg=xreg, future=future, bootstrap=bootstrap, ...)) if (sum(object$model$phi) == 0) { ar <- NULL } else { ar <- as.double(object$model$phi) } if (sum(object$model$theta) == 0) { ma <- NULL } else { ma <- as.double(object$model$theta) } order <- c(length(ar), object$arma[6], length(ma)) if (future) { model <- list( order = order, ar = ar, ma = ma, sd = sqrt(object$sigma2), residuals = residuals(object), seasonal.difference = object$arma[7], seasonal.period = object$arma[5], flag.seasonal.arma = flag.s.arma, seasonal.order = object$arma[c(3, 7, 4)] ) } else { model <- list(order = order, ar = ar, ma = ma, sd = sqrt(object$sigma2), residuals = residuals(object)) } flag.seasonal.diff <- (object$arma[7] > 0) } else { #### Non-Seasonal ARIMA specific code: Set up the model order <- object$arma[c(1, 6, 2)] if (order[1] > 0) { ar <- object$model$phi[1:order[1]] } else { ar <- NULL } if (order[3] > 0) { ma <- object$model$theta[1:order[3]] } else { ma <- NULL } if (object$arma[2] != length(ma)) { stop("MA length wrong") } else if (object$arma[1] != length(ar)) { stop("AR length wrong") } if (future) { model <- list( order = object$arma[c(1, 6, 2)], ar = ar, ma = ma, sd = sqrt(object$sigma2), residuals = residuals(object), seasonal.difference = 0, flag.seasonal.arma = flag.s.arma, seasonal.order = c(0, 0, 0), seasonal.period = 1 ) } else { model <- list(order = object$arma[c(1, 6, 2)], ar = ar, ma = ma, sd = sqrt(object$sigma2), residuals = residuals(object)) } flag.seasonal.diff <- FALSE ### End non-seasonal ARIMA specific code } if (bootstrap) { res <- na.omit(c(model$residuals) - mean(model$residuals, na.rm = TRUE)) e <- sample(res, nsim, replace = TRUE) } else if (is.null(innov)) { e <- rnorm(nsim, 0, model$sd) } else if (length(innov) == nsim) { e <- innov } else { stop("Length of innov must be equal to nsim") } narma <- sum(object$arma[1L:4L]) if (length(object$coef) > narma) { if (names(object$coef)[narma + 1L] == "intercept") { xreg <- cbind(intercept = rep(1, nsim), xreg) if (future) { object$xreg <- cbind(intercept = rep(1, n), object$xreg) } } if (!is.null(xreg)) { xm <- if (narma == 0) { drop(as.matrix(xreg) %*% object$coef) } else { drop(as.matrix(xreg) %*% object$coef[-(1L:narma)]) } if (future) { oldxm <- if (narma == 0) { drop(as.matrix(object$xreg) %*% object$coef) } else { drop(as.matrix(object$xreg) %*% object$coef[-(1L:narma)]) } } } } if (future) { sim <- myarima.sim(model, nsim, x - oldxm, e = e) + xm } else { if (flag.seasonal.diff) { zeros <- object$arma[5] * object$arma[7] sim <- arima.sim(model, nsim, innov = e) sim <- diffinv(sim, lag = object$arma[5], differences = object$arma[7])[-(1:zeros)] sim <- tail(sim, nsim) + xm } else { sim <- tail(arima.sim(model, nsim, innov = e), nsim) + xm } if (!is.null(x)) { sim <- ts(sim, start = tsp(x)[1], frequency = tsp(x)[3]) } else { sim <- ts(sim, frequency = object$frequency) } # If model is non-stationary, then condition simulated data on first observation if (!is.null(x) & (model$order[2] > 0 || flag.seasonal.diff)) { sim <- sim - sim[1] + x[1] } } if (!is.null(lambda)) { sim <- InvBoxCox(sim, lambda) } return(sim) } #' @rdname simulate.ets #' @export simulate.ar <- function(object, nsim = object$n.used, seed = NULL, future = TRUE, bootstrap = FALSE, innov = NULL, ...) 
{ if (is.null(innov)) { if (!exists(".Random.seed", envir = .GlobalEnv)) { runif(1) } if (is.null(seed)) { RNGstate <- .Random.seed } else { R.seed <- .Random.seed set.seed(seed) RNGstate <- structure(seed, kind = as.list(RNGkind())) on.exit(assign(".Random.seed", R.seed, envir = .GlobalEnv)) } } else { nsim <- length(innov) } object$x <- getResponse(object) if (is.null(object$x)) { future <- FALSE x.mean <- 0 if (is.null(nsim)) { nsim <- 100 } } else { x.mean <- object$x.mean object$x <- object$x - x.mean } if (future) { model <- list(ar = object$ar, sd = sqrt(object$var.pred), residuals = object$resid, seasonal.difference = 0, seasonal.period = 1, flag.seasonal.arma = FALSE) } else { model <- list(ar = object$ar, sd = sqrt(object$var.pred), residuals = object$resid) } if (bootstrap) { res <- na.omit(c(model$residuals) - mean(model$residuals, na.rm = TRUE)) e <- sample(res, nsim, replace = TRUE) } else if (is.null(innov)) { e <- rnorm(nsim, 0, model$sd) } else if (length(innov) == nsim) { e <- innov } else { stop("Length of innov must be equal to nsim") } if (future) { return(myarima.sim(model, nsim, x = object$x, e = e) + x.mean) } else { return(arima.sim(model, nsim, innov = e) + x.mean) } } #' @rdname simulate.ets #' @export simulate.lagwalk <- function(object, nsim = length(object$x), seed = NULL, future = TRUE, bootstrap = FALSE, innov = NULL, lambda = object$lambda, ...) { if (is.null(innov)) { if (!exists(".Random.seed", envir = .GlobalEnv)) { runif(1) } if (is.null(seed)) { RNGstate <- .Random.seed } else { R.seed <- .Random.seed set.seed(seed) RNGstate <- structure(seed, kind = as.list(RNGkind())) on.exit(assign(".Random.seed", R.seed, envir = .GlobalEnv)) } } else { nsim <- length(innov) } if (bootstrap) { res <- na.omit(c(object$residuals) - mean(object$residuals, na.rm = TRUE)) e <- sample(res, nsim, replace = TRUE) } else if (is.null(innov)) { se <- sqrt(object$sigma2) e <- rnorm(nsim, 0, se) } else { e <- innov } # Cumulate errors lag_grp <- rep_len(seq_len(object$par$lag), length(e)) e <- split(e, lag_grp) cumulative_e <- unsplit(lapply(e, cumsum), lag_grp) # Find starting position x <- object$x if (is.null(x)) { future <- FALSE if (nsim == 0L) { nsim <- 100 } x <- 1 } if (!is.null(lambda)) { x <- BoxCox(x, lambda) } if (future) { start <- tail(x, object$par$lag) } else { start <- head(x, object$par$lag) } # Handle missing values if (any(na_pos <- is.na(start))) { if (!is.null(innov)) { warning("Missing values encountered at simulation starting values, simulating starting values from closest observed value.") } lag_grp <- rep_len(seq_len(object$par$lag), length(x)) start[na_pos] <- vapply(split(x, lag_grp)[na_pos], function(x) { if (future) { x <- rev(x) } pos <- which.min(is.na(x)) x[pos] + sum(rnorm(pos - 1, 0, sqrt(object$sigma2))) }, numeric(1L)) } # Construct simulated ts simdrift <- object$par$drift + rnorm(1, 0, object$par$drift.se) sim <- rep_len(start, nsim) + seq_len(nsim) * simdrift + cumulative_e if (!is.null(lambda)) { sim <- InvBoxCox(sim, lambda) } tspx <- tsp(x) ts(sim, start = ifelse(future, tspx[2] + 1 / tspx[3], tspx[1]), frequency = tspx[3]) } #' @rdname simulate.ets #' @export simulate.fracdiff <- function(object, nsim = object$n, seed = NULL, future = TRUE, bootstrap = FALSE, innov = NULL, ...) 
{ x <- getResponse(object) if (is.null(x)) { future <- FALSE if (is.null(nsim)) { nsim <- 100 } x <- 0 } # Strip initial and final missing values xx <- na.ends(x) n <- length(xx) # Remove mean meanx <- mean(xx) xx <- xx - meanx # Difference series (removes mean as well) y <- undo.na.ends(x, diffseries(xx, d = object$d)) # Create ARMA model for differenced series arma <- Arima( y, order = c(length(object$ar), 0, length(object$ma)), include.mean = FALSE, fixed = c(object$ar, -object$ma) ) # Simulate from ARMA model ysim <- simulate(arma, nsim, seed, future = future, bootstrap = bootstrap, innov = innov) # Undo differencing and add back mean return(unfracdiff(xx, ysim, n, nsim, object$d) + meanx) } #' @rdname simulate.ets #' @export simulate.nnetar <- function(object, nsim = length(object$x), seed = NULL, xreg = NULL, future = TRUE, bootstrap = FALSE, innov = NULL, lambda = object$lambda, ...) { if (is.null(innov)) { if (!exists(".Random.seed", envir = .GlobalEnv, inherits = FALSE)) { runif(1) } if (is.null(seed)) { RNGstate <- get(".Random.seed", envir = .GlobalEnv) } else { R.seed <- get(".Random.seed", envir = .GlobalEnv) set.seed(seed) RNGstate <- structure(seed, kind = as.list(RNGkind())) on.exit(assign(".Random.seed", R.seed, envir = .GlobalEnv)) } } else { nsim <- length(innov) } if (is.null(object$x)) { future <- FALSE } ## only future currently implemented if (!future) { warning("simulate.nnetar() currently only supports future=TRUE") } ## set simulation innovations if (bootstrap) { res <- na.omit(c(residuals(object, type = "innovation"))) res <- res - mean(res) ## scale if appropriate if (!is.null(object$scalex$scale)) { res <- res / object$scalex$scale } e <- sample(res, nsim, replace = TRUE) } else if (is.null(innov)) { res <- na.omit(c(residuals(object, type = "innovation"))) ## scale if appropriate if (!is.null(object$scalex$scale)) { res <- res / object$scalex$scale } e <- rnorm(nsim, 0, sd(res, na.rm = TRUE)) } else if (length(innov) == nsim) { e <- innov if (!is.null(object$scalex$scale)) { e <- e / object$scalex$scale } } else if (isTRUE(innov == 0L)) { ## to pass innov=0 so simulation equals mean forecast e <- rep(innov, nsim) } else { stop("Length of innov must be equal to nsim") } ## tspx <- tsp(object$x) # Check if xreg was used in fitted model if (is.null(object$xreg)) { if (!is.null(xreg)) { warning("External regressors were not used in fitted model, xreg will be ignored") } xreg <- NULL } else { if (is.null(xreg)) { stop("No external regressors provided") } xreg <- as.matrix(xreg) if (NCOL(xreg) != NCOL(object$xreg)) { stop("Number of external regressors does not match fitted model") } if (NROW(xreg) != nsim) { stop("Number of rows in xreg does not match nsim") } } xx <- object$x if (!is.null(lambda)) { xx <- BoxCox(xx, lambda) lambda <- attr(xx, "lambda") } # Check and apply scaling of fitted model if (!is.null(object$scalex)) { xx <- scale(xx, center = object$scalex$center, scale = object$scalex$scale) if (!is.null(xreg)) { xreg <- scale(xreg, center = object$scalexreg$center, scale = object$scalexreg$scale) } } ## Get lags used in fitted model lags <- object$lags maxlag <- max(lags) flag <- rev(tail(xx, n = maxlag)) ## Simulate by iteratively forecasting and adding innovation path <- numeric(nsim) for (i in 1:nsim) { newdata <- c(flag[lags], xreg[i, ]) if (any(is.na(newdata))) { stop("I can't simulate when there are missing values near the end of the series.") } path[i] <- mean(sapply(object$model, predict, newdata = newdata)) + e[i] flag <- c(path[i], 
flag[-maxlag]) } ## Re-scale simulated points if (!is.null(object$scalex)) { path <- path * object$scalex$scale + object$scalex$center } ## Add ts properties path <- ts(path, start = tspx[2] + 1 / tspx[3], frequency = tspx[3]) ## Back-transform simulated points if (!is.null(lambda)) { path <- InvBoxCox(path, lambda) } return(path) } #' @rdname simulate.ets #' @export simulate.modelAR <- function(object, nsim = length(object$x), seed = NULL, xreg = NULL, future = TRUE, bootstrap = FALSE, innov = NULL, lambda = object$lambda, ...) { if (is.null(innov)) { if (!exists(".Random.seed", envir = .GlobalEnv, inherits = FALSE)) { runif(1) } if (is.null(seed)) { RNGstate <- get(".Random.seed", envir = .GlobalEnv) } else { R.seed <- get(".Random.seed", envir = .GlobalEnv) set.seed(seed) RNGstate <- structure(seed, kind = as.list(RNGkind())) on.exit(assign(".Random.seed", R.seed, envir = .GlobalEnv)) } } else { nsim <- length(innov) } if (is.null(object$x)) { future <- FALSE } ## only future currently implemented if (!future) { warning("simulate.modelAR() currently only supports future=TRUE") } ## set simulation innovations if (bootstrap) { res <- na.omit(c(residuals(object, type = "innovation"))) res <- res - mean(res) ## scale if appropriate if (!is.null(object$scalex$scale)) { res <- res / object$scalex$scale } e <- sample(res, nsim, replace = TRUE) } else if (is.null(innov)) { res <- na.omit(c(residuals(object, type = "innovation"))) ## scale if appropriate if (!is.null(object$scalex$scale)) { res <- res / object$scalex$scale } e <- rnorm(nsim, 0, sd(res, na.rm = TRUE)) } else if (length(innov) == nsim) { e <- innov if (!is.null(object$scalex$scale)) { e <- e / object$scalex$scale } } else if (isTRUE(innov == 0L)) { ## to pass innov=0 so simulation equals mean forecast e <- rep(innov, nsim) } else { stop("Length of innov must be equal to nsim") } ## tspx <- tsp(object$x) # Check if xreg was used in fitted model if (is.null(object$xreg)) { if (!is.null(xreg)) { warning("External regressors were not used in fitted model, xreg will be ignored") } xreg <- NULL } else { if (is.null(xreg)) { stop("No external regressors provided") } xreg <- as.matrix(xreg) if (NCOL(xreg) != NCOL(object$xreg)) { stop("Number of external regressors does not match fitted model") } if (NROW(xreg) != nsim) { stop("Number of rows in xreg does not match nsim") } } xx <- object$x if (!is.null(lambda)) { xx <- BoxCox(xx, lambda) lambda <- attr(xx, "lambda") } # Check and apply scaling of fitted model if (!is.null(object$scalex)) { xx <- scale(xx, center = object$scalex$center, scale = object$scalex$scale) if (!is.null(xreg)) { xreg <- scale(xreg, center = object$scalexreg$center, scale = object$scalexreg$scale) } } ## Get lags used in fitted model lags <- object$lags maxlag <- max(lags) flag <- rev(tail(xx, n = maxlag)) ## Simulate by iteratively forecasting and adding innovation path <- numeric(nsim) for (i in 1:nsim) { newdata <- c(flag[lags], xreg[i, ]) if (any(is.na(newdata))) { stop("I can't simulate when there are missing values near the end of the series.") } path[i] <- object$predict.FUN(object$model, newdata) + e[i] flag <- c(path[i], flag[-maxlag]) } ## Re-scale simulated points if (!is.null(object$scalex)) { path <- path * object$scalex$scale + object$scalex$center } ## Add ts properties path <- ts(path, start = tspx[2] + 1 / tspx[3], frequency = tspx[3]) ## Back-transform simulated points if (!is.null(lambda)) { path <- InvBoxCox(path, lambda) } return(path) } 
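# Illustrative usage of the simulate() methods defined above (a sketch only,
# kept in comments so nothing runs at package load; `USAccDeaths` is a base-R
# dataset assumed here purely for exposition):
#
#   fit <- nnetar(USAccDeaths)
#   sim_normal <- simulate(fit, nsim = 24)                   # Gaussian innovations
#   sim_boot   <- simulate(fit, nsim = 24, bootstrap = TRUE) # resampled residuals
#   sim_fixed  <- simulate(fit, innov = rep(0, 24))          # nsim taken from innov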
forecast/R/lm.R0000644000176200001440000004703214166665743013044 0ustar liggesusers#' Fit a linear model with time series components #' #' \code{tslm} is used to fit linear models to time series including trend and #' seasonality components. #' #' \code{tslm} is largely a wrapper for \code{\link[stats]{lm}()} except that #' it allows variables "trend" and "season" which are created on the fly from #' the time series characteristics of the data. The variable "trend" is a #' simple time trend and "season" is a factor indicating the season (e.g., the #' month or the quarter depending on the frequency of the data). #' #' @param formula an object of class "formula" (or one that can be coerced to #' that class): a symbolic description of the model to be fitted. #' @param data an optional data frame, list or environment (or object coercible #' by as.data.frame to a data frame) containing the variables in the model. If #' not found in data, the variables are taken from environment(formula), #' typically the environment from which lm is called. #' @param subset an optional subset containing rows of data to keep. For best #' results, pass a logical vector of rows to keep. Also supports #' \code{\link[base]{subset}()} functions. #' @inheritParams forecast #' #' @param ... Other arguments passed to \code{\link[stats]{lm}()} #' @return Returns an object of class "lm". #' @author Mitchell O'Hara-Wild and Rob J Hyndman #' @seealso \code{\link{forecast.lm}}, \code{\link[stats]{lm}}. #' @keywords stats #' @examples #' #' y <- ts(rnorm(120,0,3) + 1:120 + 20*sin(2*pi*(1:120)/12), frequency=12) #' fit <- tslm(y ~ trend + season) #' plot(forecast(fit, h=20)) #' #' @export tslm <- function(formula, data, subset, lambda=NULL, biasadj=FALSE, ...) { cl <- match.call() if (!("formula" %in% class(formula))) { formula <- stats::as.formula(formula) } if (missing(data)) { mt <- try(terms(formula)) if (is.element("try-error", class(mt))) { stop("Cannot extract terms from formula, please provide data argument.") } } else { mt <- terms(formula, data = data) } ## Categorise formula variables into time-series, functions, and data. vars <- attr(mt, "variables") # Check for time series variables tsvar <- match(c("trend", "season"), as.character(vars), 0L) # Check for functions (which should be evaluated later, in lm) fnvar <- NULL for (i in 2:length(vars)) { term <- vars[[i]] if (!is.symbol(term)) { if (typeof(eval(term[[1]])) == "closure") { # If this term is a function (alike fourier) fnvar <- c(fnvar, i) } } } ## Fix formula's environment for correct `...` scoping. 
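# (Arguments passed through `...` to lm(), and any variables not present in
# the constructed data, are then looked up in this environment rather than
# in the caller's.)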
attr(formula, ".Environment") <- environment() ## Ensure response variable is taken from dataset (including transformations) formula[[2]] <- as.symbol(deparse(formula[[2]])) if (sum(c(tsvar, fnvar)) > 0) { # Remove variables not needed in data (trend+season+functions) rmvar <- c(tsvar, fnvar) rmvar <- rmvar[rmvar != attr(mt, "response") + 1] # Never remove the reponse variable if (any(rmvar != 0)) { vars <- vars[-rmvar] } } ## Grab any variables missing from data if (!missing(data)) { # Check for any missing variables in data vars <- vars[c(TRUE, !as.character(vars[-1]) %in% colnames(data))] dataname <- substitute(data) } if (!missing(data)) { data <- datamat(do.call(datamat, as.list(vars[-1]), envir = parent.frame()), data) } else { data <- do.call(datamat, as.list(vars[-1]), envir = parent.frame()) } ## Set column name of univariate dataset if (is.null(dim(data)) && length(data) != 0) { cn <- as.character(vars)[2] } else { cn <- colnames(data) } ## Get time series attributes from the data if (is.null(tsp(data))) { if ((attr(mt, "response") + 1) %in% fnvar) { # Check unevaluated response variable tspx <- tsp(eval(attr(mt, "variables")[[attr(mt, "response") + 1]])) } tspx <- tsp(data[, 1]) # Check for complex ts data.frame } else { tspx <- tsp(data) } if (is.null(tspx)) { stop("Not time series data, use lm()") } tsdat <- match(c("trend", "season"), cn, 0L) ## Create trend and season if missing from the data if (tsdat[1] == 0) { # &tsvar[1]!=0){#If "trend" is not in data, but is in formula trend <- 1:NROW(data) cn <- c(cn, "trend") data <- cbind(data, trend) } if (tsdat[2] == 0) { # &tsvar[2]!=0){#If "season" is not in data, but is in formula if (tsvar[2] != 0 && tspx[3] <= 1) { # Nonseasonal data, and season requested stop("Non-seasonal data cannot be modelled using a seasonal factor") } season <- as.factor(cycle(data[, 1])) cn <- c(cn, "season") data <- cbind(data, season) } colnames(data) <- cn ## Subset the data according to subset argument if (!missing(subset)) { if (!is.logical(subset)) { stop("subset must be logical") } else if (NCOL(subset) > 1) { stop("subset must be a logical vector") } else if (NROW(subset) != NROW(data)) { stop("Subset must be the same length as the number of rows in the dataset") } warning("Subset has been assumed contiguous") timesx <- time(data[, 1])[subset] tspx <- recoverTSP(timesx) if (tspx[3] == 1 && tsdat[2] == 0 && tsvar[2] != 0) { stop("Non-seasonal data cannot be modelled using a seasonal factor") } data <- data[subset, ] # model.frame(formula,as.data.frame(data[subsetTF,])) } if (!is.null(lambda)) { resp_var <- deparse(attr(mt, "variables")[[attr(mt, "response") + 1]]) data[, resp_var] <- BoxCox(data[, resp_var], lambda) lambda <- attr(data[, resp_var], "lambda") } if (tsdat[2] == 0 && tsvar[2] != 0) { data$season <- factor(data$season) # fix for lost factor information, may not be needed? } ## Fit the model and prepare model structure fit <- lm(formula, data = data, na.action = na.exclude, ...) 
fit$data <- data responsevar <- deparse(formula[[2]]) fit$residuals <- ts(residuals(fit)) fit$x <- fit$residuals fit$x[!is.na(fit$x)] <- model.frame(fit)[, responsevar] fit$fitted.values <- ts(fitted(fit)) tsp(fit$residuals) <- tsp(fit$x) <- tsp(fit$fitted.values) <- tsp(data[, 1]) <- tspx fit$call <- cl fit$method <- "Linear regression model" if (exists("dataname")) { fit$call$data <- dataname } if (!is.null(lambda)) { attr(lambda, "biasadj") <- biasadj fit$lambda <- lambda fit$fitted.values <- InvBoxCox(fit$fitted.values, lambda, biasadj, var(fit$residuals)) fit$x <- InvBoxCox(fit$x, lambda) } class(fit) <- c("tslm", class(fit)) return(fit) } #' @export fitted.tslm <- function(object, ...){ object$fitted.values } #' Forecast a linear model with possible time series components #' #' \code{forecast.lm} is used to predict linear models, especially those #' involving trend and seasonality components. #' #' \code{forecast.lm} is largely a wrapper for #' \code{\link[stats]{predict.lm}()} except that it allows variables "trend" #' and "season" which are created on the fly from the time series #' characteristics of the data. Also, the output is reformatted into a #' \code{forecast} object. #' #' @param object Object of class "lm", usually the result of a call to #' \code{\link[stats]{lm}} or \code{\link{tslm}}. #' @param newdata An optional data frame in which to look for variables with #' which to predict. If omitted, it is assumed that the only variables are #' trend and season, and \code{h} forecasts are produced. #' @param level Confidence level for prediction intervals. #' @param fan If \code{TRUE}, level is set to seq(51,99,by=3). This is suitable #' for fan plots. #' @param h Number of periods for forecasting. Ignored if \code{newdata} #' present. #' @param ts If \code{TRUE}, the forecasts will be treated as time series #' provided the original data is a time series; the \code{newdata} will be #' interpreted as related to the subsequent time periods. If \code{FALSE}, any #' time series attributes of the original data will be ignored. #' @param ... Other arguments passed to \code{\link[stats]{predict.lm}()}. #' @inheritParams forecast #' #' @return An object of class "\code{forecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{forecast.lm}. #' #' An object of class \code{"forecast"} is a list containing at least the #' following elements: \item{model}{A list containing information about the #' fitted model} \item{method}{The name of the forecasting method as a #' character string} \item{mean}{Point forecasts as a time series} #' \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper #' limits for prediction intervals} \item{level}{The confidence values #' associated with the prediction intervals} \item{x}{The historical data for #' the response variable.} \item{residuals}{Residuals from the fitted model. #' That is x minus fitted values.} \item{fitted}{Fitted values} #' @author Rob J Hyndman #' @seealso \code{\link{tslm}}, \code{\link[stats]{lm}}. 
#' @keywords stats #' @examples #' #' y <- ts(rnorm(120,0,3) + 1:120 + 20*sin(2*pi*(1:120)/12), frequency=12) #' fit <- tslm(y ~ trend + season) #' plot(forecast(fit, h=20)) #' #' @export forecast.lm <- function(object, newdata, h=10, level=c(80, 95), fan=FALSE, lambda=object$lambda, biasadj=NULL, ts=TRUE, ...) { if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 && max(level) < 1) { level <- 100 * level } else if (min(level) < 0 || max(level) > 99.99) { stop("Confidence limit out of range") } } if (!is.null(object$data)) { origdata <- object$data } # no longer exists else if (!is.null(object$model)) { origdata <- object$model } else if (!is.null(object$call$data)) { origdata <- try(object$data <- eval(object$call$data), silent = TRUE) if (is.element("try-error", class(origdata))) { stop("Could not find data. Try training your model using tslm() or attach data directly to the object via object$data<-modeldata for some object<-lm(formula,modeldata).") } } else { origdata <- as.data.frame(fitted(object) + residuals(object)) } if (!is.element("data.frame", class(origdata))) { origdata <- as.data.frame(origdata) if (!is.element("data.frame", class(origdata))) { stop("Could not find data. Try training your model using tslm() or attach data directly to the object via object$data<-modeldata for some object<-lm(formula,modeldata).") } } # Check if the forecasts will be time series if (ts && is.element("ts", class(origdata))) { tspx <- tsp(origdata) timesx <- time(origdata) } else if (ts && is.element("ts", class(origdata[, 1]))) { tspx <- tsp(origdata[, 1]) timesx <- time(origdata[, 1]) } else if (ts && is.element("ts", class(fitted(object)))) { tspx <- tsp(fitted(object)) timesx <- time(fitted(object)) } else { tspx <- NULL } # if(!is.null(object$call$subset)) # { # j <- eval(object$call$subset) # origdata <- origdata[j,] # if(!is.null(tspx)) # { # # Try to figure out times for subset. Assume they are contiguous. # timesx <- timesx[j] # tspx <- tsp(origdata) <- c(min(timesx),max(timesx),tspx[3]) # } # } # Add trend and seasonal to data frame oldterms <- terms(object) # Adjust terms for function variables and rename datamat colnames to match. 
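# e.g. a term such as fourier(y, K = 2) contributes several coefficients; the
# block below matches those columns in newdata or re-evaluates the function.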
if (!missing(newdata)) { reqvars <- as.character(attr(object$terms, "variables")[-1])[-attr(object$terms, "response")] # Search for time series variables tsvar <- match(c("trend", "season"), reqvars, 0L) # Check if required variables are functions fnvar <- sapply(reqvars, function(x) !(is.symbol(parse(text = x)[[1]]) || typeof(eval(parse(text = x)[[1]][[1]])) != "closure")) if (!is.data.frame(newdata)) { newdata <- datamat(newdata) colnames(newdata)[1] <- ifelse(sum(tsvar > 0), reqvars[-tsvar][1], reqvars[1]) warning("newdata column names not specified, defaulting to first variable required.") } oldnewdata <- newdata newvars <- make.names(colnames(newdata)) # Check if variables are missing misvar <- match(make.names(reqvars), newvars, 0L) == 0L if (any(!misvar & !fnvar)) { # If any variables are not missing/functions, add them to data tmpdata <- datamat(newdata[reqvars[!misvar]]) rm1 <- FALSE } else { # Prefill the datamat tmpdata <- datamat(1:NROW(newdata)) rm1 <- TRUE } # Remove trend and seasonality from required variables if (sum(tsvar) > 0) { reqvars <- reqvars[-tsvar] fnvar <- fnvar[-tsvar] misvar <- match(make.names(reqvars), newvars, 0L) == 0L } if (any(misvar | fnvar)) { # If any variables are missing/functions reqvars <- reqvars[misvar | fnvar] # They are required fnvar <- fnvar[misvar | fnvar] # Update required function variables for (i in reqvars) { found <- FALSE subvars <- NULL for (j in 1:length(object$coefficients)) { subvars[j] <- pmatch(i, names(object$coefficients)[j]) } subvars <- !is.na(subvars) subvars <- names(object$coefficients)[subvars] # Detect if subvars is multivariate if (length(subvars) > 1) { # Extract prefix only subvars <- substr(subvars, nchar(i) + 1, 999L) fsub <- match(make.names(subvars), newvars, 0L) if (any(fsub == 0)) { # Check for misnamed columns fsub <- grep(paste(make.names(subvars), collapse = "|"), newvars) } if (all(fsub != 0) && length(fsub) == length(subvars)) { imat <- as.matrix(newdata[, fsub], ncol = length(fsub)) colnames(imat) <- subvars tmpdata[[length(tmpdata) + 1]] <- imat found <- TRUE } else { # Attempt to evaluate it as a function subvars <- i } } if (length(subvars) == 1) { # Check if it is a function if (fnvar[match(i, reqvars)]) { # Pre-evaluate function from data tmpdata[[length(tmpdata) + 1]] <- eval(parse(text = subvars)[[1]], newdata) found <- TRUE } } if (found) { names(tmpdata)[length(tmpdata)] <- paste0("solvedFN___", match(i, reqvars)) subvarloc <- match(i, lapply(attr(object$terms, "predvars"), deparse)) attr(object$terms, "predvars")[[subvarloc]] <- attr(object$terms, "variables")[[subvarloc]] <- parse(text = paste0("solvedFN___", match(i, reqvars)))[[1]] } else { warning(paste0("Could not find required variable ", i, " in newdata. Specify newdata as a named data.frame")) } } } if (rm1) { tmpdata[[1]] <- NULL } newdata <- cbind(newdata, tmpdata) h <- nrow(newdata) } if (!is.null(tspx)) { # Always generate trend series trend <- ifelse(is.null(origdata$trend), NCOL(origdata), max(origdata$trend)) + (1:h) if (!missing(newdata)) { newdata <- cbind(newdata, trend) } else { newdata <- datamat(trend) } # Always generate season series x <- ts(1:h, start = tspx[2] + 1 / tspx[3], frequency = tspx[3]) season <- as.factor(cycle(x)) newdata <- cbind(newdata, season) } newdata <- as.data.frame(newdata) if (!exists("oldnewdata")) { oldnewdata <- newdata } # If only one column, assume its name. 
if (ncol(newdata) == 1 && colnames(newdata)[1] == "newdata") { colnames(newdata) <- as.character(formula(object$model))[3] } # Check regressors included in newdata. # Not working so removed for now. # xreg <- attributes(terms(object$model))$term.labels # if(any(!is.element(xreg,colnames(newdata)))) # stop("Predictor variables not included") object$x <- getResponse(object) # responsevar <- as.character(formula(object$model))[2] # responsevar <- gsub("`","",responsevar) # object$x <- model.frame(object$model)[,responsevar] # Remove missing values from residuals predict_object <- object predict_object$residuals <- na.omit(as.numeric(object$residuals)) out <- list() nl <- length(level) for (i in 1:nl) out[[i]] <- predict(predict_object, newdata = newdata, se.fit = TRUE, interval = "prediction", level = level[i] / 100, ...) if (nrow(newdata) != length(out[[1]]$fit[, 1])) { stop("Variables not found in newdata") } object$terms <- oldterms if (is.null(object$series)) { # Model produced via lm(), add series attribute object$series <- deparse(attr(oldterms, "variables")[[1 + attr(oldterms, "response")]]) } fcast <- list( model = object, mean = out[[1]]$fit[, 1], lower = out[[1]]$fit[, 2], upper = out[[1]]$fit[, 3], level = level, x = object$x, series = object$series ) fcast$method <- "Linear regression model" fcast$newdata <- oldnewdata fcast$residuals <- residuals(object) fcast$fitted <- fitted(object) if (NROW(origdata) != NROW(fcast$x)) { # Give up on ts attributes as some data are missing tspx <- NULL } if (NROW(fcast$x) != NROW(fcast$residuals)) { tspx <- NULL } if (!is.null(tspx)) { fcast$x <- ts(fcast$x) fcast$residuals <- ts(fcast$residuals) fcast$fitted <- ts(fcast$fitted) tsp(fcast$x) <- tsp(fcast$residuals) <- tsp(fcast$fitted) <- tspx } if (nl > 1) { for (i in 2:nl) { fcast$lower <- cbind(fcast$lower, out[[i]]$fit[, 2]) fcast$upper <- cbind(fcast$upper, out[[i]]$fit[, 3]) } } if (!is.null(tspx)) { fcast$mean <- ts(fcast$mean, start = tspx[2] + 1 / tspx[3], frequency = tspx[3]) fcast$upper <- ts(fcast$upper, start = tspx[2] + 1 / tspx[3], frequency = tspx[3]) fcast$lower <- ts(fcast$lower, start = tspx[2] + 1 / tspx[3], frequency = tspx[3]) } if (!is.null(lambda)) { #fcast$x <- InvBoxCox(fcast$x, lambda) fcast$mean <- InvBoxCox(fcast$mean, lambda, biasadj, fcast) fcast$lower <- InvBoxCox(fcast$lower, lambda) fcast$upper <- InvBoxCox(fcast$upper, lambda) } return(structure(fcast, class = "forecast")) } #' @export summary.tslm <- function(object, ...) { # Remove NA from object structure as summary.lm() expects (#836) object$residuals <- na.omit(as.numeric(object$residuals)) object$fitted.values <- na.omit(as.numeric(object$fitted.values)) if(!is.null(object$lambda)) { object$fitted.values <- BoxCox(object$fitted.values, object$lambda) } NextMethod() } # Compute cross-validation and information criteria from a linear model #' Cross-validation statistic #' #' Computes the leave-one-out cross-validation statistic (the mean of PRESS #' -- prediction residual sum of squares), AIC, corrected AIC, BIC and adjusted #' R^2 values for a linear model. #' #' #' @param obj output from \code{\link[stats]{lm}} or \code{\link{tslm}} #' @return Numerical vector containing CV, AIC, AICc, BIC and AdjR2 values. 
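#' @details For a linear model the leave-one-out residuals can be computed
#' without refitting, so the CV statistic reduces to (this restates the
#' computation performed in the code)
#' \deqn{CV = \frac{1}{n}\sum_{i=1}^{n}\left(\frac{e_i}{1-h_i}\right)^2,}{CV = mean((e/(1-h))^2),}
#' where \eqn{e_i}{e} denotes the residuals and \eqn{h_i}{h} the hat values
#' of the fitted model.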
#' @author Rob J Hyndman #' @seealso \code{\link[stats]{AIC}} #' @keywords models #' @examples #' #' y <- ts(rnorm(120,0,3) + 20*sin(2*pi*(1:120)/12), frequency=12) #' fit1 <- tslm(y ~ trend + season) #' fit2 <- tslm(y ~ season) #' CV(fit1) #' CV(fit2) #' #' @export CV <- function(obj) { if (!is.element("lm", class(obj))) { stop("This function is for objects of class lm") } n <- length(obj$residuals) k <- extractAIC(obj)[1] - 1 # number of predictors (constant removed) aic <- extractAIC(obj)[2] + 2 # add 2 for the variance estimate aicc <- aic + 2 * (k + 2) * (k + 3) / (n - k - 3) bic <- aic + (k + 2) * (log(n) - 2) cv <- mean((residuals(obj) / (1 - hatvalues(obj))) ^ 2, na.rm = TRUE) adjr2 <- summary(obj)$adj out <- c(cv, aic, aicc, bic, adjr2) names(out) <- c("CV", "AIC", "AICc", "BIC", "AdjR2") return(out) } forecast/R/forecastTBATS.R0000644000176200001440000001333514133712523015016 0ustar liggesusers#' @rdname forecast.bats #' @export forecast.tbats <- function(object, h, level = c(80, 95), fan = FALSE, biasadj = NULL, ...) { # Check if forecast.tbats called incorrectly if (identical(class(object), "bats")) { return(forecast.bats(object, h, level, fan, biasadj, ...)) } # Set up the variables if (any(class(object$y) == "ts")) { ts.frequency <- frequency(object$y) } else { ts.frequency <- ifelse(!is.null(object$seasonal.periods), max(object$seasonal.periods), 1) } if (missing(h)) { if (is.null(object$seasonal.periods)) { h <- ifelse(ts.frequency == 1, 10, 2 * ts.frequency) } else { h <- 2 * max(object$seasonal.periods) } } else if (h <= 0) { stop("Forecast horizon out of bounds") } if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 && max(level) < 1) { level <- 100 * level } else if (min(level) < 0 || max(level) > 99.99) { stop("Confidence limit out of range") } } if (!is.null(object$k.vector)) { tau <- 2 * sum(object$k.vector) } else { tau <- 0 } x <- matrix(0, nrow = nrow(object$x), ncol = h) y.forecast <- numeric(h) if (!is.null(object$beta)) { adj.beta <- 1 } else { adj.beta <- 0 } # Set up the matrices w <- .Call("makeTBATSWMatrix", smallPhi_s = object$damping.parameter, kVector_s = as.integer(object$k.vector), arCoefs_s = object$ar.coefficients, maCoefs_s = object$ma.coefficients, tau_s = as.integer(tau), PACKAGE = "forecast") if (!is.null(object$seasonal.periods)) { gamma.bold <- matrix(0, nrow = 1, ncol = tau) .Call("updateTBATSGammaBold", gammaBold_s = gamma.bold, kVector_s = as.integer(object$k.vector), gammaOne_s = object$gamma.one.values, gammaTwo_s = object$gamma.two.values, PACKAGE = "forecast") } else { gamma.bold <- NULL } g <- matrix(0, nrow = (tau + 1 + adj.beta + object$p + object$q), ncol = 1) if (object$p != 0) { g[(1 + adj.beta + tau + 1), 1] <- 1 } if (object$q != 0) { g[(1 + adj.beta + tau + object$p + 1), 1] <- 1 } .Call("updateTBATSGMatrix", g_s = g, gammaBold_s = gamma.bold, alpha_s = object$alpha, beta_s = object$beta.v, PACKAGE = "forecast") # print(g) F <- makeTBATSFMatrix(alpha = object$alpha, beta = object$beta, small.phi = object$damping.parameter, seasonal.periods = object$seasonal.periods, k.vector = as.integer(object$k.vector), gamma.bold.matrix = gamma.bold, ar.coefs = object$ar.coefficients, ma.coefs = object$ma.coefficients) # Do the forecast y.forecast[1] <- w$w.transpose %*% object$x[, ncol(object$x)] x[, 1] <- F %*% object$x[, ncol(object$x)] # + g %*% object$errors[length(object$errors)] if (h > 1) { for (t in 2:h) { x[, t] <- F %*% x[, (t - 1)] y.forecast[t] <- w$w.transpose %*% x[, (t - 1)] } } ## Make prediction intervals 
here lower.bounds <- upper.bounds <- matrix(NA, ncol = length(level), nrow = h) variance.multiplier <- numeric(h) variance.multiplier[1] <- 1 if (h > 1) { for (j in 1:(h - 1)) { if (j == 1) { f.running <- diag(ncol(F)) } else { f.running <- f.running %*% F } c.j <- w$w.transpose %*% f.running %*% g variance.multiplier[(j + 1)] <- variance.multiplier[j] + c.j^2 } } variance <- object$variance * variance.multiplier # print(variance) st.dev <- sqrt(variance) for (i in 1:length(level)) { marg.error <- st.dev * abs(qnorm((100 - level[i]) / 200)) lower.bounds[, i] <- y.forecast - marg.error upper.bounds[, i] <- y.forecast + marg.error } # Inv Box Cox transform if required if (!is.null(object$lambda)) { y.forecast <- InvBoxCox(y.forecast, object$lambda, biasadj, list(level = level, upper = upper.bounds, lower = lower.bounds)) lower.bounds <- InvBoxCox(lower.bounds, object$lambda) if (object$lambda < 1) { lower.bounds <- pmax(lower.bounds, 0) } upper.bounds <- InvBoxCox(upper.bounds, object$lambda) } colnames(upper.bounds) <- colnames(lower.bounds) <- paste0(level, "%") forecast.object <- list( model = object, mean = future_msts(object$y, y.forecast), level = level, x = object$y, series = object$series, upper = future_msts(object$y, upper.bounds), lower = future_msts(object$y, lower.bounds), fitted = copy_msts(object$y, object$fitted.values), method = as.character(object), residuals = copy_msts(object$y, object$errors) ) if (is.null(object$series)) { forecast.object$series <- deparse(object$call$y) } class(forecast.object) <- "forecast" return(forecast.object) } #' @export as.character.tbats <- function(x, ...) { name <- "TBATS(" if (!is.null(x$lambda)) { name <- paste(name, round(x$lambda, digits = 3), sep = "") } else { name <- paste(name, "1", sep = "") } name <- paste(name, ", {", sep = "") if (!is.null(x$ar.coefficients)) { name <- paste(name, length(x$ar.coefficients), sep = "") } else { name <- paste(name, "0", sep = "") } name <- paste(name, ",", sep = "") if (!is.null(x$ma.coefficients)) { name <- paste(name, length(x$ma.coefficients), sep = "") } else { name <- paste(name, "0", sep = "") } name <- paste(name, "}, ", sep = "") if (!is.null(x$damping.parameter)) { name <- paste(name, round(x$damping.parameter, digits = 3), ",", sep = "") } else { name <- paste(name, "-,", sep = "") } if (!is.null(x$seasonal.periods)) { name <- paste(name, " {", sep = "") M <- length(x$seasonal.periods) for (i in 1:M) { name <- paste(name, "<", round(x$seasonal.periods[i], 2), ",", x$k.vector[i], ">", sep = "") if (i < M) { name <- paste(name, ", ", sep = "") } else { name <- paste(name, "})", sep = "") } } } else { name <- paste(name, "{-})", sep = "") } return(name) } forecast/R/data.R0000644000176200001440000000321014003673410013307 0ustar liggesusers #' Australian monthly gas production #' #' Australian monthly gas production: 1956--1995. #' #' #' @format Time series data #' @source Australian Bureau of Statistics. #' @keywords datasets #' @examples #' plot(gas) #' seasonplot(gas) #' tsdisplay(gas) #' "gas" #' Daily morning gold prices #' #' Daily morning gold prices in US dollars. 1 January 1985 -- 31 March 1989. #' #' #' @format Time series data #' @keywords datasets #' @examples #' tsdisplay(gold) #' "gold" #' Half-hourly electricity demand #' #' Half-hourly electricity demand in England and Wales from Monday 5 June 2000 #' to Sunday 27 August 2000. Discussed in Taylor (2003), and kindly provided by #' James W Taylor. Units: Megawatts #' #' #' @format Time series data #' @references Taylor, J.W. 
(2003) Short-term electricity demand forecasting #' using double seasonal exponential smoothing. \emph{Journal of the #' Operational Research Society}, \bold{54}, 799-805. #' @source James W Taylor #' @keywords datasets #' @examples #' plot(taylor) #' "taylor" #' Australian total wine sales #' #' Australian total wine sales by wine makers in bottles <= 1 litre. Jan 1980 #' -- Aug 1994. #' #' #' @format Time series data #' @source Time Series Data Library. \url{https://pkg.yangzhuoranyang.com/tsdl/} #' @keywords datasets #' @examples #' tsdisplay(wineind) #' "wineind" #' Quarterly production of woollen yarn in Australia #' #' Quarterly production of woollen yarn in Australia: tonnes. Mar 1965 -- Sep #' 1994. #' #' #' @format Time series data #' @source Time Series Data Library. \url{https://pkg.yangzhuoranyang.com/tsdl/} #' @keywords datasets #' @examples #' tsdisplay(woolyrnq) #' "woolyrnq" forecast/R/arfima.R0000644000176200001440000002525214003673410013647 0ustar liggesusers# Remove missing values from end points na.ends <- function(x) { tspx <- tsp(x) # Strip initial and final missing values nonmiss <- (1:length(x))[!is.na(x)] if (length(nonmiss) == 0) { stop("No non-missing data") } j <- nonmiss[1] k <- nonmiss[length(nonmiss)] x <- x[j:k] if (!is.null(tspx)) { x <- ts(x, start = tspx[1] + (j - 1) / tspx[3], frequency = tspx[3]) } return(x) } # Add back missing values at ends # x is original series. y is the series with NAs removed at ends. # Returns y with the NAs put back at both ends. undo.na.ends <- function(x, y) { n <- length(x) nonmiss <- (1:length(x))[!is.na(x)] j <- nonmiss[1] k <- nonmiss[length(nonmiss)] if (j > 1) { y <- c(rep(NA, j - 1), y) } if (k < n) { y <- c(y, rep(NA, n - k)) } tspx <- tsp(x) if (!is.null(tspx)) { tsp(y) <- tsp(x) } return(y) } ## Undifference unfracdiff <- function(x, y, n, h, d) { bin.c <- (-1) ^ (0:(n + h)) * choose(d, (0:(n + h))) b <- numeric(n) xnew <- LHS <- numeric(h) RHS <- cumsum(y) bs <- cumsum(bin.c[1:h]) b <- bin.c[(1:n) + 1] xnew[1] <- RHS[1] <- y[1] - sum(b * rev(x)) if (h > 1) { for (k in 2:h) { b <- b + bin.c[(1:n) + k] RHS[k] <- RHS[k] - sum(b * rev(x)) LHS[k] <- sum(rev(xnew[1:(k - 1)]) * bs[2:k]) xnew[k] <- RHS[k] - LHS[k] } } tspx <- tsp(x) if (is.null(tspx)) { tspx <- c(1, length(x), 1) } return(ts(xnew, frequency = tspx[3], start = tspx[2] + 1 / tspx[3])) } ## Automatic ARFIMA modelling ## Will return Arima object if d < 0.01 to prevent estimation problems #' Fit a fractionally differenced ARFIMA model #' #' An ARFIMA(p,d,q) model is selected and estimated automatically using the #' Hyndman-Khandakar (2008) algorithm to select p and q and the Haslett and #' Raftery (1989) algorithm to estimate the parameters including d. #' #' This function combines \code{\link[fracdiff]{fracdiff}} and #' \code{\link{auto.arima}} to automatically select and estimate an ARFIMA #' model. The fractional differencing parameter is chosen first assuming an #' ARFIMA(2,d,0) model. Then the data are fractionally differenced using the #' estimated d and an ARMA model is selected for the resulting time series #' using \code{\link{auto.arima}}. Finally, the full ARFIMA(p,d,q) model is #' re-estimated using \code{\link[fracdiff]{fracdiff}}. If \code{estim=="mle"}, #' the ARMA coefficients are refined using \code{\link[stats]{arima}}. #' #' @param y a univariate time series (numeric vector). #' @param drange Allowable values of d to be considered. Default of #' \code{c(0,0.5)} ensures a stationary model is returned. 
#' @param estim If \code{estim=="ls"}, then the ARMA parameters are calculated #' using the Haslett-Raftery algorithm. If \code{estim=="mle"}, then the ARMA #' parameters are calculated using full MLE via the \code{\link[stats]{arima}} #' function. #' @param model Output from a previous call to \code{arfima}. If model is #' passed, this same model is fitted to y without re-estimating any parameters. #' @param x Deprecated. Included for backwards compatibility. #' @param \dots Other arguments passed to \code{\link{auto.arima}} when #' selecting p and q. #' @inheritParams forecast #' #' @return A list object of S3 class \code{"fracdiff"}, which is described in #' the \code{\link[fracdiff]{fracdiff}} documentation. A few additional objects #' are added to the list including \code{x} (the original time series), and the #' \code{residuals} and \code{fitted} values. #' #' @export #' #' @author Rob J Hyndman and Farah Yasmeen #' @seealso \code{\link[fracdiff]{fracdiff}}, \code{\link{auto.arima}}, #' \code{\link{forecast.fracdiff}}. #' @references J. Haslett and A. E. Raftery (1989) Space-time Modelling with #' Long-memory Dependence: Assessing Ireland's Wind Power Resource (with #' discussion); \emph{Applied Statistics} \bold{38}, 1-50. #' #' Hyndman, R.J. and Khandakar, Y. (2008) "Automatic time series forecasting: #' The forecast package for R", \emph{Journal of Statistical Software}, #' \bold{26}(3). #' @keywords ts #' @examples #' #' library(fracdiff) #' x <- fracdiff.sim( 100, ma=-.4, d=.3)$series #' fit <- arfima(x) #' tsdisplay(residuals(fit)) #' arfima <- function(y, drange = c(0, 0.5), estim = c("mle", "ls"), model = NULL, lambda = NULL, biasadj = FALSE, x=y, ...) { estim <- match.arg(estim) # require(fracdiff) seriesname <- deparse(substitute(y)) orig.x <- x if (!is.null(lambda)) { x <- BoxCox(x, lambda) lambda <- attr(x, "lambda") } # Re-fit arfima model if (!is.null(model)) { fit <- model fit$residuals <- fit$fitted <- fit$lambda <- NULL if (!is.null(lambda)) { fit$lambda <- lambda # Required for residuals.fracdiff() } } # Estimate model else { # Strip initial and final missing values xx <- na.ends(x) # Remove mean meanx <- mean(xx) xx <- xx - meanx # Choose differencing parameter with AR(2) proxy to handle correlations suppressWarnings(fit <- fracdiff::fracdiff(xx, nar = 2, drange = drange)) # Choose p and q d <- fit$d y <- fracdiff::diffseries(xx, d = d) fit <- auto.arima(y, max.P = 0, max.Q = 0, stationary = TRUE, ...) # Refit model using fracdiff suppressWarnings(fit <- fracdiff::fracdiff(xx, nar = fit$arma[1], nma = fit$arma[2], drange = drange)) # Refine parameters with MLE if (estim == "mle") { y <- fracdiff::diffseries(xx, d = fit$d) p <- length(fit$ar) q <- length(fit$ma) fit2 <- try(Arima(y, order = c(p, 0, q), include.mean = FALSE)) if (is.element("try-error", class(fit2))) { fit2 <- try(Arima(y, order = c(p, 0, q), include.mean = FALSE, method = "ML")) } if (!is.element("try-error", class(fit2))) { if (p > 0) { fit$ar <- fit2$coef[1:p] } if (q > 0) { fit$ma <- -fit2$coef[p + (1:q)] } fit$residuals <- fit2$residuals } else { warning("MLE estimation failed. 
Returning LS estimates") } } } # Add things to model that will be needed by forecast.fracdiff fit$x <- orig.x fit$residuals <- undo.na.ends(x, residuals(fit)) fit$fitted <- x - fit$residuals if (!is.null(lambda)) { fit$fitted <- InvBoxCox(fit$fitted, lambda, biasadj, var(fit$residuals)) attr(lambda, "biasadj") <- biasadj } fit$lambda <- lambda fit$call <- match.call() fit$series <- seriesname fit <- structure(fit, class = c("ARFIMA","fracdiff")) # fit$call$data <- data.frame(x=x) #Consider replacing fit$call with match.call for consistency and tidyness return(fit) } # Forecast the output of fracdiff() or arfima() #' @rdname forecast.Arima #' @export forecast.fracdiff <- function(object, h=10, level=c(80, 95), fan=FALSE, lambda=object$lambda, biasadj=NULL, ...) { # Extract data x <- object$x <- getResponse(object) if (is.null(x)) { stop("Unable to find original time series") } if (!is.null(lambda)) { x <- BoxCox(x, lambda) lambda <- attr(x, "lambda") } xx <- na.ends(x) n <- length(xx) meanx <- mean(xx) xx <- xx - meanx # Construct ARMA part of model and forecast with it y <- fracdiff::diffseries(xx, d = object$d) fit <- Arima(y, order = c(length(object$ar), 0, length(object$ma)), include.mean = FALSE, fixed = c(object$ar, -object$ma)) fcast.y <- forecast(fit, h = h, level = level) # Undifference fcast.x <- unfracdiff(xx, fcast.y$mean, n, h, object$d) # Binomial coefficient for expansion of d bin.c <- (-1) ^ (0:(n + h)) * choose(object$d, (0:(n + h))) # Cumulative forecasts of y and forecast of y # b <- numeric(n) # fcast.x <- LHS <- numeric(h) # RHS <- cumsum(fcast.y$mean) # bs <- cumsum(bin.c[1:h]) # b <- bin.c[(1:n)+1] # fcast.x[1] <- RHS[1] <- fcast.y$mean[1] - sum(b*rev(xx)) # if(h>1) # { # for (k in 2:h) # { # b <- b + bin.c[(1:n)+k] # RHS[k] <- RHS[k] - sum(b*rev(xx)) # LHS[k] <- sum(rev(fcast.x[1:(k-1)]) * bs[2:k]) # fcast.x[k] <- RHS[k] - LHS[k] # } # } # Extract stuff from ARMA model p <- fit$arma[1] q <- fit$arma[2] phi <- theta <- numeric(h) if (p > 0) { phi[1:p] <- fit$coef[1:p] } if (q > 0) { theta[1:q] <- fit$coef[p + (1:q)] } # Calculate psi weights new.phi <- psi <- numeric(h) psi[1] <- new.phi[1] <- 1 if (h > 1) { new.phi[2:h] <- -bin.c[2:h] for (i in 2:h) { if (p > 0) { new.phi[i] <- sum(phi[1:(i - 1)] * bin.c[(i - 1):1]) - bin.c[i] } psi[i] <- sum(new.phi[2:i] * rev(psi[1:(i - 1)])) + theta[i - 1] } } # Compute forecast variances fse <- sqrt(cumsum(psi ^ 2) * fit$sigma2) # Compute prediction intervals if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 && max(level) < 1) { level <- 100 * level } else if (min(level) < 0 || max(level) > 99.99) { stop("Confidence limit out of range") } } nint <- length(level) upper <- lower <- matrix(NA, ncol = nint, nrow = h) for (i in 1:nint) { qq <- qnorm(0.5 * (1 + level[i] / 100)) lower[, i] <- fcast.x - qq * fse upper[, i] <- fcast.x + qq * fse } colnames(lower) <- colnames(upper) <- paste(level, "%", sep = "") res <- undo.na.ends(x, residuals(fit)) fits <- x - res data.tsp <- tsp(x) if (is.null(data.tsp)) { data.tsp <- c(1, length(x), 1) } mean.fcast <- ts(fcast.x + meanx, frequency = data.tsp[3], start = data.tsp[2] + 1 / data.tsp[3]) lower <- ts(lower + meanx, frequency = data.tsp[3], start = data.tsp[2] + 1 / data.tsp[3]) upper <- ts(upper + meanx, frequency = data.tsp[3], start = data.tsp[2] + 1 / data.tsp[3]) method <- paste("ARFIMA(", p, ",", round(object$d, 2), ",", q, ")", sep = "") if (!is.null(lambda)) { x <- InvBoxCox(x, lambda) fits <- InvBoxCox(fits, lambda) mean.fcast <- InvBoxCox(mean.fcast, lambda, 
biasadj, list(level = level, upper = upper, lower = lower)) lower <- InvBoxCox(lower, lambda) upper <- InvBoxCox(upper, lambda) } seriesname <- if (!is.null(object$series)) { object$series } else { deparse(object$call$x) } return(structure(list( x = x, mean = mean.fcast, upper = upper, lower = lower, level = level, method = method, model = object, series = seriesname, residuals = res, fitted = fits ), class = "forecast")) } # Fitted values from arfima() #' @rdname fitted.Arima #' @export fitted.ARFIMA <- function(object, h = 1, ...) { if (!is.null(object$fitted)) { # Object produced by arfima() if (h == 1) { return(object$fitted) } else { return(hfitted(object = object, h = h, FUN = "arfima", ...)) } } else { if (h != 1) { warning("h-step fits are not supported for models produced by fracdiff(), returning one-step fits (h=1)") } x <- getResponse(object) return(x - residuals(object)) } } forecast/R/tbats.R0000644000176200001440000006305614003673410013531 0ustar liggesusers# Author: srazbash ############################################################################### #' TBATS model (Exponential smoothing state space model with Box-Cox #' transformation, ARMA errors, Trend and Seasonal components) #' #' Fits a TBATS model applied to \code{y}, as described in De Livera, Hyndman & #' Snyder (2011). Parallel processing is used by default to speed up the #' computations. #' #' @aliases as.character.tbats print.tbats #' #' @param y The time series to be forecast. Can be \code{numeric}, \code{msts} #' or \code{ts}. Only univariate time series are supported. #' @param use.box.cox \code{TRUE/FALSE} indicates whether to use the Box-Cox #' transformation or not. If \code{NULL} then both are tried and the best fit #' is selected by AIC. #' @param use.trend \code{TRUE/FALSE} indicates whether to include a trend or #' not. If \code{NULL} then both are tried and the best fit is selected by AIC. #' @param use.damped.trend \code{TRUE/FALSE} indicates whether to include a #' damping parameter in the trend or not. If \code{NULL} then both are tried #' and the best fit is selected by AIC. #' @param seasonal.periods If \code{y} is \code{numeric} then seasonal periods #' can be specified with this parameter. #' @param use.arma.errors \code{TRUE/FALSE} indicates whether to include ARMA #' errors or not. If \code{TRUE} the best fit is selected by AIC. If #' \code{FALSE} then the selection algorithm does not consider ARMA errors. #' @param use.parallel \code{TRUE/FALSE} indicates whether or not to use #' parallel processing. #' @param num.cores The number of parallel processes to be used if using #' parallel processing. If \code{NULL} then the number of logical cores is #' detected and all available cores are used. #' @param bc.lower The lower limit (inclusive) for the Box-Cox transformation. #' @param bc.upper The upper limit (inclusive) for the Box-Cox transformation. #' @param biasadj Use adjusted back-transformed mean for Box-Cox #' transformations. If TRUE, point forecasts and fitted values are mean #' forecast. Otherwise, these points can be considered the median of the #' forecast densities. #' @param model Output from a previous call to \code{tbats}. If model is #' passed, this same model is fitted to \code{y} without re-estimating any #' parameters. #' @param ... Additional arguments to be passed to \code{auto.arima} when #' choosing an ARMA(p, q) model for the errors. 
(Note that xreg will be ignored, #' as will any arguments concerning seasonality and differencing, but arguments #' controlling the values of p and q will be used.) #' @return An object with class \code{c("tbats", "bats")}. The generic accessor #' functions \code{fitted.values} and \code{residuals} extract useful features #' of the value returned by \code{bats} and associated functions. The fitted #' model is designated TBATS(omega, p,q, phi, <m1,k1>,...,<mJ,kJ>) where omega #' is the Box-Cox parameter and phi is the damping parameter; the error is #' modelled as an ARMA(p,q) process and m1,...,mJ list the seasonal periods #' used in the model and k1,...,kJ are the corresponding number of Fourier #' terms used for each seasonality. #' @author Slava Razbash and Rob J Hyndman #' @seealso \code{\link{tbats.components}}. #' @references De Livera, A.M., Hyndman, R.J., & Snyder, R. D. (2011), #' Forecasting time series with complex seasonal patterns using exponential #' smoothing, \emph{Journal of the American Statistical Association}, #' \bold{106}(496), 1513-1527. #' @keywords ts #' @examples #' #' \dontrun{ #' fit <- tbats(USAccDeaths) #' plot(forecast(fit)) #' #' taylor.fit <- tbats(taylor) #' plot(forecast(taylor.fit))} #' #' @export tbats <- function(y, use.box.cox=NULL, use.trend=NULL, use.damped.trend=NULL, seasonal.periods=NULL, use.arma.errors=TRUE, use.parallel=length(y) > 1000, num.cores=2, bc.lower=0, bc.upper=1, biasadj=FALSE, model=NULL, ...) { if (!is.numeric(y) || NCOL(y) > 1) { stop("y should be a univariate time series") } seriesname <- deparse(substitute(y)) origy <- y attr_y <- attributes(origy) # Get seasonal periods if (is.null(seasonal.periods)) { if (any(class(y) == "msts")) { seasonal.periods <- sort(attr(y, "msts")) } else if (class(y) == "ts") { seasonal.periods <- frequency(y) } else { y <- as.ts(y) seasonal.periods <- 1 } } else { # Add ts attributes if (!any(class(y) == "ts")) { y <- msts(y, seasonal.periods) } } seasonal.periods <- unique(pmax(seasonal.periods, 1)) if (all(seasonal.periods == 1)) { seasonal.periods <- NULL } ny <- length(y) y <- na.contiguous(y) if (ny != length(y)) { warning("Missing values encountered. Using longest contiguous portion of time series") if (!is.null(attr_y$tsp)) { attr_y$tsp[1:2] <- range(time(y)) } } # Refit model if available if (!is.null(model)) { if (is.element("tbats", class(model))) { refitModel <- try(fitPreviousTBATSModel(y, model = model), silent = TRUE) } else if (is.element("bats", class(model))) { refitModel <- bats(origy, model = model) } return(refitModel) } # Return constant model if required if (is.constant(y)) { fit <- list( y = y, x = matrix(y, nrow = 1, ncol = ny), errors = y * 0, fitted.values = y, seed.states = matrix(y[1]), AIC = -Inf, likelihood = -Inf, variance = 0, alpha = 0.9999, method = "TBATS", call = match.call() ) return(structure(fit, class = "bats")) } # Check that observations are positive if (any((y <= 0))) { use.box.cox <- FALSE } # Fit non-seasonal model as a benchmark non.seasonal.model <- bats( as.numeric(y), use.box.cox = use.box.cox, use.trend = use.trend, use.damped.trend = use.damped.trend, use.arma.errors = use.arma.errors, use.parallel = use.parallel, num.cores = num.cores, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj, ... 
) # If non-seasonal data, return the non-seasonal model if (is.null(seasonal.periods)) { non.seasonal.model$call <- match.call() attributes(non.seasonal.model$fitted.values) <- attributes(non.seasonal.model$errors) <- attributes(origy) non.seasonal.model$y <- origy return(non.seasonal.model) } else { seasonal.mask <- (seasonal.periods == 1) seasonal.periods <- seasonal.periods[!seasonal.mask] } if (is.null(use.box.cox)) { use.box.cox <- c(FALSE, TRUE) } if (any(use.box.cox)) { init.box.cox <- BoxCox.lambda(y, lower = bc.lower, upper = bc.upper) } else { init.box.cox <- NULL } if (is.null(use.trend)) { use.trend <- c(FALSE, TRUE) } else if (use.trend == FALSE) { use.damped.trend <- FALSE } if (is.null(use.damped.trend)) { use.damped.trend <- c(FALSE, TRUE) } # Set a vector of model params for later comparison model.params <- logical(length = 3) model.params[1] <- any(use.box.cox) model.params[2] <- any(use.trend) model.params[3] <- any(use.damped.trend) y <- as.numeric(y) n <- length(y) k.vector <- rep(1, length(seasonal.periods)) if (use.parallel) { if (is.null(num.cores)) { num.cores <- detectCores(all.tests = FALSE, logical = TRUE) } clus <- makeCluster(num.cores) } best.model <- try(fitSpecificTBATS( y, use.box.cox = model.params[1], use.beta = model.params[2], use.damping = model.params[3], seasonal.periods = seasonal.periods, k.vector = k.vector, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj ), silent = TRUE) if (is.element("try-error", class(best.model))) { best.model <- list(AIC = Inf) } for (i in 1:length(seasonal.periods)) { if (seasonal.periods[i] == 2) { next } max.k <- floor(((seasonal.periods[i] - 1) / 2)) if (i != 1) { current.k <- 2 while (current.k <= max.k) { if (seasonal.periods[i] %% current.k != 0) { current.k <- current.k + 1 next } latter <- seasonal.periods[i] / current.k if (any(((seasonal.periods[1:(i - 1)] %% latter) == 0))) { max.k <- current.k - 1 break } else { current.k <- current.k + 1 } } } if (max.k == 1) { next } if (max.k <= 6) { k.vector[i] <- max.k best.model$AIC <- Inf repeat { # old.k <- k.vector[i] # k.vector[i] <- k.vector[i]-1 new.model <- try( fitSpecificTBATS( y, use.box.cox = model.params[1], use.beta = model.params[2], use.damping = model.params[3], seasonal.periods = seasonal.periods, k.vector = k.vector, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj ), silent = TRUE ) if (is.element("try-error", class(new.model))) { new.model <- list(AIC = Inf) } if (new.model$AIC > best.model$AIC) { k.vector[i] <- k.vector[i] + 1 break } else { if (k.vector[i] == 1) { break } k.vector[i] <- k.vector[i] - 1 best.model <- new.model } } next } else { # Three different k vectors step.up.k <- k.vector step.down.k <- k.vector step.up.k[i] <- 7 step.down.k[i] <- 5 k.vector[i] <- 6 # Fit three different models ### if(use.parallel) then do parallel if (use.parallel) { k.control.array <- rbind(step.up.k, step.down.k, k.vector) models.list <- clusterApplyLB( clus, c(1:3), parFitSpecificTBATS, y = y, box.cox = model.params[1], trend = model.params[2], damping = model.params[3], seasonal.periods = seasonal.periods, k.control.matrix = k.control.array, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj ) up.model <- models.list[[1]] level.model <- models.list[[3]] down.model <- models.list[[2]] } else { up.model <- try( fitSpecificTBATS( y, use.box.cox = model.params[1], use.beta = model.params[2], use.damping = model.params[3], seasonal.periods = 
seasonal.periods, k.vector = step.up.k, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj ), silent = TRUE ) if (is.element("try-error", class(up.model))) { up.model <- list(AIC = Inf) } level.model <- try( fitSpecificTBATS( y, use.box.cox = model.params[1], use.beta = model.params[2], use.damping = model.params[3], seasonal.periods = seasonal.periods, k.vector = k.vector, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj ), silent = TRUE ) if (is.element("try-error", class(level.model))) { level.model <- list(AIC = Inf) } down.model <- try( fitSpecificTBATS( y, use.box.cox = model.params[1], use.beta = model.params[2], use.damping = model.params[3], seasonal.periods = seasonal.periods, k.vector = step.down.k, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj ), silent = TRUE ) if (is.element("try-error", class(down.model))) { down.model <- list(AIC = Inf) } } # Decide the best model of the three and then follow that direction to find the optimal k aic.vector <- c(up.model$AIC, level.model$AIC, down.model$AIC) ## If shifting down if (min(aic.vector) == down.model$AIC) { best.model <- down.model k.vector[i] <- 5 repeat{ k.vector[i] <- k.vector[i] - 1 down.model <- try( fitSpecificTBATS( y = y, use.box.cox = model.params[1], use.beta = model.params[2], use.damping = model.params[3], seasonal.periods = seasonal.periods, k.vector = k.vector, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj ), silent = TRUE ) if (is.element("try-error", class(down.model))) { down.model <- list(AIC = Inf) } if (down.model$AIC > best.model$AIC) { k.vector[i] <- k.vector[i] + 1 break } else { best.model <- down.model } if (k.vector[i] == 1) { break } } ## If staying level } else if (min(aic.vector) == level.model$AIC) { best.model <- level.model next ## If shifting up } else { best.model <- up.model k.vector[i] <- 7 repeat { k.vector[i] <- k.vector[i] + 1 up.model <- try( fitSpecificTBATS(y, model.params[1], model.params[2], model.params[3], seasonal.periods, k.vector, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj), silent = TRUE ) if (is.element("try-error", class(up.model))) { up.model <- list(AIC = Inf) } if (up.model$AIC > best.model$AIC) { k.vector[i] <- k.vector[i] - 1 break } else { best.model <- up.model } if (k.vector[i] == max.k) { break } } } } } aux.model <- best.model if (non.seasonal.model$AIC < best.model$AIC) { best.model <- non.seasonal.model } if ((length(use.box.cox) == 1) && use.trend[1] && (length(use.trend) == 1) && (length(use.damped.trend) == 1) && (use.parallel)) { # In this case, there is only one alternative. use.parallel <- FALSE stopCluster(clus) } else if ((length(use.box.cox) == 1) && !use.trend[1] && (length(use.trend) == 1) && (use.parallel)) { # As above, in this case, there is only one alternative. 
use.parallel <- FALSE stopCluster(clus) } if (use.parallel) { # Set up the control array control.array <- NULL for (box.cox in use.box.cox) { for (trend in use.trend) { for (damping in use.damped.trend) { if (!trend && damping) { next } control.line <- c(box.cox, trend, damping) if (!is.null(control.array)) { control.array <- rbind(control.array, control.line) } else { control.array <- control.line } } } } models.list <- clusterApplyLB(clus, c(1:nrow(control.array)), parFilterTBATSSpecifics, y = y, control.array = control.array, model.params = model.params, seasonal.periods = seasonal.periods, k.vector = k.vector, use.arma.errors = use.arma.errors, aux.model = aux.model, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj, ...) stopCluster(clus) ## Choose the best model #### Get the AICs aics <- numeric(nrow(control.array)) for (i in 1:nrow(control.array)) { aics[i] <- models.list[[i]]$AIC } best.number <- which.min(aics) best.seasonal.model <- models.list[[best.number]] if (best.seasonal.model$AIC < best.model$AIC) { best.model <- best.seasonal.model } } else { for (box.cox in use.box.cox) { for (trend in use.trend) { for (damping in use.damped.trend) { if (all((model.params == c(box.cox, trend, damping)))) { new.model <- filterTBATSSpecifics(y, box.cox, trend, damping, seasonal.periods, k.vector, use.arma.errors, aux.model = aux.model, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj, ...) } else if (trend || !damping) { new.model <- filterTBATSSpecifics(y, box.cox, trend, damping, seasonal.periods, k.vector, use.arma.errors, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj, ...) } if (new.model$AIC < best.model$AIC) { best.model <- new.model } } } } } best.model$call <- match.call() attributes(best.model$fitted.values) <- attributes(best.model$errors) <- attr_y best.model$y <- origy best.model$series <- seriesname best.model$method <- "TBATS" return(best.model) } ###################################################################################################################################### parFilterTBATSSpecifics <- function(control.number, y, control.array, model.params, seasonal.periods, k.vector, use.arma.errors, aux.model=NULL, init.box.cox=NULL, bc.lower=0, bc.upper=1, biasadj=FALSE, ...) { box.cox <- control.array[control.number, 1] trend <- control.array[control.number, 2] damping <- control.array[control.number, 3] if (!all((model.params == c(box.cox, trend, damping)))) { first.model <- try( fitSpecificTBATS(y, use.box.cox = box.cox, use.beta = trend, use.damping = damping, seasonal.periods = seasonal.periods, k.vector = k.vector, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj), silent = TRUE ) } else { first.model <- aux.model } if (is.element("try-error", class(first.model))) { first.model <- list(AIC = Inf) } if (use.arma.errors) { suppressWarnings(arma <- try(auto.arima(as.numeric(first.model$errors), d = 0, ...), silent = TRUE)) if (!is.element("try-error", class(arma))) { p <- arma$arma[1] q <- arma$arma[2] if ((p != 0) || (q != 0)) { # Did auto.arima() find any AR() or MA() coefficients? 
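# If so, refit the TBATS model with ARMA error terms of that order (seeded at
# zero) and keep whichever of the two candidate fits has the lower AIC.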
if (p != 0) { ar.coefs <- numeric(p) } else { ar.coefs <- NULL } if (q != 0) { ma.coefs <- numeric(q) } else { ma.coefs <- NULL } starting.params <- first.model$parameters second.model <- try( fitSpecificTBATS(y, use.box.cox = box.cox, use.beta = trend, use.damping = damping, seasonal.periods = seasonal.periods, k.vector = k.vector, ar.coefs = ar.coefs, ma.coefs = ma.coefs, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj), silent = TRUE ) if (is.element("try-error", class(second.model))) { second.model <- list(AIC = Inf) } if (second.model$AIC < first.model$AIC) { return(second.model) } else { return(first.model) } } else { # Else auto.arima() did not find any AR() or MA()coefficients return(first.model) } } else { return(first.model) } } else { return(first.model) } } ################################################################################################# parFitSpecificTBATS <- function(control.number, y, box.cox, trend, damping, seasonal.periods, k.control.matrix, init.box.cox=NULL, bc.lower=0, bc.upper=1, biasadj=FALSE) { k.vector <- k.control.matrix[control.number, ] model <- try( fitSpecificTBATS(y, use.box.cox = box.cox, use.beta = trend, use.damping = damping, seasonal.periods = seasonal.periods, k.vector = k.vector, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj), silent = TRUE ) if (is.element("try-error", class(model))) { model <- list(AIC = Inf) } return(model) } filterTBATSSpecifics <- function(y, box.cox, trend, damping, seasonal.periods, k.vector, use.arma.errors, aux.model=NULL, init.box.cox=NULL, bc.lower=0, bc.upper=1, biasadj=FALSE, ...) { if (is.null(aux.model)) { first.model <- try( fitSpecificTBATS(y, use.box.cox = box.cox, use.beta = trend, use.damping = damping, seasonal.periods = seasonal.periods, k.vector = k.vector, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj), silent = TRUE ) } else { first.model <- aux.model } if (is.element("try-error", class(first.model))) { first.model <- list(AIC = Inf) } if (use.arma.errors) { suppressWarnings(arma <- try(auto.arima(as.numeric(first.model$errors), d = 0, ...), silent = TRUE)) if (!is.element("try-error", class(arma))) { p <- arma$arma[1] q <- arma$arma[2] if ((p != 0) || (q != 0)) { # Did auto.arima() find any AR() or MA() coefficients? 
        if (p != 0) {
          ar.coefs <- numeric(p)
        } else {
          ar.coefs <- NULL
        }
        if (q != 0) {
          ma.coefs <- numeric(q)
        } else {
          ma.coefs <- NULL
        }
        starting.params <- first.model$parameters
        second.model <- try(
          fitSpecificTBATS(y, use.box.cox = box.cox, use.beta = trend, use.damping = damping, seasonal.periods = seasonal.periods, k.vector = k.vector, ar.coefs = ar.coefs, ma.coefs = ma.coefs, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj),
          silent = TRUE
        )
        if (is.element("try-error", class(second.model))) {
          second.model <- list(AIC = Inf)
        }
        if (second.model$AIC < first.model$AIC) {
          return(second.model)
        } else {
          return(first.model)
        }
      } else { # Else auto.arima() did not find any AR() or MA() coefficients
        return(first.model)
      }
    } else {
      return(first.model)
    }
  } else {
    return(first.model)
  }
}

# Note: as written, the loop below fills every row with the same pair of
# values, since `t` does not enter the cosine/sine arguments.
makeSingleFourier <- function(j, m, T) {
  fourier <- matrix(0, nrow = T, ncol = 2)
  for (t in 1:T) {
    fourier[t, 1] <- cos((2 * pi * j) / m)
    fourier[t, 2] <- sin((2 * pi * j) / m)
  }
  return(fourier)
}

calcFTest <- function(r.sse, ur.sse, num.restrictions, num.u.params, num.observations) {
  f.stat <- ((r.sse - ur.sse) / num.restrictions) / (r.sse / (num.observations - num.u.params))
  p.value <- pf(f.stat, num.restrictions, (num.observations - num.u.params), lower.tail = FALSE)
  return(p.value)
}

#' @rdname fitted.Arima
#' @export
fitted.tbats <- function(object, h=1, ...) {
  if (h == 1) {
    return(object$fitted.values)
  } else {
    return(hfitted(object = object, h = h, FUN = "tbats", ...))
  }
}

#' @export
print.tbats <- function(x, ...) {
  cat(as.character(x))
  cat("\n")
  cat("\nCall: ")
  print(x$call)
  cat("\nParameters")
  if (!is.null(x$lambda)) {
    cat("\n Lambda: ")
    cat(round(x$lambda, 6))
  }
  cat("\n Alpha: ")
  cat(x$alpha)
  if (!is.null(x$beta)) {
    cat("\n Beta: ")
    cat(x$beta)
    cat("\n Damping Parameter: ")
    cat(round(x$damping.parameter, 6))
  }
  if (!is.null(x$gamma.one.values)) {
    cat("\n Gamma-1 Values: ")
    cat(x$gamma.one.values)
  }
  if (!is.null(x$gamma.two.values)) {
    cat("\n Gamma-2 Values: ")
    cat(x$gamma.two.values)
  }
  if (!is.null(x$ar.coefficients)) {
    cat("\n AR coefficients: ")
    cat(round(x$ar.coefficients, 6))
  }
  if (!is.null(x$ma.coefficients)) {
    cat("\n MA coefficients: ")
    cat(round(x$ma.coefficients, 6))
  }
  cat("\n")
  cat("\nSeed States:\n")
  print(x$seed.states)
  cat("\nSigma: ")
  cat(sqrt(x$variance))
  cat("\nAIC: ")
  cat(x$AIC)
  cat("\n")
}

#' @rdname plot.bats
#'
#' @examples
#'
#' \dontrun{
#' fit <- tbats(USAccDeaths)
#' plot(fit)
#' autoplot(fit, range.bars = TRUE)}
#'
#' @export
plot.tbats <- function(x, main="Decomposition by TBATS model", ...) {
  out <- tbats.components(x)
  plot.ts(out, main = main, nc = 1, ...)
}

#' Extract components of a TBATS model
#'
#' Extract the level, slope and seasonal components of a TBATS model. The extracted components are Box-Cox transformed using the estimated transformation parameter.
#'
#'
#' @param x A tbats object created by \code{\link{tbats}}.
#' @return A multiple time series (\code{mts}) object. The first series is the observed time series. The second series is the trend component of the fitted model. Series three onwards are the seasonal components of the fitted model, with one time series for each of the seasonal components. All components are transformed using the estimated Box-Cox parameter.
#' @author Slava Razbash and Rob J Hyndman
#' @seealso \code{\link{tbats}}.
#' @references De Livera, A.M., Hyndman, R.J., & Snyder, R. D. (2011),
#' Forecasting time series with complex seasonal patterns using exponential
#' smoothing, \emph{Journal of the American Statistical Association},
#' \bold{106}(496), 1513-1527.
#' @keywords ts
#' @examples
#'
#' \dontrun{
#' fit <- tbats(USAccDeaths, use.parallel=FALSE)
#' components <- tbats.components(fit)
#' plot(components)}
#'
#' @export
tbats.components <- function(x) {
  # Get original data, transform if necessary
  if (!is.null(x$lambda)) {
    y <- BoxCox(x$y, x$lambda)
    lambda <- attr(y, "lambda")
  } else {
    y <- x$y
  }
  # Compute matrices
  tau <- ifelse(!is.null(x$k.vector), 2 * sum(x$k.vector), 0)
  w <- .Call(
    "makeTBATSWMatrix", smallPhi_s = x$damping.parameter, kVector_s = as.integer(x$k.vector),
    arCoefs_s = x$ar.coefficients, maCoefs_s = x$ma.coefficients, tau_s = as.integer(tau),
    PACKAGE = "forecast"
  )
  out <- cbind(observed = c(y), level = x$x[1, ])
  if (!is.null(x$beta)) {
    out <- cbind(out, slope = x$x[2, ])
  }
  # Add seasonal components if they exist
  if (tau > 0) {
    nonseas <- 2 + !is.null(x$beta) # No. non-seasonal columns in out
    nseas <- length(x$seasonal.periods) # No. seasonal periods
    seas.states <- cbind(x$seed.states, x$x)[-(1:(1 + !is.null(x$beta))), ]
    seas.states <- seas.states[, -ncol(seas.states)]
    w <- w$w.transpose[, -(1:(1 + !is.null(x$beta))), drop = FALSE]
    w <- w[, 1:tau, drop = FALSE]
    j <- cumsum(c(1, 2 * x$k.vector))
    for (i in 1:nseas)
      out <- cbind(out, season = c(w[, j[i]:(j[i + 1] - 1), drop = FALSE] %*% seas.states[j[i]:(j[i + 1] - 1), ]))
    if (nseas > 1) {
      colnames(out)[nonseas + 1:nseas] <- paste("season", 1:nseas, sep = "")
    }
  }
  # Add time series characteristics
  out <- ts(out)
  tsp(out) <- tsp(y)
  return(out)
}

forecast/R/DM2.R

# Diebold-Mariano test. Modified from code by Adrian Trapletti.
# Then adapted by M. Yousaf Khan for better performance on small samples

#' Diebold-Mariano test for predictive accuracy
#'
#' The Diebold-Mariano test compares the forecast accuracy of two forecast
#' methods.
#'
#' This function implements the modified test proposed by Harvey, Leybourne and
#' Newbold (1997). The null hypothesis is that the two methods have the same
#' forecast accuracy. For \code{alternative="less"}, the alternative hypothesis
#' is that method 2 is less accurate than method 1. For
#' \code{alternative="greater"}, the alternative hypothesis is that method 2 is
#' more accurate than method 1. For \code{alternative="two.sided"}, the
#' alternative hypothesis is that method 1 and method 2 have different levels
#' of accuracy.
#'
#' @param e1 Forecast errors from method 1.
#' @param e2 Forecast errors from method 2.
#' @param alternative a character string specifying the alternative hypothesis,
#' must be one of \code{"two.sided"} (default), \code{"greater"} or
#' \code{"less"}. You can specify just the initial letter.
#' @param h The forecast horizon used in calculating \code{e1} and \code{e2}.
#' @param power The power used in the loss function. Usually 1 or 2.
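#' @details Writing the loss differential as \eqn{d_t = |e_{1,t}|^p - |e_{2,t}|^p}
#' (with \eqn{p} the \code{power} argument), the DM statistic is the mean of
#' \eqn{d_t} divided by an estimate of its standard error, computed from the
#' autocovariances of \eqn{d_t} up to lag \eqn{h-1}. Following Harvey, Leybourne
#' and Newbold (1997), the statistic is multiplied by the small-sample
#' correction \eqn{\sqrt{(n + 1 - 2h + h(h-1)/n)/n}} and compared with a
#' \eqn{t}-distribution with \eqn{n-1} degrees of freedom, where \eqn{n} is the
#' number of loss differentials.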
#' @return A list with class \code{"htest"} containing the following
#' components: \item{statistic}{the value of the DM-statistic.}
#' \item{parameter}{the forecast horizon and loss function power used in the
#' test.} \item{alternative}{a character string describing the alternative
#' hypothesis.} \item{p.value}{the p-value for the test.} \item{method}{a
#' character string with the value "Diebold-Mariano Test".} \item{data.name}{a
#' character vector giving the names of the two error series.}
#' @author George Athanasopoulos
#' @references Diebold, F.X. and Mariano, R.S. (1995) Comparing predictive
#' accuracy. \emph{Journal of Business and Economic Statistics}, \bold{13},
#' 253-263.
#'
#' Harvey, D., Leybourne, S., & Newbold, P. (1997). Testing the equality of
#' prediction mean squared errors. \emph{International Journal of Forecasting},
#' \bold{13}(2), 281-291.
#' @keywords htest ts
#' @examples
#'
#' # Test on in-sample one-step forecasts
#' f1 <- ets(WWWusage)
#' f2 <- auto.arima(WWWusage)
#' accuracy(f1)
#' accuracy(f2)
#' dm.test(residuals(f1),residuals(f2),h=1)
#'
#' # Test on out-of-sample one-step forecasts
#' f1 <- ets(WWWusage[1:80])
#' f2 <- auto.arima(WWWusage[1:80])
#' f1.out <- ets(WWWusage[81:100],model=f1)
#' f2.out <- Arima(WWWusage[81:100],model=f2)
#' accuracy(f1.out)
#' accuracy(f2.out)
#' dm.test(residuals(f1.out),residuals(f2.out),h=1)
#'
#' @export
dm.test <- function(e1, e2, alternative = c("two.sided", "less", "greater"), h = 1, power = 2) {
  alternative <- match.arg(alternative)
  d <- c(abs(e1)) ^ power - c(abs(e2)) ^ power
  d.cov <- acf(d, na.action = na.omit, lag.max = h - 1, type = "covariance", plot = FALSE)$acf[, , 1]
  d.var <- sum(c(d.cov[1], 2 * d.cov[-1])) / length(d)
  dv <- d.var # max(1e-8,d.var)
  if (dv > 0) {
    STATISTIC <- mean(d, na.rm = TRUE) / sqrt(dv)
  } else if (h == 1) {
    stop("Variance of DM statistic is zero")
  } else {
    warning("Variance is negative, using horizon h=1")
    return(dm.test(e1, e2, alternative, h = 1, power))
  }
  n <- length(d)
  k <- ((n + 1 - 2 * h + (h / n) * (h - 1)) / n) ^ (1 / 2)
  STATISTIC <- STATISTIC * k
  names(STATISTIC) <- "DM"
  if (alternative == "two.sided") {
    PVAL <- 2 * pt(-abs(STATISTIC), df = n - 1)
  } else if (alternative == "less") {
    PVAL <- pt(STATISTIC, df = n - 1)
  } else if (alternative == "greater") {
    PVAL <- pt(STATISTIC, df = n - 1, lower.tail = FALSE)
  }
  PARAMETER <- c(h, power)
  names(PARAMETER) <- c("Forecast horizon", "Loss function power")
  structure(
    list(
      statistic = STATISTIC, parameter = PARAMETER, alternative = alternative,
      p.value = PVAL, method = "Diebold-Mariano Test",
      data.name = c(deparse(substitute(e1)), deparse(substitute(e2)))
    ),
    class = "htest"
  )
}

is.htest <- function(x) {
  inherits(x, "htest")
}

forecast/R/errors.R

## Measures of forecast accuracy
## Forecasts in f. This may be a numerical vector or the output from arima or ets or derivatives.
## Actual values in x
# dx = response variable in historical data
## test enables a subset of x and f to be tested.
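# For reference, with forecast errors e = x - f and percentage errors
# pe = 100 * e / x, the measures computed below are:
#   ME = mean(e), RMSE = sqrt(mean(e^2)), MAE = mean(abs(e)),
#   MPE = mean(pe), MAPE = mean(abs(pe)), and
#   MASE = mean(abs(e / scale)), where scale is the in-sample mean absolute
#   value of the historical data after d first differences and D seasonal
#   differences (i.e. the MAE of naive or seasonal naive forecasts).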
# MASE: d is the # of differencing # MASE: D is the # of seasonal differencing testaccuracy <- function(f, x, test, d, D) { dx <- getResponse(f) if (is.data.frame(x)) { responsevar <- as.character(formula(f$model))[2] if (is.element(responsevar, colnames(x))) { x <- x[, responsevar] } else { stop("I can't figure out what data to use.") } } if (is.list(f)) { if (is.element("mean", names(f))) { f <- f$mean } else { stop("Unknown list structure") } } if (is.ts(x) && is.ts(f)) { tspf <- tsp(f) tspx <- tsp(x) start <- max(tspf[1], tspx[1]) end <- min(tspf[2], tspx[2]) # Adjustment to allow for floating point issues start <- min(start, end) end <- max(start, end) f <- window(f, start = start, end = end) x <- window(x, start = start, end = end) } n <- length(x) if (is.null(test)) { test <- 1:n } else if (min(test) < 1 || max(test) > n) { warning("test elements must be within sample") test <- test[test >= 1 & test <= n] } ff <- f xx <- x # Check length of f if (length(f) < n) { stop("Not enough forecasts. Check that forecasts and test data match.") } error <- (xx - ff[1:n])[test] pe <- error / xx[test] * 100 me <- mean(error, na.rm = TRUE) mse <- mean(error^2, na.rm = TRUE) mae <- mean(abs(error), na.rm = TRUE) mape <- mean(abs(pe), na.rm = TRUE) mpe <- mean(pe, na.rm = TRUE) out <- c(me, sqrt(mse), mae, mpe, mape) names(out) <- c("ME", "RMSE", "MAE", "MPE", "MAPE") # Compute MASE if historical data available if (!is.null(dx)) { tspdx <- tsp(dx) if (!is.null(tspdx)) { if (D > 0) { # seasonal differencing nsd <- diff(dx, lag = round(tspdx[3L]), differences = D) } else { # non seasonal differencing nsd <- dx } if (d > 0) { nd <- diff(nsd, differences = d) } else { nd <- nsd } scale <- mean(abs(nd), na.rm = TRUE) } else { # not time series scale <- mean(abs(dx - mean(dx, na.rm = TRUE)), na.rm = TRUE) } mase <- mean(abs(error / scale), na.rm = TRUE) out <- c(out, mase) names(out)[length(out)] <- "MASE" } # Additional time series measures if (!is.null(tsp(x)) && n > 1) { fpe <- (c(ff[2:n]) / c(xx[1:(n - 1)]) - 1)[test - 1] ape <- (c(xx[2:n]) / c(xx[1:(n - 1)]) - 1)[test - 1] theil <- sqrt(sum((fpe - ape)^2, na.rm = TRUE) / sum(ape^2, na.rm = TRUE)) if (length(error) > 1) { r1 <- acf(error, plot = FALSE, lag.max = 2, na.action = na.pass)$acf[2, 1, 1] } else { r1 <- NA } nj <- length(out) out <- c(out, r1, theil) names(out)[nj + (1:2)] <- c("ACF1", "Theil's U") } return(out) } trainingaccuracy <- function(f, test, d, D) { # Make sure x is an element of f when f is a fitted model rather than a forecast # if(!is.list(f)) # stop("f must be a forecast object or a time series model object.") dx <- getResponse(f) if (is.element("splineforecast", class(f))) { fits <- f$onestepf } else { fits <- fitted(f) } # Don't use f$resid as this may contain multiplicative errors. 
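  # For multiplicative-error models, object$residuals holds relative errors,
  # so training residuals are recomputed on the original scale as the
  # response minus the fitted values.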
res <- dx - fits n <- length(res) if (is.null(test)) { test <- 1:n } if (min(test) < 1 || max(test) > n) { warning("test elements must be within sample") test <- test[test >= 1 & test <= n] } tspdx <- tsp(dx) res <- res[test] dx <- dx[test] pe <- res / dx * 100 # Percentage error me <- mean(res, na.rm = TRUE) mse <- mean(res^2, na.rm = TRUE) mae <- mean(abs(res), na.rm = TRUE) mape <- mean(abs(pe), na.rm = TRUE) mpe <- mean(pe, na.rm = TRUE) out <- c(me, sqrt(mse), mae, mpe, mape) names(out) <- c("ME", "RMSE", "MAE", "MPE", "MAPE") # Compute MASE if historical data available if (!is.null(dx)) { if (!is.null(tspdx)) { if (D > 0) { # seasonal differencing nsd <- diff(dx, lag = round(tspdx[3L]), differences = D) } else { # non seasonal differencing nsd <- dx } if (d > 0) { nd <- diff(nsd, differences = d) } else { nd <- nsd } scale <- mean(abs(nd), na.rm = TRUE) } else { # not time series scale <- mean(abs(dx - mean(dx, na.rm = TRUE)), na.rm = TRUE) } mase <- mean(abs(res / scale), na.rm = TRUE) out <- c(out, mase) names(out)[length(out)] <- "MASE" } # Additional time series measures if (!is.null(tspdx)) { if (length(res) > 1) { r1 <- acf(res, plot = FALSE, lag.max = 2, na.action = na.pass)$acf[2, 1, 1] } else { r1 <- NA } nj <- length(out) out <- c(out, r1) names(out)[nj + 1] <- "ACF1" } return(out) } #' Accuracy measures for a forecast model #' #' Returns range of summary measures of the forecast accuracy. If \code{x} is #' provided, the function measures test set forecast accuracy #' based on \code{x-f}. If \code{x} is not provided, the function only produces #' training set accuracy measures of the forecasts based on #' \code{f["x"]-fitted(f)}. All measures are defined and discussed in Hyndman #' and Koehler (2006). #' #' The measures calculated are: #' \itemize{ #' \item ME: Mean Error #' \item RMSE: Root Mean Squared Error #' \item MAE: Mean Absolute Error #' \item MPE: Mean Percentage Error #' \item MAPE: Mean Absolute Percentage Error #' \item MASE: Mean Absolute Scaled Error #' \item ACF1: Autocorrelation of errors at lag 1. #' } #' By default, the MASE calculation is scaled using MAE of training set naive #' forecasts for non-seasonal time series, training set seasonal naive forecasts #' for seasonal time series and training set mean forecasts for non-time series data. #' If \code{f} is a numerical vector rather than a \code{forecast} object, the MASE #' will not be returned as the training data will not be available. #' #' See Hyndman and Koehler (2006) and Hyndman and Athanasopoulos (2014, Section #' 2.5) for further details. #' #' @param object An object of class \dQuote{\code{forecast}}, or a numerical vector #' containing forecasts. It will also work with \code{Arima}, \code{ets} and #' \code{lm} objects if \code{x} is omitted -- in which case training set accuracy #' measures are returned. #' @param x An optional numerical vector containing actual values of the same #' length as object, or a time series overlapping with the times of \code{f}. #' @param test Indicator of which elements of \code{x} and \code{f} to test. If #' \code{test} is \code{NULL}, all elements are used. Otherwise test is a #' numeric vector containing the indices of the elements to use in the test. #' @param d An integer indicating the number of lag-1 differences to be used #' for the denominator in MASE calculation. Default value is 1 for non-seasonal #' series and 0 for seasonal series. 
#' @param D An integer indicating the number of seasonal differences to be used #' for the denominator in MASE calculation. Default value is 0 for non-seasonal #' series and 1 for seasonal series. #' @param ... Additional arguments depending on the specific method. #' @param f Deprecated. Please use `object` instead. #' @return Matrix giving forecast accuracy measures. #' @author Rob J Hyndman #' @references Hyndman, R.J. and Koehler, A.B. (2006) "Another look at measures #' of forecast accuracy". \emph{International Journal of Forecasting}, #' \bold{22}(4), 679-688. Hyndman, R.J. and Athanasopoulos, G. (2018) #' "Forecasting: principles and practice", 2nd ed., OTexts, Melbourne, Australia. #' Section 3.4 "Evaluating forecast accuracy". #' \url{https://otexts.com/fpp2/accuracy.html}. #' @keywords ts #' @examples #' #' fit1 <- rwf(EuStockMarkets[1:200, 1], h = 100) #' fit2 <- meanf(EuStockMarkets[1:200, 1], h = 100) #' accuracy(fit1) #' accuracy(fit2) #' accuracy(fit1, EuStockMarkets[201:300, 1]) #' accuracy(fit2, EuStockMarkets[201:300, 1]) #' plot(fit1) #' lines(EuStockMarkets[1:300, 1]) #' @export accuracy <- function(object, ...) { UseMethod("accuracy") } #' @rdname accuracy #' @method accuracy default #' @export accuracy.default <- function(object, x, test = NULL, d = NULL, D = NULL, f = NULL, ...) { if (!is.null(f)) { warning("Using `f` as the argument for `accuracy()` is deprecated. Please use `object` instead.") object <- f } if (!any(is.element(class(object), c( "ARFIMA", "mforecast", "forecast", "ts", "integer", "numeric", "Arima", "ets", "lm", "bats", "tbats", "nnetar", "stlm", "baggedModel" )))) { stop("First argument should be a forecast object or a time series.") } if (is.element("mforecast", class(object))) { return(accuracy.mforecast(object, x, test, d, D)) } trainset <- (is.list(object)) testset <- (!missing(x)) if (testset && !is.null(test)) { trainset <- FALSE } if (!trainset && !testset) { stop("Unable to compute forecast accuracy measures") } # Find d and D if (is.null(D) && is.null(d)) { if (testset) { d <- as.numeric(frequency(x) == 1) D <- as.numeric(frequency(x) > 1) } else if (trainset) { if (!is.null(object$mean)) { d <- as.numeric(frequency(object$mean) == 1) D <- as.numeric(frequency(object$mean) > 1) } else { d <- as.numeric(frequency(object[["x"]]) == 1) D <- as.numeric(frequency(object[["x"]]) > 1) } } else { d <- as.numeric(frequency(object) == 1) D <- as.numeric(frequency(object) > 1) } } if (trainset) { trainout <- trainingaccuracy(object, test, d, D) trainnames <- names(trainout) } else { trainnames <- NULL } if (testset) { testout <- testaccuracy(object, x, test, d, D) testnames <- names(testout) } else { testnames <- NULL } outnames <- unique(c(trainnames, testnames)) out <- matrix(NA, nrow = 2, ncol = length(outnames)) colnames(out) <- outnames rownames(out) <- c("Training set", "Test set") if (trainset) { out[1, names(trainout)] <- trainout } if (testset) { out[2, names(testout)] <- testout } if (!testset) { out <- out[1, , drop = FALSE] } if (!trainset) { out <- out[2, , drop = FALSE] } return(out) } # Compute accuracy for an mforecast object #' @export accuracy.mforecast <- function(object, x, test = NULL, d, D, f = NULL, ...) { if (!is.null(f)) { warning("Using `f` as the argument for `accuracy()` is deprecated. 
Please use `object` instead.")
    object <- f
  }
  out <- NULL
  nox <- missing(x)
  i <- 1
  for (fcast in object$forecast) {
    if (nox) {
      out1 <- accuracy(fcast, test = test, d = d, D = D)
    } else {
      out1 <- accuracy(fcast, x[, i], test, d, D)
    }
    rownames(out1) <- paste(fcast$series, rownames(out1))
    out <- rbind(out, out1)
    i <- i + 1
  }
  return(out)
}

forecast/R/modelAR.R

# Defaults:
# For non-seasonal data, p chosen using AIC from linear AR(p) model
# For seasonal data, p chosen using AIC from linear AR(p) model after
# seasonally adjusting with STL decomposition, and P=1

#' Time Series Forecasts with a user-defined model
#'
#' Experimental function to forecast univariate time series with a
#' user-defined model.
#'
#' This is an experimental function and only recommended for advanced users.
#' The selected model is fitted with lagged values of \code{y} as
#' inputs. The inputs are lags 1 to \code{p}, and the seasonal lags
#' \code{m, 2m, ..., Pm}, where
#' \code{m=frequency(y)}. If \code{xreg} is provided, its columns are also
#' used as inputs. If there are missing values in \code{y} or
#' \code{xreg}, the corresponding rows (and any others which depend on them as
#' lags) are omitted from the fit. The model is trained for one-step
#' forecasting. Multi-step forecasts are computed recursively.
#'
#' @aliases print.modelAR
#'
#' @param y A numeric vector or time series of class \code{ts}.
#' @param p Embedding dimension for non-seasonal time series. Number of
#' non-seasonal lags used as inputs. For non-seasonal time series, the default
#' is the optimal number of lags (according to the AIC) for a linear AR(p)
#' model. For seasonal time series, the same method is used but applied to
#' seasonally adjusted data (from an stl decomposition).
#' @param P Number of seasonal lags used as inputs.
#' @param FUN Function used for model fitting. Must accept arguments \code{x}
#' and \code{y} for the predictors and response, respectively (\code{formula}
#' object not currently supported).
#' @param predict.FUN Prediction function used to apply \code{FUN} to new data.
#' Must accept an object of class \code{FUN} as its first argument, and a
#' data frame or matrix of new data for its second argument. Additionally,
#' it should return fitted values when new data is omitted.
#' @param xreg Optionally, a vector or matrix of external regressors, which
#' must have the same number of rows as \code{y}. Must be numeric.
#' @param model Output from a previous call to \code{modelAR}. If model is
#' passed, this same model is fitted to \code{y} without re-estimating any
#' parameters.
#' @param subset Optional vector specifying a subset of observations to be used
#' in the fit. Can be an integer index vector or a logical vector the same
#' length as \code{y}. All observations are used by default.
#' @param scale.inputs If TRUE, inputs are scaled by subtracting the column
#' means and dividing by their respective standard deviations. If \code{lambda}
#' is not \code{NULL}, scaling is applied after Box-Cox transformation.
#' @param x Deprecated. Included for backwards compatibility.
#' @param \dots Other arguments passed to \code{FUN} for
#' \code{modelAR}.
#' @inheritParams forecast
#'
#' @return Returns an object of class "\code{modelAR}".
#'
#' The function \code{summary} is used to obtain and print a summary of the
#' results.
#'
#' The generic accessor functions \code{fitted.values} and \code{residuals}
#' extract useful features of the value returned by \code{modelAR}.
#' #' \item{model}{A list containing information about the fitted model} #' \item{method}{The name of the forecasting method as a character string} #' \item{x}{The original time series.} #' \item{xreg}{The external regressors used in fitting (if given).} #' \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} #' \item{fitted}{Fitted values (one-step forecasts)} #' \item{...}{Other arguments} #' #' @author Rob J Hyndman and Gabriel Caceres #' @keywords ts #' #' @export modelAR <- function(y, p, P=1, FUN, predict.FUN, xreg=NULL, lambda=NULL, model=NULL, subset=NULL, scale.inputs=FALSE, x=y, ...) { useoldmodel <- FALSE yname <- deparse(substitute(y)) if (!is.null(model)) { # Use previously fitted model useoldmodel <- TRUE # Check for conflicts between new and old data: # Check model class if (!is.modelAR(model)) { stop("Model must be a modelAR object") } # Check new data m <- max(round(frequency(model$x)), 1L) minlength <- max(c(model$p, model$P * m)) + 1 if (length(x) < minlength) { stop(paste("Series must be at least of length", minlength, "to use fitted model")) } if (tsp(as.ts(x))[3] != m) { warning(paste("Data frequency doesn't match fitted model, coercing to frequency =", m)) x <- ts(x, frequency = m) } # Check xreg if (!is.null(model$xreg)) { if (is.null(xreg)) { stop("No external regressors provided") } if (NCOL(xreg) != NCOL(model$xreg)) { stop("Number of external regressors does not match fitted model") } } # Update parameters with previous model lambda <- model$lambda p <- model$p P <- model$P FUN <- model$FUN predict.FUN <- model$predict.FUN if (P > 0) { lags <- sort(unique(c(1:p, m * (1:P)))) } else { lags <- 1:p } if (!is.null(model$scalex)) { scale.inputs <- TRUE } } else { # when not using an old model if (length(y) < 3) { stop("Not enough data to fit a model") } # Check for constant data in time series constant_data <- is.constant(na.interp(x)) if (constant_data){ warning("Constant data, setting p=1, P=0, lambda=NULL, scale.inputs=FALSE") scale.inputs <- FALSE lambda <- NULL p <- 1 P <- 0 } ## Check for constant data in xreg if (!is.null(xreg)){ constant_xreg <- any(apply(as.matrix(xreg), 2, function(x) is.constant(na.interp(x)))) if (constant_xreg){ warning("Constant xreg column, setting scale.inputs=FALSE") scale.inputs <- FALSE } } } # Check for NAs in x if (any(is.na(x))) { warning("Missing values in x, omitting rows") } # Transform data if (!is.null(lambda)) { xx <- BoxCox(x, lambda) lambda <- attr(xx, "lambda") } else { xx <- x } ## Check whether to use a subset of the data xsub <- rep(TRUE, length(x)) if (is.numeric(subset)) { xsub[-subset] <- FALSE } if (is.logical(subset)) { xsub <- subset } # Scale series scalex <- NULL if (scale.inputs) { if (useoldmodel) { scalex <- model$scalex } else { tmpx <- scale(xx[xsub], center = TRUE, scale = TRUE) scalex <- list( center = attr(tmpx, "scaled:center"), scale = attr(tmpx, "scaled:scale") ) } xx <- scale(xx, center = scalex$center, scale = scalex$scale) xx <- xx[, 1] } # Check xreg class & dim xxreg <- NULL scalexreg <- NULL if (!is.null(xreg)) { xxreg <- xreg <- as.matrix(xreg) if (length(x) != NROW(xreg)) { stop("Number of rows in xreg does not match series length") } # Check for NAs in xreg if (any(is.na(xreg))) { warning("Missing values in xreg, omitting rows") } # Scale xreg if (scale.inputs) { if (useoldmodel) { scalexreg <- model$scalexreg } else { tmpx <- scale(xxreg[xsub, ], center = TRUE, scale = TRUE) scalexreg <- list( center = attr(tmpx, "scaled:center"), scale = attr(tmpx, 
"scaled:scale") ) } xxreg <- scale(xxreg, center = scalexreg$center, scale = scalexreg$scale) } } # Set up lagged matrix n <- length(xx) xx <- as.ts(xx) m <- max(round(frequency(xx)), 1L) if (!useoldmodel) { if (m == 1) { if (missing(p)) { p <- max(length(ar(na.interp(xx))$ar), 1) } if (p >= n) { warning("Reducing number of lagged inputs due to short series") p <- n - 1 } lags <- 1:p if (P > 1) { warning("Non-seasonal data, ignoring seasonal lags") } P <- 0 } else { if (missing(p)) { if (n >= 2 * m) { x.sa <- seasadj(mstl(na.interp(xx))) } else { x.sa <- na.interp(xx) } p <- max(length(ar(x.sa)$ar), 1) } if (p >= n) { warning("Reducing number of lagged inputs due to short series") p <- n - 1 } if (P > 0 && n >= m * P + 2) { lags <- sort(unique(c(1:p, m * (1:P)))) } else { lags <- 1:p if (P > 0) { warning("Series too short for seasonal lags") P <- 0 } } } } maxlag <- max(lags) nlag <- length(lags) y <- xx[-(1:maxlag)] lags.X <- matrix(NA_real_, ncol = nlag, nrow = n - maxlag) for (i in 1:nlag){ lags.X[, i] <- xx[(maxlag - lags[i] + 1):(n - lags[i])] } # Add xreg into lagged matrix lags.X <- cbind(lags.X, xxreg[-(1:maxlag), ]) # Remove missing values if present j <- complete.cases(lags.X, y) ## Remove values not in subset j <- j & xsub[-(1:maxlag)] ## Stop if there's no data to fit (e.g. due to NAs or NaNs) if (NROW(lags.X[j,, drop=FALSE]) == 0) { stop("No data to fit (possibly due to NA or NaN)") } ## Fit selected model if (useoldmodel) { fit <- model$model } else { fit <- FUN(x = lags.X[j,, drop=FALSE], y = y[j], ...) } # Return results out <- list() out$x <- as.ts(x) out$m <- m out$p <- p out$P <- P out$FUN <- FUN out$predict.FUN <- predict.FUN out$scalex <- scalex out$scalexreg <- scalexreg out$xreg <- xreg out$lambda <- lambda out$subset <- (1:length(x))[xsub] out$model <- fit out$modelargs <- list(...) if (useoldmodel) { out$modelargs <- model$modelargs fits <- c(rep(NA_real_, maxlag), predict.FUN(fit, lags.X[j,, drop=FALSE])) } else { fits <- c(rep(NA_real_, maxlag), predict.FUN(fit)) } if (scale.inputs) { fits <- fits * scalex$scale + scalex$center } fits <- ts(fits) if (!is.null(lambda)) { fits <- InvBoxCox(fits, lambda) } out$fitted <- ts(rep(NA_real_, length(out$x))) out$fitted[c(rep(TRUE, maxlag), j)] <- fits tsp(out$fitted) <- tsp(out$x) out$residuals <- out$x - out$fitted out$lags <- lags out$series <- yname out$method <- deparse(substitute(FUN)) out$method <- paste0(out$method, "-AR(", p) if (P > 0) out$method <- paste(out$method, ",", P, sep = "") out$method <- paste0(out$method, ")") if (P > 0) out$method <- paste(out$method, "[", m, "]", sep = "") out$call <- match.call() return(structure(out, class = c("modelAR"))) } #' Forecasting using user-defined model #' #' Returns forecasts and other information for user-defined #' models. #' #' Prediction intervals are calculated through simulations and can be slow. #' Note that if the model is too complex and overfits the data, the residuals #' can be arbitrarily small; if used for prediction interval calculations, they #' could lead to misleadingly small values. #' #' @param object An object of class "\code{modelAR}" resulting from a call to #' \code{\link{modelAR}}. #' @param h Number of periods for forecasting. If \code{xreg} is used, \code{h} #' is ignored and the number of forecast periods is set to the number of rows #' of \code{xreg}. #' @param PI If TRUE, prediction intervals are produced, otherwise only point #' forecasts are calculated. 
#' If \code{PI} is FALSE, then \code{level},
#' \code{fan}, \code{bootstrap} and \code{npaths} are all ignored.
#' @param level Confidence level for prediction intervals.
#' @param fan If \code{TRUE}, level is set to \code{seq(51,99,by=3)}. This is
#' suitable for fan plots.
#' @param xreg Future values of external regressor variables.
#' @param bootstrap If \code{TRUE}, then prediction intervals are computed using
#' simulations with resampled residuals rather than normally distributed
#' errors. Ignored if \code{innov} is not \code{NULL}.
#' @param npaths Number of sample paths used in computing simulated prediction
#' intervals.
#' @param innov Values to use as innovations for prediction intervals. Must be
#' a matrix with \code{h} rows and \code{npaths} columns (vectors are coerced
#' into a matrix). If present, \code{bootstrap} is ignored.
#' @param ... Additional arguments passed to \code{\link{simulate.modelAR}}
#' @inheritParams forecast
#'
#' @return An object of class "\code{forecast}".
#'
#' The function \code{summary} is used to obtain and print a summary of the
#' results, while the function \code{plot} produces a plot of the forecasts and
#' prediction intervals.
#'
#' The generic accessor functions \code{fitted.values} and \code{residuals}
#' extract useful features of the value returned by \code{forecast.modelAR}.
#'
#' An object of class "\code{forecast}" is a list containing at least the
#' following elements:
#' \item{model}{A list containing information about the fitted model}
#' \item{method}{The name of the forecasting method as a character string}
#' \item{mean}{Point forecasts as a time series}
#' \item{lower}{Lower limits for prediction intervals}
#' \item{upper}{Upper limits for prediction intervals}
#' \item{level}{The confidence values associated with the prediction intervals}
#' \item{x}{The original time series (either \code{object} itself or the time series
#' used to create the model stored as \code{object}).}
#' \item{xreg}{The external regressors used in fitting (if given).}
#' \item{residuals}{Residuals from the fitted model. That is x minus fitted values.}
#' \item{fitted}{Fitted values (one-step forecasts)}
#' \item{...}{Other arguments}
#'
#' @author Rob J Hyndman and Gabriel Caceres
#' @seealso \code{\link{nnetar}}.
#' @keywords ts
#'
#' @export
forecast.modelAR <- function(object, h=ifelse(object$m > 1, 2 * object$m, 10), PI=FALSE, level=c(80, 95), fan=FALSE, xreg=NULL, lambda=object$lambda, bootstrap=FALSE, npaths=1000, innov=NULL, ...)
{
  out <- object
  tspx <- tsp(out$x)
  #
  if (fan) {
    level <- seq(51, 99, by = 3)
  } else {
    if (min(level) > 0 && max(level) < 1) {
      level <- 100 * level
    } else if (min(level) < 0 || max(level) > 99.99) {
      stop("Confidence limit out of range")
    }
  }
  # Check if xreg was used in fitted model
  if (is.null(object$xreg)) {
    if (!is.null(xreg)) {
      warning("External regressors were not used in fitted model, xreg will be ignored")
    }
    xreg <- NULL
  } else {
    if (is.null(xreg)) {
      stop("No external regressors provided")
    }
    xreg <- as.matrix(xreg)
    if (NCOL(xreg) != NCOL(object$xreg)) {
      stop("Number of external regressors does not match fitted model")
    }
    h <- NROW(xreg)
  }
  fcast <- numeric(h)
  xx <- object$x
  xxreg <- xreg
  if (!is.null(lambda)) {
    xx <- BoxCox(xx, lambda)
    lambda <- attr(xx, "lambda")
  }
  # Check and apply scaling of fitted model
  if (!is.null(object$scalex)) {
    xx <- scale(xx, center = object$scalex$center, scale = object$scalex$scale)
    if (!is.null(xreg)) {
      xxreg <- scale(xreg, center = object$scalexreg$center, scale = object$scalexreg$scale)
    }
  }
  # Get lags used in fitted model
  lags <- object$lags
  maxlag <- max(lags)
  flag <- rev(tail(xx, n = maxlag))
  # Iterative 1-step forecast
  for (i in 1:h) {
    newdata <- c(flag[lags], xxreg[i, ])
    if (any(is.na(newdata))) {
      stop("I can't forecast when there are missing values near the end of the series.")
    }
    fcast[i] <- object$predict.FUN(object$model, newdata)
    flag <- c(fcast[i], flag[-maxlag])
  }
  # Re-scale point forecasts
  if (!is.null(object$scalex)) {
    fcast <- fcast * object$scalex$scale + object$scalex$center
  }
  # Add ts properties
  fcast <- ts(fcast, start = tspx[2] + 1 / tspx[3], frequency = tspx[3])
  # Back-transform point forecasts
  if (!is.null(lambda)) {
    fcast <- InvBoxCox(fcast, lambda)
  }
  # Compute prediction intervals using simulations
  if (isTRUE(PI)) {
    nint <- length(level)
    sim <- matrix(NA, nrow = npaths, ncol = h)
    if (!is.null(innov)) {
      if (length(innov) != h * npaths) {
        stop("Incorrect number of innovations, need h*npaths values")
      }
      innov <- matrix(innov, nrow = h, ncol = npaths)
      bootstrap <- FALSE
    }
    for (i in 1:npaths)
      sim[i, ] <- simulate(object, nsim = h, bootstrap = bootstrap, xreg = xreg, lambda = lambda, innov = innov[, i], ...)
    lower <- apply(sim, 2, quantile, 0.5 - level / 200, type = 8)
    upper <- apply(sim, 2, quantile, 0.5 + level / 200, type = 8)
    if (nint > 1L) {
      lower <- ts(t(lower))
      upper <- ts(t(upper))
    } else {
      lower <- ts(matrix(lower, ncol = 1L))
      upper <- ts(matrix(upper, ncol = 1L))
    }
    tsp(lower) <- tsp(upper) <- tsp(fcast)
  } else {
    level <- NULL
    lower <- NULL
    upper <- NULL
  }
  out$mean <- fcast
  out$level <- level
  out$lower <- lower
  out$upper <- upper
  return(structure(out, class = "forecast"))
}

#' @rdname fitted.Arima
#' @export
fitted.modelAR <- function(object, h=1, ...) {
  if (h == 1) {
    return(object$fitted)
  } else {
    return(hfitted(object = object, h = h, FUN = "modelAR", ...))
  }
}

#' @export
print.modelAR <- function(x, digits = max(3, getOption("digits") - 3), ...) {
  cat("Series:", x$series, "\n")
  cat("Model: ", x$method, "\n")
  cat("Call: ")
  print(x$call)
  print(x$model)
  cat(
    "\nsigma^2 estimated as ",
    format(mean(residuals(x) ^ 2, na.rm = TRUE), digits = digits),
    "\n",
    sep = ""
  )
  invisible(x)
}

#' @rdname is.ets
#' @export
is.modelAR <- function(x) {
  inherits(x, "modelAR")
}

forecast/R/ets.R

#' Exponential smoothing state space model
#'
#' Returns ets model applied to \code{y}.
#'
#' Based on the classification of methods as described in Hyndman et al (2008).
#' #' The methodology is fully automatic. The only required argument for ets is #' the time series. The model is chosen automatically if not specified. This #' methodology performed extremely well on the M3-competition data. (See #' Hyndman, et al, 2002, below.) #' #' @aliases print.ets summary.ets as.character.ets coef.ets tsdiag.ets #' #' @param y a numeric vector or time series of class \code{ts} #' @param model Usually a three-character string identifying method using the #' framework terminology of Hyndman et al. (2002) and Hyndman et al. (2008). #' The first letter denotes the error type ("A", "M" or "Z"); the second letter #' denotes the trend type ("N","A","M" or "Z"); and the third letter denotes #' the season type ("N","A","M" or "Z"). In all cases, "N"=none, "A"=additive, #' "M"=multiplicative and "Z"=automatically selected. So, for example, "ANN" is #' simple exponential smoothing with additive errors, "MAM" is multiplicative #' Holt-Winters' method with multiplicative errors, and so on. #' #' It is also possible for the model to be of class \code{"ets"}, and equal to #' the output from a previous call to \code{ets}. In this case, the same model #' is fitted to \code{y} without re-estimating any smoothing parameters. See #' also the \code{use.initial.values} argument. #' @param damped If TRUE, use a damped trend (either additive or #' multiplicative). If NULL, both damped and non-damped trends will be tried #' and the best model (according to the information criterion \code{ic}) #' returned. #' @param alpha Value of alpha. If NULL, it is estimated. #' @param beta Value of beta. If NULL, it is estimated. #' @param gamma Value of gamma. If NULL, it is estimated. #' @param phi Value of phi. If NULL, it is estimated. #' @param additive.only If TRUE, will only consider additive models. Default is #' FALSE. #' @param lambda Box-Cox transformation parameter. If \code{lambda="auto"}, #' then a transformation is automatically selected using \code{BoxCox.lambda}. #' The transformation is ignored if NULL. Otherwise, #' data transformed before model is estimated. When \code{lambda} is specified, #' \code{additive.only} is set to \code{TRUE}. #' @param lower Lower bounds for the parameters (alpha, beta, gamma, phi) #' @param upper Upper bounds for the parameters (alpha, beta, gamma, phi) #' @param opt.crit Optimization criterion. One of "mse" (Mean Square Error), #' "amse" (Average MSE over first \code{nmse} forecast horizons), "sigma" #' (Standard deviation of residuals), "mae" (Mean of absolute residuals), or #' "lik" (Log-likelihood, the default). #' @param nmse Number of steps for average multistep MSE (1<=\code{nmse}<=30). #' @param bounds Type of parameter space to impose: \code{"usual" } indicates #' all parameters must lie between specified lower and upper bounds; #' \code{"admissible"} indicates parameters must lie in the admissible space; #' \code{"both"} (default) takes the intersection of these regions. #' @param ic Information criterion to be used in model selection. #' @param restrict If \code{TRUE} (default), the models with infinite variance #' will not be allowed. #' @param allow.multiplicative.trend If \code{TRUE}, models with multiplicative #' trend are allowed when searching for a model. Otherwise, the model space #' excludes them. This argument is ignored if a multiplicative trend model is #' explicitly requested (e.g., using \code{model="MMN"}). 
#' @param use.initial.values If \code{TRUE} and \code{model} is of class #' \code{"ets"}, then the initial values in the model are also not #' re-estimated. #' @param na.action A function which indicates what should happen when the data #' contains NA values. By default, the largest contiguous portion of the #' time-series will be used. #' @param ... Other undocumented arguments. #' @inheritParams forecast #' #' @return An object of class "\code{ets}". #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{ets} and associated #' functions. #' @author Rob J Hyndman #' @seealso \code{\link[stats]{HoltWinters}}, \code{\link{rwf}}, #' \code{\link{Arima}}. #' @references Hyndman, R.J., Koehler, A.B., Snyder, R.D., and Grose, S. (2002) #' "A state space framework for automatic forecasting using exponential #' smoothing methods", \emph{International J. Forecasting}, \bold{18}(3), #' 439--454. #' #' Hyndman, R.J., Akram, Md., and Archibald, B. (2008) "The admissible #' parameter space for exponential smoothing models". \emph{Annals of #' Statistical Mathematics}, \bold{60}(2), 407--426. #' #' Hyndman, R.J., Koehler, A.B., Ord, J.K., and Snyder, R.D. (2008) #' \emph{Forecasting with exponential smoothing: the state space approach}, #' Springer-Verlag. \url{http://www.exponentialsmoothing.net}. #' @keywords ts #' @examples #' fit <- ets(USAccDeaths) #' plot(forecast(fit)) #' #' @export ets <- function(y, model="ZZZ", damped=NULL, alpha=NULL, beta=NULL, gamma=NULL, phi=NULL, additive.only=FALSE, lambda=NULL, biasadj=FALSE, lower=c(rep(0.0001, 3), 0.8), upper=c(rep(0.9999, 3), 0.98), opt.crit=c("lik", "amse", "mse", "sigma", "mae"), nmse=3, bounds=c("both", "usual", "admissible"), ic=c("aicc", "aic", "bic"), restrict=TRUE, allow.multiplicative.trend=FALSE, use.initial.values=FALSE, na.action = c("na.contiguous", "na.interp", "na.fail"), ...) { # dataname <- substitute(y) opt.crit <- match.arg(opt.crit) bounds <- match.arg(bounds) ic <- match.arg(ic) if(!is.function(na.action)){ na.fn_name <- match.arg(na.action) na.action <- get(na.fn_name) } seriesname <- deparse(substitute(y)) if (any(class(y) %in% c("data.frame", "list", "matrix", "mts"))) { stop("y should be a univariate time series") } y <- as.ts(y) # Check if data is constant if (missing(model) && is.constant(y)) { return(ses(y, alpha = 0.99999, initial = "simple")$model) } # Remove missing values near ends ny <- length(y) y <- na.action(y) if (ny != length(y) && na.fn_name == "na.contiguous") { warning("Missing values encountered. 
Using longest contiguous portion of time series") ny <- length(y) } orig.y <- y if (identical(class(model), "ets") && is.null(lambda)) { lambda <- model$lambda } if (!is.null(lambda)) { y <- BoxCox(y, lambda) lambda <- attr(y, "lambda") additive.only <- TRUE } if (nmse < 1 || nmse > 30) { stop("nmse out of range") } m <- frequency(y) if (any(upper < lower)) { stop("Lower limits must be less than upper limits") } # If model is an ets object, re-fit model to new data if (class(model) == "ets") { # Prevent alpha being zero (to avoid divide by zero in the C code) alpha <- max(model$par["alpha"], 1e-10) beta <- model$par["beta"] if (is.na(beta)) { beta <- NULL } gamma <- model$par["gamma"] if (is.na(gamma)) { gamma <- NULL } phi <- model$par["phi"] if (is.na(phi)) { phi <- NULL } modelcomponents <- paste(model$components[1], model$components[2], model$components[3], sep = "") damped <- (model$components[4] == "TRUE") if (use.initial.values) { errortype <- substr(modelcomponents, 1, 1) trendtype <- substr(modelcomponents, 2, 2) seasontype <- substr(modelcomponents, 3, 3) # Recompute errors from pegelsresid.C e <- pegelsresid.C(y, m, model$initstate, errortype, trendtype, seasontype, damped, alpha, beta, gamma, phi, nmse) # Compute error measures np <- length(model$par) + 1 model$loglik <- -0.5 * e$lik model$aic <- e$lik + 2 * np model$bic <- e$lik + log(ny) * np model$aicc <- model$aic + 2 * np * (np + 1) / (ny - np - 1) model$mse <- e$amse[1] model$amse <- mean(e$amse) # Compute states, fitted values and residuals tsp.y <- tsp(y) model$states <- ts(e$states, frequency = tsp.y[3], start = tsp.y[1] - 1 / tsp.y[3]) colnames(model$states)[1] <- "l" if (trendtype != "N") { colnames(model$states)[2] <- "b" } if (seasontype != "N") { colnames(model$states)[(2 + (trendtype != "N")):ncol(model$states)] <- paste("s", 1:m, sep = "") } if (errortype == "A") { model$fitted <- ts(y - e$e, frequency = tsp.y[3], start = tsp.y[1]) } else { model$fitted <- ts(y / (1 + e$e), frequency = tsp.y[3], start = tsp.y[1]) } model$residuals <- ts(e$e, frequency = tsp.y[3], start = tsp.y[1]) model$sigma2 <- sum(model$residuals ^ 2, na.rm = TRUE) / (ny - np) model$x <- orig.y model$series <- seriesname if (!is.null(lambda)) { model$fitted <- InvBoxCox(model$fitted, lambda, biasadj, var(model$residuals)) attr(lambda, "biasadj") <- biasadj } model$lambda <- lambda # Return model object return(model) } else { model <- modelcomponents if (missing(use.initial.values)) { message("Model is being refit with current smoothing parameters but initial states are being re-estimated.\nSet 'use.initial.values=TRUE' if you want to re-use existing initial values.") } } } errortype <- substr(model, 1, 1) trendtype <- substr(model, 2, 2) seasontype <- substr(model, 3, 3) if (!is.element(errortype, c("M", "A", "Z"))) { stop("Invalid error type") } if (!is.element(trendtype, c("N", "A", "M", "Z"))) { stop("Invalid trend type") } if (!is.element(seasontype, c("N", "A", "M", "Z"))) { stop("Invalid season type") } if (m < 1 || length(y) <= m) { # warning("I can't handle data with frequency less than 1. Seasonality will be ignored.") seasontype <- "N" } if (m == 1) { if (seasontype == "A" || seasontype == "M") { stop("Nonseasonal data") } else { substr(model, 3, 3) <- seasontype <- "N" } } if (m > 24) { if (is.element(seasontype, c("A", "M"))) { stop("Frequency too high") } else if (seasontype == "Z") { warning("I can't handle data with frequency greater than 24. Seasonality will be ignored. 
Try stlf() if you need seasonal forecasts.") substr(model, 3, 3) <- seasontype <- "N" # m <- 1 } } # Check inputs if (restrict) { if ((errortype == "A" && (trendtype == "M" || seasontype == "M")) | (errortype == "M" && trendtype == "M" && seasontype == "A") || (additive.only && (errortype == "M" || trendtype == "M" || seasontype == "M"))) { stop("Forbidden model combination") } } data.positive <- (min(y) > 0) if (!data.positive && errortype == "M") { stop("Inappropriate model for data with negative or zero values") } if (!is.null(damped)) { if (damped && trendtype == "N") { stop("Forbidden model combination") } } n <- length(y) # Check we have enough data to fit a model npars <- 2L # alpha + l0 if (trendtype == "A" || trendtype == "M") { npars <- npars + 2L } # beta + b0 if (seasontype == "A" || seasontype == "M") { npars <- npars + m } # gamma + s if (!is.null(damped)) { npars <- npars + as.numeric(damped) } # Produce something non-optimized for tiny data sets if (n <= npars + 4L) { if (!is.null(damped)) { if (damped) { warning("Not enough data to use damping") } } if (seasontype == "A" || seasontype == "M") { fit <- try(HoltWintersZZ( orig.y, alpha = alpha, beta = beta, gamma = gamma, phi = phi, exponential = (trendtype == "M"), seasonal = ifelse(seasontype != "A", "multiplicative", "additive"), lambda = lambda, biasadj = biasadj, warnings = FALSE ), silent = TRUE) if (!("try-error" %in% class(fit))) { fit$call <- match.call() fit$method <- as.character(fit) fit$series <- deparse(substitute(y)) return(fit) } else { warning("Seasonal component could not be estimated") } } if (trendtype == "A" || trendtype == "M") { fit <- try(HoltWintersZZ( orig.y, alpha = alpha, beta = beta, gamma = FALSE, phi = phi, exponential = (trendtype == "M"), lambda = lambda, biasadj = biasadj, warnings = FALSE ), silent = TRUE) if (!("try-error" %in% class(fit))) { fit$call <- match.call() fit$method <- as.character(fit) fit$series <- deparse(substitute(y)) return(fit) } else { warning("Trend component could not be estimated") } } if (trendtype == "N" && seasontype == "N") { fit <- try(HoltWintersZZ( orig.y, alpha = alpha, beta = FALSE, gamma = FALSE, lambda = lambda, biasadj = biasadj, warnings = FALSE ), silent = TRUE) if (!("try-error" %in% class(fit))) { fit$call <- match.call() fit$method <- as.character(fit) fit$series <- deparse(substitute(y)) return(fit) } } # Try holt and ses and return best fit1 <- try(HoltWintersZZ( orig.y, alpha = alpha, beta = beta, gamma = FALSE, phi = phi, exponential = (trendtype == "M"), lambda = lambda, biasadj = biasadj, warnings = FALSE ), silent = TRUE) fit2 <- try(HoltWintersZZ( orig.y, alpha = alpha, beta = FALSE, gamma = FALSE, phi = phi, exponential = (trendtype == "M"), lambda = lambda, biasadj = biasadj, warnings = FALSE ), silent = TRUE) if ("try-error" %in% class(fit1)) { fit <- fit2 } else if (fit1$sigma2 < fit2$sigma2) { fit <- fit1 } else { fit <- fit2 } if("try-error" %in% class(fit)) stop("Unable to estimate a model.") fit$call <- match.call() fit$method <- as.character(fit) fit$series <- deparse(substitute(y)) return(fit) } # Fit model (assuming only one nonseasonal model) if (errortype == "Z") { errortype <- c("A", "M") } if (trendtype == "Z") { if (allow.multiplicative.trend) { trendtype <- c("N", "A", "M") } else { trendtype <- c("N", "A") } } if (seasontype == "Z") { seasontype <- c("N", "A", "M") } if (is.null(damped)) { damped <- c(TRUE, FALSE) } best.ic <- Inf for (i in 1:length(errortype)) { for (j in 1:length(trendtype)) { for (k in 
1:length(seasontype)) { for (l in 1:length(damped)) { if (trendtype[j] == "N" && damped[l]) { next } if (restrict) { if (errortype[i] == "A" && (trendtype[j] == "M" || seasontype[k] == "M")) { next } if (errortype[i] == "M" && trendtype[j] == "M" && seasontype[k] == "A") { next } if (additive.only && (errortype[i] == "M" || trendtype[j] == "M" || seasontype[k] == "M")) { next } } if (!data.positive && errortype[i] == "M") { next } fit <- try(etsmodel( y, errortype[i], trendtype[j], seasontype[k], damped[l], alpha, beta, gamma, phi, lower = lower, upper = upper, opt.crit = opt.crit, nmse = nmse, bounds = bounds, ... ), silent=TRUE) if(is.element("try-error", class(fit))) fit.ic <- Inf else fit.ic <- switch(ic, aic = fit$aic, bic = fit$bic, aicc = fit$aicc) if (!is.na(fit.ic)) { if (fit.ic < best.ic) { model <- fit best.ic <- fit.ic best.e <- errortype[i] best.t <- trendtype[j] best.s <- seasontype[k] best.d <- damped[l] } } } } } } if (best.ic == Inf) { stop("No model able to be fitted") } model$m <- m model$method <- paste("ETS(", best.e, ",", best.t, ifelse(best.d, "d", ""), ",", best.s, ")", sep = "") model$series <- seriesname model$components <- c(best.e, best.t, best.s, best.d) model$call <- match.call() model$initstate <- model$states[1, ] np <- length(model$par) model$sigma2 <- sum(model$residuals^2, na.rm = TRUE) / (ny - np) model$x <- orig.y if (!is.null(lambda)) { model$fitted <- InvBoxCox(model$fitted, lambda, biasadj, model$sigma2) attr(lambda, "biasadj") <- biasadj } model$lambda <- lambda # model$call$data <- dataname return(structure(model, class = "ets")) } #' @export as.character.ets <- function(x, ...) { paste( "ETS(", x$components[1], ",", x$components[2], ifelse(x$components[4], "d", ""), ",", x$components[3], ")", sep = "" ) } # myRequire <- function(libName) { # req.suc <- require(libName, quietly=TRUE, character.only=TRUE) # if(!req.suc) stop("The ",libName," package is not available.") # req.suc # } # getNewBounds <- function(par, lower, upper, nstate) { # myLower <- NULL # myUpper <- NULL # if("alpha" %in% names(par)) { # myLower <- c(myLower, lower[1]) # myUpper <- c(myUpper, upper[1]) # } # if("beta" %in% names(par)) { # myLower <- c(myLower, lower[2]) # myUpper <- c(myUpper, upper[2]) # } # if("gamma" %in% names(par)) { # myLower <- c(myLower, lower[3]) # myUpper <- c(myUpper, upper[3]) # } # if("phi" %in% names(par)) { # myLower <- c(myLower, lower[4]) # myUpper <- c(myUpper, upper[4]) # } # myLower <- c(myLower,rep(-1e8,nstate)) # myUpper <- c(myUpper,rep(1e8,nstate)) # list(lower=myLower, upper=myUpper) # } etsmodel <- function(y, errortype, trendtype, seasontype, damped, alpha=NULL, beta=NULL, gamma=NULL, phi=NULL, lower, upper, opt.crit, nmse, bounds, maxit=2000, control=NULL, seed=NULL, trace=FALSE) { tsp.y <- tsp(y) if (is.null(tsp.y)) { tsp.y <- c(1, length(y), 1) } if (seasontype != "N") { m <- tsp.y[3] } else { m <- 1 } # Modify limits if alpha, beta or gamma have been specified. 
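  # The usual ETS constraints are 0 < beta < alpha and 0 < gamma < 1 - alpha,
  # so fixing one of these parameters tightens the admissible bounds on the
  # others before optimisation.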
if (!is.null(alpha)) { upper[2] <- min(alpha, upper[2]) upper[3] <- min(1 - alpha, upper[3]) } if (!is.null(beta)) { lower[1] <- max(beta, lower[1]) } if (!is.null(gamma)) { upper[1] <- min(1 - gamma, upper[1]) } # Initialize smoothing parameters par <- initparam(alpha, beta, gamma, phi, trendtype, seasontype, damped, lower, upper, m) names(alpha) <- names(beta) <- names(gamma) <- names(phi) <- NULL par.noopt <- c(alpha = alpha, beta = beta, gamma = gamma, phi = phi) if (!is.null(par.noopt)) { par.noopt <- c(na.omit(par.noopt)) } if (!is.na(par["alpha"])) { alpha <- par["alpha"] } if (!is.na(par["beta"])) { beta <- par["beta"] } if (!is.na(par["gamma"])) { gamma <- par["gamma"] } if (!is.na(par["phi"])) { phi <- par["phi"] } # if(errortype=="M" | trendtype=="M" | seasontype=="M") # bounds="usual" if (!check.param(alpha, beta, gamma, phi, lower, upper, bounds, m)) { print(paste("Model: ETS(", errortype, ",", trendtype, ifelse(damped, "d", ""), ",", seasontype, ")", sep = "")) stop("Parameters out of range") } # Initialize state init.state <- initstate(y, trendtype, seasontype) nstate <- length(init.state) par <- c(par, init.state) lower <- c(lower, rep(-Inf, nstate)) upper <- c(upper, rep(Inf, nstate)) np <- length(par) if (np >= length(y) - 1) { # Not enough data to continue return(list(aic = Inf, bic = Inf, aicc = Inf, mse = Inf, amse = Inf, fit = NULL, par = par, states = init.state)) } # ------------------------------------------------- # if(is.null(seed)) seed <- 1000*runif(1) # if(solver=="malschains" || solver=="malschains_c") { # malschains <- NULL # if(!myRequire("Rmalschains")) # stop("malschains optimizer unavailable") # func <- NULL # #env <- NULL # if(solver=="malschains") { # func <- function(myPar) { # names(myPar) <- names(par) # res <- lik(myPar,y=y,nstate=nstate, errortype=errortype, trendtype=trendtype, # seasontype=seasontype, damped=damped, par.noopt=par.noopt, lowerb=lower, upperb=upper, # opt.crit=opt.crit, nmse=nmse, bounds=bounds, m=m,pnames=names(par),pnames2=names(par.noopt)) # res # } # env <- new.env() # } else { # env <- etsTargetFunctionInit(par=par, y=y, nstate=nstate, errortype=errortype, trendtype=trendtype, # seasontype=seasontype, damped=damped, par.noopt=par.noopt, lowerb=lower, upperb=upper, # opt.crit=opt.crit, nmse=nmse, bounds=bounds, m=m,pnames=names(par),pnames2=names(par.noopt)) # func <- .Call("etsGetTargetFunctionRmalschainsPtr", PACKAGE="forecast") # } # myBounds <- getNewBounds(par, lower, upper, nstate) # if(is.null(control)) { # control <- Rmalschains::malschains.control(ls="simplex", lsOnly=TRUE) # } # control$optimum <- if(opt.crit=="lik") -1e12 else 0 # fredTmp <- Rmalschains::malschains(func, env=env, lower=myBounds$lower, upper=myBounds$upper, # maxEvals=maxit, seed=seed, initialpop=par, control=control) # fred <- NULL # fred$par <- fredTmp$sol # fit.par <- fred$par # names(fit.par) <- names(par) # } else if (solver=="Rdonlp2") { # # donlp2 <- NULL # myRequire("Rdonlp2") # # env <- etsTargetFunctionInit(par=par, y=y, nstate=nstate, errortype=errortype, trendtype=trendtype, # seasontype=seasontype, damped=damped, par.noopt=par.noopt, lowerb=lower, upperb=upper, # opt.crit=opt.crit, nmse=nmse, bounds=bounds, m=m,pnames=names(par),pnames2=names(par.noopt)) # # func <- .Call("etsGetTargetFunctionRdonlp2Ptr", PACKAGE="forecast") # # myBounds <- getNewBounds(par, lower, upper, nstate) # # fred <- donlp2(par, func, env=env, par.lower=myBounds$lower, par.upper=myBounds$upper)#, nlin.lower=c(-1), nlin.upper=c(1)) #nlin.lower=c(0,-Inf, -Inf, 
-Inf), nlin.upper=c(0,0,0,0)) # # fit.par <- fred$par # # names(fit.par) <- names(par) # } else if(solver=="optim_c"){ env <- etsTargetFunctionInit( par = par, y = y, nstate = nstate, errortype = errortype, trendtype = trendtype, seasontype = seasontype, damped = damped, par.noopt = par.noopt, lowerb = lower, upperb = upper, opt.crit = opt.crit, nmse = as.integer(nmse), bounds = bounds, m = m, pnames = names(par), pnames2 = names(par.noopt) ) fred <- .Call( "etsNelderMead", par, env, -Inf, sqrt(.Machine$double.eps), 1.0, 0.5, 2.0, trace, maxit, PACKAGE = "forecast" ) fit.par <- fred$par names(fit.par) <- names(par) # } else { #if(solver=="optim") # # Optimize parameters and state # if(length(par)==1) # method <- "Brent" # else # method <- "Nelder-Mead" # fred <- optim(par,lik,method=method,y=y,nstate=nstate, errortype=errortype, trendtype=trendtype, # seasontype=seasontype, damped=damped, par.noopt=par.noopt, lowerb=lower, upperb=upper, # opt.crit=opt.crit, nmse=nmse, bounds=bounds, m=m,pnames=names(par),pnames2=names(par.noopt), # control=list(maxit=maxit)) # fit.par <- fred$par # names(fit.par) <- names(par) # } # ------------------------------------------------- init.state <- fit.par[(np - nstate + 1):np] # Add extra state if (seasontype != "N") { init.state <- c(init.state, m * (seasontype == "M") - sum(init.state[(2 + (trendtype != "N")):nstate])) } if (!is.na(fit.par["alpha"])) { alpha <- fit.par["alpha"] } if (!is.na(fit.par["beta"])) { beta <- fit.par["beta"] } if (!is.na(fit.par["gamma"])) { gamma <- fit.par["gamma"] } if (!is.na(fit.par["phi"])) { phi <- fit.par["phi"] } e <- pegelsresid.C(y, m, init.state, errortype, trendtype, seasontype, damped, alpha, beta, gamma, phi, nmse) np <- np + 1 ny <- length(y) aic <- e$lik + 2 * np bic <- e$lik + log(ny) * np aicc <- aic + 2 * np * (np + 1) / (ny - np - 1) mse <- e$amse[1] amse <- mean(e$amse) states <- ts(e$states, frequency = tsp.y[3], start = tsp.y[1] - 1 / tsp.y[3]) colnames(states)[1] <- "l" if (trendtype != "N") { colnames(states)[2] <- "b" } if (seasontype != "N") { colnames(states)[(2 + (trendtype != "N")):ncol(states)] <- paste("s", 1:m, sep = "") } tmp <- c("alpha", rep("beta", trendtype != "N"), rep("gamma", seasontype != "N"), rep("phi", damped)) fit.par <- c(fit.par, par.noopt) # fit.par <- fit.par[order(names(fit.par))] if (errortype == "A") { fits <- y - e$e } else { fits <- y / (1 + e$e) } return(list( loglik = -0.5 * e$lik, aic = aic, bic = bic, aicc = aicc, mse = mse, amse = amse, fit = fred, residuals = ts(e$e, frequency = tsp.y[3], start = tsp.y[1]), fitted = ts(fits, frequency = tsp.y[3], start = tsp.y[1]), states = states, par = fit.par )) } etsTargetFunctionInit <- function(par, y, nstate, errortype, trendtype, seasontype, damped, par.noopt, lowerb, upperb, opt.crit, nmse, bounds, m, pnames, pnames2) { names(par) <- pnames names(par.noopt) <- pnames2 alpha <- c(par["alpha"], par.noopt["alpha"])["alpha"] if (is.na(alpha)) { stop("alpha problem!") } if (trendtype != "N") { beta <- c(par["beta"], par.noopt["beta"])["beta"] if (is.na(beta)) { stop("beta Problem!") } } else { beta <- NULL } if (seasontype != "N") { gamma <- c(par["gamma"], par.noopt["gamma"])["gamma"] if (is.na(gamma)) { stop("gamma Problem!") } } else { m <- 1 gamma <- NULL } if (damped) { phi <- c(par["phi"], par.noopt["phi"])["phi"] if (is.na(phi)) { stop("phi Problem!") } } else { phi <- NULL } # determine which values to optimize and which ones are given by the user/not needed optAlpha <- !is.null(alpha) optBeta <- !is.null(beta) optGamma <- 
!is.null(gamma) optPhi <- !is.null(phi) givenAlpha <- FALSE givenBeta <- FALSE givenGamma <- FALSE givenPhi <- FALSE if (!is.null(par.noopt["alpha"])) { if (!is.na(par.noopt["alpha"])) { optAlpha <- FALSE givenAlpha <- TRUE } } if (!is.null(par.noopt["beta"])) { if (!is.na(par.noopt["beta"])) { optBeta <- FALSE givenBeta <- TRUE } } if (!is.null(par.noopt["gamma"])) { if (!is.na(par.noopt["gamma"])) { optGamma <- FALSE givenGamma <- TRUE } } if (!is.null(par.noopt["phi"])) { if (!is.na(par.noopt["phi"])) { optPhi <- FALSE givenPhi <- TRUE } } if (!damped) { phi <- 1 } if (trendtype == "N") { beta <- 0 } if (seasontype == "N") { gamma <- 0 } # cat("alpha: ", alpha) # cat(" beta: ", beta) # cat(" gamma: ", gamma) # cat(" phi: ", phi, "\n") # # cat("useAlpha: ", useAlpha) # cat(" useBeta: ", useBeta) # cat(" useGamma: ", useGamma) # cat(" usePhi: ", usePhi, "\n") env <- new.env() res <- .Call( "etsTargetFunctionInit", y = y, nstate = nstate, errortype = switch(errortype, "A" = 1, "M" = 2), trendtype = switch(trendtype, "N" = 0, "A" = 1, "M" = 2), seasontype = switch(seasontype, "N" = 0, "A" = 1, "M" = 2), damped = damped, lowerb = lowerb, upperb = upperb, opt.crit = opt.crit, nmse = as.integer(nmse), bounds = bounds, m = m, optAlpha, optBeta, optGamma, optPhi, givenAlpha, givenBeta, givenGamma, givenPhi, alpha, beta, gamma, phi, env, PACKAGE = "forecast" ) res } initparam <- function(alpha, beta, gamma, phi, trendtype, seasontype, damped, lower, upper, m) { if (any(lower > upper)) { stop("Inconsistent parameter boundaries") } # Select alpha if (is.null(alpha)) { alpha <- lower[1] + 0.2 * (upper[1] - lower[1]) / m if (alpha > 1 || alpha < 0) { alpha <- lower[1] + 2e-3 } par <- c(alpha = alpha) } else { par <- numeric(0) } # Select beta if (trendtype != "N" && is.null(beta)) { # Ensure beta < alpha upper[2] <- min(upper[2], alpha) beta <- lower[2] + 0.1 * (upper[2] - lower[2]) if (beta < 0 || beta > alpha) { beta <- alpha - 1e-3 } par <- c(par, beta = beta) } # Select gamma if (seasontype != "N" && is.null(gamma)) { # Ensure gamma < 1-alpha upper[3] <- min(upper[3], 1 - alpha) gamma <- lower[3] + 0.05 * (upper[3] - lower[3]) if (gamma < 0 || gamma > 1 - alpha) { gamma <- 1 - alpha - 1e-3 } par <- c(par, gamma = gamma) } # Select phi if (damped && is.null(phi)) { phi <- lower[4] + .99 * (upper[4] - lower[4]) if (phi < 0 || phi > 1) { phi <- upper[4] - 1e-3 } par <- c(par, phi = phi) } return(par) } check.param <- function(alpha, beta, gamma, phi, lower, upper, bounds, m) { if (bounds != "admissible") { if (!is.null(alpha)) { if (alpha < lower[1] || alpha > upper[1]) { return(0) } } if (!is.null(beta)) { if (beta < lower[2] || beta > alpha || beta > upper[2]) { return(0) } } if (!is.null(phi)) { if (phi < lower[4] || phi > upper[4]) { return(0) } } if (!is.null(gamma)) { if (gamma < lower[3] || gamma > 1 - alpha || gamma > upper[3]) { return(0) } } } if (bounds != "usual") { if (!admissible(alpha, beta, gamma, phi, m)) { return(0) } } return(1) } initstate <- function(y, trendtype, seasontype) { if (seasontype != "N") { # Do decomposition m <- frequency(y) n <- length(y) if (n < 4) { stop("You've got to be joking (not enough data).") } else if (n < 3 * m) # Fit simple Fourier model. { fouriery <- fourier(y, 1) fit <- tslm(y ~ trend + fouriery) if (seasontype == "A") { y.d <- list(seasonal = y - fit$coefficients[1] - fit$coefficients[2] * (1:n)) } else { # seasontype=="M". 
Biased method, but we only need a starting point y.d <- list(seasonal = y / (fit$coefficients[1] + fit$coefficients[2] * (1:n))) } } else { # n is large enough to do a decomposition y.d <- decompose(y, type = switch(seasontype, A = "additive", M = "multiplicative")) } init.seas <- rev(y.d$seasonal[2:m]) # initial seasonal component names(init.seas) <- paste("s", 0:(m - 2), sep = "") # Seasonally adjusted data if (seasontype == "A") { y.sa <- y - y.d$seasonal } else { init.seas <- pmax(init.seas, 1e-2) # We do not want negative seasonal indexes if (sum(init.seas) > m) { init.seas <- init.seas / sum(init.seas + 1e-2) } y.sa <- y / pmax(y.d$seasonal, 1e-2) } } else # non-seasonal model { m <- 1 init.seas <- NULL y.sa <- y } maxn <- min(max(10, 2 * m), length(y.sa)) if (trendtype == "N") { l0 <- mean(y.sa[1:maxn]) b0 <- NULL } else # Simple linear regression on seasonally adjusted data { fit <- lsfit(1:maxn, y.sa[1:maxn]) if (trendtype == "A") { l0 <- fit$coefficients[1] b0 <- fit$coefficients[2] # If error type is "M", then we don't want l0+b0=0. # So perturb just in case. if (abs(l0 + b0) < 1e-8) { l0 <- l0 * (1 + 1e-3) b0 <- b0 * (1 - 1e-3) } } else # if(trendtype=="M") { l0 <- fit$coefficients[1] + fit$coefficients[2] # First fitted value if (abs(l0) < 1e-8) { l0 <- 1e-7 } b0 <- (fit$coefficients[1] + 2 * fit$coefficients[2]) / l0 # Ratio of first two fitted values l0 <- l0 / b0 # First fitted value divided by b0 if (abs(b0) > 1e10) { # Avoid infinite slopes b0 <- sign(b0) * 1e10 } if (l0 < 1e-8 || b0 < 1e-8) # Simple linear approximation didn't work. { l0 <- max(y.sa[1], 1e-3) b0 <- max(y.sa[2] / y.sa[1], 1e-3) } } } names(l0) <- "l" if (!is.null(b0)) { names(b0) <- "b" } return(c(l0, b0, init.seas)) } lik <- function(par, y, nstate, errortype, trendtype, seasontype, damped, par.noopt, lowerb, upperb, opt.crit, nmse, bounds, m, pnames, pnames2) { # browser() # cat("par: ", par, "\n") names(par) <- pnames names(par.noopt) <- pnames2 alpha <- c(par["alpha"], par.noopt["alpha"])["alpha"] if (is.na(alpha)) { stop("alpha problem!") } if (trendtype != "N") { beta <- c(par["beta"], par.noopt["beta"])["beta"] if (is.na(beta)) { stop("beta Problem!") } } else { beta <- NULL } if (seasontype != "N") { gamma <- c(par["gamma"], par.noopt["gamma"])["gamma"] if (is.na(gamma)) { stop("gamma Problem!") } } else { m <- 1 gamma <- NULL } if (damped) { phi <- c(par["phi"], par.noopt["phi"])["phi"] if (is.na(phi)) { stop("phi Problem!") } } else { phi <- NULL } if (!check.param(alpha, beta, gamma, phi, lowerb, upperb, bounds, m)) { return(Inf) } np <- length(par) init.state <- par[(np - nstate + 1):np] # Add extra state if (seasontype != "N") { init.state <- c(init.state, m * (seasontype == "M") - sum(init.state[(2 + (trendtype != "N")):nstate])) } # Check states if (seasontype == "M") { seas.states <- init.state[-(1:(1 + (trendtype != "N")))] if (min(seas.states) < 0) { return(Inf) } } e <- pegelsresid.C(y, m, init.state, errortype, trendtype, seasontype, damped, alpha, beta, gamma, phi, nmse) if (is.na(e$lik)) { return(Inf) } if (e$lik < -1e10) { # Avoid perfect fits return(-1e10) } # cat("lik: ", e$lik, "\n") # points(alpha,e$lik,col=2) if (opt.crit == "lik") { return(e$lik) } else if (opt.crit == "mse") { return(e$amse[1]) } else if (opt.crit == "amse") { return(mean(e$amse)) } else if (opt.crit == "sigma") { return(mean(e$e ^ 2)) } else if (opt.crit == "mae") { return(mean(abs(e$e))) } } #' @export print.ets <- function(x, ...) 
{ cat(paste(x$method, "\n\n")) if(!is.null(x$call)) { cat(paste("Call:\n", deparse(x$call), "\n\n")) } ncoef <- length(x$initstate) if (!is.null(x$lambda)) { cat(" Box-Cox transformation: lambda=", round(x$lambda, 4), "\n\n") } cat(" Smoothing parameters:\n") cat(paste(" alpha =", round(x$par["alpha"], 4), "\n")) if (x$components[2] != "N") { cat(paste(" beta =", round(x$par["beta"], 4), "\n")) } if (x$components[3] != "N") { cat(paste(" gamma =", round(x$par["gamma"], 4), "\n")) } if (x$components[4] != "FALSE") { cat(paste(" phi =", round(x$par["phi"], 4), "\n")) } cat("\n Initial states:\n") cat(paste(" l =", round(x$initstate[1], 4), "\n")) if (x$components[2] != "N") { cat(paste(" b =", round(x$initstate[2], 4), "\n")) } else { x$initstate <- c(x$initstate[1], NA, x$initstate[2:ncoef]) ncoef <- ncoef + 1 } if (x$components[3] != "N") { cat(" s = ") if (ncoef <= 8) { cat(round(x$initstate[3:ncoef], 4)) } else { cat(round(x$initstate[3:8], 4)) cat("\n ") cat(round(x$initstate[9:ncoef], 4)) } cat("\n") } cat("\n sigma: ") cat(round(sqrt(x$sigma2), 4)) if (!is.null(x$aic)) { stats <- c(x$aic, x$aicc, x$bic) names(stats) <- c("AIC", "AICc", "BIC") cat("\n\n") print(stats) } # cat("\n AIC: ") # cat(round(x$aic,4)) # cat("\n AICc: ") # cat(round(x$aicc,4)) # cat("\n BIC: ") # cat(round(x$bic,4)) } pegelsresid.C <- function(y, m, init.state, errortype, trendtype, seasontype, damped, alpha, beta, gamma, phi, nmse) { n <- length(y) p <- length(init.state) x <- numeric(p * (n + 1)) x[1:p] <- init.state e <- numeric(n) lik <- 0 if (!damped) { phi <- 1 } if (trendtype == "N") { beta <- 0 } if (seasontype == "N") { gamma <- 0 } amse <- numeric(nmse) Cout <- .C( "etscalc", as.double(y), as.integer(n), as.double(x), as.integer(m), as.integer(switch(errortype, "A" = 1, "M" = 2)), as.integer(switch(trendtype, "N" = 0, "A" = 1, "M" = 2)), as.integer(switch(seasontype, "N" = 0, "A" = 1, "M" = 2)), as.double(alpha), as.double(beta), as.double(gamma), as.double(phi), as.double(e), as.double(lik), as.double(amse), as.integer(nmse), PACKAGE = "forecast" ) if (!is.na(Cout[[13]])) { if (abs(Cout[[13]] + 99999) < 1e-7) { Cout[[13]] <- NA } } tsp.y <- tsp(y) e <- ts(Cout[[12]]) tsp(e) <- tsp.y return(list(lik = Cout[[13]], amse = Cout[[14]], e = e, states = matrix(Cout[[3]], nrow = n + 1, ncol = p, byrow = TRUE))) } admissible <- function(alpha, beta, gamma, phi, m) { if (is.null(phi)) { phi <- 1 } if (phi < 0 || phi > 1 + 1e-8) { return(0) } if (is.null(gamma)) { if (alpha < 1 - 1 / phi || alpha > 1 + 1 / phi) { return(0) } if (!is.null(beta)) { if (beta < alpha * (phi - 1) || beta > (1 + phi) * (2 - alpha)) { return(0) } } } else if (m > 1) # Seasonal model { if (is.null(beta)) { beta <- 0 } if (gamma < max(1 - 1 / phi - alpha, 0) || gamma > 1 + 1 / phi - alpha) { return(0) } if (alpha < 1 - 1 / phi - gamma * (1 - m + phi + phi * m) / (2 * phi * m)) { return(0) } if (beta < -(1 - phi) * (gamma / m + alpha)) { return(0) } # End of easy tests. Now use characteristic equation P <- c(phi * (1 - alpha - gamma), alpha + beta - alpha * phi + gamma - 1, rep(alpha + beta - alpha * phi, m - 2), (alpha + beta - phi), 1) roots <- polyroot(P) # cat("maxpolyroots: ", max(abs(roots)), "\n") if (max(abs(roots)) > 1 + 1e-10) { return(0) } } # Passed all tests return(1) } ### PLOT COMPONENTS #' Plot components from ETS model #' #' Produces a plot of the level, slope and seasonal components from an ETS #' model. #' #' \code{autoplot} will produce an equivalent plot as a ggplot object. #' #' @param x Object of class \dQuote{ets}. 
#' @param object Object of class \dQuote{ets}. Used for ggplot graphics (S3
#' method consistency).
#' @param range.bars Logical indicating if each plot should have a bar at its
#' right side representing relative size. If NULL, automatic selection takes
#' place.
#' @param ... Other plotting parameters to affect the plot.
#' @return None. Function produces a plot.
#' @author Rob J Hyndman & Mitchell O'Hara-Wild
#' @seealso \code{\link{ets}}
#' @keywords hplot
#' @examples
#'
#' fit <- ets(USAccDeaths)
#' plot(fit)
#' plot(fit,plot.type="single",ylab="",col=1:3)
#'
#' library(ggplot2)
#' autoplot(fit)
#'
#' @export
plot.ets <- function(x, ...) {
  if (!is.null(x$lambda)) {
    y <- BoxCox(x$x, x$lambda)
  } else {
    y <- x$x
  }
  if (x$components[3] == "N" && x$components[2] == "N") {
    plot(
      cbind(observed = y, level = x$states[, 1]),
      main = paste("Decomposition by", x$method, "method"),
      ...
    )
  } else if (x$components[3] == "N") {
    plot(
      cbind(observed = y, level = x$states[, 1], slope = x$states[, "b"]),
      main = paste("Decomposition by", x$method, "method"),
      ...
    )
  } else if (x$components[2] == "N") {
    plot(
      cbind(observed = y, level = x$states[, 1], season = x$states[, "s1"]),
      main = paste("Decomposition by", x$method, "method"),
      ...
    )
  } else {
    plot(
      cbind(
        observed = y, level = x$states[, 1],
        slope = x$states[, "b"], season = x$states[, "s1"]
      ),
      main = paste("Decomposition by", x$method, "method"),
      ...
    )
  }
}

#' @export
summary.ets <- function(object, ...) {
  class(object) <- c("summary.ets", class(object))
  object
}

#' @export
print.summary.ets <- function(x, ...) {
  NextMethod()
  cat("\nTraining set error measures:\n")
  print(accuracy(x))
}

#' @export
coef.ets <- function(object, ...) {
  object$par
}

#' @rdname fitted.Arima
#' @export
fitted.ets <- function(object, h=1, ...) {
  if (h == 1) {
    return(object$fitted)
  } else {
    return(hfitted(object = object, h = h, FUN = "ets", ...))
  }
}

#' @export
logLik.ets <- function(object, ...) {
  structure(object$loglik, df = length(object$par) + 1, class = "logLik")
}

#' @export
nobs.ets <- function(object, ...) {
  length(object$x)
}

#' Is an object a particular model type?
#'
#' Returns true if the model object is of a particular type
#'
#' @param x object to be tested
#' @export
is.ets <- function(x) {
  inherits(x, "ets")
}
forecast/R/spline.R0000644000176200001440000002010114003673410013676 0ustar liggesusers###############################################
##### Forecasting Using Smoothing Splines #####
###############################################
# Optimal smoothing parameter denoted by beta
# lambda is Box-Cox parameter.
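# Annotation (added sketch, not original package code): make.Sigma() below
# builds the scaled covariance kernel of an integrated Wiener process at times
# t = i/n,
#   Sigma[i, j] = t_i^2 * (3 * t_j - t_i) / 6   for t_i <= t_j,
# which corresponds to the stochastic prior underlying a cubic smoothing
# spline. For example, make.Sigma(3)[1, ] evaluates to c(2, 5, 8) / 162.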
################# FUNCTIONS ##################

## Set up Sigma of order (n x n)
make.Sigma <- function(n, n0=0) {
  nn <- n + n0
  Sigma <- matrix(0, nrow = nn, ncol = nn)
  for (i in 1:nn)
    Sigma[i, i:nn] <- Sigma[i:nn, i] <- (i * i * (3 * (i:nn) - i)) / 6
  return(Sigma / (n ^ 3))
}

## Compute spline matrices
spline.matrices <- function(n, beta, cc=1e2, n0=0) {
  nn <- n + n0
  Sigma <- make.Sigma(n, n0)
  s <- cbind(rep(1, nn), (1:nn) / n)
  Omega <- cc * s %*% t(s) + Sigma / beta + diag(nn)
  max.Omega <- max(Omega)
  inv.Omega <- solve(Omega / max.Omega, tol = 1e-10) / max.Omega
  P <- chol(inv.Omega)
  return(list(s = s, Sigma = Sigma, Omega = Omega, inv.Omega = inv.Omega, P = P))
}

## Compute smoothing splines
## Return -loglikelihood
# beta multiplied by 1e6 to avoid numerical difficulties in optimization
spline.loglik <- function(beta, y, cc=1e2) {
  n <- length(y)
  mat <- spline.matrices(n, beta / 1e6, cc = cc)
  y.star <- mat$P %*% matrix(y)
  return(-log(det(mat$P)) + 0.5 * n * log(sum(y.star ^ 2)))
}

# Spline forecasts

#' Cubic Spline Forecast
#'
#' Returns local linear forecasts and prediction intervals using cubic
#' smoothing splines.
#'
#' The cubic smoothing spline model is equivalent to an ARIMA(0,2,2) model but
#' with a restricted parameter space. The advantage of the spline model over
#' the full ARIMA model is that it provides a smooth historical trend as well
#' as a linear forecast function. Hyndman, King, Pitrun, and Billah (2005) show
#' that the forecast performance of the method is hardly affected by the
#' restricted parameter space.
#'
#' @param y a numeric vector or time series of class \code{ts}
#' @param h Number of periods for forecasting
#' @param level Confidence level for prediction intervals.
#' @param fan If TRUE, level is set to seq(51,99,by=3). This is suitable for
#' fan plots.
#' @param method Method for selecting the smoothing parameter. If
#' \code{method="gcv"}, the generalized cross-validation method from
#' \code{\link[stats]{smooth.spline}} is used. If \code{method="mle"}, the
#' maximum likelihood method from Hyndman et al (2005) is used.
#' @param x Deprecated. Included for backwards compatibility.
#' @inheritParams forecast
#' @return An object of class "\code{forecast}".
#'
#' The function \code{summary} is used to obtain and print a summary of the
#' results, while the function \code{plot} produces a plot of the forecasts and
#' prediction intervals.
#'
#' The generic accessor functions \code{fitted.values} and \code{residuals}
#' extract useful features of the value returned by \code{splinef}.
#'
#' An object of class \code{"forecast"} containing the following elements:
#' \item{model}{A list containing information about the fitted model}
#' \item{method}{The name of the forecasting method as a character string}
#' \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for
#' prediction intervals} \item{upper}{Upper limits for prediction intervals}
#' \item{level}{The confidence values associated with the prediction intervals}
#' \item{x}{The original time series (either \code{object} itself or the time
#' series used to create the model stored as \code{object}).}
#' \item{onestepf}{One-step forecasts from the fitted model.}
#' \item{fitted}{Smooth estimates of the fitted trend using all data.}
#' \item{residuals}{Residuals from the fitted model. That is x minus one-step
#' forecasts.}
#' @author Rob J Hyndman
#' @seealso \code{\link[stats]{smooth.spline}}, \code{\link[stats]{arima}},
#' \code{\link{holt}}.
#' @references Hyndman, King, Pitrun and Billah (2005) Local linear forecasts #' using cubic smoothing splines. \emph{Australian and New Zealand Journal of #' Statistics}, \bold{47}(1), 87-99. #' \url{https://robjhyndman.com/publications/splinefcast/}. #' @keywords ts #' @examples #' fcast <- splinef(uspop,h=5) #' plot(fcast) #' summary(fcast) #' #' @export splinef <- function(y, h=10, level=c(80, 95), fan=FALSE, lambda=NULL, biasadj=FALSE, method=c("gcv", "mle"), x=y) { method <- match.arg(method) if (!is.ts(x)) { x <- ts(x) } n <- length(x) freq <- frequency(x) if (!is.null(lambda)) { origx <- x x <- BoxCox(x, lambda) lambda <- attr(x, "lambda") } # Find optimal beta using likelihood approach in Hyndman et al paper. if (method == "mle") { if (n > 100) { # Use only last 100 observations to get beta xx <- x[(n - 99):n] } else { xx <- x } beta.est <- optimize(spline.loglik, interval = c(1e-6, 1e7), y = xx)$minimum / 1e6 # Compute spar which is equivalent to beta r <- 256 * smooth.spline(1:n, x, spar = 0)$lambda lss <- beta.est * n ^ 3 / (n - 1) ^ 3 spar <- (log(lss / r) / log(256) + 1) / 3 splinefit <- smooth.spline(1:n, x, spar = spar) sfits <- splinefit$y } else # Use GCV { splinefit <- smooth.spline(1:n, x, cv = FALSE, spar = NULL) sfits <- splinefit$y beta.est <- pmax(1e-7, splinefit$lambda * (n - 1) ^ 3 / n ^ 3) } # Compute matrices for optimal beta mat <- spline.matrices(n, beta.est) newmat <- spline.matrices(n, beta.est, n0 = h) # Get one-step predictors yfit <- e <- rep(NA, n) if (n > 1000) { warning("Series too long to compute training set fits and residuals") } else # This is probably grossly inefficient but I can't think of a better way right now { for (i in 1:(n - 1)) { U <- mat$Omega[1:i, i + 1] Oinv <- solve(mat$Omega[1:i, 1:i] / 1e6) / 1e6 yfit[i + 1] <- t(U) %*% Oinv %*% x[1:i] sd <- sqrt(mat$Omega[i + 1, i + 1] - t(U) %*% Oinv %*% U) e[i + 1] <- (x[i + 1] - yfit[i + 1]) / sd } } # Compute sigma^2 sigma2 <- mean(e ^ 2, na.rm = TRUE) # Compute mean and var of forecasts U <- newmat$Omega[1:n, n + (1:h)] Omega0 <- newmat$Omega[n + (1:h), n + (1:h)] Yhat <- t(U) %*% mat$inv.Omega %*% x sd <- sqrt(sigma2 * diag(Omega0 - t(U) %*% mat$inv.Omega %*% U)) # Compute prediction intervals. 
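  # Annotation (added): the limits computed below are Gaussian intervals,
  # Yhat +/- z * sd with z = qnorm(0.5 + level/200); e.g. level = 95 gives
  # z = qnorm(0.975) ~= 1.96, so the 95% limits are Yhat +/- 1.96 * sd.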
if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 && max(level) < 1) { level <- 100 * level } else if (min(level) < 0 || max(level) > 99.99) { stop("Confidence limit out of range") } } nconf <- length(level) lower <- upper <- matrix(NA, nrow = h, ncol = nconf) for (i in 1:nconf) { conf.factor <- qnorm(0.5 + 0.005 * level[i]) upper[, i] <- Yhat + conf.factor * sd lower[, i] <- Yhat - conf.factor * sd } lower <- ts(lower, start = tsp(x)[2] + 1 / freq, frequency = freq) upper <- ts(upper, start = tsp(x)[2] + 1 / freq, frequency = freq) res <- ts(x - yfit, start = start(x), frequency = freq) if (!is.null(lambda)) { Yhat <- InvBoxCox(Yhat, lambda, biasadj, list(level = level, upper = upper, lower = lower)) upper <- InvBoxCox(upper, lambda) lower <- InvBoxCox(lower, lambda) yfit <- InvBoxCox(yfit, lambda) sfits <- InvBoxCox(sfits, lambda) x <- origx } return(structure( list( method = "Cubic Smoothing Spline", level = level, x = x, series = deparse(substitute(y)), mean = ts(Yhat, frequency = freq, start = tsp(x)[2] + 1 / freq), upper = ts(upper, start = tsp(x)[2] + 1 / freq, frequency = freq), lower = ts(lower, start = tsp(x)[2] + 1 / freq, frequency = freq), model = list(beta = beta.est * n ^ 3, call = match.call()), fitted = ts(sfits, start = start(x), frequency = freq), residuals = res, standardizedresiduals = ts(e, start = start(x), frequency = freq), onestepf = ts(yfit, start = start(x), frequency = freq) ), lambda = lambda, class = c("splineforecast", "forecast") )) } #' @rdname plot.forecast #' #' @examples #' fcast <- splinef(airmiles,h=5) #' plot(fcast) #' autoplot(fcast) #' #' @export plot.splineforecast <- function(x, fitcol=2, type="o", pch=19, ...) { plot.forecast(x, type = type, pch = pch, ...) lines(x$fitted, col = fitcol) } #' @rdname is.forecast #' @export is.splineforecast <- function(x) { inherits(x, "splineforecast") } forecast/R/whichmodels.R0000644000176200001440000000104014003673410014703 0ustar liggesusersWhichModels <- function(max.p, max.q, max.P, max.Q, maxK) { total.models <- (max.p + 1) * (max.q + 1) * (max.P + 1) * (max.Q + 1) * length(0:maxK) x <- numeric(total.models) i <- 1 for (x1 in 0:max.p) for (x2 in 0:max.q) { for (x3 in 0:max.P) for (x4 in 0:max.Q) { for (K in 0:maxK) { x[i] <- paste(x1, "f", x2, "f", x3, "f", x4, "f", K, sep = "") i <- i + 1 } } } return(x) } UndoWhichModels <- function(n) { as.numeric(unlist(strsplit(n, split = "f"))) } forecast/R/forecast2.R0000644000176200001440000005077314133676007014317 0ustar liggesusers# Mean forecast #' Mean Forecast #' #' Returns forecasts and prediction intervals for an iid model applied to y. #' #' The iid model is \deqn{Y_t=\mu + Z_t}{Y[t]=mu + Z[t]} where \eqn{Z_t}{Z[t]} #' is a normal iid error. Forecasts are given by \deqn{Y_n(h)=\mu}{Y[n+h]=mu} #' where \eqn{\mu}{mu} is estimated by the sample mean. #' #' @param y a numeric vector or time series of class \code{ts} #' @param h Number of periods for forecasting #' @param level Confidence levels for prediction intervals. #' @param fan If TRUE, level is set to seq(51,99,by=3). This is suitable for #' fan plots. #' @param bootstrap If TRUE, use a bootstrap method to compute prediction intervals. #' Otherwise, assume a normal distribution. #' @param npaths Number of bootstrapped sample paths to use if \code{bootstrap==TRUE}. #' @param x Deprecated. Included for backwards compatibility. #' @inheritParams forecast #' #' @return An object of class "\code{forecast}". 
#' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{meanf}. #' #' An object of class \code{"forecast"} is a list containing at least the #' following elements: \item{model}{A list containing information about the #' fitted model} \item{method}{The name of the forecasting method as a #' character string} \item{mean}{Point forecasts as a time series} #' \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper #' limits for prediction intervals} \item{level}{The confidence values #' associated with the prediction intervals} \item{x}{The original time series #' (either \code{object} itself or the time series used to create the model #' stored as \code{object}).} \item{residuals}{Residuals from the fitted model. #' That is x minus fitted values.} \item{fitted}{Fitted values (one-step #' forecasts)} #' @author Rob J Hyndman #' @seealso \code{\link{rwf}} #' @keywords ts #' @examples #' nile.fcast <- meanf(Nile, h=10) #' plot(nile.fcast) #' #' @export meanf <- function(y, h=10, level=c(80, 95), fan=FALSE, lambda=NULL, biasadj=FALSE, bootstrap=FALSE, npaths=5000, x=y) { n <- length(x) if (!is.null(lambda)) { origx <- x x <- BoxCox(x, lambda) lambda <- attr(x, "lambda") } meanx <- mean(x, na.rm = TRUE) fits <- rep(meanx, length(x)) res <- x - fits f <- rep(meanx, h) if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 && max(level) < 1) { level <- 100 * level } else if (min(level) < 0 || max(level) > 99.99) { stop("Confidence limit out of range") } } nconf <- length(level) s <- sd(x, na.rm = TRUE) if (bootstrap) { e <- na.omit(res) - mean(res, na.rm = TRUE) sim <- matrix(sample(e, size = npaths * h, replace = TRUE), ncol = npaths, nrow = h) sim <- sweep(sim, 1, f, "+") lower <- t(apply(sim, 1, quantile, prob = .5 - level / 200)) upper <- t(apply(sim, 1, quantile, prob = .5 + level / 200)) } else { lower <- upper <- matrix(NA, nrow = h, ncol = nconf) for (i in 1:nconf) { if (n > 1) { tfrac <- qt(0.5 - level[i] / 200, n - 1) } else { tfrac <- -Inf } w <- -tfrac * s * sqrt(1 + 1 / n) lower[, i] <- f - w upper[, i] <- f + w } } colnames(lower) <- colnames(upper) <- paste(level, "%", sep = "") if (is.ts(x)) { fits <- copy_msts(x, fits) res <- copy_msts(x, res) f <- future_msts(x, f) lower <- future_msts(x, lower) upper <- future_msts(x, upper) } if (!is.null(lambda)) { fits <- InvBoxCox(fits, lambda) x <- origx f <- InvBoxCox(f, lambda, biasadj, list(level = level, upper = upper, lower = lower)) lower <- InvBoxCox(lower, lambda) upper <- InvBoxCox(upper, lambda) } out <- list( method = "Mean", level = level, x = x, series = deparse(substitute(y)), mean = f, lower = lower, upper = upper, model = structure(list(mu = f[1], mu.se = s / sqrt(length(x)), sd = s, bootstrap = bootstrap), class = "meanf"), lambda = lambda, fitted = fits, residuals = res ) out$model$call <- match.call() return(structure(out, class = "forecast")) } #' Box Cox Transformation #' #' BoxCox() returns a transformation of the input variable using a Box-Cox #' transformation. InvBoxCox() reverses the transformation. #' #' The Box-Cox transformation (as given by Bickel & Doksum 1981) is given by \deqn{f_\lambda(x) =sign(x)(|x|^\lambda - #' 1)/\lambda}{f(x;lambda)=sign(x)(|x|^lambda - 1)/lambda} if \eqn{\lambda\ne0}{lambda #' is not equal to 0}. 
For \eqn{\lambda=0}{lambda=0}, #' \deqn{f_0(x)=\log(x)}{f(x;0)=log(x)}. #' #' @param x a numeric vector or time series of class \code{ts}. #' @param lambda transformation parameter. If \code{lambda = "auto"}, then #' the transformation parameter lambda is chosen using BoxCox.lambda (with a lower bound of -0.9) #' @param biasadj Use adjusted back-transformed mean for Box-Cox #' transformations. If transformed data is used to produce forecasts and fitted values, #' a regular back transformation will result in median forecasts. If biasadj is TRUE, #' an adjustment will be made to produce mean forecasts and fitted values. #' @param fvar Optional parameter required if biasadj=TRUE. Can either be the #' forecast variance, or a list containing the interval \code{level}, and the #' corresponding \code{upper} and \code{lower} intervals. #' @return a numeric vector of the same length as x. #' @author Rob J Hyndman & Mitchell O'Hara-Wild #' @seealso \code{\link{BoxCox.lambda}} #' @references Box, G. E. P. and Cox, D. R. (1964) An analysis of #' transformations. \emph{JRSS B} \bold{26} 211--246. #' Bickel, P. J. and Doksum K. A. (1981) An Analysis of Transformations Revisited. \emph{JASA} \bold{76} 296-311. #' @keywords ts #' @examples #' #' lambda <- BoxCox.lambda(lynx) #' lynx.fit <- ar(BoxCox(lynx,lambda)) #' plot(forecast(lynx.fit,h=20,lambda=lambda)) #' #' @export BoxCox <- function(x, lambda) { if (lambda == "auto") { lambda <- BoxCox.lambda(x, lower = -0.9) } if (lambda < 0) { x[x < 0] <- NA } if (lambda == 0) { out <- log(x) } else { out <- (sign(x) * abs(x) ^ lambda - 1) / lambda } if (!is.null(colnames(x))) { colnames(out) <- colnames(x) } attr(out, "lambda") <- lambda return(out) } #' @rdname BoxCox #' @export InvBoxCox <- function(x, lambda, biasadj=FALSE, fvar=NULL) { if (lambda < 0) { x[x > -1 / lambda] <- NA } if (lambda == 0) { out <- exp(x) } else { xx <- x * lambda + 1 out <- sign(xx) * abs(xx) ^ (1 / lambda) } if (!is.null(colnames(x))) { colnames(out) <- colnames(x) } if (is.null(biasadj)) { biasadj <- attr(lambda, "biasadj") } if (!is.logical(biasadj)) { warning("biasadj information not found, defaulting to FALSE.") biasadj <- FALSE } if (biasadj) { if (is.null(fvar)) { stop("fvar must be provided when biasadj=TRUE") } if (is.list(fvar)) { # Create fvar from forecast interval level <- max(fvar$level) if (NCOL(fvar$upper) > 1 && NCOL(fvar$lower)) { i <- match(level, fvar$level) fvar$upper <- fvar$upper[, i] fvar$lower <- fvar$lower[, i] } if (level > 1) { level <- level / 100 } level <- mean(c(level, 1)) # Note: Use BoxCox transformed upper and lower values fvar <- as.numeric((fvar$upper - fvar$lower) / stats::qnorm(level) / 2) ^ 2 } if (NCOL(fvar) > 1) { fvar <- diag(fvar) } out <- out * (1 + 0.5 * as.numeric(fvar) * (1 - lambda) / (out) ^ (2 * lambda)) } return(out) } # Deprecated InvBoxCoxf <- function(x=NULL, fvar=NULL, lambda=NULL) { message("Deprecated, use InvBoxCox instead") if (is.null(lambda)) { stop("Must specify lambda using lambda=numeric(1)") } if (is.null(fvar)) { level <- max(x$level) if (NCOL(x$upper) > 1 && NCOL(x$lower)) { i <- match(level, x$level) x$upper <- x$upper[, i] x$lower <- x$lower[, i] } if (level > 1) { level <- level / 100 } level <- mean(c(level, 1)) # Note: Use BoxCox transformed upper and lower values fvar <- ((x$upper - x$lower) / stats::qnorm(level) / 2) ^ 2 } else { x <- list(mean = x) } if ("matrix" %in% class(fvar)) { fvar <- diag(fvar) } return(x$mean * (1 + 0.5 * fvar * (1 - lambda) / (x$mean) ^ (2 * lambda))) } #' Forecasting using 
Structural Time Series models #' #' Returns forecasts and other information for univariate structural time #' series models. #' #' This function calls \code{predict.StructTS} and constructs an object of #' class "\code{forecast}" from the results. #' #' @param object An object of class "\code{StructTS}". Usually the result of a #' call to \code{\link[stats]{StructTS}}. #' @param h Number of periods for forecasting #' @param level Confidence level for prediction intervals. #' @param fan If TRUE, level is set to seq(51,99,by=3). This is suitable for #' fan plots. #' @param ... Other arguments. #' @inheritParams forecast #' #' @return An object of class "\code{forecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{forecast.StructTS}. #' #' An object of class \code{"forecast"} is a list containing at least the #' following elements: \item{model}{A list containing information about the #' fitted model} \item{method}{The name of the forecasting method as a #' character string} \item{mean}{Point forecasts as a time series} #' \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper #' limits for prediction intervals} \item{level}{The confidence values #' associated with the prediction intervals} \item{x}{The original time series #' (either \code{object} itself or the time series used to create the model #' stored as \code{object}).} \item{residuals}{Residuals from the fitted model. #' That is x minus fitted values.} \item{fitted}{Fitted values (one-step #' forecasts)} #' @author Rob J Hyndman #' @seealso \code{\link[stats]{StructTS}}. #' @keywords ts #' @examples #' fit <- StructTS(WWWusage,"level") #' plot(forecast(fit)) #' #' @export forecast.StructTS <- function(object, h=ifelse(object$coef["epsilon"] > 1e-10, 2 * object$xtsp[3], 10), level=c(80, 95), fan=FALSE, lambda=NULL, biasadj=NULL, ...) 
{ x <- object$data pred <- predict(object, n.ahead = h) if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 && max(level) < 1) { level <- 100 * level } else if (min(level) < 0 || max(level) > 99.99) { stop("Confidence limit out of range") } } nint <- length(level) upper <- lower <- matrix(NA, ncol = nint, nrow = length(pred$pred)) for (i in 1:nint) { qq <- qnorm(0.5 * (1 + level[i] / 100)) lower[, i] <- pred$pred - qq * pred$se upper[, i] <- pred$pred + qq * pred$se } colnames(lower) <- colnames(upper) <- paste(level, "%", sep = "") if (is.element("seas", names(object$coef))) { method <- "Basic structural model" } else if (is.element("slope", names(object$coef))) { method <- "Local linear structural model" } else { method <- "Local level structural model" } # Compute fitted values and residuals sigma2 <- c(predict(object, n.ahead=1)$se) res <- residuals(object) * sigma2 fits <- x - res if (!is.null(lambda)) { fits <- InvBoxCox(fits, lambda) x <- InvBoxCox(x, lambda) pred$pred <- InvBoxCox(pred$pred, lambda, biasadj, list(level = level, upper = upper, lower = lower)) lower <- InvBoxCox(lower, lambda) upper <- InvBoxCox(upper, lambda) } mean <- future_msts(x, pred$pred) lower <- future_msts(x, lower) upper <- future_msts(x, upper) fits <- copy_msts(x, fits) res <- copy_msts(x, res) return(structure( list( method = method, model = object, level = level, mean = pred$pred, lower = lower, upper = upper, x = x, series = object$series, fitted = fits, residuals = res ), class = "forecast" )) } #' Forecasting using Holt-Winters objects #' #' Returns forecasts and other information for univariate Holt-Winters time #' series models. #' #' This function calls \code{\link[stats]{predict.HoltWinters}} and constructs #' an object of class "\code{forecast}" from the results. #' #' It is included for completeness, but the \code{\link{ets}} is recommended #' for use instead of \code{\link[stats]{HoltWinters}}. #' #' @param object An object of class "\code{HoltWinters}". Usually the result of #' a call to \code{\link[stats]{HoltWinters}}. #' @param h Number of periods for forecasting #' @param level Confidence level for prediction intervals. #' @param fan If TRUE, level is set to seq(51,99,by=3). This is suitable for #' fan plots. #' @param ... Other arguments. #' @inheritParams forecast #' #' @return An object of class "\code{forecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by #' \code{forecast.HoltWinters}. 
#' #' An object of class \code{"forecast"} is a list containing at least the #' following elements: \item{model}{A list containing information about the #' fitted model} \item{method}{The name of the forecasting method as a #' character string} \item{mean}{Point forecasts as a time series} #' \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper #' limits for prediction intervals} \item{level}{The confidence values #' associated with the prediction intervals} \item{x}{The original time series #' (either \code{object} itself or the time series used to create the model #' stored as \code{object}).} \item{residuals}{Residuals from the fitted #' model.} \item{fitted}{Fitted values (one-step forecasts)} #' @author Rob J Hyndman #' @seealso \code{\link[stats]{predict.HoltWinters}}, #' \code{\link[stats]{HoltWinters}}. #' @keywords ts #' @examples #' fit <- HoltWinters(WWWusage,gamma=FALSE) #' plot(forecast(fit)) #' #' @export forecast.HoltWinters <- function(object, h=ifelse(frequency(object$x) > 1, 2 * frequency(object$x), 10), level=c(80, 95), fan=FALSE, lambda=NULL, biasadj=NULL, ...) { x <- object$x if (!is.null(object$exponential)) { if (object$exponential) { stop("Forecasting for exponential trend not yet implemented.") } } if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 && max(level) < 1) { level <- 100 * level } else if (min(level) < 0 || max(level) > 99.99) { stop("Confidence limit out of range") } } nint <- length(level) pred <- predict(object, n.ahead = h, prediction.interval = TRUE, level = level[1] / 100) pmean <- pred[, 1] upper <- lower <- matrix(NA, ncol = nint, nrow = length(pred[, 1])) se <- (pred[, 2] - pred[, 3]) / (2 * qnorm(0.5 * (1 + level[1] / 100))) for (i in 1:nint) { qq <- qnorm(0.5 * (1 + level[i] / 100)) lower[, i] <- pmean - qq * se upper[, i] <- pmean + qq * se } colnames(lower) <- colnames(upper) <- paste(level, "%", sep = "") if (!is.null(lambda)) { fitted <- InvBoxCox(object$fitted[, 1], lambda) x <- InvBoxCox(x, lambda) pmean <- InvBoxCox(pmean, lambda, biasadj, list(level = level, upper = upper, lower = lower)) lower <- InvBoxCox(lower, lambda) upper <- InvBoxCox(upper, lambda) } else { fitted <- object$fitted[, 1] } # Pad fitted values with NAs nf <- length(fitted) n <- length(x) fitted <- ts(c(rep(NA, n - nf), fitted)) fitted <- copy_msts(object$x, fitted) pmean <- future_msts(object$x, pmean) lower <- future_msts(object$x, lower) upper <- future_msts(object$x, upper) return(structure( list( method = "HoltWinters", model = object, level = level, mean = pmean, lower = lower, upper = upper, x = x, series = deparse(object$call$x), fitted = fitted, residuals = x - fitted ), class = "forecast" )) } ## CROSTON #' Forecasts for intermittent demand using Croston's method #' #' Returns forecasts and other information for Croston's forecasts applied to #' y. #' #' Based on Croston's (1972) method for intermittent demand forecasting, also #' described in Shenstone and Hyndman (2005). Croston's method involves using #' simple exponential smoothing (SES) on the non-zero elements of the time #' series and a separate application of SES to the times between non-zero #' elements of the time series. The smoothing parameters of the two #' applications of SES are assumed to be equal and are denoted by \code{alpha}. #' #' Note that prediction intervals are not computed as Croston's method has no #' underlying stochastic model. #' #' @param y a numeric vector or time series of class \code{ts} #' @param h Number of periods for forecasting. 
#' @param alpha Value of alpha. Default value is 0.1. #' @param x Deprecated. Included for backwards compatibility. #' @return An object of class \code{"forecast"} is a list containing at least #' the following elements: \item{model}{A list containing information about the #' fitted model. The first element gives the model used for non-zero demands. #' The second element gives the model used for times between non-zero demands. #' Both elements are of class \code{forecast}.} \item{method}{The name of the #' forecasting method as a character string} \item{mean}{Point forecasts as a #' time series} \item{x}{The original time series (either \code{object} itself #' or the time series used to create the model stored as \code{object}).} #' \item{residuals}{Residuals from the fitted model. That is y minus fitted #' values.} \item{fitted}{Fitted values (one-step forecasts)} #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{croston} and #' associated functions. #' @author Rob J Hyndman #' @seealso \code{\link{ses}}. #' @references Croston, J. (1972) "Forecasting and stock control for #' intermittent demands", \emph{Operational Research Quarterly}, \bold{23}(3), #' 289-303. #' #' Shenstone, L., and Hyndman, R.J. (2005) "Stochastic models underlying #' Croston's method for intermittent demand forecasting". \emph{Journal of #' Forecasting}, \bold{24}, 389-402. #' @keywords ts #' @examples #' y <- rpois(20,lambda=.3) #' fcast <- croston(y) #' plot(fcast) #' #' @export croston <- function(y, h=10, alpha=0.1, x=y) { if (sum(x < 0) > 0) { stop("Series should not contain negative values") } out <- croston2(x, h, alpha) out$x <- x if (!is.null(out$fitted)) { out$residuals <- x - out$fitted } out$method <- "Croston's method" out$series <- deparse(substitute(y)) return(structure(out, class = "forecast")) } croston2 <- function(x, h=10, alpha=0.1, nofits=FALSE) { x <- as.ts(x) y <- x[x > 0] tsp.x <- tsp(x) freq.x <- tsp.x[3] start.f <- tsp.x[2] + 1 / freq.x if (length(y) == 0) # All historical values are equal to zero { fc <- ts(rep(0, h), start = start.f, frequency = freq.x) if (nofits) { return(fc) } else { return(list(mean = fc, fitted = ts(x * 0, start = tsp.x[1], frequency = freq.x))) } } tt <- diff(c(0, (1:length(x))[x > 0])) # Times between non-zero observations if (length(y) == 1 && length(tt) == 1) # Only one non-zero observation { y.f <- list(mean = ts(rep(y, h), start = start.f, frequency = freq.x)) p.f <- list(mean = ts(rep(tt, h), start = start.f, frequency = freq.x)) } else if (length(y) <= 1 || length(tt) <= 1) { # length(tt)==0 but length(y)>0. How does that happen? 
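  # Annotation (added): y and tt each have one entry per non-zero observation,
  # so their lengths are always equal; the zero and single non-zero cases are
  # caught above, suggesting this branch is defensive and unreachable in
  # practice. It returns NA forecasts rather than raising an error.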
return(list(mean = ts(rep(NA, h), start = start.f, frequency = freq.x))) } else { y.f <- ses(y, alpha = alpha, initial = "simple", h = h, PI = FALSE) p.f <- ses(tt, alpha = alpha, initial = "simple", h = h, PI = FALSE) } ratio <- ts(y.f$mean / p.f$mean, start = start.f, frequency = freq.x) if (nofits) { return(ratio) } else { n <- length(x) fits <- x * NA if (n > 1) { for (i in 1:(n - 1)) fits[i + 1] <- croston2(x[1:i], h = 1, alpha = alpha, nofits = TRUE) } ratio <- future_msts(x, ratio) fits <- copy_msts(x, fits) return(list(mean = ratio, fitted = fits, model = list(demand = y.f, period = p.f))) } } forecast/R/makeParamVector.R0000644000176200001440000001734614003673410015476 0ustar liggesusers# TODO: Add comment # # Author: srazbash ############################################################################### unParameteriseTBATS <- function(param.vector, control) { # print(control) if (control$use.box.cox) { lambda <- param.vector[1] alpha <- param.vector[2] if (control$use.beta) { if (control$use.damping) { small.phi <- param.vector[3] beta <- param.vector[4] gamma.start <- 5 } else { small.phi <- 1 beta <- param.vector[3] gamma.start <- 4 } } else { small.phi <- NULL beta <- NULL gamma.start <- 3 } if (control$length.gamma > 0) { gamma.one.vector <- param.vector[gamma.start:(gamma.start + (control$length.gamma / 2) - 1)] gamma.two.vector <- param.vector[(gamma.start + (control$length.gamma / 2)):(gamma.start + (control$length.gamma) - 1)] final.gamma.pos <- gamma.start + control$length.gamma - 1 } else { gamma.one.vector <- NULL gamma.two.vector <- NULL final.gamma.pos <- gamma.start - 1 } if (control$p != 0) { ar.coefs <- param.vector[(final.gamma.pos + 1):(final.gamma.pos + control$p)] } else { ar.coefs <- NULL } if (control$q != 0) { ma.coefs <- param.vector[(final.gamma.pos + control$p + 1):length(param.vector)] } else { ma.coefs <- NULL } } else { lambda <- NULL alpha <- param.vector[1] if (control$use.beta) { if (control$use.damping) { small.phi <- param.vector[2] beta <- param.vector[3] gamma.start <- 4 } else { small.phi <- 1 beta <- param.vector[2] gamma.start <- 3 } } else { small.phi <- NULL beta <- NULL gamma.start <- 2 } if (control$length.gamma > 0) { gamma.one.vector <- param.vector[gamma.start:(gamma.start + (control$length.gamma / 2) - 1)] gamma.two.vector <- param.vector[(gamma.start + (control$length.gamma / 2)):(gamma.start + (control$length.gamma) - 1)] final.gamma.pos <- gamma.start + control$length.gamma - 1 } else { gamma.one.vector <- NULL gamma.two.vector <- NULL final.gamma.pos <- gamma.start - 1 } if (control$p != 0) { ar.coefs <- param.vector[(final.gamma.pos + 1):(final.gamma.pos + control$p)] } else { ar.coefs <- NULL } if (control$q != 0) { ma.coefs <- param.vector[(final.gamma.pos + control$p + 1):length(param.vector)] } else { ma.coefs <- NULL } } return(list(lambda = lambda, alpha = alpha, beta = beta, small.phi = small.phi, gamma.one.v = gamma.one.vector, gamma.two.v = gamma.two.vector, ar.coefs = ar.coefs, ma.coefs = ma.coefs)) } makeParscale <- function(control) { # print(control) if (control$use.box.cox) { parscale <- c(.001, .01) } else { parscale <- .01 } if (control$use.beta) { if (control$use.damping) { parscale <- c(parscale, 1e-2, 1e-2) } else { parscale <- c(parscale, 1e-2) } } if (control$length.gamma > 0) { parscale <- c(parscale, rep(1e-5, control$length.gamma)) } if ((control$p != 0) | (control$q != 0)) { parscale <- c(parscale, rep(1e-1, (control$p + control$q))) } # print(parscale) return(parscale) } 
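# Illustrative example (added annotation; values follow from the code above,
# not executed here): for a TBATS control list such as
#   control <- list(use.box.cox = TRUE, use.beta = TRUE, use.damping = FALSE,
#                   length.gamma = 2, p = 0, q = 1)
# makeParscale(control) returns c(0.001, 0.01, 0.01, 1e-05, 1e-05, 0.1):
# one scale per parameter, ordered as lambda, alpha, beta, the seasonal gammas,
# then the ARMA coefficients.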
############################################################################################################################################################################################## ## BATS related stuff below ######################################## makeParscaleBATS <- function(control) { # print(control) if (control$use.box.cox) { parscale <- c(.001, .1) } else { parscale <- .1 } if (control$use.beta) { if (control$use.damping) { parscale <- c(parscale, 1e-2, 1e-2) } else { parscale <- c(parscale, 1e-2) } } if (control$length.gamma > 0) { parscale <- c(parscale, rep(1e-2, control$length.gamma)) } if ((control$p != 0) | (control$q != 0)) { parscale <- c(parscale, rep(1e-1, (control$p + control$q))) } # print(parscale) return(parscale) } parameterise <- function(alpha, beta.v=NULL, small.phi=1, gamma.v=NULL, lambda=NULL, ar.coefs=NULL, ma.coefs=NULL) { # print("urg") # print(lambda) if (!is.null(lambda)) { param.vector <- cbind(lambda, alpha) use.box.cox <- TRUE } else { # print("hello") param.vector <- alpha use.box.cox <- FALSE # print(use.box.cox) } if (!is.null(beta.v)) { use.beta <- TRUE if (is.null(small.phi)) { use.damping <- FALSE } else if (small.phi != 1) { param.vector <- cbind(param.vector, small.phi) use.damping <- TRUE } else { use.damping <- FALSE } param.vector <- cbind(param.vector, beta.v) } else { use.beta <- FALSE use.damping <- FALSE } if (!is.null(gamma.v)) { gamma.v <- matrix(gamma.v, nrow = 1, ncol = length(gamma.v)) param.vector <- cbind(param.vector, gamma.v) length.gamma <- length(gamma.v) } else { length.gamma <- 0 } if (!is.null(ar.coefs)) { ar.coefs <- matrix(ar.coefs, nrow = 1, ncol = length(ar.coefs)) param.vector <- cbind(param.vector, ar.coefs) p <- length(ar.coefs) } else { p <- 0 } if (!is.null(ma.coefs)) { ma.coefs <- matrix(ma.coefs, nrow = 1, ncol = length(ma.coefs)) param.vector <- cbind(param.vector, ma.coefs) q <- length(ma.coefs) } else { q <- 0 } # print(use.box.cox) control <- list(use.beta = use.beta, use.box.cox = use.box.cox, use.damping = use.damping, length.gamma = length.gamma, p = p, q = q) return(list(vect = as.numeric(param.vector), control = control)) } unParameterise <- function(param.vector, control) { # print(control) if (control$use.box.cox) { lambda <- param.vector[1] alpha <- param.vector[2] if (control$use.beta) { if (control$use.damping) { small.phi <- param.vector[3] beta <- param.vector[4] gamma.start <- 5 } else { small.phi <- 1 beta <- param.vector[3] gamma.start <- 4 } } else { small.phi <- NULL beta <- NULL gamma.start <- 3 } if (control$length.gamma > 0) { gamma.vector <- param.vector[gamma.start:(gamma.start + control$length.gamma - 1)] final.gamma.pos <- gamma.start + control$length.gamma - 1 } else { gamma.vector <- NULL final.gamma.pos <- gamma.start - 1 } if (control$p != 0) { ar.coefs <- param.vector[(final.gamma.pos + 1):(final.gamma.pos + control$p)] } else { ar.coefs <- NULL } if (control$q != 0) { ma.coefs <- param.vector[(final.gamma.pos + control$p + 1):length(param.vector)] } else { ma.coefs <- NULL } } else { lambda <- NULL alpha <- param.vector[1] if (control$use.beta) { if (control$use.damping) { small.phi <- param.vector[2] beta <- param.vector[3] gamma.start <- 4 } else { small.phi <- 1 beta <- param.vector[2] gamma.start <- 3 } } else { small.phi <- NULL beta <- NULL gamma.start <- 2 } if (control$length.gamma > 0) { gamma.vector <- param.vector[gamma.start:(gamma.start + control$length.gamma - 1)] final.gamma.pos <- gamma.start + control$length.gamma - 1 } else { gamma.vector <- NULL 
final.gamma.pos <- gamma.start - 1 } if (control$p != 0) { ar.coefs <- param.vector[(final.gamma.pos + 1):(final.gamma.pos + control$p)] } else { ar.coefs <- NULL } if (control$q != 0) { ma.coefs <- param.vector[(final.gamma.pos + control$p + 1):length(param.vector)] } else { ma.coefs <- NULL } } return(list(lambda = lambda, alpha = alpha, beta = beta, small.phi = small.phi, gamma.v = gamma.vector, ar.coefs = ar.coefs, ma.coefs = ma.coefs)) } forecast/R/getResponse.R0000644000176200001440000000635614120274716014720 0ustar liggesusers# Functions to return the response variable for different models. # If a Box-Cox transformation is used, the series returned here should # be on the original scale, not the Box-Cox transformed scale. #' Get response variable from time series model. #' #' \code{getResponse} is a generic function for extracting the historical data #' from a time series model (including \code{Arima}, \code{ets}, \code{ar}, #' \code{fracdiff}), a linear model of class \code{lm}, or a forecast object. #' The function invokes particular \emph{methods} which depend on the class of #' the first argument. #' #' #' @param object a time series model or forecast object. #' @param ... Additional arguments that are ignored. #' @return A numerical vector or a time series object of class \code{ts}. #' @author Rob J Hyndman #' @keywords ts #' #' @export getResponse <- function(object, ...) UseMethod("getResponse") #' @rdname getResponse #' @export getResponse.default <- function(object, ...) { if (is.list(object)) { return(object$x) } else { return(NULL) } } #' @rdname getResponse #' @export getResponse.lm <- function(object, ...) { if(!is.null(object[["x"]])){ object[["x"]] } else{ responsevar <- deparse(formula(object)[[2]]) model.frame(object$model)[, responsevar] } } #' @rdname getResponse #' @export getResponse.Arima <- function(object, ...) { if (is.element("x", names(object))) { x <- object$x } else { series.name <- object$series if (is.null(series.name)) { return(NULL) } else { x <- try(eval.parent(parse(text = series.name)), silent = TRUE) if (is.element("try-error", class(x))) { # Try one level further up the chain x <- try(eval.parent(parse(text = series.name), 2), silent = TRUE) } if (is.element("try-error", class(x))) { # Give up return(NULL) } } } return(as.ts(x)) } #' @rdname getResponse #' @export getResponse.fracdiff <- function(object, ...) { if (is.element("x", names(object))) { x <- object$x } else { series.name <- as.character(object$call)[2] if (is.null(series.name)) { stop("missing original time series") } else { x <- try(eval.parent(parse(text = series.name)), silent = TRUE) if (is.element("try-error", class(x))) { # Try one level further up the chain x <- try(eval.parent(parse(text = series.name), 2), silent = TRUE) } if (is.element("try-error", class(x))) { # Give up return(NULL) } } } return(as.ts(x)) } #' @rdname getResponse #' @export getResponse.ar <- function(object, ...) { getResponse.Arima(object) } #' @rdname getResponse #' @export getResponse.tbats <- function(object, ...) { if (is.element("y", names(object))) { y <- object$y } else { return(NULL) } return(as.ts(y)) } #' @rdname getResponse #' @export getResponse.bats <- function(object, ...) { return(getResponse.tbats(object, ...)) } #' @rdname getResponse #' @export getResponse.mforecast <- function(object, ...) { return(do.call(cbind, lapply(object$forecast, function(x) x$x))) } #' @rdname getResponse #' @export getResponse.baggedModel <- function(object, ...) 
{ if (is.element("y", names(object))) { y <- object$y } else { return(NULL) } return(as.ts(y)) } forecast/R/forecast.R0000644000176200001440000006300314133674656014233 0ustar liggesusers#' Forecasting Functions for Time Series and Linear Models #' #' Methods and tools for displaying and analysing univariate time series #' forecasts including exponential smoothing via state space models and #' automatic ARIMA modelling. #' #' \tabular{ll}{ Package: \tab forecast\cr Type: \tab Package\cr License: \tab #' GPL3\cr LazyLoad: \tab yes\cr } #' #' @docType package #' @name forecast-package #' @author Rob J Hyndman #' #' Maintainer: Rob.Hyndman@monash.edu #' @keywords package NULL # Instead of "_PACKAGE" to remove inclusion of \alias{forecast} # "_PACKAGE" ## Generic forecast functions ## Part of forecast and demography packages #' Forecasting time series #' #' \code{forecast} is a generic function for forecasting from time series or #' time series models. The function invokes particular \emph{methods} which #' depend on the class of the first argument. #' #' For example, the function \code{\link{forecast.Arima}} makes forecasts based #' on the results produced by \code{\link[stats]{arima}}. #' #' If \code{model=NULL},the function \code{\link{forecast.ts}} makes forecasts #' using \code{\link{ets}} models (if the data are non-seasonal or the seasonal #' period is 12 or less) or \code{\link{stlf}} (if the seasonal period is 13 or #' more). #' #' If \code{model} is not \code{NULL}, \code{forecast.ts} will apply the #' \code{model} to the \code{object} time series, and then generate forecasts #' accordingly. #' #' @aliases print.forecast summary.forecast as.data.frame.forecast as.ts.forecast #' #' @param object a time series or time series model for which forecasts are #' required #' @param h Number of periods for forecasting #' @param level Confidence level for prediction intervals. #' @param fan If TRUE, \code{level} is set to \code{seq(51,99,by=3)}. This is #' suitable for fan plots. #' @param robust If TRUE, the function is robust to missing values and outliers #' in \code{object}. This argument is only valid when \code{object} is of class #' \code{ts}. #' @param lambda Box-Cox transformation parameter. If \code{lambda="auto"}, #' then a transformation is automatically selected using \code{BoxCox.lambda}. #' The transformation is ignored if NULL. Otherwise, #' data transformed before model is estimated. #' @param find.frequency If TRUE, the function determines the appropriate #' period, if the data is of unknown period. #' @param allow.multiplicative.trend If TRUE, then ETS models with #' multiplicative trends are allowed. Otherwise, only additive or no trend ETS #' models are permitted. #' @param model An object describing a time series model; e.g., one of of class #' \code{ets}, \code{Arima}, \code{bats}, \code{tbats}, or \code{nnetar}. #' @param ... Additional arguments affecting the forecasts produced. If #' \code{model=NULL}, \code{forecast.ts} passes these to \code{\link{ets}} or #' \code{\link{stlf}} depending on the frequency of the time series. If #' \code{model} is not \code{NULL}, the arguments are passed to the relevant #' modelling function. #' @inheritParams BoxCox #' #' @return An object of class "\code{forecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. 
#'
#' The generic accessor functions \code{fitted.values} and \code{residuals}
#' extract various useful features of the value returned by
#' \code{forecast$model}.
#'
#' An object of class \code{"forecast"} is a list usually containing at least
#' the following elements: \item{model}{A list containing information about the
#' fitted model} \item{method}{The name of the forecasting method as a
#' character string} \item{mean}{Point forecasts as a time series}
#' \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper
#' limits for prediction intervals} \item{level}{The confidence values
#' associated with the prediction intervals} \item{x}{The original time series
#' (either \code{object} itself or the time series used to create the model
#' stored as \code{object}).} \item{residuals}{Residuals from the fitted model.
#' For models with additive errors, the residuals will be x minus the fitted
#' values.} \item{fitted}{Fitted values (one-step forecasts)}
#' @author Rob J Hyndman
#' @seealso Other functions which return objects of class \code{"forecast"} are
#' \code{\link{forecast.ets}}, \code{\link{forecast.Arima}},
#' \code{\link{forecast.HoltWinters}}, \code{\link{forecast.StructTS}},
#' \code{\link{meanf}}, \code{\link{rwf}}, \code{\link{splinef}},
#' \code{\link{thetaf}}, \code{\link{croston}}, \code{\link{ses}},
#' \code{\link{holt}}, \code{\link{hw}}.
#' @keywords ts
#' @examples
#'
#' WWWusage %>% forecast %>% plot
#' fit <- ets(window(WWWusage, end=60))
#' fc <- forecast(WWWusage, model=fit)
#'
#' @export
forecast <- function(object, ...) UseMethod("forecast")

#' @rdname forecast
#' @export
forecast.default <- function(object, ...) forecast.ts(object, ...)

## A function determining the appropriate period, if the data is of unknown period
## Written by Rob Hyndman

#' Find dominant frequency of a time series
#'
#' \code{findfrequency} returns the period of the dominant frequency of a time
#' series. For seasonal data, it will return the seasonal period. For cyclic
#' data, it will return the average cycle length.
#'
#' The dominant frequency is determined from a spectral analysis of the time
#' series. First, a linear trend is removed, then the spectral density function
#' is estimated from the best fitting autoregressive model (based on the AIC).
#' If there is a large (possibly local) maximum in the spectral density
#' function at frequency \eqn{f}, then the function will return the period
#' \eqn{1/f} (rounded to the nearest integer). If no such dominant frequency
#' can be found, the function will return 1.
#'
#' @param x a numeric vector or time series of class \code{ts}
#' @return an integer value
#' @author Rob J Hyndman
#' @keywords ts
#' @examples
#'
#' findfrequency(USAccDeaths) # Monthly data
#' findfrequency(taylor) # Half-hourly data
#' findfrequency(lynx) # Annual data
#'
#' @export
findfrequency <- function(x) {
  n <- length(x)
  x <- as.ts(x)
  # Remove trend from data
  x <- residuals(tslm(x ~ trend))
  # Compute spectrum by fitting ar model to largest section of x
  n.freq <- 500
  spec <- spec.ar(c(na.contiguous(x)), plot = FALSE, n.freq = n.freq)
  if (max(spec$spec) > 10) # Arbitrary threshold chosen by trial and error.
{ period <- floor(1 / spec$freq[which.max(spec$spec)] + 0.5) if (period == Inf) # Find next local maximum { j <- which(diff(spec$spec) > 0) if (length(j) > 0) { nextmax <- j[1] + which.max(spec$spec[(j[1] + 1):n.freq]) if (nextmax < length(spec$freq)) { period <- floor(1 / spec$freq[nextmax] + 0.5) } else { period <- 1L } } else { period <- 1L } } } else { period <- 1L } return(as.integer(period)) } #' @rdname forecast #' @export forecast.ts <- function(object, h=ifelse(frequency(object) > 1, 2 * frequency(object), 10), level=c(80, 95), fan=FALSE, robust=FALSE, lambda = NULL, biasadj = FALSE, find.frequency = FALSE, allow.multiplicative.trend=FALSE, model=NULL, ...) { n <- length(object) if (find.frequency) { object <- ts(object, frequency = findfrequency(object)) obj.freq <- frequency(object) } else { obj.freq <- frequency(object) } if (robust) { object <- tsclean(object, replace.missing = TRUE, lambda = lambda) } if (!is.null(model)) { if (inherits(model, "forecast")) { model <- model$model } if (inherits(model, "ets")) { fit <- ets(object, model = model, ...) } else if (inherits(model, "Arima")) { fit <- Arima(object, model = model, ...) } else if (inherits(model, "tbats")) { fit <- tbats(object, model = model, ...) } else if (inherits(model, "bats")) { fit <- bats(object, model = model, ...) } else if (inherits(model, "nnetar")) { fit <- nnetar(object, model = model, ...) } else { stop("Unknown model class") } return(forecast(fit, h = h, level = level, fan = fan)) } if (n > 3) { if (obj.freq < 13) { out <- forecast( ets(object, lambda = lambda, biasadj = biasadj, allow.multiplicative.trend = allow.multiplicative.trend, ...), h = h, level = level, fan = fan ) } else if (n > 2 * obj.freq) { out <- stlf( object, h = h, level = level, fan = fan, lambda = lambda, biasadj = biasadj, allow.multiplicative.trend = allow.multiplicative.trend, ... ) } else { out <- forecast( ets(object, model = "ZZN", lambda = lambda, biasadj = biasadj, allow.multiplicative.trend = allow.multiplicative.trend, ...), h = h, level = level, fan = fan ) } } else { out <- meanf(object, h = h, level = level, fan = fan, lambda = lambda, biasadj = biasadj, ...) } out$series <- deparse(substitute(object)) return(out) } #' @export print.forecast <- function(x, ...) { print(as.data.frame(x)) } #' @export summary.forecast <- function(object, ...) { class(object) <- c("summary.forecast", class(object)) object } #' @export print.summary.forecast <- function(x, ...) { cat(paste("\nForecast method:", x$method)) # cat(paste("\n\nCall:\n",deparse(x$call))) cat(paste("\n\nModel Information:\n")) print(x$model) cat("\nError measures:\n") print(accuracy(x)) if (is.null(x$mean)) { cat("\n No forecasts\n") } else { cat("\nForecasts:\n") NextMethod() } } plotlmforecast <- function(object, PI, shaded, shadecols, col, fcol, pi.col, pi.lty, xlim=NULL, ylim, main, ylab, xlab, ...) { xvar <- attributes(terms(object$model))$term.labels if (length(xvar) > 1) { stop("Forecast plot for regression models only available for a single predictor") } else if (ncol(object$newdata) == 1) { # Make sure column has correct name colnames(object$newdata) <- xvar } if (is.null(xlim)) { xlim <- range(object$newdata[, xvar], model.frame(object$model)[, xvar]) } if (is.null(ylim)) { ylim <- range(object$upper, object$lower, fitted(object$model) + residuals(object$model)) } plot( formula(object$model), data = model.frame(object$model), xlim = xlim, ylim = ylim, xlab = xlab, ylab = ylab, main = main, col = col, ... 
) abline(object$model) nf <- length(object$mean) if (PI) { nint <- length(object$level) idx <- rev(order(object$level)) if (is.null(shadecols)) { # require(colorspace) if (min(object$level) < 50) { # Using very small confidence levels. shadecols <- rev(colorspace::sequential_hcl(100)[object$level]) } else { # This should happen almost all the time. Colors mapped to levels. shadecols <- rev(colorspace::sequential_hcl(52)[object$level - 49]) } } if (length(shadecols) == 1) { if (shadecols == "oldstyle") { # Default behaviour up to v3.25. shadecols <- heat.colors(nint + 2)[switch(1 + (nint > 1), 2, nint:1) + 1] } } for (i in 1:nf) { for (j in 1:nint) { if (shaded) { lines(rep(object$newdata[i, xvar], 2), c(object$lower[i, idx[j]], object$upper[i, idx[j]]), col = shadecols[j], lwd = 6) } else { lines(rep(object$newdata[i, xvar], 2), c(object$lower[i, idx[j]], object$upper[i, idx[j]]), col = pi.col, lty = pi.lty) } } } } points(object$newdata[, xvar], object$mean, pch = 19, col = fcol) } #' Forecast plot #' #' Plots historical data with forecasts and prediction intervals. #' #' \code{autoplot} will produce a ggplot object. #' #' @aliases plot.splineforecast autoplot.splineforecast #' @param x Forecast object produced by \code{\link{forecast}}. #' @param object Forecast object produced by \code{\link{forecast}}. Used for #' ggplot graphics (S3 method consistency). #' @param include number of values from time series to include in plot. Default #' is all values. #' @param PI Logical flag indicating whether to plot prediction intervals. #' @param showgap If \code{showgap=FALSE}, the gap between the historical #' observations and the forecasts is removed. #' @param shaded Logical flag indicating whether prediction intervals should be #' shaded (\code{TRUE}) or lines (\code{FALSE}) #' @param shadebars Logical flag indicating if prediction intervals should be #' plotted as shaded bars (if \code{TRUE}) or a shaded polygon (if #' \code{FALSE}). Ignored if \code{shaded=FALSE}. Bars are plotted by default #' if there are fewer than five forecast horizons. #' @param shadecols Colors for shaded prediction intervals. To get default #' colors used prior to v3.26, set \code{shadecols="oldstyle"}. #' @param col Colour for the data line. #' @param fcol Colour for the forecast line. #' @param flty Line type for the forecast line. #' @param flwd Line width for the forecast line. #' @param pi.col If \code{shaded=FALSE} and \code{PI=TRUE}, the prediction #' intervals are plotted in this colour. #' @param pi.lty If \code{shaded=FALSE} and \code{PI=TRUE}, the prediction #' intervals are plotted using this line type. #' @param ylim Limits on y-axis. #' @param main Main title. #' @param xlab X-axis label. #' @param ylab Y-axis label. #' @param series Matches an unidentified forecast layer with a coloured object #' on the plot. #' @param fitcol Line colour for fitted values. #' @param type 1-character string giving the type of plot desired. As for #' \code{\link[graphics]{plot.default}}. #' @param pch Plotting character (if \code{type=="p"} or \code{type=="o"}). #' @param ... Other plotting parameters to affect the plot. #' @return None. #' @author Rob J Hyndman & Mitchell O'Hara-Wild #' @seealso \code{\link[stats]{plot.ts}} #' @references Hyndman and Athanasopoulos (2018) \emph{Forecasting: principles #' and practice}, 2nd edition, OTexts: Melbourne, Australia.
#' \url{https://otexts.com/fpp2/} #' @keywords ts #' @examples #' library(ggplot2) #' #' wine.fit <- hw(wineind,h=48) #' plot(wine.fit) #' autoplot(wine.fit) #' #' fit <- tslm(wineind ~ fourier(wineind,4)) #' fcast <- forecast(fit, newdata=data.frame(fourier(wineind,4,20))) #' autoplot(fcast) #' #' @export plot.forecast <- function(x, include, PI=TRUE, showgap = TRUE, shaded=TRUE, shadebars=(length(x$mean) < 5), shadecols=NULL, col=1, fcol=4, pi.col=1, pi.lty=2, ylim=NULL, main=NULL, xlab="", ylab="", type="l", flty = 1, flwd = 2, ...) { if (is.element("x", names(x))) { # Assume stored as x xx <- x$x } else { xx <- NULL } if (is.null(x$lower) || is.null(x$upper) || is.null(x$level)) { PI <- FALSE } else if (!is.finite(max(x$upper))) { PI <- FALSE } if (!shaded) { shadebars <- FALSE } if (is.null(main)) { main <- paste("Forecasts from ", x$method, sep = "") } if (PI) { x$upper <- as.matrix(x$upper) x$lower <- as.matrix(x$lower) } if (is.element("lm", class(x$model)) && !is.element("ts", class(x$mean))) # Non time series linear model { plotlmforecast( x, PI = PI, shaded = shaded, shadecols = shadecols, col = col, fcol = fcol, pi.col = pi.col, pi.lty = pi.lty, ylim = ylim, main = main, xlab = xlab, ylab = ylab, ... ) if (PI) { return(invisible(list(mean = x$mean, lower = as.matrix(x$lower), upper = as.matrix(x$upper)))) } else { return(invisible(list(mean = x$mean))) } } # Otherwise assume x is from a time series forecast n <- length(xx) if (n == 0) { include <- 0 } else if (missing(include)) { include <- length(xx) } # Check if all historical values are missing if (n > 0) { if (sum(is.na(xx)) == length(xx)) { n <- 0 } } if (n > 0) { xx <- as.ts(xx) freq <- frequency(xx) strt <- start(xx) nx <- max(which(!is.na(xx))) xxx <- xx[1:nx] include <- min(include, nx) if (!showgap) { lastObs <- x$x[length(x$x)] lastTime <- time(x$x)[length(x$x)] x$mean <- ts(c(lastObs, x$mean), start = lastTime, frequency = freq) x$upper <- ts(rbind(lastObs, x$upper), start = lastTime, frequency = freq) x$lower <- ts(rbind(lastObs, x$lower), start = lastTime, frequency = freq) } } else { freq <- frequency(x$mean) strt <- start(x$mean) nx <- include <- 1 xx <- xxx <- ts(NA, frequency = freq, end = tsp(x$mean)[1] - 1 / freq) if (!showgap) { warning("Removing the gap requires historical data, provide this via model$x. Defaulting showgap to TRUE.") } } pred.mean <- x$mean if (is.null(ylim)) { ylim <- range(c(xx[(n - include + 1):n], pred.mean), na.rm = TRUE) if (PI) { ylim <- range(ylim, x$lower, x$upper, na.rm = TRUE) } } npred <- length(pred.mean) tsx <- is.ts(pred.mean) if (!tsx) { pred.mean <- ts(pred.mean, start = nx + 1, frequency = 1) type <- "p" } plot( ts(c(xxx[(nx - include + 1):nx], rep(NA, npred)), end = tsp(xx)[2] + (nx - n) / freq + npred / freq, frequency = freq), xlab = xlab, ylim = ylim, ylab = ylab, main = main, col = col, type = type, ... ) if (PI) { if (is.ts(x$upper)) { xxx <- time(x$upper) } else { xxx <- tsp(pred.mean)[1] - 1 / freq + (1:npred) / freq } idx <- rev(order(x$level)) nint <- length(x$level) if (is.null(shadecols)) { # require(colorspace) if (min(x$level) < 50) { # Using very small confidence levels. shadecols <- rev(colorspace::sequential_hcl(100)[x$level]) } else { # This should happen almost all the time. Colors mapped to levels. shadecols <- rev(colorspace::sequential_hcl(52)[x$level - 49]) } } if (length(shadecols) == 1) { if (shadecols == "oldstyle") { # Default behaviour up to v3.25. 
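# Note on the indexing below: heat.colors(nint + 2) runs from intense red to # pale yellow; switch() picks index 2 when there is a single interval and # nint:1 when there are several, and the + 1 offset skips the most intense # colour so that wider (higher-level) intervals are drawn in lighter shades.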
shadecols <- heat.colors(nint + 2)[switch(1 + (nint > 1), 2, nint:1) + 1] } } for (i in 1:nint) { if (shadebars) { for (j in 1:npred) { polygon( xxx[j] + c(-0.5, 0.5, 0.5, -0.5) / freq, c(rep(x$lower[j, idx[i]], 2), rep(x$upper[j, idx[i]], 2)), col = shadecols[i], border = FALSE ) } } else if (shaded) { polygon( c(xxx, rev(xxx)), c(x$lower[, idx[i]], rev(x$upper[, idx[i]])), col = shadecols[i], border = FALSE ) } else if (npred == 1) { lines(c(xxx) + c(-0.5, 0.5) / freq, rep(x$lower[, idx[i]], 2), col = pi.col, lty = pi.lty) lines(c(xxx) + c(-0.5, 0.5) / freq, rep(x$upper[, idx[i]], 2), col = pi.col, lty = pi.lty) } else { lines(x$lower[, idx[i]], col = pi.col, lty = pi.lty) lines(x$upper[, idx[i]], col = pi.col, lty = pi.lty) } } } if (npred > 1 && !shadebars && tsx) { lines(pred.mean, lty = flty, lwd = flwd, col = fcol) } else { points(pred.mean, col = fcol, pch = 19) } if (PI) { invisible(list(mean = pred.mean, lower = x$lower, upper = x$upper)) } else { invisible(list(mean = pred.mean)) } } #' @export predict.default <- function(object, ...) { forecast(object, ...) } hfitted <- function(object, h=1, FUN=NULL, ...) { if (h == 1) { return(fitted(object)) } # Attempt to get model function if (is.null(FUN)) { FUN <- class(object) for (i in FUN) { if (exists(i)) { if (typeof(eval(parse(text = i)[[1]])) == "closure") { FUN <- i i <- "Y" break } } } if (i != "Y") { stop("Could not find appropriate function to refit, specify FUN=function") } } x <- getResponse(object) tspx <- tsp(x) fits <- fitted(object) * NA n <- length(fits) refitarg <- list(x = NULL, model = object) names(refitarg)[1] <- names(formals(FUN))[1] fcarg <- list(h = h, biasadj=TRUE, lambda=object$lambda) if (FUN == "ets") { refitarg$use.initial.values <- TRUE } for (i in 1:(n - h)) { refitarg[[1]] <- ts(x[1:i], start = tspx[1], frequency = tspx[3]) if (!is.null(object$xreg)) { refitarg$xreg <- ts(object$xreg[1:i, ], start = tspx[1], frequency = tspx[3]) fcarg$xreg <- ts(object$xreg[(i + 1):(i + h), ], start = tspx[1] + i / tspx[3], frequency = tspx[3]) } fcarg$object <- try(suppressWarnings(do.call(FUN, refitarg)), silent = TRUE) if (!is.element("try-error", class(fcarg$object))) { # Keep original variance estimate (for consistent bias adjustment) if(!is.null(object$sigma2)) fcarg$object$sigma2 <- object$sigma2 fits[i + h] <- suppressWarnings(do.call("forecast", fcarg)$mean[h]) } } return(fits) } # The following function is for when users don't realise they already have the forecasts. # e.g., with the dshw(), meanf() or rwf() functions. #' @export forecast.forecast <- function(object, ...) { input_names <- as.list(substitute(list(...))) # Read level argument if (is.element("level", names(input_names))) { level <- list(...)[["level"]] if (!identical(level, object$level)) { stop("Please set the level argument when the forecasts are first computed") } } # Read h argument if (is.element("h", names(input_names))) { h <- list(...)[["h"]] if (h > length(object$mean)) { stop("Please select a longer horizon when the forecasts are first computed") } tspf <- tsp(object$mean) object$mean <- ts(object$mean[1:h], start = tspf[1], frequency = tspf[3]) if (!is.null(object$upper)) { object$upper <- ts(object$upper[1:h, , drop = FALSE], start = tspf[1], frequency = tspf[3]) object$lower <- ts(object$lower[1:h, , drop = FALSE], start = tspf[1], frequency = tspf[3]) } } return(object) } subset.forecast <- function(x, ...) { tspx <- tsp(x$mean) x$mean <- subset(x$mean, ...) x$lower <- subset(ts(x$lower, start = tspx[1], frequency = tspx[3]), ...) 
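# The lower/upper limits are stored as matrices, so they are rewrapped as ts # objects sharing the tsp of x$mean before subset() windows them, keeping the # point forecasts and interval limits aligned.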
x$upper <- subset(ts(x$upper, start = tspx[1], frequency = tspx[3]), ...) return(x) } #' Is an object a particular forecast type? #' #' Returns true if the forecast object is of a particular type #' #' @param x object to be tested #' @export is.forecast <- function(x) { inherits(x, "forecast") } #' @export as.ts.forecast <- function(x, ...) { df <- ts(as.matrix(as.data.frame.forecast(x))) tsp(df) <- tsp(x$mean) return(df) } #' @export as.data.frame.mforecast <- function(x, ...) { tmp <- lapply(x$forecast, as.data.frame) series <- names(tmp) times <- rownames(tmp[[1]]) h <- NROW(tmp[[1]]) output <- cbind(Time = times, Series = rep(series[1], h), tmp[[1]]) if (length(tmp) > 1) { for (i in 2:length(tmp)) output <- rbind( output, cbind(Time = times, Series = rep(series[i], h), tmp[[i]]) ) } rownames(output) <- NULL return(output) } #' @export as.data.frame.forecast <- function(x, ...) { nconf <- length(x$level) out <- matrix(x$mean, ncol = 1) ists <- is.ts(x$mean) fr.x <- frequency(x$mean) if (ists) { out <- ts(out) attributes(out)$tsp <- attributes(x$mean)$tsp } names <- c("Point Forecast") if (!is.null(x$lower) && !is.null(x$upper) && !is.null(x$level)) { x$upper <- as.matrix(x$upper) x$lower <- as.matrix(x$lower) for (i in 1:nconf) { out <- cbind(out, x$lower[, i, drop = FALSE], x$upper[, i, drop = FALSE]) names <- c(names, paste("Lo", x$level[i]), paste("Hi", x$level[i])) } } colnames(out) <- names tx <- time(x$mean) if (max(abs(tx - round(tx))) < 1e-11) { nd <- 0L } else { nd <- max(round(log10(fr.x) + 1), 2L) } if(nd == 0L) rownames(out) <- round(tx) else rownames(out) <- format(tx, nsmall = nd, digits = nd) # Rest of function borrowed from print.ts(), but with header() omitted if (!ists) { return(as.data.frame(out)) } x <- as.ts(out) calendar <- any(fr.x == c(4, 12)) && length(start(x)) == 2L Tsp <- tsp(x) if (is.null(Tsp)) { warning("series is corrupt, with no 'tsp' attribute") print(unclass(x)) return(invisible(x)) } nn <- 1 + round((Tsp[2L] - Tsp[1L]) * Tsp[3L]) if (NROW(x) != nn) { warning(gettextf("series is corrupt: length %d with 'tsp' implying %d", NROW(x), nn), domain = NA, call. 
= FALSE) calendar <- FALSE } if (NCOL(x) == 1) { if (calendar) { if (fr.x > 1) { dn2 <- if (fr.x == 12) { month.abb } else if (fr.x == 4) { c("Qtr1", "Qtr2", "Qtr3", "Qtr4") } else { paste("p", 1L:fr.x, sep = "") } if (NROW(x) <= fr.x && start(x)[1L] == end(x)[1L]) { dn1 <- start(x)[1L] dn2 <- dn2[1 + (start(x)[2L] - 2 + seq_along(x)) %% fr.x] x <- matrix( format(x, ...), nrow = 1L, byrow = TRUE, dimnames = list(dn1, dn2) ) } else { start.pad <- start(x)[2L] - 1 end.pad <- fr.x - end(x)[2L] dn1 <- start(x)[1L]:end(x)[1L] x <- matrix( c(rep.int("", start.pad), format(x, ...), rep.int("", end.pad)), ncol = fr.x, byrow = TRUE, dimnames = list(dn1, dn2) ) } } else { attributes(x) <- NULL names(x) <- tx } } else { attr(x, "class") <- attr(x, "tsp") <- attr(x, "na.action") <- NULL } } else { if (calendar && fr.x > 1) { tm <- time(x) t2 <- cycle(x) p1 <- format(floor(tm + 1e-8)) rownames(x) <- if (fr.x == 12) { paste(month.abb[t2], p1, sep = " ") } else { paste( p1, if (fr.x == 4) { c("Q1", "Q2", "Q3", "Q4")[t2] } else { format(t2) }, sep = " " ) } } else { rownames(x) <- format(time(x), nsmall = nd) } attr(x, "class") <- attr(x, "tsp") <- attr(x, "na.action") <- NULL } return(as.data.frame(x)) } forecast/R/unitRoot.R0000644000176200001440000004056414003673410014236 0ustar liggesusers#' Number of differences required for a stationary series #' #' Functions to estimate the number of differences required to make a given #' time series stationary. \code{ndiffs} estimates the number of first #' differences necessary. #' #' \code{ndiffs} uses a unit root test to determine the number of differences #' required for time series \code{x} to be made stationary. If #' \code{test="kpss"}, the KPSS test is used with the null hypothesis that #' \code{x} has a stationary root against a unit-root alternative. Then the #' test returns the least number of differences required to pass the test at #' the level \code{alpha}. If \code{test="adf"}, the Augmented Dickey-Fuller #' test is used and if \code{test="pp"} the Phillips-Perron test is used. In #' both of these cases, the null hypothesis is that \code{x} has a unit root #' against a stationary root alternative. Then the test returns the least #' number of differences required to fail the test at the level \code{alpha}. #' #' @param x A univariate time series #' @param alpha Level of the test, possible values range from 0.01 to 0.1. #' @param test Type of unit root test to use #' @param type Specification of the deterministic component in the regression #' @param max.d Maximum number of non-seasonal differences allowed #' @param ... Additional arguments to be passed on to the unit root test #' @return An integer indicating the number of differences required for stationarity. #' @author Rob J Hyndman, Slava Razbash & Mitchell O'Hara-Wild #' @seealso \code{\link{auto.arima}} and \code{\link{ndiffs}} #' @references #' Dickey DA and Fuller WA (1979), "Distribution of the Estimators for #' Autoregressive Time Series with a Unit Root", \emph{Journal of the American #' Statistical Association} \bold{74}:427-431. #' #' Kwiatkowski D, Phillips PCB, Schmidt P and Shin Y (1992) "Testing the Null #' Hypothesis of Stationarity against the Alternative of a Unit Root", #' \emph{Journal of Econometrics} \bold{54}:159-178. #' #' Osborn, D.R. (1990) "A survey of seasonality in UK macroeconomic variables", #' \emph{International Journal of Forecasting}, \bold{6}:327-336. #' #' Phillips, P.C.B. and Perron, P. 
(1988) "Testing for a unit root in time series regression", #' \emph{Biometrika}, \bold{72}(2), 335-346. #' #' Said E and Dickey DA (1984), "Testing for Unit Roots in Autoregressive #' Moving Average Models of Unknown Order", \emph{Biometrika} #' \bold{71}:599-607. #' @keywords ts #' @examples #' ndiffs(WWWusage) #' ndiffs(diff(log(AirPassengers), 12)) #' @importFrom urca ur.kpss ur.df ur.pp #' @export ndiffs <- function(x, alpha = 0.05, test = c("kpss", "adf", "pp"), type = c("level", "trend"), max.d = 2, ...) { test <- match.arg(test) type <- match(match.arg(type), c("level", "trend")) x <- c(na.omit(c(x))) d <- 0 if (alpha < 0.01) { warning("Specified alpha value is less than the minimum, setting alpha=0.01") alpha <- 0.01 } else if (alpha > 0.1) { warning("Specified alpha value is larger than the maximum, setting alpha=0.1") alpha <- 0.1 } if (is.constant(x)) { return(d) } urca_pval <- function(urca_test) { approx(urca_test@cval[1, ], as.numeric(sub("pct", "", colnames(urca_test@cval))) / 100, xout = urca_test@teststat[1], rule = 2)$y } kpss_wrap <- function(..., use.lag = trunc(3 * sqrt(length(x)) / 13)) { ur.kpss(..., use.lag = use.lag) } runTests <- function(x, test, alpha) { tryCatch( { suppressWarnings( diff <- switch(test, kpss = urca_pval(kpss_wrap(x, type = c("mu", "tau")[type], ...)) < alpha, adf = urca_pval(ur.df(x, type = c("drift", "trend")[type], ...)) > alpha, pp = urca_pval(ur.pp(x, type = "Z-tau", model = c("constant", "trend")[type], ...)) > alpha, stop("This shouldn't happen") ) ) diff }, error = function(e) { warning( call. = FALSE, sprintf( "The chosen unit root test encountered an error when testing for the %s difference. From %s(): %s %i differences will be used. Consider using a different unit root test.", switch(as.character(d), `0` = "first", `1` = "second", `2` = "third", paste0(d + 1, "th")), deparse(e$call[[1]]), e$message, d ) ) FALSE } ) } dodiff <- runTests(x, test, alpha) if (is.na(dodiff)) { return(d) } while (dodiff && d < max.d) { d <- d + 1 x <- diff(x) if (is.constant(x)) { return(d) } dodiff <- runTests(x, test, alpha) if (is.na(dodiff)) { return(d - 1) } } return(d) } # Number of seasonal differences #' Number of differences required for a seasonally stationary series #' #' Functions to estimate the number of differences required to make a given #' time series stationary. \code{nsdiffs} estimates the number of seasonal differences #' necessary. #' #' \code{nsdiffs} uses seasonal unit root tests to determine the number of #' seasonal differences required for time series \code{x} to be made stationary #' (possibly with some lag-one differencing as well). #' #' Several different tests are available: #' * If \code{test="seas"} (default), a measure of seasonal strength is used, where differencing is #' selected if the seasonal strength (Wang, Smith & Hyndman, 2006) exceeds 0.64 #' (based on minimizing MASE when forecasting using auto.arima on M3 and M4 data). #' * If \code{test="ch"}, the Canova-Hansen (1995) test is used #' (with null hypothesis of deterministic seasonality) #' * If \code{test="hegy"}, the Hylleberg, Engle, Granger & Yoo (1990) test is used. #' * If \code{test="ocsb"}, the Osborn-Chui-Smith-Birchenhall #' (1988) test is used (with null hypothesis that a seasonal unit root exists). #' #' @md #' #' @inheritParams ndiffs #' @param x A univariate time series #' @param alpha Level of the test, possible values range from 0.01 to 0.1. #' @param test Type of unit root test to use #' @param m Deprecated. 
Length of seasonal period #' @param max.D Maximum number of seasonal differences allowed #' #' @return An integer indicating the number of differences required for stationarity. #' #' @references #' #' Wang, X, Smith, KA, Hyndman, RJ (2006) "Characteristic-based clustering #' for time series data", \emph{Data Mining and Knowledge Discovery}, #' \bold{13}(3), 335-364. #' #' Osborn DR, Chui APL, Smith J, and Birchenhall CR (1988) "Seasonality and the #' order of integration for consumption", \emph{Oxford Bulletin of Economics #' and Statistics} \bold{50}(4):361-377. #' #' Canova F and Hansen BE (1995) "Are Seasonal Patterns Constant #' over Time? A Test for Seasonal Stability", \emph{Journal of Business and #' Economic Statistics} \bold{13}(3):237-252. #' #' Hylleberg S, Engle R, Granger C and Yoo B (1990) "Seasonal integration #' and cointegration.", \emph{Journal of Econometrics} \bold{44}(1), pp. 215-238. #' #' @author Rob J Hyndman, Slava Razbash and Mitchell O'Hara-Wild #' #' @seealso \code{\link{auto.arima}}, \code{\link{ndiffs}}, \code{\link{ocsb.test}}, \code{\link[uroot]{hegy.test}}, and \code{\link[uroot]{ch.test}} #' #' @examples #' nsdiffs(AirPassengers) #' @export nsdiffs <- function(x, alpha = 0.05, m = frequency(x), test = c("seas", "ocsb", "hegy", "ch"), max.D = 1, ...) { test <- match.arg(test) D <- 0 if (alpha < 0.01) { warning("Specified alpha value is less than the minimum, setting alpha=0.01") alpha <- 0.01 } else if (alpha > 0.1) { warning("Specified alpha value is larger than the maximum, setting alpha=0.1") alpha <- 0.1 } if (test == "ocsb" && alpha != 0.05) { warning("Significance levels other than 5% are not currently supported by test='ocsb', defaulting to alpha = 0.05.") alpha <- 0.05 } if (test %in% c("hegy", "ch")) { if (!requireNamespace("uroot", quietly = TRUE)) { stop(paste0("Using a ", test, ' test requires the uroot package. Please install it using `install.packages("uroot")`')) } } if (is.constant(x)) { return(D) } if (!missing(m)) { warning("argument m is deprecated; please set the frequency in the ts object.", call. = FALSE ) x <- ts(x, frequency = m) } if (frequency(x) == 1) { stop("Non seasonal data") } else if (frequency(x) < 1) { warning("I can't handle data with frequency less than 1. Seasonality will be ignored.") return(0) } if (frequency(x) >= length(x)) { return(0) } # Can't take differences runTests <- function(x, test, alpha) { tryCatch( { suppressWarnings( diff <- switch(test, seas = seas.heuristic(x, ...) > 0.64, # Threshold chosen based on seasonal M3 auto.arima accuracy. ocsb = with(ocsb.test(x, maxlag = 3, lag.method = "AIC", ...), statistics > critical), hegy = tail(uroot::hegy.test(x, deterministic = c(1, 1, 0), maxlag = 3, lag.method = "AIC", ...)$pvalues, 2)[-2] > alpha, ch = uroot::ch.test(x, type = "trig", ...)$pvalues["joint"] < alpha ) ) stopifnot(diff %in% c(0, 1)) diff }, error = function(e) { warning( call. = FALSE, sprintf( "The chosen seasonal unit root test encountered an error when testing for the %s difference. From %s(): %s %i seasonal differences will be used. Consider using a different unit root test.", switch(as.character(D), `0` = "first", `1` = "second", `2` = "third", paste0(D + 1, "th")), deparse(e$call[[1]]), e$message, D ) ) 0 } ) } dodiff <- runTests(x, test, alpha) if (dodiff && frequency(x) %% 1 != 0) { warning("The time series frequency has been rounded to support seasonal differencing.", call. 
= FALSE) x <- ts(x, frequency = round(frequency(x))) } while (dodiff && D < max.D) { D <- D + 1 x <- diff(x, lag = frequency(x)) if (is.constant(x)) { return(D) } if (length(x) >= 2 * frequency(x) & D < max.D) { dodiff <- runTests(x, test, alpha) } else { dodiff <- FALSE } } return(D) } # Adjusted from robjhyndman/tsfeatures seas.heuristic <- function(x) { if ("msts" %in% class(x)) { msts <- attributes(x)$msts nperiods <- length(msts) } else if ("ts" %in% class(x)) { msts <- frequency(x) nperiods <- msts > 1 season <- 0 } else { stop("The object provided must be a time-series object (`msts` or `ts`)") } season <- NA stlfit <- mstl(x) remainder <- stlfit[, "Remainder"] seasonal <- stlfit[, grep("Season", colnames(stlfit)), drop = FALSE] vare <- var(remainder, na.rm = TRUE) nseas <- NCOL(seasonal) if (nseas > 0) { season <- numeric(nseas) for (i in seq(nseas)) { season[i] <- max(0, min(1, 1-vare/var(remainder+seasonal[, i], na.rm = TRUE))) } } return(season) } # Model specification from Osborn DR, Chui APL, Smith J, and Birchenhall CR (1988) "Seasonality and the order of integration for consumption", Oxford Bulletin of Economics and Statistics 50(4):361-377. # # $\Delta\Delta_m X_t = \beta_1Z_{4,t-1} + \beta_2Z_{5,t-m} + \alpha_1\Delta\Delta_mX_{t-1} + \ldots + \alpha_p\Delta\Delta_mX_{t-p}$ # Where $Z_{4,t} = \hat{\lambda}(B)\Delta_mX_t$, $Z_{5,t} = \hat{\lambda}(B)\Delta X_t$, and $\hat{\lambda}(B)$ is an AR(p) lag operator with coefficients from an estimated AR(p) process of $\Delta\Delta_m X_t$. #' Osborn, Chui, Smith, and Birchenhall Test for Seasonal Unit Roots #' #' An implementation of the Osborn, Chui, Smith, and Birchenhall (OCSB) test. #' #' @inheritParams uroot::hegy.test #' @aliases print.OCSBtest #' @details #' The regression equation may include lags of the dependent variable. When lag.method = "fixed", the lag order is fixed to maxlag; otherwise, maxlag is the maximum number of lags considered in a lag selection procedure that minimises the lag.method criterion, which can be AIC, BIC or corrected AIC (AICc), obtained as AIC + (2k(k+1))/(n-k-1), where k is the number of parameters and n is the number of available observations in the model. #' #' Critical values for the test are based on simulations, which have been smoothed to produce critical values for all seasonal periods. #' #' @return #' ocsb.test returns a list of class "OCSBtest" with the following components: #' * statistics: the values of the test statistics. #' * pvalues: the p-values for each test statistic. #' * method: a character string describing the type of test. #' * data.name: a character string giving the name of the data. #' * fitted.model: the fitted regression model. #' #' @references #' Osborn DR, Chui APL, Smith J, and Birchenhall CR (1988) "Seasonality and the #' order of integration for consumption", \emph{Oxford Bulletin of Economics #' and Statistics} \bold{50}(4):361-377. #' #' @seealso \code{\link{nsdiffs}} #' #' @examples #' ocsb.test(AirPassengers) #' @importFrom stats AIC BIC #' #' @export ocsb.test <- function(x, lag.method = c("fixed", "AIC", "BIC", "AICc"), maxlag = 0) { lag.method <- match.arg(lag.method) sname <- deparse(substitute(x)) period <- round(frequency(x)) # Avoid non-integer seasonal period if (period == 1) { stop("Data must be seasonal to use `ocsb.test`.
Check your ts frequency.") } genLags <- function(y, maxlag) { if (maxlag == 0) { return(ts(numeric(NROW(y)), start = start(y), frequency = frequency(y))) } out <- do.call(cbind, lapply(seq_len(maxlag), function(k) stats::lag(y, -k))) if (NCOL(out) > 1) { colnames(out) <- paste0("lag_", seq_len(maxlag)) } return(out) } fitOCSB <- function(x, lag, maxlag) { period <- round(frequency(x)) # Avoid non-integer seasonal period # Compute (1-B)(1-B^m)y_t y <- diff(diff(x, period)) ylag <- genLags(y, lag) if (maxlag > 0) { # Ensure models are fitted on same length for lag order selection via lag.method y <- tail(y, -maxlag) } mf <- na.omit(cbind(y = y, x = ylag)) # Estimate lambda(B) coefficients ar.fit <- lm(y ~ 0 + ., data = mf) # Compute lambda(B)(1-B^m)y_{t-1} Z4_frame <- na.omit(cbind(y = diff(x, period), x = genLags(diff(x, period), lag))) Z4 <- Z4_frame[, "y"] - suppressWarnings(predict(ar.fit, Z4_frame)) # Compute lambda(B)(1-B)y_{t-m} Z5_frame <- na.omit(cbind(y = diff(x), x = genLags(diff(x), lag))) Z5 <- Z5_frame[, "y"] - suppressWarnings(predict(ar.fit, Z5_frame)) # Combine regressors data <- na.omit(cbind(mf, Z4 = stats::lag(Z4, -1), Z5 = stats::lag(Z5, -period))) y <- data[, 1] xreg <- data[, -1] lm(y ~ 0 + xreg) } # Estimate maxlag if (maxlag > 0) { if (lag.method != "fixed") { tmp <- vector("list", maxlag + 1) fits <- lapply(seq_len(maxlag), function(lag) fitOCSB(x, lag, maxlag)) icvals <- unlist(switch(lag.method, AIC = lapply(fits, AIC), BIC = lapply(fits, BIC), AICc = lapply( fits, function(x) { k <- x$rank + 1 -2 * logLik(x) + 2 * k + (2 * k * (k + 1)) / (length(residuals(x)) - k - 1) } ) )) id <- which.min(icvals) maxlag <- id - 1 } } regression <- fitOCSB(x, maxlag, maxlag) # if(any(is.na(regression$coefficients))) # stop("Model did not reach a solution. Check the time series data.") stat <- summary(regression)$coefficients[c("xregZ4", "xregZ5"), "t value"] if (any(is.na(stat))) { stop("Model did not reach a solution. Consider using a longer series or a different test.") } structure(list( statistics = stat[2], critical = calcOCSBCritVal(period), method = "OCSB test", lag.method = lag.method, lag.order = maxlag, fitted.model = regression, data.name = sname ), class = "OCSBtest" ) } # Return critical values for OCSB test at 5% level # Approximation based on extensive simulations. calcOCSBCritVal <- function(seasonal.period) { log.m <- log(seasonal.period) return(-0.2937411 * exp(-0.2850853 * (log.m - 0.7656451) + (-0.05983644) * ((log.m - 0.7656451)^2)) - 1.652202) } #' @export print.OCSBtest <- function(x, ...) 
{ cat("\n") cat(strwrap(x$method, prefix = "\t"), sep = "\n") cat("\n") cat("data: ", x$data.name, "\n\n", sep = "") cat(paste0("Test statistic: ", round(x$statistics, 4), ", 5% critical value: ", round(x$critical, 4))) cat("\n") cat("alternative hypothesis: stationary") cat("\n\n") cat(paste0("Lag order ", x$lag.order, " was selected using ", x$lag.method)) } forecast/R/fitBATS.R0000644000176200001440000005332614003673410013647 0ustar liggesusers# TODO: # # Author: srazbash ############################################################################### fitPreviousBATSModel <- function(y, model, biasadj=FALSE) { seasonal.periods <- model$seasonal.periods if (is.null(seasonal.periods) == FALSE) { seasonal.periods <- as.integer(sort(seasonal.periods)) } paramz <- unParameterise(model$parameters$vect, model$parameters$control) lambda <- paramz$lambda alpha <- paramz$alpha beta.v <- paramz$beta small.phi <- paramz$small.phi gamma.v <- paramz$gamma.v ar.coefs <- paramz$ar.coefs ma.coefs <- paramz$ma.coefs p <- length(ar.coefs) q <- length(ma.coefs) ## Calculate the variance: # 1. Re-set up the matrices w <- .Call("makeBATSWMatrix", smallPhi_s = small.phi, sPeriods_s = seasonal.periods, arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, PACKAGE = "forecast") g <- .Call("makeBATSGMatrix", as.numeric(alpha), beta.v, gamma.v, seasonal.periods, as.integer(p), as.integer(q), PACKAGE = "forecast") F <- makeFMatrix(alpha = alpha, beta = beta.v, small.phi <- small.phi, seasonal.periods = seasonal.periods, gamma.bold.matrix = g$gamma.bold.matrix, ar.coefs = ar.coefs, ma.coefs = ma.coefs) # 2. Calculate! y.touse <- y if (!is.null(lambda)) { y.touse <- BoxCox(y, lambda = lambda) lambda <- attr(y.touse, "lambda") } fitted.values.and.errors <- calcModel(y.touse, model$seed.states, F, g$g, w) e <- fitted.values.and.errors$e fitted.values <- fitted.values.and.errors$y.hat variance <- sum((e * e)) / length(y) if (!is.null(lambda)) { fitted.values <- InvBoxCox(fitted.values, lambda = lambda, biasadj, variance) } model.for.output <- model model.for.output$variance <- variance model.for.output$fitted.values <- c(fitted.values) model.for.output$errors <- c(e) model.for.output$x <- fitted.values.and.errors$x model.for.output$y <- y attributes(model.for.output$fitted.values) <- attributes(model.for.output$errors) <- attributes(y) return(model.for.output) } fitSpecificBATS <- function(y, use.box.cox, use.beta, use.damping, seasonal.periods=NULL, starting.params=NULL, x.nought=NULL, ar.coefs=NULL, ma.coefs=NULL, init.box.cox=NULL, bc.lower=0, bc.upper=1, biasadj=FALSE) { if (!is.null(seasonal.periods)) { seasonal.periods <- as.integer(sort(seasonal.periods)) } ## Meaning/purpose of the first if() statement: If this is the first pass, then use default starting values. Else if it is the second pass, then use the values form the first pass as starting values. 
if (is.null(starting.params)) { ## Check for the existence of ARMA() coefficients if (!is.null(ar.coefs)) { p <- length(ar.coefs) } else { p <- 0 } if (!is.null(ma.coefs)) { q <- length(ma.coefs) } else { q <- 0 } # Calculate starting values: if (sum(seasonal.periods) > 16) { alpha <- (1e-6) } else { alpha <- .09 } if (use.beta) { if (sum(seasonal.periods) > 16) { beta.v <- (5e-7) } else { beta.v <- .05 } b <- 0.00 if (use.damping) { small.phi <- .999 } else { small.phi <- 1 } } else { beta.v <- NULL b <- NULL small.phi <- NULL use.damping <- FALSE } if (!is.null(seasonal.periods)) { gamma.v <- rep(.001, length(seasonal.periods)) s.vector <- numeric(sum(seasonal.periods)) # for(s in seasonal.periods) { # s.vector <- cbind(s.vector, numeric(s)) # } } else { gamma.v <- NULL s.vector <- NULL } if (use.box.cox) { if (!is.null(init.box.cox)) { lambda <- init.box.cox } else { lambda <- BoxCox.lambda(y, lower = 0, upper = 1.5) } y.transformed <- BoxCox(y, lambda = lambda) lambda <- attr(y.transformed, "lambda") } else { # the "else" is not needed at the moment lambda <- NULL } } else { paramz <- unParameterise(starting.params$vect, starting.params$control) lambda <- paramz$lambda alpha <- paramz$alpha beta.v <- paramz$beta b <- 0 small.phi <- paramz$small.phi gamma.v <- paramz$gamma.v if (!is.null(seasonal.periods)) { s.vector <- numeric(sum(seasonal.periods)) } else { s.vector <- NULL } # ar.coefs <- paramz$ar.coefs # ma.coefs <- paramz$ma.coefs ## Check for the existence of ARMA() coefficients if (!is.null(ar.coefs)) { p <- length(ar.coefs) } else { p <- 0 } if (!is.null(ma.coefs)) { q <- length(ma.coefs) } else { q <- 0 } } if (is.null(x.nought)) { # Start with the seed states equal to zero if (!is.null(ar.coefs)) { d.vector <- numeric(length(ar.coefs)) } else { d.vector <- NULL } if (!is.null(ma.coefs)) { epsilon.vector <- numeric(length(ma.coefs)) } else { epsilon.vector <- NULL } x.nought <- makeXMatrix(l = 0, b = b, s.vector = s.vector, d.vector = d.vector, epsilon.vector = epsilon.vector)$x } ## Optimise the starting values: # Make the parameter vector parameterise param.vector <- parameterise(alpha = alpha, beta.v = beta.v, small.phi = small.phi, gamma.v = gamma.v, lambda = lambda, ar.coefs = ar.coefs, ma.coefs = ma.coefs) par.scale <- makeParscaleBATS(param.vector$control) # w <- makeWMatrix(small.phi=small.phi, seasonal.periods=seasonal.periods, ar.coefs=ar.coefs, ma.coefs=ma.coefs) w <- .Call("makeBATSWMatrix", smallPhi_s = small.phi, sPeriods_s = seasonal.periods, arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, PACKAGE = "forecast") # g <- makeGMatrix(alpha=alpha, beta=beta.v, gamma.vector=gamma, seasonal.periods=seasonal.periods, p=p, q=q) g <- .Call("makeBATSGMatrix", as.numeric(alpha), beta.v, gamma.v, seasonal.periods, as.integer(p), as.integer(q), PACKAGE = "forecast") F <- makeFMatrix(alpha = alpha, beta = beta.v, small.phi = small.phi, seasonal.periods = seasonal.periods, gamma.bold.matrix = g$gamma.bold.matrix, ar.coefs = ar.coefs, ma.coefs = ma.coefs) D <- F - g$g %*% w$w.transpose ## Set up matrices to find the seed states if (use.box.cox) { y.transformed <- BoxCox(y, lambda = lambda) lambda <- attr(y.transformed, "lambda") # x.nought <- BoxCox(x.nought, lambda=lambda) y.tilda <- calcModel(y.transformed, x.nought, F, g$g, w)$e } else { y.tilda <- calcModel(y, x.nought, F, g$g, w)$e } w.tilda.transpose <- matrix(0, nrow = length(y), ncol = ncol(w$w.transpose)) w.tilda.transpose[1, ] <- w$w.transpose # for(i in 2:length(y)) { # w.tilda.transpose[i,] <- w.tilda.transpose[(i-1),] 
%*% D # } w.tilda.transpose <- .Call( "calcWTilda", wTildaTransposes = w.tilda.transpose, Ds = D, PACKAGE = "forecast" ) ## If there is a seasonal component in the model, then the following adjustments need to be made so that the seed states can be found if (!is.null(seasonal.periods)) { # drop the lines from w.tilda.transpose that correspond to the last seasonal value of each seasonal period list.cut.w <- cutW(use.beta = use.beta, w.tilda.transpose = w.tilda.transpose, seasonal.periods = seasonal.periods, p = p, q = q) w.tilda.transpose <- list.cut.w$matrix mask.vector <- list.cut.w$mask.vector ## Run the regression to find the SEED STATES coefs <- lm(t(y.tilda) ~ w.tilda.transpose - 1)$coefficients ## Find the ACTUAL SEASONAL seed states x.nought <- calcSeasonalSeeds(use.beta = use.beta, coefs = coefs, seasonal.periods = seasonal.periods, mask.vector = mask.vector, p = p, q = q) } else { # Remove the AR() and MA() bits if they exist if ((p != 0) | (q != 0)) { end.cut <- ncol(w.tilda.transpose) start.cut <- end.cut - (p + q) + 1 w.tilda.transpose <- w.tilda.transpose[, -c(start.cut:end.cut)] } x.nought <- lm(t(y.tilda) ~ w.tilda.transpose - 1)$coefficients x.nought <- matrix(x.nought, nrow = length(x.nought), ncol = 1) ## Replace the AR() and MA() bits if they exist if ((p != 0) | (q != 0)) { arma.seed.states <- numeric((p + q)) arma.seed.states <- matrix(arma.seed.states, nrow = length(arma.seed.states), ncol = 1) x.nought <- rbind(x.nought, arma.seed.states) } } #### # Set up environment opt.env <- new.env() assign("F", F, envir = opt.env) assign("w.transpose", w$w.transpose, envir = opt.env) assign("g", g$g, envir = opt.env) assign("gamma.bold.matrix", g$gamma.bold.matrix, envir = opt.env) assign("y", matrix(y, nrow = 1, ncol = length(y)), envir = opt.env) assign("y.hat", matrix(0, nrow = 1, ncol = length(y)), envir = opt.env) assign("e", matrix(0, nrow = 1, ncol = length(y)), envir = opt.env) assign("x", matrix(0, nrow = length(x.nought), ncol = length(y)), envir = opt.env) if (!is.null(seasonal.periods)) { tau <- sum(seasonal.periods) } else { tau <- 0 } ## Second pass of optimisation if (use.box.cox) { # Un-transform the seed states # x.nought.untransformed <- InvBoxCox(x.nought, lambda=lambda) assign("x.nought.untransformed", InvBoxCox(x.nought, lambda = lambda), envir = opt.env) # Optimise the likelihood function optim.like <- optim(par = param.vector$vect, fn = calcLikelihood, method = "Nelder-Mead", opt.env = opt.env, use.beta = use.beta, use.small.phi = use.damping, seasonal.periods = seasonal.periods, p = p, q = q, tau = tau, bc.lower = bc.lower, bc.upper = bc.upper, control = list(maxit = (100 * length(param.vector$vect) ^ 2), parscale = par.scale)) # Get the parameters out of the param.vector paramz <- unParameterise(optim.like$par, param.vector$control) lambda <- paramz$lambda alpha <- paramz$alpha beta.v <- paramz$beta small.phi <- paramz$small.phi gamma.v <- paramz$gamma.v ar.coefs <- paramz$ar.coefs ma.coefs <- paramz$ma.coefs # Transform the seed states x.nought <- BoxCox(opt.env$x.nought.untransformed, lambda = lambda) lambda <- attr(x.nought, "lambda") ## Calculate the variance: # 1.
Re-set up the matrices # w <- makeWMatrix(small.phi=small.phi, seasonal.periods=seasonal.periods, ar.coefs=ar.coefs, ma.coefs=ma.coefs) w <- .Call("makeBATSWMatrix", smallPhi_s = small.phi, sPeriods_s = seasonal.periods, arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, PACKAGE = "forecast") # g <- makeGMatrix(alpha=alpha, beta=beta.v, gamma.vector=gamma, seasonal.periods=seasonal.periods, p=p, q=q) g <- .Call("makeBATSGMatrix", as.numeric(alpha), beta.v, gamma.v, seasonal.periods, as.integer(p), as.integer(q), PACKAGE = "forecast") F <- makeFMatrix(alpha = alpha, beta = beta.v, small.phi = small.phi, seasonal.periods = seasonal.periods, gamma.bold.matrix = g$gamma.bold.matrix, ar.coefs = ar.coefs, ma.coefs = ma.coefs) # 2. Calculate! y.transformed <- BoxCox(y, lambda = lambda) lambda <- attr(y.transformed, "lambda") fitted.values.and.errors <- calcModel(y.transformed, x.nought, F, g$g, w) e <- fitted.values.and.errors$e variance <- sum((e * e)) / length(y) fitted.values <- InvBoxCox(fitted.values.and.errors$y.hat, lambda = lambda, biasadj, variance) attr(lambda, "biasadj") <- biasadj # e <- InvBoxCox(e, lambda=lambda) # ee <- y-fitted.values } else { # else if we are not using the Box-Cox transformation # Optimise the likelihood function if (length(param.vector$vect) > 1) { optim.like <- optim(par = param.vector$vect, fn = calcLikelihoodNOTransformed, method = "Nelder-Mead", opt.env = opt.env, x.nought = x.nought, use.beta = use.beta, use.small.phi = use.damping, seasonal.periods = seasonal.periods, p = p, q = q, tau = tau, control = list(maxit = (100 * length(param.vector$vect) ^ 2), parscale = par.scale)) } else { optim.like <- optim(par = param.vector$vect, fn = calcLikelihoodNOTransformed, method = "BFGS", opt.env = opt.env, x.nought = x.nought, use.beta = use.beta, use.small.phi = use.damping, seasonal.periods = seasonal.periods, p = p, q = q, tau = tau, control = list(parscale = par.scale)) } # Get the parameters out of the param.vector paramz <- unParameterise(optim.like$par, param.vector$control) lambda <- paramz$lambda alpha <- paramz$alpha beta.v <- paramz$beta small.phi <- paramz$small.phi gamma.v <- paramz$gamma.v ar.coefs <- paramz$ar.coefs ma.coefs <- paramz$ma.coefs ## Calculate the variance: # 1. Re-set up the matrices # w <- makeWMatrix(small.phi=small.phi, seasonal.periods=seasonal.periods, ar.coefs=ar.coefs, ma.coefs=ma.coefs) w <- .Call("makeBATSWMatrix", smallPhi_s = small.phi, sPeriods_s = seasonal.periods, arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, PACKAGE = "forecast") # g <- makeGMatrix(alpha=alpha, beta=beta.v, gamma.vector=gamma, seasonal.periods=seasonal.periods, p=p, q=q) g <- .Call("makeBATSGMatrix", as.numeric(alpha), beta.v, gamma.v, seasonal.periods, as.integer(p), as.integer(q), PACKAGE = "forecast") F <- makeFMatrix(alpha = alpha, beta = beta.v, small.phi <- small.phi, seasonal.periods = seasonal.periods, gamma.bold.matrix = g$gamma.bold.matrix, ar.coefs = ar.coefs, ma.coefs = ma.coefs) # 2. Calculate! 
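# The variance below is the mean of the squared one-step in-sample errors; it # is stored on the returned model and later used to construct prediction # intervals.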
fitted.values.and.errors <- calcModel(y, x.nought, F, g$g, w) e <- fitted.values.and.errors$e fitted.values <- fitted.values.and.errors$y.hat variance <- sum((e * e)) / length(y) } # Get the likelihood likelihood <- optim.like$value # Calculate the AIC aic <- likelihood + 2 * (length(param.vector$vect) + nrow(x.nought)) # Make a list object model.for.output <- list(lambda = lambda, alpha = alpha, beta = beta.v, damping.parameter = small.phi, gamma.values = gamma.v, ar.coefficients = ar.coefs, ma.coefficients = ma.coefs, likelihood = likelihood, optim.return.code = optim.like$convergence, variance = variance, AIC = aic, parameters = list(vect = optim.like$par, control = param.vector$control), seed.states = x.nought, fitted.values = c(fitted.values), errors = c(e), x = fitted.values.and.errors$x, seasonal.periods = seasonal.periods, y = y) class(model.for.output) <- "bats" #### return(model.for.output) } calcModel <- function(y, x.nought, F, g, w) { # w is passed as a list length.ts <- length(y) x <- matrix(0, nrow = length(x.nought), ncol = length.ts) y.hat <- matrix(0, nrow = 1, ncol = length.ts) e <- matrix(0, nrow = 1, ncol = length.ts) y.hat[, 1] <- w$w.transpose %*% x.nought e[, 1] <- y[1] - y.hat[, 1] x[, 1] <- F %*% x.nought + g %*% e[, 1] y <- matrix(y, nrow = 1, ncol = length.ts) loop <- .Call("calcBATS", ys = y, yHats = y.hat, wTransposes = w$w.transpose, Fs = F, xs = x, gs = g, es = e, PACKAGE = "forecast") return(list(y.hat = loop$y.hat, e = loop$e, x = loop$x)) } calcLikelihood <- function(param.vector, opt.env, use.beta, use.small.phi, seasonal.periods, p=0, q=0, tau=0, bc.lower=0, bc.upper=1) { # param vector should be as follows: Box-Cox.parameter, alpha, beta, small.phi, gamma.vector, ar.coefs, ma.coefs # Put the components of the param.vector into meaningful individual variables box.cox.parameter <- param.vector[1] alpha <- param.vector[2] if (use.beta) { if (use.small.phi) { small.phi <- param.vector[3] beta.v <- param.vector[4] gamma.start <- 5 } else { small.phi <- 1 beta.v <- param.vector[3] gamma.start <- 4 } } else { small.phi <- NULL beta.v <- NULL gamma.start <- 3 } if (!is.null(seasonal.periods)) { gamma.vector <- param.vector[gamma.start:(gamma.start + length(seasonal.periods) - 1)] final.gamma.pos <- gamma.start + length(gamma.vector) - 1 } else { gamma.vector <- NULL final.gamma.pos <- gamma.start - 1 } if (p != 0) { ar.coefs <- matrix(param.vector[(final.gamma.pos + 1):(final.gamma.pos + p)], nrow = 1, ncol = p) } else { ar.coefs <- NULL } if (q != 0) { ma.coefs <- matrix(param.vector[(final.gamma.pos + p + 1):length(param.vector)], nrow = 1, ncol = q) } else { ma.coefs <- NULL } x.nought <- BoxCox(opt.env$x.nought.untransformed, lambda = box.cox.parameter) lambda <- attr(x.nought, "lambda") # w <- makeWMatrix(small.phi=small.phi, seasonal.periods=seasonal.periods, ar.coefs=ar.coefs, ma.coefs=ma.coefs) # w <- .Call("makeBATSWMatrix", smallPhi_s = small.phi, sPeriods_s = seasonal.periods, arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, PACKAGE = "forecast") .Call("updateWtransposeMatrix", wTranspose_s = opt.env$w.transpose, smallPhi_s = small.phi, tau_s = as.integer(tau), arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, p_s = as.integer(p), q_s = as.integer(q), PACKAGE = "forecast") # g <- makeGMatrix(alpha=alpha, beta=beta, gamma.vector=gamma.vector, seasonal.periods=seasonal.periods, p=p, q=q) # g <- .Call("makeBATSGMatrix", as.numeric(alpha), beta.v, gamma.vector, seasonal.periods, as.integer(p), as.integer(q), PACKAGE="forecast") .Call("updateGMatrix", g_s = 
opt.env$g, gammaBold_s = opt.env$gamma.bold.matrix, alpha_s = alpha, beta_s = beta.v, gammaVector_s = gamma.vector, seasonalPeriods_s = seasonal.periods, PACKAGE = "forecast") # F <- makeFMatrix(alpha=alpha, beta=beta.v, small.phi=small.phi, seasonal.periods=seasonal.periods, gamma.bold.matrix=g$gamma.bold.matrix, ar.coefs=ar.coefs, ma.coefs=ma.coefs) .Call("updateFMatrix", opt.env$F, small.phi, alpha, beta.v, opt.env$gamma.bold.matrix, ar.coefs, ma.coefs, tau, PACKAGE = "forecast") mat.transformed.y <- BoxCox(opt.env$y, box.cox.parameter) lambda <- attr(mat.transformed.y, "lambda") n <- ncol(opt.env$y) .Call("calcBATSFaster", ys = mat.transformed.y, yHats = opt.env$y.hat, wTransposes = opt.env$w.transpose, Fs = opt.env$F, xs = opt.env$x, gs = opt.env$g, es = opt.env$e, xNought_s = x.nought, sPeriods_s = seasonal.periods, betaV = beta.v, tau_s = as.integer(tau), p_s = as.integer(p), q_s = as.integer(q), PACKAGE = "forecast") log.likelihood <- n * log(sum(opt.env$e ^ 2)) - 2 * (box.cox.parameter - 1) * sum(log(opt.env$y)) assign("D", (opt.env$F - opt.env$g %*% opt.env$w.transpose), envir = opt.env) if (checkAdmissibility(opt.env, box.cox = box.cox.parameter, small.phi = small.phi, ar.coefs = ar.coefs, ma.coefs = ma.coefs, tau = tau, bc.lower = bc.lower, bc.upper = bc.upper)) { return(log.likelihood) } else { return(10 ^ 20) } } calcLikelihoodNOTransformed <- function(param.vector, opt.env, x.nought, use.beta, use.small.phi, seasonal.periods, p=0, q=0, tau=0) { # The likelihood function without the Box-Cox Transformation # param vector should be as follows: alpha, beta, small.phi, gamma.vector, ar.coefs, ma.coefs # Put the components of the param.vector into meaningful individual variables alpha <- param.vector[1] if (use.beta) { if (use.small.phi) { small.phi <- param.vector[2] beta.v <- param.vector[3] gamma.start <- 4 } else { small.phi <- 1 beta.v <- param.vector[2] gamma.start <- 3 } } else { small.phi <- NULL beta.v <- NULL gamma.start <- 2 } if (!is.null(seasonal.periods)) { gamma.vector <- param.vector[gamma.start:(gamma.start + length(seasonal.periods) - 1)] final.gamma.pos <- gamma.start + length(gamma.vector) - 1 } else { gamma.vector <- NULL final.gamma.pos <- gamma.start - 1 } if (p != 0) { ar.coefs <- matrix(param.vector[(final.gamma.pos + 1):(final.gamma.pos + p)], nrow = 1, ncol = p) } else { ar.coefs <- NULL } if (q != 0) { ma.coefs <- matrix(param.vector[(final.gamma.pos + p + 1):length(param.vector)], nrow = 1, ncol = q) } else { ma.coefs <- NULL } # w <- makeWMatrix(small.phi=small.phi, seasonal.periods=seasonal.periods, ar.coefs=ar.coefs, ma.coefs=ma.coefs) # w <- .Call("makeBATSWMatrix", smallPhi_s = small.phi, sPeriods_s = seasonal.periods, arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, PACKAGE = "forecast") .Call("updateWtransposeMatrix", wTranspose_s = opt.env$w.transpose, smallPhi_s = small.phi, tau_s = as.integer(tau), arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, p_s = as.integer(p), q_s = as.integer(q), PACKAGE = "forecast") # g <- makeGMatrix(alpha=alpha, beta=beta, gamma.vector=gamma.vector, seasonal.periods=seasonal.periods, p=p, q=q) # g <- .Call("makeBATSGMatrix", alpha, beta.v, gamma.vector, seasonal.periods, as.integer(p), as.integer(q), PACKAGE="forecast") .Call("updateGMatrix", g_s = opt.env$g, gammaBold_s = opt.env$gamma.bold.matrix, alpha_s = alpha, beta_s = beta.v, gammaVector_s = gamma.vector, seasonalPeriods_s = seasonal.periods, PACKAGE = "forecast") # F <- makeFMatrix(alpha=alpha, beta=beta.v, small.phi=small.phi, seasonal.periods=seasonal.periods, 
gamma.bold.matrix=g$gamma.bold.matrix, ar.coefs=ar.coefs, ma.coefs=ma.coefs) .Call("updateFMatrix", opt.env$F, small.phi, alpha, beta.v, opt.env$gamma.bold.matrix, ar.coefs, ma.coefs, tau, PACKAGE = "forecast") n <- ncol(opt.env$y) ######################################################################################### # e <- calcModel(y=y, x.nought=x.nought, F=F, g=g$g, w=w)$e ###################### #### calcModel() code: ## # x <- matrix(0, nrow=length(x.nought), ncol=n) # y.hat <- matrix(0,nrow=1, ncol=n) # e <- matrix(0, nrow=1, ncol=n) # opt.env$y.hat[,1] <- w$w.transpose %*% x.nought # opt.env$e[,1] <- opt.env$y[,1]-opt.env$y.hat[,1] # opt.env$x[,1] <- opt.env$F %*% x.nought + g$g %*% opt.env$e[,1] # mat.y <- matrix(opt.env$y, nrow=1, ncol=n) .Call("calcBATSFaster", ys = opt.env$y, yHats = opt.env$y.hat, wTransposes = opt.env$w.transpose, Fs = opt.env$F, xs = opt.env$x, gs = opt.env$g, es = opt.env$e, xNought_s = x.nought, sPeriods_s = seasonal.periods, betaV = beta.v, tau_s = as.integer(tau), p_s = as.integer(p), q_s = as.integer(q), PACKAGE = "forecast") ## #### #################################################################### log.likelihood <- n * log(sum(opt.env$e * opt.env$e)) # D <- opt.env$F - g$g %*% w$w.transpose assign("D", (opt.env$F - opt.env$g %*% opt.env$w.transpose), envir = opt.env) if (checkAdmissibility(opt.env = opt.env, box.cox = NULL, small.phi = small.phi, ar.coefs = ar.coefs, ma.coefs = ma.coefs, tau = tau)) { return(log.likelihood) } else { return(10 ^ 20) } } forecast/R/tscv.R0000644000176200001440000002132514104700723013364 0ustar liggesusers# Time series cross-validation # y is a time series # forecastfunction must return an object of class forecast # h is number of steps ahead to forecast # ... are passed to forecastfunction #' Time series cross-validation #' #' \code{tsCV} computes the forecast errors obtained by applying #' \code{forecastfunction} to subsets of the time series \code{y} using a #' rolling forecast origin. #' #' Let \code{y} contain the time series \eqn{y_1,\dots,y_T}{y[1:T]}. Then #' \code{forecastfunction} is applied successively to the time series #' \eqn{y_1,\dots,y_t}{y[1:t]}, for \eqn{t=1,\dots,T-h}, making predictions #' \eqn{\hat{y}_{t+h|t}}{f[t+h]}. The errors are given by \eqn{e_{t+h} = #' y_{t+h}-\hat{y}_{t+h|t}}{e[t+h] = y[t+h]-f[t+h]}. If h=1, these are returned as a #' vector, \eqn{e_1,\dots,e_T}{e[1:T]}. For h>1, they are returned as a matrix with #' the hth column containing errors for forecast horizon h. #' The first few errors may be missing as #' it may not be possible to apply \code{forecastfunction} to very short time #' series. #' #' @param y Univariate time series #' @param forecastfunction Function to return an object of class #' \code{forecast}. Its first argument must be a univariate time series, and it #' must have an argument \code{h} for the forecast horizon. If exogenous predictors are used, #' then it must also have \code{xreg} and \code{newxreg} arguments corresponding to the #' training and test periods. #' @param h Forecast horizon #' @param window Length of the rolling window; if NULL, a rolling window is not used. #' @param xreg Exogenous predictor variables passed to the forecast function if required. #' @param initial Initial period of the time series where no cross-validation is performed. #' @param ... Other arguments are passed to \code{forecastfunction}. #' @return Numerical time series object containing the forecast errors as a vector (if h=1) #' and a matrix otherwise.
The time index corresponds to the last period of the training #' data. The columns correspond to the forecast horizons. #' @author Rob J Hyndman #' @seealso \link{CV}, \link{CVar}, \link{residuals.Arima}, \url{https://robjhyndman.com/hyndsight/tscv/}. #' #' @keywords ts #' @examples #' #' #Fit an AR(2) model to each rolling origin subset #' far2 <- function(x, h){forecast(Arima(x, order=c(2,0,0)), h=h)} #' e <- tsCV(lynx, far2, h=1) #' #' #Fit the same model with a rolling window of length 30 #' e <- tsCV(lynx, far2, h=1, window=30) #' #' #Example with exogenous predictors #' far2_xreg <- function(x, h, xreg, newxreg) { #' forecast(Arima(x, order=c(2,0,0), xreg=xreg), xreg=newxreg) #' } #' #' y <- ts(rnorm(50)) #' xreg <- matrix(rnorm(100),ncol=2) #' e <- tsCV(y, far2_xreg, h=3, xreg=xreg) #' #' @export tsCV <- function(y, forecastfunction, h=1, window=NULL, xreg=NULL, initial=0, ...) { y <- as.ts(y) n <- length(y) e <- ts(matrix(NA_real_, nrow = n, ncol = h)) if(initial >= n) stop("initial period too long") tsp(e) <- tsp(y) if (!is.null(xreg)) { # Make xreg a ts object to allow easy subsetting later xreg <- ts(as.matrix(xreg)) if(NROW(xreg) != length(y)) stop("xreg must be of the same size as y") # Pad xreg with NAs xreg <- ts(rbind(xreg, matrix(NA, nrow=h, ncol=NCOL(xreg))), start = start(y), frequency = frequency(y)) } if (is.null(window)) indx <- seq(1+initial, n - 1L) else indx <- seq(window+initial, n - 1L, by = 1L) for (i in indx) { y_subset <- subset( y, start = ifelse(is.null(window), 1L, ifelse(i - window >= 0L, i - window + 1L, stop("small window"))), end = i) if (is.null(xreg)) { fc <- try(suppressWarnings( forecastfunction(y_subset, h = h, ...) ), silent = TRUE) } else { xreg_subset <- subset( xreg, start = ifelse(is.null(window), 1L, ifelse(i - window >= 0L, i - window + 1L, stop("small window"))), end = i) xreg_future <- subset( xreg, start = i+1, end = i+h) fc <- try(suppressWarnings( forecastfunction(y_subset, h = h, xreg = xreg_subset, newxreg=xreg_future) ), silent = TRUE) } if (!is.element("try-error", class(fc))) { e[i, ] <- y[i + seq(h)] - fc$mean[seq(h)] } } if (h == 1) { return(e[, 1L]) } else { colnames(e) <- paste("h=", 1:h, sep = "") return(e) } } # Cross-validation for AR models # By Gabriel Caceres ## Note arguments to pass must be named #' k-fold Cross-Validation applied to an autoregressive model #' #' \code{CVar} computes the errors obtained by applying an autoregressive #' modelling function to subsets of the time series \code{y} using k-fold #' cross-validation as described in Bergmeir, Hyndman and Koo (2018). It also #' applies a Ljung-Box test to the residuals. If this test is significant #' (see returned pvalue), there is serial correlation in the residuals and the #' model can be considered to be underfitting the data. In this case, the #' cross-validated errors can underestimate the generalization error and should #' not be used. #' #' @aliases print.CVar #' #' @param y Univariate time series #' @param k Number of folds to use for cross-validation. #' @param FUN Function to fit an autoregressive model. Currently, it only works #' with the \code{\link{nnetar}} function. #' @param cvtrace Provide progress information. #' @param blocked If TRUE, the folds are chosen as consecutive blocks; otherwise #' they are chosen at random. #' @param LBlags Lags to use in the Ljung-Box test; defaults to 24. For yearly #' series it can be set to 20. #' @param ... Other arguments are passed to \code{FUN}.
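#' #' As an illustration of the fold construction in the code below (a sketch, not #' extra functionality): with \code{n = 10} observations and \code{k = 2}, #' \code{blocked=TRUE} gives the fold assignment 1 1 1 1 1 2 2 2 2 2, whereas #' \code{blocked=FALSE} scatters the two folds randomly across the series.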
#' @return A list containing information about the model and accuracy for each
#' fold, plus other summary information computed across folds.
#' @author Gabriel Caceres and Rob J Hyndman
#' @seealso \link{CV}, \link{tsCV}.
#' @references Bergmeir, C., Hyndman, R.J., Koo, B. (2018) A note on the
#' validity of cross-validation for evaluating time series prediction.
#' \emph{Computational Statistics & Data Analysis}, \bold{120}, 70-83.
#' \url{https://robjhyndman.com/publications/cv-time-series/}.
#' @keywords ts
#' @examples
#'
#' modelcv <- CVar(lynx, k=5, lambda=0.15)
#' print(modelcv)
#' print(modelcv$fold1)
#'
#' library(ggplot2)
#' autoplot(lynx, series="Data") +
#'   autolayer(modelcv$testfit, series="Fits") +
#'   autolayer(modelcv$residuals, series="Residuals")
#' ggAcf(modelcv$residuals)
#'
#' @export
CVar <- function(y, k=10, FUN=nnetar, cvtrace=FALSE, blocked=FALSE, LBlags=24, ...) {
  nx <- length(y)
  # The number of folds can be at most the number of observations
  k <- min(as.integer(k), nx)
  if (k <= 1L) {
    stop("k must be at least 2")
  }
  # Set up folds
  ind <- seq_len(nx)
  fold <- if (blocked) {
    sort(rep(1:k, length.out = nx))
  } else {
    sample(rep(1:k, length.out = nx))
  }

  cvacc <- matrix(NA_real_, nrow = k, ncol = 7)
  out <- list()
  alltestfit <- rep(NA, length.out = nx)
  for (i in 1:k) {
    out[[paste0("fold", i)]] <- list()
    testset <- ind[fold == i]
    trainset <- ind[fold != i]
    trainmodel <- FUN(y, subset = trainset, ...)
    testmodel <- FUN(y, model = trainmodel, xreg = trainmodel$xreg)
    testfit <- fitted(testmodel)
    acc <- accuracy(y, testfit, test = testset)
    cvacc[i, ] <- acc
    out[[paste0("fold", i)]]$model <- trainmodel
    out[[paste0("fold", i)]]$accuracy <- acc
    out[[paste0("fold", i)]]$testfit <- testfit
    out[[paste0("fold", i)]]$testset <- testset
    alltestfit[testset] <- testfit[testset]
    if (isTRUE(cvtrace)) {
      cat("Fold", i, "\n")
      print(acc)
      cat("\n")
    }
  }
  out$testfit <- ts(alltestfit)
  tsp(out$testfit) <- tsp(y)
  out$residuals <- out$testfit - y
  out$LBpvalue <- Box.test(out$residuals, type = "Ljung", lag = LBlags)$p.value
  out$k <- k
  # Calculate mean accuracy across all folds
  CVmean <- matrix(apply(cvacc, 2, FUN = mean, na.rm = TRUE),
    dimnames = list(colnames(acc), "Mean"))
  # Calculate accuracy sd across all folds --- include?
  CVsd <- matrix(apply(cvacc, 2, FUN = sd, na.rm = TRUE),
    dimnames = list(colnames(acc), "SD"))
  out$CVsummary <- cbind(CVmean, CVsd)
  out$series <- deparse(substitute(y))
  out$call <- match.call()
  return(structure(out, class = c("CVar", class(trainmodel))))
}

#' @export
print.CVar <- function(x, ...) {
  cat("Series:", x$series, "\n")
  cat("Call: ")
  print(x$call)
  # Add info about series, function, and parameters
  # Add note about any NA/NaN in folds?
  #
  # Print number of folds
  cat("\n", x$k, "-fold cross-validation\n", sep = "")
  # Print mean & sd accuracy() results
  print(x$CVsummary)
  cat("\n")
  cat("p-value of Ljung-Box test of residuals is ", x$LBpvalue, "\n")
  cat("if this value is significant (<0.05),\n")
  cat("the result of the cross-validation should not be used\n")
  cat("as the model is underfitting the data.\n")
  invisible(x)
}
forecast/R/season.R0000644000176200001440000002531314003673410013676 0ustar liggesusers### Functions to handle seasonality

#' Number of days in each season
#'
#' Returns the number of days in each month or quarter of the observed time period.
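#'
#' Leap years are accounted for: in a leap year, February is counted as 29 days
#' (or the first quarter as 91 days).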
#'
#' Useful for month length adjustments
#'
#' @param x time series
#' @return Time series
#' @author Rob J Hyndman
#' @seealso \code{\link[forecast]{bizdays}}
#' @keywords ts
#' @examples
#'
#' par(mfrow=c(2,1))
#' plot(ldeaths,xlab="Year",ylab="deaths",
#'   main="Monthly deaths from lung disease (UK)")
#' ldeaths.adj <- ldeaths/monthdays(ldeaths)*365.25/12
#' plot(ldeaths.adj,xlab="Year",ylab="deaths",
#'   main="Adjusted monthly deaths from lung disease (UK)")
#'
#' @export
monthdays <- function(x) {
  if (!is.ts(x)) {
    stop("Not a time series")
  }
  f <- frequency(x)
  if (f == 12) {
    days <- c(31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31)
  } else if (f == 4) {
    days <- c(90, 91, 92, 92)
  } else {
    stop("Not monthly or quarterly data")
  }
  nyears <- round(length(x) / f + 1) + 1
  years <- (1:nyears) + (start(x)[1] - 1)
  leap.years <- ((years %% 4 == 0) & !(years %% 100 == 0 & years %% 400 != 0))[1:nyears]
  dummy <- t(matrix(rep(days, nyears), nrow = f))
  if (f == 12) {
    dummy[leap.years, 2] <- 29
  } else {
    dummy[leap.years, 1] <- 91
  }
  xx <- c(t(dummy))[start(x)[2] - 1 + (1:length(x))]
  return(ts(xx, start = start(x), frequency = f))
}

#' Forecast seasonal index
#'
#' Returns a vector containing the seasonal index for \code{h} future periods. If
#' the seasonal index is non-periodic, it uses the last values of the index.
#'
#' @param object Output from \code{\link[stats]{decompose}} or
#' \link[stats]{stl}.
#' @param h Number of periods ahead to forecast
#' @return Time series
#' @author Rob J Hyndman
#' @keywords ts
#' @examples
#' uk.stl <- stl(UKDriverDeaths,"periodic")
#' uk.sa <- seasadj(uk.stl)
#' uk.fcast <- holt(uk.sa,36)
#' seasf <- sindexf(uk.stl,36)
#' uk.fcast$mean <- uk.fcast$mean + seasf
#' uk.fcast$lower <- uk.fcast$lower + cbind(seasf,seasf)
#' uk.fcast$upper <- uk.fcast$upper + cbind(seasf,seasf)
#' uk.fcast$x <- UKDriverDeaths
#' plot(uk.fcast,main="Forecasts from Holt's method with seasonal adjustment")
#'
#' @export
sindexf <- function(object, h) {
  if ("stl" %in% class(object)) {
    ss <- object$time.series[, 1]
    m <- frequency(ss)
    ss <- ss[length(ss) - (m:1) + 1]
    tsp.x <- tsp(object$time.series)
  } else if ("decomposed.ts" %in% class(object)) {
    ss <- object$figure
    m <- frequency(object$seasonal)
    n <- length(object$trend)
    ss <- rep(ss, n / m + 1)[1:n]
    ss <- ss[n - (m:1) + 1]
    tsp.x <- tsp(object$seasonal)
  } else {
    stop("Object of unknown class")
  }
  out <- ts(rep(ss, h / m + 1)[1:h], frequency = m, start = tsp.x[2] + 1 / m)
  return(out)
}

#' Seasonal dummy variables
#'
#' \code{seasonaldummy} returns a matrix of dummy variables suitable for use in
#' \code{\link{Arima}}, \code{\link{auto.arima}} or \code{\link{tslm}}. The
#' last season is omitted and used as the control.
#'
#' \code{seasonaldummyf} is deprecated; instead use the \code{h} argument in
#' \code{seasonaldummy}.
#'
#' The number of dummy variables is determined from the time series
#' characteristics of \code{x}. When \code{h} is missing, the length of
#' \code{x} determines the number of rows of the matrix returned by
#' \code{seasonaldummy}. Otherwise, the value of \code{h} determines the number
#' of rows, and the resulting matrix is typically used for forecasting. The
#' values within \code{x} are not used.
#'
#' @param x Seasonal time series: a \code{ts} or a \code{msts} object
#' @param h Number of periods ahead to forecast (optional)
#' @return Numerical matrix.
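#' For example, for monthly data the matrix has 11 columns (\code{Jan}, ..., \code{Nov}),
#' each a 0/1 indicator, with December used as the reference season.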
#' @author Rob J Hyndman #' @seealso \code{\link{fourier}} #' @keywords ts #' @examples #' #' plot(ldeaths) #' #' # Using seasonal dummy variables #' month <- seasonaldummy(ldeaths) #' deaths.lm <- tslm(ldeaths ~ month) #' tsdisplay(residuals(deaths.lm)) #' ldeaths.fcast <- forecast(deaths.lm, #' data.frame(month=I(seasonaldummy(ldeaths,36)))) #' plot(ldeaths.fcast) #' #' # A simpler approach to seasonal dummy variables #' deaths.lm <- tslm(ldeaths ~ season) #' ldeaths.fcast <- forecast(deaths.lm, h=36) #' plot(ldeaths.fcast) #' #' @export seasonaldummy <- function(x, h=NULL) { if (!is.ts(x)) { stop("Not a time series") } else { fr.x <- frequency(x) } if (is.null(h)) { if (fr.x == 1) { stop("Non-seasonal time series") } dummy <- as.factor(cycle(x)) dummy.mat <- matrix(0, ncol = frequency(x) - 1, nrow = length(x)) nrow <- 1:length(x) for (i in 1:(frequency(x) - 1)) dummy.mat[dummy == paste(i), i] <- 1 colnames(dummy.mat) <- if (fr.x == 12) { month.abb[1:11] } else if (fr.x == 4) { c("Q1", "Q2", "Q3") } else { paste("S", 1:(fr.x - 1), sep = "") } return(dummy.mat) } else { return(seasonaldummy(ts(rep(0, h), start = tsp(x)[2] + 1 / fr.x, frequency = fr.x))) } } #' @rdname seasonaldummy #' @export seasonaldummyf <- function(x, h) { warning("seasonaldummyf() is deprecated, please use seasonaldummy()") if (!is.ts(x)) { stop("Not a time series") } f <- frequency(x) return(seasonaldummy(ts(rep(0, h), start = tsp(x)[2] + 1 / f, frequency = f))) } #' Fourier terms for modelling seasonality #' #' \code{fourier} returns a matrix containing terms from a Fourier series, up #' to order \code{K}, suitable for use in \code{\link{Arima}}, #' \code{\link{auto.arima}}, or \code{\link{tslm}}. #' #' \code{fourierf} is deprecated, instead use the \code{h} argument in #' \code{fourier}. #' #' The period of the Fourier terms is determined from the time series #' characteristics of \code{x}. When \code{h} is missing, the length of #' \code{x} also determines the number of rows for the matrix returned by #' \code{fourier}. Otherwise, the value of \code{h} determines the number of #' rows for the matrix returned by \code{fourier}, typically used for #' forecasting. The values within \code{x} are not used. #' #' Typical use would omit \code{h} when generating Fourier terms for training a model #' and include \code{h} when generating Fourier terms for forecasting. #' #' When \code{x} is a \code{ts} object, the value of \code{K} should be an #' integer and specifies the number of sine and cosine terms to return. Thus, #' the matrix returned has \code{2*K} columns. #' #' When \code{x} is a \code{msts} object, then \code{K} should be a vector of #' integers specifying the number of sine and cosine terms for each of the #' seasonal periods. Then the matrix returned will have \code{2*sum(K)} #' columns. #' #' @param x Seasonal time series: a \code{ts} or a \code{msts} object #' @param K Maximum order(s) of Fourier terms #' @param h Number of periods ahead to forecast (optional) #' @return Numerical matrix. 
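#' For example, for monthly data with \code{K=2}, the matrix has four columns
#' labelled \code{S1-12}, \code{C1-12}, \code{S2-12} and \code{C2-12}.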
#' @author Rob J Hyndman
#' @seealso \code{\link{seasonaldummy}}
#' @keywords ts
#' @examples
#'
#' library(ggplot2)
#'
#' # Using Fourier series for a "ts" object
#' # K can be chosen to minimize the AICc
#' deaths.model <- auto.arima(USAccDeaths, xreg=fourier(USAccDeaths,K=5), seasonal=FALSE)
#' deaths.fcast <- forecast(deaths.model, xreg=fourier(USAccDeaths, K=5, h=36))
#' autoplot(deaths.fcast) + xlab("Year")
#'
#' # Using Fourier series for a "msts" object
#' taylor.lm <- tslm(taylor ~ fourier(taylor, K = c(3, 3)))
#' taylor.fcast <- forecast(taylor.lm,
#'   data.frame(fourier(taylor, K = c(3, 3), h = 270)))
#' autoplot(taylor.fcast)
#'
#' @export
fourier <- function(x, K, h=NULL) {
  if (is.null(h)) {
    return(...fourier(x, K, 1:NROW(x)))
  }
  else {
    return(...fourier(x, K, NROW(x) + (1:h)))
  }
}

#' @rdname fourier
#' @export
fourierf <- function(x, K, h) {
  warning("fourierf() is deprecated, please use fourier()")
  return(...fourier(x, K, length(x) + (1:h)))
}

# Function to do the work.
...fourier <- function(x, K, times) {
  if (any(class(x) == "msts")) {
    period <- attr(x, "msts")
  } else {
    period <- frequency(x)
  }

  # Patch for older versions of R that do not have sinpi and cospi functions.
  if (!exists("sinpi")) {
    sinpi <- function(x) {
      sin(pi * x)
    }
    cospi <- function(x) {
      cos(pi * x)
    }
  }

  if (length(period) != length(K)) {
    stop("Number of periods does not match number of orders")
  }
  if (any(2 * K > period)) {
    stop("K must not be greater than period/2")
  }

  # Compute periods of all Fourier terms
  p <- numeric(0)
  labels <- character(0)
  for (j in seq_along(period)) {
    if (K[j] > 0) {
      p <- c(p, (1:K[j]) / period[j])
      labels <- c(labels, paste(
        paste0(c("S", "C"), rep(1:K[j], rep(2, K[j]))),
        round(period[j]),
        sep = "-"
      ))
    }
  }
  # Remove equivalent seasonal periods due to multiple seasonality
  k <- duplicated(p)
  p <- p[!k]
  labels <- labels[!rep(k, rep(2, length(k)))]

  # Remove columns where sinpi=0
  k <- abs(2 * p - round(2 * p)) > .Machine$double.eps

  # Compute matrix of Fourier terms
  X <- matrix(NA_real_, nrow = length(times), ncol = 2L * length(p))
  for (j in seq_along(p)) {
    if (k[j]) {
      X[, 2L * j - 1L] <- sinpi(2 * p[j] * times)
    }
    X[, 2L * j] <- cospi(2 * p[j] * times)
  }
  colnames(X) <- labels

  # Remove missing columns
  X <- X[, !is.na(colSums(X)), drop = FALSE]

  return(X)
}

#' Moving-average smoothing
#'
#' \code{ma} computes a simple moving average smoother of a given time series.
#'
#' The moving average smoother averages the nearest \code{order} periods of
#' each observation. As neighbouring observations of a time series are likely
#' to be similar in value, averaging eliminates some of the randomness in the
#' data, leaving a smooth trend-cycle component. \deqn{\hat{T}_{t} =
#' \frac{1}{m} \sum_{j=-k}^k
#' y_{t+j}}{T[t]=1/m(y[t-k]+y[t-k+1]+\ldots+y[t]+\ldots+y[t+k-1]+y[t+k])} where
#' \eqn{k=\frac{m-1}{2}}{k=(m-1)/2}.
#'
#' When an even \code{order} is specified, the observations averaged will
#' include one more observation from the future than the past (k is rounded
#' up). If \code{centre} is \code{TRUE}, the values from two moving averages (where k is
#' rounded up and down respectively) are averaged, centering the moving
#' average.
#'
#' @param x Univariate time series
#' @param order Order of moving average smoother
#' @param centre If TRUE, then the moving average is centred for even orders.
#' @return Numerical time series object containing the simple moving average
#' smoothed values.
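#' For example, with \code{order=4} and \code{centre=TRUE}, the weights applied
#' to the observations are \code{c(1, 2, 2, 2, 1)/8}.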
#' @author Rob J Hyndman #' @seealso \code{\link[stats]{decompose}} #' @keywords ts #' @examples #' #' plot(wineind) #' sm <- ma(wineind,order=12) #' lines(sm,col="red") #' #' @export ma <- function(x, order, centre=TRUE) { if (abs(order - round(order)) > 1e-8) { stop("order must be an integer") } if (order %% 2 == 0 && centre) { # centred and even w <- c(0.5, rep(1, order - 1), 0.5) / order } else { # odd or not centred w <- rep(1, order) / order } return(filter(x, w)) } forecast/R/makeMatrices.R0000644000176200001440000002253514003673410015016 0ustar liggesusers# These functions make the w, F, x and g matrices # # # Author: srazbash ############################################################################### makeTBATSFMatrix <- function(alpha, beta=NULL, small.phi=NULL, seasonal.periods=NULL, k.vector=NULL, gamma.bold.matrix=NULL, ar.coefs=NULL, ma.coefs=NULL) { # 1. Alpha Row F <- matrix(1, nrow = 1, ncol = 1) if (!is.null(beta)) { F <- cbind(F, matrix(small.phi, nrow = 1, ncol = 1)) } if (!is.null(seasonal.periods)) { tau <- sum(k.vector) * 2 zero.tau <- matrix(0, nrow = 1, ncol = tau) F <- cbind(F, zero.tau) } if (!is.null(ar.coefs)) { p <- length(ar.coefs) ar.coefs <- matrix(ar.coefs, nrow = 1, ncol = p) alpha.phi <- alpha * ar.coefs F <- cbind(F, alpha.phi) } if (!is.null(ma.coefs)) { q <- length(ma.coefs) ma.coefs <- matrix(ma.coefs, nrow = 1, ncol = q) alpha.theta <- alpha * ma.coefs F <- cbind(F, alpha.theta) } # 2. Beta Row if (!is.null(beta)) { beta.row <- matrix(c(0, small.phi), nrow = 1, ncol = 2) if (!is.null(seasonal.periods)) { beta.row <- cbind(beta.row, zero.tau) } if (!is.null(ar.coefs)) { beta.phi <- beta * ar.coefs beta.row <- cbind(beta.row, beta.phi) } if (!is.null(ma.coefs)) { beta.theta <- beta * ma.coefs beta.row <- cbind(beta.row, beta.theta) } F <- rbind(F, beta.row) } # 3. Seasonal Row if (!is.null(seasonal.periods)) { seasonal.row <- t(zero.tau) if (!is.null(beta)) { seasonal.row <- cbind(seasonal.row, seasonal.row) } # Make the A matrix A <- matrix(0, tau, tau) last.pos <- 0 for (i in 1:length(k.vector)) { if (seasonal.periods[i] != 2) { C <- .Call("makeCIMatrix", k_s = as.integer(k.vector[i]), m_s = as.double(seasonal.periods[i]), PACKAGE = "forecast") } else { C <- matrix(0, 1, 1) } S <- .Call("makeSIMatrix", k_s = as.integer(k.vector[i]), m_s = as.double(seasonal.periods[i]), PACKAGE = "forecast") # C <- matrix(0,k.vector[i],k.vector[i]) # for(j in 1:k.vector[i]) { # l <- round((2*pi*j/seasonal.periods[i]), digits=15) # C[j,j] <- cos(l) # } # S <- matrix(0,k.vector[i],k.vector[i]) # for(j in 1:k.vector[i]) { # S[j,j] <- sin(2*pi*j/seasonal.periods[i]) # } # print(C) # print(S) Ai <- .Call("makeAIMatrix", C_s = C, S_s = S, k_s = as.integer(k.vector[i]), PACKAGE = "forecast") A[(last.pos + 1):(last.pos + (2 * k.vector[i])), (last.pos + 1):(last.pos + (2 * k.vector[i]))] <- Ai last.pos <- last.pos + (2 * k.vector[i]) } seasonal.row <- cbind(seasonal.row, A) if (!is.null(ar.coefs)) { B <- t(gamma.bold.matrix) %*% ar.coefs seasonal.row <- cbind(seasonal.row, B) } if (!is.null(ma.coefs)) { C <- t(gamma.bold.matrix) %*% ma.coefs seasonal.row <- cbind(seasonal.row, C) } F <- rbind(F, seasonal.row) } # 4. 
AR() Rows if (!is.null(ar.coefs)) { # p <- length(ar.coefs) ar.rows <- matrix(0, nrow = p, ncol = 1) if (!is.null(beta)) { ar.rows <- cbind(ar.rows, ar.rows) } if (!is.null(seasonal.periods)) { ar.seasonal.zeros <- matrix(0, nrow = p, ncol = tau) ar.rows <- cbind(ar.rows, ar.seasonal.zeros) } ident <- diag((p - 1)) ident <- cbind(ident, matrix(0, nrow = (p - 1), ncol = 1)) ar.part <- rbind(ar.coefs, ident) ar.rows <- cbind(ar.rows, ar.part) if (!is.null(ma.coefs)) { ma.in.ar <- matrix(0, nrow = p, ncol = q) ma.in.ar[1, ] <- ma.coefs ar.rows <- cbind(ar.rows, ma.in.ar) } F <- rbind(F, ar.rows) } # 5. MA() Rows if (!is.null(ma.coefs)) { ma.rows <- matrix(0, nrow = q, ncol = 1) if (!is.null(beta)) { ma.rows <- cbind(ma.rows, ma.rows) } if (!is.null(seasonal.periods)) { ma.seasonal <- matrix(0, nrow = q, ncol = tau) ma.rows <- cbind(ma.rows, ma.seasonal) } if (!is.null(ar.coefs)) { ar.in.ma <- matrix(0, nrow = q, ncol = p) ma.rows <- cbind(ma.rows, ar.in.ma) } ident <- diag((q - 1)) ident <- cbind(ident, matrix(0, nrow = (q - 1), ncol = 1)) ma.part <- rbind(matrix(0, nrow = 1, ncol = q), ident) ma.rows <- cbind(ma.rows, ma.part) F <- rbind(F, ma.rows) } return(F) } # makeWMatrix <- function(small.phi=NULL, seasonal.periods=NULL, ar.coefs=NULL, ma.coefs=NULL) { # # the.list <- .Call("makeBATSWMatrix", smallPhi_s = small.phi, sPeriods_s = as.integer(seasonal.periods), arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, PACKAGE = "forecast") # # # return(the.list) # # } # makeGMatrix <- function(alpha, beta=NULL, gamma.vector=NULL, seasonal.periods=NULL, p=0, q=0) { # li <- .Call("makeBATSGMatrix", alpha, beta, gamma.vector, as.integer(seasonal.periods), as.integer(p), as.integer(q), PACKAGE="forecast") # # return(li) # } makeFMatrix <- function(alpha, beta=NULL, small.phi=NULL, seasonal.periods=NULL, gamma.bold.matrix=NULL, ar.coefs=NULL, ma.coefs=NULL) { # 1. Alpha Row F <- matrix(1, nrow = 1, ncol = 1) if (!is.null(beta)) { F <- cbind(F, matrix(small.phi, nrow = 1, ncol = 1)) } if (!is.null(seasonal.periods)) { tau <- sum(seasonal.periods) zero.tau <- matrix(0, nrow = 1, ncol = tau) F <- cbind(F, zero.tau) } if (!is.null(ar.coefs)) { p <- length(ar.coefs) ar.coefs <- matrix(ar.coefs, nrow = 1, ncol = p) alpha.phi <- alpha * ar.coefs F <- cbind(F, alpha.phi) } if (!is.null(ma.coefs)) { q <- length(ma.coefs) ma.coefs <- matrix(ma.coefs, nrow = 1, ncol = q) alpha.theta <- alpha * ma.coefs F <- cbind(F, alpha.theta) } # 2. Beta Row if (!is.null(beta)) { beta.row <- matrix(c(0, small.phi), nrow = 1, ncol = 2) if (!is.null(seasonal.periods)) { beta.row <- cbind(beta.row, zero.tau) } if (!is.null(ar.coefs)) { beta.phi <- beta * ar.coefs beta.row <- cbind(beta.row, beta.phi) } if (!is.null(ma.coefs)) { beta.theta <- beta * ma.coefs beta.row <- cbind(beta.row, beta.theta) } F <- rbind(F, beta.row) } # 3. 
Seasonal Row if (!is.null(seasonal.periods)) { seasonal.row <- t(zero.tau) if (!is.null(beta)) { seasonal.row <- cbind(seasonal.row, seasonal.row) } # Make the A matrix for (i in seasonal.periods) { if (i == seasonal.periods[1]) { a.row.one <- matrix(0, nrow = 1, ncol = i) a.row.one[i] <- 1 a.row.two <- cbind(diag((i - 1)), matrix(0, nrow = (i - 1), ncol = 1)) A <- rbind(a.row.one, a.row.two) } else { old.A.rows <- dim(A)[1] old.A.columns <- dim(A)[2] a.row.one <- matrix(0, nrow = 1, ncol = i) a.row.one[i] <- 1 a.row.two <- cbind(diag((i - 1)), matrix(0, nrow = (i - 1), ncol = 1)) Ai <- rbind(a.row.one, a.row.two) A <- rbind(A, matrix(0, nrow = dim(Ai)[1], ncol = old.A.columns)) A <- cbind(A, matrix(0, nrow = dim(A)[1], ncol = dim(Ai)[2])) A[((old.A.rows + 1):(old.A.rows + dim(Ai)[1])), ((old.A.columns + 1):(old.A.columns + dim(Ai)[2]))] <- Ai } } seasonal.row <- cbind(seasonal.row, A) if (!is.null(ar.coefs)) { B <- t(gamma.bold.matrix) %*% ar.coefs seasonal.row <- cbind(seasonal.row, B) } if (!is.null(ma.coefs)) { C <- t(gamma.bold.matrix) %*% ma.coefs seasonal.row <- cbind(seasonal.row, C) } F <- rbind(F, seasonal.row) } # 4. AR() Rows if (!is.null(ar.coefs)) { # p <- length(ar.coefs) ar.rows <- matrix(0, nrow = p, ncol = 1) if (!is.null(beta)) { ar.rows <- cbind(ar.rows, ar.rows) } if (!is.null(seasonal.periods)) { ar.seasonal.zeros <- matrix(0, nrow = p, ncol = tau) ar.rows <- cbind(ar.rows, ar.seasonal.zeros) } ident <- diag((p - 1)) ident <- cbind(ident, matrix(0, nrow = (p - 1), ncol = 1)) ar.part <- rbind(ar.coefs, ident) ar.rows <- cbind(ar.rows, ar.part) if (!is.null(ma.coefs)) { ma.in.ar <- matrix(0, nrow = p, ncol = q) ma.in.ar[1, ] <- ma.coefs ar.rows <- cbind(ar.rows, ma.in.ar) } F <- rbind(F, ar.rows) } # 5. MA() Rows if (!is.null(ma.coefs)) { ma.rows <- matrix(0, nrow = q, ncol = 1) if (!is.null(beta)) { ma.rows <- cbind(ma.rows, ma.rows) } if (!is.null(seasonal.periods)) { ma.seasonal <- matrix(0, nrow = q, ncol = tau) ma.rows <- cbind(ma.rows, ma.seasonal) } if (!is.null(ar.coefs)) { ar.in.ma <- matrix(0, nrow = q, ncol = p) ma.rows <- cbind(ma.rows, ar.in.ma) } ident <- diag((q - 1)) ident <- cbind(ident, matrix(0, nrow = (q - 1), ncol = 1)) ma.part <- rbind(matrix(0, nrow = 1, ncol = q), ident) ma.rows <- cbind(ma.rows, ma.part) F <- rbind(F, ma.rows) } return(F) } makeXMatrix <- function(l, b=NULL, s.vector=NULL, d.vector=NULL, epsilon.vector=NULL) { x.transpose <- matrix(l, nrow = 1, ncol = 1) if (!is.null(b)) { x.transpose <- cbind(x.transpose, matrix(b, nrow = 1, ncol = 1)) } if (!is.null(s.vector)) { x.transpose <- cbind(x.transpose, matrix(s.vector, nrow = 1, ncol = length(s.vector))) } if (!is.null(d.vector)) { x.transpose <- cbind(x.transpose, matrix(d.vector, nrow = 1, ncol = length(d.vector))) } if (!is.null(epsilon.vector)) { x.transpose <- cbind(x.transpose, matrix(epsilon.vector, nrow = 1, ncol = length(epsilon.vector))) } x <- t(x.transpose) return(list(x = x, x.transpose = x.transpose)) } forecast/R/guerrero.R0000644000176200001440000000765214026463047014256 0ustar liggesusers# This R script contains code for extracting the Box-Cox # parameter, lambda, using Guerrero's method (1993). 
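#
# Guerrero's method chooses lambda to minimise the coefficient of variation of
# the ratios sd/mean^(1-lambda), computed across subseries of seasonal length.
# A minimal standalone sketch of the idea (illustrative only; the package
# version below also handles non-seasonal data and missing values):
#
#   cv_for_lambda <- function(lam, x, period) {
#     nobs <- period * (length(x) %/% period)         # complete periods only
#     mat <- matrix(tail(x, nobs), nrow = period)     # one column per period
#     rat <- apply(mat, 2, sd) / apply(mat, 2, mean)^(1 - lam)
#     sd(rat) / mean(rat)                             # coefficient of variation
#   }
#   optimize(cv_for_lambda, c(-1, 2), x = as.numeric(AirPassengers), period = 12)$minimum
#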
# Written by Leanne Chhay # guer.cv computes the coefficient of variation # Input: # lam = lambda # x = original time series as a time series object # Output: coefficient of variation guer.cv <- function(lam, x, nonseasonal.length=2) { period <- round(max(nonseasonal.length, frequency(x))) nobsf <- length(x) nyr <- floor(nobsf / period) nobst <- floor(nyr * period) x.mat <- matrix(x[(nobsf - nobst + 1):nobsf], period, nyr) x.mean <- apply(x.mat, 2, mean, na.rm = TRUE) x.sd <- apply(x.mat, 2, sd, na.rm = TRUE) x.rat <- x.sd / x.mean ^ (1 - lam) return(sd(x.rat, na.rm = TRUE) / mean(x.rat, na.rm = TRUE)) } # guerrero extracts the required lambda # Input: x = original time series as a time series object # Output: lambda that minimises the coefficient of variation guerrero <- function(x, lower=-1, upper=2, nonseasonal.length=2) { if(any(x <= 0, na.rm = TRUE)) warning("Guerrero's method for selecting a Box-Cox parameter (lambda) is given for strictly positive data.") return(optimize( guer.cv, c(lower, upper), x = x, nonseasonal.length = nonseasonal.length )$minimum) } # Modified version of boxcox from MASS package bcloglik <- function(x, lower=-1, upper=2) { n <- length(x) if (any(x <= 0, na.rm = TRUE)) { stop("x must be positive") } logx <- log(na.omit(c(x))) xdot <- exp(mean(logx)) if (all(class(x) != "ts")) { fit <- lm(x ~ 1, data = data.frame(x = x), na.action = na.exclude) } else if (frequency(x) > 1) { fit <- tslm(x ~ trend + season, data = data.frame(x = x)) } else { fit <- tslm(x ~ trend, data = data.frame(x = x)) } xqr <- fit$qr lambda <- seq(lower, upper, by = .05) xl <- loglik <- as.vector(lambda) m <- length(xl) x <- na.omit(c(x)) for (i in 1L:m) { if (abs(la <- xl[i]) > 0.02) { xt <- (x ^ la - 1) / la } else { xt <- logx * (1 + (la * logx) / 2 * (1 + (la * logx) / 3 * (1 + (la * logx) / 4))) } loglik[i] <- -n / 2 * log(sum(qr.resid(xqr, xt / xdot ^ (la - 1)) ^ 2)) } return(xl[which.max(loglik)]) } #' Automatic selection of Box Cox transformation parameter #' #' If \code{method=="guerrero"}, Guerrero's (1993) method is used, where lambda #' minimizes the coefficient of variation for subseries of \code{x}. #' #' If \code{method=="loglik"}, the value of lambda is chosen to maximize the #' profile log likelihood of a linear model fitted to \code{x}. For #' non-seasonal data, a linear time trend is fitted while for seasonal data, a #' linear time trend with seasonal dummy variables is used. #' #' #' @param x a numeric vector or time series of class \code{ts} #' @param method Choose method to be used in calculating lambda. #' @param lower Lower limit for possible lambda values. #' @param upper Upper limit for possible lambda values. #' @return a number indicating the Box-Cox transformation parameter. #' @author Leanne Chhay and Rob J Hyndman #' @seealso \code{\link{BoxCox}} #' @references Box, G. E. P. and Cox, D. R. (1964) An analysis of #' transformations. \emph{JRSS B} \bold{26} 211--246. #' #' Guerrero, V.M. (1993) Time-series analysis supported by power #' transformations. \emph{Journal of Forecasting}, \bold{12}, 37--48. 
#' @keywords ts #' @examples #' #' lambda <- BoxCox.lambda(AirPassengers,lower=0) #' air.fit <- Arima(AirPassengers, order=c(0,1,1), #' seasonal=list(order=c(0,1,1),period=12), lambda=lambda) #' plot(forecast(air.fit)) #' #' @export BoxCox.lambda <- function(x, method=c("guerrero", "loglik"), lower=-1, upper=2) { if (any(x <= 0, na.rm = TRUE)) { lower <- max(lower, 0) } if (length(x) <= 2 * frequency(x)) { return(1) } # Not enough data to do much more than this # stop("All values must be positive") method <- match.arg(method) if (method == "loglik") { return(bcloglik(x, lower, upper)) } else { return(guerrero(x, lower, upper)) } } forecast/R/fitTBATS.R0000644000176200001440000005053214003673410013767 0ustar liggesusersfitPreviousTBATSModel <- function(y, model, biasadj=FALSE) { seasonal.periods <- model$seasonal.periods if (is.null(seasonal.periods) == FALSE) { seasonal.periods <- sort(seasonal.periods) } # Get the parameters out of the param.vector paramz <- unParameteriseTBATS(model$parameters$vect, model$parameters$control) lambda <- paramz$lambda alpha <- paramz$alpha beta.v <- paramz$beta if (!is.null(beta.v)) { adj.beta <- 1 } else { adj.beta <- 0 } small.phi <- paramz$small.phi gamma.one.v <- paramz$gamma.one.v gamma.two.v <- paramz$gamma.two.v if (!is.null(paramz$ar.coefs)) { p <- length(paramz$ar.coefs) ar.coefs <- matrix(paramz$ar.coefs, nrow = 1, ncol = p) } else { ar.coefs <- NULL p <- 0 } if (!is.null(paramz$ma.coefs)) { q <- length(paramz$ma.coefs) ma.coefs <- matrix(paramz$ma.coefs, nrow = 1, ncol = q) } else { ma.coefs <- NULL q <- 0 } if (!is.null(seasonal.periods)) { tau <- as.integer(2 * sum(model$k.vector)) gamma.bold <- matrix(0, nrow = 1, ncol = (2 * sum(model$k.vector))) } else { tau <- as.integer(0) gamma.bold <- NULL } g <- matrix(0, nrow = ((2 * sum(model$k.vector)) + 1 + adj.beta + p + q), ncol = 1) if (p != 0) { g[(1 + adj.beta + tau + 1), 1] <- 1 } if (q != 0) { g[(1 + adj.beta + tau + p + 1), 1] <- 1 } y.touse <- y if (is.null(lambda) == FALSE) { y.touse <- BoxCox(y, lambda = lambda) lambda <- attr(y.touse, "lambda") } ## Calculate the variance: # 1. Re-set up the matrices w <- .Call("makeTBATSWMatrix", smallPhi_s = small.phi, kVector_s = model$k.vector, arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, tau_s = tau, PACKAGE = "forecast") if (!is.null(gamma.bold)) { .Call("updateTBATSGammaBold", gammaBold_s = gamma.bold, kVector_s = model$k.vector, gammaOne_s = gamma.one.v, gammaTwo_s = gamma.two.v, PACKAGE = "forecast") } .Call("updateTBATSGMatrix", g_s = g, gammaBold_s = gamma.bold, alpha_s = alpha, beta_s = beta.v, PACKAGE = "forecast") F <- makeTBATSFMatrix(alpha = alpha, beta = beta.v, small.phi = small.phi, seasonal.periods = seasonal.periods, k.vector = model$k.vector, gamma.bold.matrix = gamma.bold, ar.coefs = ar.coefs, ma.coefs = ma.coefs) .Call("updateFMatrix", F, small.phi, alpha, beta.v, gamma.bold, ar.coefs, ma.coefs, tau, PACKAGE = "forecast") # 2. Calculate! 
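  # calcModel() runs the linear innovations state space recursion
  #   y.hat[t] = w' x[t-1];  e[t] = y[t] - y.hat[t];  x[t] = F x[t-1] + g e[t],
  # so `e` below holds the one-step in-sample errors from which the variance
  # estimate is computed.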
fitted.values.and.errors <- calcModel(y.touse, model$seed.states, F, g, w) e <- fitted.values.and.errors$e fitted.values <- fitted.values.and.errors$y.hat variance <- sum((e * e)) / length(y) if (!is.null(lambda)) { fitted.values <- InvBoxCox(fitted.values, lambda = lambda, biasadj, variance) } model.for.output <- model model.for.output$variance <- variance model.for.output$fitted.values <- ts(c(fitted.values)) model.for.output$errors <- ts(c(e)) tsp(model.for.output$fitted.values) <- tsp(model.for.output$errors) <- tsp(y) model.for.output$x <- fitted.values.and.errors$x model.for.output$y <- y return(model.for.output) } fitSpecificTBATS <- function(y, use.box.cox, use.beta, use.damping, seasonal.periods=NULL, k.vector=NULL, starting.params=NULL, x.nought=NULL, ar.coefs=NULL, ma.coefs=NULL, init.box.cox=NULL, bc.lower=0, bc.upper=1, biasadj=FALSE) { if (!is.null(seasonal.periods)) { seasonal.periods <- sort(seasonal.periods) } ## Meaning/purpose of the first if() statement: If this is the first pass, then use default starting values. Else if it is the second pass, then use the values form the first pass as starting values. if (is.null(starting.params)) { ## Check for the existence of ARMA() coefficients if (!is.null(ar.coefs)) { p <- length(ar.coefs) } else { p <- 0 } if (!is.null(ma.coefs)) { q <- length(ma.coefs) } else { q <- 0 } # Calculate starting values: alpha <- 0.09 if (use.beta) { adj.beta <- 1 beta.v <- 0.05 b <- 0.00 if (use.damping) { small.phi <- .999 } else { small.phi <- 1 } } else { adj.beta <- 0 beta.v <- NULL b <- NULL small.phi <- NULL use.damping <- FALSE } if (!is.null(seasonal.periods)) { gamma.one.v <- rep(0, length(k.vector)) gamma.two.v <- rep(0, length(k.vector)) s.vector <- numeric(2 * sum(k.vector)) k.vector <- as.integer(k.vector) } else { gamma.one.v <- NULL gamma.two.v <- NULL s.vector <- NULL } if (use.box.cox) { if (!is.null(init.box.cox)) { lambda <- init.box.cox } else { lambda <- BoxCox.lambda(y, lower = 0, upper = 1.5) } y.transformed <- BoxCox(y, lambda = lambda) lambda <- attr(y.transformed, "lambda") } else { # the "else" is not needed at the moment lambda <- NULL } } else { paramz <- unParameteriseTBATS(starting.params$vect, starting.params$control) lambda <- paramz$lambda alpha <- paramz$alpha beta.v <- paramz$beta if (!is.null(beta.v)) { adj.beta <- 1 } else { adj.beta <- 0 } b <- 0 small.phi <- paramz$small.phi gamma.one.v <- paramz$gamma.one.v gamma.two.v <- paramz$gamma.two.v if (!is.null(seasonal.periods)) { s.vector <- numeric(2 * sum(k.vector)) } else { s.vector <- NULL } # ar.coefs <- paramz$ar.coefs # ma.coefs <- paramz$ma.coefs ## Check for the existence of ARMA() coefficients if (!is.null(ar.coefs)) { p <- length(ar.coefs) } else { p <- 0 } if (!is.null(ma.coefs)) { q <- length(ma.coefs) } else { q <- 0 } } if (is.null(x.nought)) { # Start with the seed states equal to zero if (!is.null(ar.coefs)) { d.vector <- numeric(length(ar.coefs)) } else { d.vector <- NULL } if (!is.null(ma.coefs)) { epsilon.vector <- numeric(length(ma.coefs)) } else { epsilon.vector <- NULL } x.nought <- makeXMatrix(l = 0, b = b, s.vector = s.vector, d.vector = d.vector, epsilon.vector = epsilon.vector)$x } # Make the parameter vector parameterise param.vector <- parameterise(alpha = alpha, beta.v = beta.v, small.phi = small.phi, gamma.v = cbind(gamma.one.v, gamma.two.v), lambda = lambda, ar.coefs = ar.coefs, ma.coefs = ma.coefs) par.scale <- makeParscale(param.vector$control) if (!is.null(seasonal.periods)) { tau <- as.integer(2 * sum(k.vector)) } else { tau <- 
as.integer(0) } w <- .Call("makeTBATSWMatrix", smallPhi_s = small.phi, kVector_s = k.vector, arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, tau_s = tau, PACKAGE = "forecast") if (!is.null(seasonal.periods)) { gamma.bold <- matrix(0, nrow = 1, ncol = (2 * sum(k.vector))) .Call("updateTBATSGammaBold", gammaBold_s = gamma.bold, kVector_s = k.vector, gammaOne_s = gamma.one.v, gammaTwo_s = gamma.two.v, PACKAGE = "forecast") } else { gamma.bold <- NULL } g <- matrix(0, nrow = ((2 * sum(k.vector)) + 1 + adj.beta + p + q), ncol = 1) if (p != 0) { g[(1 + adj.beta + tau + 1), 1] <- 1 } if (q != 0) { g[(1 + adj.beta + tau + p + 1), 1] <- 1 } .Call("updateTBATSGMatrix", g_s = g, gammaBold_s = gamma.bold, alpha_s = alpha, beta_s = beta.v, PACKAGE = "forecast") F <- makeTBATSFMatrix(alpha = alpha, beta = beta.v, small.phi = small.phi, seasonal.periods = seasonal.periods, k.vector = k.vector, gamma.bold.matrix = gamma.bold, ar.coefs = ar.coefs, ma.coefs = ma.coefs) D <- F - g %*% w$w.transpose #### # Set up environment opt.env <- new.env() assign("F", F, envir = opt.env) assign("w.transpose", w$w.transpose, envir = opt.env) assign("g", g, envir = opt.env) assign("gamma.bold", gamma.bold, envir = opt.env) assign("k.vector", k.vector, envir = opt.env) assign("y", matrix(y, nrow = 1, ncol = length(y)), envir = opt.env) assign("y.hat", matrix(0, nrow = 1, ncol = length(y)), envir = opt.env) assign("e", matrix(0, nrow = 1, ncol = length(y)), envir = opt.env) assign("x", matrix(0, nrow = length(x.nought), ncol = length(y)), envir = opt.env) ## Set up matrices to find the seed states if (use.box.cox) { y.transformed <- BoxCox(y, lambda = lambda) lambda <- attr(y.transformed, "lambda") .Call("calcTBATSFaster", ys = matrix(y.transformed, nrow = 1, ncol = length(y.transformed)), yHats = opt.env$y.hat, wTransposes = opt.env$w.transpose, Fs = opt.env$F, xs = opt.env$x, gs = opt.env$g, es = opt.env$e, xNought_s = x.nought, PACKAGE = "forecast") y.tilda <- opt.env$e } else { .Call("calcTBATSFaster", ys = opt.env$y, yHats = opt.env$y.hat, wTransposes = opt.env$w.transpose, Fs = opt.env$F, xs = opt.env$x, gs = opt.env$g, es = opt.env$e, xNought_s = x.nought, PACKAGE = "forecast") y.tilda <- opt.env$e } w.tilda.transpose <- matrix(0, nrow = length(y), ncol = ncol(w$w.transpose)) w.tilda.transpose[1, ] <- w$w.transpose w.tilda.transpose <- .Call("calcWTilda", wTildaTransposes = w.tilda.transpose, Ds = D, PACKAGE = "forecast") # Remove the AR() and MA() bits if they exist if ((p != 0) | (q != 0)) { end.cut <- ncol(w.tilda.transpose) start.cut <- end.cut - (p + q) + 1 w.tilda.transpose <- w.tilda.transpose[, -c(start.cut:end.cut)] } x.nought <- lm(t(y.tilda) ~ w.tilda.transpose - 1)$coefficients x.nought <- matrix(x.nought, nrow = length(x.nought), ncol = 1) ## Replace the AR() and MA() bits if they exist if ((p != 0) | (q != 0)) { arma.seed.states <- numeric((p + q)) arma.seed.states <- matrix(arma.seed.states, nrow = length(arma.seed.states), ncol = 1) x.nought <- rbind(x.nought, arma.seed.states) } ## Optimisation if (use.box.cox) { # Un-transform the seed states assign("x.nought.untransformed", InvBoxCox(x.nought, lambda = lambda), envir = opt.env) # Optimise the likelihood function optim.like <- optim( par = param.vector$vect, fn = calcLikelihoodTBATS, method = "Nelder-Mead", opt.env = opt.env, use.beta = use.beta, use.small.phi = use.damping, seasonal.periods = seasonal.periods, param.control = param.vector$control, p = p, q = q, tau = tau, bc.lower = bc.lower, bc.upper = bc.upper, control = list(maxit = (100 * 
length(param.vector$vect) ^ 2), parscale = par.scale) ) # Get the parameters out of the param.vector paramz <- unParameteriseTBATS(optim.like$par, param.vector$control) lambda <- paramz$lambda alpha <- paramz$alpha beta.v <- paramz$beta small.phi <- paramz$small.phi gamma.one.v <- paramz$gamma.one.v gamma.two.v <- paramz$gamma.two.v if (!is.null(paramz$ar.coefs)) { p <- length(paramz$ar.coefs) ar.coefs <- matrix(paramz$ar.coefs, nrow = 1, ncol = p) } else { ar.coefs <- NULL p <- 0 } if (!is.null(paramz$ma.coefs)) { q <- length(paramz$ma.coefs) ma.coefs <- matrix(paramz$ma.coefs, nrow = 1, ncol = q) } else { ma.coefs <- NULL q <- 0 } # Transform the seed states x.nought <- BoxCox(opt.env$x.nought.untransformed, lambda = lambda) lambda <- attr(x.nought, "lambda") ## Calculate the variance: # 1. Re-set up the matrices w <- .Call("makeTBATSWMatrix", smallPhi_s = small.phi, kVector_s = k.vector, arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, tau_s = tau, PACKAGE = "forecast") if (!is.null(gamma.bold)) { .Call("updateTBATSGammaBold", gammaBold_s = gamma.bold, kVector_s = k.vector, gammaOne_s = gamma.one.v, gammaTwo_s = gamma.two.v, PACKAGE = "forecast") } .Call("updateTBATSGMatrix", g_s = g, gammaBold_s = gamma.bold, alpha_s = alpha, beta_s = beta.v, PACKAGE = "forecast") .Call("updateFMatrix", F, small.phi, alpha, beta.v, gamma.bold, ar.coefs, ma.coefs, tau, PACKAGE = "forecast") # 2. Calculate! y.transformed <- BoxCox(y, lambda = lambda) lambda <- attr(y.transformed, "lambda") fitted.values.and.errors <- calcModel(y.transformed, x.nought, F, g, w) e <- fitted.values.and.errors$e variance <- sum((e * e)) / length(y) fitted.values <- InvBoxCox(fitted.values.and.errors$y.hat, lambda = lambda, biasadj, variance) attr(lambda, "biasadj") <- biasadj # e <- InvBoxCox(e, lambda=lambda) ee <- y - fitted.values } else { # else if we are not using the Box-Cox transformation # Optimise the likelihood function if (length(param.vector$vect) > 1) { optim.like <- optim(par = param.vector$vect, fn = calcLikelihoodNOTransformedTBATS, method = "Nelder-Mead", opt.env = opt.env, x.nought = x.nought, use.beta = use.beta, use.small.phi = use.damping, seasonal.periods = seasonal.periods, param.control = param.vector$control, p = p, q = q, tau = tau, control = list(maxit = (100 * length(param.vector$vect) ^ 2), parscale = par.scale)) } else { optim.like <- optim(par = param.vector$vect, fn = calcLikelihoodNOTransformedTBATS, method = "BFGS", opt.env = opt.env, x.nought = x.nought, use.beta = use.beta, use.small.phi = use.damping, seasonal.periods = seasonal.periods, param.control = param.vector$control, p = p, q = q, tau = tau, control = list(parscale = par.scale)) } # Get the parameters out of the param.vector paramz <- unParameteriseTBATS(optim.like$par, param.vector$control) lambda <- paramz$lambda alpha <- paramz$alpha beta.v <- paramz$beta small.phi <- paramz$small.phi gamma.one.v <- paramz$gamma.one.v gamma.two.v <- paramz$gamma.two.v if (!is.null(paramz$ar.coefs)) { p <- length(paramz$ar.coefs) ar.coefs <- matrix(paramz$ar.coefs, nrow = 1, ncol = p) } else { ar.coefs <- NULL p <- 0 } if (!is.null(paramz$ma.coefs)) { q <- length(paramz$ma.coefs) ma.coefs <- matrix(paramz$ma.coefs, nrow = 1, ncol = q) } else { ma.coefs <- NULL q <- 0 } ## Calculate the variance: # 1. 
Re-set up the matrices w <- .Call("makeTBATSWMatrix", smallPhi_s = small.phi, kVector_s = k.vector, arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, tau_s = tau, PACKAGE = "forecast") if (!is.null(gamma.bold)) { .Call("updateTBATSGammaBold", gammaBold_s = gamma.bold, kVector_s = k.vector, gammaOne_s = gamma.one.v, gammaTwo_s = gamma.two.v, PACKAGE = "forecast") } .Call("updateTBATSGMatrix", g_s = g, gammaBold_s = gamma.bold, alpha_s = alpha, beta_s = beta.v, PACKAGE = "forecast") .Call("updateFMatrix", F, small.phi, alpha, beta.v, gamma.bold, ar.coefs, ma.coefs, tau, PACKAGE = "forecast") # 2. Calculate! fitted.values.and.errors <- calcModel(y, x.nought, F, g, w) e <- fitted.values.and.errors$e fitted.values <- fitted.values.and.errors$y.hat variance <- sum((e * e)) / length(y) } # Get the likelihood likelihood <- optim.like$value # Calculate the AIC aic <- likelihood + 2 * (length(param.vector$vect) + nrow(x.nought)) # Make a list object fits <- ts(c(fitted.values)) e <- ts(c(e)) tsp(fits) <- tsp(e) <- tsp(y) model.for.output <- list( lambda = lambda, alpha = alpha, beta = beta.v, damping.parameter = small.phi, gamma.one.values = gamma.one.v, gamma.two.values = gamma.two.v, ar.coefficients = ar.coefs, ma.coefficients = ma.coefs, likelihood = likelihood, optim.return.code = optim.like$convergence, variance = variance, AIC = aic, parameters = list(vect = optim.like$par, control = param.vector$control), seed.states = x.nought, fitted.values = fits, errors = e, x = fitted.values.and.errors$x, seasonal.periods = seasonal.periods, k.vector = k.vector, y = y, p = p, q = q ) class(model.for.output) <- c("tbats", "bats") return(model.for.output) } calcLikelihoodTBATS <- function(param.vector, opt.env, use.beta, use.small.phi, seasonal.periods, param.control, p=0, q=0, tau=0, bc.lower=0, bc.upper=1) { # param vector should be as follows: Box-Cox.parameter, alpha, beta, small.phi, gamma.vector, ar.coefs, ma.coefs # Put the components of the param.vector into meaningful individual variables paramz <- unParameteriseTBATS(param.vector, param.control) box.cox.parameter <- paramz$lambda alpha <- paramz$alpha beta.v <- paramz$beta small.phi <- paramz$small.phi gamma.one.v <- paramz$gamma.one.v gamma.two.v <- paramz$gamma.two.v ar.coefs <- paramz$ar.coefs ma.coefs <- paramz$ma.coefs if (!is.null(paramz$ar.coefs)) { p <- length(paramz$ar.coefs) ar.coefs <- matrix(paramz$ar.coefs, nrow = 1, ncol = p) } else { ar.coefs <- NULL p <- 0 } if (!is.null(paramz$ma.coefs)) { q <- length(paramz$ma.coefs) ma.coefs <- matrix(paramz$ma.coefs, nrow = 1, ncol = q) } else { ma.coefs <- NULL q <- 0 } x.nought <- BoxCox(opt.env$x.nought.untransformed, lambda = box.cox.parameter) lambda <- attr(x.nought, "lambda") .Call("updateWtransposeMatrix", wTranspose_s = opt.env$w.transpose, smallPhi_s = small.phi, tau_s = as.integer(tau), arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, p_s = as.integer(p), q_s = as.integer(q), PACKAGE = "forecast") if (!is.null(opt.env$gamma.bold)) { .Call("updateTBATSGammaBold", gammaBold_s = opt.env$gamma.bold, kVector_s = opt.env$k.vector, gammaOne_s = gamma.one.v, gammaTwo_s = gamma.two.v) } .Call("updateTBATSGMatrix", g_s = opt.env$g, gammaBold_s = opt.env$gamma.bold, alpha_s = alpha, beta_s = beta.v, PACKAGE = "forecast") .Call("updateFMatrix", opt.env$F, small.phi, alpha, beta.v, opt.env$gamma.bold, ar.coefs, ma.coefs, tau, PACKAGE = "forecast") mat.transformed.y <- BoxCox(opt.env$y, box.cox.parameter) lambda <- attr(mat.transformed.y, "lambda") n <- ncol(opt.env$y) .Call("calcTBATSFaster", ys = 
mat.transformed.y, yHats = opt.env$y.hat, wTransposes = opt.env$w.transpose, Fs = opt.env$F, xs = opt.env$x, gs = opt.env$g, es = opt.env$e, xNought_s = x.nought, PACKAGE = "forecast") ## #### #################################################################### log.likelihood <- n * log(sum(opt.env$e ^ 2)) - 2 * (box.cox.parameter - 1) * sum(log(opt.env$y)) if (is.na(log.likelihood)) { # Not sure why this would occur return(Inf) } assign("D", (opt.env$F - opt.env$g %*% opt.env$w.transpose), envir = opt.env) if (checkAdmissibility(opt.env, box.cox = box.cox.parameter, small.phi = small.phi, ar.coefs = ar.coefs, ma.coefs = ma.coefs, tau = sum(seasonal.periods), bc.lower = bc.lower, bc.upper = bc.upper)) { return(log.likelihood) } else { return(Inf) } } calcLikelihoodNOTransformedTBATS <- function(param.vector, opt.env, x.nought, use.beta, use.small.phi, seasonal.periods, param.control, p=0, q=0, tau=0) { # The likelihood function without the Box-Cox Transformation # param vector should be as follows: alpha, beta, small.phi, gamma.vector, ar.coefs, ma.coefs # Put the components of the param.vector into meaningful individual variables paramz <- unParameteriseTBATS(param.vector, param.control) box.cox.parameter <- paramz$lambda alpha <- paramz$alpha beta.v <- paramz$beta small.phi <- paramz$small.phi gamma.one.v <- paramz$gamma.one.v gamma.two.v <- paramz$gamma.two.v if (!is.null(paramz$ar.coefs)) { p <- length(paramz$ar.coefs) ar.coefs <- matrix(paramz$ar.coefs, nrow = 1, ncol = p) } else { ar.coefs <- NULL p <- 0 } if (!is.null(paramz$ma.coefs)) { q <- length(paramz$ma.coefs) ma.coefs <- matrix(paramz$ma.coefs, nrow = 1, ncol = q) } else { ma.coefs <- NULL q <- 0 } .Call("updateWtransposeMatrix", wTranspose_s = opt.env$w.transpose, smallPhi_s = small.phi, tau_s = as.integer(tau), arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, p_s = as.integer(p), q_s = as.integer(q), PACKAGE = "forecast") if (!is.null(opt.env$gamma.bold)) { .Call("updateTBATSGammaBold", gammaBold_s = opt.env$gamma.bold, kVector_s = opt.env$k.vector, gammaOne_s = gamma.one.v, gammaTwo_s = gamma.two.v) } .Call("updateTBATSGMatrix", g_s = opt.env$g, gammaBold_s = opt.env$gamma.bold, alpha_s = alpha, beta_s = beta.v, PACKAGE = "forecast") .Call("updateFMatrix", opt.env$F, small.phi, alpha, beta.v, opt.env$gamma.bold, ar.coefs, ma.coefs, tau, PACKAGE = "forecast") n <- ncol(opt.env$y) .Call("calcTBATSFaster", ys = opt.env$y, yHats = opt.env$y.hat, wTransposes = opt.env$w.transpose, Fs = opt.env$F, xs = opt.env$x, gs = opt.env$g, es = opt.env$e, xNought_s = x.nought, PACKAGE = "forecast") ## #### #################################################################### log.likelihood <- n * log(sum(opt.env$e * opt.env$e)) if (is.na(log.likelihood)) { # Not sure why this would occur return(Inf) } assign("D", (opt.env$F - opt.env$g %*% opt.env$w.transpose), envir = opt.env) if (checkAdmissibility(opt.env = opt.env, box.cox = NULL, small.phi = small.phi, ar.coefs = ar.coefs, ma.coefs = ma.coefs, tau = tau)) { return(log.likelihood) } else { return(Inf) } } forecast/R/attach.R0000644000176200001440000000547714003673410013663 0ustar liggesusers.onAttach <- function(...) 
{ if (!interactive() || stats::runif(1) > 0.2) return() tips <- c( "Use suppressPackageStartupMessages() to eliminate package startup messages.", "Stackoverflow is a great place to get help on R issues:\n http://stackoverflow.com/tags/forecasting+r.", "Crossvalidated is a great place to get help on forecasting issues:\n http://stats.stackexchange.com/tags/forecasting.", "Need help getting started? Try the online textbook FPP:\n http://otexts.com/fpp2/", "Want to stay up-to-date? Read the Hyndsight blog:\n https://robjhyndman.com/hyndsight/", "Want to meet other forecasters? Join the International Institute of Forecasters:\n http://forecasters.org/" ) tip <- sample(tips, 1) msg <- paste("This is forecast", packageVersion("forecast"), "\n ", tip) packageStartupMessage(msg) } register_s3_method <- function(pkg, generic, class, fun = NULL) { stopifnot(is.character(pkg), length(pkg) == 1) stopifnot(is.character(generic), length(generic) == 1) stopifnot(is.character(class), length(class) == 1) if (is.null(fun)) { fun <- get(paste0(generic, ".", class), envir = parent.frame()) } else { stopifnot(is.function(fun)) } if (pkg %in% loadedNamespaces()) { registerS3method(generic, class, fun, envir = asNamespace(pkg)) } # Always register hook in case package is later unloaded & reloaded setHook( packageEvent(pkg, "onLoad"), function(...) { registerS3method(generic, class, fun, envir = asNamespace(pkg)) } ) } overwrite_s3_generic <- function(pkg, generic){ if (pkg %in% loadedNamespaces()) { assign(generic, get(generic, asNamespace(pkg)), envir = asNamespace("forecast")) } # Always register hook in case package is later unloaded & reloaded # setHook( # packageEvent(pkg, "onLoad"), # function(...) { # pkg_env <- asNamespace("forecast") # unlockBinding(generic, pkg_env) # assign(generic, get(generic, asNamespace(pkg)), envir = pkg_env) # lockBinding(generic, pkg_env) # } # ) } #' @importFrom utils methods .onLoad <- function(...) 
{ overwrite_s3_generic("ggplot2", "autolayer") register_s3_method("ggplot2", "autolayer", "ts") register_s3_method("ggplot2", "autolayer", "mts") register_s3_method("ggplot2", "autolayer", "msts") register_s3_method("ggplot2", "autolayer", "forecast") register_s3_method("ggplot2", "autolayer", "mforecast") methods <- strsplit(utils::.S3methods(forecast), ".", fixed = TRUE) overwrite_s3_generic("fabletools", "forecast") for(method in methods){ register_s3_method("fabletools", method[1], method[2]) } methods <- strsplit(utils::.S3methods(accuracy), ".", fixed = TRUE) overwrite_s3_generic("fabletools", "accuracy") for(method in methods){ register_s3_method("fabletools", method[1], method[2]) } invisible() } forecast/R/naive.R0000644000176200001440000002057714133675160013527 0ustar liggesusers# Random walk related forecasts # Based on lagged walks # lag=1 corresponds to standard random walk (i.e., naive forecast) # lag=m corresponds to seasonal naive method lagwalk <- function(y, lag=1, drift=FALSE, lambda=NULL, biasadj=FALSE) { if(!is.ts(y)){ y <- as.ts(y) } origy <- y if (!is.null(lambda)) { y <- BoxCox(y, lambda) lambda <- attr(y, "lambda") } m <- frequency(y) # Complete missing values with lagged values y_na <- which(is.na(y)) y_na <- y_na[y_na>lag] fits <- stats::lag(y, -lag) for(i in y_na){ if(is.na(fits)[i]){ fits[i] <- fits[i-lag] } } fitted <- ts(c(rep(NA, lag), head(fits, -lag)), start = start(y), frequency = m) fitted <- copy_msts(y, fitted) if(drift){ fit <- summary(lm(y-fitted ~ 1, na.action=na.exclude)) b <- fit$coefficients[1,1] b.se <- fit$coefficients[1,2] sigma <- fit$sigma fitted <- fitted + b res <- y - fitted method <- "Lag walk with drift" } else{ res <- y - fitted b <- b.se <- 0 sigma <- sqrt(mean(res^2, na.rm=TRUE)) method <- "Lag walk" } if (!is.null(lambda)) { fitted <- InvBoxCox(fitted, lambda, biasadj, var(res)) attr(lambda, "biasadj") <- biasadj } model <- structure( list( x = origy, fitted = fitted, future = tail(fits, lag), residuals = res, method = method, series = deparse(substitute(y)), sigma2 = sigma^2, par = list(includedrift = drift, drift = b, drift.se = b.se, lag = lag), lambda = lambda, call = match.call() ), class = "lagwalk" ) } #' @export forecast.lagwalk <- function(object, h=10, level=c(80, 95), fan=FALSE, lambda=NULL, simulate=FALSE, bootstrap=FALSE, npaths=5000, biasadj=FALSE, ...) 
{ lag <- object$par$lag fullperiods <- (h-1)/lag+1 steps <- rep(1:fullperiods, rep(lag,fullperiods))[1:h] # Point forecasts fc <- rep(object$future, fullperiods)[1:h] + steps*object$par$drift # Intervals # Adjust prediction intervals to allow for drift coefficient standard error mse <- mean(object$residuals^2, na.rm=TRUE) se <- sqrt(mse*steps + (steps*object$par$drift.se)^2) if(fan) level <- seq(51,99,by=3) else { if(min(level) > 0 & max(level) < 1) level <- 100*level else if(min(level) < 0 | max(level) > 99.99) stop("Confidence limit out of range") } nconf <- length(level) if (simulate | bootstrap) # Compute prediction intervals using simulations { sim <- matrix(NA, nrow = npaths, ncol = h) for (i in 1:npaths) sim[i, ] <- simulate(object, nsim = h, bootstrap = bootstrap, lambda = lambda) lower <- apply(sim, 2, quantile, 0.5 - level / 200, type = 8) upper <- apply(sim, 2, quantile, 0.5 + level / 200, type = 8) if (nconf > 1L) { lower <- t(lower) upper <- t(upper) } else { lower <- matrix(lower, ncol = 1) upper <- matrix(upper, ncol = 1) } } else { z <- qnorm(.5 + level/200) lower <- upper <- matrix(NA,nrow=h,ncol=nconf) for(i in 1:nconf) { lower[,i] <- fc - z[i]*se upper[,i] <- fc + z[i]*se } } if (!is.null(lambda)) { fc <- InvBoxCox(fc, lambda, biasadj, se^2) if(!bootstrap){ # Bootstrap intervals are already backtransformed upper <- InvBoxCox(upper, lambda) lower <- InvBoxCox(lower, lambda) } } # Set attributes fc <- future_msts(object$x, fc) lower <- future_msts(object$x, lower) upper <- future_msts(object$x, upper) colnames(lower) <- colnames(upper) <- paste(level,"%",sep="") return(structure( list( method = object$method, model = object, lambda = lambda, x = object$x, fitted = fitted(object), residuals = residuals(object), series = object$series, mean = fc, level = level, lower = lower, upper = upper ), class = "forecast") ) } #' @export print.lagwalk <- function(x, ...) { cat(paste("Call:", deparse(x$call), "\n\n")) if (x$par$includedrift) { cat(paste("Drift: ", round(x$par$drift, 4), " (se ", round(x$par$drift.se, 4), ")\n", sep = "")) } cat(paste("Residual sd:", round(sqrt(x$sigma2), 4), "\n")) } #' @export fitted.lagwalk <- function(object, ...){ object$fitted } # Random walk #' @rdname naive #' #' @examples #' #' gold.fcast <- rwf(gold[1:60], h=50) #' plot(gold.fcast) #' #' @export rwf <- function(y, h=10, drift=FALSE, level=c(80, 95), fan=FALSE, lambda=NULL, biasadj=FALSE, ..., x=y) { fit <- lagwalk( x, lag = 1, drift = drift, lambda = lambda, biasadj = biasadj ) fc <- forecast(fit, h = h, level = level, fan = fan, lambda = fit$lambda, biasadj = biasadj, ...) fc$model$call <- match.call() fc$series <- deparse(substitute(y)) if (drift) { fc$method <- "Random walk with drift" } else { fc$method <- "Random walk" } return(fc) } #' Naive and Random Walk Forecasts #' #' \code{rwf()} returns forecasts and prediction intervals for a random walk #' with drift model applied to \code{y}. This is equivalent to an ARIMA(0,1,0) #' model with an optional drift coefficient. \code{naive()} is simply a wrapper #' to \code{rwf()} for simplicity. \code{snaive()} returns forecasts and #' prediction intervals from an ARIMA(0,0,0)(0,1,0)m model where m is the #' seasonal period. #' #' The random walk with drift model is \deqn{Y_t=c + Y_{t-1} + Z_t}{Y[t]=c + #' Y[t-1] + Z[t]} where \eqn{Z_t}{Z[t]} is a normal iid error. Forecasts are #' given by \deqn{Y_n(h)=ch+Y_n}{Y[n+h]=ch+Y[n]}. If there is no drift (as in #' \code{naive}), the drift parameter c=0. 
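#' In that case the forecasts are simply the last observed value,
#' \eqn{Y_n(h)=Y_n}{Y[n+h]=Y[n]}.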
Forecast standard errors allow for #' uncertainty in estimating the drift parameter (unlike the corresponding #' forecasts obtained by fitting an ARIMA model directly). #' #' The seasonal naive model is \deqn{Y_t= Y_{t-m} + Z_t}{Y[t]=Y[t-m] + Z[t]} #' where \eqn{Z_t}{Z[t]} is a normal iid error. #' #' @aliases print.naive #' #' @param y a numeric vector or time series of class \code{ts} #' @param h Number of periods for forecasting #' @param drift Logical flag. If TRUE, fits a random walk with drift model. #' @param level Confidence levels for prediction intervals. #' @param fan If TRUE, level is set to seq(51,99,by=3). This is suitable for #' fan plots. #' @param x Deprecated. Included for backwards compatibility. #' @inheritParams forecast #' #' @return An object of class "\code{forecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{naive} or #' \code{snaive}. #' #' An object of class \code{"forecast"} is a list containing at least the #' following elements: \item{model}{A list containing information about the #' fitted model} \item{method}{The name of the forecasting method as a #' character string} \item{mean}{Point forecasts as a time series} #' \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper #' limits for prediction intervals} \item{level}{The confidence values #' associated with the prediction intervals} \item{x}{The original time series #' (either \code{object} itself or the time series used to create the model #' stored as \code{object}).} \item{residuals}{Residuals from the fitted model. #' That is x minus fitted values.} \item{fitted}{Fitted values (one-step #' forecasts)} #' @author Rob J Hyndman #' @seealso \code{\link{Arima}} #' @keywords ts #' @examples #' #' plot(naive(gold,h=50),include=200) #' #' @export naive <- function(y, h=10, level=c(80, 95), fan=FALSE, lambda=NULL, biasadj=FALSE, ..., x=y) { fc <- rwf( x, h = h, level = level, fan = fan, lambda = lambda, drift = FALSE, biasadj = biasadj, ... ) fc$model$call <- match.call() fc$series <- deparse(substitute(y)) fc$method <- "Naive method" return(fc) } #' @rdname naive #' #' @examples #' #' plot(snaive(wineind)) #' #' @export snaive <- function(y, h=2 * frequency(x), level=c(80, 95), fan=FALSE, lambda=NULL, biasadj=FALSE, ..., x=y) { fit <- lagwalk( x, lag = frequency(x), drift = FALSE, lambda = lambda, biasadj = biasadj ) fc <- forecast(fit, h = h, level = level, fan = fan, lambda = fit$lambda, biasadj = biasadj, ...) fc$model$call <- match.call() fc$series <- deparse(substitute(y)) fc$method <- "Seasonal naive method" return(fc) } forecast/R/mstl.R0000644000176200001440000005322214166675341013404 0ustar liggesusers#' Multiple seasonal decomposition #' #' Decompose a time series into seasonal, trend and remainder components. #' Seasonal components are estimated iteratively using STL. Multiple seasonal periods are #' allowed. The trend component is computed for the last iteration of STL. #' Non-seasonal time series are decomposed into trend and remainder only. #' In this case, \code{\link[stats]{supsmu}} is used to estimate the trend. #' Optionally, the time series may be Box-Cox transformed before decomposition. #' Unlike \code{\link[stats]{stl}}, \code{mstl} is completely automated. 
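#'
#' @return A multiple time series object of class \code{mstl} with columns
#' \code{Data}, \code{Trend}, one seasonal column for each seasonal period
#' (e.g., \code{Seasonal12} for monthly data), and \code{Remainder}.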
#' @param x Univariate time series of class \code{msts} or \code{ts}. #' @param iterate Number of iterations to use to refine the seasonal component. #' @param s.window Seasonal windows to be used in the decompositions. If scalar, #' the same value is used for all seasonal components. Otherwise, it should be a vector #' of the same length as the number of seasonal components (or longer). #' @param ... Other arguments are passed to \code{\link[stats]{stl}}. #' @inheritParams forecast #' #' @seealso \code{\link[stats]{stl}}, \code{\link[stats]{supsmu}} #' @examples #' library(ggplot2) #' mstl(taylor) %>% autoplot() #' mstl(AirPassengers, lambda = "auto") %>% autoplot() #' @export mstl <- function(x, lambda = NULL, iterate = 2, s.window = 7+4*seq(6), ...) { # What is x? origx <- x n <- length(x) if ("msts" %in% class(x)) { msts <- attributes(x)$msts if (any(msts >= n / 2)) { warning("Dropping seasonal components with fewer than two full periods.") msts <- msts[msts < n / 2] x <- msts(x, seasonal.periods = msts) } msts <- sort(msts, decreasing = FALSE) } else if ("ts" %in% class(x)) { msts <- frequency(x) iterate <- 1L } else { x <- as.ts(x) msts <- 1L } # Check dimension if (!is.null(dim(x))) { if (NCOL(x) == 1L) { x <- x[, 1] } } # Replace missing values if necessary if (anyNA(x)) { x <- na.interp(x, lambda = lambda) } # Transform if necessary if (!is.null(lambda)) { x <- BoxCox(x, lambda = lambda) lambda <- attr(x, "lambda") } tt <- seq_len(n) # Now fit stl models with only one type of seasonality at a time if (msts[1L] > 1) { seas <- as.list(rep(0, length(msts))) deseas <- x if (length(s.window) == 1L) { s.window <- rep(s.window, length(msts)) } iterate <- pmax(1L, iterate) for (j in seq_len(iterate)) { for (i in seq_along(msts)) { deseas <- deseas + seas[[i]] fit <- stl(ts(deseas, frequency = msts[i]), s.window = s.window[i], ...) seas[[i]] <- msts(seasonal(fit), seasonal.periods = msts) attributes(seas[[i]]) <- attributes(x) deseas <- deseas - seas[[i]] } } trend <- msts(trendcycle(fit), seasonal.periods = msts) } else { msts <- NULL deseas <- x trend <- ts(stats::supsmu(seq_len(n), x)$y) } attributes(trend) <- attributes(x) # Put back NAs deseas[is.na(origx)] <- NA # Estimate remainder remainder <- deseas - trend # Package into matrix output <- cbind(c(origx), c(trend)) if (!is.null(msts)) { for (i in seq_along(msts)) { output <- cbind(output, c(seas[[i]])) } } output <- cbind(output, c(remainder)) colnames(output) <- paste0("V",seq(NCOL(output))) colnames(output)[1L:2L] <- c("Data", "Trend") if (!is.null(msts)) { colnames(output)[2L + seq_along(msts)] <- paste0("Seasonal", round(msts, 2)) } colnames(output)[NCOL(output)] <- "Remainder" output <- copy_msts(origx, output) class(output) <- c("mstl", class(output)) return(output) } #' @rdname autoplot.seas #' @export autoplot.mstl <- function(object, ...) { autoplot.mts(object, facets = TRUE, ylab = "", ...) } #' Forecasting using stl objects #' #' Forecasts of STL objects are obtained by applying a non-seasonal forecasting #' method to the seasonally adjusted data and re-seasonalizing using the last #' year of the seasonal component. #' #' \code{stlm} takes a time series \code{y}, applies an STL decomposition, and #' models the seasonally adjusted data using the model passed as #' \code{modelfunction} or specified using \code{method}. It returns an object #' that includes the original STL decomposition and a time series model fitted #' to the seasonally adjusted data. 
This object can be passed to
#' \code{forecast.stlm} for forecasting.
#'
#' \code{forecast.stlm} forecasts the seasonally adjusted data, then
#' re-seasonalizes the results by adding back the last year of the estimated
#' seasonal component.
#'
#' \code{stlf} combines \code{stlm} and \code{forecast.stlm}. It takes a
#' \code{ts} argument, applies an STL decomposition, models the seasonally
#' adjusted data, reseasonalizes, and returns the forecasts. However, it allows
#' more general forecasting methods to be specified via
#' \code{forecastfunction}.
#'
#' \code{forecast.stl} is similar to \code{stlf} except that it takes the STL
#' decomposition as the first argument, instead of the time series.
#'
#' Note that the prediction intervals ignore the uncertainty associated with
#' the seasonal component. They are computed from the prediction intervals of
#' the seasonally adjusted series, which are then reseasonalized using the
#' last year of the seasonal component.
#'
#' The time series model for the seasonally adjusted data can be specified in
#' \code{stlm} using either \code{method} or \code{modelfunction}. The
#' \code{method} argument provides a shorthand way of specifying
#' \code{modelfunction} for a few special cases. More generally,
#' \code{modelfunction} can be any function whose first argument is a \code{ts}
#' object and which returns an object that can be passed to
#' \code{\link{forecast}}. For example, \code{modelfunction=ar} uses the
#' \code{\link{ar}} function for modelling the seasonally adjusted series.
#'
#' The forecasting method for the seasonally adjusted data can be specified in
#' \code{stlf} and \code{forecast.stl} using either \code{method} or
#' \code{forecastfunction}. The \code{method} argument provides a shorthand way
#' of specifying \code{forecastfunction} for a few special cases. More
#' generally, \code{forecastfunction} can be any function whose first argument
#' is a \code{ts} object, with additional arguments \code{h} and \code{level},
#' which returns an object of class \code{\link{forecast}}. For example,
#' \code{forecastfunction=thetaf} uses the \code{\link{thetaf}} function for
#' forecasting the seasonally adjusted series.
#'
#' @param y A univariate numeric time series of class \code{ts}
#' @param object An object of class \code{stl} or \code{stlm}. Usually the
#' result of a call to \code{\link[stats]{stl}} or \code{stlm}.
#' @param method Method to use for forecasting the seasonally adjusted series.
#' @param modelfunction An alternative way of specifying the function for
#' modelling the seasonally adjusted series. If \code{modelfunction} is not
#' \code{NULL}, then \code{method} is ignored. Otherwise \code{method} is used
#' to specify the time series model to be used.
#' @param model Output from a previous call to \code{stlm}. If a \code{stlm}
#' model is passed, this same model is fitted to \code{y} without re-estimating
#' any parameters.
#' @param forecastfunction An alternative way of specifying the function for
#' forecasting the seasonally adjusted series. If \code{forecastfunction} is
#' not \code{NULL}, then \code{method} is ignored. Otherwise \code{method} is
#' used to specify the forecasting method to be used.
#' @param etsmodel The ets model specification passed to
#' \code{\link[forecast]{ets}}. By default it allows any non-seasonal model. If
#' \code{method!="ets"}, this argument is ignored.
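#' (For instance, \code{etsmodel = "AAN"} would force Holt's linear trend
#' method and \code{etsmodel = "ANN"} would force simple exponential
#' smoothing; these are illustrative values, and the default \code{"ZZN"}
#' lets \code{ets} choose the error and trend components automatically.)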
#' @param xreg Historical regressors to be used in
#' \code{\link[forecast]{auto.arima}()} when \code{method=="arima"}.
#' @param newxreg Future regressors to be used in
#' \code{\link[forecast]{forecast.Arima}()}.
#' @param h Number of periods for forecasting.
#' @param level Confidence levels for prediction intervals.
#' @param fan If \code{TRUE}, level is set to seq(51,99,by=3). This is suitable
#' for fan plots.
#' @param s.window Either the character string ``periodic'' or the span (in
#' lags) of the loess window for seasonal extraction.
#' @param t.window A number to control the smoothness of the trend. See
#' \code{\link[stats]{stl}} for details.
#' @param robust If \code{TRUE}, robust fitting will be used in the loess
#' procedure within \code{\link[stats]{stl}}.
#' @param allow.multiplicative.trend If TRUE, then ETS models with
#' multiplicative trends are allowed. Otherwise, only additive or no trend ETS
#' models are permitted.
#' @param x Deprecated. Included for backwards compatibility.
#' @param ... Other arguments passed to \code{forecast.stl},
#' \code{modelfunction} or \code{forecastfunction}.
#' @inheritParams forecast
#'
#' @return \code{stlm} returns an object of class \code{stlm}. The other
#' functions return objects of class \code{forecast}.
#'
#' There are many methods for working with \code{\link{forecast}} objects
#' including \code{summary} to obtain and print a summary of the results, while
#' \code{plot} produces a plot of the forecasts and prediction intervals. The
#' generic accessor functions \code{fitted.values} and \code{residuals} extract
#' useful features.
#' @author Rob J Hyndman
#' @seealso \code{\link[stats]{stl}}, \code{\link{forecast.ets}},
#' \code{\link{forecast.Arima}}.
#' @keywords ts
#' @examples
#'
#' tsmod <- stlm(USAccDeaths, modelfunction = ar)
#' plot(forecast(tsmod, h = 36))
#'
#' decomp <- stl(USAccDeaths, s.window = "periodic")
#' plot(forecast(decomp))
#' @export
forecast.stl <- function(object, method = c("ets", "arima", "naive", "rwdrift"),
                         etsmodel = "ZZN", forecastfunction = NULL,
                         h = frequency(object$time.series) * 2, level = c(80, 95),
                         fan = FALSE, lambda = NULL, biasadj = NULL,
                         xreg = NULL, newxreg = NULL,
                         allow.multiplicative.trend = FALSE, ...) {
  method <- match.arg(method)
  if (is.null(forecastfunction)) {
    if (method != "arima" && (!is.null(xreg) || !is.null(newxreg))) {
      stop("xreg and newxreg arguments can only be used with ARIMA models")
    }
    if (method == "ets") {
      # Ensure non-seasonal model
      if (substr(etsmodel, 3, 3) != "N") {
        warning("The ETS model must be non-seasonal. I'm ignoring the seasonal component specified.")
        substr(etsmodel, 3, 3) <- "N"
      }
      forecastfunction <- function(x, h, level, ...) {
        fit <- ets(na.interp(x), model = etsmodel, allow.multiplicative.trend = allow.multiplicative.trend, ...)
        return(forecast(fit, h = h, level = level))
      }
    } else if (method == "arima") {
      forecastfunction <- function(x, h, level, ...) {
        fit <- auto.arima(x, xreg = xreg, seasonal = FALSE, ...)
        return(forecast(fit, h = h, level = level, xreg = newxreg))
      }
    } else if (method == "naive") {
      forecastfunction <- function(x, h, level, ...) {
        rwf(x, drift = FALSE, h = h, level = level, ...)
      }
    } else if (method == "rwdrift") {
      forecastfunction <- function(x, h, level, ...) {
        rwf(x, drift = TRUE, h = h, level = level, ...)
} } } if (is.null(xreg) != is.null(newxreg)) { stop("xreg and newxreg arguments must both be supplied") } if (!is.null(newxreg)) { if (NROW(as.matrix(newxreg)) != h) { stop("newxreg should have the same number of rows as the forecast horizon h") } } if (fan) { level <- seq(51, 99, by = 3) } if ("mstl" %in% class(object)) { seasoncolumns <- which(grepl("Season", colnames(object))) nseasons <- length(seasoncolumns) seascomp <- matrix(0, ncol = nseasons, nrow = h) seasonal.periods <- as.numeric(sub("Seasonal","", colnames(object)[seasoncolumns])) n <- NROW(object) for (i in seq(nseasons)) { mp <- seasonal.periods[i] colname <- colnames(object)[seasoncolumns[i]] seascomp[, i] <- rep(object[n - rev(seq_len(mp)) + 1, colname], trunc(1 + (h - 1) / mp))[seq_len(h)] } lastseas <- rowSums(seascomp) xdata <- object[, "Data"] seascols <- grep("Seasonal", colnames(object)) allseas <- rowSumsTS(object[, seascols, drop = FALSE]) series <- NULL } else if ("stl" %in% class(object)) { m <- frequency(object$time.series) n <- NROW(object$time.series) lastseas <- rep(seasonal(object)[n - (m:1) + 1], trunc(1 + (h - 1) / m))[1:h] xdata <- ts(rowSums(object$time.series)) tsp(xdata) <- tsp(object$time.series) allseas <- seasonal(object) series <- deparse(object$call$x) } else { stop("Unknown object class") } # De-seasonalize x.sa <- seasadj(object) # Forecast fcast <- forecastfunction(x.sa, h = h, level = level, ...) # Reseasonalize fcast$mean <- future_msts(xdata, fcast$mean + lastseas) fcast$upper <- future_msts(xdata, fcast$upper + lastseas) fcast$lower <- future_msts(xdata, fcast$lower + lastseas) fcast$x <- xdata fcast$method <- paste("STL + ", fcast$method) fcast$series <- series fcast$fitted <- copy_msts(xdata, fitted(fcast) + allseas) fcast$residuals <- copy_msts(xdata, fcast$x - fcast$fitted) if (!is.null(lambda)) { fcast$x <- InvBoxCox(fcast$x, lambda) fcast$fitted <- InvBoxCox(fcast$fitted, lambda) fcast$mean <- InvBoxCox(fcast$mean, lambda, biasadj, fcast) fcast$lower <- InvBoxCox(fcast$lower, lambda) fcast$upper <- InvBoxCox(fcast$upper, lambda) attr(lambda, "biasadj") <- biasadj fcast$lambda <- lambda } return(fcast) } #' @export forecast.mstl <- function(object, method = c("ets", "arima", "naive", "rwdrift"), etsmodel = "ZZN", forecastfunction = NULL, h = frequency(object) * 2, level = c(80, 95), fan = FALSE, lambda = NULL, biasadj = NULL, xreg = NULL, newxreg = NULL, allow.multiplicative.trend = FALSE, ...) { forecast.stl( object, method = method, etsmodel = etsmodel, forecastfunction = forecastfunction, h = h, level = level, fan = fan, lambda = lambda, biasadj = biasadj, xreg = xreg, newxreg = newxreg, allow.multiplicative.trend = allow.multiplicative.trend, ... ) } # rowSums for mts objects # # Applies rowSums and returns ts with same tsp attributes as input. This # allows the result to be added to other time series with different lengths # but overlapping time indexes. # param mts a matrix or multivariate time series # return a vector of rowsums which is a ts if the \code{mts} is a ts rowSumsTS <- function (mts) { the_tsp <- tsp(mts) ret <- rowSums(mts) if (is.null(the_tsp)){ ret } else { tsp(ret) <- the_tsp as.ts(ret) } } # Function takes time series, does STL decomposition, and fits a model to seasonally adjusted series # But it does not forecast. Instead, the result can be passed to forecast(). 
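# A minimal sketch of that workflow (USAccDeaths is a monthly series from the
# datasets package; method = "arima" is one of the built-in shorthands):
#
#   fit <- stlm(USAccDeaths, method = "arima")
#   fc <- forecast(fit, h = 24)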
#' @rdname forecast.stl #' @export stlm <- function(y, s.window = 7+4*seq(6), robust = FALSE, method = c("ets", "arima"), modelfunction = NULL, model = NULL, etsmodel = "ZZN", lambda = NULL, biasadj = FALSE, xreg = NULL, allow.multiplicative.trend = FALSE, x = y, ...) { method <- match.arg(method) # Check univariate if (NCOL(x) > 1L) { stop("y must be a univariate time series") } else { if (!is.null(ncol(x))) { if (ncol(x) == 1L) { # Probably redundant check x <- x[, 1L] } } } # Check x is a seasonal time series tspx <- tsp(x) if (is.null(tspx)) { stop("y is not a seasonal ts object") } else if (tspx[3] <= 1L) { stop("y is not a seasonal ts object") } # Transform data if necessary origx <- x if (!is.null(lambda)) { x <- BoxCox(x, lambda) lambda <- attr(x, "lambda") } # Do STL decomposition stld <- mstl(x, s.window = s.window, robust = robust) if (!is.null(model)) { if (inherits(model$model, "ets")) { modelfunction <- function(x, ...) { return(ets(x, model = model$model, use.initial.values = TRUE, ...)) } } else if (inherits(model$model, "Arima")) { modelfunction <- function(x, ...) { return(Arima(x, model = model$model, xreg = xreg, ...)) } } else if (!is.null(model$modelfunction)) { if ("model" %in% names(formals(model$modelfunction))) { modelfunction <- function(x, ...) { return(model$modelfunction(x, model = model$model, ...)) } } } if (is.null(modelfunction)) { stop("Unknown model type") } } # Construct modelfunction if not passed as an argument else if (is.null(modelfunction)) { if (method != "arima" && !is.null(xreg)) { stop("xreg arguments can only be used with ARIMA models") } if (method == "ets") { # Ensure non-seasonal model if (substr(etsmodel, 3, 3) != "N") { warning("The ETS model must be non-seasonal. I'm ignoring the seasonal component specified.") substr(etsmodel, 3, 3) <- "N" } modelfunction <- function(x, ...) { return(ets( x, model = etsmodel, allow.multiplicative.trend = allow.multiplicative.trend, ... )) } } else if (method == "arima") { modelfunction <- function(x, ...) { return(auto.arima(x, xreg = xreg, seasonal = FALSE, ...)) } } } # De-seasonalize x.sa <- seasadj(stld) # Model seasonally adjusted data fit <- modelfunction(x.sa, ...) fit$x <- x.sa # Fitted values and residuals seascols <- grep("Seasonal", colnames(stld)) allseas <- rowSumsTS(stld[, seascols, drop = FALSE]) fits <- fitted(fit) + allseas res <- residuals(fit) if (!is.null(lambda)) { fits <- InvBoxCox(fits, lambda, biasadj, var(res)) attr(lambda, "biasadj") <- biasadj } return(structure(list( stl = stld, model = fit, modelfunction = modelfunction, lambda = lambda, x = origx, series = deparse(substitute(y)), m = frequency(origx), fitted = fits, residuals = res ), class = "stlm")) } #' @rdname forecast.stl #' @export forecast.stlm <- function(object, h = 2 * object$m, level = c(80, 95), fan = FALSE, lambda = object$lambda, biasadj = NULL, newxreg = NULL, allow.multiplicative.trend = FALSE, ...) { if (!is.null(newxreg)) { if (nrow(as.matrix(newxreg)) != h) { stop("newxreg should have the same number of rows as the forecast horizon h") } } if (fan) { level <- seq(51, 99, by = 3) } # Forecast seasonally adjusted series if (is.element("Arima", class(object$model)) && !is.null(newxreg)) { fcast <- forecast(object$model, h = h, level = level, xreg = newxreg, ...) } else if (is.element("ets", class(object$model))) { fcast <- forecast( object$model, h = h, level = level, allow.multiplicative.trend = allow.multiplicative.trend, ... ) } else { fcast <- forecast(object$model, h = h, level = level, ...) 
  }
  # In case the forecast method uses a different horizon length (such as when xreg is used)
  h <- NROW(fcast$mean)
  # Forecast seasonal series with seasonal naive
  seasonal.periods <- attributes(object$stl)$msts
  if (is.null(seasonal.periods)) {
    seasonal.periods <- frequency(object$stl)
  }
  seascomp <- matrix(0, ncol = length(seasonal.periods), nrow = h)
  for (i in seq_along(seasonal.periods)) {
    mp <- seasonal.periods[i]
    n <- NROW(object$stl)
    colname <- paste0("Seasonal", round(mp, 2))
    seascomp[, i] <- rep(object$stl[n - rev(seq_len(mp)) + 1, colname], trunc(1 + (h - 1) / mp))[seq_len(h)]
  }
  lastseas <- rowSums(seascomp)
  xdata <- object$stl[, "Data"]
  seascols <- grep("Seasonal", colnames(object$stl))
  allseas <- rowSumsTS(object$stl[, seascols, drop = FALSE])
  series <- NULL
  # m <- frequency(object$stl$time.series)
  n <- NROW(xdata)

  # Reseasonalize
  fcast$mean <- fcast$mean + lastseas
  fcast$upper <- fcast$upper + lastseas
  fcast$lower <- fcast$lower + lastseas
  fcast$method <- paste("STL + ", fcast$method)
  fcast$series <- object$series
  # fcast$seasonal <- ts(lastseas[1:m],frequency=m,start=tsp(object$stl$time.series)[2]-1+1/m)
  # fcast$residuals <- residuals()
  fcast$fitted <- fitted(fcast) + allseas
  fcast$residuals <- residuals(fcast)
  if (!is.null(lambda)) {
    fcast$fitted <- InvBoxCox(fcast$fitted, lambda)
    fcast$mean <- InvBoxCox(fcast$mean, lambda, biasadj, fcast)
    fcast$lower <- InvBoxCox(fcast$lower, lambda)
    fcast$upper <- InvBoxCox(fcast$upper, lambda)
    attr(lambda, "biasadj") <- biasadj
    fcast$lambda <- lambda
  }
  fcast$x <- object$x
  return(fcast)
}

#' @rdname forecast.stl
#'
#' @examples
#'
#' plot(stlf(AirPassengers, lambda = 0))
#' @export
stlf <- function(y, h = frequency(x) * 2, s.window = 7+4*seq(6), t.window = NULL, robust = FALSE, lambda = NULL, biasadj = FALSE, x = y, ...) {
  seriesname <- deparse(substitute(y))

  # Check univariate
  if (NCOL(x) > 1L) {
    stop("y must be a univariate time series")
  } else {
    if (!is.null(ncol(x))) {
      if (ncol(x) == 1L) { # Probably redundant check
        x <- x[, 1L]
      }
    }
  }

  # Check x is a seasonal time series
  tspx <- tsp(x)
  if (is.null(tspx)) {
    stop("y is not a seasonal ts object")
  } else if (tspx[3] <= 1L) {
    stop("y is not a seasonal ts object")
  }

  if (!is.null(lambda)) {
    x <- BoxCox(x, lambda)
    lambda <- attr(x, "lambda")
  }

  fit <- mstl(x, s.window = s.window, t.window = t.window, robust = robust)
  fcast <- forecast(fit, h = h, lambda = lambda, biasadj = biasadj, ...)
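  # At this point stlf() has, in effect, run mstl() followed by forecast.stl();
  # a rough sketch of the equivalent two-step call is:
  #   forecast(mstl(x, s.window = s.window), h = h, ...)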
# if (!is.null(lambda)) # { # fcast$x <- origx # fcast$fitted <- InvBoxCox(fcast$fitted, lambda) # fcast$mean <- InvBoxCox(fcast$mean, lambda) # fcast$lower <- InvBoxCox(fcast$lower, lambda) # fcast$upper <- InvBoxCox(fcast$upper, lambda) # fcast$lambda <- lambda # } fcast$series <- seriesname return(fcast) } #' @rdname is.ets #' @export is.stlm <- function(x) { inherits(x, "stlm") } forecast/R/checkAdmissibility.R0000644000176200001440000000235114003673410016207 0ustar liggesusers# Author: srazbash and Rob J Hyndman ############################################################################### checkAdmissibility <- function(opt.env, box.cox=NULL, small.phi=NULL, ar.coefs=NULL, ma.coefs=NULL, tau=0, bc.lower=0, bc.upper=1) { # Check the range of the Box-Cox parameter if (!is.null(box.cox)) { if ((box.cox <= bc.lower) | (box.cox >= bc.upper)) { return(FALSE) } } # Check the range of small.phi if (!is.null(small.phi)) { if (((small.phi < .8) | (small.phi > 1))) { return(FALSE) } } # Check AR part for stationarity if (!is.null(ar.coefs)) { arlags <- which(abs(ar.coefs) > 1e-08) if (length(arlags) > 0L) { p <- max(arlags) if (min(Mod(polyroot(c(1, -ar.coefs[1L:p])))) < 1 + 1e-2) { return(FALSE) } } } # Check MA part for invertibility if (!is.null(ma.coefs)) { malags <- which(abs(ma.coefs) > 1e-08) if (length(malags) > 0L) { q <- max(malags) if (min(Mod(polyroot(c(1, ma.coefs[1L:q])))) < 1 + 1e-2) { return(FALSE) } } } # Check the eigen values of the D matrix D.eigen.values <- eigen(opt.env$D, symmetric = FALSE, only.values = TRUE)$values return(all(abs(D.eigen.values) < 1 + 1e-2)) } forecast/R/graph.R0000644000176200001440000001716014003673410013510 0ustar liggesusers### Time series graphics and transformations #' Time series display #' #' Plots a time series along with its acf and either its pacf, lagged #' scatterplot or spectrum. #' #' \code{ggtsdisplay} will produce the equivalent plot using ggplot graphics. #' #' @param x a numeric vector or time series of class \code{ts}. #' @param plot.type type of plot to include in lower right corner. #' @param points logical flag indicating whether to show the individual points #' or not in the time plot. #' @param smooth logical flag indicating whether to show a smooth loess curve #' superimposed on the time plot. #' @param ci.type type of confidence limits for ACF that is passed to #' \code{\link[stats]{acf}}. Should the confidence limits assume a white noise #' input or for lag \eqn{k} an MA(\eqn{k-1}) input? #' @param lag.max the maximum lag to plot for the acf and pacf. A suitable #' value is selected by default if the argument is missing. #' @param na.action function to handle missing values in acf, pacf and spectrum #' calculations. The default is \code{\link[stats]{na.contiguous}}. Useful #' alternatives are \code{\link[stats]{na.pass}} and \code{\link{na.interp}}. #' @param theme Adds a ggplot element to each plot, typically a theme. #' @param main Main title. #' @param xlab X-axis label. #' @param ylab Y-axis label. #' @param pch Plotting character. #' @param cex Character size. #' @param \dots additional arguments to \code{\link[stats]{acf}}. #' @return None. #' @author Rob J Hyndman #' @seealso \code{\link[stats]{plot.ts}}, \code{\link{Acf}}, #' \code{\link[stats]{spec.ar}} #' @references Hyndman and Athanasopoulos (2018) \emph{Forecasting: principles #' and practice}, 2nd edition, OTexts: Melbourne, Australia. 
#' \url{https://otexts.com/fpp2/} #' @keywords ts #' @examples #' tsdisplay(diff(WWWusage)) #' ggtsdisplay(USAccDeaths, plot.type="scatter") #' #' @export tsdisplay <- function(x, plot.type=c("partial", "histogram", "scatter", "spectrum"), points=TRUE, ci.type=c("white", "ma"), lag.max, na.action=na.contiguous, main=NULL, xlab="", ylab="", pch=1, cex=0.5, ...) { plot.type <- match.arg(plot.type) ci.type <- match.arg(ci.type) def.par <- par(no.readonly = TRUE) # save default, for resetting... nf <- layout(matrix(c(1, 1, 2, 3), 2, 2, byrow = TRUE)) if (is.null(main)) { main <- deparse(substitute(x)) } if (!is.ts(x)) { x <- ts(x) } if (missing(lag.max)) { lag.max <- round(min(max(10 * log10(length(x)), 3 * frequency(x)), length(x) / 3)) } plot.ts(x, main = main, ylab = ylab, xlab = xlab, ylim = range(x, na.rm = TRUE), ...) if (points) { points(x, pch = pch, cex = cex, ...) } ylim <- c(-1, 1) * 3 / sqrt(length(x)) junk1 <- stats::acf(c(x), lag.max = lag.max, plot = FALSE, na.action = na.action) junk1$acf[1, 1, 1] <- 0 if (ci.type == "ma") { ylim <- range(ylim, 0.66 * ylim * max(sqrt(cumsum(c(1, 2 * junk1$acf[-1, 1, 1] ^ 2))))) } ylim <- range(ylim, junk1$acf) if (plot.type == "partial") { junk2 <- stats::pacf(c(x), lag.max = lag.max, plot = FALSE, na.action = na.action) ylim <- range(ylim, junk2$acf) } oldpar <- par(mar = c(5, 4.1, 1.5, 2)) plot(junk1, ylim = ylim, xlim = c(1, lag.max), ylab = "ACF", main = "", ci.type = ci.type, ...) if (plot.type == "scatter") { n <- length(x) plot(x[1:(n - 1)], x[2:n], xlab = expression(Y[t - 1]), ylab = expression(Y[t]), ...) } else if (plot.type == "spectrum") { spec.ar(x, main = "", na.action = na.action) } else if (plot.type == "histogram") { graphics::hist(x, breaks = "FD", main = "", xlab = main) } else { plot(junk2, ylim = ylim, xlim = c(1, lag.max), ylab = "PACF", main = "", ...) } par(def.par) layout(1) invisible() } #' Seasonal plot #' #' Plots a seasonal plot as described in Hyndman and Athanasopoulos (2014, #' chapter 2). This is like a time plot except that the data are plotted #' against the seasons in separate years. #' #' @param x a numeric vector or time series of class \code{ts}. #' @param s seasonal frequency of x #' @param season.labels Labels for each season in the "year" #' @param year.labels Logical flag indicating whether labels for each year of #' data should be plotted on the right. #' @param year.labels.left Logical flag indicating whether labels for each year #' of data should be plotted on the left. #' @param type plot type (as for \code{\link[graphics]{plot}}). Not yet #' supported for ggseasonplot. #' @param main Main title. #' @param xlab X-axis label. #' @param ylab Y-axis label. #' @param col Colour #' @param labelgap Distance between year labels and plotted lines #' @param \dots additional arguments to \code{\link[graphics]{plot}}. #' @return None. #' @author Rob J Hyndman & Mitchell O'Hara-Wild #' @seealso \code{\link[stats]{monthplot}} #' @references Hyndman and Athanasopoulos (2018) \emph{Forecasting: principles #' and practice}, 2nd edition, OTexts: Melbourne, Australia. #' \url{https://otexts.com/fpp2/} #' @keywords ts #' @examples #' seasonplot(AirPassengers, col=rainbow(12), year.labels=TRUE) #' #' @export seasonplot <- function(x, s, season.labels=NULL, year.labels=FALSE, year.labels.left=FALSE, type="o", main, xlab=NULL, ylab="", col=1, labelgap=0.1, ...) 
{
  if (missing(main)) {
    main <- paste("Seasonal plot:", deparse(substitute(x)))
  }

  # Check data are seasonal and convert to integer seasonality
  if (missing(s)) {
    s <- round(frequency(x))
  }
  if (s <= 1) {
    stop("Data are not seasonal")
  }
  tspx <- tsp(x)
  x <- ts(x, start = tspx[1], frequency = s)

  # Pad series
  tsx <- x
  startperiod <- round(cycle(x)[1])
  if (startperiod > 1) {
    x <- c(rep(NA, startperiod - 1), x)
  }
  x <- c(x, rep(NA, s - length(x) %% s))
  Season <- rep(c(1:s, NA), length(x) / s)
  xnew <- rep(NA, length(x))
  xnew[!is.na(Season)] <- x

  if (s == 12) {
    labs <- month.abb
    xLab <- "Month"
  } else if (s == 4) {
    labs <- paste("Q", 1:4, sep = "")
    xLab <- "Quarter"
  } else if (s == 7) {
    labs <- c("Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat")
    xLab <- "Day"
  } else if (s == 52) {
    labs <- 1:s
    xLab <- "Week"
  } else if (s == 24) {
    labs <- 0:(s - 1)
    xLab <- "Hour"
  } else if (s == 48) {
    labs <- seq(0, 23.5, by = 0.5)
    xLab <- "Half-hour"
  } else {
    if (s < 20) {
      labs <- 1:s
    } else {
      labs <- NULL
    }
    xLab <- "Season"
  }
  if (is.null(xlab)) {
    xlab <- xLab
  }
  if (is.null(season.labels)) {
    season.labels <- labs
  }
  if (year.labels) {
    xlim <- c(1 - labelgap, s + 0.4 + labelgap)
  } else {
    xlim <- c(1 - labelgap, s)
  }
  if (year.labels.left) {
    xlim[1] <- 0.4 - labelgap
  }
  plot(Season, xnew, xaxt = "n", xlab = xlab, type = type, ylab = ylab, main = main, xlim = xlim, col = 0, ...)
  nn <- length(Season) / s
  col <- rep(col, nn)[1:nn]
  for (i in 0:(nn - 1)) lines(Season[(i * (s + 1) + 1):((s + 1) * (i + 1))], xnew[(i * (s + 1) + 1):((s + 1) * (i + 1))], type = type, col = col[i + 1], ...)
  if (year.labels) {
    idx <- which(Season[!is.na(xnew)] == s)
    year <- round(time(tsx)[idx], nchar(s))
    text(x = rep(s + labelgap, length(year)), y = tsx[idx], labels = paste(c(trunc(year))), adj = 0, ..., col = col[1:length(idx)])
  }
  if (year.labels.left) {
    idx <- which(Season[!is.na(xnew)] == 1)
    year <- round(time(tsx)[idx], nchar(s))
    if (min(idx) > 1) { # First year starts after season 1
      col <- col[-1]
    }
    text(x = rep(1 - labelgap, length(year)), y = tsx[idx], labels = paste(c(trunc(year))), adj = 1, ..., col = col[1:length(idx)])
  }
  if (is.null(labs)) {
    axis(1, ...)
  } else {
    axis(1, labels = season.labels, at = 1:s, ...)
  }
}
forecast/R/calendar.R0000644000176200001440000001335714003673410014164 0ustar liggesusers## Add as.Date.timeDate to S3 method table
#' @export
as.Date.timeDate <- timeDate::as.Date.timeDate

#' Number of trading days in each season
#'
#' Returns number of trading days in each month or quarter of the observed time
#' period in a major financial center.
#'
#' Useful for trading days length adjustments. For more on how to define
#' "business days", please refer to \code{\link[timeDate]{isBizday}}.
#'
#' @param x Monthly or quarterly time series
#' @param FinCenter Major financial center.
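#' One of \code{"New York"}, \code{"London"}, \code{"NERC"}, \code{"Tokyo"}
#' or \code{"Zurich"}.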
#' @return Time series #' @author Earo Wang #' @seealso \code{\link[forecast]{monthdays}} #' @keywords ts #' @examples #' #' x <- ts(rnorm(30), start = c(2013, 2), frequency = 12) #' bizdays(x, FinCenter = "New York") #' @export bizdays <- function(x, FinCenter = c( "New York", "London", "NERC", "Tokyo", "Zurich" )) { # Return the number of trading days corresponding to the input ts # # Args: # x: a ts object # FinCenter: inherits holiday calendar from "timeDate" package # # Returns: # A matrix contains the number of trading days if (is.null(tsp(x))) { stop("We cannot handle a time series without time attributes.") } # Convert tsp to date freq <- frequency(x) years <- start(x)[1L]:end(x)[1L] # Grab the holidays from years and financial center FinCenter <- match.arg(FinCenter) if (FinCenter == "New York") { holidays <- timeDate::holidayNYSE(years) } else if (FinCenter == "London") { holidays <- timeDate::holidayLONDON(years) } else if (FinCenter == "NERC") { holidays <- timeDate::holidayNERC(years) } else if (FinCenter == "Tokyo") { holidays <- timeDate::holidayTSX(years) } else if (FinCenter == "Zurich") { holidays <- timeDate::holidayZURICH(years) } if (freq == 12L) { # monthly data date <- zoo::as.Date(time(x)) start <- date[1L] end <- seq(date[length(date)], length.out = 2L, by = "month")[2L] - 1L days.len <- timeDate::timeSequence(from = start, to = end) # Grab business days biz <- days.len[timeDate::isBizday(days.len, holidays = holidays)] bizdays <- format(biz, format = "%Y-%m") } else if (freq == 4L) { # Quarterly data date <- zoo::as.Date(time(x)) start <- date[1L] end <- seq(date[length(date)], length.out = 2L, by = "3 month")[2L] - 1L days.len <- timeDate::timeSequence(from = start, to = end) # Grab business days biz <- days.len[timeDate::isBizday(days.len, holidays = holidays)] bizdays <- format(zoo::as.yearqtr(biz), format = "%Y Qtr%q") } # else if (freq == 52L) { # Weekly data # start <- paste0(start(x)[1L], "-01-01") # start <- as.Date(start) + start(x)[2L] * 7L # end <- start + length(time(x)) * 7L # days.len <- as.timeDate(seq(start, end, by = "days"), FinCenter = FinCenter) # biz <- days.len[isBizday(days.len, # holidays = unique(format(days.len, "%Y")))] # bizdays <- format(biz, format = "%Y Wk%W") # } num.days <- table(bizdays) out <- ts(num.days, start = tsp(x)[1L], frequency = freq) return(out) } #' Easter holidays in each season #' #' Returns a vector of 0's and 1's or fractional results if Easter spans March #' and April in the observed time period. Easter is defined as the days from #' Good Friday to Easter Sunday inclusively, plus optionally Easter Monday if #' \code{easter.mon=TRUE}. #' #' Useful for adjusting calendar effects. #' #' @param x Monthly or quarterly time series #' @param easter.mon If TRUE, the length of Easter holidays includes Easter #' Monday. 
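#' With \code{easter.mon = TRUE} the holiday window spans four days, from Good
#' Friday to Easter Monday inclusive; otherwise it spans the three days ending
#' on Easter Sunday.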
#' @return Time series #' @author Earo Wang #' @keywords ts #' @examples #' #' easter(wineind, easter.mon = TRUE) #' @export easter <- function(x, easter.mon = FALSE) { # Return a vector of 0's and 1's for easter holidays # # Args: # x: monthly, quarterly or weekly data # easter.mon: An option including easter.mon if (is.null(tsp(x))) { stop("We cannot handle a time series without time attributes.") } freq <- frequency(x) date <- zoo::as.Date(time(x)) start.yr <- start(x)[1L] end.yr <- end(x)[1L] yr.span <- seq(start.yr, end.yr) gd.fri0 <- Easter(yr.span, -2L) if (easter.mon) { easter0 <- Easter(yr.span, 1L) } else { easter0 <- Easter(yr.span) } if (freq == 12L) { fmat <- "%Y-%m" yr.mon <- format(date, format = fmat) gd.fri <- format(gd.fri0, format = fmat) # good fri easter <- format(easter0, format = fmat) # easter mon } else if (freq == 4L) { fmat <- "%Y-%q" yr.mon <- format(zoo::as.yearqtr(date), format = fmat) # yr.qtr gd.fri <- format(zoo::as.yearqtr(gd.fri0), format = fmat) easter <- format(zoo::as.yearqtr(easter0), format = fmat) } span <- cbind(gd.fri, easter) # the span of easter holidays hdays <- unlist(apply(span, 1, unique)) dummies <- ifelse(yr.mon %in% hdays, 1L, 0L) # Allow fractional results denominator <- (easter0 - gd.fri0 + 1L)[1L] last.mar <- as.timeDate(paste0(yr.span, "-03-31")) dif <- difftimeDate(last.mar, gd.fri0, units = "days") + 1L # Remove easter out of date range if (date[1L] > as.character(last.mar[1L])) { dif <- dif[-1L] } if (date[length(yr.mon)] < as.character(last.mar[length(last.mar)])) { dif <- dif[-length(dif)] } replace <- dif > denominator | dif <= 0L dif[replace] <- denominator # Easter in the same month # Start to insert the remaining part falling in Apr index <- which(dif != denominator) if (length(index) != 0L) { values <- denominator - dif[index] new.index <- index[1L] for (i in 1L:length(index)) { dif <- append(dif, values = values[i], new.index) new.index <- index[i + 1L] + i } dummies[dummies == 1L] <- round(dif / unclass(denominator), digits = 2) } out <- ts(dummies, start = tsp(x)[1L], frequency = freq) return(out) } forecast/R/forecast.varest.R0000644000176200001440000000355314003673410015521 0ustar liggesusers# forecast function for varest, just a wrapper for predict.varest #' @export forecast.varest <- function(object, h=10, level=c(80, 95), fan=FALSE, ...) { out <- list(model = object, forecast = vector("list", object$K)) # Get residuals and fitted values and fix the times tspx <- tsp(object$y) vres <- residuals(object) vfits <- fitted(object) method <- paste0("VAR(", object$p, ")") # Add forecasts with prediction intervals # out$mean <- out$lower <- out$upper <- vector("list",object$K) for (i in seq_along(level)) { pr <- predict(object, n.ahead = h, ci = level[i] / 100, ...) 
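    # predict.varest (from the vars package) returns a list element `fcst`
    # with one matrix per endogenous variable; the columns used below are the
    # point forecasts ("fcst") and the interval bounds ("lower" and "upper").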
for (j in 1:object$K) { out$forecast[[j]]$lower <- cbind(out$forecast[[j]]$lower, pr$fcst[[j]][, "lower"]) out$forecast[[j]]$upper <- cbind(out$forecast[[j]]$upper, pr$fcst[[j]][, "upper"]) } } j <- 1 for (fcast in out$forecast) { fcast$mean <- ts(pr$fcst[[j]][, "fcst"], frequency = tspx[3], start = tspx[2] + 1 / tspx[3]) fcast$lower <- ts(fcast$lower, frequency = tspx[3], start = tspx[2] + 1 / tspx[3]) fcast$upper <- ts(fcast$upper, frequency = tspx[3], start = tspx[2] + 1 / tspx[3]) colnames(fcast$lower) <- colnames(fcast$upper) <- paste0(level, "%") fcast$residuals <- fcast$fitted <- ts(rep(NA, nrow(object$y))) fcast$residuals[((1 - nrow(vres)):0) + length(fcast$residuals)] <- vres[, j] fcast$fitted[((1 - nrow(vfits)):0) + length(fcast$fitted)] <- vfits[, j] fcast$method <- method fcast$level <- level fcast$x <- object$y[, j] fcast$series <- colnames(object$y)[j] tsp(fcast$residuals) <- tsp(fcast$fitted) <- tspx fcast <- structure(fcast, class = "forecast") out$forecast[[j]] <- fcast j <- j + 1 } names(out$forecast) <- names(pr$fcst) out$method <- rep(method, object$K) names(out$forecast) <- names(out$method) <- names(pr$fcst) return(structure(out, class = "mforecast")) } forecast/R/armaroots.R0000644000176200001440000001111014003673410014403 0ustar liggesusers# Functions to plot the roots of an ARIMA model # Compute AR roots arroots <- function(object) { if (!any(is.element(class(object), c("Arima", "ar")))) { stop("object must be of class Arima or ar") } if (is.element("Arima", class(object))) { parvec <- object$model$phi } else { parvec <- object$ar } if (length(parvec) > 0) { last.nonzero <- max(which(abs(parvec) > 1e-08)) if (last.nonzero > 0) { return(structure(list( roots = polyroot(c(1, -parvec[1:last.nonzero])), type = "AR" ), class = "armaroots")) } } return(structure(list(roots = numeric(0), type = "AR"), class = "armaroots")) } # Compute MA roots maroots <- function(object) { if (!is.element("Arima", class(object))) { stop("object must be of class Arima") } parvec <- object$model$theta if (length(parvec) > 0) { last.nonzero <- max(which(abs(parvec) > 1e-08)) if (last.nonzero > 0) { return(structure(list( roots = polyroot(c(1, parvec[1:last.nonzero])), type = "MA" ), class = "armaroots")) } } return(structure(list(roots = numeric(0), type = "MA"), class = "armaroots")) } plot.armaroots <- function(x, xlab, ylab, main, ...) { if (missing(main)) { main <- paste("Inverse", x$type, "roots") } oldpar <- par(pty = "s") on.exit(par(oldpar)) plot( c(-1, 1), c(-1, 1), xlab = xlab, ylab = ylab, type = "n", bty = "n", xaxt = "n", yaxt = "n", main = main, ... ) axis(1, at = c(-1, 0, 1), line = 0.5, tck = -0.025) axis(2, at = c(-1, 0, 1), labels = c("-i", "0", "i"), line = 0.5, tck = -0.025) circx <- seq(-1, 1, length.out = 501) circy <- sqrt(1 - circx^2) lines(c(circx, circx), c(circy, -circy), col = "gray") lines(c(-2, 2), c(0, 0), col = "gray") lines(c(0, 0), c(-2, 2), col = "gray") if (length(x$roots) > 0) { inside <- abs(x$roots) > 1 points(1 / x$roots[inside], pch = 19, col = "black") if (sum(!inside) > 0) { points(1 / x$roots[!inside], pch = 19, col = "red") } } } #' Plot characteristic roots from ARIMA model #' #' Produces a plot of the inverse AR and MA roots of an ARIMA model. Inverse #' roots outside the unit circle are shown in red. #' #' \code{autoplot} will produce an equivalent plot as a ggplot object. #' #' @param x Object of class \dQuote{Arima} or \dQuote{ar}. #' @param object Object of class \dQuote{Arima} or \dQuote{ar}. 
Used for ggplot
#' graphics (S3 method consistency).
#' @param type Determines if both AR and MA roots are plotted, or if just one
#' set is plotted.
#' @param main Main title. Default is "Inverse AR roots" or "Inverse MA roots".
#' @param xlab X-axis label.
#' @param ylab Y-axis label.
#' @param ... Other plotting parameters passed to \code{\link[graphics]{par}}.
#' @return None. The function produces a plot.
#' @author Rob J Hyndman & Mitchell O'Hara-Wild
#' @seealso \code{\link{Arima}}, \code{\link[stats]{ar}}
#' @keywords hplot
#' @examples
#'
#' library(ggplot2)
#'
#' fit <- Arima(WWWusage, order = c(3, 1, 0))
#' plot(fit)
#' autoplot(fit)
#'
#' fit <- Arima(woolyrnq, order = c(2, 0, 0), seasonal = c(2, 1, 1))
#' plot(fit)
#' autoplot(fit)
#'
#' plot(ar.ols(gold[1:61]))
#' autoplot(ar.ols(gold[1:61]))
#' @export
plot.Arima <- function(x, type = c("both", "ar", "ma"), main, xlab = "Real", ylab = "Imaginary", ...) {
  type <- match.arg(type)
  if (!is.element("Arima", class(x))) {
    stop("This function is for objects of class 'Arima'.")
  }
  q <- p <- 0
  # AR component
  if (length(x$model$phi) > 0) {
    test <- abs(x$model$phi) > 1e-09
    if (any(test)) {
      p <- max(which(test))
    }
  }
  # MA component
  if (length(x$model$theta) > 0) {
    test <- abs(x$model$theta) > 1e-09
    if (any(test)) {
      q <- max(which(test))
    }
  }
  # Check for MA parts
  if (type == "both") {
    if (p == 0) {
      type <- "ma"
    } else if (q == 0) {
      type <- "ar"
    }
  }
  if ((type == "ar" && (p == 0)) || (type == "ma" && (q == 0)) || (p == 0 && q == 0)) {
    warning("No roots to plot")
    if (missing(main)) {
      main <- "No AR or MA roots"
    }
  }
  if (type == "both") {
    oldpar <- par(mfrow = c(1, 2))
    on.exit(par(oldpar))
  }
  if (type != "ma") {
    plot(arroots(x), main = main, xlab = xlab, ylab = ylab, ...)
  }
  if (type != "ar") {
    plot(maroots(x), main = main, xlab = xlab, ylab = ylab, ...)
  }
}

#' @rdname plot.Arima
#' @export
plot.ar <- function(x, main, xlab = "Real", ylab = "Imaginary", ...) {
  if (!is.element("ar", class(x))) {
    stop("This function is for objects of class 'ar'.")
  }
  plot(arroots(x), main = main, xlab = xlab, ylab = ylab, ...)
}
forecast/R/theta.R0000644000176200001440000001155314003673410013514 0ustar liggesusers# Implement standard Theta method of Assimakopoulos and Nikolopoulos (2000)
# More general methods are available in the forecTheta package
# Author: RJH

#' Theta method forecast
#'
#' Returns forecasts and prediction intervals for a theta method forecast.
#'
#' The theta method of Assimakopoulos and Nikolopoulos (2000) is equivalent to
#' simple exponential smoothing with drift. This is demonstrated in Hyndman and
#' Billah (2003).
#'
#' The series is tested for seasonality using the test outlined in A&N. If
#' deemed seasonal, the series is seasonally adjusted using a classical
#' multiplicative decomposition before applying the theta method. The resulting
#' forecasts are then reseasonalized.
#'
#' Prediction intervals are computed using the underlying state space model.
#'
#' More general theta methods are available in the
#' \code{\link[forecTheta]{forecTheta}} package.
#'
#' @param y a numeric vector or time series of class \code{ts}
#' @param h Number of periods for forecasting
#' @param level Confidence levels for prediction intervals.
#' @param fan If TRUE, level is set to seq(51,99,by=3). This is suitable for
#' fan plots.
#' @param x Deprecated. Included for backwards compatibility.
#' @return An object of class "\code{forecast}".
#' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{rwf}. #' #' An object of class \code{"forecast"} is a list containing at least the #' following elements: \item{model}{A list containing information about the #' fitted model} \item{method}{The name of the forecasting method as a #' character string} \item{mean}{Point forecasts as a time series} #' \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper #' limits for prediction intervals} \item{level}{The confidence values #' associated with the prediction intervals} \item{x}{The original time series #' (either \code{object} itself or the time series used to create the model #' stored as \code{object}).} \item{residuals}{Residuals from the fitted model. #' That is x minus fitted values.} \item{fitted}{Fitted values (one-step #' forecasts)} #' @author Rob J Hyndman #' @seealso \code{\link[stats]{arima}}, \code{\link{meanf}}, \code{\link{rwf}}, #' \code{\link{ses}} #' @references Assimakopoulos, V. and Nikolopoulos, K. (2000). The theta model: #' a decomposition approach to forecasting. \emph{International Journal of #' Forecasting} \bold{16}, 521-530. #' #' Hyndman, R.J., and Billah, B. (2003) Unmasking the Theta method. #' \emph{International J. Forecasting}, \bold{19}, 287-290. #' @keywords ts #' @examples #' nile.fcast <- thetaf(Nile) #' plot(nile.fcast) #' @export thetaf <- function(y, h = ifelse(frequency(y) > 1, 2 * frequency(y), 10), level = c(80, 95), fan = FALSE, x = y) { # Check inputs if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 && max(level) < 1) { level <- 100 * level } else if (min(level) < 0 || max(level) > 99.99) { stop("Confidence limit out of range") } } # Check seasonality n <- length(x) x <- as.ts(x) m <- frequency(x) if (m > 1 && !is.constant(x) && n > 2 * m) { r <- as.numeric(acf(x, lag.max = m, plot = FALSE)$acf)[-1] stat <- sqrt((1 + 2 * sum(r[-m]^2)) / n) seasonal <- (abs(r[m]) / stat > qnorm(0.95)) } else { seasonal <- FALSE } # Seasonal decomposition origx <- x if (seasonal) { decomp <- decompose(x, type = "multiplicative") if (any(abs(seasonal(decomp)) < 1e-4)) { warning("Seasonal indexes close to zero. 
Using non-seasonal Theta method") } else { x <- seasadj(decomp) } } # Find theta lines fcast <- ses(x, h = h) tmp2 <- lsfit(0:(n - 1), x)$coefficients[2] / 2 alpha <- pmax(1e-10, fcast$model$par["alpha"]) fcast$mean <- fcast$mean + tmp2 * (0:(h - 1) + (1 - (1 - alpha)^n) / alpha) # Reseasonalize if (seasonal) { fcast$mean <- fcast$mean * rep(tail(decomp$seasonal, m), trunc(1 + h / m))[1:h] fcast$fitted <- fcast$fitted * decomp$seasonal } fcast$residuals <- origx - fcast$fitted # Find prediction intervals fcast.se <- sqrt(fcast$model$sigma2) * sqrt((0:(h - 1)) * alpha^2 + 1) nconf <- length(level) fcast$lower <- fcast$upper <- ts(matrix(NA, nrow = h, ncol = nconf)) tsp(fcast$lower) <- tsp(fcast$upper) <- tsp(fcast$mean) for (i in 1:nconf) { zt <- -qnorm(0.5 - level[i] / 200) fcast$lower[, i] <- fcast$mean - zt * fcast.se fcast$upper[, i] <- fcast$mean + zt * fcast.se } # Return results fcast$x <- origx fcast$level <- level fcast$method <- "Theta" fcast$model <- list(alpha = alpha, drift = tmp2, sigma = fcast$model$sigma2) fcast$model$call <- match.call() return(fcast) } forecast/R/newarima2.R0000644000176200001440000010012314071174533014273 0ustar liggesusers#' Fit best ARIMA model to univariate time series #' #' Returns best ARIMA model according to either AIC, AICc or BIC value. The #' function conducts a search over possible model within the order constraints #' provided. #' #' The default arguments are designed for rapid estimation of models for many time series. #' If you are analysing just one time series, and can afford to take some more time, it #' is recommended that you set \code{stepwise=FALSE} and \code{approximation=FALSE}. #' #' Non-stepwise selection can be slow, especially for seasonal data. The stepwise #' algorithm outlined in Hyndman & Khandakar (2008) is used except that the default #' method for selecting seasonal differences is now based on an estimate of seasonal #' strength (Wang, Smith & Hyndman, 2006) rather than the Canova-Hansen test. #' There are also some other minor variations to the algorithm described in #' Hyndman and Khandakar (2008). #' #' @inheritParams stats::arima #' @param y a univariate time series #' @param d Order of first-differencing. If missing, will choose a value based #' on \code{test}. #' @param D Order of seasonal-differencing. If missing, will choose a value #' based on \code{season.test}. #' @param max.p Maximum value of p #' @param max.q Maximum value of q #' @param max.P Maximum value of P #' @param max.Q Maximum value of Q #' @param max.order Maximum value of p+q+P+Q if model selection is not #' stepwise. #' @param max.d Maximum number of non-seasonal differences #' @param max.D Maximum number of seasonal differences #' @param start.p Starting value of p in stepwise procedure. #' @param start.q Starting value of q in stepwise procedure. #' @param start.P Starting value of P in stepwise procedure. #' @param start.Q Starting value of Q in stepwise procedure. #' @param stationary If \code{TRUE}, restricts search to stationary models. #' @param seasonal If \code{FALSE}, restricts search to non-seasonal models. #' @param ic Information criterion to be used in model selection. #' @param stepwise If \code{TRUE}, will do stepwise selection (faster). #' Otherwise, it searches over all models. Non-stepwise selection can be very #' slow, especially for seasonal models. #' @param nmodels Maximum number of models considered in the stepwise search. #' @param trace If \code{TRUE}, the list of ARIMA models considered will be #' reported. 
#' @param approximation If \code{TRUE}, estimation is via conditional sums of #' squares and the information criteria used for model selection are #' approximated. The final model is still computed using maximum likelihood #' estimation. Approximation should be used for long time series or a high #' seasonal period to avoid excessive computation times. #' @param truncate An integer value indicating how many observations to use in #' model selection. The last \code{truncate} values of the series are used to #' select a model when \code{truncate} is not \code{NULL} and #' \code{approximation=TRUE}. All observations are used if either #' \code{truncate=NULL} or \code{approximation=FALSE}. #' @param xreg Optionally, a numerical vector or matrix of external regressors, which #' must have the same number of rows as \code{y}. (It should not be a data frame.) #' @param test Type of unit root test to use. See \code{\link{ndiffs}} for #' details. #' @param test.args Additional arguments to be passed to the unit root test. #' @param seasonal.test This determines which method is used to select the number of seasonal differences. #' The default method is to use a measure of seasonal strength computed from an STL decomposition. #' Other possibilities involve seasonal unit root tests. #' @param seasonal.test.args Additional arguments to be passed to the seasonal #' unit root test. #' See \code{\link{nsdiffs}} for details. #' @param allowdrift If \code{TRUE}, models with drift terms are considered. #' @param allowmean If \code{TRUE}, models with a non-zero mean are considered. #' @param parallel If \code{TRUE} and \code{stepwise = FALSE}, then the #' specification search is done in parallel. This can give a significant #' speedup on multicore machines. #' @param num.cores Allows the user to specify the amount of parallel processes #' to be used if \code{parallel = TRUE} and \code{stepwise = FALSE}. If #' \code{NULL}, then the number of logical cores is automatically detected and #' all available cores are used. #' @param x Deprecated. Included for backwards compatibility. #' @param ... Additional arguments to be passed to \code{\link[stats]{arima}}. #' @inheritParams forecast #' #' @return Same as for \code{\link{Arima}} #' @author Rob J Hyndman #' @seealso \code{\link{Arima}} #' @references Hyndman, RJ and Khandakar, Y (2008) "Automatic time series #' forecasting: The forecast package for R", \emph{Journal of Statistical #' Software}, \bold{26}(3). #' #' Wang, X, Smith, KA, Hyndman, RJ (2006) "Characteristic-based clustering #' for time series data", \emph{Data Mining and Knowledge Discovery}, #' \bold{13}(3), 335-364. #' @keywords ts #' @examples #' fit <- auto.arima(WWWusage) #' plot(forecast(fit,h=20)) #' #' @export auto.arima <- function(y, d=NA, D=NA, max.p=5, max.q=5, max.P=2, max.Q=2, max.order=5, max.d=2, max.D=1, start.p=2, start.q=2, start.P=1, start.Q=1, stationary=FALSE, seasonal=TRUE, ic=c("aicc", "aic", "bic"), stepwise=TRUE, nmodels = 94, trace=FALSE, approximation=(length(x) > 150 | frequency(x) > 12), method = NULL, truncate=NULL, xreg=NULL, test=c("kpss", "adf", "pp"), test.args = list(), seasonal.test=c("seas", "ocsb", "hegy", "ch"), seasonal.test.args = list(), allowdrift=TRUE, allowmean=TRUE, lambda=NULL, biasadj=FALSE, parallel=FALSE, num.cores=2, x=y, ...) { # Only non-stepwise parallel implemented so far. 
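  # For a more thorough (but slower) search, callers can set both
  # stepwise = FALSE and approximation = FALSE, e.g. (a sketch):
  #   auto.arima(y, stepwise = FALSE, approximation = FALSE)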
  if (stepwise && parallel) {
    warning("Parallel computation is only implemented when stepwise=FALSE; the model will be fit in serial.")
    parallel <- FALSE
  }
  if (trace && parallel) {
    message("Tracing the model search in parallel is not supported.")
    trace <- FALSE
  }

  series <- deparse(substitute(y))

  x <- as.ts(x)
  if (NCOL(x) > 1) {
    stop("auto.arima can only handle univariate time series")
  }

  # Trim leading NAs and find length of non-missing data
  orig.x <- x
  missing <- is.na(x)
  firstnonmiss <- head(which(!missing), 1)
  lastnonmiss <- tail(which(!missing), 1)
  serieslength <- sum(!missing[firstnonmiss:lastnonmiss])
  # Trim initial missing values
  x <- subset(x, start = firstnonmiss)
  if (!is.null(xreg)) {
    if (!is.numeric(xreg)) {
      stop("xreg should be a numeric matrix or a numeric vector")
    }
    xreg <- as.matrix(xreg)
    xreg <- xreg[firstnonmiss:NROW(xreg), , drop = FALSE]
  }

  # Check for constant data
  if (is.constant(x)) {
    if (all(is.na(x))) {
      stop("All data are missing")
    }
    if (allowmean) {
      fit <- Arima(x, order = c(0, 0, 0), fixed = mean(x, na.rm = TRUE), ...)
    } else {
      fit <- Arima(x, order = c(0, 0, 0), include.mean = FALSE, ...)
    }
    fit$x <- orig.x
    fit$series <- series
    fit$call <- match.call()
    fit$call$x <- data.frame(x = x)
    fit$constant <- TRUE
    return(fit)
  }
  ic <- match.arg(ic)
  test <- match.arg(test)
  seasonal.test <- match.arg(seasonal.test)

  # Use m = 1 when only non-seasonal models are considered
  if (seasonal) {
    m <- frequency(x)
  } else {
    m <- 1
  }
  if (m < 1) {
    # warning("I can't handle data with frequency less than 1. Seasonality will be ignored.")
    m <- 1
  } else {
    m <- round(m)
  } # Avoid non-integer seasonal periods

  max.p <- min(max.p, floor(serieslength / 3))
  max.q <- min(max.q, floor(serieslength / 3))
  max.P <- min(max.P, floor(serieslength / 3 / m))
  max.Q <- min(max.Q, floor(serieslength / 3 / m))

  # Use AIC for very short series, as AICc won't work for tiny samples.
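  # (A sketch of the reasoning: AICc adds a correction of 2*k*(k + 1)/(n - k - 1)
  # to the AIC, so with n <= 3 observations the denominator can be zero or
  # negative and the criterion breaks down.)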
  if (serieslength <= 3L) {
    ic <- "aic"
  }

  # Transform data if requested
  if (!is.null(lambda)) {
    x <- BoxCox(x, lambda)
    lambda <- attr(x, "lambda")
    attr(lambda, "biasadj") <- biasadj
  }

  # Check xreg and do regression if necessary
  if (!is.null(xreg)) {
    if (is.null(colnames(xreg))) {
      colnames(xreg) <- if (ncol(xreg) == 1) "xreg" else paste("xreg", 1:ncol(xreg), sep = "")
    }
    xregg <- xreg
    xx <- x
    # Check that xreg is not rank deficient
    # First check if any columns are constant
    constant_columns <- apply(xregg, 2, is.constant)
    if (all(constant_columns)) {
      xregg <- NULL
    } else {
      if (any(constant_columns)) {
        xregg <- xregg[, -which(constant_columns), drop = FALSE]
      }
      # Now check if it is rank deficient
      sv <- svd(na.omit(cbind(rep(1, NROW(xregg)), xregg)))$d
      if (min(sv) / sum(sv) < .Machine$double.eps) {
        stop("xreg is rank deficient")
      }
      # Finally find residuals from regression in order
      # to estimate appropriate level of differencing
      j <- !is.na(x) & !is.na(rowSums(xregg))
      xx[j] <- residuals(lm(x ~ xregg))
    }
  } else {
    xx <- x
    xregg <- NULL
  }

  # Choose order of differencing
  if (stationary) {
    d <- D <- 0
  }
  if (m == 1) {
    D <- max.P <- max.Q <- 0
  } else if (is.na(D) & length(xx) <= 2 * m) {
    D <- 0
  } else if (is.na(D)) {
    D <- do.call("nsdiffs", c(list(xx, test = seasonal.test, max.D = max.D), seasonal.test.args))
    # Make sure xreg is not null after differencing
    if (D > 0 && !is.null(xregg)) {
      diffxreg <- diff(xregg, differences = D, lag = m)
      if (any(apply(diffxreg, 2, is.constant))) {
        D <- D - 1
      }
    }
    # Make sure xx is not all missing after differencing
    if (D > 0) {
      dx <- diff(xx, differences = D, lag = m)
      if (all(is.na(dx))) {
        D <- D - 1
      }
    }
  }
  if (D > 0) {
    dx <- diff(xx, differences = D, lag = m)
  } else {
    dx <- xx
  }
  if (!is.null(xregg)) {
    if (D > 0) {
      diffxreg <- diff(xregg, differences = D, lag = m)
    } else {
      diffxreg <- xregg
    }
  }
  if (is.na(d)) {
    d <- do.call("ndiffs", c(list(dx, test = test, max.d = max.d), test.args))
    # Make sure xreg is not null after differencing
    if (d > 0 && !is.null(xregg)) {
      diffxreg <- diff(diffxreg, differences = d, lag = 1)
      if (any(apply(diffxreg, 2, is.constant))) {
        d <- d - 1
      }
    }
    # Make sure dx is not all missing after differencing
    if (d > 0) {
      diffdx <- diff(dx, differences = d, lag = 1)
      if (all(is.na(diffdx))) {
        d <- d - 1
      }
    }
  }

  # Check number of differences selected
  if (D >= 2) {
    warning("Having more than one seasonal difference is not recommended. Please consider using only one seasonal difference.")
  } else if (D + d > 2) {
    warning("Having 3 or more differencing operations is not recommended. Please consider reducing the total number of differences.")
  }

  if (d > 0) {
    dx <- diff(dx, differences = d, lag = 1)
  }

  if (length(dx) == 0L) {
    stop("Not enough data to proceed")
  } else if (is.constant(dx)) {
    if (is.null(xreg)) {
      if (D > 0 && d == 0) {
        fit <- Arima(x, order = c(0, d, 0), seasonal = list(order = c(0, D, 0), period = m), include.constant = TRUE, fixed = mean(dx / m, na.rm = TRUE), method = method, ...)
      } else if (D > 0 && d > 0) {
        fit <- Arima(x, order = c(0, d, 0), seasonal = list(order = c(0, D, 0), period = m), method = method, ...)
      } else if (d == 2) {
        fit <- Arima(x, order = c(0, d, 0), method = method, ...)
      } else if (d < 2) {
        fit <- Arima(x, order = c(0, d, 0), include.constant = TRUE, fixed = mean(dx, na.rm = TRUE), method = method, ...)
      } else {
        stop("Data follow a simple polynomial and are not suitable for ARIMA modelling.")
      }
    } else { # Perfect regression
      if (D > 0) {
        fit <- Arima(x, order = c(0, d, 0), seasonal = list(order = c(0, D, 0), period = m), xreg = xreg, method = method, ...)
} else { fit <- Arima(x, order = c(0, d, 0), xreg = xreg, method = method, ...) } } fit$x <- orig.x fit$series <- series fit$call <- match.call() fit$call$x <- data.frame(x = x) return(fit) } if (m > 1) { if (max.P > 0) { max.p <- min(max.p, m - 1) } if (max.Q > 0) { max.q <- min(max.q, m - 1) } } # Find constant offset for AIC calculation using white noise model if (approximation) { if (!is.null(truncate)) { tspx <- tsp(x) if (length(x) > truncate) { x <- ts(tail(x, truncate), end = tspx[2], frequency = tspx[3]) } } if (D == 0) { fit <- try(stats::arima(x, order = c(0, d, 0), xreg = xreg, ...), silent = TRUE) } else { fit <- try(stats::arima( x, order = c(0, d, 0), seasonal = list(order = c(0, D, 0), period = m), xreg = xreg, ... ), silent = TRUE) } if (!is.element("try-error", class(fit))) { offset <- -2 * fit$loglik - serieslength * log(fit$sigma2) } else # Not sure this should ever happen { # warning("Unable to calculate AIC offset") offset <- 0 } } else { offset <- 0 } allowdrift <- allowdrift & (d + D) == 1 allowmean <- allowmean & (d + D) == 0 constant <- allowdrift | allowmean if (approximation && trace) { cat("\n Fitting models using approximations to speed things up...\n") } if (!stepwise) { bestfit <- search.arima( x, d, D, max.p, max.q, max.P, max.Q, max.order, stationary, ic, trace, approximation, method = method, xreg = xreg, offset = offset, allowdrift = allowdrift, allowmean = allowmean, parallel = parallel, num.cores = num.cores, ... ) bestfit$call <- match.call() bestfit$call$x <- data.frame(x = x) bestfit$lambda <- lambda bestfit$x <- orig.x bestfit$series <- series bestfit$fitted <- fitted.Arima(bestfit) if (trace) { cat("\n\n Best model:", arima.string(bestfit, padding = TRUE), "\n\n") } return(bestfit) } # Starting model if (length(x) < 10L) { start.p <- min(start.p, 1L) start.q <- min(start.q, 1L) start.P <- 0L start.Q <- 0L } p <- start.p <- min(start.p, max.p) q <- start.q <- min(start.q, max.q) P <- start.P <- min(start.P, max.P) Q <- start.Q <- min(start.Q, max.Q) results <- matrix(NA, nrow = nmodels, ncol = 8) bestfit <- myarima(x, order = c(p, d, q), seasonal = c(P, D, Q), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[1, ] <- c(p, d, q, P, D, Q, constant, bestfit$ic) # Null model with possible constant fit <- myarima(x, order = c(0, d, 0), seasonal = c(0, D, 0), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[2, ] <- c(0, d, 0, 0, D, 0, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit p <- q <- P <- Q <- 0 } k <- 2 # Basic AR model if (max.p > 0 || max.P > 0) { fit <- myarima(x, order = c(max.p > 0, d, 0), seasonal = c((m > 1) & (max.P > 0), D, 0), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k+1, ] <- c(max.p > 0, d, 0, (m > 1) & (max.P > 0), D, 0, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit p <- (max.p > 0) P <- (m > 1) & (max.P > 0) q <- Q <- 0 } k <- k + 1 } # Basic MA model if (max.q > 0 || max.Q > 0) { fit <- myarima(x, order = c(0, d, max.q > 0), seasonal = c(0, D, (m > 1) & (max.Q > 0)), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) 
results[k+1, ] <- c(0, d, max.q > 0, 0, D, (m > 1) & (max.Q > 0), constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit p <- P <- 0 Q <- (m > 1) & (max.Q > 0) q <- (max.q > 0) } k <- k + 1 } # Null model with no constant if (constant) { fit <- myarima(x, order = c(0, d, 0), seasonal = c(0, D, 0), constant = FALSE, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k+1, ] <- c(0, d, 0, 0, D, 0, 0, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit p <- q <- P <- Q <- 0 } k <- k + 1 } startk <- 0 while (startk < k && k < nmodels) { startk <- k if (P > 0 && newmodel(p, d, q, P - 1, D, Q, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p, d, q), seasonal = c(P - 1, D, Q), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p, d, q, P - 1, D, Q, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit P <- (P - 1) next } } if (Q > 0 && newmodel(p, d, q, P, D, Q - 1, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p, d, q), seasonal = c(P, D, Q - 1), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p, d, q, P, D, Q - 1, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit Q <- (Q - 1) next } } if (P < max.P && newmodel(p, d, q, P + 1, D, Q, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p, d, q), seasonal = c(P + 1, D, Q), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p, d, q, P + 1, D, Q, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit P <- (P + 1) next } } if (Q < max.Q && newmodel(p, d, q, P, D, Q + 1, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p, d, q), seasonal = c(P, D, Q + 1), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p, d, q, P, D, Q + 1, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit Q <- (Q + 1) next } } if (Q > 0 && P > 0 && newmodel(p, d, q, P - 1, D, Q - 1, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p, d, q), seasonal = c(P - 1, D, Q - 1), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p, d, q, P - 1, D, Q - 1, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit Q <- (Q - 1) P <- (P - 1) next } } if (Q < max.Q && P > 0 && newmodel(p, d, q, P - 1, D, Q + 1, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p, d, q), seasonal = c(P - 1, D, Q + 1), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p, d, q, P - 1, D, Q + 1, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit Q <- (Q + 1) P <- (P - 1) next } } if (Q > 0 && P < max.P && newmodel(p, d, q, P + 1, D, Q - 1, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p, d, q), seasonal = c(P + 1, D, Q - 1), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) 
results[k, ] <- c(p, d, q, P + 1, D, Q - 1, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit Q <- (Q - 1) P <- (P + 1) next } } if (Q < max.Q && P < max.P && newmodel(p, d, q, P + 1, D, Q + 1, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p, d, q), seasonal = c(P + 1, D, Q + 1), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p, d, q, P + 1, D, Q + 1, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit Q <- (Q + 1) P <- (P + 1) next } } if (p > 0 && newmodel(p - 1, d, q, P, D, Q, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p - 1, d, q), seasonal = c(P, D, Q), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p - 1, d, q, P, D, Q, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit p <- (p - 1) next } } if (q > 0 && newmodel(p, d, q - 1, P, D, Q, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p, d, q - 1), seasonal = c(P, D, Q), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p, d, q - 1, P, D, Q, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit q <- (q - 1) next } } if (p < max.p && newmodel(p + 1, d, q, P, D, Q, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p + 1, d, q), seasonal = c(P, D, Q), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p + 1, d, q, P, D, Q, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit p <- (p + 1) next } } if (q < max.q && newmodel(p, d, q + 1, P, D, Q, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p, d, q + 1), seasonal = c(P, D, Q), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p, d, q + 1, P, D, Q, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit q <- (q + 1) next } } if (q > 0 && p > 0 && newmodel(p - 1, d, q - 1, P, D, Q, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p - 1, d, q - 1), seasonal = c(P, D, Q), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p - 1, d, q - 1, P, D, Q, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit q <- (q - 1) p <- (p - 1) next } } if (q < max.q && p > 0 && newmodel(p - 1, d, q + 1, P, D, Q, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p - 1, d, q + 1), seasonal = c(P, D, Q), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p - 1, d, q + 1, P, D, Q, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit q <- (q + 1) p <- (p - 1) next } } if (q > 0 && p < max.p && newmodel(p + 1, d, q - 1, P, D, Q, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p + 1, d, q - 1), seasonal = c(P, D, Q), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) 
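# Illustrative (hypothetical) trajectory: from a current best of
# ARIMA(2,1,2)(1,1,1)[12], the search might step to ARIMA(2,1,2)(0,1,1)[12]
# and then to ARIMA(1,1,2)(0,1,1)[12], each move accepted only if it lowers
# the chosen information criterion.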
results[k, ] <- c(p + 1, d, q - 1, P, D, Q, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit q <- (q - 1) p <- (p + 1) next } } if (q < max.q && p < max.p && newmodel(p + 1, d, q + 1, P, D, Q, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p + 1, d, q + 1), seasonal = c(P, D, Q), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p + 1, d, q + 1, P, D, Q, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit q <- (q + 1) p <- (p + 1) next } } if (allowdrift || allowmean) { if (newmodel(p, d, q, P, D, Q, !constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p, d, q), seasonal = c(P, D, Q), constant = !constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p, d, q, P, D, Q, !constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit constant <- !constant } } } } if(k > nmodels){ warning(sprintf("Stepwise search was stopped early due to reaching the model number limit: `nmodels = %i`", nmodels)) } # Refit using ML if approximation used for IC if (approximation && !is.null(bestfit$arma)) { if (trace) { cat("\n\n Now re-fitting the best model(s) without approximations...\n") } icorder <- order(results[, 8]) nmodels <- sum(!is.na(results[, 8])) for (i in seq(nmodels)) { k <- icorder[i] fit <- myarima( x, order = c(results[k, 1], d, results[k, 3]), seasonal = c(results[k, 4], D, results[k, 6]), constant = results[k, 7] == 1, ic, trace, approximation = FALSE, method = method, xreg = xreg, ... ) if (fit$ic < Inf) { bestfit <- fit break } } } # Nothing fitted if (bestfit$ic == Inf && !isTRUE(method=="CSS")) { if (trace) { cat("\n") } stop("No suitable ARIMA model found") } # Return best fit bestfit$x <- orig.x bestfit$series <- series bestfit$ic <- NULL bestfit$call <- match.call() bestfit$call$x <- data.frame(x = x) bestfit$lambda <- lambda bestfit$fitted <- fitted.Arima(bestfit) if (trace) { cat("\n\n Best model:", arima.string(bestfit, padding = TRUE), "\n\n") } return(bestfit) } # Calls arima from stats package and adds data to the returned object # Also allows refitting to new data # and drift terms to be included. myarima <- function(x, order = c(0, 0, 0), seasonal = c(0, 0, 0), constant=TRUE, ic="aic", trace=FALSE, approximation=FALSE, offset=0, xreg=NULL, method = NULL, ...) 
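# myarima() wraps stats::arima(): it appends a drift regressor when a
# constant is requested with exactly one difference, computes AIC/AICc/BIC
# (adding the CSS `offset` when approximation is used), and guards against
# unusable fits by setting ic = Inf for models with roots close to the unit
# circle. A hedged sketch of a direct call (internal, unexported API):
#   fit <- forecast:::myarima(AirPassengers, order = c(1, 1, 1),
#                             seasonal = c(0, 1, 1), constant = FALSE)
#   fit$ic  # IC used by the search; Inf marks a rejected model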
{ # Length of non-missing interior missing <- is.na(x) firstnonmiss <- head(which(!missing),1) lastnonmiss <- tail(which(!missing),1) n <- sum(!missing[firstnonmiss:lastnonmiss]) m <- frequency(x) use.season <- (sum(seasonal) > 0) & m > 0 diffs <- order[2] + seasonal[2] if(is.null(method)){ if (approximation) { method <- "CSS" } else { method <- "CSS-ML" } } if (diffs == 1 && constant) { xreg <- `colnames<-`(cbind(drift = 1:length(x), xreg), make.unique(c("drift", if(is.null(colnames(xreg)) && !is.null(xreg)) rep("", NCOL(xreg)) else colnames(xreg)))) if (use.season) { suppressWarnings(fit <- try(stats::arima(x = x, order = order, seasonal = list(order = seasonal, period = m), xreg = xreg, method = method, ...), silent = TRUE)) } else { suppressWarnings(fit <- try(stats::arima(x = x, order = order, xreg = xreg, method = method, ...), silent = TRUE)) } } else { if (use.season) { suppressWarnings(fit <- try(stats::arima(x = x, order = order, seasonal = list(order = seasonal, period = m), include.mean = constant, method = method, xreg = xreg, ...), silent = TRUE)) } else { suppressWarnings(fit <- try(stats::arima(x = x, order = order, include.mean = constant, method = method, xreg = xreg, ...), silent = TRUE)) } } if (is.null(xreg)) { nxreg <- 0 } else { nxreg <- ncol(as.matrix(xreg)) } if (!is.element("try-error", class(fit))) { nstar <- n - order[2] - seasonal[2] * m if (diffs == 1 && constant) { # fitnames <- names(fit$coef) # fitnames[length(fitnames)-nxreg] <- "drift" # names(fit$coef) <- fitnames fit$xreg <- xreg } npar <- length(fit$coef[fit$mask]) + 1 if (method == "CSS") { fit$aic <- offset + nstar * log(fit$sigma2) + 2 * npar } if (!is.na(fit$aic)) { fit$bic <- fit$aic + npar * (log(nstar) - 2) fit$aicc <- fit$aic + 2 * npar * (npar + 1) / (nstar - npar - 1) fit$ic <- switch(ic, bic = fit$bic, aic = fit$aic, aicc = fit$aicc) } else { fit$aic <- fit$bic <- fit$aicc <- fit$ic <- Inf } # Adjust residual variance to be unbiased fit$sigma2 <- sum(fit$residuals ^ 2, na.rm = TRUE) / (nstar - npar + 1) # Check for unit roots minroot <- 2 if (order[1] + seasonal[1] > 0) { testvec <- fit$model$phi k <- abs(testvec) > 1e-8 if (sum(k) > 0) { last.nonzero <- max(which(k)) } else { last.nonzero <- 0 } if (last.nonzero > 0) { testvec <- testvec[1:last.nonzero] proots <- try(polyroot(c(1,-testvec))) if (!is.element("try-error", class(proots))) { minroot <- min(minroot, abs(proots)) } else fit$ic <- Inf } } if (order[3] + seasonal[3] > 0 & fit$ic < Inf) { testvec <- fit$model$theta k <- abs(testvec) > 1e-8 if (sum(k) > 0) { last.nonzero <- max(which(k)) } else { last.nonzero <- 0 } if (last.nonzero > 0) { testvec <- testvec[1:last.nonzero] proots <- try(polyroot(c(1,testvec))) if (!is.element("try-error", class(proots))) { minroot <- min(minroot, abs(proots)) } else fit$ic <- Inf } } # Avoid bad models if (minroot < 1 + 1e-2 | checkarima(fit)) { fit$ic <- Inf } if (trace) { cat("\n", arima.string(fit, padding = TRUE), ":", fit$ic) } fit$xreg <- xreg return(structure(fit, class = c("forecast_ARIMA", "ARIMA", "Arima"))) } else { # Catch errors due to unused arguments if (length(grep("unused argument", fit)) > 0L) { stop(fit[1]) } if (trace) { cat("\n ARIMA(", order[1], ",", order[2], ",", order[3], ")", sep = "") if (use.season) { cat("(", seasonal[1], ",", seasonal[2], ",", seasonal[3], ")[", m, "]", sep = "") } if (constant && (order[2] + seasonal[2] == 0)) { cat(" with non-zero mean") } else if (constant && (order[2] + seasonal[2] == 1)) { cat(" with drift ") } else if (!constant && (order[2] + 
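# (failure branch, continued) When stats::arima() errors, the trace output
# below reconstructs the attempted specification by hand, reports an IC of
# Inf, and the function returns list(ic = Inf) so the caller simply moves on
# to the next candidate.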
seasonal[2] == 0)) { cat(" with zero mean ") } else { cat(" ") } cat(" :", Inf) } return(list(ic = Inf)) } } newmodel <- function(p, d, q, P, D, Q, constant, results) { n <- nrow(results) for (i in 1:n) { if(!all(is.na(results[i, seq(7)]))) { if (all(c(p, d, q, P, D, Q, constant) == results[i, 1:7])) { return(FALSE) } } } return(TRUE) } arima.string <- function(object, padding=FALSE) { order <- object$arma[c(1, 6, 2, 3, 7, 4, 5)] m <- order[7] result <- paste("ARIMA(", order[1], ",", order[2], ",", order[3], ")", sep = "") if (m > 1 && sum(order[4:6]) > 0) { result <- paste(result, "(", order[4], ",", order[5], ",", order[6], ")[", m, "]", sep = "") } if (padding && m > 1 && sum(order[4:6]) == 0) { result <- paste(result, " ", sep = "") if (m <= 9) { result <- paste(result, " ", sep = "") } else if (m <= 99) { result <- paste(result, " ", sep = "") } else { result <- paste(result, " ", sep = "") } } if (!is.null(object$xreg)) { if (NCOL(object$xreg) == 1 && is.element("drift", names(object$coef))) { result <- paste(result, "with drift ") } else { result <- paste("Regression with", result, "errors") } } else { if (is.element("constant", names(object$coef)) || is.element("intercept", names(object$coef))) { result <- paste(result, "with non-zero mean") } else if (order[2] == 0 && order[5] == 0) { result <- paste(result, "with zero mean ") } else { result <- paste(result, " ") } } if (!padding) { # Strip trailing spaces result <- gsub("[ ]*$", "", result) } return(result) } #' @export summary.Arima <- function(object, ...) { class(object) <- c("summary.Arima", class(object)) object } #' @export print.summary.Arima <- function(x, ...) { NextMethod() cat("\nTraining set error measures:\n") print(accuracy(x)) } # Check that Arima object has positive coefficient variances without returning warnings checkarima <- function(object) { suppressWarnings(test <- any(is.nan(sqrt(diag(object$var.coef))))) return(test) } #' Is an object constant? #' #' Returns true if the object's numerical values do not vary. #' #' #' @param x object to be tested #' @export is.constant <- function(x) { x <- as.numeric(x) y <- rep(x[1], length(x)) return(isTRUE(all.equal(x, y))) } forecast/R/arima.R0000644000176200001440000010152214147003516013477 0ustar liggesuserssearch.arima <- function(x, d=NA, D=NA, max.p=5, max.q=5, max.P=2, max.Q=2, max.order=5, stationary=FALSE, ic=c("aic", "aicc", "bic"), trace=FALSE, approximation=FALSE, xreg=NULL, offset=offset, allowdrift=TRUE, allowmean=TRUE, parallel=FALSE, num.cores=2, ...) { # dataname <- substitute(x) ic <- match.arg(ic) m <- frequency(x) allowdrift <- allowdrift & (d + D) == 1 allowmean <- allowmean & (d + D) == 0 maxK <- (allowdrift | allowmean) # Choose model orders # Serial - technically could be combined with the code below if (parallel == FALSE) { best.ic <- Inf for (i in 0:max.p) { for (j in 0:max.q) { for (I in 0:max.P) { for (J in 0:max.Q) { if (i + j + I + J <= max.order) { for (K in 0:maxK) { fit <- myarima( x, order = c(i, d, j), seasonal = c(I, D, J), constant = (K == 1), trace = trace, ic = ic, approximation = approximation, offset = offset, xreg = xreg, ... 
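# Exhaustive search (auto.arima(..., stepwise = FALSE) reaches this code):
# every combination with i + j + I + J <= max.order, plus an optional
# constant (K), is fitted and the IC-best fit is retained. With
# parallel = TRUE the same grid is farmed out to workers via mclapply().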
) if (fit$ic < best.ic) { best.ic <- fit$ic bestfit <- fit constant <- (K == 1) } } } } } } } } else if (parallel == TRUE) { to.check <- WhichModels(max.p, max.q, max.P, max.Q, maxK) par.all.arima <- function(l, max.order) { .tmp <- UndoWhichModels(l) i <- .tmp[1] j <- .tmp[2] I <- .tmp[3] J <- .tmp[4] K <- .tmp[5] == 1 if (i + j + I + J <= max.order) { fit <- myarima( x, order = c(i, d, j), seasonal = c(I, D, J), constant = (K == 1), trace = trace, ic = ic, approximation = approximation, offset = offset, xreg = xreg, ... ) } if (exists("fit")) { return(cbind(fit, K)) } else { return(NULL) } } if (is.null(num.cores)) { num.cores <- detectCores() } all.models <- mclapply(X = to.check, FUN = par.all.arima, max.order=max.order) # Removing null elements all.models <- all.models[!sapply(all.models, is.null)] # Choosing best model best.ic <- Inf for (i in 1:length(all.models)) { if (!is.null(all.models[[i]][, 1]$ic) && all.models[[i]][, 1]$ic < best.ic) { bestfit <- all.models[[i]][, 1] best.ic <- bestfit$ic constant <- unlist(all.models[[i]][1, 2]) } } class(bestfit) <- c("forecast_ARIMA", "ARIMA", "Arima") } if (exists("bestfit")) { # Refit using ML if approximation used for IC if (approximation) { if (trace) { cat("\n\n Now re-fitting the best model(s) without approximations...\n") } # constant <- length(bestfit$coef) - ncol(xreg) > sum(bestfit$arma[1:4]) newbestfit <- myarima( x, order = bestfit$arma[c(1, 6, 2)], seasonal = bestfit$arma[c(3, 7, 4)], constant = constant, ic, trace = FALSE, approximation = FALSE, xreg = xreg, ... ) if (newbestfit$ic == Inf) { # Final model is lousy. Better try again without approximation # warning("Unable to fit final model using maximum likelihood. AIC value approximated") bestfit <- search.arima( x, d = d, D = D, max.p = max.p, max.q = max.q, max.P = max.P, max.Q = max.Q, max.order = max.order, stationary = stationary, ic = ic, trace = trace, approximation = FALSE, xreg = xreg, offset = offset, allowdrift = allowdrift, allowmean = allowmean, parallel = parallel, num.cores = num.cores, ... ) bestfit$ic <- switch(ic, bic = bestfit$bic, aic = bestfit$aic, aicc = bestfit$aicc) } else { bestfit <- newbestfit } } } else { stop("No ARIMA model able to be estimated") } bestfit$x <- x bestfit$series <- deparse(substitute(x)) bestfit$ic <- NULL bestfit$call <- match.call() if (trace) { cat("\n\n") } return(bestfit) } # Set up seasonal dummies using Fourier series SeasDummy <- function(x) { n <- length(x) m <- frequency(x) if (m == 1) { stop("Non-seasonal data") } tt <- 1:n fmat <- matrix(NA, nrow = n, ncol = 2 * m) for (i in 1:m) { fmat[, 2 * i] <- sin(2 * pi * i * tt / m) fmat[, 2 * (i - 1) + 1] <- cos(2 * pi * i * tt / m) } return(fmat[, 1:(m - 1)]) } # CANOVA-HANSEN TEST # Largely based on uroot package code for CH.test() SD.test <- function(wts, s=frequency(wts)) { if (any(is.na(wts))) { stop("Series contains missing values. 
Please choose order of seasonal differencing manually.") } if (s == 1) { stop("Not seasonal data") } t0 <- start(wts) N <- length(wts) if (N <= s) { stop("Insufficient data") } frec <- rep(1, as.integer((s + 1) / 2)) ltrunc <- round(s * (N / 100) ^ 0.25) R1 <- as.matrix(SeasDummy(wts)) lmch <- lm(wts ~ R1, na.action = na.exclude) # run the regression : y(i)=mu+f(i)'gamma(i)+e(i) Fhat <- Fhataux <- matrix(nrow = N, ncol = s - 1) for (i in 1:(s - 1)) Fhataux[, i] <- R1[, i] * residuals(lmch) for (i in 1:N) { for (n in 1:(s - 1)) Fhat[i, n] <- sum(Fhataux[1:i, n]) } wnw <- 1 - seq(1, ltrunc, 1) / (ltrunc + 1) Ne <- nrow(Fhataux) Omnw <- 0 for (k in 1:ltrunc) Omnw <- Omnw + (t(Fhataux)[, (k + 1):Ne] %*% Fhataux[1:(Ne - k), ]) * wnw[k] Omfhat <- (crossprod(Fhataux) + Omnw + t(Omnw)) / Ne sq <- seq(1, s - 1, 2) frecob <- rep(0, s - 1) for (i in 1:length(frec)) { if (frec[i] == 1 && i == as.integer(s / 2)) { frecob[sq[i]] <- 1 } if (frec[i] == 1 && i < as.integer(s / 2)) { frecob[sq[i]] <- frecob[sq[i] + 1] <- 1 } } a <- length(which(frecob == 1)) A <- matrix(0, nrow = s - 1, ncol = a) j <- 1 for (i in 1:(s - 1)) { if (frecob[i] == 1) { A[i, j] <- 1 ifelse(frecob[i] == 1, j <- j + 1, j <- j) } } tmp <- t(A) %*% Omfhat %*% A problems <- (min(svd(tmp)$d) < .Machine$double.eps) if (problems) { stL <- 0 } else { stL <- (1 / N ^ 2) * sum(diag(solve(tmp, tol = 1e-25) %*% t(A) %*% t(Fhat) %*% Fhat %*% A)) } return(stL) } #' Forecasting using ARIMA or ARFIMA models #' #' Returns forecasts and other information for univariate ARIMA models. #' #' For \code{Arima} or \code{ar} objects, the function calls #' \code{\link[stats]{predict.Arima}} or \code{\link[stats]{predict.ar}} and #' constructs an object of class "\code{forecast}" from the results. For #' \code{fracdiff} objects, the calculations are all done within #' \code{\link{forecast.fracdiff}} using the equations given by Peiris and #' Perera (1988). #' #' @param object An object of class "\code{Arima}", "\code{ar}" or #' "\code{fracdiff}". Usually the result of a call to #' \code{\link[stats]{arima}}, \code{\link{auto.arima}}, #' \code{\link[stats]{ar}}, \code{\link{arfima}} or #' \code{\link[fracdiff]{fracdiff}}. #' @param h Number of periods for forecasting. If \code{xreg} is used, \code{h} #' is ignored and the number of forecast periods is set to the number of rows #' of \code{xreg}. #' @param level Confidence level for prediction intervals. #' @param fan If \code{TRUE}, level is set to \code{seq(51,99,by=3)}. This is #' suitable for fan plots. #' @param xreg Future values of any regression variables (for class \code{Arima} #' objects only). A numerical vector or matrix of external regressors; it should not be a data frame. #' @param bootstrap If \code{TRUE}, then prediction intervals are computed using #' simulation with resampled errors. #' @param npaths Number of sample paths used in computing simulated prediction #' intervals when \code{bootstrap=TRUE}. #' @param ... Other arguments. #' @inheritParams forecast #' #' @return An object of class "\code{forecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{forecast.Arima}. 
#' #' An object of class "\code{forecast}" is a list containing at least the #' following elements: \item{model}{A list containing information about the #' fitted model} \item{method}{The name of the forecasting method as a #' character string} \item{mean}{Point forecasts as a time series} #' \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper #' limits for prediction intervals} \item{level}{The confidence values #' associated with the prediction intervals} \item{x}{The original time series #' (either \code{object} itself or the time series used to create the model #' stored as \code{object}).} \item{residuals}{Residuals from the fitted model. #' That is x minus fitted values.} \item{fitted}{Fitted values (one-step #' forecasts)} #' @author Rob J Hyndman #' @seealso \code{\link[stats]{predict.Arima}}, #' \code{\link[stats]{predict.ar}}, \code{\link{auto.arima}}, #' \code{\link{Arima}}, \code{\link[stats]{arima}}, \code{\link[stats]{ar}}, #' \code{\link{arfima}}. #' @references Peiris, M. & Perera, B. (1988), On prediction with fractionally #' differenced ARIMA models, \emph{Journal of Time Series Analysis}, #' \bold{9}(3), 215-220. #' @keywords ts #' @aliases forecast.forecast_ARIMA #' @examples #' fit <- Arima(WWWusage,c(3,1,0)) #' plot(forecast(fit)) #' #' library(fracdiff) #' x <- fracdiff.sim( 100, ma=-.4, d=.3)$series #' fit <- arfima(x) #' plot(forecast(fit,h=30)) #' #' @export forecast.Arima <- function(object, h=ifelse(object$arma[5] > 1, 2 * object$arma[5], 10), level=c(80, 95), fan=FALSE, xreg=NULL, lambda=object$lambda, bootstrap=FALSE, npaths=5000, biasadj=NULL, ...) { # Check whether there are non-existent arguments all.args <- names(formals()) user.args <- names(match.call())[-1L] # including arguments passed to 3 dots check <- user.args %in% all.args if (!all(check)) { error.args <- user.args[!check] warning(sprintf("The non-existent %s arguments will be ignored.", error.args)) } use.drift <- is.element("drift", names(object$coef)) x <- object$x <- getResponse(object) usexreg <- (use.drift | is.element("xreg", names(object))) # | use.constant) if (!is.null(xreg) && usexreg) { if(!is.numeric(xreg)) stop("xreg should be a numeric matrix or a numeric vector") xreg <- as.matrix(xreg) if (is.null(colnames(xreg))) { colnames(xreg) <- if (ncol(xreg) == 1) "xreg" else paste("xreg", 1:ncol(xreg), sep = "") } origxreg <- xreg <- as.matrix(xreg) h <- nrow(xreg) } else { if(!is.null(xreg)){ warning("xreg not required by this model, ignoring the provided regressors") xreg <- NULL } origxreg <- NULL } if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 & max(level) < 1) { level <- 100 * level } else if (min(level) < 0 | max(level) > 99.99) { stop("Confidence limit out of range") } } level <- sort(level) if (use.drift) { n <- length(x) #missing <- is.na(x) #firstnonmiss <- head(which(!missing),1) #n <- length(x) - firstnonmiss + 1 if (!is.null(xreg)) { xreg <- `colnames<-`(cbind(drift = (1:h) + n, xreg), make.unique(c("drift", if(is.null(colnames(xreg)) && !is.null(xreg)) rep("", NCOL(xreg)) else colnames(xreg)))) } else { xreg <- `colnames<-`(as.matrix((1:h) + n), "drift") } } # Check if data is constant if (!is.null(object$constant)) { if (object$constant) { pred <- list(pred = rep(x[1], h), se = rep(0, h)) } else { stop("Strange value of object$constant") } } else if (usexreg) { if (is.null(xreg)) { stop("No regressors provided") } object$call$xreg <- getxreg(object) if (NCOL(xreg) != NCOL(object$call$xreg)) { stop("Number of regressors does not match 
fitted model") } if(!identical(colnames(xreg), colnames(object$call$xreg))){ warning("xreg contains different column names from the xreg used in training. Please check that the regressors are in the same order.") } pred <- predict(object, n.ahead = h, newxreg = xreg) } else { pred <- predict(object, n.ahead = h) } # Fix time series characteristics if there are missing values at end of series, or if tsp is missing from pred if (!is.null(x)) { tspx <- tsp(x) nx <- max(which(!is.na(x))) if (nx != length(x) | is.null(tsp(pred$pred)) | is.null(tsp(pred$se))) { tspx[2] <- time(x)[nx] start.f <- tspx[2] + 1 / tspx[3] pred$pred <- ts(pred$pred, frequency = tspx[3], start = start.f) pred$se <- ts(pred$se, frequency = tspx[3], start = start.f) } } # Compute prediction intervals nint <- length(level) if (bootstrap) # Compute prediction intervals using simulations { sim <- matrix(NA, nrow = npaths, ncol = h) for (i in 1:npaths) sim[i, ] <- simulate(object, nsim = h, bootstrap = TRUE, xreg = origxreg, lambda = lambda) lower <- apply(sim, 2, quantile, 0.5 - level / 200, type = 8) upper <- apply(sim, 2, quantile, 0.5 + level / 200, type = 8) if (nint > 1L) { lower <- t(lower) upper <- t(upper) } else { lower <- matrix(lower, ncol = 1) upper <- matrix(upper, ncol = 1) } } else { # Compute prediction intervals via the normal distribution lower <- matrix(NA, ncol = nint, nrow = length(pred$pred)) upper <- lower for (i in 1:nint) { qq <- qnorm(0.5 * (1 + level[i] / 100)) lower[, i] <- pred$pred - qq * pred$se upper[, i] <- pred$pred + qq * pred$se } if (!is.finite(max(upper))) { warning("Upper prediction intervals are not finite.") } } colnames(lower) <- colnames(upper) <- paste(level, "%", sep = "") lower <- ts(lower) upper <- ts(upper) tsp(lower) <- tsp(upper) <- tsp(pred$pred) method <- arima.string(object, padding = FALSE) seriesname <- if (!is.null(object$series)) { object$series } else if (!is.null(object$call$x)) { object$call$x } else { object$call$y } fits <- fitted.Arima(object) if (!is.null(lambda) & is.null(object$constant)) { # Back-transform point forecasts and prediction intervals pred$pred <- InvBoxCox(pred$pred, lambda, biasadj, pred$se^2) if (!bootstrap) { # Bootstrapped intervals already back-transformed lower <- InvBoxCox(lower, lambda) upper <- InvBoxCox(upper, lambda) } } return(structure( list( method = method, model = object, level = level, mean = future_msts(x, pred$pred), lower = future_msts(x, lower), upper = future_msts(x, upper), x = x, series = seriesname, fitted = copy_msts(x, fits), residuals = copy_msts(x, residuals.Arima(object)) ), class = "forecast" )) } #' @export forecast.forecast_ARIMA <- forecast.Arima #' @rdname forecast.Arima #' @export forecast.ar <- function(object, h=10, level=c(80, 95), fan=FALSE, lambda=NULL, bootstrap=FALSE, npaths=5000, biasadj=FALSE, ...) 
{ x <- getResponse(object) pred <- predict(object, newdata = x, n.ahead = h) if (bootstrap) # Recompute se using simulations { sim <- matrix(NA, nrow = npaths, ncol = h) for (i in 1:npaths) sim[i, ] <- simulate(object, nsim = h, bootstrap = TRUE) pred$se <- apply(sim, 2, sd) } if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 & max(level) < 1) { level <- 100 * level } else if (min(level) < 0 | max(level) > 99.99) { stop("Confidence limit out of range") } } nint <- length(level) lower <- matrix(NA, ncol = nint, nrow = length(pred$pred)) upper <- lower for (i in 1:nint) { qq <- qnorm(0.5 * (1 + level[i] / 100)) lower[, i] <- pred$pred - qq * pred$se upper[, i] <- pred$pred + qq * pred$se } colnames(lower) <- colnames(upper) <- paste(level, "%", sep = "") method <- paste("AR(", object$order, ")", sep = "") f <- frequency(x) res <- residuals.ar(object) fits <- fitted.ar(object) if (!is.null(lambda)) { pred$pred <- InvBoxCox(pred$pred, lambda, biasadj, list(level = level, upper = upper, lower = lower)) lower <- InvBoxCox(lower, lambda) upper <- InvBoxCox(upper, lambda) fits <- InvBoxCox(fits, lambda) x <- InvBoxCox(x, lambda) } return(structure( list( method = method, model = object, level = level, mean = future_msts(x, pred$pred), lower = future_msts(x, lower), upper = future_msts(x, upper), x = x, series = deparse(object$call$x), fitted = copy_msts(x, fits), residuals = copy_msts(x, res) ) , class = "forecast" )) } # Find xreg matrix in an Arima object getxreg <- function(z) { # Look in the obvious place first if (is.element("xreg", names(z))) { return(z$xreg) } # Next most obvious place else if (is.element("xreg", names(z$coef))) { return(eval.parent(z$coef$xreg)) } # Now check under call else if (is.element("xreg", names(z$call))) { return(eval.parent(z$call$xreg)) } # Otherwise check if it exists else { armapar <- sum(z$arma[1:4]) + is.element("intercept", names(z$coef)) npar <- length(z$coef) if (npar > armapar) { stop("It looks like you have an xreg component but I don't know what it is.\n Please use Arima() or auto.arima() rather than arima().") } else { # No xreg used return(NULL) } } } #' Errors from a regression model with ARIMA errors #' #' Returns time series of the regression residuals from a fitted ARIMA model. #' #' This is a deprecated function #' which is identical to \code{\link{residuals.Arima}(object, type="regression")} #' Regression residuals are equal to the original data #' minus the effect of any regression variables. If there are no regression #' variables, the errors will be identical to the original series (possibly #' adjusted to have zero mean). #' #' @param object An object containing a time series model of class \code{Arima}. #' @return A \code{ts} object #' @author Rob J Hyndman #' @seealso \code{\link{residuals.Arima}}. #' @keywords ts #' #' @export arima.errors <- function(object) { message("Deprecated, use residuals.Arima(object, type='regression') instead") residuals.Arima(object, type = "regression") } # Return one-step fits #' h-step in-sample forecasts for time series models. #' #' Returns h-step forecasts for the data used in fitting the model. #' #' @param object An object of class "\code{Arima}", "\code{bats}", #' "\code{tbats}", "\code{ets}" or "\code{nnetar}". #' @param h The number of steps to forecast ahead. #' @param ... Other arguments. #' @return A time series of the h-step forecasts. 
#' @author Rob J Hyndman & Mitchell O'Hara-Wild #' @seealso \code{\link{forecast.Arima}}, \code{\link{forecast.bats}}, #' \code{\link{forecast.tbats}}, \code{\link{forecast.ets}}, #' \code{\link{forecast.nnetar}}, \code{\link{residuals.Arima}}, #' \code{\link{residuals.bats}}, \code{\link{residuals.tbats}}, #' \code{\link{residuals.ets}}, \code{\link{residuals.nnetar}}. #' @keywords ts #' @aliases fitted.forecast_ARIMA #' @examples #' fit <- ets(WWWusage) #' plot(WWWusage) #' lines(fitted(fit), col='red') #' lines(fitted(fit, h=2), col='green') #' lines(fitted(fit, h=3), col='blue') #' legend("topleft", legend=paste("h =",1:3), col=2:4, lty=1) #' #' @export fitted.Arima <- function(object, h = 1, ...) { if (h == 1) { x <- getResponse(object) if (!is.null(object$fitted)) { return(object$fitted) } else if (is.null(x)) { # warning("Fitted values are unavailable due to missing historical data") return(NULL) } else if (is.null(object$lambda)) { return(x - object$residuals) } else { fits <- InvBoxCox(BoxCox(x, object$lambda) - object$residuals, object$lambda, NULL, object$sigma2) return(fits) } } else { return(hfitted(object = object, h = h, FUN = "Arima", ...)) } } #' @export fitted.forecast_ARIMA <- fitted.Arima # Calls arima from stats package and adds data to the returned object # Also allows refitting to new data # and drift terms to be included. #' Fit ARIMA model to univariate time series #' #' Largely a wrapper for the \code{\link[stats]{arima}} function in the stats #' package. The main difference is that this function allows a drift term. It #' is also possible to take an ARIMA model from a previous call to \code{Arima} #' and re-apply it to the data \code{y}. #' #' See the \code{\link[stats]{arima}} function in the stats package. #' #' @aliases print.ARIMA summary.Arima as.character.Arima #' #' @param y a univariate time series of class \code{ts}. #' @param order A specification of the non-seasonal part of the ARIMA model: #' the three components (p, d, q) are the AR order, the degree of differencing, #' and the MA order. #' @param seasonal A specification of the seasonal part of the ARIMA model, #' plus the period (which defaults to frequency(y)). This should be a list with #' components order and period, but a specification of just a numeric vector of #' length 3 will be turned into a suitable list with the specification as the #' order. #' @param xreg Optionally, a numerical vector or matrix of external regressors, which #' must have the same number of rows as y. It should not be a data frame. #' @param include.mean Should the ARIMA model include a mean term? The default #' is \code{TRUE} for undifferenced series, \code{FALSE} for differenced ones #' (where a mean would not affect the fit nor predictions). #' @param include.drift Should the ARIMA model include a linear drift term? #' (i.e., a linear regression with ARIMA errors is fitted.) The default is #' \code{FALSE}. #' @param include.constant If \code{TRUE}, then \code{include.mean} is set to #' be \code{TRUE} for undifferenced series and \code{include.drift} is set to #' be \code{TRUE} for differenced series. Note that if there is more than one #' difference taken, no constant is included regardless of the value of this #' argument. This is deliberate as otherwise quadratic and higher order #' polynomial trends would be induced. #' @param method Fitting method: maximum likelihood or minimize conditional #' sum-of-squares. 
The default (unless there are missing values) is to use #' conditional-sum-of-squares to find starting values, then maximum likelihood. #' @param model Output from a previous call to \code{Arima}. If model is #' passed, this same model is fitted to \code{y} without re-estimating any #' parameters. #' @param x Deprecated. Included for backwards compatibility. #' @param ... Additional arguments to be passed to \code{\link[stats]{arima}}. #' @inheritParams forecast #' @return See the \code{\link[stats]{arima}} function in the stats package. #' The additional objects returned are \item{x}{The time series data} #' \item{xreg}{The regressors used in fitting (when relevant).} #' \item{sigma2}{The bias adjusted MLE of the innovations variance.} #' #' @export #' #' @author Rob J Hyndman #' @seealso \code{\link{auto.arima}}, \code{\link{forecast.Arima}}. #' @keywords ts #' @examples #' library(ggplot2) #' WWWusage %>% #' Arima(order=c(3,1,0)) %>% #' forecast(h=20) %>% #' autoplot #' #' # Fit model to first few years of AirPassengers data #' air.model <- Arima(window(AirPassengers,end=1956+11/12),order=c(0,1,1), #' seasonal=list(order=c(0,1,1),period=12),lambda=0) #' plot(forecast(air.model,h=48)) #' lines(AirPassengers) #' #' # Apply fitted model to later data #' air.model2 <- Arima(window(AirPassengers,start=1957),model=air.model) #' #' # Forecast accuracy measures on the log scale. #' # in-sample one-step forecasts. #' accuracy(air.model) #' # out-of-sample one-step forecasts. #' accuracy(air.model2) #' # out-of-sample multi-step forecasts #' accuracy(forecast(air.model,h=48,lambda=NULL), #' log(window(AirPassengers,start=1957))) #' Arima <- function(y, order=c(0, 0, 0), seasonal=c(0, 0, 0), xreg=NULL, include.mean=TRUE, include.drift=FALSE, include.constant, lambda=model$lambda, biasadj=FALSE, method=c("CSS-ML", "ML", "CSS"), model=NULL, x=y, ...) { # Remove outliers near ends # j <- time(x) # x <- na.contiguous(x) # if(length(j) != length(x)) # warning("Missing values encountered. 
Using longest contiguous portion of time series") series <- deparse(substitute(y)) origx <- y if (!is.null(lambda)) { x <- BoxCox(x, lambda) lambda <- attr(x, "lambda") if (is.null(attr(lambda, "biasadj"))) { attr(lambda, "biasadj") <- biasadj } } if (!is.null(xreg)) { if(!is.numeric(xreg)) stop("xreg should be a numeric matrix or a numeric vector") xreg <- as.matrix(xreg) if (is.null(colnames(xreg))) { colnames(xreg) <- if (ncol(xreg) == 1) "xreg" else paste("xreg", 1:ncol(xreg), sep = "") } } if (!is.list(seasonal)) { if (frequency(x) <= 1) { seasonal <- list(order = c(0, 0, 0), period = NA) if(length(x) <= order[2L]) stop("Not enough data to fit the model") } else { seasonal <- list(order = seasonal, period = frequency(x)) if(length(x) <= order[2L] + seasonal$order[2L] * seasonal$period) stop("Not enough data to fit the model") } } if (!missing(include.constant)) { if (include.constant) { include.mean <- TRUE if ((order[2] + seasonal$order[2]) == 1) { include.drift <- TRUE } } else { include.mean <- include.drift <- FALSE } } if ((order[2] + seasonal$order[2]) > 1 & include.drift) { warning("No drift term fitted as the order of difference is 2 or more.") include.drift <- FALSE } if (!is.null(model)) { tmp <- arima2(x, model, xreg = xreg, method = method) xreg <- tmp$xreg tmp$fitted <- NULL tmp$lambda <- model$lambda } else { if (include.drift) { xreg <- `colnames<-`(cbind(drift = 1:length(x), xreg), make.unique(c("drift", if(is.null(colnames(xreg)) && !is.null(xreg)) rep("", NCOL(xreg)) else colnames(xreg)))) } if (is.null(xreg)) { suppressWarnings(tmp <- stats::arima(x = x, order = order, seasonal = seasonal, include.mean = include.mean, method = method, ...)) } else { suppressWarnings(tmp <- stats::arima(x = x, order = order, seasonal = seasonal, xreg = xreg, include.mean = include.mean, method = method, ...)) } } # Calculate aicc & bic based on tmp$aic npar <- length(tmp$coef[tmp$mask]) + 1 missing <- is.na(tmp$residuals) firstnonmiss <- head(which(!missing),1) lastnonmiss <- tail(which(!missing),1) n <- sum(!missing[firstnonmiss:lastnonmiss]) nstar <- n - tmp$arma[6] - tmp$arma[7] * tmp$arma[5] tmp$aicc <- tmp$aic + 2 * npar * (nstar / (nstar - npar - 1) - 1) tmp$bic <- tmp$aic + npar * (log(nstar) - 2) tmp$series <- series tmp$xreg <- xreg tmp$call <- match.call() tmp$lambda <- lambda tmp$x <- origx # Adjust residual variance to be unbiased if (is.null(model)) { tmp$sigma2 <- sum(tmp$residuals ^ 2, na.rm = TRUE) / (nstar - npar + 1) } out <- structure(tmp, class = c("forecast_ARIMA", "ARIMA", "Arima")) out$fitted <- fitted.Arima(out) out$series <- series return(out) } # Refits the model to new data x arima2 <- function(x, model, xreg, method) { use.drift <- is.element("drift", names(model$coef)) use.intercept <- is.element("intercept", names(model$coef)) use.xreg <- is.element("xreg", names(model$call)) sigma2 <- model$sigma2 if (use.drift) { driftmod <- lm(model$xreg[, "drift"] ~ I(time(as.ts(model$x)))) newxreg <- driftmod$coefficients[1] + driftmod$coefficients[2] * time(as.ts(x)) if (!is.null(xreg)) { origColNames <- colnames(xreg) xreg <- cbind(newxreg, xreg) colnames(xreg) <- c("drift", origColNames) } else { xreg <- as.matrix(data.frame(drift = newxreg)) } use.xreg <- TRUE } if (!is.null(model$xreg)) { if (is.null(xreg)) { stop("No regressors provided") } if (ncol(xreg) != ncol(model$xreg)) { stop("Number of regressors does not match fitted model") } } if (model$arma[5] > 1 & sum(abs(model$arma[c(3, 4, 7)])) > 0) # Seasonal model { if (use.xreg) { refit <- Arima( x, order = 
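# arima2() refits a previously estimated model to new data with every
# coefficient held fixed (fixed = model$coef), so only states and residuals
# are recomputed. stats::arima() stores orders as
# arma = c(p, q, P, Q, m, d, D); hence arma[c(1, 6, 2)] recovers (p, d, q)
# and arma[c(3, 7, 4)] recovers (P, D, Q). A hedged sketch of the exported
# route into this code (mirrors the Arima() examples):
#   fit1 <- Arima(window(AirPassengers, end = 1956 + 11/12),
#                 order = c(0,1,1), seasonal = c(0,1,1))
#   fit2 <- Arima(window(AirPassengers, start = 1957), model = fit1)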
model$arma[c(1, 6, 2)], seasonal = list(order = model$arma[c(3, 7, 4)], period = model$arma[5]), include.mean = use.intercept, xreg = xreg, method = method, fixed = model$coef ) } else { refit <- Arima( x, order = model$arma[c(1, 6, 2)], seasonal = list(order = model$arma[c(3, 7, 4)], period = model$arma[5]), include.mean = use.intercept, method = method, fixed = model$coef ) } } else if (length(model$coef) > 0) # Nonseasonal model with some parameters { if (use.xreg) { refit <- Arima(x, order = model$arma[c(1, 6, 2)], xreg = xreg, include.mean = use.intercept, method = method, fixed = model$coef) } else { refit <- Arima(x, order = model$arma[c(1, 6, 2)], include.mean = use.intercept, method = method, fixed = model$coef) } } else { # No parameters refit <- Arima(x, order = model$arma[c(1, 6, 2)], include.mean = FALSE, method = method) } refit$var.coef <- matrix(0, length(refit$coef), length(refit$coef)) if (use.xreg) { # Why is this needed? refit$xreg <- xreg } refit$sigma2 <- sigma2 return(refit) } # Modified version of function print.Arima from stats package #' @export print.forecast_ARIMA <- function(x, digits=max(3, getOption("digits") - 3), se=TRUE, ...) { cat("Series:", x$series, "\n") cat(arima.string(x, padding = FALSE), "\n") if (!is.null(x$lambda)) { cat("Box Cox transformation: lambda=", x$lambda, "\n") } # cat("\nCall:", deparse(x$call, width.cutoff=75), "\n", sep=" ") # if(!is.null(x$xreg)) # { # cat("\nRegression variables fitted:\n") # xreg <- as.matrix(x$xreg) # for(i in 1:3) # cat(" ",xreg[i,],"\n") # cat(" . . .\n") # for(i in 1:3) # cat(" ",xreg[nrow(xreg)-3+i,],"\n") # } if (length(x$coef) > 0) { cat("\nCoefficients:\n") coef <- round(x$coef, digits = digits) if (se && NROW(x$var.coef)) { ses <- rep.int(0, length(coef)) ses[x$mask] <- round(sqrt(diag(x$var.coef)), digits = digits) coef <- matrix(coef, 1L, dimnames = list(NULL, names(coef))) coef <- rbind(coef, s.e. = ses) } # Change intercept to mean if no regression variables j <- match("intercept", colnames(coef)) if (is.null(x$xreg) & !is.na(j)) { colnames(coef)[j] <- "mean" } print.default(coef, print.gap = 2) } cm <- x$call$method cat("\nsigma^2 = ", format(x$sigma2, digits = digits), sep="") if(!is.na(x$loglik)) cat(": log likelihood = ", format(round(x$loglik, 2L)), sep = "") cat("\n") if (is.null(cm) || cm != "CSS") { if(!is.na(x$aic)) { npar <- length(x$coef[x$mask]) + 1 missing <- is.na(x$residuals) firstnonmiss <- head(which(!missing),1) lastnonmiss <- tail(which(!missing),1) n <- lastnonmiss - firstnonmiss + 1 nstar <- n - x$arma[6] - x$arma[7] * x$arma[5] bic <- x$aic + npar * (log(nstar) - 2) aicc <- x$aic + 2 * npar * (nstar / (nstar - npar - 1) - 1) cat("AIC=", format(round(x$aic, 2L)), sep = "") cat(" AICc=", format(round(aicc, 2L)), sep = "") cat(" BIC=", format(round(bic, 2L)), "\n", sep = "") } } invisible(x) } #' Return the order of an ARIMA or ARFIMA model #' #' Returns the order of a univariate ARIMA or ARFIMA model. #' #' #' @param object An object of class \dQuote{\code{Arima}}, dQuote\code{ar} or #' \dQuote{\code{fracdiff}}. Usually the result of a call to #' \code{\link[stats]{arima}}, \code{\link{Arima}}, \code{\link{auto.arima}}, #' \code{\link[stats]{ar}}, \code{\link{arfima}} or #' \code{\link[fracdiff]{fracdiff}}. #' @return A numerical vector giving the values \eqn{p}, \eqn{d} and \eqn{q} of #' the ARIMA or ARFIMA model. 
For a seasonal ARIMA model, the returned vector #' contains the values \eqn{p}, \eqn{d}, \eqn{q}, \eqn{P}, \eqn{D}, \eqn{Q} and #' \eqn{m}, where \eqn{m} is the period of seasonality. #' @author Rob J Hyndman #' @seealso \code{\link[stats]{ar}}, \code{\link{auto.arima}}, #' \code{\link{Arima}}, \code{\link[stats]{arima}}, \code{\link{arfima}}. #' @keywords ts #' @examples #' WWWusage %>% auto.arima %>% arimaorder #' #' @export arimaorder <- function(object) { if (is.element("Arima", class(object))) { order <- object$arma[c(1, 6, 2, 3, 7, 4, 5)] names(order) <- c("p", "d", "q", "P", "D", "Q", "Frequency") seasonal <- (order[7] > 1 & sum(order[4:6]) > 0) if (seasonal) { return(order) } else { return(order[1:3]) } } else if (is.element("ar", class(object))) { return(c("p" = object$order, "d" = 0, "q" = 0)) } else if (is.element("fracdiff", class(object))) { return(c("p" = length(object$ar), "d" = object$d, "q" = length(object$ma))) } else { stop("object not of class Arima, ar or fracdiff") } } #' @export as.character.Arima <- function(x, ...) { arima.string(x, padding = FALSE) } #' @rdname is.ets #' @export is.Arima <- function(x) { inherits(x, "Arima") } #' @rdname fitted.Arima #' @export fitted.ar <- function(object, ...) { getResponse(object) - residuals(object) } forecast/R/bootstrap.R0000644000176200001440000000720314003673410014421 0ustar liggesusers# Bootstrap functions # Trend estimation like STL without seasonality. # Non-robust version tl <- function(x, ...) { x <- as.ts(x) tspx <- tsp(x) n <- length(x) tt <- 1:n fit <- supsmu(tt, x) out <- ts(cbind(trend = fit$y, remainder = x - fit$y)) tsp(out) <- tsp(x) out <- structure(list(time.series = out), class = "stl") return(out) } # Function to return some bootstrap samples of x # based on LPB lpb <- function(x, nsim=100) { n <- length(x) meanx <- mean(x) y <- x - meanx gamma <- wacf(y, lag.max = n)$acf[, , 1] s <- length(gamma) Gamma <- matrix(1, s, s) d <- row(Gamma) - col(Gamma) for (i in 1:(s - 1)) Gamma[d == i | d == (-i)] <- gamma[i + 1] L <- t(chol(Gamma)) W <- solve(L) %*% matrix(y, ncol = 1) out <- ts(L %*% matrix(sample(W, n * nsim, replace = TRUE), nrow = n, ncol = nsim) + meanx) tsp(out) <- tsp(x) return(out) } # Bootstrapping time series (based on Bergmeir et al., 2016, IJF paper) # Author: Fotios Petropoulos MBB <- function(x, window_size) { bx <- array(0, (floor(length(x) / window_size) + 2) * window_size) for (i in 1:(floor(length(x) / window_size) + 2)) { c <- sample(1:(length(x) - window_size + 1), 1) bx[((i - 1) * window_size + 1):(i * window_size)] <- x[c:(c + window_size - 1)] } start_from <- sample(0:(window_size - 1), 1) + 1 bx[start_from:(start_from + length(x) - 1)] } #' Box-Cox and Loess-based decomposition bootstrap. #' #' Generates bootstrapped versions of a time series using the Box-Cox and #' Loess-based decomposition bootstrap. #' #' The procedure is described in Bergmeir et al. Box-Cox decomposition is #' applied, together with STL or Loess (for non-seasonal time series), and the #' remainder is bootstrapped using a moving block bootstrap. #' #' @param x Original time series. #' @param num Number of bootstrapped versions to generate. #' @param block_size Block size for the moving block bootstrap. #' @return A list with bootstrapped versions of the series. The first series in #' the list is the original series. #' @author Christoph Bergmeir, Fotios Petropoulos #' @seealso \code{\link{baggedETS}}. #' @references Bergmeir, C., R. J. Hyndman, and J. M. Benitez (2016). 
Bagging #' Exponential Smoothing Methods using STL Decomposition and Box-Cox #' Transformation. International Journal of Forecasting 32, 303-312. #' @keywords ts #' @examples #' bootstrapped_series <- bld.mbb.bootstrap(WWWusage, 100) #' #' @export bld.mbb.bootstrap <- function(x, num, block_size=NULL) { if(length(x) <= 1L) return(rep(list(x), num)) freq <- frequency(x) if(length(x) <= 2*freq) freq <- 1L if (is.null(block_size)) { block_size <- ifelse(freq > 1, 2 * freq, min(8, floor(length(x) / 2))) } xs <- list() xs[[1]] <- x # the first series is the original one if (num > 1) { # Box-Cox transformation if (min(x) > 1e-6) { lambda <- BoxCox.lambda(x, lower = 0, upper = 1) } else { lambda <- 1 } x.bc <- BoxCox(x, lambda) lambda <- attr(x.bc, "lambda") if (freq > 1) { # STL decomposition x.stl <- stl(ts(x.bc, frequency = freq), "per")$time.series seasonal <- x.stl[, 1] trend <- x.stl[, 2] remainder <- x.stl[, 3] } else { # Loess trend <- 1:length(x) suppressWarnings( x.loess <- loess(ts(x.bc, frequency = 1) ~ trend, span = 6 / length(x), degree = 1) ) seasonal <- rep(0, length(x)) trend <- x.loess$fitted remainder <- x.loess$residuals } } # Bootstrap some series, using MBB for (i in 2:num) { xs[[i]] <- ts(InvBoxCox(trend + seasonal + MBB(remainder, block_size), lambda)) tsp(xs[[i]]) <- tsp(x) } xs } forecast/NEWS.md0000644000176200001440000012707014166714071013174 0ustar liggesusers# forecast 8.16 (10 January 2022) * Fixed `tslm()` incorrectly applying Box-Cox transformations when an `mts` is provided to the `data` argument (#886). * Set D=0 when auto.arima applied to series with 2m observations or fewer. * Improved performance of parallel search of ARIMA models (jonlachmann, #891). * Fixed scoping of functions used in `ggAcf()` (#896). * Fixed checks on xreg in `simulate.Arima()` (#818) * Improved docs and bug fixes. # forecast 8.15 (1 June 2021) * Changed `summary()` methods to defer console output until `print()` * Changed default `s.window` values for `mstl()`, `stlf()` and `stlm()`. The new defaults are based on extensive empirical testing. # forecast 8.14 (11 March 2021) * Changed default `BoxCox(lambda = "auto")` lower bound to -0.9. * Use better variance estimates for `ets()` bias adjustments. * Improved robustness of `autoplot.seas()` for non-seasonal decomposition. * Fixed scoping of parameters in `auto.arima(parallel = TRUE)` (#874). * Fixed handling of `xreg` in `tsCV()`. # forecast 8.13 (11 September 2020) * Fixed forecasts from Arima with drift with initial NAs. * Fixed season colours in `gglagplot()` to match y-axis (original data). * Fixed facet order for classical decomposition `autoplot()` * Fixed `summary()` erroring for `tslm()` models containing NA values. # forecast 8.12 (21 March 2020) * Fixed bias adjusted forecast mean for ARIMA forecasts. * Improved naming of `accuracy()` generic formals. * Fixed seasonal periods for `taylor` dataset. # forecast 8.11 (9 February 2020) * The axes for `gglagplot()` have been reversed for consistency with `stats::lag.plot()`. 
# forecast 8.10 (4 December 2019) * Updates to remove new CRAN errors * Bug fixes # forecast 8.9 (22 August 2019) * Updates for CRAN policies on Suggests packages * Bug fixes # forecast 8.8 (22 July 2019) * Updates for compatibility with fable * Bug fixes # forecast 8.7 (26 Apr 2019) * Documentation improvements * Bug fixes # forecast 8.6 (15 Apr 2019) * Reduced conflicts with tidy forecasting packages * Forecast autoplots now use same colour shading as autolayer() and geom_forecast() * Documentation improvements * Bug fixes # forecast 8.5 (18 Jan 2019) * Updated tsCV() to handle exogenous regressors * Reimplemented lagwalk methods (naive, snaive, rwf) for speed improvements * Added support for passing arguments to auto.arima() unit root tests * Improved auto.arima() stepwise search algorithm * Documentation improvements * Bug fixes # forecast 8.4 (20 June 2018) * Added modelAR(), generalising nnetar() to support user-defined functions * Added na.action argument to ets * Documentation improvements * Bug fixes # forecast 8.3 (5 April 2018) * Added mstl() to handle multiple seasonal decomposition * stlf(), stlm(), tsoutliers() and tsclean() all now use mstl(). * Updated tsCV() to handle multiple horizons * Switched unit root tests in ndiffs() to use urca package * Added ocsb.test() * Changed method for choosing D in auto.arima() to a measure of seasonal strength. * Added baggedModel() function to generalize baggedETS() * Added bootstrapped PI to more functions * Allowed lambda='auto' for all functions with lambda argument. * Updated author list to include all major contributors * Documentation improvements * Bug fixes # forecast 8.2 (25 September 2017) * Added pkgdown site * Added rolling window option to tsCV() * Improved robustness to short time series and missing values * Bug fixes # forecast 8.1 (17 June 2017) * Added as.character.ets, as.character.bats, as.character.tbats * Made gghistogram() and checkresiduals() robust to missing values * All documentation now generated using roxygen * Improved documentation for many functions * Added autoplot.msts() and autolayer.msts() * Added as.character methods for many models to generate model names * Added as.ts.forecast() * autoplot method for bats/tbats models * Better ARIMA trace output * Made accuracy an S3 method * Bug fixes # forecast 8.0 (22 February 2017) * Added tips to start up message * Added pipe operator * Added tsCV() and CVar() functions * Added baggedETS() * Added head.ts() and tail.ts(), so head and tail now work properly on ts objects. * Added gghistogram() and checkresiduals() * Added ggseasonplot with polar coordinates * Modified defaults for gglagplot * Added autolayer.ts * Added type argument to residuals() for different types of residuals * Added support for seas objects from the seasonal package * Component extraction for seasonal decomposition methods * Range bars for decomposition autoplots * Added autoplot.StructTS() * Added vignette based on 2008 JSS article by Hyndman and Khandakar * Improved ggplot functions * mforecast objects re-structured * Added as.data.frame.mforecast() * autoplot functions now exported * Refit support for arfima() and stlm() * Better bias adjustment support after Box-Cox transformation * print.ARIMA has better labelling of constants * Bug fixes * Removed fortify method for forecast objects # forecast 7.3 (12 October 2016) * Added prediction intervals and simulation for nnetar(). 
* Documentation improvement * Bug fixes # forecast 7.2 (8 September 2016) * Faceting for autoplot.mts * Box-Cox support for ses, holt, hw * ets() now works for tiny time series * Added h-step fitted values in fitted() function. * seasonal adjustment added to thetaf() * y now the standard first argument in all modelling functions * Added truncate argument to auto.arima() * seasadj() now an S3 method * series with frequency < 1 and non-integer seasonality now handled better * ggplot2 theme support * Added gglagplot, gglagchull * Arima() and auto.arima() now allow any argument to be passed to stats::arima(). * Bug fixes and speed improvements # forecast 7.1 (14 April 2016) * Fixed bug in auto.arima where the Box-Cox transformation was sometimes applied twice * Improved axes for ggseasonalplot() * Improved tslm() to avoid some problems finding data * nnetar() updated to allow subsets() * Modified initial values for ets() * Improved unit tests to avoid deprecated functions and to avoid data from fpp * Removed fpp from Suggests list # forecast 7.0 (3 April 2016) * Added ggplot2 graphics * Bias adjustment option added for all functions that allow Box-Cox transformations * Added Ccf function, and rewrote Acf to handle multivariate series. * tslm() completely rewritten to be more robust and to handle fourier terms more easily * Support for multivariate linear models added * subset.ts() more robust, and captures some errors. * Added xreg argument to nnetar() * Improved labels in seasonplot() * More unit tests added * Documentation improvements * Bug fixes # forecast 6.2 (20 October 2015) * Many unit tests added using testthat. * Fixed bug in ets when very short seasonal series were passed in a data frame. * Fixed bug in nnetar where the initial predictor vector was reversed. * Corrected model name returned in nnetar(). * Fixed bug in accuracy() when non-integer seasonality used. * Made auto.arima() robust to non-integer seasonality. * Fixed bug in auto.arima where allowmean was ignored when stepwise=FALSE. * Improved robustness of forecast.ets() for explosive models with multiplicative trends. * Exogenous variables now passed to VAR forecasts * Increased maximum nmse in ets() to 30. * Made tsoutliers() more robust to weak seasonality * Changed tsoutliers() to use supsmu on non-seasonal and seasonally adjusted data. * Fixed bug in tbats() when seasonal period 1 is a small multiple of seasonal period 2. * Other bug fixes # forecast 6.1 (11 May 2015) * Made auto.arima more robust # forecast 6.0 (9 May 2015) * Modified dm.test to give error when variance is zero * Corrected help file for splinef(). * Fixed typo in accuracy help file regarding RMSE * Fixed bug in accuracy() which occurred with Arima and ets objects. * Fixed arima.errors() to handle Box-Cox transformed models. * Modified auto.arima() to be stricter on near-unit-roots. * Added allowmean argument in auto.arima(). * Improved handling of constant series in Arima() and forecast.Arima(). * Added plot.Arima() and plot.ar() functions. * Added as.character.Arima() * Captured problem in bats/tbats where data are constant. * Modified TBATS and BATS estimation to avoid occasional instabilities. * Fixed bug in forecasts from bats which labelled them as TBATS. * Added allow.multiplicative.trend argument to ets(). * Set allow.multiplictive.trend=FALSE in stlf(), stlm() and forecast.ts(). * Simplified arguments in stlf(). 
* Added taperedacf and taperedpacf functions * Added functions for bootstrapping time series # forecast 5.9 (26 February 2015) * Improved documentation of accuracy() function. * Fixed occasional bug in accuracy() when test set is a single observation. * Improved Acf() to give better handling of horizontal axis for seasonal data or when ... is passed. * Removed print.Arima and predict.Arima and added print.ARIMA * method argument now passed when re-fitting an ARIMA model. * Fixed error when CH test applied to short series # forecast 5.8 (6 January 2015) * Fixed bug in versions of R before 3.10 when using fourier and fourierf. * Made BoxCox.lambda() robust to missing values. # forecast 5.7 (17 December 2014) * Fixed bug in tbats/bats where optional arguments were not being passed to auto.arima(). * Revised fourier() and fourierf() to avoid large orders, and to avoid zero columns. * Improved accuracy of fourier() and fourierf(), while simplifying the code. * Removed duplicate columns returned by fourier/fourierf with multiple seasonal periods. * Corrected some bugs in simulate.Arima for models involving xreg. * Centred simulations from simulate.Arima for non-stationary models by conditioning on first observation. * Added findfrequency() function. * Fixed error in computed residuals from forecast.stl(). * Improved handling of very short series in auto.arima(). * Fixed error in forecasting with additive damped models. Damping previously applied only from second forecast horizon. * Fixed misuse of abs() in two places in C code. * Added na.action argument to Acf() and fixed na.action argument in tsdisplay(). # forecast 5.6 (23 September 2014) * Improved tbats and bats by ensuring ARMA coefficients are not close to the boundary of invertibility and stationarity. * Improved nsdiffs() handling of degenerate series (e.g., all zeros). * Improved forecast.ar() when function buried within other functions. * Improved handling of degenerate ARIMA models when xreg used. * More robust ets() initialization. * Fixed problem in na.interp() with seasonal data having frequency <= 5. * Removed undocumented option to use Rmalschains for optimization of ets(). # forecast 5.5 (12 August 2014) * Improved documentation for croston() * Added stlm() and forecast.stlm() functions, and added forecastfunction argument as a way of specifying a forecast method in stlf() and forecast.stl(). * Improved forecast.ar() so that it is more likely to work if ar() and forecast.ar() are embedded within other functions. * Improved handling of ARIMA models with seasonality greater than 48 * Improved handling of some degenerate regression models in nsdiffs() * Changed AIC for poor models from 1e20 to Inf. * Update fourier() and fourierf() to work with msts object. * Added a new argument find.frequency to forecast.ts(). * Added new arguments d and D to accuracy() for MASE. * Corrected bugs in accuracy(). * Better handling of regression models with perfect fit in auto.arima(). * Fixed bug in tbats.components() when there are no seasonal components. # forecast 5.4 (8 May 2014) * Fixed bug in forecast.tbats() and forecast.bats() when ts.frequency does not match seasonal.periods. * Fixed bug in getResponse.lm() when there's a logged dependent variable. * Modified ets() to avoid problems when data contains large numbers. * Modified ets() to produce forecasts when the data are constant. * Improved arima.errors() to find xreg more often, and to return an error if it can't be found. 

# forecast 5.4 (8 May 2014)

* Fixed bug in forecast.tbats() and forecast.bats() when ts.frequency does not match seasonal.periods.
* Fixed bug in getResponse.lm() when there's a logged dependent variable.
* Modified ets() to avoid problems when the data contain large numbers.
* Modified ets() to produce forecasts when the data are constant.
* Improved arima.errors() to find xreg more often, and to return an error if it can't be found.

# forecast 5.3 (24 March 2014)

* Unit tests added
* Fixed bug in zzhw() which reversed the sign of the residuals.
* Updated help file for CV() to specify it is only leave-one-out.
* Fixed guer.cv() to allow non-integer periods without warning.
* Added use.initial.values argument in ets().
* Added arimaorder() function.
* Modified warnings suppression by using suppressWarnings() throughout.

# forecast 5.2 (25 February 2014)

* Changed default number of cores to 2 for all functions that use parallel processing.
* Removed remaining call to bats() from examples that are run.

# forecast 5.1 (8 February 2014)

* Fixed bug in tsoutliers() and tsclean() with very short seasonal series.
* Fixed bug in Arima() when the seasonal order is specified numerically instead of via a list.
* Removed dimension attribute from output of arima.errors()
* Improved handling of "test" in accuracy()
* Changed parallel processing to parLapply for auto.arima()
* Added timeDate dependency to avoid errors in easter() and link to Rcpp >= 0.11.0.

# forecast 5.0 (17 January 2014)

* Added argument model to dshw().
* Added bizdays() and easter() for calendar variables.
* Added arguments max.D and max.d to auto.arima(), ndiffs() and nsdiffs().
* Made several functions more robust to zoo objects.
* Corrected an error in the calculation of AICc when using CV().
* Made the minimum default p in nnetar() equal to 1.
* Added tsoutliers() and tsclean() for identifying and replacing outliers
* Improved na.interp() to handle seasonality, and added argument lambda to na.interp()
* Added robust option to forecast.ts() to allow outliers and missing values
* Improved output from snaive() and naive() to better reflect user expectations
* Allowed Acf() to handle missing values by using na.contiguous
* Changed default information criterion in ets() to AICc.
* Removed drift term in Arima() when d+D>1.
* Added bootstrap option to forecast.Arima()
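
A short sketch of the outlier tools added in 5.0 directly above; `gold` (daily morning gold prices, containing missing values and at least one obvious outlier) ships with the package:

```r
library(forecast)

out <- tsoutliers(gold)   # indices of suspected outliers with suggested replacements
cleaned <- tsclean(gold)  # replace outliers and interpolate missing values
```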

# forecast 4.8 (30 September 2013)

* Fixed bug in rwf() that was introduced in v4.7

# forecast 4.7 (26 September 2013)

* Added forecast.forecast() to simply return the object that is passed.
* Removed leading zero in package number, i.e., 4.7 instead of 4.07.
* Better handling of nearly constant time series and nearly linear time series
* Improved handling of missing values in rwf()
* Corrected fitted values and residuals in meanf() for time series data
* bats() and tbats() now handle missing values in the same way as ets(), i.e., using the longest contiguous portion.
* Better handling of very short time series
* Initial states for ets() modified for very short time series (less than 3 years).
* nsdiffs() with the CH test now handles degenerate cases without returning an error.
* nnetar() now handles missing values
* Fixed bug in forecast.varest() so residuals and fitted values are computed correctly.
* Added accuracy() calculation for VAR models
* Fixed a bug in simulate.fracdiff() when future=TRUE. Sometimes the future argument was being ignored.

# forecast 4.06 (30 June 2013)

* accuracy() was returning a MAPE and MPE 100 times too large for in-sample errors.

# forecast 4.05 (19 June 2013)

* Fixed bug in hw() so it works when initial="simple"
* Allowed bats() and tbats() to take non-positive values.
* ets() now calls optim directly via C code, making ets() run much faster.
* Added Rmalschains as a possible optimizer in ets(). Not documented.
* Modified forecast.lm so it is more likely that the original data are stored in the returned object.
* Corrected bug in forecast.Arima that occurred when a Box-Cox transformation was used with bootstrap=TRUE.
* accuracy() updated so that it gives more information, and returns a matrix of both test and training measures.
* Corrected training error measures for splinef() forecasts.

# forecast 4.04 (22 April 2013)

* Added ylim argument to Acf()
* Avoided clash with the signal package when using auto.arima().
* Fixed problem in plot.forecast() when all historical data are NA or when there are no available historical data.
* forecast.Arima() is now a little more robust if a zoo object is passed instead of a ts object.
* CV() now handles missing values in the residuals.
* Fixed bug in holt() and hw() so that the printed model no longer contains missing values.

# forecast 4.03 (14 March 2013)

* forecast.lm now guesses the variable name if there is only one predictor variable.
* Removed error trap in forecast.lm when no xreg variables are passed, as it was catching legitimate calls.

# forecast 4.02 (6 March 2013)

* Fixed error in the prediction intervals returned by forecast.ets() when simulation was used and a Box-Cox transformation was specified.
* Fixed bug in accuracy() when a numerical f vector was passed.
* Fixed man file for the Diebold-Mariano test.
* Corrected references in nsdiffs() help page.
* Added warning to nsdiffs() when the series is too short for seasonal differencing.
* Fixed problem in getResponse.Arima when the Arima object was created by stats::arima() from within a function.
* Added tbats.components() and extended seasadj() to allow tbats objects.
* Added undocumented functions for forecasting, printing and plotting output from vars::VAR.

# forecast 4.01 (22 January 2013)

* Error now trapped when newxreg variables are not passed to forecast.lm()
* Corrected help file for dshw() to remove references to prediction intervals.
* Improved help file for dm.test() to give more information about the alternative hypotheses.
* Improved dm.test() performance for small samples by using a t-distribution instead of the normal.
* Modified bats() and tbats() examples to follow CRAN policies on parallel processing.
* Moved some packages from Depends to Imports.
* Added getResponse() function to return the historical time series from various time series model objects.
* Modified accuracy() to use getResponse().
* Allowed user-generated innovations in simulate.ets(), simulate.Arima(), etc.
* Allowed xreg argument in forecast.stl() and stlf() when an ARIMA model is used.
* Removed reliance on caret, and associated fitted and residuals functions.

# forecast 4.00 (27 November 2012)

* More robust handling of degenerate ARIMA models.
* New defaults for shaded colors used for prediction intervals in plots.
* auto.arima() now remembers the name of the series when a Box-Cox transformation is used.
* New function nnetar() for automatic neural network forecasting of time series.
* arfima() now tries harder to ensure the ARMA part is stationary.
* ts control added for forecasts of linear models in forecast.lm().
* Fixed bug in bats() which caused an error when use.box.cox=FALSE and use.trend=FALSE.
* Added residuals and fitted methods for train and avNNet objects from the caret package.
* accuracy() can now figure out overlapping times for x and f.
* rwf() now handles missing values.
* Revised ses(), holt() and hw() so that they can optionally use traditional initialization.
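
A minimal sketch of the neural network interface added in 4.00 directly above; `nnetar()` fits an NNAR(p, k) model (p lagged inputs, k hidden nodes), choosing the lag order automatically when it is not supplied:

```r
library(forecast)

fit <- nnetar(lynx)
fc <- forecast(fit, h = 14)
```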

# forecast 3.25 (11 September 2012)

* Fixed bug in simulate.Arima.
* Improved handling of short seasonal time series in auto.arima().
* Added seasonal argument to auto.arima().
* Fixed bug in splinef() and added gcv method for estimating the smoothing parameter.

# forecast 3.24 (23 July 2012)

* Fixed bug in auto.arima() introduced in v3.23 which meant an ARIMA(0,0,0) model was returned about half the time.

# forecast 3.23 (18 July 2012)

* Fixed bug in arfima() which meant the drange argument was being ignored.
* Extended auto.arima() so it returns something sensible when the data are constant.

# forecast 3.22 (7 June 2012)

* Increased maximum forecast horizon for ets models from 2000 to unlimited.
* Corrected bug in Arima(). Previously include.constant=FALSE was ignored.
* Some corrections to bats and tbats.
* Modified parallel implementation in auto.arima for Windows.

# forecast 3.21 (26 April 2012)

* Fixed bug in auto.arima() when lambda is non-zero and stepwise is FALSE.
* Fixed bug in auto.arima() in selecting d when D>0.
* Fixed bug in ets() when the seasonal period is less than 1.
* Turned off warnings in auto.arima() and ets() when the seasonal period is less than 1.
* Added plotting methods for bats and tbats objects.
* Changed default forecast horizons for bats and tbats objects.
* Modified bats and tbats so they now use seasonal.periods when ts and msts objects are being modelled.

# forecast 3.20 (2 April 2012)

* Fixed bugs in forecast.lm().
* Improved handling of newdata in forecast.lm() to provide more meaningful error messages.
* Fixed bug in dm.test() that occurred when errors were very small.

# forecast 3.19 (22 February 2012)

* Improved plotting of forecast objects from lm models
* Added MASE for lm forecasts, using in-sample mean forecasts for scaling.
* Modified definition of MASE for seasonal time series to use seasonal naive in-sample scaling.
* Modified meanf() to allow it to be used with cross-sectional data.
* Updated accuracy() to allow it to be used with cross-sectional data, lm forecasts and lm objects.

# forecast 3.18 (17 February 2012)

* Added method for plotting non-time-series forecasts to plot.forecast().
* Removed partial argument matching.
* Cleaned up some code, removing commented-out sections, etc.
* Added robust option to stlf().
* Added naive and rwdrift options to stlf() and forecast.stl().
* Improved handling of msts objects in BoxCox.lambda()
* Fixed some minor bugs in tbats() and bats()
* Improved speed of bats() and tbats().

# forecast 3.17 (2 February 2012)

* Improved forecast.lm() so it is more likely to find the original data from an lm object.
* Parallel processing now available in auto.arima() when stepwise=FALSE
* Default model selection in auto.arima() changed to AICc rather than AIC. This may affect model selection for very short time series.
* Maximum orders in auto.arima() now restricted to be less than 1/3 of the length of the data.

# forecast 3.16 (24 December 2011)

* Corrected problem with AIC computation in bats and tbats
* Fixed handling of non-seasonal data in bats()
* Changed dependency to R >= 2.14.0 in order to ensure the parallel package is available.

# forecast 3.15 (22 December 2011)

* New functions tbats() and forecast.tbats() for multiple seasonal time series modelling.
* bats() and tbats() use parallel processing when possible.
* Minor improvements to bats() and forecast.bats().
* decompose() removed as the function in the stats package has now been fixed.
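
A minimal sketch of the multiple-seasonal modelling added in 3.15 directly above; fitting `tbats()` to the half-hourly `taylor` series is slow but exercises both of its seasonal periods (48 and 336):

```r
library(forecast)

fit <- tbats(taylor)          # trigonometric seasonality, Box-Cox, ARMA errors
fc <- forecast(fit, h = 336)  # one week ahead at half-hourly resolution
```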

# forecast 3.14 (9 December 2011)

* Improved documentation for forecast.ts()
* Corrected bug in dshw() when applied to a non-ts object.
* Added error message when dshw() is applied to data containing zeros or negative values
* Added checks when dshw() is applied to time series with non-nested periods.
* Added msts object class for multiple seasonal time series
* Made the taylor data set an msts object.
* Added bats() function for multiple seasonal time series modelling
* Added forecast.bats() function for forecasting BATS models
* Byte compiling turned on
* Depending on Rcpp and RcppArmadillo to speed some code up.

# forecast 3.13 (19 November 2011)

* Bug fix for forecast.StructTS() due to changes in the StructTS object. The default h was being set to 0. Thanks to Tarmo Leinonen for reporting this problem.
* Bug fix for forecast.stl() where h longer than one seasonal period sometimes returned missing forecasts. Thanks to Kevin Burton for reporting this problem.
* forecast.stl() no longer allows a seasonal ETS model to be specified. Thanks to Stefano Birmani for the suggestion.

# forecast 3.12 (16 November 2011)

* Added option to control the ets model in stlf() and forecast.stl(). Thanks to Stefano Birmani for the suggestion.
* Reordered arguments for forecast.lm() and stlf() to be consistent with other forecast functions.
* Modified tslm() so that it is more likely to find the relevant data when it is not passed as an argument.
* Fixed bug in forecast.ets which returned all-zero forecasts for some models when the seasonal period > 24.

# forecast 3.11 (2 November 2011)

* Fixed bug in dshw() when the smallest period is odd

# forecast 3.10 (27 October 2011)

* Added lambda argument to naive() and snaive().
* Fixed bug in ets() with high frequency data.
* Fixed bug in rwf() where incorrect fitted values and residuals were sometimes returned.
* Modified number of lags displayed by default in tsdisplay().

# forecast 3.09 (18 October 2011)

* Fixed bug causing occasional problems in simulate.Arima() when the MA order is greater than 2 and future=TRUE.

# forecast 3.08 (15 October 2011)

* Bug fix in forecast.stl() which occurred when the forecast horizon is less than the seasonal period.
* Added lambda argument to forecast.stl().

# forecast 3.07 (11 October 2011)

* Bug fix in ets() concerning non-seasonal models and high-frequency data. It sometimes returned all forecasts equal to zero.

# forecast 3.06 (4 October 2011)

* Switched to useDynLib in preparation for R v2.14.0.

# forecast 3.05 (3 October 2011)

* Fixed bug in ets() which prevented non-seasonal models being fitted to high frequency data.

# forecast 3.04 (23 September 2011)

* Fixed bug when drift and xreg are used together in auto.arima() or Arima().

# forecast 3.03 (2 September 2011)

* Bug fix in dshw() which was using slightly incorrect seasonal estimates for the forecasts
* Bug fix in forecast.StructTS due to a change in the structure of the StructTS object.
* Better error capture in tslm when seasonal dummies are specified for non-seasonal data.
* Re-formatted some help files to prevent viewing problems with the pdf manual.

# forecast 3.02 (25 August 2011)

* Bug fixes

# forecast 3.00 (24 August 2011)

* Added Box-Cox parameter as an argument to Arima(), ets(), arfima(), stlf(), rwf(), meanf(), splinef()
* Added Box-Cox parameter as an argument to forecast.Arima(), forecast.ets(), forecast.fracdiff(), forecast.ar(), forecast.StructTS, forecast.HoltWinters().
* Removed lambda argument from plot.forecast() and accuracy().
* Added BoxCox.lambda() function to allow automatic choice of the Box-Cox parameter using Guerrero's method or the profile log likelihood method.
* Modified BoxCox and InvBoxCox to return missing values when lambda < 0 and data < 0.
* Added nsdiffs() function for selecting the number of seasonal differences.
* Modified selection of seasonal differencing in auto.arima().
* Better error message if a seasonal factor is used in tslm() with non-seasonal data.
* Added PI argument to forecast.ets() to allow only point forecasts to be computed.
* Added include.constant argument to Arima().
* Added subset.ts() function.
* Upgraded seasonplot() function to allow colors and to fix some bugs.
* Fixed fitted values returned by forecast.HoltWinters()
* Modified simulate.Arima() because of undocumented changes in the filter() function in the stats package.
* Changed residuals returned by splinef() to be ordinary residuals. The standardized residuals are now returned as standardizedresiduals.
* Added dshw() function for the double-seasonal Holt-Winters method based on Taylor (2003).
* Fixed further bugs in the decompose() function that caused the results to be incorrect with odd frequencies.

# forecast 2.19 (4 June 2011)

* Added xreg information to the object returned by auto.arima().
* Added Acf(), Pacf(), ma() and CV() functions.
* Fixed bugs in re-fitting ARIMA models to new data.

# forecast 2.18 (19 May 2011)

* Fixed bug in seasonplot() where year labels were sometimes incorrect.

# forecast 2.17 (6 April 2011)

* Modified simulate.Arima() to handle seasonal ARIMA models.
* Modified ets() to handle missing values. The largest continuous section of data is now modelled.
* Improved plot.forecast() to handle missing values at the end of the observed series.
* Added replacement decompose() to avoid truncation of the seasonal term and seasonally adjusted series.
* Fixed bug in seasadj() to handle multiplicative decomposition, and to avoid missing values at the ends.

# forecast 2.16 (6 March 2011)

* Changed the way missing values are handled in tslm()

# forecast 2.15 (5 March 2011)

* Added fourier(), fourierf(), tslm()
* Improved forecast.lm() to allow trend and seasonal terms.

# forecast 2.14 (4 March 2011)

* Added forecast.lm()
* Modified accuracy() and print.forecast() to allow non-time-series forecasts.
* Fixed visibility of stlf().

# forecast 2.13 (16 February 2011)

* Fixed bug in accuracy() when only 1 forecast is specified.
* Added forecast.stl() and stlf() functions
* Modified forecast.ts() to use stlf() if frequency > 12.
* Made BoxCox() and InvBoxCox() robust to negative values
* Fixed bug in simulate.Arima() when future=TRUE. There was a bias in the sample paths.

# forecast 2.12 (19 January 2011)

* Added naive() and snaive() functions.
* Improved handling of seasonal data with frequency < 1.
* Added lambda argument to accuracy().

# forecast 2.11 (5 November 2010)

* If MLE in arfima() fails (usually because the series is non-stationary), the LS estimate is now returned.

# forecast 2.10 (4 November 2010)

* Fixed bug in arfima() where the MA parameters were of the wrong sign if estim="mle" was chosen.
* arfima() now allows a sequence of missing values at the start or end of the series (but not within the series)

# forecast 2.09 (15 October 2010)

* Fixed bug in forecast.fracdiff() which caused an error when h=1.
* Added shadebars to plot.forecast().
* Fixed bug in plot.forecast() to allow plotting when h=1.

# forecast 2.08 (22 September 2010)

* Added "pp" (Phillips-Perron) test option for auto.arima() and ndiffs().
* Fixed bug in simulate.ets() which was causing problems when forecasting from some ETS models, including ETS(M,M,N).
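
A hedged illustration of the unit-root test options: `ndiffs()` selects the number of first differences under a chosen test; KPSS is the default, "pp" was added in 2.08 directly above, and "adf" in 1.25 further down:

```r
library(forecast)

ndiffs(WWWusage)                # KPSS test (default)
ndiffs(WWWusage, test = "pp")   # Phillips-Perron
ndiffs(WWWusage, test = "adf")  # augmented Dickey-Fuller
```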

# forecast 2.07 (9 September 2010)

* Fixed bug in simulate.Arima(). Previous sample paths when d=2 and future=TRUE were incorrect.
* Changed the way color is implemented in plot.forecast() to avoid colour changes when the graphics window is refreshed.

# forecast 2.06 (29 July 2010)

* Added MLE option for arfima().
* Added simulate.Arima(), simulate.ar() and simulate.fracdiff() (see the sketch below)

# forecast 2.05 (11 May 2010)

* Added arfima() and a forecast method to handle ARFIMA models from arfima() and fracdiff().
* Added residuals and fitted methods for fracdiff objects.

# forecast 2.04 (16 April 2010)

* Fixed bug in auto.arima() that occurred rarely.

# forecast 2.03 (23 December 2009)

* Added an option to auto.arima() to allow drift terms to be excluded from the models considered.

# forecast 2.02 (23 December 2009)

* Fixed bug in auto.arima() that occurred when there was an xreg but no drift, approximation=TRUE and stepwise=FALSE.

# forecast 2.01 (14 September 2009)

* Fixed bug in the time index of croston() output.
* Added further explanation about models to the croston() help file.

# forecast 2.00 (7 September 2009)

* Package removed from the forecasting bundle

# forecast 1.26 (29 August 2009)

* Added as.data.frame.forecast(). This allows write.table() to work for forecast objects.

# forecast 1.25 (22 July 2009)

* Added argument to auto.arima() and ndiffs() to allow the ADF test to be used instead of the KPSS test in selecting the number of differences.
* Added argument to plot.forecast() to allow different colors and line types when plotting prediction intervals.
* Modified forecast.ts() to give sensible results with a time series containing fewer than four observations.

# forecast 1.24 (9 April 2009)

* Fixed bug in dm.test() to avoid errors when there are missing values in the residuals.
* More informative error messages when auto.arima() fails to find a suitable model.

# forecast 1.23 (22 February 2009)

* Fixed bugs that meant xreg terms in auto.arima() sometimes caused errors when stepwise=FALSE.

# forecast 1.22 (30 January 2009)

* Fixed bug that meant regressor variables could not be used with seasonal time series in auto.arima().

# forecast 1.21 (16 December 2008)

* Fixed bugs introduced in v1.20.

# forecast 1.20 (14 December 2008)

* Updated auto.arima() to allow regression variables.
* Fixed a bug in print.Arima() which caused problems when the data were inside a data.frame.
* In forecast.Arima(), argument h is now set to the length of the xreg argument if it is not null.

# forecast 1.19 (7 November 2008)

* Updated Arima() to allow regression variables when refitting an existing model to new data.

# forecast 1.18 (6 November 2008)

* Bug fix in ets(): models with frequency less than 1 would cause R to hang.
* Bug fix in ets(): models with frequency greater than 12 would not fit due to parameters being out of range.
* Default lower and upper bounds on the smoothing parameters in ets() changed to 0.0001 and 0.9999 (instead of 0.01 and 0.99).

# forecast 1.17 (10 October 2008)

* Calculation of BIC did not account for the reduction in length of the series due to differencing. Now fixed in auto.arima() and in print.Arima().
* tsdiag() now works with ets objects.

# forecast 1.16 (29 September 2008)

* Another bug fix in auto.arima(). Occasionally the root checking would cause an error. The condition is now trapped.

# forecast 1.15 (16 September 2008)

* Bug fix in auto.arima(). The series wasn't always being stored as part of the return object when stepwise=FALSE.
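
A minimal sketch of the simulation interface from 2.06 above; with `future = TRUE`, `simulate.Arima()` draws sample paths that extend beyond the observed series, conditional on the fitted model:

```r
library(forecast)

fit <- auto.arima(WWWusage)
# Five simulated continuations of the series, ten steps each
paths <- replicate(5, simulate(fit, nsim = 10, future = TRUE))
```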

# forecast 1.14 (1 August 2008)

* The time series stored in M3 in the Mcomp package did not contain all the components listed in the help file. This problem has now been fixed.

# forecast 1.13 (16 June 2008)

* Bug in plot.ets() fixed so that plots of non-seasonal models for seasonal data now work.
* Warning added to ets() if the time series contains very large numbers (which can cause numerical problems). Anything up to 1,000,000 should be ok, but any larger and it is best to scale the series first.
* Fixed problem in forecast.HoltWinters() where the lower and upper limits were interchanged.

# forecast 1.12 (22 April 2008)

* Objects are now coerced to class ts in ets(). This allows it to work with zoo objects.
* A new function dm.test() has been added. This implements the Diebold-Mariano test for predictive accuracy (see the sketch below).
* Yet more bug fixes for auto.arima().

# forecast 1.11 (8 February 2008)

* Modifications to auto.arima() in the case where ML estimation does not work for the chosen model. Previously this would return no model. Now it returns the model estimated using CSS.
* AIC values reported in auto.arima() when trace=TRUE and approximation=TRUE are now comparable to the final AIC values.
* Addition of the expsmooth package.

# forecast 1.10 (21 January 2008)

* Fixed bug in seasadj() so it allows multiple seasonality
* Fixed another bug in print.Arima()
* Bug fixes in auto.arima(). It was sometimes returning a non-optimal model, and occasionally no model at all. Also, additional stationarity and invertibility testing is now done.

# forecast 1.09 (11 December 2007)

* A new argument 'restrict' has been added to ets() with default TRUE. If set to FALSE, then the unstable ETS models are also allowed.
* A bug in the print.Arima() function was fixed.

# forecast 1.08 (21 November 2007)

* AICc and BIC corrected. Previously I had not taken account of the sigma^2 parameter when computing the number of parameters.
* arima() function changed to Arima() to avoid the clash with the arima() function in the stats package.
* auto.arima now uses an approximation to the likelihood when selecting a model if the series has more than 100 observations or the seasonal period is greater than 12. This behaviour can be overridden via the approximation argument.
* A new function plot.ets() provides a decomposition plot of an ETS model.
* predict() is now an alias for forecast() wherever there is not an existing predict() method.
* The argument conf has been changed to level in all forecasting methods to be consistent with other R functions.
* The functions gof() and forecasterrors() have been replaced by accuracy(), which handles in-sample and out-of-sample forecast accuracy.
* The initialization method used for a non-seasonal ETS model applied to seasonal data was changed slightly.
* The following methods for ets objects were added: summary, coef and logLik.
* The following methods for Arima objects were added: summary.

# forecast 1.07 (25 July 2007)

* Bug fix in the summary of in-sample errors. For ets models with multiplicative errors, the reported in-sample values of MSE, MAPE, MASE, etc., in summary() and gof() were incorrect.
* ARIMA models with frequency greater than 49 now allowed. But there is no unit-root testing if the frequency is 50 or more, so be careful!
* Improvements in documentation.
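
A hedged sketch of the Diebold-Mariano test added in 1.12 above, comparing one-step residuals from two models fitted to the same series:

```r
library(forecast)

e1 <- residuals(ets(WWWusage))
e2 <- residuals(auto.arima(WWWusage))
dm.test(e1, e2, h = 1, alternative = "two.sided")
```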

# forecast 1.06 (15 June 2007)

* Bug fix in auto.arima(). It would not always respect the stated values of max.p, max.q, max.P and max.Q.
* The tseries package is now installed automatically along with the forecasting bundle, whereas previously it was only suggested.

# forecast 1.05 (28 May 2007)

* Introduced auto.arima() to provide a stepwise approach to ARIMA modelling. This is much faster than the old best.arima().
* The old grid-search method used by best.arima() is still available by using stepwise=FALSE when calling auto.arima().
* Automated choice of seasonal differences introduced in auto.arima().
* Some small changes to the starting values of ets() models.
* Fixed a bug in applying ets() to new data using a previously fitted model.

# forecast 1.04 (30 January 2007)

* Added include.drift to arima()
* Fixed bug in seasonal forecasting with ets()

# forecast 1.03 (20 October 2006)

* Fixed some DOS line feed problems that were bothering unix users.

# forecast 1.02 (12 October 2006)

* Added AICc option to ets() and best.arima().
* Corrected bug in the calculation of fitted values in ets models with multiplicative errors.

# forecast 1.01 (25 September 2006)

* Modified ndiffs() so that the maximum number of differences allowed is 2.

# forecast 1.0 (31 August 2006)

* Added MASE to gof().
* croston() now returns fitted values and residuals.
* arima() no longer allows linear trend + ARMA errors by default. Also, drift in non-stationary models can be turned off.
* This version is the first to be uploaded to CRAN.

# forecast 0.99992 (8 August 2006)

* Corrections to help files. No changes to functionality.

# forecast 0.99991 (2 August 2006)

* More bug fixes. ets now converges to a good model more often.

# forecast 0.9999 (1 August 2006)

* Mostly bug fixes.
* A few data sets have been moved from fma to forecast as they are not used in my book.
* ets is now considerably slower but gives better results. Full optimization is now the only option (which is what slows it down). I had too many problems with poor models when partial optimization was used. I'll work on speeding it up sometime, but this is not a high priority. It is fast enough for most use. If you really need to forecast 1000 series, run it overnight.
* In ets, I've experimented with new starting conditions for optimization and it seems to be fairly robust now.
* Multiplicative error models can no longer be applied to series containing zeros or negative values. However, the forecasts from these models are not constrained to be positive.

# forecast 0.999 (27 July 2006)

* The package has been turned into three packages forming a bundle. The functions and a few datasets are still in the forecast package. The data from Makridakis, Wheelwright and Hyndman (1998) is now in the fma package. The M-competition data is now in the Mcomp package. Both fma and Mcomp automatically load forecast.
* This is the first version available on all operating systems (not just Windows).
* pegels has been replaced by ets. ets only fits the model; it doesn't produce forecasts. To get forecasts, apply the forecast function to the ets object.
* ets has been completely rewritten, which makes it slower but much easier to maintain. Different boundary conditions and a different optimizer are used, so don't expect the results to be identical to what was done by the old pegels function. To get something like the results from the old pegels function, use forecast(ets()).
* simulate.ets() added to simulate from an ets model.
* Changed name of cars to auto to avoid a clash with the cars data in the datasets package.
* arima2 functionality is now handled by arima(), and pegels2 functionality is now handled by ets.
* best.arima now allows the option of BIC to be used for model selection.
* Croston's method added in function croston().
* ts.display renamed as tsdisplay
* mean.f changed to meanf, theta.f changed to thetaf, rw.f changed to rwf, seasonaldummy.f to seasonaldummyf, sindex.f to sindexf, and spline.f to splinef. These changes are to avoid potential problems if anyone introduces an 'f' class.

# forecast 0.994 (4 October 2004)

* Fixed bug in arima which caused predict() to sometimes fail when there was no xreg term.
* More bug fixes in handling regression terms in arima models.
* New print.Arima function for more informative output.

# forecast 0.993 (20 July 2004)

* Added forecast function for structural time series models obtained using StructTS().
* Changed default parameter space for pegels() to force admissibility.
* Added option to pegels() to allow restriction to models with finite forecast variance. This restriction is imposed by default.
* Fixed bug in arima.errors(). Changes made to arima() meant arima.errors() was often returning an error message.
* Added a namespace to the package, making fewer functions visible to the user.

# forecast 0.99 (21 May 2004)

* Added automatic selection of the order of differencing for best.arima.
* Added possibility of linear trend in arima models.
* In pegels(), option added to allow parameters of an exponential smoothing model to be in the 'admissible' (or invertible) region rather than within the usual (0,1) region.
* Fixed some bugs in pegels.
* Included all M1 and M3 data and some functions to subset and plot them.
* Note: This package will only work in R 1.9 or later.

# forecast 0.98 (23 August 2003)

* Added facilities in pegels:
    * It is now possible to specify particular values of the smoothing parameters rather than always use the optimized values. If none are specified, the optimal values are still estimated as before.
    * It is also possible to specify upper and lower bounds for each parameter separately.
* New function: theta.f. This implements the Theta method which did very well in the M3 competition.
* A few minor problems with pegels fixed, and a bug in forecast.plot that meant it didn't work when the series contained missing values.

# forecast 0.972 (11 July 2003)

* Small bug fix: pegels did not return the correct model when the model was partially specified.

# forecast 0.971 (10 July 2003)

* Minor fixes to make sure the package will work with R v1.6.x. No changes to functionality.

# forecast 0.97 (9 July 2003)

* Fully automatic forecasting based on the state space approach to exponential smoothing has now been added. For technical details, see Hyndman, Koehler, Snyder and Grose (2002).
* Local linear forecasting using cubic smoothing splines added. For technical details, see Hyndman, King, Pitrun and Billah (2002).

# forecast 0.96 (15 May 2003)

* Many functions rewritten to make use of methods and classes. Consequently several functions have had their names changed and many arguments have been altered. Please see the help files for details.
* Added functions forecast.Arima and forecast.ar
* Added functions gof and seasadj
* Fixed bug in plot.forecast. The starting date for the plot was sometimes incorrect.
* Added residuals components to rw.f and mean.f.
* Made several changes to ensure compatibility with R v1.7.0.
* Removed a work-around to fix a bug in the monthplot command present in R v<=1.6.2.
* Fixed the motel data set (columns were swapped)
";"ڥ(5>)"9@tHui,wZ5[U x5F)8ka6]萸Agkߝ،`Ѕ'bG^#C};dKJ JD ٩9qY(I~vtT7B5@\y|N@=BԣG8Wi(#" P9̽ sz[ԚmXrmí]]*sߟ3ژĤL yU74VW53p4v  Xy6 = p9< \0 f(ܺym6 #,&ZZ汙YR܄\$eduY}%6ⱂW?LLF:G4Owbt䕯-hm<xg_{Jmcsm{EVeZ)_}T'wg5>ptp쒖'<[ iRPp)Lmn0!9t}%΁/-ϱ*oPܟSwu]W_|7J.uND(Q1)f?9ֻ-?.)..-A LKl,2*1x6# S 5[$ a+9}0c_ /#PV^R %\Ƞ.1%*֍3Pw] gjt)`Bt_ y$!o +#ʇWz"DeKp8EvH|~i oKq_)9bGdMtbhڞe߭=0~ttAof&箯 ~?%&ECX#ǓwB5)Y䕉☈ֆ~s|-xSA >o߁K]W9W--ߜ[g?ZHFyaDk-Yb%ޚ]C-@aFzjk;b`PWP@,h𤀝OJw%4+UFmKv_8я.0ņ6'~γmhk1"0˞g;8;p{HG2u]8T\ֻh *sЫdb]<;cq}!=[wOœVΚK ~tMQqq eZe1uI`Fpi w8Ml6~7GƐTbŮPs8ą/齊=ž GzZ~=/ aO> stream xZKSs&bco$[HȻ]CF0y30?™YMUӃ#G3/̞G#bgDf.v~v-FO'UAN?ĮbWeF&7hlCX+Nw>Oc^qӳֳzӌqzC8\%?'ZsU\Xˮqբnf0R bndﲖ|ݤ.Bc/hygEKyvae%MqK<[Xyk͗}|̹d/qR ]KaI`obU.2vr#G&BU]oW:QyZӽ;[42κ+: O7-`Qx..XV/ƃ(S]]ͳU҈NzB H+h"Tet0VM\RUBѮO -ޓ}>!UDVqUO&,ʎlMz*xah"eeDw*6hgl, B7DZ.n[8@M…`GmwsD gظۈ Zx? xOp%–-|h>}"Zh)\$"()_sc,̘c7*L0,H/+*Z@B!yf\ ((^?q6G^d`)=h8\OY!~auB@ )IW86lY6 ju{`4.#R,'%KDH]R&֦XτbT:["(5@戼%wgWC d@dzK؟V+Н xXS ?U4tx7W 61K "iRpDW/"6<':gmF:زM͠H]yȕ629#;}I]hv0TA\bݼU'o 3"jmzTld9eKxr\ M1(0F+v1 yȰ6(Ky84&ߧQm((zH]=;9kT6 EGTңg/ ]Z.h|n0M]ٛvdi:!ͪ[0(}07vIӺ \/$(ꯌ…j\Fop6M;Uj;m%m޶$!(>1N \N A5n8L,%Q `ﺍPQnfE hYtAlC*0H9a9rRHnIT֕^Usb!(߬bݖā&pјF}tDQKrN|`@N'Ӣ9dI9N76*g1X`W%PGΏ .$ɤm$H>#6=(!fZ6I0DIJTgHtͬE9Ujw6b`P41%V¼YI:#c 䌞z:! *z;9t1]6Xkߝͦˎîy%a]ec*s}a2&sŔJaMUc'YBe:}Ю52L.:I:x/N('#tz beqViFat|H3k<ܻ(}q 5"IZI^BEj1l:j]$64XH7z.UޚՀiU#%b`=tx3\cKG" >eywܭ'q]$6ǽ;NDŽ{Ϛ_gtR~ՃNzTt׬=EQAѦ~@GCWҟm)È wޮ:|i*v(2mEVpjB5,+ϣ\*MN4op4'"C$04W9m$ +a*WќV]BDhplVZA ]"+EZmZW¸6VA:͊ Vz܃%hYV0~a_5'Av=i9DQa5 Pq-}yy2jCJH_)_H D\>Hԕ4Fa\PTm%P9C uk4l-i5i@ )JQ,wÐFgN:ұlWߩA n,ٯ='?(w@!jBXd΁ M2Hm\y)C74+vHߺ0U[WcbQ \ϊhJsX?cb*y̌ԏ 9[{8=Hm^4ۅ–ܫ+Rdvb4lϚ"/{ByQmׇr]D\^ n0;{|+c됿 $(oL6Ŏ5T`4p9djvlFKCMȱưg/iu'VDZ:MI_50~0ů֫E.o& r8cŗfCAfy48u}ˮkTOź.avKfq3(46,+b'Y`ٷ#7gޢ`oI-Qο&endstream endobj 376 0 obj << /Filter /FlateDecode /Length 3896 >> stream xZvj*^h9j&GDzq4}r,8RQwSaƏdHU vs$h!6 @֭[,a痒n^/zuxr-)\U~hQp:Z`] k5(+ /\ݼE*Knq=o˭lg:. OL!/fs)eQ; S\kmWY-#ۤ"|d?7.ٗ3+ hil}"p^j'tҼ[6mWU&%`[C?̹Qz*G@Z”r4MP*vrOY.\X0[9i gd@0T$Zưw<+dM6z f,Yuolxc„  ATV6rثd v 율X4Mj[e.(L`.DQTԗž`\E-֚)S(-{[\"#)-{{?S<)\DM@#a.f._ 'Y>XE_oaō,kJ>9L?py)F]ַGԎgZh- tx-SWpXp_ V K4ar_g FCeG>f)5)e`.}Q/0U= $D+7;Z苵`$DX$ GZ;Cvߗ@:m%,p-WWc a[BpP4p ri%˶u&4a6w+vnvNl ‹OWdR~\mĚ¼Nü_[t 7ơ]2PeW+ xܓ G܄$s0?DGq[FWlkyQ(?i}S(_onUss^;iZ}= zg}]9 @΃"wG Rj*T ʚh k9a>b0h_bz DyI2l$<`(] 4vh9gaMl-5Yw^n ZO#܇8-B5C`+.cwO% (F0IUB$$ ^xs iej H+Ch$8(S.$Jc f5 >?E88U>,h),D [I =4b!fBA%ˤtEAy9>Sl&嫸* #9b=.{K?.$ĕL[$j=:2g F`+_y_)m+*N^kA37$<F}=֛`xZ7I1e+z;7AAsV};<+IR=3슘T#l;ORq ڱP!oJl2 Rknw4o4Z3#[6[_[IJb$~ Ik/OM;/id,c4QuHL!cl \woF*3T2$QL˂BdIl?]8} @⑿FS ZcD-)F:W^N2j‘C(MxYꀡb6).Ha1?5C3v b˹r2;~TʒCd&3H,ˈG!{@& ]@eaein^hUɄOi>װRZ"(0=1ZuLmO@2mtmvzˋq^5$zvm@y (, E_*KAU`-UfуpB@-/* Zҁ83TϬyXMXwP2bTf4%gWB#HS)LF|QLJ9c$I(m%q9)x^XO,JRP(M&}9t 46p͇QڇQyD4;UD ze}Vm- 8\sCߖ;%_퇀HX7碑5>a!})m)SV(3m;\/BBSA'8B,<-,(c鳇Dw'n(~k5ΈBtT^5 @9m7 GٛTI)6MHwY˼*ɍj*N~BX~yk.o!k9jfj8&k$ ۠yXT!wY^ed8׾L.b櫺KS$Cxm}=_%m"B #>2mk~wMqĺѶ * l WG&+̟e 4#lJO\A>|Wzn,8W;Ш)G42RJGߡu=OD`wԧXh.5ORE& p2b(i9蜮)!j>_d%oFEӿ07$Ij|+HlԬā [B9zO#>?K8鷡:hvwH6{lbW?h@S6bo]3X@ G=/:?1$_{-Ͳ9%OQ10l~mwGg@e{# ,ܰq"aMURzTfeY^AFԒXfv:pŜZܛG)#:G G67?|pxFDx1ڗ'w de#"h < VѥCOC(-[=Lf[wM>Rt[uWN[1ʟr:yFr;{kUv.M0t:oYTdx! T^lo=B$y3vSܻCW D|XKgq⣕ɹpTX p9eqb+|K `dB-3gCmq/D%QK,wO{WUŸTwxH!*+M%@P:s瓽s᧐ I~*3LO~QOB%pgb5 L/k9 ;ݦ* 9l#39n9rx];U)u"?6.L8|loufB&e0]a{sr \awgOOz?u&2檪:P03dlrX8 %z2WXD~̊*%{y9 wuJ~CmdpݠiN4#h|˘6G$ǂ/A:}Q\|Z祫?@' J.eXC=i|އ1򤬞s)VAU\vxuo? 
endstream endobj 377 0 obj << /Filter /FlateDecode /Length 419 >> stream x]n0D|0Lw"E$6ZmCSUdrCYzn\˵u|sUӒߤ5[uz5lmƁ26|ӟغ>5ǥ|erهTO!yNUYqӡnl0Ct;`& @$0DiT*pIЀ-]p DxR;Qn&I!DfJ9vI2FIF"L""FUcpTX0eȰCMOi5k>I@oit' ć$9gitXiVXԑ#G^WGΎݸ:rvjق#g -82;3;2;3cDueʇxq||\۾_@O/{Y_7ު כendstream endobj 378 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 5685 >> stream xXgxTe>Cx  TDq*EZ5@H2dRgzO2) PBUi"e_.]սre~̜9s羇C Dq8׬%E% g͜!6>;9*3ѬcA !Ic1~\רGّ05ٚ/U)].'Ÿ9sun>IEE'Q1X3#|m) /6!*9.<-.|S_ݰ1|u#6Nݨ},arZj|TJJTz0KB.#$gEg EEM]di+YՒ輘qR^ =>yl^zZG=A=I&Q&j3F-S;WԫԳ2j95ZA=GVQHj80&P,Gc8fc!sS>ήACYB }4 }{ڱJǨOGB$$27`Yʣm@WsҢz-P@-KJc0Xk}>Ry_ qkvGrք#wSvw)~tɇ: (Wןp?{ -9PV[kbt\U΢.z ECb- ~{F! vUv#~7x(s A*̠5L`h=ˊ{YRn*?0R*Nc䩢W_quVgMCH坐p^kRӶHeͻc-h= Gj|v[cN]Y1),Ƞ]dmkDrzo׎> gyЖ[[NoHqۣmM7֜]2IӋ!.t|5b~ܤ=/_/hT1eǥ @P6A]Hٽ/ӉwJ9͠)@#kjf[@A١ӹbL6w)n䁄{۾b\pwC w"L]dQLؓtZ,CZy-%PAw'o'Xfavh2eJ E4}|<(^@wCOef*qIk3`'Sdlؒ@WzS_o.ar2bBd]x2CPMgP$={-GHd-J;}Ǵߔ_ L)MSGB,=̈́wGrSmUlFhPZ>x wsI&BٖEKMNa".Qz2BңFL)s|HL]sHH4=W)+<%8vUA_i,Yk|%#|v@O=%y,w\wnC[ jOb)TY_-@4 `f? G>HO]p%`,_U gtjEF"ߍWh ۤ{Zne*_we(s艒 ~|ʙoCSxG alj$nS^gxt8f(1q3dJ 2Zr9p=$rE!שZFڪDkM)0|˵'N7ֶΝhk(i@LΜ,Ȃonu^e"Ћ"gM*hي;hZU:Y/H@6n4LukS}3TS^)',4]D]OWϦj$NU>0d\(&.P.0Z:,&a~?~rWY{TkQhfo<Gx6ty=dhdu??sU*J'rf1$&h!B4JĻlޕ٠A{ E "Սxa9r6HYt^KZJUo)7HF*f\UȐ--Ϥs D8wv7"=/WC|Ȼ f_z6uU,g>{`LK'D&yvi%H}0x]W~4rs!cz" WK&0, bz*KEXԼ[AL-19ڠs3Rcw:pLG9| 7e_['^:m*bu;*U`&0W5:cCwW^cc`bh|*؂vQqiPn5Z V2\8bG5<}:ĉܷP϶KG˂ lϷ|}kWCн<0Md[kI{B]4>?9l3GApDB!pՁ.Vg'ID\_4v⍪ Qfv G,I%t(8E7c}%A}k9H 0Ʀ4i:Jd>3z 4Zh3^(<'t,ƅ%4)dmh֐ SD'aG*c $mϟn][۷ėބO: D]n52b<(e ip0 uutI(|RV}!SHa0-ӫX.5ZlgEa 6Dax +H`~ ~!m5fդfed564|^6A+}!hw)3&Q^1;h4N.NXJ&Mh3"Œ Ĝ@@*%wHJJo~Z.5@ꐻ<&ʠ/~\@Iз@9(XJm@a.X$-:C/T0IzΙ/`{[PEnhAC/p{&#cvxwݚJ 5 ik4N9K}@!yRZ|f Z({oZb.u( .~d[aZJE6brTnQ(ـBZ4arnw]?窠SϳL>j%N5U`tٻhh)%|ą"sY3-0?hRg/7ϖ@Oxy'oR֥t_8V7#NT5/zc^P_ʇ~yzT@xуh-3P]/湜Vgjj:Ghq6J|QL-k.ƥ0l@{J<%k;0b޶LHLk}k@SKih@ǒ!Rm2xr5oPQ r^堟jC]*ԪH`N~:)_#Yg-#Wd҉B ^::O)f`2hEi}.cgo=fOДlndR^ām9N9v1\ \F.䀄\ qsg^zfxha zڠśXn؜Ar趫M[ϰ/!c䠕Ν(&$@g:*̪qW?~j1YX |@ˀp6O\ى9xa uZ4ڪZL& 9q5Np!yx>^sue!'n|vD6Y.Ø7m6† Fa,+1j -dYÆS|Mendstream endobj 379 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 1720 >> stream x]{TSWo"*r+mm 8"Rap|NmU" `O X <#U10V]aXie]g99g{s>?fox<ѻŪ<%&eƦ/8UcdY/7- +2eB"+^ M dr 2AiZ),2M&h k$]Ž ²f^cdzg M ~]KS(b,P'ixպi m7 [M) zy[A2(a[]֜naoU;xnwu(vtYjy> q(n)3$l+.M3ke{-y M)tŻ@։[UyCg-#DIm}ޗƊ( jVg[L)y1KcJIS{<ݍpǩE¤so3zGY0:RRRPD7 ?n3IƑp[Pd U0bs?BQ_-SSNC&|H!bϬ4 ?WM|op ñ хa,ES@]@a]De9]m.A[hHyI:&P|j8?jPR 17VhsRFB))!/3it@+zaԹVӞ3H3cڅ-4,B.XQV:{v 4ޤ8P|\ :)t{U_a֕Mex M0TŹ?R "!Ft8Zܝo_bv][aŧt@`g1&9iZ%Tϻ.O%'uD*dxj9 ܝeg=2XIDB5mroF篡7P*(&bȼb4͆F\H;FW}yu%G2Ru=ɓ7F֬([Tt}։u M v.7S@+JUg\?w鹘Ft #GK~)n !g%oNQ=|Uݙ4IhzAB:j}XcT;%~Sel#4LOg ip"i.7ib"<_ŚbKpcg52[H}8L,CՎKqtaU:: 6'Oa~G1endstream endobj 380 0 obj << /Filter /FlateDecode /Length 205 >> stream x]1 E{N Q 4d\aq( jgY5E-2x➂~­&֤097N/;뷊*>8~MxT܊t00G!)?널7OV0gUI謫Xք$!$!FMF$D@lJI6=vgip\)_h%9'u~[!*b_ jendstream endobj 381 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 1381 >> stream x{LSgϡPXm&Sϩ:tLYL6Q6Q`RjBRZh)B[޶ˠi,PdΙ-N2W<.fK\}'yű d=2Aem ҺJdfkx^^RӇQQk;YSpt,pX]~>ˑhq~2Y\d׶ly=!yoQv&RG+Ҋ2K3eMjHPYyvgf8AV|m>vaXtZY* 2 X&vۍbr,iJFȊ(eT7 >Џc-0]_eh=ץ5f}n hԹ(4ZǓjR?W4vJA"km鱴uO1)[rJTBbT$`uAΐo৕6k~ `pe ȥMDiIX! 
?&k.J;OҿMP;8ƶ6SEx?uBB\zBQ)*AHJmB~ :(ͽt t<[k5t:>3dMQX*~sh"̩YWdW WUZO18:"%mu+x4״.A^9{ }nuZ3I Q&+ҴBRwco 1;YE^ D 71+rl.Ǡe/MCCAguR9}DGԚx<_[BEsܐBZ[]!c}dWWcb.%c"Ŝh,Z-VX9cv׀jۼ;g Ad4endstream endobj 382 0 obj << /Filter /FlateDecode /Length 183 >> stream x]OA  ~zhK~p/Pkdvvvg0Fg&6_> stream xmOa74)+_.°/.c[|Ƹc' vo6*zW.hP[3t;]Õ^0:`/mTHP0 R, ynM⍑hx>@i2p9C>Zf?C => stream xcd`ab`dd N+64O,,M f!CgO/VY~'e|=<<<, ={3#cxzs~AeQfzF.THTpSJL//THKQS/ f*h)$f$))F(+k;g``` b`0f`bddI?SUe/>0K|o﷾3{i V[Cs/~7b"Dk\~sU> w5.| pB\OXBBy8yAoMټ~e`_ endstream endobj 385 0 obj << /Filter /FlateDecode /Length 6113 >> stream x][uV%o\x "&}H:%rd;ˤrI  Q 9tLwO Q|ҍo o/__Żo/8}]ZXOz<_\J,zqy{qvݒLsevwʹn}\I)zd}eDmwa?l^/Wqg|[ 3ea)l޽z2EEo\^_tryJ1;|MC('`lQJ~s>&}l~x_ Li{x ? \w홄%lpB\v^^m?ő^vUPl/mNgDBwrm*:`b/>aM[־pyC^x[lsy[FCLutطղbq\u0)h ̣e"Llm," +$2p {c2= +++kϚuov5q3(-/9xtI7q U }ѮD.R&#x_4C4-ydY3kg&O-3V)@n_is` {.LL2E8K4)9WlA&y|A\BI]18GfL]B)VJ[gƾ@dF(hL_ɑqնITg nյ\䁑>8=aed4v+< $v mwm!S=Я6vM$Dq@QDfab>*]udtZ='ȰRr>@0Uior{zũZ.$ >@51}s7$y&a.MKcY=or oQ)5cROC2 /q +;QqCZȹMu)e} heF1n@J~ ƒsη,K Jд0eћL.W7ό7@fOd&\X\f05($qX-)5wΩ7D@+r1MFR՗Ar|" T6U \@h?pMB _ ?k>COfBי<kL92;|a}Aۛ{%YڄC*f5*XV2'ppBrnl1^A1xe|lzH&i"z9=ڙE6gB<ه˥#Ȼ777=֤‘1E.CU-y+uTe؇eKH c;41E@!t3h QLD45:L"7#T;r{*jdq]B,7+ :K~i:#^N-z6Ƀ!OXE=g|~(Y>`X4ʨfe0 {M QJںqϓx̀@ORfsJ;I-?iO>&=1u GhpO333:.O6xpW!T%OGdQed7j&DCDq3zFnx0Bj4sm#L}z s7iscN0~p.B^(AD2QQbF7Lg"H- +P s(GIyuўR@=A1ùn\WcQ:Ɵt6弥_Hts@\ݬ$R^h!5Ӝӱ0l=4CxT+ rLhݳ"9"s`VQM>}uOeΡ$W*ܝAzN CQ$!Rk׹/%}~MT? xc,4ֈo)h|>{rUur΂nk|[64h3;]vdoڝ+Y p3 kMcܖ}ܽ _aX@=}֔z{~@`),xd6c/veKnG¨6*Ɣ+-DRlsԧd,wJ1O)Ip&'!E(#8奩lE;UUyD SAŐNNS-.lWE0, #)=I u4HB%vZ 5~Y F [)48 U{1J3> ]\Xe2J ,G0ؠ)ct*>nba H o\=Z5祱LJ"F PvRHChf:} \3/ QHMHa56(ɻleXÆ3G&΄& X  ?fg&1QS*5Ov)S yYi FϮEBa9⨽^#; # [[NЅ#{GQhwm Ug>U#bz5"OPՐKXa6GFDy"g,ЌXeD"V L@pM ' #Iq>Z7dTUqyNjP"±Za1 P@rE>?Ya,a [zLaPT  X8In멮֖˛P$Y&1U@~D0[N P&c F%;yH 7t ^a MaQ#UhQ1q&+j a,u:6HkfP0MjlTP0UᏞF* iR^@R#v[W*J/0D8!|o-/ZYxU]}|㗡F)"P$Azԩ+^[w6 5KOzUӻ?lnׇ% QJ~moSscceinw~Vɪe t1Ÿ}*Ҍ bB1dY(`x|1ܹBf+F8D,s( fkre 5V5WL1T b`U7 nа4(xܤ)P,QP!&ˁ_{IKГ[a-uOgRL-+k">ݦo6I=*[MhR oПbH;/rFFD11X@'~;vOy $&圬!˧M\0r_m6* 3-0;st~ó+vN[Jk. B27&g)|~vF`+En>+a@C1a4BQii2TOuG2P_іZEoIi~th?Qh,2_eiȅzx7|8#'LLVJT_g5\ +W4s׸^ _@?NOE ifq / FalZ͗Y-OywR"tU *kCP8=3M۪4ֳmj.y FKpԬ20le+ ;Z8X&k+K@ .cnL?jH  o⊋oO*﫶VSt|KVS@٨t,ǯt{z nMA;;޶˃Cm01I,O͡ʺw͡Y,ڠzkv B,Zw-ܣNp3;^L2r„ize7mZ+o:ϻ:}uZc} Y.[;=SV{V{SތEt\-QR/$w7};_vN.(]btqP+'( zyLE[Ϊ(|(!>UV &ϗUx^a1Sț.B5Sn$im8"\%8UU4!2 z(B9Ymg-/ /exb $T$e0W^dO*ϵ- ]ܭDy{GzK;x+}]3`Di.nO վSqZ˪ۡOh5y55.]u/W#юuti8ZGo3PR5qnt/v3XdN /A&V<=EvgiX!Lmr]a4_H34uyUN!L!Eft-brIW\ZʲŶ!wNb! 
!`X׼k,D^U*^Z鲄xטR 9yH/F-%L}MS.0E}&~Lj x!`C>>s@\XDؐT%j+1tul2[9%.Jbx.ȑ2CvBR n4TIpEW$*zW='kkVFMY]udN>ؔn%e#+ToD.6UFa!f&8u{@>V t4N rr(!W bfʆ]J9rEe 8x&D]NJ zr Stiʲn%&o%9vXuMER*Dn t.qL,vq+Ruӿ5Ze-q *LHyf( y$AᮤA5Y: u9e髌A|v^I2qE±TG4PrD:iJrT]lu|MwqDvBcHn^摆 ЇЅkvb+%[1UMKG^v<<X+N>ٛE}>&N@M8^gJekE+QTBTq^J;/8t̍6䯮 ݯ^+xgld1Bdbh'vφݡ159&K/3Bc=oK+^)ėPumn ?pOfjܝo)ҥ%v;EtRrWb> kS\n.qg*3L3UEr oM+z!].Onx+t)t(܊Œb!D~Ml)Ow0L~ss&pHjD)/*և!CWqݕNȩuStVA218|n<1OK>(_5-Α|@ad^  PNQ\}ǃ['JX}28{&PL)W1ƽ&+VdY R1d?O;;9O: 2Z9QhE6inӪIx$ oGKDzXV~wqgZ >ja34a:2 krkݤb ?sQDmla1S$et41.2ZDB=LMyKgZ`endstream endobj 386 0 obj << /Filter /FlateDecode /Length 5186 >> stream x\KsHr5zv0؍; D0"EfkhUTfSd:DUeeG '-6UJ_7Ϳ]MՈ+?Tljqƕ2U!e۰)+UÀ۫wŇmUVF9Q5t~ﵨm]?NS(WJS8?owZ벪TVɼLuw _,~N޼K?uk]q[˲ia;}{ +KNo# Եl&UՔmI5k J_3dS{fP8 F1<֪>&C D}$8;i*U,♱hW vh{6a}D##,g eU8Jke]VZb*lPэ\?+ xߗ۝ kAHJpx.73#;JO¯Ql||pڷa˖"r彦ٜ%3Rbc]6U+~ JJVy p4n4:BPhcPO2Vn=Fãx8kCaZqs+x|L#CjOIQU'?.ui>k 16QaZ2i\A_"SBB`7x9Ga<CـZ6;눥zLAS\|q/+uE~VY3؂h"!!<27쀉X\9BkD֥N9itWU3M%k㾁j(|!Ξ²eОK 1^4ѿ 13E7/hILr$hGBsO)Xݳ2ǃRCk#D8 QL.h4l)A_s~)#mMPc vcش)j5\cD8bl{F 173sE3(8hMXޒˎsu4{v0s+E{t̥TuCxBS<8fR;Uyf#Pӑ5ƈwF޸r qG61Cm-i|n\k;KbK+N[c7Gt˜V8{*v59[MlpfDјϘ ]5phMIP4O3nݐ?(kճ.m"LEȚjMhX+ )ݵ "w9h6Ըnl(Q<{Q[7PEI[E2#2QKIC)&\Bh!BpiXRxOG\ҳyzJ8IZp abmT0s6GNTJ2ʖR)LصNW3AMw_>y0{g0A`$|$ e鄮/XO/d~zW˚{YYbj7 Zާ>,=5ś>?NZ #YH,㫪Hx3( \9r1(Y6U>*[4֒^ v%@5;CC拒1 ?%0L8>RFZi) 'Et)*,~фUc"۳zL?Bx`N\v$3aݬdFo J0dd~U+EƗVl W:|*굍PQuyI]Jn\ t>!cGTd/Ty⩸J4yـ9;+ڴd7 N$L# _9:m$ nTbIb*' UD[a&e9tg0-Ґ.N`e|/QـWCSXt A$z B@,xh`6"R:+墘o+ [ 0&u1El7`'IH8"i. G4]hi=Zdxt Ocz}e߹'S!fNͫ;-J9*O_>fCM4I!}bů5oE%h6|a&U4q;A>m-ˆ>K$ _ʚuQف;"b/kVy%ZCZmv烁]&߱Y+..)b(rN= dameZ %"(!tfJmz&w8]eBwOԓ` L ^6oDyctƹI$P&a91rVH@RoD мy9$d5|hn3.,p  p#SE0KO=6} 5TбحRG;ŭ4l^>? -Wdu/NuK3pƭ;vѶTW}1`e 膾h=Rc\*{\%˹ω:uc[򂈧 aRJ"f[=| e> stream xcd`ab`dd M̳ JM/I, f!Cdž^ N)ˤzxyyX` ={3#cxzs~AeQfzF.THTpSJL//THKQS/ f*h)$f$))F(+k;g```La`pe`bdd} ߏ KZ{%k|O[UhS\Vc*vI{|f~~JKLql9~+h-^;̕f`.]QS!ܗ[,НpkcݫwM> stream x];n!{N >RX؍DQ -".r q>!t.뮇u^Kjx-xYN5?&g㋯_55`8n+ۇV}8 KWOq: 1aac%TaEGHG ҉"LkfYA  :[òKb n4]p^endstream endobj 389 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 1148 >> stream xmL[UϥAe ^ ¶di \t ιTJ \T(ҧ(eȶnfpQ3ė/j .q 9fzyӅhLNys?JNBAd([ʌ Uz-7/Tn*Wrܮ$.[<Wŭ)h{'rA*iǻRdv<(" BgrW fڗ?'G蟣T~.Uz P%qAj:JURZꤪ:UvzS'ʞ[:Z٢hS7-fUSR+t6aU*^њ*0Bi}[-۱3N4C4z!pnARP2:-+EL''͈2D]t:_Ѷ .gċe>۰bu{V+zr|y\nd)Ĉx!fdv0Ŧv0F ஋#VA .%&%t)aq | $~Yw!=X+π/@}rH9Ѥ7dobB vx47 a+d+%A.$pPbpF 6npA@]g8$N3N끴X.gj PGdT-T z n6[>6_X\w's6^^s2z,9Hj:m .»cjڡ+췲7xY97C~O`ɂ:剘dWpj#*Ćm"ġDF%qV-6?<`&"ؕɍaw3,7;f'n!Ktvne6o/? 
InyRghk8:,Nyߓob=r蓷FND{h_eŒb?\ c/Hbɫq,5&OU<$Ҵ3˸Kxؠ?"}.xXendstream endobj 390 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 844 >> stream xuR]hW>'Ij@u7к)SoFsf6A}i~6iΙ3_M9`t̍@/ ^FFkOt;2X<i<*z!1 (5GJVñAQЀ82hn;p,<@F>bCn+>%|Yh O_G)+ jȬ0")EaCj*\}jJ]'0iZGrVnpR C[hn>ڑ ZlffX[6M׼DxT!y;МM:2E Oy_>7U]vNNc,$./de\pUaKa, Į.,)HE(}Bzi tF|/+aoo*Z WaNh5igw4NfYI=&?|_bd_nf.0VIT!&YNOEy=u㬞suyө3/Ջ8|W,DwUߘ(oMR.W7Xuif.e2|n|dRr?r0Goͪoendstream endobj 391 0 obj << /Filter /FlateDecode /Length 4906 >> stream x][o$u79跔?8@l$HlhggweF$'!EYMV՚iôOrx.Oea?p ~qvrk)(ܽCӌ1x?|(1vt_!#I _o~ŐѻK`;AO?^ &1 ѐjxb,AFRiYw7r`q".뻁cP&L?}/H'гn.JM$\|nc^*P{1Qbp<4 Y`#MGtw,2GL\j)IaLr6 RfB:j8p#.u|H2f{@ Ɗ2qcԇnJWV#ˊA0k6jGZ߁8:LFCNGdtksn l[*T{ylvO0'e^wRFc3TM a-]@) EL+lFŞ.mSr pݪ{)s=5\Xe79G6ӿEqDax|7/BXaF,qd4aè Lؖ$PXn $zXBJ/7&~+wY{`{5%tM*Et;VGaXFꟄg'Ba1hy_O8_Ob# 0jfH 6jvC=ݫ]!PoFP;Qᎃq ؿ1}:Url 6K[S4Ѻk!?y2t ٗVm~P%-Zbrm":~U|:3 Db2a |z=F 3C64}QlGqmư"@ dxb..v`kOV6s # ͏t$KB hwCӳ!hehh*=e8љ-m6$6MULu̞Z&Ž`f/.sc;,FĄؽ Z\E66>/])Q٤ %Þvat6({},>S"3}mo&ZRȰSJ u8Z2@{=,.ӜRNFǒ'2g4pzXexp" g,:+P^ŭ^Q3l.Z ;?IZ(B?$bAMb"h#}0Sr;Ǭ9ӇSÐ2Q7)-e攗QU&`J lD: 3L+ ge,O ߰ҼJ-~BHd -H\g,3 uxc<^AU8ӤP'>=@du(3(T^H"O&XLkPYvw!Nk_Oz0:2:Q.Co99`t9=O^6Hq~h@O >m^^ Spmc| lP+)sh;zSsyGRmEND|UԾB1W:6)-nsNUwJ&*6MJg#6eLɭ]iؘ\Jtp3 af`!1 %2zc~&pD*65שN ʚ1koI\[q)NvWKqމԜцZNkT mOf6G `|B * X# 6Vt~{@\wș;+ 5:RǝkLZyh ^-65THQx}8%/:i QҡK]C/U 9Dm%b~ 9S8ešѓR tRU 9(;>IgT\*vA[;:kt@q(PK݀5A*[0̙B֐*@ \*婙BXO|Ӭ4M'[js[ (mRQ>ΉhZϯeGqQrkF7Q.Mͤ/!w#1s\bT-CO@O< 싏+]mZИݔ-5"^ >%sy$!HrR_{uE$:UF`UsKs-\/]UB :cb]ۗ;эMwv)t;A0wPINSB$ɫK #! aIaG"HLDJ) 盐, ֎p*P9ڐ @r\R:sv,IT-(?4:+ yS 0GNAk~reYUr[L憌I+$bP?[~<&!h CoQk)\bp.I,fJO^׆6K 0Y ]y֐еh1U ]nv][_;/H @9=E-5}s/BVK%uhOjAƈ!\`14ż!U6lk,.y^_Vעh]Ҿ.i_FJjR5~Mt.0{غoOO" URy2Dqǭi]pd2;QƶHR̳ɛ=)0ؖcK>@,]e4L+F}X3ȆrV2iI2^0, inőF&ڝhĊ"x;3Qei8u:֓Z5' $tV%y!F$OH\.lxH h$$x0ڲS[3漊S Tтv ߼ ^}\vOdER|U74 |4aMw}T㎆qcڷo@.kg$'"m)zA`}a"HBZWxetc34&q'|L(K՞ߪ{1~.ӽz܉H2A|W7IrRe9GF?mQOEi Fˬ司MH9t `6az}C5'(gs"&'B 2pάp>RLZLwgH-IXK]3˴xR[k}, c$iU0zckGDK9dk7V  bV;#fxL!@[zs9cŎXAtI9(endstream endobj 392 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 920 >> stream x_L[um B׹)%dƁQg8-%3-PhWZJ+---[n`%q%c:pF65aO*'qѸ.LfhIy~srC AcMͬ똛^wtzk˩vT)lidۺn355q(ژڭ;LL՟hϘ]6 4ALjd()B "D"3~FUE8_-V/)+ Gad2 Ү+%ΈW. cl u EK\9'J !T]Njt;o"-Ĉ/k ȲBJ;uw@|x(F329A~M_Z7`~'s ꡵jVPBlbл?<@@+ GFGC4V1@ڟrt"9(S'Rm5p ׮{O V;䈅O^=ݫ*2@nY㿒o`:0𷇧M muy m)5(@"%.S3q?d8nh,8'{~:;-N0qaAW'ɼ07__*\u>L`e7d6?~zj2KUhK~q[j-؍RX%Z@iNǓim+ɋrA fۇendstream endobj 393 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 513 >> stream xmKoQF0@c;6%p7& .HBScLkVt1XZft:U/d!BC_|~gxhqR(K /I`pҦL27=#}lxڨaW6jM"0/HLZ~yF%/<#iĵ;B'+Iu߰Tʚv*Z5jsy5ZݟBPl B&w4f;JWr-d]^%Svl&DT[Pvy Zjw]/N扮X@edm՛ݴVZ3F{c(_Ţ\SvJJ/v`Ծendstream endobj 394 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 413 >> stream xcd`ab`dd M3 JM/I, f!C7]?uXedSu0w,=Q{ fF|ʢ#c]] iTध_^竧_TSHJHISOSIP v Vp Cw m %h5|ʾM9=?w}]R]+l3uyg+..Z=S;V]xIrߏ?>cqbyL ,Y߽~yIwąw[ʮ=c}:{} 9O/>}h9ǟIYurlYp=olnqqpvp/;i {7O?cendstream endobj 395 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 340 >> stream xcd`ab`dd M3 JM/I, f!C/ N)˘zxyyX~#=Q{hfF|ʢ#c]] iTध_^竧_TSHJHISOSIP v Vp Cw#} 3 ~H!Y0/} ~mfZ Ge3`e3UXֽ\t9M\ȶ+KHH<g7޾ 38e'M4Iendstream endobj 396 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 672 >> stream xcd`ab`ddM,,IL6 JM/I,f!CA,l2~nn?~.S_PYQ`d`` $-*ˋ3R|ˁ y I9i i ! 
A AzX]"   gchXC| *3۫+ZB"9JZf/?}ŔӖm}B^U@\O =7DUWgW[{{[G}w+G;gnl-gD{Mw'PEܺZ `A) lհ:z^AarM5&'ukv?֟V͡[vv9g:njS[ K KZ$K͛pirnν0aąIg[7819G>A[YSFw ߥ{{%g,];_7ǞqA*%AmmuP~tiӦ}_ȶ>KHH<g7޾i&ٺgB}=< Z/endstream endobj 397 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 599 >> stream xcd`ab`ddM,,IL64uIf!CLJA N)yyX%H{.LFZʢ#c]] iTध_^竧_TSHJHISOSIP v Vp FTԒD !H.fX0}_i2C7~~;5Dgt7շt׵8&usV]`Ɓ)}u|ݪuݒuϯIo.(Sf݆{ JK+kԹkp,Zv đ ΐ[xH7wnxw߽q1Nz?2r9e8~^|z_ݸoܢSf]lE[ͱun|OIC|}ݵeuo>Ŵm|Us~D|/ZöSy=g7ޞϜóki=z'Lendstream endobj 398 0 obj << /Filter /FlateDecode /Length 10701 >> stream x}n\I Fh~{ .vdl5GR!7c""O!$RRUË{Dy~?w^ݻ~םzׯ0z@u{߅!]uc޽~z͏gnO5|=_1{9{72&Vr~yw%aͷ2=7C1Yh{|\~> u9f_~K4ÌDa_s"{x_4.}ޕʞOZ{b?v_} o?A ̋_}tս+@jޗ+3/}Wǘ(Pv j=$}}18i~@bcccRݷH* @2>%@@3|*DEHM|3 ricutΥq4QPǽmz &``},` HI^ ӁA+W qĄ``T!]3h)sX^Axj/:9 $+ql u p!@(M R$lX_ $UׅƮ'0F dD ]9)<*"(dc)\C5x 2JV6p[ *OSChF,Ha7Vm rқrB +T6/`\)J,*\a7xBv;'/֠;CU  CkB+Tm+DA;!l&r#PF1Ѐ+9a^VK}/,{4ja]{a517!C#H!EKQY-9)!)P=`78dY+e/V@9*%@[24Oml0JjB!JTIB$` *c0yl|Bx V$(rrkO+g Z5B@V Nl9䡦Gayi X-?9 &VVx2o9nD~s,!XVO* ( F]X$1,U9G !Q_5 ~сFgst}>+UT7M"l1 0,Yߜ x,sp/ow޼nwg(*\@=w/=C|ƽ^O%qyk&|$C>rqe32+R'6}1`Ѽ~BxLGг70j@QÒF 4j} v[XBa3toENpKڌ+>98',=GRz ٌʎm͘md3ʔ4j5(Ө:FぞNӨٌ2QxA]_L>->64h;v~=)"&}?wX ^D.dU>Q bezs=0F>E?Be #N(eԴ7$cSEP>2&BnN=Ri{l:- nt:cjm'D DJS'RV@mVCtf qaa5]#l:2sY_fN8fnm<:s4XfoY3$a"aSט82D ,bMYĀ`m NrӦFXӌ;gm#`[X=ϧ1eD&>*at R%,n !a9fSC2<4 rQ%0KX\ P4$,c[ՐXe]tPe \ٍĝf^ӷh!Ұa^~4tL\a":"aqa46E]$Ig.i2Q#Jm5ԶUh 6bE*fLʅY̘0Č9(3}rd)xjG%:IŌSF{$:hT[T}qVƸ1OX7Tet^Ly>7SjiHMlg\f<fF"d2! Ylg&$μ/2!i#B~Lԧfc<,5b3f: AX+~KL8(>eEc~Z̈9&=> .efbvMmuObY*=2g*!dӿ63KCa "Cs2#)ӽN7vDSu2U\"0g 5pJuJC$<+ê@%|œ<3~!i;!A ,!.24YlPQ02-V"4bf: !TӒfؓ?_IuaObtfQzSKIoL3n)ऋLu*kcLAki+K ZTM}"u /yn'>IܛYOR?`/J 'S31a~efD3a ԕFM$#Np< S EfPAf[ ]\]|{y"6{Ur 9, Q` "Y +kKr$?So@G)E*èG q5 ӨUdyèG <y'J|^F&ߦ7P=D| >wDjW'@Ώ@cPWfTqbJQٌYiA6X/ʛFd5z5VNzw}@i-Jw:tgȤX 7YX piqW;V-8Н5-?oM 4l(mJwԅyjw8 -fgAV@_7cf \Yl3&g\ݵq=Y}uw2ıhb ɌO.gWQ+f( nxPJDzWX&O@RL$gĥs`spDid88=ĬpۤC*1)JV*w0"~#U#H#58:pi(1k743W&o mF<+9ފEOjNfߐ izc3hfТ^uN@qWC⫖`(&nb&g\ 򴽲JB2:^mcX ]ۭ!W] nRL  6C˲!VPOJ^cbJV\}:.lȕ,SҁY$fzryx}IDh@MmMe"2a:bԘ1l7%Pxv'SLscAxS q ő!dE8*30VL^z5* <8'ɕ ]#- k]d3Mj&^(R4iB ~)K^ OEXD:cJ[+pPӺ+Q]B4RS +&f0f ZPOq < ,IU푉ՈǞ4GВ -h:땪̾Ơcl5ҫ+>W@бs_b[&x>4"rZxIklTOzw): a,UPҭN;B5ҴJ4ner/ WTkEsrITXp/yqV[1tRx򨴳99]Y\jا1XEPfzb~-=tRQmR\a,*dՕ1rd]ql!P β*K[ZH^41`IiIm.asoRW2W48!V$UZBddS tд8TDV%Rr~)-XwL+O*yQ(HrU5 [irж("liӝ2ю5M;C/ T8oМVdIWDkIoiV{Lܰ"л&>y4(5]j(wE%;b"j-Mlqe&|H9=:or A!UMLjIӮh m$y03{մN=Fj~jA^$1K/j R2VMVa)++-|Yݒ"}7196`h \rŝӫV(~a?QCQҺ u$) ػ:$H ZAmv>y@(kz\ O I௨{瘬uyP,ϕ-0 Jd"HRRRcifiB2k Z,ym>I_H*>Gx\$&d"]R. 
IH߲q@X k"c)-72M:lhG 軬)$ j_T"gĤm\c,1A8Q+Hp!gVY24n9&fC z*E0ɁuB' a Hd\c5 fqF.D ShACq?*A %Q.EicrݮBhŖV#3djCl@3kQ{b O5U2V>DJ)WXw̦dvb5w(0TvV ʒ.I6AVS&X` Z7"FzQkd fz̻3 NNS=2MbԂ|rzTI_p LÞS/ Ğ 3Lz]DMHnSTҟFXݞ-ih1La0}цA "xe8c^jOi5NԺJ^jEZobs+M\|Q!)2fUI'#[ONgDccX?c؀ 474a즇)"5p7Hkϰ(}om Q.SH<-ɺ8ªlZ穣Hvq86,u0"bTӒS[Ys&aj{Ttq{Fрw]~V3Ixw{~uy~Փ(Cl\zk?.pqůo8*h #8= yA0 谥m9JQۃG>{)1^V䘾oH+r߶EՃaЀK92 ĦAٌJ K(lFQӮ>P\q͊ # K9c-+8<-azH ]|_joqTi-Y[fX-|юBkMT1_P;P[*T `TPb׉^[$Qj \#5; RB`9Diı jw6h7RI>D\ZDvmTtIlFh,)&]}@hSbє|5YTL;\:+lSq~A!b)sUO 5!ǴC=@ C8VZ 6`8U01N0s+VOiDMCDZ3ԤvZ,gv :z^GVr89ouZ͔IޏV6 ngnn#h%؆o[6֢7g eFz97 WA+|!|PhA։9KTŁXni f|2l DY67oj9f2J4o` n(c$@42if,hXTVAc`v:HI?Oi,{uW[cCOH٫q`0dq*DmDYE1mN7P荂ށK\b:sV%Ƒʵㆳy?]78?+"Q?KQYИSQ_k)f™w~EwOefԇD_喃 掰H_a٣m|qK1Ϡ1M--7äYuq G_klޤl_yww|^;-V86~y~E~پTZt á,:A |r3=@o_gB꿾x^/FC=w26<^rև@^#K(E(vͥ-EXu1/7?:syj^IyI]JL Q/lY\6^H'{Ku[ds9 ' =b.YӣM0nn/7O?hX1[~ -= snNHDWL_,_Uળn\@G~ǔCȻazߜ #D%`eЩ.ƃEq@1mr>[e2V)M:P_.Ϩ<|ˆx%󩾐0"țEIrL4MO~^/Q8_qq< 7GNb.o\w x0m;?8&p+/|5ʇ71xE3feffϷ`DآlcuA'>J%"~{ :G4N|ӁOod9֗~W_OS|6WczݏiA_q6И-6\ޞQrOrK2⿘c*WTqTYd"O72r:dIiDOD(`)b; ѡz\{^kWUaTȟ~+?eBmiC7AۇD='^Ѭ[wşu7ظ7ǕՎ?EY\!mԽJ/ }5 Wȥx\}G{z [_JMO8׿)b+0'W1*}n`)(>}$j|uD4ZL{>l&SsOCŖsQ9c#~q1p;_򎎾PoCQI\*P-|;Y%K e$b,.- s{} !.Hs7B=g,~DF%$wNiiyN'@dg?vr׋Qt]sxJc7;@R?3c ?fT>bO(->%}|NT^:>!|}:{ sut LS=O5z\=lے)6? oáD3).%sAˑʈ[ImoG:n.{\kzTͩlnO99uV#VL򹐱؊?!"&رiМ >`:ߠ.OkGEy`U/g0ב~usNMÎ I^@jѦ}|K,^E 'PNnB1#]K J+aY'\{a懭E'CGL4$e/$lGo9G47{{޵,v(|& $4H Jh?o7MQN_Q Wnև@ߍϏ};f. >"%@iߜRŅ\ί~ܨ!$:Xb4VvkYN ,ݺ9!r~y/7eQKSz1X[$%×Ķ˱H;l!)7z[f([v- ŗ[803 0IŚI^R]?MZ5tM%n( |Oc:iǦO*;SH.} ZZn-֡kluF =̼g@C$> stream xViPTW-jBƦ Fu"J(B)TpAlm*rLt&Fr^ũU~;;9} A LH]Olˎd#S`7_3gDXJCNpȖa&+t6fY)Iəϼ{kgk>KHIJ8ŚT}.!-34E<+C]fWgdf YpqEh Cs\E"_4BGZQ Fh"B!#Ġ^HMklAWW&yd3&&OR)?VMar1 ~d!_.B9@r@\X.%S9RAǂt{#؋dv2` } me:&n# xU';[t<(ۉxUh|s`z@!h,#)΄!D S^_q}O|i;GϘ͛w(zcEsBXiC>ݩX?Lu7Z|R`]cJK T][/<05*/嫳7)Ju({R?49Oo~^#-yx+cS};J)l-0 ]76QKIEekeJs/issp"~mp}ƢE[NPK%ZSs H>WN/޼Wk G2HvC;xyS*F $&}\[cwߴ,$5G1y=ejN>6P6RGLȲXm/[@n5V$Xc %aդanouè|ryQg<}6ϕ+Xh>qczf9k#cwZѓ`60S#>-N;h:Ya\,51~d8 mpWPy{a!#w pYϑc ԩA]mph[:>ȫԝ]3neNNl 8zuJ<&F(-7:nk"t@%KGv#}a,`Iw }zi_W_bSKJ+*jjpOVg2x$n'ځ=ωHAq ñ޳5%&| J/ȹWe1hsK O&V<V,0^_qݱsYPr_ԩUGGu>yC=x3r9 .\X3Pqc- "3Y518t/ _v^Dt_b vL  eg*<Ԝ-ƓRf'4aixG:ڏï1p;C:B B{.3>Յ(ݟ^'x~,DբNZa < :WS=1 /Hp ^/ki|%KVȑ( vQw*e]ji^?{5EmJ 7Eƴ?{uf]SvmIʙm59Y%Ҿq@X=g7$֤V-ݿk0 X*[guZ%rf-ޞ2AIS2g`$4(F)s*,)da/Wz> stream xUiTW骊TʲUADAH"D¨q\&N5(FeQ(5 (vt21&8=3Ր̜;ԩS}{KIc<33c+KބM xS)Sq'B Og Max@HmEUf϶Z>5;-1a]f<&[{|zbB|>Uᗨ٘._ߍ_( Rևgl̊_.1()Yi3aqAD$VAD0B&wBA,#Y^QRVX"ml^R1 g}%jط jj?`3xҵ®eSXE!+mSʮUOCEﻫ#E8*JEhN#| -̰;VhaX[r/\d,I : 5KxVY `<' .Ii>uߊ &a{^ puB7v9/ZkhJRa}큔,h*T}qJm9${2s(s J)\xJW$~IYGNmTd調_E{dpƸizvW^eԈyr}}l#c9{oV ɑȃjt}s:r0gIHLkb/))8*/8T|N-Y}. 
J`C]`;;`Ȓ&1A0$1ny#*}Zh|NBV;|ZWX^XчI0|sݼ1shn"l,c/{\:Z *σ}ksybs,!9X%f`zZ*pf,HD!(ʬ.2ZCzkP &HP>G瑩B3b4P tH>pl.*(ZyM UxPR!|!t]sH1  |UWU֔`|FC#L0kEvE˫Bd5i*laRi,[eit>wKPH~<{Zf!7&t_N[O%'}?WЌl W.A#حl\(=2钨aRHAHyoһ@P|, tE=vXihK M±A>ln]`2(|7| >Wi(p%k/i8M Qޕ)[_Z]h7%Ʀ[1bíQE Lˁ1c E|w&;%X^p?Ypf0z y/1/ Eэc Fܳ:6^ |^ 4g~R)ЧbϳvayICV?{HdF?q 7 ;ěUKP+}'6aEJw cb)CD?^FRv3Pu܊ZdqxU{56:qok?!"쵨<ԉ*+*" 0VE{Ef˙X́Ikܩbϕ3/ZsmK;\esm}4~-38IǨcǪד7c7endstream endobj 401 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 3252 >> stream xW XSWrz 3)ъ>c*Т૴*@ BBAyEkZkW;+No{NcwىwǷ /$f%ϔG2s.ޖ|@p} =9 s'R8 [7'au\?_sj1)@SV^UHMN ZʠAa!AqreNzjP\fBPdHTHf.-gOL%ɓbwmް-:hӶ-۷F?œz4S=W-UDlCB,{f֬]E@aeڀ6Mif䍞C3Zf"$Bb4 )~A(q ۂVG=N{n$|IxY} Ƕx,XzqY>|_݌Y3g[WwqG18q3Ė~EM=î,a 37dTw"~.Mnׇ7ۍ׏tK"QJ&_vx59L܆"^z =x~ӆEjqQȺu _5|pE6# ~ʁ?r{ɩpqj?Мg!}̰oX-HL3?j,EUe¯0nͅEUk@9n;UwU߫5x3h{_iN\n!Z]^uH9-5RdeuwIVM]LDIXPZjf cvEfjsJ͝bv^WUO}_xT٧5;tIB^[)[\Q )PgPT>Cceȉs=›&ݹVO"ܖi3Z5wܾװFWP:4m%+3B` txpd;gjhU'n㪓o!8$Ŗl(vA/LRKVKe ."z]̡g5{X.< WFWJ`.;D>BAt$oã0}F&kUVVWl'e%/ "٭Zr곑,NtxTCUB$ߋ yq40aI ez~#xm|f,cqr2yj _x7uVqDaȃS䢋EIkd*Zm-,v`~]8/"SGPRY3c䎩hHxPTQdWYNsQBʽʂlm\wi bXIáYw*'bKO&( `+rrK۩,68F>:] {ᢧ#ַ~Bq*)Z o5e}HI+Ff]{p{’uůbn8ZuZ {f7/^X&.&xo @c(:x3aڠ#-C,#3Yn)ob)ԭĤ;,({|#bjB:~[\jJjXj2ɂi ,8*+ۭ.cjOpgMkaďNC@c)aZ3-yȉgL%懿[r@ c&`{~Peӷ淕^uT,jxSl`w"NfOBE39ӄ2-e%6v/gԧꌺNՐ (/.{9wy>Ce_-&T=v_w#MU辀ȌZ>56-{ )zy聮?3SKid}a}2'K]³1QU/>';7σd8S+URȀ}S4C4HbgRB4}2684MI2ݛ`'ڲopo6>y\R,ɿ/4ܤSvĭGE[FϡJcYE]k1DPBQG.B!OjՕUѫ%'ݵ ǝt1Iv0=#Lh4tE^f.,G2;5c{!tmbbP]iʎڭ*)WeEQ]a )fq>'.vĴa&!+ G0`[t\p P䗙:aRt)PG/3utdOWdwC"d2mRW7(G~y5 t&y(D$ LPdk p9^k` ;n]p`B~@W|C&{ ̇m :yzyI@{3G_i$tZ0O?rsA7' ı?y`(rnc,TȏAcAKi][h?- H5ZbfhCA􋉂CoB7߁nRMmvأs`o`4^"1@3X,h~`0LYzՖgigcثM^ecE%U$-nm(+) b n"G1!W:NGiPvQ|ﳝzI! !dF*OJoum[qԡvT׌:"bg^]SKEm+&58B/a:7L-:h`TiȿHTWTfȯ8hU۵ IT muvv^N:ɋ\CIn'H4rR6 AinHF)#N|4ܾ:T38QRb7Tjh}Js e5Z\C"$(Ѻ64\`Xi>n^붋tfU񸑃[Wm~?es\Pi3fC&<"/}> stream x=K$yr뒃2F|pu])I>l9c)pnrYfu2eCadޒ۶h/iև FY8F-oYIJj>koi i9W4Ro3m:3WUl [qӱ;\9=nyi} `)[GfHɀ MN?i60ۀ0]۰<"aifЏٺ# ބCN5GZ 6ҷ˕Hh۷DW)d_"O1*7x[ZNkϚE5Ï]x7^ `1g v~Py* PeN}GAu9l}I rf""˴UA-;~J7Ȏ#R+ .t]N4Jf b9f!Lq&?~հ^Vq!@QBQR6/¾V[UǣdW ͼy1plEIZgѧD g;%k;ƀ=h% gX2 J<Ȩ"ܝ%HzhIn!^2=tDeK9BI.-`gzf{ܤZo)og"7mLBLF+4Zs2,RC'iTڹH" a"Ns .`y`RSYKutrUnL7߆Y#BP%@x@ga ,d[) eLIOٽ>/Y8[Q;w"Vc8&aRU>=T֙xF` fH/VGT 8{^o'^A!ZcyDP-cKU3}-*AF]A~p~Ƒ07N@_o߀]&ܙe>\tbLN%2KR~F&rjYZIz[Α0eH:xDGThc@3bt(nA(*b]6 n%y2U)^A%ĘxME ][!*}23pڸ @/Uqa=I@|/cȂ.w@5+ 7NF d9ˎYr]pER|ddBnbH%tREoKFI )FbqP.~z#(}_ڧ9r+"5E =R攌&%v>}(sZQ#B3Ty 5EΰZ?'+Sg ,Xs:a1L&YDȎ<9OgegM.٨| `ܵ>QOc5' pMs<K#'i2 d)܏x!םN`B{s`kF4[9z%  3D|BlA0"0D(3)5 -[o71|tGVD(zҖ7%qN0(}ѹy/t Uʔ~XંgT(r JYG!7oBksòCAlO#[js]. 2\=ҍ5? I9j!=?iFk!h"7@%C< 4`R輲Tʠov bEҳTV`]yœ/B,el֒MjXvdדKНY$5O iAY+qj?yGIjbyZ{L`;$b:OE465 )Tۖ^:(h7$0y+NajnLbRJ,쎺XC*ݕ8 _x6M^?מoK}.U5A<0.,m Yoj a?X&3ՏLb٬˿Mׇ @^r8>MS1&bSɎţYkb"ާGH '0JrsRM#?/J^)<֡$THOȕ⦚Tx'ya+ΉUdOIwNq&]"iY_҄>ZotĺH>?(7}V^JO1Zq0QnX_6Q!e}]X0>%@VY6,yD}АHXG/WPUʫ}!?SRL ,Kǡd ת4D'ֻh2%J\{SG 4c#ó^_[2V)gx3%+N,eH?'w,` i}QV[މqs8y$[1ZSe !ʹȡ7$T&XF|_?P7!Q˫}Es@@j,Hݕ֧B$w횿 HMg=0TkqC!z5nƍqg>`ʐY2骲Z͖  t&MQNqn)@^VB!BGa;#z|͖P! 
ۀ,jL㚔#$1D m,;F4Pg}M >Í W|xB87 )7%&,yZ:,Z(׈$yl>/W2@ !VOj yJ 4W&0N1N8(?fqrD `CZ03hl!ra&93 ȴ2A?NxG{T7= =JVVp 1܅*Ծ{]Sk=5OKqMnB!V}b 45mo,(XR%mkq˲ $o!#9j]cۏMݼ[ƤG] ]+2Q'fG-Η#D?ȴ[,Ŝ3~c5`#sjw3'eե\{"F,5 ƌNq {豺J.s6 4~3 ^ogm>sX 'ɗi3ͤ  :ʿőQ~l|YUJݵ0EU]9ʢsUtM݉sMדt3}`9ݓ۞OEacXV^`Wq/F8u!Z}tS13$V_^&y;LV}ԋ *+]ђ=O)ҴbWMuߧy74ǀE~"" gӪ7X8QSL`@e2Q_.mly=[h^nж&mu Y@ӭ f@:q(Vn3#08WI9bNtzT燐U ^'gqҊ7g9n2w1 Ɯyc3hM4 CZe}eVgv~}hN4>r1.x*S=<t IY8V)ViH|r6jJqY@9JIav_P@:9*m덽3ƞy?>~ZuTt^ >YGM>k/,"_:y`04Q,kf?fg!,f1&oY/Q49S'>q|,ߓNPpwcCD0q[TFjӑ9'JK*0ǝq1Sa~R{.]_/:0K=Or gHZɠ5HDEzeoL čyHhWo9+5ow3>q]'n>ft X$|H/bRY(N;oVL|$[9E?)B.N;6L]*ud:F9 !`#"4J}yXz{߱bz'lPx)VXAj!Q#!ձ3'v񁆔[Y FE;}aNX(NkbL_^ N4N# KPj1lG^;7吸#nk ӣ I+-9TiҮJ|7ĩ?[0\bL&HA ѻIǧy$)8 z(E$:n3n>J-ëoh t gلYU9# 4VtWzbɨ*')AB (U}֎vwkplg;.y=!@;rRseY.h)LAƍ9H]R?: uRk*{T#Bw5ޚ.8Æ6tU?}Uxs- :j:*2~}V%{h.a7Oe s6ɛiO$iQ~L.Ĺ,cpM/[OZyvW#^[,kݿ=G8 N;.0]orgwCʹޠЏc85n΅s-rϗ?z@˒,-)"t`>+#+|P+o wyQB* syjԂќ kP=ƈ3{?)~䷔ZKusf~|15[~lAFAt+˱ @"=AuFR-> l&TL"\LSXҞUQE^  YVW49jw/Bڹ>4֔'!L{7x ߠ2XqUw&hSuAқA gj[ )!/|xz=~ :qTZ=DO#E8ԚK{ɣԡ9F;sKWy~tk볞SĸE3^Z1L+`x2FMSz^5Us{E,=DtfO0z%:7h/+Xq Ewg6~xcǜ8_IPv 򯰝tʖJ\z xpƆ؀މEu}dʜgSiUO UtcKCxМ<̓DPu=ϺuCZbU`a lT7Ժ6Īm^CrF:2S󇛿qS^endstream endobj 403 0 obj << /Filter /FlateDecode /Length 5133 >> stream x\ݏ#qF3 !yH+7j~NlqܼvwvV4tSU$[d7[j Hj6Y,U73ÿqf7pz:nft-zMxYm[/nsӘ_E[[${ws߼ivv;0 .m.u|jͫ-4Rrv"޴By+ëx Y3YujoZ/9E!*.y.3ezB#Ә'OqxÛ7@OYG8U-b f^֟,3ja4$[q|?r<m`ه!0F0"*]+d,R;cJ[BrM cs;79bݼnDp R̴JT* \3\@\ND7eoX(tgaMluԗcCАڰJxUu`.Z& ?o[lP.IX=A P^F8|* y*^~qhA kX8}\7\xu>2F|ؒBڄ@g:F\̸50<;5줷d»Vqr]lt[ZpCщqϽ1z`h* lVEjs{6lzg)(t,L0׿73è8[!pک85.9,lyMU|nYymY wr8N,]\`(X@CUq).ťT醡_ዪ&b bHq"I;mKAh}}p5 ҁZųiUV Z"-R0e} 桎?6{9ϸ泮+Fz MĈᵼyKkPہ c{tٚQ6o3uاTC6z /9^=XN»{lmm}6- \;kai$QYwSqZJ*~rI-v^ M 9 w#ii Ee 0/lw 0`zqaH.f{0c #]~-2%@kDtmY.&§?Z>DK)!Dp>;BѼkM q nm]~7טppX>JӍ qi )GP`Vc֨F8zW\ bψ )RFEQ.[9RAmVDl_'JUYS dhX/䆣¦ Y^s{.%kC݁X+$@ٮ7l!N95X\eLHaҲ'djO9XC;E9%-LsONϬBK]A c.K~"JtcՂSDӆV^Mг_euK jWD0APbl/i;y"ĔuBˍ <ɒ$SRl"hiMѐiN@^d |"|zV/tFLh!B,~v^: %}%Q>%H=:ΛAXAF#-$ QѝVLhYS).I!(TrڹI׭41쬫4uX80}|duh|Q;gd 9'4$M ӣZdiφǷM5[wIk܎9q{L~ks1LqlgF^/954w%;~15|@ͥOcmjӷ-&ϫJ٫ 9MT:}gu<1 aXhBTwFWU/ÍCKYC; ,|u0Tnl++"f 3J ƀV uGvFڲc We&;QD\3t㱮[;~k1 qǛEs^D. SXQ`GTufg-ԙ.C;+AM2}Ϳ.s_!pʑxc@)YB%}zbE31Z4~Gt蝾iEI O\ߣʻA * wQ#=Q4V3!r(0?9rX~[)ZwWB&h"P2;ɮm~xtji6񔚜|s/CRJzD+@^a~RaR&#^X lK;yi_7LJ^μ8pCևg@+[K :>`UyiN_ uS~ch9nr8 pT6Uf1wJӱVx4$Q+p4RfTR댽R_<+𱒠c,u2b-E bffC0YC"X6i̙ifʙPxHӜ(<3FW0 `38_8`BTIU_܎l]I3I)x +I}G7vcT˯'jS OKGܔ PG<:ءN= M;}3Rv|~;vz^1e,7۬ # ?FM8Lg)"SL3!QfI긫:u TwZ ឭ*ny@Ds,d]X 2\JM,q9:L6&>;l.9RR5]z٪RPR),֜XR)c8 X4n+(ZŤ8e>D)͝ 'Yoȷ&ܮgKoJ5fBaӕ~n/B +}} ~❏ГA%m?4'\xwg_1 3p qe{#G~03 P]s(lU1zfC\78}mft]Ѷ~h99I0a&Esy޶RMذF+l6%xoz 9b>D#+/@nS*rbjVP@wy}Q|cRmWwOT$&'mtLDLC5,+1F v:Z3tTBa*-2y-1N`7dbnyJ}ڤW?ч}21ߚ^RM C3[qXN!hf:J2КQe@"=.~[{xFwS}(@i]-9_4y| 9&U p4U4U,TEb6]ծʳtcg=Wqe!CX~т8B,qF\4 TwYN\ .V^VbA]b!J8 F.dֽE2űu `@UbvW5-W:!.Des y:&u 1j]pA1(Pk? 
%Nܯvri3e"ϖ, +iꪉs9 D b0#զ߿ i|<3 7JD؜[@KP _VB!F\~~0`ALZmؚ1͇\^%St>A>6VIti1!dG+OVO*/ v:_^$f plV޵nNv;@"Є≞COHkrpk:ZmFN ~:#Ar#CUIxWѝ7/lAU_{GV\A&jRendstream endobj 404 0 obj << /Filter /FlateDecode /Length 6578 >> stream x]ݏ#q79?$V4IsHL>'$T?d55⻇mIb,VꃜC}K;^ -Znplq͕-8w=|aPMˡpOvr%|wj# \wZ6rYfu+%00}Xrӭo3 s&0۫,p0+z5~{].7P6w1k7Գax } {XO@v:t}LuK+Eխ_gsa~J,u/Ya8_)3'rz7~m!`_Ҳx6ΓrA9wqaeo3vv{VH:W_wI$$T?gƖEWK\3pD^ZO(d IaOϗ \.`{I r8l!2z7ݫ%2+-}`5%?XE_ÓI1(\ ^)LB5^[j:ם>oRƄ&-*F;t1cm`elrŴ]ک` "wAP'[ؠZi͹bf2yך,s+|t/`MPvpJ;`Nn[J06߿ a D5 (@ɘ-Z2aZӽH7C[(@pj$ $؁Y&+Ԝ0KS3=:g6kNI'¢Ga:i}&`ɀoT(6Lm ~8h\OL1n^b .7(GO9V*U".G## !u8Rw]۞69 !ڣ0HzbD}&%3LÐ(PF̄11xWd0 EW3 KL@0cO q$¶%h1atFqE rGXw@WwwydS0XRkx<@mֿlHh?h~EJX dB2m>v΄5\9sE3^jlvZx5baK [R^ዜ+PeR*x'ZW"m'`/q4|_2;SrjGcC3iқm߳3:C)YCČikA HxCor/HiSg  :euW]WQJqJ twv# ;U"lE}IC0;QOBaJ$zy`#i2uP)?~S݁nR% PP>s{η.>Ee`go3z$wق.$9(0`&2LD-]l򹗷L[{_QhXs|q?L| 9P 0MA;Xcq@b9|s;-m 9سyaȰ4Lԃ^sf!Ş4\C?"<ږ 3`%&&)Ć7 κ k }æ Bٟ4`16P64``eBIÓ 2_l('pg vP(`<@{(^/p D8a' '>A`uo{f`ɦj^Aw7*Miz+;koK mt@EĮ- Z*M(0@jX\N]|.Pu<|{ ! ar|"vHC?׌%|'*5n,%صbZZsȶO [X\h} ǏTO8€g~S,G8~|!fF}Hg~H5GL.I>)R˃A X1}{hhpfD3BHNyAw>Еz= sC9˞L1iE_ =>#,&,ǬO΀S LTX~ wSr]!xFFA96Pu#5ݝql4m[H-{o}( f])G%>B60U zKPF.fAbuvc3YVir8%pOoXSe-OM6y;ԒO}H/x x)>;/b<xL);p=J݇QK( kB8H ƛr ȩpa8S8*-Pc}-#4yI^x1J/a# 7`ԑ+niZ@ݮs(L ;ֲ!oO@D 1X@]+{n{H7mڇqcct-Px)niتbZ1*#tK=P}dX`+̜ u.5kTaMΔ%  X'=u1QG]I6t J̻T10d|.ccO@S bB4 l"6}WM% ֢ \p9#͛Dض4hmߑ&7圓xK> /9qF݂ZQJ{9pEѲy1;RHr.( L$)A7F @ ڇ֍ "1eDR+ui'lv #TRyj{bY1ݷE|'`}>đLYz=$c}pz*~lYqewhV hM]WۆXSͺ<; ZEF776zv^ ev ۋf< Qt2i;_ZmpQ:0SM]&]kh>꼤~E0d0]VF΂Bsu0Mdv-(LVe'7ȼ*tD=Lm`N>5cxVmLe{"ߑ,V=8V5Z1"eYѶT%Ϧ7|!qIx(y~f33*+=BLjbӞb(nc,m".MYaEs톉(`30|̾FZN׿YV$N!)i"rPՔKƧqK4#Ztg[+k 2aתCwaiZvneM/9V^ y- -ײ*oĥ1<1 g3FG3t,V]U1yP_vڗ6fEfxJkB4dNJ/[1Y{ EX xc\8 42Ѭd^[ #Uc%ksrN ޗ1iN 6NlvD=̲P*rje  h0m9F?!(eD\MS07er6@*y?8pNbꏮj bZZdy”O.;2WOT"3x|8'8@SC%k.? 쀞 IDv/n`@M`{)Hae-QZF5~? W/ ~(3kLçE`B/ k0Xymqv_*M5ZدT^IQ;. lv4kB6R{lO9yozp]3e$K7E5fvƶmvVƑf-霩tKXg\E,m后URH^+ 7D{:!`>eGTw >ȗ\ tҡ.3hzq1c!Uu3/hWch/ ̓y+z` ;gCLK0*bͩ2PT|\|S62G?Ȇd(/O͵q AuS^R*7L.0da> \01y:ͺIv5'|,촺 lcӲx?)wYIl`q_FxH*ѥʌZ,4cY]JL"eiϋO1Lyǔ$G]3$߉^ nL?U$WR+ihpB-9=\ڜßu~.>`-W3~YPM=ČO:sP.hD)MFx3/΀wA((_֊*5=f w'K7?\PfC\Y#c3`X:6_u)ۜ) Wa1~s'U )P{\]S@"*W<4{8CEO-IYZi s+)$-nBEm .`S(<>.U<39y=gZm.؁(e7C~rlSt@_s*TzLK]hrLJ-iEeF$Aw@\Z9βVnIiaҗmvuYDE5e(A*Sz/55W0)swaVʲ•bIDXIhJ6OUSKAISm +|uUQU^T`E̐n%IklN /9wl)J<+T_KKSEx|x:j)A JͥX, 8 iF{+cuX|}}[m (IDsè^g b'Zg HgFL N}"]rB*]Ke[a_reN\6|n & Or-YU"U#񮿏.rϊ@Et:TsʼnV ŏv)1ވǵ)e3b{j~,Gˇ7^z>^H|kOG qFC G<`_WD(yfof]y6mgzJX}4x(h4om7J Luw.5Zzџx}}&x}/Gendstream endobj 405 0 obj << /Filter /FlateDecode /Length 6182 >> stream x]KGr1'@/zW{wCIzAq(E}pfVU̮Cbz/Z?/V,:/lsjx/.~"lx| -oھXHٷR˅3Y\o.6ݾd^7R)^5j{-QD7agK ozȞ:w5+.S拐l뛋ba'ʴZik^_.%,} <8 oiVBl{vW7=n c=~ZBW}6v ^/=4`ek7lB^7!ΨWlF0Fͷ٧٨U|1z6ƭq=Zn؀KBtwRF򝭍?ւhJ'=6>_@Z{ߟ6!5sڪf:>hGݬj蚫o|ylZ!?iTaTڡfjc\^LGyonȻ}K{7npMvNjDLItk WA&jG+|;7dEߵMAs+mG#z/ڥowqam_ӠO=;FG lX&W(VL0+¯H (a8wSEmwF]V {ҙfq{>2( zt2'Cz˭9NvإFi00w 2`E IW+>Z;lgMzZ8iBZ- ]mϡ4[`:WiYoydeygOxBCϑ,6dm|Q=guX=_zhзHҫ-:pЩzxVfiBNď w"LA] R=TLvh2V븄J}>=snr3ɀV)oio%uH&ɧrZwj>+7]l*vLєh{ux ʸ]陊ǮATVl[gY_w0C@}[sp$J+8lLBqsK# uŏ@T͋ Ω1j{L݃9ѝh]oFA65ME0O0m5*4 "m!NR-GLY%VdN fh4nqD0ZkY:; @|>kM|](94Gѧ9MWnH[p۹!O+~8Xdž^<PkBPok uWUcxa D 5C_UmyX/* j]1g ؍H"~0zSaEަH^p9ڱ_)aUwZG:5<_&Oާ͑ E%9TS' aoʳf%S3X0쥙RQWl :G#$bPp{TU :)ϟ sؿgl\"(׻muwZ=#3 ̚ElIʔ晛bP=tgCn Y87IL X˗En=AFI85jDQh 4i.N;36wi(()ѫdԥ8 @}|CPT ?0.؏V͗j,7'19Poh Ø_x4ߗu1Xi.BW/)^Fc`5 F[S$ sD?t:<#d3*%Ww~&0h367&æ3o31L2WՌaZeT.uZf~<ʛ[|8okpQDq_:>`h|%_Q4B-DO6g$4>TQ@)&!N} a)>5wH VampmKcNrd( GUQڍԕmRݻ.y!ݤqXǩվasƎJ>Ry!aX2p,R rYy/KBiR%˄]aE%2+^q+&3/BS# SV EKGJߣS_"i><D="EDy7ϑJv.cWT_BQpLb1qljzIؐJ_`I"F/ c^j#J]!i@:1F ٷŒ. 
47+#a$1w)JybՊ5aF1/c(W$EeZnNRa&SlJ"߯a p eUd7 c[d)yW}&C cx` C!C .NKBGY:HD ߋ\{hFM01DCHqk)>n@|')e(~6DpB zT]*(Fk06ӏ7cOV#qV\h!@xBa=4ʑ9AIcBOyGFŽXC OY meF9k9͓n3Ѓ4cmR'xQB#DZv]O(A*K bKWJMKU֏3<cdSל ; sW@oYӻ{U |ڔjڃ+;Z!ύ :]7M$J*#=4W¸Epd^p]&+o.}ЕT F3•=& >}!1{i> 7M7agr0H2y$DEd> D$T8AB׍KʚN<`a0ez cLOu+pL=?;{gv*yO}ZMxⱋw4ITiiVxy 3A9)@Dy.) ШT\bEOs34M$PfD~,ɳ]*m@ i9,V91w5`JE }V>{,aX{ڏw)5Sj?0kSluK*DT@q$z]yc*da"R8c~L)mɂyUɛ@q@4d,Yװc0D9V4krׅIf 쎳R '䅥pBi:F:}xwGxW 6Yrt2!rzo"%BmOvhF}:ЧhUR ΉM]|,պ KjB19.^W @*$P YCrB,E⻁/Ki}kX c'(!u<&W7٥[D (}.e8y*ۺuU9y5VG&ں7 &Su=tR5=x1? 3jDȫPrm?9FH[E לA2EX)e5v߅GQԇ."*؉u`7 3|qՔ>u7: 6pwdMNXW.z˗,;í) (Fgp:6)qJ%#&]x߉}x.OЅ yJ-+˰+z$rJ%ltV χ[AW&@t,%݊Xx3||wI= WWؑTPj3U&Xb֨G>?-؆lL 4o9OXcPrpw(9½kh׫f:Aw[<)raa"|' ܗ]V'j3~pB>hd,A: X8@m=_Iy7"+pWKm!0fM6TXNn2NR I]K18f?̾1}ʊhe&zAHS!z?!=O3]RM_ؕ1HK3VAhVpAgJRØh8Վ{w˜o.(Lendstream endobj 406 0 obj << /Filter /FlateDecode /Length 6805 >> stream x]IǕ4w;}Sa]3=DВlrkaAΡnaD H%*3 f3b&t [d꧋}}o9<.nDЯ7ۋ?҆.opWqŅ3 \>iDzhlCXS:zy('oV o}y0ʵxRkvjk;ams6qkZ)l^wY|z]n V7W~ĽV2؋KeZo-oQKi\lsm}w?6xEaZuUub Fj.9g*>K1ïA״jv}?[,bb+8>oVto𘃆/{VjMdp԰ fWPrj R0z',>bPq޷eFJ8܀22_|K䗺sR uu`6kO"f|} #[ߙ HRT#i+Nd?xDI_5WH2PQB$$RfqS0˶$IkRNk<8!*AUQj2)߯k\=rss`&B9a>xg}\Av=CV+Ώ\h/o5.Gҙv!ҩA+EyI|s.M>h+)j4{lu(м_5}alj[c513F=͒( ,P(5 IGT*Y[إ&h6.נiv{ws^GRH{Gi TetJn(6𷒡.2Uce> ?s'!. Duc]Xeנw|@p|: *[Qg2 W NPa8IG >Ax,uh;?*v*tC&Qs0ohK 3+Tz,7R<+q;lqw8sI DjĹNuq0GHmfcG #iuN3.wiGT7’<  <_F(M]#BЏA"(׊1DmZx@Rσ-LׂXjrƒ i(9Bd(Z4PLm ^6Ψ`8P:Ty <"'.s{Ae!;Px=Թt9mqVN+lR?w:m@o F2R "Aij~>*cJpZc lyˏA 5V),:WUgr"Z_lx?A)? ZL/SjȂiL=o9!>IKZ!q 8BqYo ]VHAcpѮ5wA@zQ>^)pt%AÔ(DN8!1$Z2Lx[9 g4Weߴv9Bļ 7t'P$!xD8afwDXEaJ6)k;Th̛xP@; D[?nN?axW&m}aZ?i*,+{)z,݂vO_-r"Q`4k  `xh ZhDg%\Z)?,ٷ/Y,3'n7;qbφ>4[HEH&Z53+qCjJ_bn|=/8 HJICjy]&߬ױZ:#'HbE|Z7y˔ѭ)BD1 ;R:cͼ<;lC!]pvˬH?}X jQ6:[,{A\ocf~tS!]qʌ_JqQ $FR1n s!;9dѧ|׏( MQooc#=?hg\N<8W:o $NGYIvL;iO-6Lk5#:\)6 b&L3ą̄9<39L4&EaLDUB ^\cӼ3 6!>(Qky"?/ڧɍ9a5]@Zؓ>QWe>S79g%xR.!<-*֞,׆)!gjmy];hU`doE4KpK`\@:ƒ2_G &"~G&-7*k üqV]DL tZ t;[)#[b'"۝NjbG`S2g~1ցXqR1t*9l,1@Br / lDbQC%v8Y߄as1=:Eg2C燍 9#lBBK0]sҼ>!XHm}Ls]Y4]>+\ QI$HpS*/ 8BЏk %9Y{^*f5eͣ74pq0 8Y9"uŰJwT"@9$i1ϋON0P42H1<:3[dx[vs3aؾu;+TZoOht5-~dY;'#P *(q_0ZZp7b@,X*m`~CFTelxibnVo6fC;`KM) WSD-9-:?ib8LAa2K=J9^ %.+󘒆5w (<772('iìSۦ6> qP Q\5/T%[e0M͒Ē]\f3mbYZW]ڵX0:(?y`XT7Oε*3ipxu{-4_)״i5mLp;9N~epNr8j~\HS栁4.PRdr5T6XHIg#R& 1ݾH,nzrIρ3Z\)mg? ph%Y.Є eL؏߇w"P`~cQSL |SKa1ɱ2[\>P,?svYd@ZG^<~r 'p^ʢЯ;!ݠ͉0nY\vSY@ةզy:z2բUԶ?0B^`( X_tb-Ť`r̀TnHAy|6wPTGO:+j"m>Z Ѝ,0ϸ`F_)>_['14_XFLmJ9VDblF:Tm*6~P93st=H'D+;?f TLO TKX6:3g$vybh y XEG(}UPzG onj[YzcJV{leNB<\|KF11z )~ݼM`=ZɵVׁyQ3uͺA7v}[N F-%xuj2wqN-(9}lxruDj[S~vȨK1~Oo4?]Cz`Tfax;1k(̃B"N|Bjy W݇&6fS葵*RʙK8g!hUMC"7S32ԡS5qeQ|՜"_)Y|>}5W~HPH&:`3 !\PV/`Vy:Vb-ي; p;p)Ku͛뫟7o.tPWa^Q1@}>M\ = MBMZKkzۿ'\RufN8o^&JȔ*oNͅ$٢\ Su(q0h0HstrX;_ZP|ӵ64ҧ5_#-UX9)+{(/q)ӰK1oC \$UKl'hһg+bpr.r{Q*-ߑ&Rs%H?̲ +Z/;V<7t] jQuOjF0~heyFM .gk:|6ͼe5!1 xJ\1bPKQ,竺Qqw v۽?Fz{S=꩷=)2m/g8 #% .A$ .Uq͊ H~_ߌWxO&B~,*o'*.YP_P&N%~sܕItDI m\8\*z.@-Ϲt·3ʨfi?ua*xP@¥kf)"G&BBv"FټUd'-cА.%wL*R gX"GTAqƎOI[s:\)zl̓A$':7c2&:. 
/5O ha>6Mq78E;NJ ^g74"4@QQ_Γ XٺJ%_q.COUzxgX;6H*zC⻿BVsw)Flل "q,;~r[H)L,?%CQMͩG "pw`T/T9>9g.E}4ʈendstream endobj 407 0 obj << /Filter /FlateDecode /Length 4050 >> stream x\Kr=AǞDCl ;9؈a[^ )J+iȑ/NUu7  t U/˂/G_ugً3N]v#,'+?OBB(qn-gٗ(5WV8TUzJ) WI?)ξ1ұC}hClgL2eCadFTEYr6²}/M2!(3dzQl.*ccVU[\wP `r5`.dnu%m*ߋ6mZ`Dm*߯6AmZ%K ]zϞQޏ_]z/~K('Z)WQ2qU(FIR;Ϫpa^h- q-(߃Vg\$PbؗBD^N죴>ds.('(_M.mdג ,ՆX*7ɥLL T`rXIL9+O_ H:-Q8Z.a98.S[Vg'oʂB$Qadew|Ghؾ NgaH NU*e{MP XT{Qoط{;!R!'{b+a[z̦hmya$X{+r'A>R]jq4.VbrkSbƃޕVE1pp'x[3Nf]M${đHEVp(#0[)-EAs--^"2 TGz!QmFDƢ+:M _*n­ZnӄdMp0\Xy~%P;QqIV{s܌{ؾq.DznBg愲\-3O\lkohUNZ:$^m&.[_ xoRa|FBNމ[zfp5 "Vs{y5mt;E^* *?^ov^Eңdf1(EEp;asRˢ0!Ci'&'=U: 꾥q!Tõ?vCRVIl>l c~Z`^W2_ GE|蘦Rf8i-^X /b[uze`Olp(ȿZT%)ŻCHKXXT6 *dt!o!FSgx1HI{O61ٗ(. ~w'8)GpuSpd.7:bud% O1h)q/LME7y \-F'Od"- 63Z(Q.ɣ D-G5яKxFPRUnΩ7M(%\ۭL?+>/Q䇰 uRLaC78ug~6YnZr=4P4@'o*x}6: &]°T}7O2s`3@N-qzSI3Y;QJș:-XۅuU*O*%%Wdq.LR )xD}x)+tt_} 9o~w_WsXP 7Y")fl }l%DV /o@>dU7I:ӆN0)ط\8Zendstream endobj 408 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 342 >> stream xcd`ab`dd M3 JM/I, f!C?YedS10v0wG{8 ̌E% Fƺ@R!RIO+19;8;S!1/EKWO/(FQ%u~s}r'V7fv/),.n^[]X{ov-]ڽd߯\ҽTlYN^KH>g7ޞI=l?JT^:endstream endobj 409 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 480 >> stream xcd`ab`ddM,,IL6 JM/I,f!Cל N)xzxyyX/={3#cxNs~AeQfzF.THTpSJL//THKQS/ f*h)$f$))F(+kau' c.##˷k`h\C|5W7"5U20!,vNY ?mG9,7kʩ;ۻۦ’Ć)SfL=AnkVwsͨnLnɗ/5,oɯn,0w&>sS֮;ayV8QQܘܜ-۠) qU-U;}]b_قSM^m5n9.|n==}Lóa֢){20TԸendstream endobj 410 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 2896 >> stream xytSeoHnKm x((::Ȣl-KK[E ]풒InٓfiKJ@Wh@aiAmd9z3߭K[9ǙOɽ{D B [ZQjٖҌCq|`xY֯{(% #"TrJ )9岼U+W>l\{3!+/1p韞YB .b1H!b=XNl"V/['D<@DDA'"6v1<16_"#"Gg!jm,qƭ m{Pftv2+㤥bI c {j*RX -nkKU>hzW=$A"/[$UZPj<`B/L7>hy"}͙PYh ! [ݶ+h~.0'nyEv\ JSe@fyL8t/|rhV ׅ I)FJ{6=JF~@]@˲Zj^ ;Fi*i>|b@oZJqtB7:e:ϠGv$ HfhG[ĹJ 7|6fug.ױO.%;91Ƞ1/_a  W#أ f30:Qn~)|z`ocwD7]r!>^gd$Z\e)/Y Ӣ1ĺl7HLЂ˜OрT[@6W*j"E vA%ih~Ġ6נKIK6`-QФ&sKre8h}z]nrX,J2W dejwЀ^CZaxfEFvn:=ߠ]~G%V!~Nv~qHX"WUB&wɾnʛyL'5Xl]%OrSruJTk55ؖ tGV6xruRBSHEWDϜF?tR1!=zuۼ,|}&PL$/:͡7aXcĆ;2wE6ҝcOy͹JO'{nݜ\?N`X:*y:i*WSP ~RRnt{jGd $tFw|z*GaAȱÃlOZE"9ASߝx;fwi%hf -Gf@L6c Qc)׺,cVb:6iY?x!+uB/7sYѕJ,y9Ê7jz0MGrXkŽuɍL:2"0FeB[kc3Yr@W%Ɵ@w]T6| A+uBnN͋ p9.OQX$-*nn9 pF Pr~C{_[|+bBp`(N)12٩Hskg,ua?'2Lb7c vP+ܭT>bft`h=vykqFa:z|'2O-Vvy 9 V<Ɗ~{ ^Lf2$;˶shi{Zd&\maB{\ӬIdEpw:jD*5UB#LՃpC1ήZ3dxPjuzak̴y傻8,;B1VѤCY̦J:M#wh,5Rܛ{UO5Կ _%9cV޻pPz9I9͗`5 _IF6G3p E4V(-|>#T5N"K"%HN^U+|*LZݝhQ^% ˧Bn. Ht&qk&xe vO<\ucOSymTh=ʯ=_ PطBIq';XZ mӴߑ~hzhB( Đ/WH+&Ds mͲ|iTY;⣥nQ>C,9-jUbF?eoӒ.ilo>_:D nZ >!?Qᗷq<& F̢"ÒQ5߾n6*mJvlkMv> stream xYKsF?2~jJqٕd-(. 
Ȏv |0k~lN_dm3z?y:6_Q6ln)P~F\>v1WfT è#q^,2-)v7`aXB+;/K)eA n ʴ& ߶ޗm𒬪&>NnM*XeM<$ypk 6ڒn4/P:J˝WJWPQX,KG>$E*Uk/lje tn[weDB1.^ܺDlf fY/ Lwx!D}Xp4;*=C.cE +Iڏy|xɆ&PSuQ$S> JGB(cbYM,#;͵/\' |D$>}Pfʢ/unf"cv[ّmmIψFzMX3cs.LGeFP[9ikL4ZIM[ݣAǰy"hiedr1#ȟ!is𐡞s©%+4Ȕr4M!vjɯQ%3x!Tcu !+HMp)җB31bʀp=MCaIX0@-'uQC;NڳDp8c8ǘ*3ìjC]gp&ekJbgA zi1PAWcۀq,z:BW~mCeO p?Zb`6 nz;#W7OB/ z_+UG%P"Alr6fY7;u:l;uynKB*ņ1{X _u8v{a99r4&PUD<SMDc^Q2(1Mݗ/%p,\ExG/}1b @AmӘRf$ , .&  ]mz2yڀru h }q"/ e"qءJJ}NI1;Xα ?'UfYH̅kޓ_&ڂ=@{0IVk0+VTv9JENN^$i:22e%>h#uBwuyAVGwْ4{0)+NB'o.1kr.8S+VTZ>__;4|Fu~ LFИ@s3aEh"yTt& S$9/]h-vU3@ I՗~:GX%|e:~Qp2vulfGqpG}[5Rn ׺f3rE;Ę.fẕD6\ա K7mqejeJS#[8+p0 ZX 50*0˅Hi d59zq0EiESVD(8O,rT:CIEOgaendstream endobj 412 0 obj << /Filter /FlateDecode /Length 4075 >> stream x[Ks7"%p%SIYN\)B.e1>=3E*؝Wtukfwk[\z"-Û3oBW·BնR/.vgowK2͕9sfRfeoEm7z,Wqg~h CdµqY?_|b\EkA.2oR5+[g 6{ ,X(TXin7J8J6R |ʋ?.@q:ee>s:PVeauJtq思l WYcV )q.UY%s@V\V.k*gmM,M&8.䓁bAEZ#s΃bfZm˦tƴpq _7oE$@`YlHsHX.Tz96<Z;ɲ6Z8U 3v8QFErX k%s_#ky̓Zed[LDes \Nŵ4o)2Gon {y0l7͇aO|2ASg bQ}9ǺMDѤ5p 7m:vQ7_P<_V6;?aUsujAf{& y#$w>n,\kRV2 nQwoK29,WJykŧؠbq vWLR\a+ .H>)m騏W3RI :}}B]OյrmJAxͻv;lo9Fu֧$t>SaKdKJJg*ar~ c2_@S8Hb5*j6h\Ժj\zoOK Ӕdd>C)tѢmR( g>YL4qP=So^M,^99ټ5\?[ FKЬ|aۧ@OoEj v_O yܬ_ǴOSI +-\z 令%|p!)'t0 fZGa`Ug!~lN+POH r{Wc3N$OYp9D4*nʮ⮘f ,099r&SL>oa?6 % .QsU< =t| g}~$#"r_hT8/偖vCn֜ YI*"<6M3'준\tW8**C@AC)j4%|g x 3m?aPMR뉘!Wz$GC45@Wyد_54>s!<iE>(0HN3Z%!teT0\Mn@,DtVqpV V첡8,d]p:RmO̲`,g]` j, _-o/k0 `5)h.Ii%)ZU:`롟}eö֍:rp+ ߯#2% Z|tR"uu(\a'M;‚g!mvVs=eË3i88?nM*mpŵl-1^dBM8EIzd^/j7+ p=e irSG&`ex[ZHn%3s: .okvO:2&T:dLR +Ϯ@9 %s4 פ\ j*<,#c Ĵ'ɮUj@}|A؏x0*o2t_gs@ :>͗ ]r(&Ƒk,̗b%2$RMYX*@TĻ4l+lũO;L$<<梡3vfafr}$C;2[FFzL`jCDHz9 gIZZ,$ eQw(v0ךzb`WwAVqZR Tw2^Kݓ3pcXaWY^{im.!<J$ǁ:"Fqr`Cb~:91  +Keh Zeȸ7KO(P61\۳0 O(Q. S :u:|(vw|]'&v_J';|tQ&h:"&I D,˪P5(~4h5Q"8$L^&-TBZZ5ԪFVP!Ć,h@>w>)O J&Z0JV`,6 : x@\昢K }gXt%_P²TF2zޤuTگnZ#s!#'G9y˕Ja{mSQ@#%eP/w|lZja\eQSfh0%*`Ig_NQMN$I0$D/gUm\[XK2"|xc PZ"畳{ ,1)< v(4Pb,^HPnSʾ@?)MYodqƑW8)(?k<*9XOD܆mJOנ)w }U/s@xSV0ȅ{@,k1ʐU}gG tys+-=j6]Pqˆlj 衭 Zf~Y ּ s1{$֔ #TtXMS>(SYjͺ1)JsSbYDFĤŎ U8 Ly Xpʄqܜ~xA!P{ܺ i:pM_S )r -c8:!RażO,&Ƽ焞B&d!6TRuDm'2Pna(6"l GǨ!CseZ |g[dFi"#5I=uЍa|uO<>ZP{y} MSS,ra&* yPMrT.o jjݸ b0QeknM# qA؊8)W3.8Y Qw==LxE=Ax^q]וH66Pjύ(RYAܳϿIM˚{$ 7JD{NOarI 2?pBb\GIK 75g6Нw jH=ʬ EqLS̲ÞXBJ:f3g`'.PnpWy |}1hq~2N%IL\&wq9GXWL=i4G'n΂bV!y,WT~Yb˂e.ĨBmC:^ YƭCL 1gfc{)em-3VJty%j w'i:t^+E3PF/ƍ S`4!C|ׯm-4vendstream endobj 413 0 obj << /Filter /FlateDecode /Length 4516 >> stream x\K䶑iFac&؃rxa:hPSrpTx gof Y]al<@/ϫn~Y=|#诫a[l!M/VooBWrb՚ʬn7p۟4utznƨV4w >;ᬫi!ZZ'_7ZiTlFX[}=uiC+eu;<ćVVe-;.BW_'&1FXYKoWejgmXij'}!=K7/9v mtvwK.Ul&q;Npp|qZխlcZY-bq mvzgqu:1C6ϣ™q6]z1 oduE* '%i^h1Vӹm?#B6~?|3jGZyvm鴴5j(B6EKH`moVL/߲;Fj7cIW C{#*f`A+V*Y%-)W6է=_Mҽ5OKdZ<ɽS_}l\m~pߜ` nU>q:*Sam4di(g{v8Շ7>@7ڴâ6VB45S=>ŕ*1 +\|!inmX}E1 k}7nSsZ:/r-jtZyugF/^VK"=%`ʦl{$x:?x4Bxb,ὼ2^2-[5?/x:Y{`VEddݪ[#.U-$< 2jԮi | &$Fyw/t-wǎM.YEÀ: @˙E#8h.womUV?-:O1EoFQko$koa>6v$% I<2{#ff. YqJKN6&XRay? 
, `%]8uG 7U\JPk3}X"%aY<b㓧IM߲K * YO ;p\VOvۼH[U7QSPkTpMՓ1D v-X[p;AF"9sz?p_&fu֗ AqVkC/QG׏xEc/uV:pVGO_!MP:D8D Jq:=%I3g:h 58]kk 6elS@Q%wq`qJ4x.Oa^8Hִ{۠.pgM--s&IcvډC #ւq>_ 7%Ww˷hy~­E8i-pS|}SЩ!S ;uY°~l['Pfkͅ yh(sڠ$\ϻQJ朖ǧfv0䕽a+BҔb(E-I$_v&f duڦ Vu,a3/\m Fy(˹U-y(|yH0ċMb`^Jde1*ߨz $fu ?P.Zsr>QM=y4btܘkT5K.YmkŧZb0,oS"dH߀hM:9Փۻ[ܞ}M͘xɂ4 J3q|Nnp Ann~й W_f Gd쎻G6FA01n"'UT-Pl"x(\פ\ 2.ҹNCh*2!#w)1EU1}tPt]N|7&?ӄGOUQ0z)i 7rKC*`_wԴN".UVGƲ+Je" 8nG ab&E/]/MSm,Κ7!0OxOEM>T"!nDץ.>N\/ ^6nZbocUydeQ3+=C(( xN;Vp,˓cox_UkˤUѾZb"87B6WPNkڳ5s$̺p\r^?R'Oz,Q/ҋvf'n*Cz C>G9Oria2yX TL(*=_'5ͯ{u$}yjwTzϵ!_qv^9m£ݥU"%JQǜhV#)l?\)&W6v_΄ D@ ]տѥ׵4~hTg,F Jx) x\T9 K2xdB݄TGR\p\CjR*py5:t{^x#5` HnkاB$V:p%4)[YPow5K"k 6%gJH`A}'\PT5dChbݰϱCswTz5 %L<ڄۤ@رo!C1)SL,$)a_!z\3ѳe->ILgA _=W1`i0dX| I7SxWLGH4y.F0oZ& G\k< Zx*8)&!F -VvFcןKf0_7օF'X8|tj f=ldm&Umqފޮ!Oafzb*9=WVauh_n!KF@@[?|?9p̷]eYƥB 3,ZCɡ.C0jg\K`o~#qcx%I^j.X7&Jh&ƤL֑\pZǡuI3-OJ{â:fߏoƷy33gԜ'ĕ #G㮜#[L|2O8ԚOSKP8 S[ޠ,ҭS{ú(|f߰HĢE.;=pzXnPxxELYt g, y`X?5ժ'|qsa =Lr#%ldTW#`&28c1o1#=Y |hQ-vΨİni]td@ :(@ 9DGd[گ/) *3Uȅ~.OEip v.Ҹ|.NũFQi+$K_J1u6G2zsjWwif= 4jsMG`J0g|K%Xu"endstream endobj 414 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 2439 >> stream xUkp^a۔:G=fiH4MI 0p_eIÖ,H~HocLMl@!4iǤI'}Ug5I7sܻs{aK`<jE 165J"ۥ=d yK2krf=ww^Xۖ5+0X+W׻_塵\'|r]RePˤZG6m" Wr;IE#%W)vyHm-EYȮ;ɵ;;)b]\&&_)zQI )V*$2Ll jHQQbwҋ)բc2[2 )UZJBjL!$BKJ_yPJV#VTZC,޾^ZvW#ܤ;)Qu|ӊd Ej4*rTjtB :RMIEj܍ؕo#jJ%7ܽ{k|VCk6p '3*H\Z&7l};0 _TkM"1U#; ǰ}X1v;`ؓalہvcװG-ł؟x{xזXҪe盖^N/r3Ud$=# ZQ m,BS,_{ z0ԍ/t1X*8!A/@:]+% rh ZTS`j9?C7S?%nv;XCYyѮ&|~h zaqU^¥4CAGa2,U\͓w364<*(@GWQm]Jx6h5.-ItirwyƬ4:]4kg b\ӾD/W57S ;Ä7 "ޘP~A~RFZ'N7˙ih= :ͽC[NEgf4~?U.Pekc&8_h rEBBWΰ'Wa*/~^c}eC로2l@ls`*J_. ]Ȕz8oH"Hw;b}IP…)z6l6U eRҴz䧷 qg4y^iq B ==@ǛawIfP.{jK7S]EG*%3zefWtr1~* mYaTze<|VBAj/k.T݊f3'ѿiXC-GZh8P{/Zr ]Xhm[T`6fSH-X]_z41ZIY9XpsJNAqU}p{Bzat Ca5f<q snjy(OResIsC z[c^?nj NRflU+KȦݚ q/I 'Z#ز54Θ' Co ^:7TȚ5T,+D}Dd?9}{ xYsDUOXj>R?/{ ]B3#VpY湆]'!qfR~6>^}\/nyYn񼹣-`YnIx}^F߸/;xL`l-.av+mILB?͞8_9WA'ftנG?@+Qcj[mW,q¯> stream xcd`ab`dd N+64uIf!CO/VY~'e|=<<<,%={3#cxJs~AeQfzF.THTpSJL//THKQS/ f*h)$f$))F(+k꡸a``` b`0f`bdd?Ӿ ʾг*URmf]@{8-l_bOVAn7Ʃ33u/<6NJ?DjC^];ǂXOy҅?|\v s=g7y'Ly7}:Me`3Fendstream endobj 416 0 obj << /BitsPerComponent 8 /ColorSpace /DeviceRGB /Filter /DCTDecode /Height 135 /Subtype /Image /Width 186 /Length 3880 >> stream AdobedC    %,'..+'+*17F;14B4*+=S>BHJNON/;V\UL[FMNKC $$K2+2KKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKK" }!1AQa"q2#BR$3br %&'()*456789:CDEFGHIJSTUVWXYZcdefghijstuvwxyz w!1AQaq"2B #3Rbr $4%&'()*56789:CDEFGHIJSTUVWXYZcdefghijstuvwxyz ?cPN@3+г[5>A5Rp@ݑ{Y$r“:Z(eN>g'kR-1JqQ+/O`̿Pl=WoXK(=2Oq@ȭO,fGp22>M)>#PHRfD&gSQhzb4S왜m Mk6ǵjhh~^ə?caKFiYJN{&PFsNہҍҧۏ csڳ2N[x7r:mGCfwXdR4Ȋ䑐GY.Ұw6v?[aA$'ǞéU/umEkb^-Zw S3Y]쓜dSPvѲR5 uciA5]x𒨨ZuvGi~3bզιWPKd0Hc`;[ǿkJ(/n!IRmp9jgBQݎ5y UEt"ʃ)~oe=f$ir )5=FH?*e15,N䝭ZJ*sXF~Ċ Actl8l 9?iJ~sQ(4<3kgL8y>-dX޴r}2MPKbiIc_"0?p I4mAUQ*x)hzڍW/o$lTkj_0wf@iCָ_If(OO_Σ6UX' 8u>ф~cl߻<~1H fSIn#1dOU/֤.ftYVjf[rx ]bT` *gU'3赊tt[̜#Je<==yżo(DJ:1>e`lnQR-kNsH޾ŷ1B@]<09jԐ99#֘͌\zSki'2\! <?[a xOn\NQx?žo#'&1}WDRJ9i1͇2;1'lg^PO!k ԂUy&V=5rLԒsC*Sew) SKzӰMN{҂liBG<9αP5CLïˏ$=HJIsG&hb7Fi pOK;Ի.F7 7P16⛻ HS7SL¸&zfMƪK[N߸1Le-Cɐ0Kzڑ~ ' ~iv3W~+}jeJAAZwad@#.]p>*X֪~?!曚3LLQ`%F4f4L6PZ#wV$b:?#4h;3̤*yd1}yY4$i7M4n4n2@ \EH=?MlҢ6Hѳn(XF^c1p;:i#L$8lV@X]gȣlsϚg $QϸTvlEhi񽕥Ϟ2)Eo s(Uw P]H|Юso ²Mg$0<"B{ B(F YŤ )BǦ$\]kBE$;P5mKYbG-+[Ůnn"#$G班OwG RߌEZH @_3<?wW;=Yp낧LS2op ѧ͹󶲤z~Tn3p&^Ong'u[$*śU.~AQ5^K#a? 
1hi=cƪ !}/pIVnš#cQ6; i$UqM,|NJiUKg'ҤTRm]Yݣ|L@Ms\*>TOY)R)JxSm' ][丸<߮pKw#?z#гFp @})s >IaXƺ=d8HN0kJ [ 5x~c}9da4YЦn~R/jm$r=:`WqڬI ]yr}㱬[HK'QPY$1^!ITCX+[I€6wϥhs5GQC  4EHR Td^<0Gjd'}81Aj.<ͻT Zz>0h"4R Z((`[xg5-i1\>TVў$rhI .XF2ZMV2 Ad`ihZ~5Z+hc-Š(HT$db1ӞQ@4o~Nf(i3EtQE(Q@ IEQEendstream endobj 417 0 obj << /Filter /FlateDecode /Length 10426 >> stream x}[odGޮ~47è5^ˌ׀.W[aiQ-Β l_D9y&[MWEɓ/i~?ͫEwݫ?y{k~Wz5z̻7#ņ ^7gr۳s~o8_ST?O!9;O)B=8_iއۛ+LcqҾ԰ry9j.ig-zCWlN_^dkvPfJˮp1.wcU:PZ-#Nop1ۮn@)P0]J^gQ㡂LBqP(*H +Z\I2qeWwGJV@^)#zWxRӡs %:L(wd@Xӹ"mb\"lGt@CU._YmCSǺ@Yև DTuvC,W?T{s/8H?n[5a9aoE8MƸbnPmVY`}E kd#smyC.[B@UH @Dp*CEpC2 eR zBظCVJi-=C!zH"KAA`^U ʝ<m:E ֪R:%x`ׯtaK y3(,"5Bі xTaPI(lU 8l/u%Fȗ\]IS'!# %`b`HMdL6 AM7rإAN}ABDH]ʋIM͗A VV Z6Wt |Q^KY[V >MVTYB -M(N)@.E PPJȅU:A*'X?̎:bd[ZI'=@3Nm|2h!7.D9)X>db勜|V9+8'drS*hJI*&ӂ%aLV_qLEmQ4LU73H *r 6Q )cO"i.EJƯ =jRM2V`*QZ}v:3DQP&̵Sәm6. HBC*; ~*|IÐcemD( $#x7;F(n4xV؁[/5wN! ?((:0i,c_}AπwHyapr"~~xs3WBn}k 0"pa / ^b<$N"\G,Q8EoW_wo/?\}]/7Y…a vİTMe`3Q7&eZ(gG_:jPQËeu]2Fd5(!AG{4bE,6N7';#7%" 7K͸ɚ:A oJ1hʑ`$ƭucKR|avJ|Y/nEwJhzX'#:vԋ3g`/H :|K ϰnv#dR(8dr*^'Dwan^G$5lN3$CZS6 YZ}V*zj^54)q<R "'#faVNuܨ*˂h85Z.m6&K78b{ިD4"GʛTM,u3H[S,F!r賖P,ZFg- C24+Z&0UfEKLMfEKK3-"u~NvU:tH{DXJ3K 2Kh;38Lwf솦9.x9G>DaÐAI" @ʽ 'غBG@T KBi-DlGb$5F|R TEWQz:މxRU[,~biR}&XT1NZb"l)!ܠeDdW_\U /YQHB>?a\H"ЂGx|pw; ˻ bNCVA" w߲FYwZ(SnuԠ̣XG 4jQu]2zyԐuԠ̣=q|?d''Ƿ2oY~ >Tlbݚ.k>"!QvV(QfQ6xϣ*y1cy퟊/3Mٽb烡0ͷ~i1 ^ ry% Im0i2+5 Y*FugMX5QJ,A ч˴l & ޴K%>1Sv.Փ RP^fHZc5\퉞i}^ =:kѶ)$ƃߖna"(6#L"' 03*0Y36"6|'a| ve||ue|x5WYg׭wo#XVf \ƾ<"PV;4Wv+4Xl͇JxԖEu fѪ+?rT *rĴὓ(DĤ' (DrLXک@佊444(K#^{&MB+qrD DUaF\Y{BHzٗƒX8 @کF)s%OeRbyUƻq5 B368 kf=Q׭3@ȮkiXLYİdJsxtkhIT+UEX w- psVkg3ߡNZ7G;uoHk҃Vco@rRrq9JɒfvTkNf6(G0,K%2Z:sdڇ?!aڤ5j A *M&A>uBbG5)qAUĝ"Іx,0BILM{{/ofv4I^(ZY")L3P5H; ^f)gPãE<̠ dC'܊i"3ѝ((mI cbh6BP9.6qozVT{BrUdgMG:Z2٭Ր%ܸ-X 6D}>2Dgm3d5DF8H!xjXgEd `ΰB3pB!d6oYǃǖ NX {&灍OJa Tf.(%XIrLaP D;*(] &hHfSFH5F'H 4[ylAT/]*8j]IvdodI*A7Z "dڤJM:+2o7v7'<֨8/ f M.S/-B%-}|1nWoX)/wRۯ{4" t L}ywfWB, f90#H-h΢Du LX5 3zAʮaɴiO髀q%$ V8-7D<MY]oq%>3-m Ʒ k3(:QFٌFe3*&2(QZyME{7wa3F7=YS(||BB=Kx6tD uKxH1"c!q tw6t1jYP <_t^+kJ"1uƒBwR|%̙wg#!uvɾ 0FpL|XY_la3jS,v[ysm73ಓ0 |*] 'yZm}%G (m q{leg~1?J&yB6=OgXB6Xe66e6LgXE#O 0;hPdy212mj;mY)PDa|S \lUs~HMyIoKc`uZ).᪠HSdd4X*ScJ꩔妒tq$)A$(@!]^dž _aA`&dͣ/X6hm``T6تʊc c#:1:fj zE>=@6G7c"z$|nD=ep Xbٍb Y1h*YwVfI[>4 g T$[RF7Y4R< G9+8bG%,o!2kp>LHɺ66UeBڕB@=iJBW V=EzZsL =&6դ5:Uvwk Eʋ&]֛0qltT 'yډ (@&o8bch(E6=\&abpIe(U\ZggAS˥a2^݁vbW8Ae_ .{c!PhȦuDĨKTH+PLjJJ$Pe2-!=7:I7=EBZԊ^լ'r"NGjuٰc*>\qיd6E.d- 6>{?fƋ.MzzV׈9 @z\ƓpމOߋ5SaHTJI QUM[ 0ΛSv a a,WԽ`=-ĮWzhC8d9~bE _H,HqVT"R73d8i{d>/0FI^WnqgQ{O ݰRkR4p"- RЌREUeR`\PXJ|d9-A'@./i-EH TIRʱ,J0 zC".i~T]U$7owX~<=9C2|CS%T;,ss#OGőߜ˻3kqyqDbdw"x1X,*2CCsȇח^h,¹T OGcTa :hAa4<ҺAm9U=!G>zsIz:MJZcs^Aɕ8ʶ_se2zrP6Xmeͨ:De3*)82fݔQFٌ}O&NO>X?'RA|4 R%_ZRXyX=\Me$(&>l! ϰ7 MA%>/D2YZNGM,FM昼d/(-m)b?]D'nُ:3Z2Ӓx2Z@QaR@ ח&r/>(]>aWo@ʲT ,Ol)Dgs,H|Fe g,P"Mo<N]f}qwsfC]RwZv NsNcq/ݨ3HОTFi_pae£yl13Vlb@LX!*[;"NDMxLq8-Ítg 0Dt(=jczothـW=Nw3)yg9,h͞^ahģ۬hُե{EK)EJ6cQbժhFJ"jb'e,)g~Fϲ⫚r5+#Y,g 5+#?[Ԍu㍖9ZFo, bѲG#P3}MѬf[5FJ_U#n,I>,Z*( IjV닖-0ܢe.:$yhY2g RGҤZ6:_뭗9yԬyfeC!lv$.hh^_[Y,mEt0W+axE|z.zm-HewF_#,t!mμiYWAf\EL ׇ饎::5;2A_KsG¦/{]^o)nHpv>cU iV+3?uދ~_l [lg=OG[oҿ3Vb%:A |bw heq}ri1߿z?\uNJi_|onَ_Rps,+XBSM`Soz;rwu񰑧i&WYɋ,iog-)󐯻oFH~Ǖ4. 
Q(nb=7[!yY4W_(*bT|V5ܭz-leTIop1 V33[ wH閫Ju}Eepb5'Fe3('iNV~>fkdޜ-;Ye7/{|.LS|˥Li[6x\ߚۚ}sbHV~,Dz %bchs/.h]d c86޴e3SL|6?2))xti?>*.N#@h>Cyl3>p$Z~,>vdq7ܛ\]_=,8ʹAWi9w l*^ei.v\^#"\ۻ76"K-~L?wϚo:`ecueړ$ey ͒g4^)7z}|=|fx83Vm?-f~+2җ|m8cEȷ׷G|lo,vbi-[/o`yQ?O~p{!oV~9;WIy˷k _,wp}A28>?7.$iԤ!i`W5$7s %G.\֦um:LCFd#"x·\j2`Ho3oܝ"ҭ%5lFrm%jP }nc؛h l[Fۿ rKdRUӼ/iCH _!mTq_-,L2ҟtZ|i/ Qل%xE[oN ^ˊ!7gsyUy1cDW~p7̇k)(fpj6 C,_g[_\g[J; 'gL{ґ-g[?iӲeEoOIW{dfɂ`ta'+ɧq?;5Y;N7hl2O=%2bɩ/|SLLp R|3JۜNܧL韰#d;Yh;/=v>b6_6}l;#RJJ=2zZh)W*?0lY?s~d>/o 3I#Lr@aae//Xo&7-dnGq?ZO$J|,2o_:|Jw[/QmYu-LHWoj󓀁oZ߱Ue/OK;{Ri= _aR6);nI7F|D7s۫qw7*U|GB ew>o-Q  a=ѣO_^/ߖ8 ; e=Zuinɵ.mhu)9B-1K:秣 ~|Jzb߹P'C^D tr#z}N|kE,˘-r{q>a?zJ=/#|"Qf~zA'r†~ˍ=/ i|5:}l`_\]_ܭp& 8 +{*sck{J)endstream endobj 418 0 obj << /Filter /FlateDecode /Length 3321 >> stream xZKܶ_H{ '0xtkGe%ޝњLrk$gS{Xh45Yɯw?Vz]OWz)+Vۇ0_ QBkmYI}}zS|s|\i_t+j-(+'=˲RHYUut}sZӘ$,y+JƸ,>-vp7MnBTa#W XK]:chMq3̋.K99wP̀Lڢm U * ndGƱ% UJnvv醋gܪm?`2||PAlne!u)SW?DrX՚ /2vIU|,W0VfquN^~{u7PZk0QMx ,~嫛xƟ8\1´Jm,a7m6];lns!.ݱ9AS*op,YL)l׹cЇ4++mˊQ=IP*H'R4Zcorz\βRjBfMGʠ@񔆲-8 ,fbUasÞ4eBFN{z6Vs {Qdhʂ$;ܶ#0c?:#_桷W;#81l AAc mhEFڦ6Әɀtɣ݊cCLu}x}IqG9kC ϛy?2Ar\ B"e]FPxt|#oPZ<~0\]ʼn ÏIS{n)!T}[@xTIprh{%x6O1VGlc[ z+TX7R>zeL"KEjwM?_,[R xCky8["bR\+ƒ<͎JXc¨qO?<=U:_WJXiSvq.J߅Hf~%l Vb Ѣb!!N'D-^Ӿ#0Ķ=z-,~nBn|h ]g9!Qg˟Უ1V\1<~AE6D=t6%y$Nvq- U@QzxTY̦~4ERso]hȟ'd'P)wax7tt Ϙ:j߆c^,8-]hN} ܗ5XT -Qd z鑚| 8ly~ȗp]8ƥ`LvrH¸%K8ePPzB&g<} !OOyHJm0ݷ$ROG! F3M:,Ew9mS x~Kʧ?[gΙF*Nn+@8 "+FtGS<D+EDPuI>C쥼3Ṡm3,3-T?yt$sr .Sxe2r?7>'<"2~z32-p-e? / )MpKj|X-Үa% 3?:XVէQ$j&M,?'%+x@N~6׃(L$pd檅dG4j!.q"W۱ͷ#%bszw_xu'J0``]v f>>;RdhGecܥʈ.5@W_*y";Kg2t(@TEl[w<=ѓGfF ػxHfs5}hC GkOavS/fp TZz؜ &^jvٔOS ,:/n7۫`|eendstream endobj 419 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 1084 >> stream x]R}LSw}eCWߚ&"oS00NZp|XBǢ p[V@ t 9ߒA c07,]ss^luͽ{=qL2s=KV ڭVa=  3!I9bH%Ȇ=SߴFI- ['*Ys$2_zuǕ^۶z.6qMTMD\U7#SbmV;䲫g-|V$Q~hG+ϐuJA7w#[\u`z!m1OK4_`$aEdn_tv;9w*Xm> stream xXKs6Ѓn;KޒN;m'4nN%۬)!8 ]@=\ ,}"ٷ(>fza|w^ M ˫YB\(1^r3`4"/4WVxn9VBJ{'Ys2?i/lw&瓥 "ڗ >:4SF/r\B+R|}ֳ $q\uјB68*Zcjrv@6lwfn C bW^z:G Ƕ)m-+AqO_ l 5б qm]c /ee7=)ͺ/er .uofo.wB8vG(' FmC>UGb%ĕu`SɮM@.m f]ރ^Wu]FpsX;==#\yȅHEzKb-u7R5̺*uZ`/J+^~w3qUqrW2  \'˶$p` 䊕#n W`HQu*..qylfq@QC61ӢKQMzd BF)%u*рrc_Tp[|kξW1_68KPV_/S QP jXli ڴ$5hMٗlg6L+sryS4!am/# g b("i"ۗUM9nVE&*]:+)yu#/ӎNKgL;Ys*4 f#mCTl8S %pdw]R1C\z )ltLHr:ȵ\ ]<`lMHESV^h&͡J? ?@] u'zb@n f]nN:騣$8s&cql$pؗՅ/8*ۀm4Gpޟm,P 짰?3EQD'a&z$L]2+ }\lnN;M y(=|hгc7S Ua6:EF38Wz!J՘^$ы ʎ䬌Ֆ9d=y{@Pd el@丳%M3<B;1\-ƦJH,CpN72ͳ _>3A\OI+oA 4ZjMmE_ (G57G~* HX.sQ!sEjtv/Âdendstream endobj 421 0 obj << /Filter /FlateDecode /Length 1061 >> stream xWKoFG<Z퓻SM۸ XJzآ+IYп쒔u %9S.h|!Y|]G4>EEfXjYl]V*%RÝ+FW +Py[+fř jWbG])OwF@{jAzP6հ鵷?.4yX@B1QhyQJ*+ ][>nO ۅ-&&8 :iJ_ҳc;{삒M!FWqQ囉y5G(?W[<4ތTq(8)8D>+-V;7 pȽY"%ZośڿߥV,涞U9YEY<1tjqY|$q0.:DY8_Yul3?fS ࡌ_#HfDٸM42N?=3IU_m̙|{0kJ(0-NBb #3#MktGz)gH#F6p. 
forecast/inst/doc/JSS2008.R0000644000176200001440000001634414166724664014726 0ustar liggesusers## ----load_forecast, echo=FALSE, message=FALSE---------------------------------
library('forecast')

## ----load_expsmooth, echo=FALSE, message=FALSE, eval=FALSE--------------------
# library('expsmooth')

## ----expsmooth_datsets, echo=FALSE, message=FALSE-----------------------------
bonds <- structure(c(5.83, 6.06, 6.58, 7.09, 7.31, 7.23, 7.43, 7.37,
  7.6, 7.89, 8.12, 7.96, 7.93, 7.61, 7.33, 7.18, 6.74, 6.27, 6.38, 6.6,
  6.3, 6.13, 6.02, 5.79, 5.73, 5.89, 6.37, 6.62, 6.85, 7.03, 6.99, 6.75,
  6.95, 6.64, 6.3, 6.4, 6.69, 6.52, 6.8, 7.01, 6.82, 6.6, 6.32, 6.4,
  6.11, 5.82, 5.87, 5.89, 5.63, 5.65, 5.73, 5.72, 5.73, 5.58, 5.53,
  5.41, 4.87, 4.58, 4.89, 4.69, 4.78, 4.99, 5.23, 5.18, 5.54, 5.9, 5.8,
  5.94, 5.91, 6.1, 6.03, 6.26, 6.66, 6.52, 6.26, 6, 6.42, 6.1, 6.04,
  5.83, 5.8, 5.74, 5.72, 5.23, 5.14, 5.1, 4.89, 5.13, 5.37, 5.26, 5.23,
  4.97, 4.76, 4.55, 4.61, 5.07, 5, 4.9, 5.28, 5.21, 5.15, 4.9, 4.62,
  4.24, 3.88, 3.91, 4.04, 4.03, 4.02, 3.9, 3.79, 3.94, 3.56, 3.32, 3.93,
  4.44, 4.29, 4.27, 4.29, 4.26, 4.13, 4.06, 3.81, 4.32, 4.7),
  .Tsp = c(1994, 2004.33333333333, 12), class = "ts")
usnetelec <- structure(c(296.1, 334.1, 375.3, 403.8, 447, 476.3, 550.3,
  603.9, 634.6, 648.5, 713.4, 759.2, 797.1, 857.9, 920, 987.2, 1058.4,
  1147.5, 1217.8, 1332.8, 1445.5, 1535.1, 1615.9, 1753, 1864.1, 1870.3,
  1920.8, 2040.9, 2127.4, 2209.4, 2250.7, 2289.6, 2298, 2244.4, 2313.4,
  2419.5, 2473, 2490.5, 2575.3, 2707.4, 2967.3, 3038, 3073.8, 3083.9,
  3197.2, 3247.5, 3353.5, 3444.2, 3492.2, 3620.3, 3694.8, 3802.1,
  3736.6, 3858.5, 3848), .Tsp = c(1949, 2003, 1), class = "ts")
ukcars <- structure(c(330.371, 371.051, 270.67, 343.88, 358.491,
  362.822, 261.281, 240.355, 325.382, 316.7, 171.153, 257.217, 298.127,
  251.464, 181.555, 192.598, 245.652, 245.526, 225.261, 238.211,
  257.385, 228.461, 175.371, 226.462, 266.15, 287.251, 225.883, 265.313,
  272.759, 234.134, 196.462, 205.551, 291.283, 284.422, 221.571,
  250.697, 253.757, 267.016, 220.388, 277.801, 283.233, 302.072, 259.72,
  297.658, 306.129, 322.106, 256.723, 341.877, 356.004, 361.54, 270.433,
  311.105, 326.688, 327.059, 274.257, 367.606, 346.163, 348.211,
  250.008, 292.518, 343.318, 343.429, 275.386, 329.747, 364.521,
  378.448, 300.798, 331.757, 362.536, 389.133, 323.322, 391.832,
  421.646, 416.823, 311.713, 381.902, 422.982, 427.722, 376.85, 458.58,
  436.225, 441.487, 369.566, 450.723, 462.442, 468.232, 403.636,
  413.948, 460.496, 448.932, 407.787, 469.408, 494.311, 433.24, 335.106,
  378.795, 387.1, 372.395, 335.79, 397.08, 449.755, 402.252, 391.847,
  385.89, 424.325, 433.28, 391.213, 408.74, 445.458, 428.202, 379.048,
  394.042, 432.796), .Tsp = c(1977, 2005, 4), class = "ts")
visitors <- structure(c(75.7, 75.4, 83.1, 82.9, 77.3, 105.7, 121.9,
  150, 98, 118, 129.5, 110.6, 91.7, 94.8, 109.5, 105.1, 95, 130.3,
  156.7, 190.1, 139.7, 147.8, 145.2, 132.7, 120.7, 116.5, 142, 140.4,
  128, 165.7, 183.1, 222.8, 161.3, 180.4, 185.2, 160.5, 157.1, 163.8,
  203.3, 196.9, 179.6, 207.3, 208, 245.8, 168.9, 191.1, 180, 160.1,
  136.6, 142.7, 175.4, 161.4, 149.9, 174.1, 192.7, 247.4, 176.2, 192.8,
  189.1, 181.1, 149.9, 157.3, 185.3, 178.2, 162.7, 190.6, 198.6, 253.1,
  177.4, 190.6, 189.2, 168, 161.4, 172.2, 208.3, 199.3, 197.4, 216,
  223.9, 266.8, 196.1, 238.2, 217.8, 203.8, 175.2, 176.9, 219.3, 199.1,
  190, 229.3, 255, 302.4, 242.8, 245.5, 257.9, 226.3, 213.4, 204.6,
  244.6, 239.9, 224, 267.2, 285.9, 344, 250.5, 304.3, 307.4, 255.1,
  214.9, 230.9, 282.5, 265.4, 254, 301.6, 311, 384, 303.8, 319.1, 313.5,
  294.2, 244.8, 261.4, 329.7, 304.9, 268.6, 320.7, 342.9, 422.3, 317.2,
  392.7, 365.6, 333.2, 261.5, 306.9, 358.2, 329.2, 309.2, 350.4, 375.6,
  465.2, 342.9, 408, 390.9, 325.9, 289.1, 308.2, 397.4, 330.4, 330.9,
  366.5, 379.5, 448.3, 346.2, 353.6, 338.6, 341.1, 283.4, 304.2, 372.3,
  323.7, 323.9, 354.8, 367.9, 457.6, 351, 398.6, 389, 334.1, 298.1,
  317.1, 388.5, 355.6, 353.1, 397, 416.7, 460.8, 360.8, 434.6, 411.9,
  405.6, 319.3, 347.9, 429, 372.9, 403, 426.5, 459.9, 559.9, 416.6,
  429.2, 428.7, 405.4, 330.2, 370, 446.9, 384.6, 366.3, 378.5, 376.2,
  523.2, 379.3, 437.2, 446.5, 360.3, 329.9, 339.4, 418.2, 371.9, 358.6,
  428.9, 437, 534, 396.6, 427.5, 392.5, 321.5, 260.9, 308.3, 415.5,
  362.2, 385.6, 435.3, 473.3, 566.6, 420.2, 454.8, 432.3, 402.8, 341.3,
  367.3, 472, 405.8, 395.6, 449.9, 479.9, 593.1, 462.4, 501.6, 504.7,
  409.5), .Tsp = c(1985.33333333333, 2005.25, 12), class = "ts")
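
## Editorial note, not part of the purled vignette code: the `.Tsp` attribute
## in the structure() calls above packs (start, end, frequency) for a "ts"
## object, the same triple that tsp() returns. A minimal sketch of the
## equivalent, more idiomatic constructor (`usnetelec2` is a hypothetical
## name; the values assume the annual usnetelec series defined above):
# usnetelec2 <- ts(as.numeric(usnetelec), start = 1949, frequency = 1)
# identical(tsp(usnetelec2), tsp(usnetelec))  # both are c(1949, 2003, 1)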

## ----etsexamples, fig.height=7, fig.width=9, echo=FALSE, fig.cap="Four time series showing point forecasts and 80\\% \\& 95\\% prediction intervals obtained using exponential smoothing state space models."----
par(mfrow = c(2,2))
mod1 <- ets(bonds)
mod2 <- ets(usnetelec)
mod3 <- ets(ukcars)
mod4 <- ets(visitors)
plot(forecast(mod1), main="(a) US 10-year bonds yield", xlab="Year",
     ylab="Percentage per annum")
plot(forecast(mod2), main="(b) US net electricity generation", xlab="Year",
     ylab="Billion kwh")
plot(forecast(mod3), main="(c) UK passenger motor vehicle production", xlab="Year",
     ylab="Thousands of cars")
plot(forecast(mod4), main="(d) Overseas visitors to Australia", xlab="Year",
     ylab="Thousands of people")

## ----etsnames, echo=FALSE------------------------------------------------------
etsnames <- c(mod1$method, mod2$method, mod3$method, mod4$method)
etsnames <- gsub("Ad","A\\\\damped",etsnames)

## ----ets-usnetelec, echo=TRUE--------------------------------------------------
etsfit <- ets(usnetelec)

## ----ets-usnetelec-print,echo=TRUE----------------------------------------------
etsfit

## ----ets-usnetelec-accuracy,eval=TRUE,echo=TRUE----------------------------------
accuracy(etsfit)

## ----ets-usnetelec-fcast, fig.height=5, fig.width=8, message=FALSE, warning=FALSE, include=FALSE, output=FALSE----
fcast <- forecast(etsfit)
plot(fcast)

## ----ets-usnetelec-fcast-print,eval=TRUE,echo=TRUE--------------------------------
fcast

## ----ets-usnetelec-newdata,eval=FALSE,echo=TRUE------------------------------------
# fit <- ets(usnetelec[1:45])
# test <- ets(usnetelec[46:55], model = fit)
# accuracy(test)

## ----ets-usnetelec-fcast-accuracy,eval=FALSE,echo=TRUE------------------------------
# accuracy(forecast(fit,10), usnetelec[46:55])
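
## Editorial note, not part of the purled vignette code: the two eval=FALSE
## chunks above hold out the last 10 observations of usnetelec to check
## out-of-sample accuracy. A minimal runnable sketch of the same split
## (`train`, `test` and `fit` are hypothetical names; window() keeps the
## time-series attributes that the bracket indexing above drops):
# train <- window(usnetelec, end = 1993)    # observations 1:45 (1949-1993)
# test  <- window(usnetelec, start = 1994)  # observations 46:55 (1994-2003)
# fit <- ets(train)
# accuracy(forecast(fit, h = 10), test)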
plot(forecast(mod4), main="(d) Overseas visitors to Australia", xlab="Year", ylab="Thousands of people") ## ----arima-auto-fcast,eval=TRUE,echo=TRUE,fig.show="hide"--------------------- arimafit <- auto.arima(usnetelec) fcast <- forecast(arimafit) plot(fcast) ## ----arimanames, echo=FALSE--------------------------------------------------- # Convert character strings to latex arimanames <- c(as.character(mod1), as.character(mod2), as.character(mod3), as.character(mod4)) arimanames <- gsub("\\[([0-9]*)\\]", "$_{\\1}$", arimanames) ## ----arimafcastsummary, echo=TRUE, message=FALSE, warning=FALSE, as.is=TRUE---- summary(fcast) forecast/inst/doc/JSS2008.Rmd0000644000176200001440000017307314055364445015244 0ustar liggesusers--- author: - name: Rob J Hyndman affiliation: Monash University address: > Department of Econometrics \& Business Statistics Monash University Clayton VIC 3800, Australia email: Rob.Hyndman@monash.edu url: https://robjhyndman.com - name: Yeasmin Khandakar affiliation: Monash University address: > Department of Econometrics \& Business Statistics Monash University Clayton VIC 3800, Australia title: formatted: "Automatic Time Series Forecasting:\\newline the \\pkg{forecast} Package for \\proglang{R}" # If you use tex in the formatted title, also supply version without plain: "Automatic Time Series Forecasting: the forecast Package for R" # For running headers, if needed short: "\\pkg{forecast}: Automatic Time Series Forecasting" abstract: > This vignette to the \proglang{R} package \pkg{forecast} is an updated version of @HK2008, published in the *Journal of Statistical Software*. Automatic forecasts of large numbers of univariate time series are often needed in business and other contexts. We describe two automatic forecasting algorithms that have been implemented in the \pkg{forecast} package for \proglang{R}. The first is based on innovations state space models that underly exponential smoothing methods. The second is a step-wise algorithm for forecasting with ARIMA models. The algorithms are applicable to both seasonal and non-seasonal data, and are compared and illustrated using four real time series. 
We also briefly describe some of the other functionality available in the \pkg{forecast} package. keywords: # at least one keyword must be supplied formatted: [ARIMA models, automatic forecasting, exponential smoothing, prediction intervals, state space models, time series, "\\proglang{R}"] plain: [ARIMA models, automatic forecasting, exponential smoothing, prediction intervals, state space models, time series, R] preamble: > \usepackage{amsmath,rotating,bm,fancyvrb,paralist,thumbpdf} \Volume{27} \Issue{3} \Month{July} \Year{2008} \Submitdate{2007-05-29} \Acceptdate{2008-03-22} \def\damped{$_{\mbox{\footnotesize d}}$} \let\var=\VAR \def\R{\proglang{R}} \def\dampfactor{\phi_h} \raggedbottom bibliography: JSS-paper.bib vignette: > %\VignetteIndexEntry{Automatic Time Series Forecasting: the forecast Package for R (Hyndman & Khandakar, JSS 2008)} %\VignetteEngine{knitr::rmarkdown} %\VignetteEncoding{UTF-8} documentclass: jss output: if (rmarkdown::pandoc_version() >= 2) rticles::jss_article else rmarkdown::html_vignette fig_width: 7 fig_height: 6 fig_caption: true --- ```{r load_forecast, echo=FALSE, message=FALSE} library('forecast') ``` ```{r load_expsmooth, echo=FALSE, message=FALSE, eval=FALSE} library('expsmooth') ``` ```{r expsmooth_datsets, echo=FALSE, message=FALSE} bonds <- structure(c(5.83, 6.06, 6.58, 7.09, 7.31, 7.23, 7.43, 7.37, 7.6, 7.89, 8.12, 7.96, 7.93, 7.61, 7.33, 7.18, 6.74, 6.27, 6.38, 6.6, 6.3, 6.13, 6.02, 5.79, 5.73, 5.89, 6.37, 6.62, 6.85, 7.03, 6.99, 6.75, 6.95, 6.64, 6.3, 6.4, 6.69, 6.52, 6.8, 7.01, 6.82, 6.6, 6.32, 6.4, 6.11, 5.82, 5.87, 5.89, 5.63, 5.65, 5.73, 5.72, 5.73, 5.58, 5.53, 5.41, 4.87, 4.58, 4.89, 4.69, 4.78, 4.99, 5.23, 5.18, 5.54, 5.9, 5.8, 5.94, 5.91, 6.1, 6.03, 6.26, 6.66, 6.52, 6.26, 6, 6.42, 6.1, 6.04, 5.83, 5.8, 5.74, 5.72, 5.23, 5.14, 5.1, 4.89, 5.13, 5.37, 5.26, 5.23, 4.97, 4.76, 4.55, 4.61, 5.07, 5, 4.9, 5.28, 5.21, 5.15, 4.9, 4.62, 4.24, 3.88, 3.91, 4.04, 4.03, 4.02, 3.9, 3.79, 3.94, 3.56, 3.32, 3.93, 4.44, 4.29, 4.27, 4.29, 4.26, 4.13, 4.06, 3.81, 4.32, 4.7), .Tsp = c(1994, 2004.33333333333, 12), class = "ts") usnetelec <- structure(c(296.1, 334.1, 375.3, 403.8, 447, 476.3, 550.3, 603.9, 634.6, 648.5, 713.4, 759.2, 797.1, 857.9, 920, 987.2, 1058.4, 1147.5, 1217.8, 1332.8, 1445.5, 1535.1, 1615.9, 1753, 1864.1, 1870.3, 1920.8, 2040.9, 2127.4, 2209.4, 2250.7, 2289.6, 2298, 2244.4, 2313.4, 2419.5, 2473, 2490.5, 2575.3, 2707.4, 2967.3, 3038, 3073.8, 3083.9, 3197.2, 3247.5, 3353.5, 3444.2, 3492.2, 3620.3, 3694.8, 3802.1, 3736.6, 3858.5, 3848), .Tsp = c(1949, 2003, 1), class = "ts") ukcars <- structure(c(330.371, 371.051, 270.67, 343.88, 358.491, 362.822, 261.281, 240.355, 325.382, 316.7, 171.153, 257.217, 298.127, 251.464, 181.555, 192.598, 245.652, 245.526, 225.261, 238.211, 257.385, 228.461, 175.371, 226.462, 266.15, 287.251, 225.883, 265.313, 272.759, 234.134, 196.462, 205.551, 291.283, 284.422, 221.571, 250.697, 253.757, 267.016, 220.388, 277.801, 283.233, 302.072, 259.72, 297.658, 306.129, 322.106, 256.723, 341.877, 356.004, 361.54, 270.433, 311.105, 326.688, 327.059, 274.257, 367.606, 346.163, 348.211, 250.008, 292.518, 343.318, 343.429, 275.386, 329.747, 364.521, 378.448, 300.798, 331.757, 362.536, 389.133, 323.322, 391.832, 421.646, 416.823, 311.713, 381.902, 422.982, 427.722, 376.85, 458.58, 436.225, 441.487, 369.566, 450.723, 462.442, 468.232, 403.636, 413.948, 460.496, 448.932, 407.787, 469.408, 494.311, 433.24, 335.106, 378.795, 387.1, 372.395, 335.79, 397.08, 449.755, 402.252, 391.847, 385.89, 424.325, 433.28, 391.213, 408.74, 445.458,
428.202, 379.048, 394.042, 432.796), .Tsp = c(1977, 2005, 4), class = "ts") visitors <- structure(c(75.7, 75.4, 83.1, 82.9, 77.3, 105.7, 121.9, 150, 98, 118, 129.5, 110.6, 91.7, 94.8, 109.5, 105.1, 95, 130.3, 156.7, 190.1, 139.7, 147.8, 145.2, 132.7, 120.7, 116.5, 142, 140.4, 128, 165.7, 183.1, 222.8, 161.3, 180.4, 185.2, 160.5, 157.1, 163.8, 203.3, 196.9, 179.6, 207.3, 208, 245.8, 168.9, 191.1, 180, 160.1, 136.6, 142.7, 175.4, 161.4, 149.9, 174.1, 192.7, 247.4, 176.2, 192.8, 189.1, 181.1, 149.9, 157.3, 185.3, 178.2, 162.7, 190.6, 198.6, 253.1, 177.4, 190.6, 189.2, 168, 161.4, 172.2, 208.3, 199.3, 197.4, 216, 223.9, 266.8, 196.1, 238.2, 217.8, 203.8, 175.2, 176.9, 219.3, 199.1, 190, 229.3, 255, 302.4, 242.8, 245.5, 257.9, 226.3, 213.4, 204.6, 244.6, 239.9, 224, 267.2, 285.9, 344, 250.5, 304.3, 307.4, 255.1, 214.9, 230.9, 282.5, 265.4, 254, 301.6, 311, 384, 303.8, 319.1, 313.5, 294.2, 244.8, 261.4, 329.7, 304.9, 268.6, 320.7, 342.9, 422.3, 317.2, 392.7, 365.6, 333.2, 261.5, 306.9, 358.2, 329.2, 309.2, 350.4, 375.6, 465.2, 342.9, 408, 390.9, 325.9, 289.1, 308.2, 397.4, 330.4, 330.9, 366.5, 379.5, 448.3, 346.2, 353.6, 338.6, 341.1, 283.4, 304.2, 372.3, 323.7, 323.9, 354.8, 367.9, 457.6, 351, 398.6, 389, 334.1, 298.1, 317.1, 388.5, 355.6, 353.1, 397, 416.7, 460.8, 360.8, 434.6, 411.9, 405.6, 319.3, 347.9, 429, 372.9, 403, 426.5, 459.9, 559.9, 416.6, 429.2, 428.7, 405.4, 330.2, 370, 446.9, 384.6, 366.3, 378.5, 376.2, 523.2, 379.3, 437.2, 446.5, 360.3, 329.9, 339.4, 418.2, 371.9, 358.6, 428.9, 437, 534, 396.6, 427.5, 392.5, 321.5, 260.9, 308.3, 415.5, 362.2, 385.6, 435.3, 473.3, 566.6, 420.2, 454.8, 432.3, 402.8, 341.3, 367.3, 472, 405.8, 395.6, 449.9, 479.9, 593.1, 462.4, 501.6, 504.7, 409.5), .Tsp = c(1985.33333333333, 2005.25, 12), class = "ts") ``` # Introduction Automatic forecasts of large numbers of univariate time series are often needed in business. It is common to have over one thousand product lines that need forecasting at least monthly. Even when a smaller number of forecasts are required, there may be nobody suitably trained in the use of time series models to produce them. In these circumstances, an automatic forecasting algorithm is an essential tool. Automatic forecasting algorithms must determine an appropriate time series model, estimate the parameters and compute the forecasts. They must be robust to unusual time series patterns, and applicable to large numbers of series without user intervention. The most popular automatic forecasting algorithms are based on either exponential smoothing or ARIMA models. In this article, we discuss the implementation of two automatic univariate forecasting methods in the \pkg{forecast} package for \proglang{R}. We also briefly describe some univariate forecasting methods that are part of the \pkg{forecast} package. The \pkg{forecast} package for the \proglang{R} system for statistical computing [@R] is available from the Comprehensive \proglang{R} Archive Network at \url{https://CRAN.R-project.org/package=forecast}. Version `r packageVersion('forecast')` of the package was used for this paper. The \pkg{forecast} package contains functions for univariate forecasting and a few examples of real time series data. For more extensive testing of forecasting methods, the \pkg{fma} package contains the 90 data sets from @MWH3, the \pkg{expsmooth} package contains 24 data sets from @expsmooth08, and the \pkg{Mcomp} package contains the 1001 time series from the M-competition [@Mcomp82] and the 3003 time series from the M3-competition [@M3comp00]. 
The \pkg{forecast} package implements automatic forecasting using exponential smoothing, ARIMA models, the Theta method [@AN00], cubic splines [@HKPB05], as well as other common forecasting methods. In this article, we primarily discuss the exponential smoothing approach (in Section \ref{sec:expsmooth}) and the ARIMA modelling approach (in Section \ref{sec:arima}) to automatic forecasting. In Section \ref{sec:package}, we describe the implementation of these methods in the \pkg{forecast} package, along with other features of the package. # Exponential smoothing {#sec:expsmooth} Although exponential smoothing methods have been around since the 1950s, a modelling framework incorporating procedures for model selection was not developed until relatively recently. @OKS97, @HKSG02 and @HKOS05 have shown that all exponential smoothing methods (including non-linear methods) are optimal forecasts from innovations state space models. Exponential smoothing methods were originally classified by Pegels' (1969)\nocite{Pegels69} taxonomy. This was later extended by @Gardner85, modified by @HKSG02, and extended again by @Taylor03a, giving a total of fifteen methods seen in the following table. \begin{table}[!hbt] \begin{center}\vspace{0.2cm} \begin{tabular}{|ll|ccc|} \hline & &\multicolumn{3}{c|}{Seasonal Component} \\ \multicolumn{2}{|c|}{Trend}& N & A & M\\ \multicolumn{2}{|c|}{Component} & (None) & (Additive) & (Multiplicative)\\ \cline{3-5} &&&\\[-0.3cm] N & (None) & N,N & N,A & N,M\\ &&&&\\[-0.3cm] A & (Additive) & A,N & A,A & A,M\\ &&&&\\[-0.3cm] A\damped & (Additive damped) & A\damped,N & A\damped,A & A\damped,M\\ &&&&\\[-0.3cm] M & (Multiplicative) & M,N & M,A & M,M\\ &&&&\\[-0.3cm] M\damped & (Multiplicative damped) & M\damped,N & M\damped,A & M\damped,M\\ \hline \end{tabular}\vspace{0.2cm} \end{center} \caption{The fifteen exponential smoothing methods.} \end{table} Some of these methods are better known under other names. For example, cell (N,N) describes the simple exponential smoothing (or SES) method, cell (A,N) describes Holt's linear method, and cell (A\damped,N) describes the damped trend method. The additive Holt-Winters' method is given by cell (A,A) and the multiplicative Holt-Winters' method is given by cell (A,M). The other cells correspond to less commonly used but analogous methods. ## Point forecasts for all methods We denote the observed time series by $y_1,y_2,\dots,y_n$. A forecast of $y_{t+h}$ based on all of the data up to time $t$ is denoted by $\hat{y}_{t+h|t}$. To illustrate the method, we give the point forecasts and updating equations for method (A,A), the Holt-Winters' additive method: \begin{subequations}\label{eq:AMmethod}\vspace{-15pt} \begin{align} \mbox{Level:}\quad &\ell_t = \alpha (y_t - s_{t-m}) + (1-\alpha)(\ell_{t-1} + b_{t-1})\hspace*{1cm} \label{eq:3-44a}\\ \mbox{Growth:}\quad &b_t = \beta^*(\ell_t - \ell_{t-1}) + (1-\beta^*)b_{t-1} \label{eq:3-45a}\\ \mbox{Seasonal:}\quad &s_t = \gamma(y_t - \ell_{t-1} -b_{t-1}) + (1-\gamma)s_{t-m}\label{eq:3-46a}\\ \mbox{Forecast:}\quad &\hat{y}_{t+h|t} = \ell_t + b_th +s_{t-m+h_m^+}. \label{eq:3-47a} \end{align} \end{subequations} where $m$ is the length of seasonality (e.g., the number of months or quarters in a year), $\ell_t$ represents the level of the series, $b_t$ denotes the growth, $s_t$ is the seasonal component, $\hat{y}_{t+h|t}$ is the forecast for $h$ periods ahead, and $h_m^+ = \big[(h-1) \mbox{ mod } m\big] + 1$. 
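To make these recursions concrete, the following sketch applies \eqref{eq:AMmethod} directly. It is illustrative code only, not part of the package: the function name `hw_additive` is ours, and the smoothing parameters and initial states are assumed known, whereas in practice they are estimated from the data as described below.

```{r hw-additive-sketch, eval=FALSE, echo=TRUE}
# Illustrative implementation of method (A,A). The arguments l0, b0 and s0
# (the initial level, the initial growth, and the m seasonal values
# s_{1-m}, ..., s_0) and the smoothing parameters are assumed known here.
hw_additive <- function(y, m, alpha, beta_star, gamma, l0, b0, s0, h = 2 * m) {
  n <- length(y)
  s <- c(s0, numeric(n))                 # s[1:m] holds s_{1-m}, ..., s_0
  l_prev <- l0; b_prev <- b0
  for (t in 1:n) {
    l <- alpha * (y[t] - s[t]) + (1 - alpha) * (l_prev + b_prev)        # level
    b <- beta_star * (l - l_prev) + (1 - beta_star) * b_prev            # growth
    s[t + m] <- gamma * (y[t] - l_prev - b_prev) + (1 - gamma) * s[t]   # seasonal
    l_prev <- l; b_prev <- b
  }
  hm <- ((seq_len(h) - 1) %% m) + 1          # h_m^+ = [(h-1) mod m] + 1
  l_prev + seq_len(h) * b_prev + s[n + hm]   # point forecasts for h = 1, 2, ...
}
```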
To use method \eqref{eq:AMmethod}, we need values for the initial states $\ell_0$, $b_0$ and $s_{1-m},\dots,s_0$, and for the smoothing parameters $\alpha$, $\beta^*$ and $\gamma$. All of these will be estimated from the observed data. Equation \eqref{eq:3-46a} is slightly different from the usual Holt-Winters equations such as those in @MWH3 or @BOK05. These authors replace \eqref{eq:3-46a} with $$ s_t = \gamma^*(y_t - \ell_{t}) + (1-\gamma^*)s_{t-m}. $$ If $\ell_t$ is substituted using \eqref{eq:3-44a}, we obtain $$s_t = \gamma^*(1-\alpha)(y_t - \ell_{t-1}-b_{t-1}) + \{1-\gamma^*(1-\alpha)\}s_{t-m}. $$ Thus, we obtain identical forecasts using this approach by replacing $\gamma$ in \eqref{eq:3-46a} with $\gamma^*(1-\alpha)$. The modification given in \eqref{eq:3-46a} was proposed by @OKS97 to make the state space formulation simpler. It is equivalent to Archibald's (1990)\nocite{Archibald90} variation of the Holt-Winters' method. \begin{sidewaystable} \begin{small} \begin{center} \begin{tabular}{|c|lll|} \hline & \multicolumn{3}{c|}{Seasonal} \\ {Trend} & \multicolumn{1}{c}{N} & \multicolumn{1}{c}{A} & \multicolumn{1}{c|}{M}\\ \cline{2-4} & $\ell_t = \alpha y_t + (1-\alpha) \ell_{t-1}$ & $\ell_t = \alpha (y_t - s_{t-m}) + (1-\alpha) \ell_{t-1}$ & $\ell_t = \alpha (y_t / s_{t-m}) + (1-\alpha) \ell_{t-1}$\\ {N} & & $s_t = \gamma (y_t - \ell_{t-1}) + (1-\gamma) s_{t-m}$ & $s_t = \gamma (y_t / \ell_{t-1}) + (1-\gamma) s_{t-m}$ \\ & $\hat{y}_{t+h|t} = \ell_t$ & $\hat{y}_{t+h|t} = \ell_t + s_{t-m+h_m^+}$ & $\hat{y}_{t+h|t}= \ell_ts_{t-m+h_m^+}$ \\ \hline & $\ell_t = \alpha y_t + (1-\alpha) (\ell_{t-1}+b_{t-1})$ & $\ell_t = \alpha (y_t - s_{t-m}) + (1-\alpha) (\ell_{t-1}+b_{t-1})$ & $\ell_t = \alpha (y_t / s_{t-m}) + (1-\alpha) (\ell_{t-1}+b_{t-1})$\\ {A} & $b_t = \beta^* (\ell_t-\ell_{t-1}) + (1-\beta^*) b_{t-1}$ & $b_t = \beta^* (\ell_t-\ell_{t-1}) + (1-\beta^*) b_{t-1}$ & $b_t = \beta^* (\ell_t-\ell_{t-1}) + (1-\beta^*) b_{t-1}$\\ & & $s_t = \gamma (y_t - \ell_{t-1}-b_{t-1}) + (1-\gamma) s_{t-m}$ & $s_t = \gamma (y_t / (\ell_{t-1}+b_{t-1})) + (1-\gamma) s_{t-m}$\\ & $\hat{y}_{t+h|t} = \ell_t+hb_t$ & $\hat{y}_{t+h|t} = \ell_t +hb_t +s_{t-m+h_m^+}$ & $\hat{y}_{t+h|t}= (\ell_t+hb_t)s_{t-m+h_m^+}$ \\ \hline & $\ell_t = \alpha y_t + (1-\alpha) (\ell_{t-1}+\phi b_{t-1})$ & $\ell_t = \alpha (y_t - s_{t-m}) + (1-\alpha) (\ell_{t-1}+\phi b_{t-1})$ & $\ell_t = \alpha (y_t / s_{t-m}) + (1-\alpha) (\ell_{t-1}+\phi b_{t-1})$\\ {A\damped } & $b_t = \beta^* (\ell_t-\ell_{t-1}) + (1-\beta^*) \phi b_{t-1}$ & $b_t = \beta^* (\ell_t-\ell_{t-1}) + (1-\beta^*) \phi b_{t-1}$ & $b_t = \beta^* (\ell_t-\ell_{t-1}) + (1-\beta^*) \phi b_{t-1}$\\ & & $s_t = \gamma (y_t - \ell_{t-1}-\phi b_{t-1}) + (1-\gamma) s_{t-m}$ & $s_t = \gamma (y_t / (\ell_{t-1}+\phi b_{t-1})) + (1-\gamma) s_{t-m}$\\ & $\hat{y}_{t+h|t} = \ell_t+\dampfactor b_t$ & $\hat{y}_{t+h|t} = \ell_t+\dampfactor b_t+s_{t-m+h_m^+}$ & $\hat{y}_{t+h|t}= (\ell_t+\dampfactor b_t)s_{t-m+h_m^+}$ \\ \hline & $\ell_t = \alpha y_t + (1-\alpha) \ell_{t-1}b_{t-1}$ & $\ell_t = \alpha (y_t - s_{t-m}) + (1-\alpha) \ell_{t-1}b_{t-1}$ & $\ell_t = \alpha (y_t / s_{t-m}) + (1-\alpha) \ell_{t-1}b_{t-1}$\\ {M} & $b_t = \beta^* (\ell_t/\ell_{t-1}) + (1-\beta^*) b_{t-1}$ & $b_t = \beta^* (\ell_t/\ell_{t-1}) + (1-\beta^*) b_{t-1}$ & $b_t = \beta^* (\ell_t/\ell_{t-1}) + (1-\beta^*) b_{t-1}$\\ & & $s_t = \gamma (y_t - \ell_{t-1}b_{t-1}) + (1-\gamma) s_{t-m}$ & $s_t = \gamma (y_t / (\ell_{t-1}b_{t-1})) + (1-\gamma) s_{t-m}$\\ & $\hat{y}_{t+h|t} = \ell_tb_t^h$ & $\hat{y}_{t+h|t} = 
\ell_tb_t^h + s_{t-m+h_m^+}$ & $\hat{y}_{t+h|t}= \ell_tb_t^hs_{t-m+h_m^+}$ \\ \hline & $\ell_t = \alpha y_t + (1-\alpha) \ell_{t-1}b^\phi_{t-1}$ & $\ell_t = \alpha (y_t - s_{t-m}) + (1-\alpha)\ell_{t-1}b^\phi_{t-1}$ & $\ell_t = \alpha (y_t / s_{t-m}) + (1-\alpha)\ell_{t-1}b^\phi_{t-1}$\\ {M\damped } & $b_t = \beta^* (\ell_t/\ell_{t-1}) + (1-\beta^*) b^\phi_{t-1}$ & $b_t = \beta^* (\ell_t/\ell_{t-1}) + (1-\beta^*) b^\phi_{t-1}$ & $b_t = \beta^* (\ell_t/\ell_{t-1}) + (1-\beta^*) b^\phi_{t-1}$\\ & & $s_t = \gamma (y_t - \ell_{t-1}b^\phi_{t-1}) + (1-\gamma) s_{t-m}$ & $s_t = \gamma (y_t / (\ell_{t-1}b^\phi_{t-1})) + (1-\gamma) s_{t-m}$\\ & $\hat{y}_{t+h|t} = \ell_tb_t^{\phi_h}$ & $\hat{y}_{t+h|t} = \ell_tb_t^{\phi_h} + s_{t-m+h_m^+}$ & $\hat{y}_{t+h|t}= \ell_tb_t^{\phi_h}s_{t-m+h_m^+}$ \\ \hline \end{tabular} \end{center} \end{small} \caption{Formulae for recursive calculations and point forecasts. In each case, $\ell_t$ denotes the series level at time $t$, $b_t$ denotes the slope at time $t$, $s_t$ denotes the seasonal component of the series at time $t$, and $m$ denotes the number of seasons in a year; $\alpha$, $\beta^*$, $\gamma$ and $\phi$ are constants, $\phi_h = \phi+\phi^2+\dots+\phi^{h}$ and $h_m^+ = \big[(h-1) \mbox{ mod } m\big] + 1$.}\label{table:pegels} \end{sidewaystable} Table \ref{table:pegels} gives recursive formulae for computing point forecasts $h$ periods ahead for all of the exponential smoothing methods. Some interesting special cases can be obtained by setting the smoothing parameters to extreme values. For example, if $\alpha=0$, the level is constant over time; if $\beta^*=0$, the slope is constant over time; and if $\gamma=0$, the seasonal pattern is constant over time. At the other extreme, naïve forecasts (i.e., $\hat{y}_{t+h|t}=y_t$ for all $h$) are obtained using the (N,N) method with $\alpha=1$. Finally, the additive and multiplicative trend methods are special cases of their damped counterparts obtained by letting $\phi=1$. ## Innovations state space models {#sec:statespace} For each exponential smoothing method in Table \ref{table:pegels}, @expsmooth08 describe two possible innovations state space models, one corresponding to a model with additive errors and the other to a model with multiplicative errors. If the same parameter values are used, these two models give equivalent point forecasts, although different prediction intervals. Thus there are 30 potential models described in this classification. Historically, the nature of the error component has often been ignored, because the distinction between additive and multiplicative errors makes no difference to point forecasts. We are careful to distinguish exponential smoothing \emph{methods} from the underlying state space \emph{models}. An exponential smoothing method is an algorithm for producing point forecasts only. The underlying stochastic state space model gives the same point forecasts, but also provides a framework for computing prediction intervals and other properties. To distinguish the models with additive and multiplicative errors, we add an extra letter to the front of the method notation. The triplet (E,T,S) refers to the three components: error, trend and seasonality. So the model ETS(A,A,N) has additive errors, additive trend and no seasonality---in other words, this is Holt's linear method with additive errors. Similarly, ETS(M,M\damped,M) refers to a model with multiplicative errors, a damped multiplicative trend and multiplicative seasonality. 
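In the \pkg{forecast} package, this triplet notation is used directly to specify a model. The calls below are illustrative only, anticipating the \code{ets()} function discussed in Section \ref{sec:package}; the letter \code{Z} denotes automatic selection of a component.

```{r ets-triplet, eval=FALSE, echo=TRUE}
ets(visitors, model = "MAM")                 # ETS(M,A,M)
ets(visitors, model = "MAM", damped = TRUE)  # ETS(M,Ad,M)
ets(visitors, model = "ZZZ")                 # select all three components automatically
```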
The notation ETS($\cdot$,$\cdot$,$\cdot$) helps in remembering the order in which the components are specified. Once a model is specified, we can study the probability distribution of future values of the series and find, for example, the conditional mean of a future observation given knowledge of the past. We denote this as $\mu_{t+h|t} = \E(y_{t+h} \mid \bm{x}_t)$, where $\bm{x}_t$ contains the unobserved components such as $\ell_t$, $b_t$ and $s_t$. For $h=1$ we use $\mu_t\equiv\mu_{t+1|t}$ as a shorthand notation. For many models, these conditional means will be identical to the point forecasts given in Table \ref{table:pegels}, so that $\mu_{t+h|t}=\hat{y}_{t+h|t}$. However, for other models (those with multiplicative trend or multiplicative seasonality), the conditional mean and the point forecast will differ slightly for $h\ge 2$. We illustrate these ideas using the damped trend method of @GM85. \subsubsection{Additive error model: ETS(A,A$_d$,N)} Let $\mu_t = \hat{y}_t = \ell_{t-1}+\phi b_{t-1}$ denote the one-step forecast of $y_{t}$ assuming that we know the values of all parameters. Also, let $\varepsilon_t = y_t - \mu_t$ denote the one-step forecast error at time $t$. From the equations in Table \ref{table:pegels}, we find that\vspace*{-15pt} \begin{align} \label{ss1} y_t &= \ell_{t-1} + \phi b_{t-1} + \varepsilon_t\\ \ell_t &= \ell_{t-1} + \phi b_{t-1} + \alpha \varepsilon_t \label{ss2}\\ b_t &= \phi b_{t-1} + \beta^*(\ell_t - \ell_{t-1}- \phi b_{t-1}) = \phi b_{t-1} + \alpha\beta^*\varepsilon_t. \label{ss3} \end{align} We simplify the last expression by setting $\beta=\alpha\beta^*$. The three equations above constitute a state space model underlying the damped Holt's method. Note that it is an \emph{innovations} state space model [@AM79;@Aoki87] because the same error term appears in each equation. We can write it in standard state space notation by defining the state vector as $\bm{x}_t = (\ell_t,b_t)'$ and expressing \eqref{ss1}--\eqref{ss3} as \begin{subequations}\vspace*{-15pt} \begin{align} y_t &= \left[\begin{array}{ll} 1 & \phi \end{array}\right] \bm{x}_{t-1} + \varepsilon_t\label{obseq}\\ \bm{x}_t &= \left[\begin{array}{ll} 1 & \phi\\ 0 & \phi \end{array}\right]\bm{x}_{t-1} + \left[\begin{array}{l} \alpha\\ \beta \end{array}\right]\varepsilon_t.\label{stateeq} \end{align} \end{subequations} The model is fully specified once we state the distribution of the error term $\varepsilon_t$. Usually we assume that these are independent and identically distributed, following a normal distribution with mean 0 and variance $\sigma^2$, which we write as $\varepsilon_t \sim\mbox{NID}(0, \sigma^2)$. \subsubsection{Multiplicative error model: ETS(M,A$_d$,N)} A model with multiplicative error can be derived similarly, by first setting $\varepsilon_t = (y_t-\mu_t)/\mu_t$, so that $\varepsilon_t$ is the relative error. Then, following a similar approach to that for additive errors, we find\vspace*{-15pt} \begin{align*} y_t &= (\ell_{t-1} + \phi b_{t-1})(1 + \varepsilon_t)\\ \ell_t &= (\ell_{t-1} + \phi b_{t-1})(1 + \alpha \varepsilon_t)\\ b_t &= \phi b_{t-1} + \beta(\ell_{t-1}+\phi b_{t-1})\varepsilon_t, \end{align*} or\vspace*{-15pt} \begin{align*} y_t &= \left[\begin{array}{ll} 1 & \phi \end{array}\right] \bm{x}_{t-1}(1 + \varepsilon_t)\\ \bm{x}_t &= \left[\begin{array}{ll} 1 & \phi \\ 0 & \phi \end{array}\right]\bm{x}_{t-1} + \left[\begin{array}{ll} 1 & \phi \end{array}\right] \bm{x}_{t-1} \left[\begin{array}{l} \alpha\\ \beta \end{array}\right]\varepsilon_t. \end{align*} Again we assume that $\varepsilon_t \sim \mbox{NID}(0,\sigma^2)$.
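To see how the innovations form generates data, the following sketch (our own illustrative code; the parameter values are arbitrary choices) simulates from the ETS(M,A$_d$,N) model by iterating the equations above. The \code{simulate()} method for fitted \code{ets} objects provides this facility in general.

```{r ets-madn-simulate, eval=FALSE, echo=TRUE}
# Simulate n observations from ETS(M,Ad,N); all parameter values below
# are arbitrary illustrative choices.
n <- 100
alpha <- 0.5; beta <- 0.1; phi <- 0.9   # recall beta = alpha * beta^*
l <- 10; b <- 1                         # initial states l_0 and b_0
y <- numeric(n)
for (t in 1:n) {
  e  <- rnorm(1, sd = 0.05)        # relative error, epsilon_t ~ NID(0, sigma^2)
  mu <- l + phi * b                # one-step mean mu_t
  y[t] <- mu * (1 + e)             # observation equation
  l    <- mu * (1 + alpha * e)     # level update
  b    <- phi * b + beta * mu * e  # growth update
}
```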
Of course, this is a nonlinear state space model, which is usually considered difficult to handle in estimating and forecasting. However, that is one of the many advantages of the innovations form of state space models --- we can still compute forecasts, the likelihood and prediction intervals for this nonlinear model with no more effort than is required for the additive error model. ## State space models for all exponential smoothing methods {#sec:ssmodels} There are similar state space models for all 30 exponential smoothing variations. The general model involves a state vector $\bm{x}_t = (\ell_t, b_t$, $s_t, s_{t-1}, \dots, s_{t-m+1})'$ and state space equations of the form \begin{subequations}\label{eq:ss}\vspace*{-15pt} \begin{align} y_t &= w(\bm{x}_{t-1}) + r(\bm{x}_{t-1})\varepsilon_t \label{eq:ss1}\\ \bm{x}_t &= f(\bm{x}_{t-1}) + g(\bm{x}_{t-1})\varepsilon_t \label{eq:ss2} \end{align} \end{subequations} where $\{\varepsilon_t\}$ is a Gaussian white noise process with mean zero and variance $\sigma^2$, and $\mu_t = w(\bm{x}_{t-1})$. The model with additive errors has $r(\bm{x}_{t-1})=1$, so that $y_t = \mu_{t} + \varepsilon_t$. The model with multiplicative errors has $r(\bm{x}_{t-1})=\mu_t$, so that $y_t = \mu_{t}(1 + \varepsilon_t)$. Thus, $\varepsilon_t = (y_t - \mu_t)/\mu_t$ is the relative error for the multiplicative model. The models are not unique. Clearly, any value of $r(\bm{x}_{t-1})$ will lead to identical point forecasts for $y_t$. All of the methods in Table \ref{table:pegels} can be written in the form \eqref{eq:ss1} and \eqref{eq:ss2}. The specific form for each model is given in @expsmooth08. Some of the combinations of trend, seasonality and error can occasionally lead to numerical difficulties; specifically, any model equation that requires division by a state component could involve division by zero. This is a problem for models with additive errors and either multiplicative trend or multiplicative seasonality, as well as for the model with multiplicative errors, multiplicative trend and additive seasonality. These models should therefore be used with caution. The multiplicative error models are useful when the data are strictly positive, but are not numerically stable when the data contain zeros or negative values. So when the time series is not strictly positive, only the six fully additive models may be applied. The point forecasts given in Table \ref{table:pegels} are easily obtained from these models by iterating equations \eqref{eq:ss1} and \eqref{eq:ss2} for $t=n+1, n+2,\dots,n+h$, setting $\varepsilon_{n+j}=0$ for $j=1,\dots,h$. In most cases (notable exceptions being models with multiplicative seasonality or multiplicative trend for $h\ge2$), the point forecasts can be shown to be equal to $\mu_{t+h|t} = \E(y_{t+h} \mid \bm{x}_t)$, the conditional expectation of the corresponding state space model. The models also provide a means of obtaining prediction intervals. In the case of the linear models, where the forecast distributions are normal, we can derive the conditional variance $v_{t+h|t} = \var(y_{t+h} \mid \bm{x}_t)$ and obtain prediction intervals accordingly. This approach also works for many of the nonlinear models. Detailed derivations of the results for many models are given in @HKOS05. A more direct approach that works for all of the models is to simply simulate many future sample paths conditional on the last estimate of the state vector, $\bm{x}_t$. Then prediction intervals can be obtained from the percentiles of the simulated sample paths. 
Point forecasts can also be obtained in this way by taking the average of the simulated values at each future time period. An advantage of this approach is that we generate an estimate of the complete predictive distribution, which is especially useful in applications such as inventory planning, where expected costs depend on the whole distribution. ## Estimation {#sec:estimation} In order to use these models for forecasting, we need to know the values of $\bm{x}_0$ and the parameters $\alpha$, $\beta$, $\gamma$ and $\phi$. It is easy to compute the likelihood of the innovations state space model \eqref{eq:ss}, and so obtain maximum likelihood estimates. @OKS97 show that\vspace*{-15pt} \begin{equation}\label{likelihood} L^*(\bm\theta,\bm{x}_0) = n\log\Big(\sum_{t=1}^n \varepsilon^2_t\Big) + 2\sum_{t=1}^n \log|r(\bm{x}_{t-1})| \end{equation} is equal to twice the negative logarithm of the likelihood function (with constant terms eliminated), conditional on the parameters $\bm\theta = (\alpha,\beta,\gamma,\phi)'$ and the initial states $\bm{x}_0 = (\ell_0,b_0,s_0,s_{-1},\dots,s_{-m+1})'$, where $n$ is the number of observations. This is easily computed by simply using the recursive equations in Table \ref{table:pegels}. Unlike state space models with multiple sources of error, we do not need to use the Kalman filter to compute the likelihood. The parameters $\bm\theta$ and the initial states $\bm{x}_0$ can be estimated by minimizing $L^*$. Most implementations of exponential smoothing use an ad hoc heuristic scheme to estimate $\bm{x}_0$. However, with modern computers, there is no reason why we cannot estimate $\bm{x}_0$ along with $\bm\theta$, and the resulting forecasts are often substantially better when we do. We constrain the initial states $\bm{x}_0$ so that the seasonal indices add to zero for additive seasonality, and add to $m$ for multiplicative seasonality. There have been several suggestions for restricting the parameter space for $\alpha$, $\beta$ and $\gamma$. The traditional approach is to ensure that the various equations can be interpreted as weighted averages, thus requiring $\alpha$, $\beta^*=\beta/\alpha$, $\gamma^*=\gamma/(1-\alpha)$ and $\phi$ to all lie within $(0,1)$. This suggests $$0<\alpha<1,\qquad 0<\beta<\alpha,\qquad 0<\gamma < 1-\alpha,\qquad\mbox{and}\qquad 0<\phi<1. $$ However, @HAA08 show that these restrictions are usually stricter than necessary (although in a few cases they are not restrictive enough). ## Model selection Forecast accuracy measures such as mean squared error (MSE) can be used for selecting a model for a given set of data, provided the errors are computed from data in a hold-out set and not from the same data as were used for model estimation. However, there are often too few out-of-sample errors to draw reliable conclusions. Consequently, a penalized method based on the in-sample fit is usually better. One such approach uses a penalized likelihood such as Akaike's Information Criterion: $$\mbox{AIC} = L^*(\hat{\bm\theta},\hat{\bm{x}}_0) + 2q, $$ where $q$ is the number of parameters in $\bm\theta$ plus the number of free states in $\bm{x}_0$, and $\hat{\bm\theta}$ and $\hat{\bm{x}}_0$ denote the estimates of $\bm\theta$ and $\bm{x}_0$. We select the model that minimizes the AIC amongst all of the models that are appropriate for the data. The AIC also provides a method for selecting between the additive and multiplicative error models. 
The point forecasts from the two models are identical so that standard forecast accuracy measures such as the MSE or mean absolute percentage error (MAPE) are unable to select between the error types. The AIC is able to select between the error types because it is based on likelihood rather than one-step forecasts. Obviously, other model selection criteria (such as the BIC) could also be used in a similar manner. ## Automatic forecasting {#sec:algorithm} We combine the preceding ideas to obtain a robust and widely applicable automatic forecasting algorithm. The steps involved are summarized below. \begin{compactenum} \item For each series, apply all models that are appropriate, optimizing the parameters (both smoothing parameters and the initial state variable) of the model in each case. \item Select the best of the models according to the AIC. \item Produce point forecasts using the best model (with optimized parameters) for as many steps ahead as required. \item Obtain prediction intervals for the best model either using the analytical results of Hyndman, Koehler, et al. (2005), or by simulating future sample paths for $\{y_{n+1},\dots,y_{n+h}\}$ and finding the $\alpha/2$ and $1-\alpha/2$ percentiles of the simulated data at each forecasting horizon. If simulation is used, the sample paths may be generated using the normal distribution for errors (parametric bootstrap) or using the resampled errors (ordinary bootstrap). \end{compactenum} @HKSG02 applied this automatic forecasting strategy to the M-competition data [@Mcomp82] and the IJF-M3 competition data [@M3comp00] using a restricted set of exponential smoothing models, and demonstrated that the methodology is particularly good at short term forecasts (up to about 6 periods ahead), and especially for seasonal short-term series (beating all other methods in the competitions for these series). # ARIMA models {#sec:arima} A common obstacle for many people in using Autoregressive Integrated Moving Average (ARIMA) models for forecasting is that the order selection process is usually considered subjective and difficult to apply. But it does not have to be. There have been several attempts to automate ARIMA modelling in the last 25 years. @HR82 proposed a method to identify the order of an ARMA model for a stationary series. In their method the innovations can be obtained by fitting a long autoregressive model to the data, and then the likelihood of potential models is computed via a series of standard regressions. They established the asymptotic properties of the procedure under very general conditions. @Gomez98 extended the Hannan-Rissanen identification method to include multiplicative seasonal ARIMA model identification. @TRAMOSEATS98 implemented this automatic identification procedure in the software \pkg{TRAMO} and \pkg{SEATS}. For a given series, the algorithm attempts to find the model with the minimum BIC. @Liu89 proposed a method for identification of seasonal ARIMA models using a filtering method and certain heuristic rules; this algorithm is used in the \pkg{SCA-Expert} software. Another approach is described by @MP00a whose algorithm for univariate ARIMA models also allows intervention analysis. It is implemented in the software package ``Time Series Expert'' (\pkg{TSE-AX}). Other algorithms are in use in commercial software, although they are not documented in the public domain literature. 
In particular, \pkg{Forecast Pro} [@ForecastPro00] is well-known for its excellent automatic ARIMA algorithm which was used in the M3-forecasting competition [@M3comp00]. Another proprietary algorithm is implemented in \pkg{Autobox} [@Reilly00]. @OL96 provide an early review of some of the commercial software that implement automatic ARIMA forecasting. ## Choosing the model order using unit root tests and the AIC A non-seasonal ARIMA($p,d,q$) process is given by $$ \phi(B)(1-B^d)y_{t} = c + \theta(B)\varepsilon_t $$ where $\{\varepsilon_t\}$ is a white noise process with mean zero and variance $\sigma^2$, $B$ is the backshift operator, and $\phi(z)$ and $\theta(z)$ are polynomials of order $p$ and $q$ respectively. To ensure causality and invertibility, it is assumed that $\phi(z)$ and $\theta(z)$ have no roots for $|z|<1$ [@BDbook91]. If $c\ne0$, there is an implied polynomial of order $d$ in the forecast function. The seasonal ARIMA$(p,d,q)(P,D,Q)_m$ process is given by $$ \Phi(B^m)\phi(B)(1-B^{m})^D(1-B)^dy_{t} = c + \Theta(B^m)\theta(B)\varepsilon_t $$ where $\Phi(z)$ and $\Theta(z)$ are polynomials of orders $P$ and $Q$ respectively, each containing no roots inside the unit circle. If $c\ne0$, there is an implied polynomial of order $d+D$ in the forecast function. The main task in automatic ARIMA forecasting is selecting an appropriate model order, that is the values $p$, $q$, $P$, $Q$, $D$, $d$. If $d$ and $D$ are known, we can select the orders $p$, $q$, $P$ and $Q$ via an information criterion such as the AIC: $$\mbox{AIC} = -2\log(L) + 2(p+q+P+Q+k)$$ where $k=1$ if $c\ne0$ and 0 otherwise, and $L$ is the maximized likelihood of the model fitted to the \emph{differenced} data $(1-B^m)^D(1-B)^dy_t$. The likelihood of the full model for $y_t$ is not actually defined and so the value of the AIC for different levels of differencing are not comparable. One solution to this difficulty is the ``diffuse prior'' approach which is outlined in @DKbook01 and implemented in the \code{arima()} function [@Ripley:2002] in \R. In this approach, the initial values of the time series (before the observed values) are assumed to have mean zero and a large variance. However, choosing $d$ and $D$ by minimizing the AIC using this approach tends to lead to over-differencing. For forecasting purposes, we believe it is better to make as few differences as possible because over-differencing harms forecasts [@SY94] and widens prediction intervals. [Although, see @Hendry97 for a contrary view.] Consequently, we need some other approach to choose $d$ and $D$. We prefer unit-root tests. However, most unit-root tests are based on a null hypothesis that a unit root exists which biases results towards more differences rather than fewer differences. For example, variations on the Dickey-Fuller test [@DF81] all assume there is a unit root at lag 1, and the HEGY test of @HEGY90 is based on a null hypothesis that there is a seasonal unit root. Instead, we prefer unit-root tests based on a null hypothesis of no unit-root. For non-seasonal data, we consider ARIMA($p,d,q$) models where $d$ is selected based on successive KPSS unit-root tests [@KPSS92]. That is, we test the data for a unit root; if the test result is significant, we test the differenced data for a unit root; and so on. We stop this procedure when we obtain our first insignificant result. For seasonal data, we consider ARIMA$(p,d,q)(P,D,Q)_m$ models where $m$ is the seasonal frequency and $D=0$ or $D=1$ depending on an extended Canova-Hansen test [@CH95]. 
Canova and Hansen only provide critical values for $2 < m < 13$. Let $C_m$ be the critical value for seasonal period $m$. We plotted $C_m$ against $m$ for values of $m$ up to 365 and noted that they fit the line $C_m = 0.269 m^{0.928}$ almost exactly. So for $m>12$, we use this simple expression to obtain the critical value. We note in passing that the null hypothesis for the Canova-Hansen test is not an ARIMA model as it includes seasonal dummy terms. It is a test for whether the seasonal pattern changes sufficiently over time to warrant a seasonal unit root, or whether a stable seasonal pattern modelled using fixed dummy variables is more appropriate. Nevertheless, we have found that the test is still useful for choosing $D$ in a strictly ARIMA framework (i.e., without seasonal dummy variables). If a stable seasonal pattern is selected (i.e., the null hypothesis is not rejected), the seasonality is effectively handled by stationary seasonal AR and MA terms. After $D$ is selected, we choose $d$ by applying successive KPSS unit-root tests to the seasonally differenced data (if $D=1$) or the original data (if $D=0$). Once $d$ (and possibly $D$) are selected, we proceed to select the values of $p$, $q$, $P$ and $Q$ by minimizing the AIC. We allow $c\ne0$ for models where $d+D < 2$. ## A step-wise procedure for traversing the model space Suppose we have seasonal data and we consider ARIMA$(p,d,q)(P,D,Q)_m$ models where $p$ and $q$ can take values from 0 to 3, and $P$ and $Q$ can take values from 0 to 1. When $c=0$ there is a total of 288 possible models, and when $c\ne 0$ there is a total of 192 possible models, giving 480 models altogether. If the values of $p$, $d$, $q$, $P$, $D$ and $Q$ are allowed to range more widely, the number of possible models increases rapidly. Consequently, it is often not feasible to simply fit every potential model and choose the one with the lowest AIC. Instead, we need a way of traversing the space of models efficiently in order to arrive at the model with the lowest AIC value. We propose a step-wise algorithm as follows. \begin{description} \item[Step 1:] We try four possible models to start with. \begin{itemize} \item ARIMA($2,d,2$) if $m=1$ and ARIMA($2,d,2)(1,D,1)$ if $m>1$. \item ARIMA($0,d,0$) if $m=1$ and ARIMA($0,d,0)(0,D,0)$ if $m>1$. \item ARIMA($1,d,0$) if $m=1$ and ARIMA($1,d,0)(1,D,0)$ if $m>1$. \item ARIMA($0,d,1$) if $m=1$ and ARIMA($0,d,1)(0,D,1)$ if $m>1$. \end{itemize} If $d+D \le 1$, these models are fitted with $c\ne0$. Otherwise, we set $c=0$. Of these four models, we select the one with the smallest AIC value. This is called the ``current'' model and is denoted by ARIMA($p,d,q$) if $m=1$ or ARIMA($p,d,q)(P,D,Q)_m$ if $m>1$. \item[Step 2:] We consider up to seventeen variations on the current model: \begin{itemize} \item where one of $p$, $q$, $P$ and $Q$ is allowed to vary by $\pm1$ from the current model; \item where $p$ and $q$ both vary by $\pm1$ from the current model; \item where $P$ and $Q$ both vary by $\pm1$ from the current model; \item where the constant $c$ is included if the current model has $c=0$ or excluded if the current model has $c\ne0$. \end{itemize} Whenever a model with lower AIC is found, it becomes the new ``current'' model and the procedure is repeated. This process finishes when we cannot find a model close to the current model with lower AIC. \end{description} There are several constraints on the fitted models to avoid problems with convergence or near unit-roots. The constraints are outlined below.
\begin{compactitem}\itemsep=8pt \item The values of $p$ and $q$ are not allowed to exceed specified upper bounds (with default values of 5 in each case). \item The values of $P$ and $Q$ are not allowed to exceed specified upper bounds (with default values of 2 in each case). \item We reject any model which is ``close'' to non-invertible or non-causal. Specifically, we compute the roots of $\phi(B)\Phi(B)$ and $\theta(B)\Theta(B)$. If either has a root that is smaller than 1.001 in absolute value, the model is rejected. \item If there are any errors arising in the non-linear optimization routine used for estimation, the model is rejected. The rationale here is that any model that is difficult to fit is probably not a good model for the data. \end{compactitem} The algorithm is guaranteed to return a valid model because the model space is finite and at least one of the starting models will be accepted (the model with no AR or MA parameters). The selected model is used to produce forecasts. ## Comparisons with exponential smoothing There is a widespread myth that ARIMA models are more general than exponential smoothing. This is not true. The two classes of models overlap. The linear exponential smoothing models are all special cases of ARIMA models---the equivalences are discussed in @HAA08. However, the non-linear exponential smoothing models have no equivalent ARIMA counterpart. On the other hand, there are many ARIMA models which have no exponential smoothing counterpart. Thus, the two model classes overlap and are complementary; each has its strengths and weaknesses. The exponential smoothing state space models are all non-stationary. Models with seasonality or non-damped trend (or both) have two unit roots; all other models---that is, non-seasonal models with either no trend or damped trend---have one unit root. It is possible to define a stationary model with similar characteristics to exponential smoothing, but this is not normally done. The philosophy of exponential smoothing is that the world is non-stationary. So if a stationary model is required, ARIMA models are better. One advantage of the exponential smoothing models is that they can be non-linear. So time series that exhibit non-linear characteristics, including heteroscedasticity, may be better modelled using exponential smoothing state space models. For seasonal data, there are many more ARIMA models than the 30 possible models in the exponential smoothing class of Section \ref{sec:expsmooth}. It may be thought that the larger model class is advantageous. However, the results in @HKSG02 show that the exponential smoothing models performed better than the ARIMA models for the seasonal M3 competition data. (For the annual M3 data, the ARIMA models performed better.) In a discussion of these results, @Hyndman01 speculates that the larger model space of ARIMA models actually harms forecasting performance because it introduces additional uncertainty. The smaller exponential smoothing class is sufficiently rich to capture the dynamics of almost all real business and economic time series. # The forecast package {#sec:package} The algorithms and modelling frameworks for automatic univariate time series forecasting are implemented in the \pkg{forecast} package in \R. We illustrate the methods using the following four real time series shown in Figure \ref{fig:etsexamples}. \begin{compactitem} \item Figure \ref{fig:etsexamples}(a) shows 125 monthly US government bond yields (percent per annum) from January 1994 to May 2004.
\item Figure \ref{fig:etsexamples}(b) displays 55 observations of annual US net electricity generation (billion kwh) for 1949 through 2003. \item Figure \ref{fig:etsexamples}(c) presents 113 quarterly observations of passenger motor vehicle production in the U.K. (thousands of cars) for the first quarter of 1977 through the first quarter of 2005. \item Figure \ref{fig:etsexamples}(d) shows 240 monthly observations of the number of short term overseas visitors to Australia from May 1985 to April 2005. \end{compactitem} ```{r etsexamples, fig.height=7, fig.width=9, echo=FALSE, fig.cap="Four time series showing point forecasts and 80\\% \\& 95\\% prediction intervals obtained using exponential smoothing state space models."} par(mfrow = c(2,2)) mod1 <- ets(bonds) mod2 <- ets(usnetelec) mod3 <- ets(ukcars) mod4 <- ets(visitors) plot(forecast(mod1), main="(a) US 10-year bonds yield", xlab="Year", ylab="Percentage per annum") plot(forecast(mod2), main="(b) US net electricity generation", xlab="Year", ylab="Billion kwh") plot(forecast(mod3), main="(c) UK passenger motor vehicle production", xlab="Year", ylab="Thousands of cars") plot(forecast(mod4), main="(d) Overseas visitors to Australia", xlab="Year", ylab="Thousands of people") ``` ```{r etsnames, echo=FALSE} etsnames <- c(mod1$method, mod2$method, mod3$method, mod4$method) etsnames <- gsub("Ad","A\\\\damped",etsnames) ``` ## Implementation of the automatic exponential smoothing algorithm The innovations state space modelling framework described in Section \ref{sec:expsmooth} is implemented via the \code{ets()} function in the \pkg{forecast} package. (The default settings of \code{ets()} do not allow models with multiplicative trend, but they can be included using \code{allow.multiplicative.trend=TRUE}.) 
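For example, the following illustrative calls show how the automatic selection can be constrained using documented arguments of \code{ets()}.

```{r ets-constrain, eval=FALSE, echo=TRUE}
ets(usnetelec)                                     # automatic selection over all models
ets(usnetelec, model = "AAN", damped = FALSE)      # force Holt's linear method
ets(usnetelec, allow.multiplicative.trend = TRUE)  # also consider M and Md trends
```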
The models chosen via the algorithm for the four data sets were: \begin{compactitem} \item `r etsnames[1]` for monthly US 10-year bonds yield\\ ($\alpha=`r format(coef(mod1)['alpha'], digits=4, nsmall=4)`$, $\beta=`r format(coef(mod1)['beta'], digits=4, nsmall=4)`$, $\phi=`r format(coef(mod1)['phi'], digits=4, nsmall=4)`$, $\ell_0 = `r format(coef(mod1)['l'], digits=4, nsmall=4)`$, $b_0=`r format(coef(mod1)['b'], digits=4, nsmall=4)`$); \item `r etsnames[2]` for annual US net electricity generation\\ ($\alpha=`r format(coef(mod2)['alpha'], digits=4, nsmall=4)`$, $\beta=`r format(coef(mod2)['beta'], digits=4, nsmall=4)`$, $\ell_0 = `r format(coef(mod2)['l'], digits=4, nsmall=4)`$, $b_0=`r format(coef(mod2)['b'], digits=4, nsmall=4)`$); \item `r etsnames[3]` for quarterly UK motor vehicle production\\ ($\alpha=`r format(coef(mod3)['alpha'], digits=4, nsmall=4)`$, $\gamma=`r format(coef(mod3)['gamma'], digits=4, nsmall=4)`$, $\ell_0 = `r format(coef(mod3)['l'], digits=4, nsmall=4)`$, $s_{-3}=`r format(-sum(coef(mod3)[c('s0','s1','s2')]), digits=4, nsmall=4)`$, $s_{-2}=`r format(coef(mod3)['s2'], digits=4, nsmall=4)`$, $s_{-1}=`r format(coef(mod3)['s1'], digits=4, nsmall=4)`$, $s_0=`r format(coef(mod3)['s0'], digits=4, nsmall=4)`$); \item `r etsnames[4]` for monthly Australian overseas visitors\\ ($\alpha=`r format(coef(mod4)['alpha'], digits=4, nsmall=4)`$, $\beta=`r format(coef(mod4)['beta'], digits=2, nsmall=4)`$, $\gamma=`r format(coef(mod4)['gamma'], digits=4, nsmall=4)`$, $\ell_0 = `r format(coef(mod4)['l'], digits=4, nsmall=4)`$, $b_0 = `r format(coef(mod4)['b'], digits=4, nsmall=4)`$, $s_{-11}=`r format(12-sum(tail(coef(mod4),11)), digits=4, nsmall=4)`$, $s_{-10}=`r format(coef(mod4)['s10'], digits=4, nsmall=4)`$, $s_{-9}=`r format(coef(mod4)['s9'], digits=4, nsmall=4)`$, $s_{-8}=`r format(coef(mod4)['s8'], digits=4, nsmall=4)`$, $s_{-7}=`r format(coef(mod4)['s7'], digits=4, nsmall=4)`$, $s_{-6}=`r format(coef(mod4)['s6'], digits=4, nsmall=4)`$, $s_{-5}=`r format(coef(mod4)['s5'], digits=4, nsmall=4)`$, $s_{-4}=`r format(coef(mod4)['s4'], digits=4, nsmall=4)`$, $s_{-3}=`r format(coef(mod4)['s3'], digits=4, nsmall=4)`$, $s_{-2}=`r format(coef(mod4)['s2'], digits=4, nsmall=4)`$, $s_{-1}=`r format(coef(mod4)['s1'], digits=4, nsmall=4)`$, $s_0=`r format(coef(mod4)['s0'], digits=4, nsmall=4)`$). \end{compactitem} Although there is a lot of computation involved, it can be handled remarkably quickly on modern computers. Each of the forecasts shown in Figure \ref{fig:etsexamples} took no more than a few seconds on a standard PC. The US electricity generation series took the longest as there are no analytical prediction intervals available for the ETS(M,M\damped,N) model. Consequently, the prediction intervals for this series were computed using simulation of 5000 future sample paths. To apply the algorithm to the US net electricity generation time series \code{usnetelec}, we use the following command. ```{r ets-usnetelec, echo=TRUE} etsfit <- ets(usnetelec) ``` The object \code{etsfit} is of class ``\code{ets}'' and contains all of the necessary information about the fitted model including model parameters, the value of the state vector $\bm{x}_t$ for all $t$, residuals and so on. Printing the \code{etsfit} object shows the main items of interest. ```{r ets-usnetelec-print,echo=TRUE} etsfit ``` Some goodness-of-fit measures [defined in @HK06] are obtained using \code{accuracy()}. 
```{r ets-usnetelec-accuracy,eval=TRUE,echo=TRUE} accuracy(etsfit) ``` There are also \code{coef()}, \code{plot()}, \code{summary()}, \code{residuals()}, \code{fitted()} and \code{simulate()} methods for objects of class ``\code{ets}''. The \code{plot()} function shows time plots of the original time series along with the extracted components (level, growth and seasonal). The \code{forecast()} function computes the required forecasts which are then plotted as in Figure \ref{fig:etsexamples}(b). ```{r ets-usnetelec-fcast, fig.height=5, fig.width=8, message=FALSE, warning=FALSE, include=FALSE, output=FALSE} fcast <- forecast(etsfit) plot(fcast) ``` Printing the \code{fcast} object gives a table showing the prediction intervals. ```{r ets-usnetelec-fcast-print,eval=TRUE,echo=TRUE} fcast ``` The \code{ets()} function also provides the useful feature of applying a fitted model to a new data set. For example, we could withhold 10 observations from the \code{usnetelec} data set when fitting, then compute the one-step forecast errors for the out-of-sample data. ```{r ets-usnetelec-newdata,eval=FALSE,echo=TRUE} fit <- ets(usnetelec[1:45]) test <- ets(usnetelec[46:55], model = fit) accuracy(test) ``` We can also look at the measures of forecast accuracy where the forecasts are based on only the fitting data. ```{r ets-usnetelec-fcast-accuracy,eval=FALSE,echo=TRUE} accuracy(forecast(fit,10), usnetelec[46:55]) ``` ## The HoltWinters() function There is another implementation of exponential smoothing in \R\ via the \code{HoltWinters()} function [@Meyer:2002] in the \pkg{stats} package. It implements only the (N,N), (A,N), (A,A) and (A,M) methods. The initial states $\bm{x}_0$ are fixed using a heuristic algorithm. Because of the way the initial states are estimated, a full three years of seasonal data are required to implement the seasonal forecasts using \code{HoltWinters()}. (See @shortseasonal for the minimal sample size required.) The smoothing parameters are optimized by minimizing the average squared prediction errors, which is equivalent to minimizing \eqref{likelihood} in the case of additive errors. There is a \code{predict()} method for the resulting object which can produce point forecasts and prediction intervals. Although it is nowhere documented, it appears that the prediction intervals produced by \code{predict()} for an object of class \code{HoltWinters} are based on an equivalent ARIMA model in the case of the (N,N), (A,N) and (A,A) methods, assuming additive errors. These prediction intervals are equivalent to the prediction intervals that arise from the (A,N,N), (A,A,N) and (A,A,A) state space models. For the (A,M) method, the prediction interval provided by \code{predict()} appears to be based on @CY91 which is an approximation to the true prediction interval arising from the (A,A,M) model. Prediction intervals with multiplicative errors are not possible using the \code{HoltWinters()} function. 
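The correspondence can be checked directly. The comparison below is illustrative (not from the original paper): it fits the additive Holt-Winters' method with both functions. The estimates generally differ because \code{HoltWinters()} fixes the initial states heuristically and minimizes the squared one-step errors, whereas \code{ets()} estimates the initial states along with the smoothing parameters by maximum likelihood.

```{r hw-vs-ets, eval=FALSE, echo=TRUE}
fit_hw  <- HoltWinters(ukcars)                         # (A,A) method
fit_ets <- ets(ukcars, model = "AAA", damped = FALSE)  # ETS(A,A,A) model
predict(fit_hw, n.ahead = 8, prediction.interval = TRUE)
forecast(fit_ets, h = 8)
```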
## Implementation of the automatic ARIMA algorithm ```{r arimaexamples, fig.height=7, fig.width=9, echo=FALSE, fig.cap="Four time series showing point forecasts and 80\\% \\& 95\\% prediction intervals obtained using ARIMA models."} mod1 <- auto.arima(bonds, seasonal=FALSE, approximation=FALSE) mod2 <- auto.arima(usnetelec) mod3 <- auto.arima(ukcars) mod4 <- auto.arima(visitors) par(mfrow = c(2,2)) plot(forecast(mod1), main="(a) US 10-year bonds yield", xlab="Year", ylab="Percentage per annum") plot(forecast(mod2), main="(b) US net electricity generation", xlab="Year", ylab="Billion kwh") plot(forecast(mod3), main="(c) UK passenger motor vehicle production", xlab="Year", ylab="Thousands of cars") plot(forecast(mod4), main="(d) Overseas visitors to Australia", xlab="Year", ylab="Thousands of people") ``` The algorithm of Section \ref{sec:arima} is applied to the same four time series. Unlike the exponential smoothing algorithm, the ARIMA class of models assumes homoscedasticity, which is not always appropriate. Consequently, transformations are sometimes necessary. For these four time series, we model the raw data for series (a)--(c), but the logged data for series (d). The prediction intervals are back-transformed with the point forecasts to preserve the probability coverage. To apply this algorithm to the US net electricity generation time series \code{usnetelec}, we use the following commands. ```{r arima-auto-fcast,eval=TRUE,echo=TRUE,fig.show="hide"} arimafit <- auto.arima(usnetelec) fcast <- forecast(arimafit) plot(fcast) ``` ```{r arimanames, echo=FALSE} # Convert character strings to latex arimanames <- c(as.character(mod1), as.character(mod2), as.character(mod3), as.character(mod4)) arimanames <- gsub("\\[([0-9]*)\\]", "$_{\\1}$", arimanames) ``` The function \code{auto.arima()} implements the algorithm of Section \ref{sec:arima} and returns an object of class \code{Arima}. The resulting forecasts are shown in Figure \ref{fig:arimaexamples}. The fitted models are as follows: \begin{compactitem} \item `r arimanames[1]` for monthly US 10-year bonds yield\\ ($\theta_1= `r format(coef(mod1)['ma1'], digits=4, nsmall=4)`$); \item `r arimanames[2]` for annual US net electricity generation\\ ($\phi_1= `r format(coef(mod2)['ar1'], digits=4, nsmall=4)`$; $\phi_2= `r format(coef(mod2)['ar2'], digits=4, nsmall=4)`$; $\theta_1= `r format(coef(mod2)['ma1'], digits=4, nsmall=4)`$; $\theta_2= `r format(coef(mod2)['ma2'], digits=4, nsmall=4)`$; $c= `r format(coef(mod2)['drift'], digits=4, nsmall=4)`$); \item `r arimanames[3]` for quarterly UK motor vehicle production\\ ($\phi_1= `r format(coef(mod3)['ar1'], digits=4, nsmall=4)`$; $\phi_2= `r format(coef(mod3)['ar2'], digits=4, nsmall=4)`$; $\Phi_1= `r format(coef(mod3)['sar1'], digits=4, nsmall=4)`$; $\Phi_2= `r format(coef(mod3)['sar2'], digits=4, nsmall=4)`$); \item `r arimanames[4]` for monthly Australian overseas visitors\\ ($\phi_1= `r format(coef(mod4)['ar1'], digits=4, nsmall=4)`$; $\theta_1= `r format(coef(mod4)['ma1'], digits=4, nsmall=4)`$; $\Theta_1= `r format(coef(mod4)['sma1'], digits=4, nsmall=4)`$; $\Theta_2= `r format(coef(mod4)['sma2'], digits=4, nsmall=4)`$; $c= `r format(coef(mod4)['drift'], digits=4, nsmall=4)`$). \end{compactitem} Note that the \R\ parameterization has $\theta(B) = (1 + \theta_1B + \dots + \theta_qB^q)$ and $\phi(B) = (1 - \phi_1B - \dots - \phi_pB^p)$, and similarly for the seasonal terms. A summary of the forecasts is available, part of which is shown below.
``` Forecast method: ARIMA(2,1,2) with drift Series: usnetelec Coefficients: ar1 ar2 ma1 ma2 drift -1.3032 -0.4332 1.5284 0.8340 66.1585 s.e. 0.2122 0.2084 0.1417 0.1185 7.5595 sigma^2 estimated as 2262: log likelihood=-283.34 AIC=578.67 AICc=580.46 BIC=590.61 Error measures: ME RMSE MAE MPE MAPE MASE ACF1 Training set 0.046402 44.894 32.333 -0.61771 2.1012 0.45813 0.022492 Forecasts: Point Forecast Lo 80 Hi 80 Lo 95 Hi 95 2004 3968.957 3908.002 4029.912 3875.734 4062.180 2005 3970.350 3873.950 4066.751 3822.919 4117.782 2006 4097.171 3971.114 4223.228 3904.383 4289.959 2007 4112.332 3969.691 4254.973 3894.182 4330.482 2008 4218.671 4053.751 4383.591 3966.448 4470.894 2009 4254.559 4076.108 4433.010 3981.641 4527.476 2010 4342.760 4147.088 4538.431 4043.505 4642.014 2011 4393.306 4185.211 4601.401 4075.052 4711.560 2012 4470.261 4248.068 4692.455 4130.446 4810.077 2013 4529.113 4295.305 4762.920 4171.535 4886.690 ``` The training set error measures for the two models are very similar. Note that the information criteria are not comparable. The \pkg{forecast} package also contains the function \code{Arima()} which is largely a wrapper to the \code{arima()} function in the \pkg{stats} package. The \code{Arima()} function in the \pkg{forecast} package makes it easier to include a drift term when $d+D=1$. (Setting \code{include.mean=TRUE} in the \code{arima()} function from the \pkg{stats} package will only work when $d+D=0$.) It also provides the facility for fitting an existing ARIMA model to a new data set (as was demonstrated for the \code{ets()} function earlier). One-step forecasts for ARIMA models are now available via a \code{fitted()} function. We also provide a new function \code{arima.errors()} which returns the original time series after adjusting for regression variables. If there are no regression variables in the ARIMA model, then the errors will be identical to the original series. If there are regression variables in the ARIMA model, then the errors will be equal to the original series minus the effect of the regression variables, but leaving in the serial correlation that is modelled with the AR and MA terms. In contrast, \code{residuals()} provides true residuals, removing the AR and MA terms as well. The generic functions \code{summary()}, \code{print()}, \code{fitted()} and \code{forecast()} apply to models obtained from either the \code{Arima()} or \code{arima()} functions. ## The forecast() function The \code{forecast()} function is generic and has S3 methods for a wide range of time series models. It computes point forecasts and prediction intervals from the time series model. Methods exist for models fitted using \code{ets()}, \code{auto.arima()}, \code{Arima()}, \code{arima()}, \code{ar()}, \code{HoltWinters()} and \texttt{StructTS()}. There is also a method for a \code{ts} object. If a time series object is passed as the first argument to \code{forecast()}, the function will produce forecasts based on the exponential smoothing algorithm of Section \ref{sec:expsmooth}. In most cases, there is an existing \code{predict()} function which is intended to do much the same thing. Unfortunately, the resulting objects from the \code{predict()} function contain different information in each case and so it is not possible to build generic functions (such as \code{plot()} and \code{summary()}) for the results. So, instead, \code{forecast()} acts as a wrapper to \code{predict()}, and packages the information obtained in a common format (the \code{forecast} class). 
We also define a default \code{predict()} method, which is used when no existing \code{predict()} function exists and which calls the relevant \code{forecast()} function. Thus, \code{predict()} methods parallel \code{forecast()} methods, but the latter provide consistent output that is more usable.

\subsection[The forecast class]{The \code{forecast} class}

The output from the \code{forecast()} function is an object of class ``\code{forecast}'' and includes at least the following information:
\begin{compactitem}
\item the original series;
\item point forecasts;
\item prediction intervals of specified coverage;
\item the forecasting method used and information about the fitted model;
\item residuals from the fitted model;
\item one-step forecasts from the fitted model for the period of the observed data.
\end{compactitem}
(A sketch showing how these components can be accessed directly is given at the end of this section.)

There are \code{print()}, \code{plot()} and \code{summary()} methods for the ``\code{forecast}'' class. Figures \ref{fig:etsexamples} and \ref{fig:arimaexamples} were produced using the \code{plot()} method.

The prediction intervals are, by default, computed for 80\% and 95\% coverage, although other values are possible if requested. Fan charts [@Wallis99] can be produced using the combination \verb|plot(forecast(model.object, fan = TRUE))|.

## Other functions {#sec:other}

We now briefly describe some of the other features of the \pkg{forecast} package. Each of the following functions produces an object of class ``\code{forecast}''; a short demonstration follows the list.

\code{croston()}
: implements the method of @Croston72 for intermittent demand forecasting. In this method, the time series is decomposed into two separate sequences: the non-zero values and the time intervals between non-zero values. These are then independently forecast using simple exponential smoothing, and the forecasts of the original series are obtained as ratios of the two sets of forecasts. No prediction intervals are provided because there is no underlying stochastic model [@SH05].

\code{thetaf()}
: provides forecasts from the Theta method [@AN00]. @HB03 showed that these are equivalent to a special case of simple exponential smoothing with drift.

\code{splinef()}
: gives cubic-spline forecasts, based on fitting a cubic spline to the historical data and extrapolating it linearly. The details of this method, and the associated prediction intervals, are discussed in @HKPB05.

\code{meanf()}
: returns forecasts based on the historical mean.

\code{rwf()}
: gives ``naïve'' forecasts equal to the most recent observation, assuming a random walk model. This function also allows forecasting using a random walk with drift.

In addition, there are some new plotting functions for time series.

\code{tsdisplay()}
: provides a time plot along with an ACF and PACF.

\code{seasonplot()}
: produces a seasonal plot as described in @MWH3.
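Here is a brief demonstration of several of these functions (a sketch only; \code{Nile} and \code{AirPassengers} are built-in R series chosen purely for illustration):

```{r other-functions-demo, eval=FALSE}
plot(rwf(Nile, drift = TRUE, h = 10))  # random walk with drift
plot(meanf(Nile, h = 10))              # historical mean
plot(thetaf(Nile, h = 10))             # Theta method
tsdisplay(AirPassengers)               # time plot with ACF and PACF
seasonplot(AirPassengers)              # seasonal plot
```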
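Finally, returning to the \code{forecast} class: the components listed in the previous subsection can be accessed directly, as in the following sketch (component names as documented for the class).

```{r forecast-class-access, eval=FALSE}
fc <- forecast(auto.arima(usnetelec), h = 10)
fc$mean       # point forecasts
fc$lower      # lower prediction interval limits (80% and 95% by default)
fc$upper      # upper prediction interval limits
fc$x          # the original series
fc$residuals  # residuals from the fitted model
fc$fitted     # one-step forecasts over the observed data
```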
\newpage

# Bibliography
forecast/inst/CITATION0000644000176200001440000000253314127205271014176 0ustar liggesusers
year <- sub("-.*", "", meta$Date)
if (!length(year)) year <- substr(Sys.Date(), 1, 4)

vers <- meta$Version
if (is.null(vers)) vers <- packageVersion("forecast")
vers <- paste("R package version", vers)

# Grab authors from DESCRIPTION file
# authors <- eval(parse(text = as.list(read.dcf("../DESCRIPTION")[1, ])$`Authors@R`))
# authors <- authors[sapply(authors$role, function(roles) "aut" %in% roles)]
# authors <- sapply(authors, function(author) paste(author$given, author$family))
# authors <- paste(authors, collapse = " and ")

citHeader("To cite the forecast package in publications, please use:")

bibentry(
  bibtype = "Manual",
  title   = "{forecast}: Forecasting functions for time series and linear models",
  author  = "Rob Hyndman and George Athanasopoulos and Christoph Bergmeir and Gabriel Caceres and Leanne Chhay and Mitchell O'Hara-Wild and Fotios Petropoulos and Slava Razbash and Earo Wang and Farah Yasmeen",
  year    = year,
  note    = vers,
  url     = "https://pkg.robjhyndman.com/forecast/"
)

bibentry(
  bibtype = "Article",
  title   = "Automatic time series forecasting: the forecast package for {R}",
  author  = personList(as.person("Rob J Hyndman"), as.person("Yeasmin Khandakar")),
  journal = "Journal of Statistical Software",
  volume  = 27,
  number  = 3,
  pages   = "1--22",
  year    = 2008,
  doi     = "10.18637/jss.v027.i03"
)