sparr/0000755000176200001440000000000014024105406011375 5ustar liggesuserssparr/NAMESPACE0000644000176200001440000000356414024073723012632 0ustar liggesusers# Generated by roxygen2: do not edit by hand S3method(plot,bivden) S3method(plot,msden) S3method(plot,rrs) S3method(plot,rrst) S3method(plot,stden) S3method(print,bivden) S3method(print,msden) S3method(print,rrs) S3method(print,rrst) S3method(print,stden) S3method(summary,bivden) S3method(summary,msden) S3method(summary,rrs) S3method(summary,rrst) S3method(summary,stden) export(BOOT.density) export(BOOT.spattemp) export(LIK.density) export(LIK.spattemp) export(LSCV.density) export(LSCV.risk) export(LSCV.spattemp) export(NS) export(NS.spattemp) export(OS) export(OS.spattemp) export(SLIK.adapt) export(available.h0) export(bivariate.density) export(fft2d) export(multiscale.density) export(multiscale.slice) export(rimpoly) export(risk) export(spattemp.density) export(spattemp.risk) export(spattemp.slice) export(tol.contour) export(tolerance) import(spatstat) import(spatstat.core) import(spatstat.geom) importFrom(doParallel,registerDoParallel) importFrom(foreach,"%dopar%") importFrom(foreach,foreach) importFrom(grDevices,dev.flush) importFrom(grDevices,dev.hold) importFrom(graphics,axis) importFrom(graphics,box) importFrom(graphics,contour) importFrom(graphics,pairs) importFrom(graphics,par) importFrom(graphics,plot) importFrom(graphics,points) importFrom(graphics,title) importFrom(misc3d,kde3d) importFrom(parallel,detectCores) importFrom(spatstat.utils,inside.range) importFrom(spatstat.utils,prange) importFrom(spatstat.utils,tapplysum) importFrom(stats,IQR) importFrom(stats,bw.SJ) importFrom(stats,density) importFrom(stats,dnorm) importFrom(stats,fft) importFrom(stats,optim) importFrom(stats,optimise) importFrom(stats,pnorm) importFrom(stats,quantile) importFrom(stats,rnorm) importFrom(stats,sd) importFrom(stats,spline) importFrom(stats,var) importFrom(utils,packageDescription) importFrom(utils,packageVersion) 
importFrom(utils,setTxtProgressBar) importFrom(utils,txtProgressBar) sparr/README.md0000644000176200001440000000426514012076542012670 0ustar liggesusers # sparr: Spatial and Spatiotemporal Relative Risk The `sparr` package for R provides functions to estimate fixed and adaptive kernel-smoothed spatial relative risk surfaces via the density-ratio method and perform subsequent inference. Fixed-bandwidth spatiotemporal density and relative risk estimation is also supported. ## Installation This package is available on CRAN, and we recommend installing it from there using the standard ``` r install.packages('sparr') ``` If you wish to live on the bleeding edge, you may install from github using `devtools`: ``` r # install.packages("devtools") devtools::install_github('tilmandavies/sparr') ``` ## Example This is a basic example of relative risk estimation for primary biliary cirrhosis cases from north east England. ``` r # Load library library(sparr) #> Loading required package: spatstat #> Loading required package: spatstat.data #> Loading required package: nlme #> Loading required package: rpart #> #> spatstat 1.57-1.004 (nickname: 'Chocolate Teapot') #> For an introduction to spatstat, type 'beginner' #> #> #> Welcome to #> _____ ___ ____ ____ ____ #> / ___// _ \/ _ \/ __ \/ __ \ #> \__ \/ ___/ __ / ___/ ___/ #> ___/ / / / / / / /\ \/ /\ \ #> /____/_/ /_/ /_/_/ \__/ \_\ v2.2-13 #> #> - type news(package="sparr") for an overview #> - type help("sparr") for documentation #> - type citation("sparr") for how to cite # Load data on cases of primary biliary cirrhosis from north east England data(pbc) # Split into cases and controls pbc_case <- split(pbc)$case pbc_cont <- split(pbc)$control # Estimate global bandwidth for smoothing h0 <- OS(pbc, nstar="geometric") # Compute a symmetric (pooled) adaptive relative risk estimate # with tolerance contours pbc_rr <- risk(pbc_case, pbc_cont, h0=h0, adapt=TRUE, tolerate=TRUE, hp=OS(pbc)/2, pilot.symmetry="pooled", 
davies.baddeley=0.05) #> Estimating case density... #> Done. #> Estimating control density...Done. #> Calculating tolerance contours...Done. # And produce a plot plot(pbc_rr) ``` ![](man/figures/README-pbc-example-1.png) sparr/data/0000755000176200001440000000000014012076576012322 5ustar liggesuserssparr/data/burk.rda0000644000176200001440000002551614012076542013757 0ustar liggesusers{yXNw2F!C"TBw!Y$!*Cd 3e *Sn{u_s_X콦k::tCMEa8_JH]5_]Ӹ+ZohG+gΡ|U\Eqȡ.rC!?C8Su`QXY"gJrXâ3?~b GBCBGBdQ_~0hTP|yƠ$D?Hh(>HHF=.y5_ 1$1$D=%l)yp%buHOȐMC |ㆢ|Wo_]HS8qZ]|@4iޣ~,E/,ZP>"!杄:GSn:zq7T)ҺI(š8ԋCK==(6j}4hE=߿@=/,@t8ҨG{#=1?2H_DwwZ8E> ]d(._I?1.YE8)0Ŧxh4E+ ~r!87<U}iH}UxX7(u6SEzҞ?+}CAܫ#a #/D 5W*KU/|@1UWHc0O0?0?z: yd nIxQDBWbݡ, 1$c GRP[BK 뇨|R(g7 Ce󢪙OPz'sS;tS>8.Qԏ^>и^yIqLס><#KIUVU/>PD9X&C`A\G4HuA:\Ci^UgTSgUK!)/]W&RP>\l4:OUTՇ1êTHh"vXEԟơE[jS*q2*-2;q\ ʟGjAnNj qKCj?Oc_dhRz"*:vA(WRT?<iD'nTn S+> iS'C&ډ4yDTo:T(OQ _ZG}v4$?a>v>R4u[:'ҹN^k-3C9/ُriIHriRyWUѺC)kR=nq@G\O]G]:|*A^|E{14z'wQW[Q_.+e*|Kj\/o#%VހZGK͍OԸ 7S殈HG7GX-n8o 8Ae@j!D1@;Ν8sbbm5芸1uUX8q+b,b6bQx! jLv& 'wS|ɲg B"CkpDþňQ!՘h*&˯HD$~h~s#5x88!ʰawюC1"I@D}==yDAɫq#E89r/J5`=Jihq2q "։'(S!F|JąGяGo"E|4DߴniAs~Jң'`f$ _^wXw 7JF{gONj`hhh(eR칡l5bsOu)=אf\ns-_t>\7k(o-C>.Cӯ_k-i[D2+_ͼIi^_6_nl]uv-/^_K5PN/R6.כΣ%o-GWCs;DXxɼ1ZF=Wk1飀ݭgߡ1ƛq{3ih`Xo y[c⩀s3~GHqӠuH4 .Moԭ=jf"6pHH6lUX q=3n;u3";{bŞ.o S"LbopQ ԃEdg㿦粩ўWH16,?0s{OÿZ.FŞ!PVErIz-F]0˞(2O7)k:|8N/][sWw)1fN`oĀn1^6u7c[ߴmX^jRE.~sgs`s(GV,lI ?]~7s)L,7RcE^91q1Cܞ\ym"ϭ2MnMݾ7x[YoY}(Zhռ}f:0_#8E0x?nKrwD37y[[E寫8h̽}I|.{DcōGn]kQ|=]`u+M_;nfl[nM`v<7 >f 27(?܀}XP';d^hsIWe{wJ`&ּlPCٛ0=,,,"f#MN=1Z]Aak1% {F{y]}nu ̚-oLF PsqW0' r=X*u`Á}ް{PSMaomS x୏XO%yI{ȌIlĬޭfn&bŇ/wA!;"KbN~B3qo(1h ɳ ǭbѮo?ڤb7Y311+. 
oD30%n`W|_fw~B^mqn OS2w|XON7~MZXÚI ~M;,/3z,NRnm#s10?*:X#|{#0X(&z,9f0AZC4%naX-^/4,ԃ?}fM'+`Ϟ|=9ԭZuFjM~u}VA{/ZO[~jdw)`F[į> ֋׋mI sj ܎C_l6minvpCn=wX)I^QFPt("`-sn fNjǫ$ Q^TG3ҫXʦO݉]|\EXԧ韾k_}Tޜ;}B{ݻmA6,j[Yqea (n$u\ Q{ 0GKk\Eo⊁ZH(ZԉeUz Pݸj&c|jMɰ&,ku߁RO]oe=מj@'7`NZT~ޛ[-;m Mͨ3͞sS˻=+H|lc$du1C挲DF[Ӏ{Fٶl$o"o87{za<8EKhjfab݀1Y=_+4 -^)+u7f❲X7<@'G>sr|ҟOr9xnBG$CWN]{=+]E- 1%Ȯ p66]=G~]/viʬDݏ5|3fZ/yDÀ߿̀Nҟ|mUѧ_+.\ ͧ=ȹql{@5{`N*Z ;N|ܟv'r+&$-vµG`^ԗ#Jc7{I&a!qwb}mtүNvo(r.u|;p. {=;50!}S1&-%]nAwpcS[7r7;ؿCq ZzzT/;qyKHZ}0S6e.{F> \^^%=mzQ[<w"Zw"=5!}qJqSFvLD0 ̴UEܽ#|ݼ="a:ӗi~^]l~ r^z"=AD4 \XG;'^, ֺ]6\ߟ<`R,]Yn+{ki`/BFPR{8p#t=sPGOgNY I[ n,A׿o ߏ։?\F톊-nz(; $}K:C3֑8S,03* lOǑ篽AwY+jeZNIXt/M:7dԪJr)n(ވ"*wB`mG̾K"9BL3~#yħC?rz+#}]j yqac/CgGT&{N3@>-=&ER?= >9]Iq@㑂3kf5$ss7_ek+t%&}Z,ۧ5΁i590i밖>rt՟){cbz~lGk> (;R?s˒̮{թ '=v5Fu!?7N eU!c3I\3b ~2*)K'l{ p쿿XJۋďod8ROZ5/Ҋ']k܅>zgloҿPْ~q㗥Ɲ܁4燊;GEd՞3c2}oéf nOˊv3B:խ߁>1[ pnŘa/VTMk60ۜ LI*x`ns?f|1jtO4/59%J> @lmoњC#ZHZW.sy@RŻ m?|-S# 1edyjrW>_ .[¦3Wx7gPy8ҪVڬ'/R^AΎjvaڼ.?O5x<w6O~|;c zO|<I_쥵R|eڋ6=huvB@H7<f`7P( : |K7j߯q_q9/ķO{Up3\F]W0O4fK66g*Aii:O2o4G/\޵(2ߝy!5XʋSn#}^⻵K}^t6 -()zNZPLʬ$F}U%n=A~k-h3YX k޿Mv?1j5(%0̵RPN3[ߢߑԥk lν@ti1 aL3SCև"@X_k5Bڶ%}Nnb[? ~,^xv:9Cctަ 7k ~23c$> >(f7PLk2DǩAy6u,qҭ\7oߨAູt(<]@QV-Qr'9d|w-Fm+B-C-aA0(79 !ޯא A40 <>h,/f Ϻ&Fn7~yv"5!Sqj%&q2ñGO} [g,z b6:_9-{LtqKS=@eXz<w Ɍ&9?--Lj{v0t'Pz?ˎf3l'G5mKi<\4Tޯ(ǞInʷsO/w7Mq;6tzeeg-K"zM\}&wi4cJ׼t>|'WA }=? EM^n9ǃ8<.([&ƫ60ew5?#ô~P*P=3((#n, BI2zSNl-\Dx)#r/\o]|abWfnimUIufbRO+.nJMjrsrpp\ muܑqo ,rF_ ˕HܲP# 8d ҸQ ]:%d 6km:Uzd_{үmK1 }Op |rgK3j?®LTw7Fex2GZ"yUJc~}RdžV`Ck{oWzvjs7 c?.GL&)[~YNApšOn1*gƝ1ZGk6,mp++]JO;p詺#8y^((Nya[㷀jǀ{@$zp:V 3/Y1^MΑ:f˯pt Utűu5^8tMSF+A2bު |c{WuaM B{B2/-ZnfD9$}E\ŌB76  2: ^bBΡu&um׌=7=35ގnp.7_?F{I8_*Ҍݲ?M›VO~Aۆ=̑zI9S߈q SKHxړo\a. 
[k^FY͵t*A7$?8kүN Q/muHʄFղ%~z2ۺԳW=wq;/ɹTXґ.I]n){LJ.ItZӆJC>W9ztv*\$- m#央> r>;ӥ>Z!K[G;> LݑMtcdz~9f[ c G2fuu6M'[s-}x?Zz'MA^yhLE&r)7s;m;V0<{`i;]'}ia=H?W߄Zi #*ԇjt-_O?EV [B1ӁN 6_Bs %5)9pU KEK@$Uzǥ; " {MVLyEg;pk+̨ >bsv: <{տgrXw0<Ǝ>F W/[7|I2jf_fu (~q;v]Lɗ Z;3Y 9Xx/] xZ%,0saEP X;ogs۲67og<U }ť>3StZޭ(.Ĕ> 0Ɏ5 Tg;7e ]Ro-?PCNG,kBLɒJ{Wo3+ AE_"~| ~2h5 7A,WM}% &"z|xSu1yBRCi-"R[ !hk:o]vvCcQG (f ]{׈t=SR지2N1lmPf?iC Dħ|Ϟ72!ΤĿs+"Rj,bʌm>e%"UV6Xb<+?HFl c% ?(kl@? q|>Iy Wo f^cVJe_cDonG9w+ώPRm*7_;(op^8OAy7EA_IĒzL@_={SѬ4 'Eż"õofȈ[̼ a'}|LFbdȫ13df=?x>fd[㺦@ *Hm| d'/kL!4ϾdI1,;jݏXax#;W@.V7*nE\qJ8! H;z*}F>Bzq"4 HEB^uI} 3z2"}u]T@LHZ;'ieН F و5@ Ce.p"^~? mL ="k!>iuF>s,P'"> M&11E1}Y߫~ԫ8WѣJzDx[QydvMA\{B$z)A37"^MKsh8V5r+s41BG쉼K=ki]c }OS7!e5 "T&agZa[~#NPub}F_t[8.Y߇sReqlw2Bffb2qM pLY ۀ89 Κ_1ӫM273K<[!(>]0SM?n c@n0եb^'RȖ֘!Ɠ8S-sLmOf"/}1۔ub7-֭@ؐtqߵg.@xBK 6n#%r;US@%⌸O}_$q @ܟ*1ճS@mF]0ZJ;^m+DO'epW3[rgV W-P?$ wyF\L٬'pBW@0e;{n{iw2M`z:1$1y N"W{埭kఆ z3Mѓ`  _/BDDGbSJs>?3^g7}m8m+šD7˖#(N]E`^IA?cty>& ~1P&mU_vSv҉:GĚEL $]zNE.oPN^m1}I|QmWRnB];0>OqMOrRn 6m7 c|3RD׈+!9EʏU"8̋kUOμ@4k칄g%O\9?aHK YwJm&vSjݳѶvD{ m]@}}='b y/*{\. X~ dt"jy:[ReZGTvZNU+l&'PwBmzD W[.X[ P]xM%U~ "%_F(*d ?K`p-@{b :\:VP-u/"y71zy? ./Le@roDƬC/3xIr+^_[Yi^cjڛ_N\btf%Xvo!i 'Of.|ܮ` ^GP~-ӄG G}WFqbmFwBF +zDeMAq$ 07mAXU7D\i870&5{l _&~KW`|Ͻ#k7קnf~\ 0.Uq hɯ[{2^wh Zx . 
WW[ ʭTCT!0];2bߞi.Ė+}:M^.G>9,]ݭA4}Jܓ-^0O[`hEmh>n%S݇w)"~o5;]\{+#"n#9^iY"mz#:kᬦ/[Bq>qOR hϩ_"GZ\ד3=A+ 7,O Ek 0^%al0L˴&c~Ԭ݈-jr"ڤ}:~9\w#*;ٔ0pwB4z.q7 l2ַ'n{:9G:h/}CGDCaXU6a]:D1GŃ !\s+8\uWI̚# @H ifG< y㡠>n;|䓁O߻_­Z+Jj@oqEW\bPgWM׌X+W]߷@/\_SIizwu"YE^2{hR7+o/$!V{2)!׹rQt[-J&7^**:Gb-5$]'ǃ*4[#RB=02NxQDywEoF`9r ХOaGm/tb&̮geGS˫R;eof\Vk0{h-aSg(BVEt(.}1w&c}Q=d=3/جvGh"%wZވ@rK@c 2xM !Jq0[]#]e?4/CI>@l=c؅sz-#f?m^{w<ډ<[fm-MUϾEĻǞU"z8n@ikMCOWcLQ$f_-CԤh@4Ya_~ zkà:_;7^ۃ6֓5xN }kXȷ*_SF8:eGmA5LXʫ,@ԠF(PL+ZWv{3<>I/J ޿c%ɵv)ŔQG?|wY\1E pNJBca7Ȋx XП@e|bb%H7SIQKk~D3QX[;}Uv(7Qgv*qp}y`]'QA%QSxB҅r(=ݜuQ%W/jw%D+U@ݮ*5֙Y%3Į؟O\`Z˻_ucCN9RWC`9"u]D_| **"_x'PC?t5v׬MPø43;|E}潄^Xm@#< Ҁ#qeEN@y#+gDnvb@Uy?MA PǾF AY1p/SQ{FFQq{ć!];#@ݭlwVcP7}XOU,v=+( MT}V'gP-k/ط<5`۵_@s dqwU7aiEמּR'k }%zi1n\u-0v}aF$><=n0]'5x ȏv$`sc-mYoK1q \i̔!oag,gάDߓum^ϚwN0-oi˓Y' <{},`ϛp{jћp[ k7KsNc's?X]\עK ,YOOkC:N Eyq] 0y!ăwNəYKJ) VxqehJF*a”K| L]1\~ rSe4 rzJ~9FY}ŸY@dߋof )8ôjq ,vh7ݼ;vu↶{(Q,% k~ޝ@iݷ` t^a 'mIʇ5^2X Eݽ58n2wi<! ĭcI^@{ɬ"WZ#2~qux|@TqI'x8*@Tg>@y9MwWlM2ml@\+J 3Kx`1驘/[nDBxc5yTee,8l1|obn%uT 9_HZbl|'o89N:%{ٷ{ Eނ'{)G@⺣^ܓa@k 'qkSmf=Gvg>1p-EHEGysM VQqHEK +娯ަ菪~whwͥ@Tz[9;JŶ~QDmL(u'D'J.&vns=ÿ)CEk HB+Uw)wz {ẃKlaq@Hώ<џS @<}H4νM@ڛtHX jɗ`A-C}(ոen5;j}@RmtXs/H$ɜ݃Lb]@ҵOף.Iq_6eH{@bILcls9) 7+āJS%:{vGfq T?jĨ-WZ@|3lE1n,ed,C5RxZo2ݣt=ϩg><1}D?ZG?sMNa)eۉ-;3[x?*`U'rX\މrg+ h[? 
^sߌ㫹gh<1sߝ |[ %a]N0][*u*[}8޻y .ȧGV2k l2#)Ha FzQVsHړ@V<;PhJs#C B/f$e;+Z_dV,r}x$@ԭdGbHgNB?0q q;@R]l6{e=.lA~#gDO6DbȻ.!O~;)1Î7O"WWb9cnsm=3)2r ]ڪHT޷R:s⩂`RwzϛCI;@ù=}(C펨koo7.@Puzgӫ$U=k_dd5̪ Ġ_c5׌f'|V =D3;ڑ7(țSi:Q;"9jēd7_߄?QC^ pz6GҋyA)ㅕ"m*۔ďGg$?Ѻ~u+#̐ܧb 7B[ҷu]oiѱw߁#OV >2) oR?SmsyJv pY'*%H"@ιC|0]f?Qޯ-.Hpyr()v8?]LS>iOs[eާZ2A;4 +E)e] sQC7Q=4w'f(u!vG:cLry _l]sr۹M@_+qr̂İ\#@*]C웭|{1 GhL umeJ*)@z֡Z|&ܝl-@^Mn| n}⹁ts:5^==d5{V++>1Z+"DZu{:v}BtDNw@=RC^VgyG N_A>+pɩ@ZC &0nI\K W}GR`pu@L~$pV$Lqü'H8 9"|r̀ta^J>솼!Da;$ϚFh@2; / 9tmB)>p yx S$k/@:H*}J ݆v%*- TҙRkxq -=!@:EM$JO@_R^H{)DE+Zoc2[o ?I{uַM _#}vۏyM@ȿ%~ {z7E>ku~tj Hy`!]۵R(bWih@)f>`10SŸTp,y(%6@M5DyF+(ܿ,6wͲ31XO[q%'[gP}zrmKJYS5jM@|\CV@7\ ➥y6oҽ"T:gfCe{Qi@٘Y*ラD.}T4ld ){xe<'׆]z~퍜:;O?ft&dkW6@^#1')s^9hП8}`‰]^s2(w7*^ \,fИf y";Ccx_e`lԷ哗FkXrKPw`p ~1` ]s pN`0ۨ ?fg7Cܬ[G7Ԩq> ھ{(O1vi<<+Td`\>friUڔyKPn#՗zW%!7|78].c"=izqdf&=^C'~sO7лw.0VΝCM8q0T8 GɟSRVF"W`j C,feRO+!{`CU=f[D-ᇀStX,E^G&mF@:>OZע藣q"r:-7/{nw7W{Qs9M^t\?qZ;>θN ׭W@ Msf۫npVyԨ=gHzV~f [:ct۾dc%`~~g[.vl]}54kGG&S|E1YOB?xqy1ᄹ2/8jwv7A9ڈ7omA{TjuzWR3`ʿvs/#2z&fL oW>ރO9wcX-2c\uXs!wxl[<ݱwY$5oQC/GNJk۵WyN.,ʻFڨo絃Է>W} ̅!/WDts =D&MV(wf"w%j*@j,/䀞,31Et jʣ@/ق|q;H3EʤGjE= ^K_k 3Sg|aAYG{v;=e CבrF<뾋r|)!IR0xeP_o~ u/M ϟϝ &^,0U~cenmO1o&~3 I@ltw©-@K[ԉqu)u4}㺶NJq'xB[E?x%xβ@ ?2󳭼"fmڇGu\7%;P*?%l~O.ڍ?vmvM >[ק5e Q"{t~FD_0k.}zhvd}M *@xpL5йKoenIA4HF}Ϛu'P?|~ }ź/.mF4/Wƭhw]Xс`m_-ٻ~Y|qnWZaCT> GOeRC {)rޤa0y-h=*Z懵1C]]:m`|!>ϠZs[*S/be!h7\S^cU-|z38;9'POp:Bdj>Q2g(wg6>YZ( ڍr,z|/Τpbh3Qp}rͯ WE(مrdjsq(p>t2|!c Ǭw%D+WI=#^=úJ> G&:v* @?+rx3Nkg }[gvPQ~q-{jC"y14XixY\kjS_fGEџS e_@WY-fRJ3Ϲs!ֱŜ8x|"U!0yd7 ʫfs2Ǜ+яWW߳E? |wZTN ઉ)fvztGW%n8ig ([ՍnR43w~fSo3ѼO c7ÞO'#o}Aj/XroMBftgJ-yw-FXW޹ }1u@Ws> ktI_sI!2#0oύG% ޣ tJ.sYMyHJ&LTg0zLxqڥſZCho5ٜ1?~vf%+^ʏ]aN6k —kqX|v`+0^|,-cqNA7 "zxo!`_1e/+ˋ}D5n"Snf2+- ԄY^z= . 
WU]]ǀDLvp*_?hP!CG=8Pacph-Z~f0c68Ti.%c2Z~8ZF2j-ˤ?5!CvyC8q8qa .-ɹey4ϩޞk}QwKܖ_n{-!\ey| ز,g_q>KsI-_-JaYK).9Ǿ{s̗ݲ(<ӲNl|%.QaKH,pey͖/DZ~z霛/̲Ė[#?)֖e9Dml?+,e*/1eoSXE}َnZ^[~|K/~c|%3o.#8JqZ^wi/;pYGFk.K_ƥ/,yI/9g&,p\aW,/=#q_d 1qj˺RX繼Gɲ>|dK&8_ܳH[1+~bl)^ϼ$˒<_9gޒ-qe5.qyǒ x_1ӼlX^{_dو|gJ r?st/XvQ>*G?|T Q>*G?|T Q>*G?|T Q>*G?8Opf@d{Г=tj%].ob޵_ ;$q܋& Oqr؂g x؅QQ ܋re5@ 4] ' _qJp Z+u^*?/M5Px|[ی7@R ڶ)GHumVjE[!y \~IH.o<@äG >yo>$aBVPS'^#jrqcۃo? 5ì:P4U~oUBo4- 8Oq<@VnF=ny^C ҳkn0˶\*}ĕ #@;ҷ;ݦ2~ 4\9:hT}'P9>(էOf_BM @xy< !BSo"kԉ(wёπ q6^;4pUuÀ.UXM7kp"{x}1:*O9pkz8o_E ! |ϸ5緉S*ahpvyk|@=Gy^>йDO_n4Wq8l=(9eoEI`du= , UrW ufR@yY6Oma)Z p)3^O+'Z$p5W b9; Do?ѳx" :?gWO74zyb*/0ܓ1ogysaq5l{Q\7~℅5>@&dIa>v=&W_fi@ey!dM ȎӲ*a+q2@#ThdmϔO]Sp|,@ ph^ǚ>6c =@'MB_u-4"]#WC7 Z܁2}Mw2ز`Ns^ۉwqoܫ.].O~=I_|s=xF;iߑ*c}W p=kc-swQ<bݍ0&@ZXyw#LNzqU1F@LP΃yN!.,ϒwa>e߷cHAuYwtd%I#"(k1XgQ]̜SN.Ͱ'sQ8ƙBʎv %vqR4H μGCQ`֦z$tU1 QۻoFla+"?1NSO=^ dFc<VpzW IhCE׼¾c #7^>sPqHRm$ӿY{ @zҭ& ]fڞytK yCMq'/l_s@}]!$/ga=L* 9KZc;I rhkZ@{ y<(\ZH'%1i9ݐ$ 50^ /~fPQϱPnzV>>Dӎdj@ ql~3f?ߦ>벇y/VkF2|bQا]=;Ȇqr~䓠=@Fُ)Bly؇/iۛ}Б˺ c*<]NAm$tD ODZO{!0 (k=GSN%|ix޸>xQ"' Ҙk[a[iøu1 ~ n ꐜL0Nsch&n b }`ްH ~빀4X<`E-_17Uߕd'o+q^m@.ԟ d(g}0)@\3ø źT[?~l- HS|2;k,Xԝ©3\zżu@ۢwYV W? 9+rA u?MLrWw{o1UXuU9u׃ƀ>5&VV%~{{dJ\t<ܷ0|IT QRYG@^>N H9@揯U`2##uMJOR>KMg伖Q K׉\ -yK_2m8߱9OȜ3WŰ|q^^랹wfK*Ecxobqg-?yGa”G>swU _R7 $ngr I>: e Gna_RƾveOyGu8<>1gWrG4λ*wUE^^ҍ}ʙyGXX1 7!i˴)}@<>2q%я!> odc ߎ#SWc wbBqS'_]U=Ju ;?ҽ no\MlڔNxX;|~;ݽs Xd g̽չ. 
wOqՂ8ϸ>@AP3GmNzMȰ.dɬv8NxCba9`YAԋrsoX$ Z'X %_\+<oqg 3ݖ${s_BIF"?5|EP~ Ra?9)Xu% ?žv OuY .u #T?$b=paW@"6>r)o^`q+ƾ^3eGvbo}cĠA'cMq# U[WS=ew:6 j[ Đ#ٿxsH}0os:.2GZ2 mW;a'N Eyێ>_΍%1RyѥT&Kbuse."ϋ"9ULq7KY֯"@|ѻc8~Pnb8Sk#T+r%\]c+=w/bi\Ia;/@Lz.m z:`i@a>t>lhx4\]_܍[H:y[|}1GCtJ܌yo,uļ<]}h'@|u}Q 2ȶ7so[3Y C ࿲뼣;@5-t7wj MbL}Pгlwӛcx}hi [=gC,nq*1*#/P4$Nw~}'=xXu1/?Ӂ}gՏfG n$J0 [} H>79[9;kD*ƒ(>&IM`HL֖ϝa]L~5vLρXc;շ(3{ |b"biHΙ"qt13gVS8rk;ţ諗>my?^| EF"Yc>t>9: JU^ ~U3$?R?]^6wPOc/+q'^_tьE?o-{)Jb4e;N7nolF 9U鰱=z BlLZ:\0Zw2[_sIMZ\'"iF$fPai+9$['8| ɸ۬\6?[SRF7]6,KqO ;-<: 6GWT&1zb52 ˺W~k~'^f4b=ܜz H=^'#cu@|{RIS\buO$}wߣ̕G{)w_/<ᤵ.8Cžя{c8__GǿxUVzsruA7k~\)mfYokWr0\^TƯފ@mJy^_0>/){z8gsk668fuXL FWf£Xb|9Fv; HҴU_i_zQ&CQz} 3ӫK-~- ƭg aH{^HE?ⲟ9(l뽷@HA]#]3SGO?ze @ZyAw{ 7[I~z(HkZKP?yi@gڦFt^]2b=깈\~s(n]E~g]D<0Qz"rdmRՏ1M>7Dfs~I*y=zCo_C^2c' hh %t ƺŹ̍ Α2$ b==57{uc }viW=w'ծ‰؇4llxzpEhY$H]+ ;yfβ!+ uYp;jo'ey^hW 7-c F% K:7;0.HCCHOOOq^zX,+NҐfΊ}q~Ab4 6rk2v b/[aPۀ֜$%X|[f4y^v\_.X]'ο>m z@ј^쿚>Ef`>0IHsXd:sVQ{Onw[_>n5Kh-T fZ}h7{X׹ވXӸ==hUqjTkK3I#߰sv1ۏO Un~t} Jg ;vaV3~[%{\ ci_+;W%r›m~@8Wn~Y_Tq 溏%3YagJ*~Tz%/+Ӂ05]>rabp_ڥ:r9Kħw֭@2"iaՀ|Q 8ed.A97 \9 䛿n2%\Q& ߠt^_D螜K2hu:z@YPQ T3.!-n jO( kvo _Oz:ӣKkI'ӥ II=UӅ9-]y~ 䱉c>D̶Eu C7*.HױYvJY\q.}C[Rx_uJ* Ȇ6By_6Xs0oe|ums# ss8A_a<Ր`=_Q>;<}8%/dпf1g%fbxf,J܂Jғiޫ؟u)uVƸͱROcvfШW7?_lzQcԕ{ۀl10 Z@ũy-P\󛁲?#3aw(6]!Fm5ƾ6޽@}6yK(P,Yl&Vzjve9\_B"P6*|vۭ]H~5'wM~(>+yp`-^=$c@~p;PV/;1{Z~;D74|ߪ 0CZ@y{I%P#_B%'({:('S(#w[0P"ss $+^~ #@iӠ(}@ k>ޓ2xd1ι{Y_1/"\ZeΚqs*MsPwJqƁ?BṼ/Ś'@\=藉)ĪFP:7ؼ؄~=8 H{mN\gd~*P.Fz{*KEAp~:`.b{Q/C;X%//y_;W4̉|Dcw:i !؉?SfgUɋnWq݀|c&?-+ S7ZKK_; O@O_C'lr+ɿ|E(߼tFO;,A2sA^Xt)3PD{x؝ ()\V^|u[al$2_p~ .E 庱h3" Ȼ2VXϨPs^B3"@MZĂX0/?u!0;WGƌ@sj$2y=@|3B+wSy|EN:&U-$t0vOr=gK7ٰnp+tȋ-w mXL&xq{?`K{5C ДfS~^ %?=(bvg~y^KI>M@~׳q`> =^yke耥].qĸ[ "sgf, 53EW hkzyaE5^>@ܨmh ^da7Gyc@#j;8Y.9ji[xeBۉxFӦ|[Cq:ɋ9@kuq2‚~Bb[5+p{0ʫ7ѯR1.=F4~h#x:7Yc%c~J߶:f8뭳]藗w_ f.xa?kjw~n^`7oR]sTv}/Ror/QR;cJ4E=~#[2֠ޒkŽ=[C36 u}1r+(Ǡ? 
}rW ~1*bpȏn@~8>@ޠkng5u˷ǒM]﯌VzHioLn1l uM^@33;ZijUK} =]HW[{C!@k-ډ}‡,_ق3<AB_b> xbYy1J GP}7Za6;\ nmgi){ȟF>~9X7f"V`ڹHG]ҚXF[Lhޟ~ǐ+C9M#9wwNA=?L ڈuJ+]y\bỵ;$H`~'H&\;q+6Ϫ$X\"\>B?ڭc-u 7>ohVCoՠ+}qF?Wuβ+68t|M7PlOtZ)EO(Jb]$믃?+}G".sJO᪯[z۞`"Uj媛@a8+ a7Ghe P?7( cU:np36V!‘,ȇֺ|haO1c,Q*m"<`'vq=úGgSp[P֤p!^{X J'x G7WmVh)/H߄5-7^a٠.ľ3;{Wb]7ֿGlu0)^H_rxޞ-E=s?!c޴8_aPtO0O hzi:)ןhœe`J|:f嵫Omv>x0Dnpz3#A`ߒv_;d~:? mvݓ G.OU_>!Ju]}?} V+n"H>R5%N>Eُ@-0*A4V: go&] ^Jyy X͂2ho[~_1#Z~V Вrh?&Z,pgWt^6y6Lj'M뵯gV=>@/-p^VM@S~Bs=q&oHA~-PF>Sj  t ixmgr6@.&!@ = <{=X^-\Ԇ;n.@;`{.`~$Z:}OU<Щs^7vwVF2|b}5+^=_iA4zᎍ@c@w˷oW-Ӏ8Rۂ0 ܫz/Zh}}[w-'COPͷI܇ʯ̙Xs߾_vȟpR.γsz'1Weγzow̮@Ъr\x~2_2!߀@C;8^M nřo=~! >ਮ_mQ :ˣ{wHA`l Ibǻ,0ZډXc ? ClޖJ`pםTA=#/a|.~?: E95C5c#J +x/Cuֵa_ۄ}ܻ"=}p}cZ20+5YZЋ`c`lN_!c]x.K|Ux2)y,] #kvQsS\`'fNJbܫqߙ>]>Sp:0FRj(wI}] Khn0 ǎ[avdD0T3WUC `:y'`lzip[f̦8}qA`X\<(5N{G uebO}FHW; U('uuZBΩ_vl v2=2#B}1Ȅu.`>Xnt_^ecA;-`)v1q?B6luEnhѯ{͹ PO\dRzK3 0>*d0*mhވ a'xJV<\1|uup[\?[;`}5xX!\)lTף'0xϵxGHIhtSBJ6K$w7rPqD0^eQ ڣnɾ|̓*eOGC } ǀ꣄;w+i!?'Խ_婔*^5\~ݭ{(=fmJL)r_eގzz0z5$^Vq]-aG~ye߿"Oے<ܘw"9LBs*QH='SKdh_+-ϊrXW'[)񻹠%3QB3~N, vйe y=zWer_sDcyhQ2w+.1t NNIK#KY9W G1;Pv6ynquLfM۹15C|}!ۂyl@ q4EWocĿ._u(C}Jſcجm;&  i߬WY.Q-װ, ҟrn W4wcr(A9/g]/xNDsk:ɥ?Ĥ+%;vL_څuTSۉuzK۩3פO{ѿϙt a$'@lJD=36~l/&~* _*m@}_1z9vy+XLSՙ5uZa3Ճ9XI@ |=PD$;.=vZX ׹( ۹"8Bj7pޣ :9x-|~{04Z/nź`^ lԲ>ZpΌ,H"p=ؓۑrc_ =#&;_y+TIwʌ74Q\4Cyu ]]Ӑl-^8~AyaM̱ŸZ7؊uɦgoJ|, ;$\,8u`c1W~ Xl'bgrB=uR> |Viؑذ?kW|Jq%|O';_d|5.Y"?y@ul6g؟]%0VךO@?,bWao8Al¹/y{6qҰ7؊q;<@P,eW?'vnFR'm@}_^'uFa㨇g~#vLwVfq^|WLۉO}&<ۘKpCN׽_X|~eyVo(wws) _p yB:d4dȿW1O6"XЍvu ԐEش15vAR~՞*pIoOWBl;sC8Ђ'#wUSEχ3@˰o?6{n@.]YB6?Oj+U6,UZLͫBQgR'v6 >_(]j6س߳@뺫8 yb@ q:Y]4; \y@mhEi@WByN I/4 @ ۖ47iG3gvExN6: 7O4S7&xc\ 4Gwu16_ C+o] cC ݨWS3-DbfqцvΞ~s` ׮LIwH]uih́i|nQ7ub~)?FžsG'/ў+W|ޡ߬}<ynz2ʄ@,iC_]k Koee}9ϴ]^Y@/SA@kMRuhXҕB${̛G T08dzQWA.,JZ@|AlĖx{( 8.n4 / P_,i0_J8cܳqZ(Uiɱ0ODYU4W%,=VxD?ZGARjGF9Ű_sx\( :r]o7&!}F>" kk\:sr-}X;iT< 1菏Um(f@&XΟ)CL><h_V<ߞ+߾$*v;؄ڠƈq{LRZ@hFU|O| ,/Mgm+俯 `9BD")t2o:iȟj!y.(ؿ 鿵xΓ}zޠQ(dh2ht 1\TnumnԧEluGzeB6od+ 7Qc'[h-չ&cuz0nzƪЮն6#Hg^Gan&N!plOf"p 
h*ƏS/ɂnV7%m(_[yw(̇|u]2MҶ.r_^/vkVR"h!>XouoVy ja&k15`mm NckF?mYGR1v}pyv~/q-ydO,>׭99XkN,k">?_5۸^s:sB֦BKua/57rڔU:\܀u6:[@W.(zRJk+]NU9Xoa>_0qv>#ټ h:Fvg4#0eK >GK'mo]u4FI4b0h0c [N,-b|VOP޾lρV2g&t>2eq>X8lx~)Ԯj`KFyB/`|O[phO ,2/n }[hʇZBwSC_;.[<_lZs#XowsLi@`1fNc+c+W8{_&6LaOZ_;璭@Y~h?dkqߙLW1k`Y?@{':/zOEQruFh#h_FM.}u^KVE治99s}B5לc'd> {uls7k}6Jºm-#{)[9źPNDt0J:չsCXdf^OlyD#{ о'!i4Wd7 + 㨯>kNa^/1DtgWPǽ ꗠ\81aM+u4^Po={ Կ&ـڻi`4s{c@ퟤoGTzao>z@6^ԑCC71x[Je4emZ޹}# &yˮdD^yO֙j@2)9TsŇG끭2PS}7U&G,(0j=^+/5{ An(o0ԦubҀzww}vLOP/==Ժ~gI69DWV C~ Xco)o VuOK~4_tEUxHv@mNJڪ{~sw[Rr˪JK0?SF?u$5zE"!'t%fAMlj[Ըٹ@ud}CXQg5Fڡa(aI_U 4жNyI{Χi}n+҈K 9>h@}7q QKעGy`Z}k~geQ^ ׁC_3[КO/,Ƀqn';_11}8Yd8_TiawRŚGLdl> R$O_wvcPx1i3ԣ1us0?}`]ɉ1f9n!*h;~pk'c8\:h+<=QdjU@ y㣫Qگ:쀺`Q;P{X Q2,DfC @XCD^G5;Ņ~hz=nE?Ib|+ʬ*%"Y3f V+b7cPIXDbqVku:t5}E Xc17 'O[;R=@}#6,kkxkӐW=8:.j(LU*rRlV|R'7a"u&ozΠjvPu[4T_KϽ1=p$ŐP4ˮoQ1]AJPa)"$SH" %DdȔTRƤBt}{O]ݎBW7p#"N_}XG\}.ꥇ\괹''V(yp_9/ĝ~[% OA/7Yeyg Ua:A= zt'd^PZeo/6;DY%W}#T}p3,u0H-:nO+w/o?ǣJ}_Õr; m7بʢZDB&,cW.8w̓Bc6Bn&2:C~Zt~nBy~y16w5Z;cơ R>|eZl׸߭E<0yw|ŧ nW0SK--M 40!8p:>o+]`6n5$<`>crJOXOu~?~noxΞyqJhua5}(~Z |NNA]|j>ȫ*Z}oѷi;tEsV yZ>;g>a2KZQo) w' #[_9Lg&;VQpLJDwUʃ7+Sk$u|GMmv)p ܓУ&^^܄UbmIX,MؿҷEN j3[18?ANSMˠCωfEYa|Q߫c*g7(ɢń/6 nOA -7F\PB.!?%%Shgّs3 ~,nD,~LxS|bޟ-p>ӛ4]D(TpK-o5E֙s,ſSyJm efcj*Ѝ8MCQ6vء|܈L)p녧b ]aִI{߿vz1?%6܋~|tV̷׊ Bޚj؋ZP>3]7 uۗrʮE}ڼäNQJ˥hכ핱E=7$N] Cwp++5gLʃc]ӛxBٸY|$ԶFéb }iyf=wߧz%u+ʠʛȚT CgߺR7ݓ ~^?xk'O^{}̝C'fԊSg:Awg{> ~BMՇLuCydW jEx:sk]~۶&;Jb `Ş`4A}袺Znα ;w4o~ZtrKg30k?(Q3 Or/-9Y?1Om#)`MM6,;]; NX.lI>!Ye]Fc#ygໃ"`9ݗ@o@%f9QЛEkK AW'u:ř] '27ʂ{l/V9\~N͏wTFK 3B5>E*u9b*:Tq;8y~as)rsOe6#oK:4߯`y${ٳﰑcSOB/k:c/R:?8Fb|yvʹ)P1UQO^$tiN]PYk~¦a cv6BOa_qiwzꘇj_I1wMӭ@DoVCXbNXCAu?dE1eQ'o>E0XRA/-ayg]_/_O9x{:_{%+w+BK q7"TyM,<{7oXn j 2f~jZl8v*޲gBՏߛ1J ףzCIa 2{dokƼ:T!T߼UYoByG |]&ΐZ髜{3G@絫pƺ_յX͙{Xs%7~N5ż5aT_MB\pW.d:%CB) euu=1?fel8*%0F۳=2hw۰Q1RPmz W1mӣSjor¸gUY!X=6ؙPdW|m ~~:~ $.;4}9#b*; 3eYkćoR=Vkc4.KU"h*S`\ԙ}&ċI䅞Fs`g^/D:Wf*zEaG-2aoKc9Xc|3)vPj>-kKa\1k%{~e9=Ѓ|=եSDApRʭj=wYxl:Z r{swG7ow}P+t krxlIpH~V"^\ 䊫 j7t×`Z9 >m[qy[yt俲td;⊴}̆$Z8^j{\d$Et$+őWuhǑÍG,G 
C^hۄ\ý/*YrYQ;\oULvV0C~hhނ}7L++\: yO(7O,Uо~QCS ѕW{9.?b}wAJUBM#YWmDBP6;IhxW^EhU-B[Mhyo" }/w1sW/gB;9/wzWw  'J‡&!& J^V")`@6Lcu|ek =7Ct4m% %B.:S&¼4-;gGa ÿA? xM^i^3zX;k _~J* ټ{He&n|^vJ;j&l1?_->=>hGh4l?;{#6޸r2ڶ/Y"be^m#N'qA]ui@:B v @ȷtԹ?ZC$qCJT7 +kA=-*;%p]rxpТ|[{3=QTBo*=~~:_HhjӈIrjB t x1AWJ_[ ?ԃ[ zO8o75i=~F~?g?딱@ۛ&AJ&/zV*.!v]7˔7Jo2-H7y>OXwS&,g5<%˵(7e,6;s[˄%l蔤FXZk&Er’6Q"[km#EX{DR: Kӹn=pZ9H3$ڙ(ɫDnCU79kAXÏcmAga|#%wя4oQn!,q|R RüV'Δgjߊ\Crud-[,W.bohPv<1UDP0t5D7Q஢k MCY)0S&k9fH*az(rKT]r׿*^灁!̳K60dE~56_w[Mc|kgz/'vE2%Kެ?}!a%+6XYV|幛 tfzp`(aW;=~Tfr>lOد{4o51L{54wfc>^v7G_ITJn[SFu5^NÞ]2 }[;:.exή,~1 _u2^286VذuʳgTT&?-4A_-n}k@=}&,~tO0?U4p/rx4Xp>쿇{v?_N{رpTGr8w:'=ٮsDtsparr/data/pbc.rda0000644000176200001440000004176214012076542013561 0ustar liggesusers흏_u]/6XD+h9nĦw0D-F9LX^r{i6C1Tj8NGEQlM)bduyϗx\ίϏgy!SSS .\0``_L8uqsAs#hm[v GO{ޭ?7x??'ܵÿ?ԂÿO\pz>US^e/f>w?Wy_8_˟V3ϛ{k_nCjzs;bjw}wG==7?5̭7aU3?|tVӟ_TӟXWMW桿r7rs5cgj#Wj ϪM5ʖ3kx;̕djfxU5v꽴vv-DN-/'߸꿭}{ wu>R h0Ojv݃ϴt|wxL˷q{9J˟9><9^ԮmϭWs0cs|ǻ^:>un]?}{o;[|sm ^_yK׵ Nw'.ysj46UoIsjwË-}zmwY;^qתv^f3߿+Ϲ'Oh5~8ϬkhN[6wa59)x'9>j؏OxOnNjyT7?]5[[.̑WVrݬmYgίoA՜Y:7/ UspѷSΛvm܍3/އW?}Fol#=s{q5z6Oz;?R~kt{NUg^o?P׵omSo;}]KxagXm]揷/f援>8T/?˧V?+}y6ko<gߛoWU}H˯9~~sj UyfUŅy^Ujϫ/8S]踹U-O/ѢTo;VFn#/yr&pw:,y.p /CϏ tyn70^W.Xx]\y{,- <>X=)+ɺg\S_Qi'|T.ggv3a^;o/F.b|V}{s>[ܗs3GP;ˮBgž[Y<ިiz{uGoMQ㾌8~&@.zẉ?斮>(7WA7vwp լ{Ka﹮~y]8kEVa{?θvuۻ3Cr~o1~t_XzW.ċ%s1I-𴐯b~ OCzzܳ|r?.t4/vA(plSMfS,pϲ_hga9 \=nlj\ie]7#q]?C; y0sѕ_)d<8(%t2޾X8b='u$$Ù2C;y]:Gq?]x-=qA)|7K3Wcls_|END{ƕﴧ8d>:WįG~'%_~4 7}ڍB.k_\CbF~qx/ܟybq+w6ч2f%W X;2\}f>F}o ~h#[~&)0qP?k76O|G䨧6%>?qk_.}U췈C?Wu==is=܏vw58e}.HqSs_У5p }&O\/S3ּaOx>/)Z_C3>'ϯ_}zwrcnx'_yt=p`+GGT_7-ϛg/ooWd|F}Cfg ~-i/jOo[/ć;xUwW\)~cIz믭ҩo;iq ծG>WKwa_ɏ8GO%*vʉ^jK}> g|v~*eXϿzk'h}=x&>spϺQJ[ؽ?KL\_ĸ(ȥqK>%Ws_\:qȫx~Ү#Y?Nߏu]{Nٵ?֛/*gЫBo謹\/. 
O>^33.l䦏NqN$(gʥ:"e][NݏOC}r"Om A9MtZ=Uߥio7#5UyN^DW.ȉ=H{&]r^;W8 =jFGkӞSN;v>dn*r|)WZ}Yq_ʏrP;i*qN\qo ąK3g;5KrBnƿkO*wK=8?˹zNEow7*W_\_gWܿz\ԑ/_C/;/}-]qi}O]Oy2!%d_N+xOf3vU+12~}%ƩE/d7I|BGhg[pͻ_Kp\e+[׈w׵{&N=qwúrT7&?N_}_='՟S~#׽|XW<ޮޅ9).+Ow oƮnه>jב􋉯PW9.o 8E;_ẄGO3/#GCտ;\Se%؁ 􎝇}K]9[ؕqfG}$?qٗg_} Ϲu?~[/)KmS~p?yѾ!(}ڗXG]x"v^#֑/a7XS<R][q̸}AeKkU+ga]c1ݗ[:C佥'~yK~y ΣKު5=3gMp.uWScwz"_gk{/ޭD/z\ɸ)|IpgXD7ho)*ܾ2cch簻*+o^P;rr:EѿV7l.?}<88%q4oE_ފgQ9^s91ОmRϵ[_1y5a-2+WcYy:k_y;<<~w:Vѷĉl\N=C;, d';S7օvs>/7FƼ;vU^1%hoz|i gKW}pi:>G+oHwc]T}ؓ%?ݎ7c?{}7=K=xVK=8/ tоyg!jwp)zX\Q/x:sYʻqtӯrL |Jl[!Oo){1=2_vN!u*s z*ξ+t<ȡuG˸>Ϟ:먘On+E]@/oƼzk;zcw_CV?|y#q&#Owߪg}߼@& N鬽~_E?/].+'|Yw?3vziOg55}R \{=\c2_Oqf{n犓9ZȃxUS`Ɍc=; =X0_r3(]9|g ~9*uī뤓rz ne]'^l>0?}6޸uN>>:}~~$~S|P(=ڇX}wzO3>!vI!ʋ\s'0yqsSr_}5_'?+YuW6y[!Hqysʍxhzo^ 9Q/k9{1tRo}A/~?qJnZ/t DNoA~8G ~wߣ>ɸ tG/\Z7r_I_讼Sy̧@`|#u^ M~پ&oP3) ?ã/^b|~?'yNYY{΋]H=C/fs}S/+1:8(7 H6uWŐ>rgI5aY:1uU闾)\/N WR=z^P=v\q/˯)74}6bacu$n}4Kbo˾kS=ry_P/htG+ϧmM>ǟ4?b ^̈g_uA棤"_1G;{HR OvTu{t.лjqra8kTjK=CzWwPxZ;>¿r^KmܷCKpGS31rib^YߓvWXx<8O:l=- vq-܏$$>y˹?+*^l/vݯ_؅صx]Up9uqf?]R^f |k/#8:q;C^;L~{Їx$sᅭv?_}H/qK}R33ԃ]~ S~}O:bLwd^76iX|e~8*/@;|h GByJN}RѮJOpN??UWS~~ Fy șv5yQI*؃_kS\m~wc_Na\듼̓κ_XѮle>e..C7dl={w~Ob.;}zuBP 1#y?r' \;eGe.p5 +_݄u^WgMWu?=1,K+vDyOup3mjj gJ |T\2u>"GS?I>񋢷;I<9n߽xHYY#ZxJݺž,>/~g'k'/.K\+dIW0M]\K_׭Kcه+g$N&$^K2 )G_PճmZwϊ/*Oj1)C-{h^d qiKJ7cg\~w\ڇ}=>݊=cC_|4O٧rN[,Fy7ͻ!7}%]] o$w+77PD}ԛ}}PVBq'~9[.]Nza?]u~'[}j1r=Z\ɾwpq}7-)~Jʑ޹use?>N+o׸t-G^qAq:׵G~E:EfQ9Χ]OQ]G{Z|/}5O?Zzew^|_{:u;R/_y(~-x7sngw+s{#t;t2%q y^=_Տ|qnACA:i'w~#?8C_&#s2vuhū8IWGo;B5C 8;u5:yHG N/P~w#8zuG|C=2Oaz|t=k_čQ=0^Gg{)ӽ\o#w.ځ\??~O-ޗ?ŏ>~|/MS=8ؓMӭkQ^Σ_l.~z?z eoDT:9\TAN>z8l_{osžAGp y[߯ts2~tg ~S}wIvIÔѥ8kj}>Fپ7}z |5n#=>uvzVC_F؅8ǟخ~-)#:ܯq<9=oe| o4?賈k~K)$ w+c`Sk+3#0Fy4|#~)O|^_p~z=Gъ/+W?upBy@Opx>|OawG߁y<q a=b8xmsK'qTn>ֽÿ]<_s\̂>/Kx_=R~5uo/#٬[}ʓ9y*c9W?Fvg_~Np\/f'=#Ə/_׭k"tFGWy-wwq|t:祳*mj6|L}"ϋן~Gy?_;2^kGO|8eW /WkGXs8/OwK\}N1vю8_䷂ KF?D/ԃoe^v~@_8g yq( x_"Qwr\J_f ן`SHyإFğծ?ͼݼqu/ũ]qs/d<$8vI:#+- G4Kw9Gī_]_3>9_giVPc$1:ו q[;('cJEBⷷtf];渉G?MA8uCu5䅲8?cXU'' 
ܫlLp^z*hOwsIG;x^窂Oe>EE?\;[iϴƻ}0oT?r(~n9{*?O\7oao\x_%+K2%B_/8.nN}(u8M?R{Y;4.KwUx\&c=}'H'F|j cO~Cԗ7 zP78)J8Ǹ\|PN'4{ }VOُL}W\b]7Ʊ2f^`7j~7gtG^kȷu)?Cf˟cǡjy8Bφ8 }Շ̫n$M]ו;PX>o_pS>?_ 鷬Gej_>yd|7f7?8y|C{yO^fne\~Qyx 虌ϸS?aCz0mܩ/oy>z_e~^83akq}Z3v<:w?c=Ac1kcWsMFRcp66<3~P_#ĭ5k׍\v~K'[oCFzm|35sum__Ň)7OL@|N./9 :qЇ~e]La3!7~'uؓv>?C]upOk#ҍx&_9U4g|gd֣`~%N^>tC^Nw֣#3|>u5=V+ddO=Q+#oEͻs +cSAOn~ؗQؙR߾7~3#3o\[kPNu7wgSz4y9q_Oc<wq?.'QP%C]yI}uϙ4~#`^ܼqaoO[ğ'q`1v@p 9L<7z#}Q 'wVx:{h'1`'=w>Q!O')8i<]S=:z sqۜu:/ʱz0?%{sB\f|f'ī>wuu3#SQKg>&O}eͿ7H-wY'8WS żOџ|{zWGFCRiOjqS6oi |,7$S3>ʏF=*cbHOs'_Tq]}_-BھwgB>V~\ʛrz#m5:&_[G<>hm(r~0~̃h͗y]{=5Wf߭qV?T`i}+зVZޘgus\(/usj1nNaSG.G&8=R}ROoM?a=_N\_aѼu|R;߾N 𠶮#.};zyLpJ}RXg2s|M}7ޙ<9Cu-k%'׍_6v>I!ւCA/)Ωsu(iקoϲO_xN;~r{օy,ե2ѥ}5WI/yƺ!1Q| ~VkГ8׎KG5f3qdAsg^\{6륿.nfѼf43ES~?5/9~&~x|J|NL>>}W1}Gk:'A'|5η|4{0e:x_~}q1 O3=U}<2C{g>R#{!3)V9|h'.~#Ts~y^<kփjqE~wk]~įa~73#`b;|~UWோ'z?O}nx}I'Pg*1+7'/8NqG׷2~W&eGSKޥ^g]ɸҾ 7d?vU?Hܠˇ.$/]+F~ïZ\.jna{ -^?rpx6N Sޭ?~+1Oco'AAShq8C*_ߍr]^JM>o 7Wv*iΥYxBy>h"~*v:~Iݾp uwYi#7SGIT^՗kͳe,XyPN_OY܏;tnՋYɳe~_8JORnn5oK_ZYYq].z7l=C/=CBxyڭŸgij}NuUu&ү3 X_W^[(_ڟ"|%Ldius}wՃKswFoa|f!f򋅜$~[G3?>< >>^4-pğp>n^籛cgnӰ/x>WTo&ǚ8ls*OQwMJyUorif/A _omǥ\߈ ӗ]/ҟ]Q$vQ~igM7ߗs.zoq}^b빛}^h_F/].<W1~y۫xNzE˾,*k߿ٵ+c/R?0Ĺcz =x8W?πnʇx}"p|!O~ߐx{KrzÔݼu̇}WQ!Ϧo:ټޥ.lgG1Oȁ="}N8ekz>k?8}ߙAO7'IO<[/'>iߍct7ofMעNbw)/~g\!|~mK;H~\\7oqtg)\KGp槥O4vۼi3I71^3R_ўAx/Xۋu5~iGC/;Ƨ1&_, ϲ]1?W0Kx8|D7v-c]UOe>]?D_7π8&.3x/wcx/}h-}#gB棏f`\2xw韉3Myջ[ h?; ǬGr|So}q~$~+"]#b޲Q.?yOӿ8=S>+o֋쿴^+K7}GAԓ܇}a/ȅ*|bgW(Kn1?A<.kjs#ͳ;^ȃ߫G_ /1|%L7~֫:qȟۇdܒ E'/b?d_yC^k}?GGvd;_(s#į{oVsg. 
6~ؾ q_=X՛_HW$u$+?U\Oyԟծ>kJmG/.ino'|rwM]Fn|rjTocJ~!ocHG;W{؏cx-'9A7q5}wj]<6?}6Ỹ>l' \w(G/~M̓}5Wn \}`~p*yDqGFuoXnBNhVZ?{`f>﷤g4ՏU}K?94e͛YO=U@βn&֥*jc!vZbӟoO=Xx;gyqJΝq}7PnGC^˺.FԎc'{ q4uǛyyl0j'_%?ͫJMK׾ lo~ߓ|]i;z|t9DgU<׏B{k}>A~'􊟇^w۟|%b~ks~~y9.cZghkO>?c~wk'y\ ͧ(N279Eos[ܳvY&3~ψSQg?!GK?B~mg~86NW֝񝿧w؉Sy5qa a^V{#|o=׾0>Yt仍w>{0Wy'~fS;F>$~O?k5h/b7_|#d3ۣ}}%m>¢+je?}\Y>qb~Ϻ_B+/.{>VN[;|^߽9ߎ;s8OuE3{J|ڬwQIfʧ^?2ӟr<<^X\wjOK[9J8u#msQo/gWo16[mG{uw.f}v4_<%_g]|r77"ߚ|ӈI؛o'>S#描(W}s!]CO>}UM?=zC=NF>n3~oK=1; ˺v4i]48b~s GXsQO~ޏvy#=J>e?ݲ /9>[rn/w_ֱN -xcΝO<m[G/W/<]Ʒ.Ø4ߕ*wK}hw_ 84鏈G-ͷ6qy;:qufdzoξ}q]'<۱!?;#GX>12_QGzue/=<2>0>~Vwy[.#S_Oqgג8>u< ~B'~Ej=V}|~[sL8uq.h~o7M&o7`7M&o7M&o7M&o7M&o7M&o7M&y_qvW\rh.Z۾{S'WN=>'̚u/h=r Z}ц W^̕lof^~vکG?C1sparr/man/0000755000176200001440000000000014024074041012150 5ustar liggesuserssparr/man/spattemp.slice.Rd0000644000176200001440000000710414012076542015401 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spattemp.slice.R \name{spattemp.slice} \alias{spattemp.slice} \title{Slicing a spatiotemporal object} \usage{ spattemp.slice(stob, tt, checkargs = TRUE) } \arguments{ \item{stob}{An object of class \code{\link{stden}} or \code{\link{rrst}} giving the spatiotemporal estimate from which to take slices.} \item{tt}{Desired time(s); the density/risk surface estimate corresponding to which will be returned. This value \bold{must} be in the available range provided by \code{stob$tlim}; see `Details'.} \item{checkargs}{Logical value indicating whether to check validity of \code{stob} and \code{tt}. Disable only if you know this check will be unnecessary.} } \value{ A list of lists of pixel \code{\link[spatstat.geom]{im}}ages, each of which corresponds to the requested times in \code{tt}, and are named as such.\cr If \code{stob} is an object of class \code{\link{stden}}: \item{z}{ Pixel images of the joint spatiotemporal density corresponding to \code{tt}. 
} \item{z.cond}{ Pixel images of the conditional spatiotemporal density given each time in \code{tt}. } If \code{stob} is an object of class \code{\link{rrst}}: \item{rr}{ Pixel images of the joint spatiotemporal relative risk corresponding to \code{tt}. } \item{rr.cond}{ Pixel images of the conditional spatiotemporal relative risk given each time in \code{tt}. } \item{P}{ Only present if \code{tolerate = TRUE} in the preceding call to \code{\link{spattemp.risk}}. Pixel images of the \eqn{p}-value surfaces for the joint spatiotemporal relative risk. } \item{P.cond}{ Only present if \code{tolerate = TRUE} in the preceding call to \code{\link{spattemp.risk}}. Pixel images of the \eqn{p}-value surfaces for the conditional spatiotemporal relative risk. } } \description{ Takes slices of the spatiotemporal kernel density or relative risk function estimate at desired times } \details{ Contents of the \code{stob} argument are returned based on a discretised set of times. This function internally computes the desired surfaces as pixel-by-pixel linear interpolations using the two discretised times that bound each requested \code{tt}. The function returns an error if any of the requested slices at \code{tt} are not within the available range of times as given by the \code{tlim} component of \code{stob}. 
} \examples{ \donttest{ data(fmd) fmdcas <- fmd$cases fmdcon <- fmd$controls f <- spattemp.density(fmdcas,h=6,lambda=8) g <- bivariate.density(fmdcon,h0=6) rho <- spattemp.risk(f,g,tolerate=TRUE) f$tlim # requested slices must be in this range # slicing 'stden' object f.slice1 <- spattemp.slice(f,tt=50) # evaluation timestamp f.slice2 <- spattemp.slice(f,tt=150.5) # interpolated timestamp par(mfrow=c(2,2)) plot(f.slice1$z$'50') plot(f.slice1$z.cond$'50') plot(f.slice2$z$'150.5') plot(f.slice2$z.cond$'150.5') # slicing 'rrst' object rho.slices <- spattemp.slice(rho,tt=c(50,150.5)) par(mfrow=c(2,2)) plot(rho.slices$rr$'50');tol.contour(rho.slices$P$'50',levels=0.05,add=TRUE) plot(rho.slices$rr$'150.5');tol.contour(rho.slices$P$'150.5',levels=0.05,add=TRUE) plot(rho.slices$rr.cond$'50');tol.contour(rho.slices$P.cond$'50',levels=0.05,add=TRUE) plot(rho.slices$rr.cond$'150.5');tol.contour(rho.slices$P.cond$'150.5',levels=0.05,add=TRUE) } } \references{ Fernando, W.T.P.S. and Hazelton, M.L. (2014), Generalizing the spatial relative risk function, \emph{Spatial and Spatio-temporal Epidemiology}, \bold{8}, 1-10. } \seealso{ \code{\link{spattemp.density}}, \code{\link{spattemp.risk}}, \code{\link{bivariate.density}} } \author{ T.M. 
Davies } sparr/man/multiscale.density.Rd0000644000176200001440000001766614012076542016304 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/multiscale.density.R \name{multiscale.density} \alias{multiscale.density} \alias{msden} \title{Multi-scale adaptive kernel density/intensity estimation} \usage{ multiscale.density( pp, h0, hp = NULL, h0fac = c(0.25, 1.5), edge = c("uniform", "none"), resolution = 128, dimz = 64, gamma.scale = "geometric", trim = 5, intensity = FALSE, pilot.density = NULL, xy = NULL, taper = TRUE, verbose = TRUE ) } \arguments{ \item{pp}{An object of class \code{\link[spatstat.geom]{ppp}} giving the observed 2D data set to be smoothed.} \item{h0}{Reference global bandwidth for adaptive smoothing; numeric value > 0. Multiscale estimates will be computed by rescaling this value as per \code{h0fac}.} \item{hp}{Pilot bandwidth (scalar, numeric > 0) to be used for fixed bandwidth estimation of the pilot density. If \code{NULL} (default), it will take on the value of \code{h0}. Ignored when \code{pilot.density} is supplied as a pre-defined pixel image.} \item{h0fac}{A numeric vector of length 2 stipulating the span of the global bandwidths in the multiscale estimates. Interpreted as a multiplicative factor on \code{h0}. See `Details'.} \item{edge}{Character string dictating edge correction. \code{"uniform"} (default) corrects based on evaluation grid coordinate. Setting \code{edge="none"} requests no edge correction.} \item{resolution}{Numeric value > 0. Resolution of evaluation grid in the spatial domain; the densities/intensities will be returned on a [\code{resolution} \eqn{\times}{x} \code{resolution}] grid.} \item{dimz}{Resolution of z- (rescaled bandwidth)-axis in the trivariate convolution. Higher values increase precision of the multiscale estimates at a computational cost. See `Details'.} \item{gamma.scale}{Scalar, numeric value > 0; controls rescaling of the variable bandwidths. 
Defaults to the geometric mean of the bandwidth factors given the pilot density (as per Silverman, 1986). See the documentation for \code{\link{bivariate.density}}.} \item{trim}{Numeric value > 0; controls bandwidth truncation for adaptive estimation. See the documentation for \code{\link{bivariate.density}}.} \item{intensity}{Logical value indicating whether to return an intensity estimate (integrates to the sample size over the study region), or a density estimate (default, integrates to 1).} \item{pilot.density}{An optional pixel image (class \code{\link[spatstat.geom]{im}}) giving the pilot density to be used for calculation of the variable bandwidths in adaptive estimation, \bold{or} a \code{\link[spatstat.geom:ppp]{ppp.object}} giving the data upon which to base a fixed-bandwidth pilot estimate using \code{hp}. See the documentation for \code{\link{bivariate.density}}.} \item{xy}{Optional alternative specification of the spatial evaluation grid; matches the argument of the same tag in \code{\link[spatstat.geom]{as.mask}}. If supplied, \code{resolution} is ignored.} \item{taper}{Logical value indicating whether to taper off the trivariate kernel outside the range of \code{h0*h0fac} in the scale space; see Davies & Baddeley (2018). Keep at the default \code{TRUE} if you don't know what this means.} \item{verbose}{Logical value indicating whether to print function progress.} } \value{ An object of class \code{"msden"}. This is very similar to a \code{\link{bivden}} object, with lists of pixel \code{\link[spatstat.geom]{im}}ages in the \code{z}, \code{him}, and \code{q} components (instead of standalone images). \item{z}{A list of the resulting density/intensity estimates; each member being a pixel image object of class \code{\link[spatstat.geom]{im}}. 
They are placed in increasing order of the discretised values of \code{h0}.} \item{h0}{A copy of the reference value of \code{h0} used.} \item{h0range}{A vector of length 2 giving the actual range of global bandwidth values available (inclusive).} \item{hp}{A copy of the value of \code{hp} used.} \item{h}{A numeric vector of length equal to the number of data points, giving the bandwidth used for the corresponding observation in \code{pp} with respect to the reference global bandwidth \code{h0}.} \item{him}{A list of pixel images (class \code{\link[spatstat.geom]{im}}), corresponding to \code{z}, giving the `hypothetical' Abramson bandwidth at each pixel coordinate conditional upon the observed data and the global bandwidth used.} \item{q}{Edge-correction weights; list of pixel \code{\link[spatstat.geom]{im}}ages corresponding to \code{z} if \code{edge = "uniform"}, and \code{NULL} if \code{edge = "none"}.} \item{gamma}{The numeric value of \code{gamma.scale} used in scaling the bandwidths.} \item{geometric}{The geometric mean of the untrimmed variable bandwidth factors. This will be identical to \code{gamma} if \code{gamma.scale = "geometric"} as per default.} \item{pp}{A copy of the \code{\link[spatstat.geom:ppp]{ppp.object}} initially passed to the \code{pp} argument, containing the data that were smoothed.} } \description{ Computes adaptive kernel estimates of spatial density/intensity using a 3D FFT for multiple global bandwidth scales. } \details{ Davies & Baddeley (2018) investigated computational aspects of Abramson's (1982) adaptive kernel smoother for spatial (2D) data. This function is the implementation of the 3D convolution via a fast-Fourier transform (FFT) which allows simultaneous calculation of an adaptive kernel estimate at multiple global bandwidth scales. These `multiple global bandwidth scales' are computed with respect to rescaling a reference value of the global bandwidth passed to the \code{h0} argument. 
This rescaling is defined by the range provided to the argument \code{h0fac}. For example, by default, the function will compute the adaptive kernel estimate for a range of global bandwidths between 0.25*\code{h0} and 1.5*\code{h0}. The exact numeric limits are subject to discretisation, and so the returned valid range of global bandwidths will differ slightly. The exact resulting range following function execution is returned as the \code{h0range} element of the result, see `Value' below. The distinct values of global bandwidth used (which define the aforementioned \code{h0range}) and hence the total number of pixel \code{\link[spatstat.geom]{im}ages} returned depend on both the width of the span \code{h0fac} and the discretisation applied to the bandwidth axis through \code{dimz}. Increasing this z-resolution will provide more pixel images and hence greater numeric precision, but increases computational cost. The returned pixel \code{\link[spatstat.geom]{im}ages} that represent the multiscale estimates are stored in a named list (see `Value'), whose names reflect the corresponding distinct global bandwidth. See `Examples' for the easy way to extract these distinct global bandwidths. The user can request an interpolated density/intensity estimate for any global bandwidth value within \code{h0range} by using the \code{\link{multiscale.slice}} function, which returns an object of class \code{\link{bivden}}. } \examples{ \donttest{ data(chorley) # Chorley-Ribble data (package 'spatstat') ch.multi <- multiscale.density(chorley,h0=1) plot(ch.multi) ch.pilot <- bivariate.density(chorley,h0=0.75) # with pre-defined pilot density ch.multi2 <- multiscale.density(chorley,h0=1,pilot.density=ch.pilot$z) plot(ch.multi2) data(pbc) # widen h0 scale, increase z-axis resolution pbc.multi <- multiscale.density(pbc,h0=2,hp=1,h0fac=c(0.25,2.5),dimz=128) plot(pbc.multi) } } \references{ Abramson, I. (1982). 
On bandwidth variation in kernel estimates --- a square root law, \emph{Annals of Statistics}, \bold{10}(4), 1217-1223. Davies, T.M. and Baddeley A. (2018), Fast computation of spatially adaptive kernel estimates, \emph{Statistics and Computing}, \bold{28}(4), 937-956. Silverman, B.W. (1986), \emph{Density Estimation for Statistics and Data Analysis}, Chapman & Hall, New York. } \seealso{ \code{\link{bivariate.density}}, \code{\link{multiscale.slice}} } \author{ T.M. Davies and A. Baddeley } sparr/man/LSCV.spattemp.Rd0000644000176200001440000001004514012076542015047 0ustar liggesusers\name{LIK.spattemp} \alias{LSCV.spattemp} \alias{LIK.spattemp} %- Also NEED an '\alias' for EACH other topic documented here. \title{ Cross-validation bandwidths for spatiotemporal kernel density estimates } \description{ Bandwidth selection for standalone spatiotemporal density/intensity based on either unbiased least squares cross-validation (LSCV) or likelihood (LIK) cross-validation, providing an isotropic scalar spatial bandwidth and a scalar temporal bandwidth. } \usage{ LIK.spattemp(pp, tt = NULL, tlim = NULL, sedge = c("uniform", "none"), tedge = sedge, parallelise = NA, start = NULL, verbose = TRUE) LSCV.spattemp(pp, tt = NULL, tlim = NULL, sedge = c("uniform", "none"), tedge = sedge, sres = 64, tres = sres, parallelise = NA, start = NULL, verbose = TRUE) } %- maybe also 'usage' for other objects documented here. \arguments{ \item{pp}{ An object of class \code{\link[spatstat.geom]{ppp}} giving the spatial coordinates of the observations to be smoothed. Possibly marked with the time of each event; see argument \code{tt}. } \item{tt}{ A numeric vector of equal length to the number of points in \code{pp}, giving the time corresponding to each spatial observation. If unsupplied, the function attempts to use the values in the \code{\link[spatstat.geom]{marks}} attribute of the \code{\link[spatstat.geom:ppp]{ppp.object}} in \code{pp}. 
} \item{tlim}{ A numeric vector of length 2 giving the limits of the temporal domain over which to smooth. If supplied, all times in \code{tt} must fall within this interval (equality with limits allowed). If unsupplied, the function simply uses the range of the observed temporal values. } \item{sedge}{ Character string dictating spatial edge correction. \code{"uniform"} (default) corrects based on evaluation grid coordinate. Setting \code{sedge="none"} requests no edge correction. } \item{tedge}{ As \code{sedge}, for temporal edge correction. } \item{sres}{ Numeric value > 0. Resolution of the [\code{sres} \eqn{\times}{x} \code{sres}] evaluation grid in the spatial margin. } \item{tres}{ Numeric value > 0. Resolution of the evaluation points in the temporal margin as defined by the \code{tlim} interval. If unsupplied, the density is evaluated at integer values between \code{tlim[1]} and \code{tlim[2]}. } \item{parallelise}{ Optional numeric argument to invoke parallel processing, by giving the number of CPU cores to use in the optimisation. This is only useful for larger data sets of many thousand observations. Experimental. Test your system first using \code{parallel::detectCores()} to identify the number of cores available to you. } \item{start}{ Optional positive numeric vector of length 2 giving starting values for the internal call to \code{\link[stats]{optim}}, in the order of (\eqn{h}, \eqn{\lambda}). } \item{verbose}{ Logical value indicating whether to print a function progress bar to the console during evaluation. } } \value{ A numeric vector of length 2 giving the jointly optimised spatial and temporal bandwidths (named \code{h} and \code{lambda} respectively). } \references{ Silverman, B.W. (1986), \emph{Density Estimation for Statistics and Data Analysis}, Chapman & Hall, New York. } \author{ T. M.
Davies } \section{Warning}{ Leave-one-out CV for bandwidth selection in kernel density estimation is notoriously unstable in practice and has a tendency to produce rather small bandwidths in the fixed bandwidth case. Satisfactory bandwidths are not guaranteed for every application. This method can also be computationally expensive for large data sets and fine evaluation grid resolutions. } \seealso{ \code{\link{BOOT.spattemp}}, \code{\link{spattemp.density}} } \examples{ \donttest{ data(burk) # Burkitt's Uganda lymphoma data burkcas <- burk$cases hlam1 <- LSCV.spattemp(burkcas) #~9 secs hlam2 <- LSCV.spattemp(burkcas,tlim=c(400,5800)) hlam3 <- LSCV.spattemp(burkcas,start=c(7,400)) rbind(hlam1,hlam2,hlam3) hlam1 <- LIK.spattemp(burkcas) #~3 secs hlam2 <- LIK.spattemp(burkcas,tlim=c(400,5800)) hlam3 <- LIK.spattemp(burkcas,start=c(7,400)) rbind(hlam1,hlam2,hlam3) } } sparr/man/rimpoly.Rd0000644000176200001440000000615014012076542014141 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/rimpoly.R \name{rimpoly} \alias{rimpoly} \title{Random point generation inside polygon} \usage{ rimpoly(n, z, w = NULL, correction = 1.1, maxpass = 50) } \arguments{ \item{n}{Number of points to generate.} \item{z}{A pixel image of class \code{\link[spatstat.geom]{im}} defining the probability density of the points, possibly unnormalised.} \item{w}{A polygonal window of class \code{\link[spatstat.geom]{owin}}. See `Details'.} \item{correction}{An adjustment to the number of points generated at the initial pass of the internal loop in an effort to minimise the total number of passes required to reach \eqn{n} points. See `Details'.} \item{maxpass}{The maximum number of passes allowed before the function exits. 
If this is reached before \eqn{n} points are found that fall within \code{w}, a warning is issued.} } \value{ An object of class \code{\link[spatstat.geom]{ppp}} containing the \code{n} generated points, defined with the polygonal \code{\link[spatstat.geom]{owin}}, \code{w}. } \description{ Generates a random point pattern of \eqn{n} iid points with any specified distribution based on a pixel image and a corresponding polygonal window. } \details{ This function is a deliberate variant of \code{\link[spatstat.core]{rpoint}} (Baddeley et. al, 2015), to be accessed when the user desires a randomly generated point pattern based on a pixel image, but wants the window of the point pattern to be a corresponding irregular polygon, as opposed to a binary image mask (which, when converted to a polygon directly, gives jagged edges based on the union of the pixels). When the user specifies their own polygonal window, a \code{while} loop is called and repeated as many times as necessary (up to \code{maxpass} times) to find \code{n} points inside \code{w} (when \code{w = NULL}, then the aforementioned union of the pixels of \code{z} is used, obtained via \code{as.polygonal(Window(z))}). The loop is necessary because the standard behaviour of \code{\link[spatstat.core]{rpoint}} can (and often does) yield points that sit in corners of pixels which lie outside the corresponding \code{w}. The \code{correction} argument is used to determine how many points are generated initially, which will be \code{ceiling(correction*n)}; to minimise the number of required passes over the loop this is by default set to give a number slightly higher than the requested \code{n}. An error is thrown if \code{Window(z)} and \code{w} do not overlap. 
} \examples{ data(pbc) Y <- bivariate.density(pbc,h0=2.5,res=25) # Direct use of 'rpoint': A <- rpoint(500,Y$z) npoints(A) # Using 'rimpoly' without supplying polygon: B <- rimpoly(500,Y$z) npoints(B) # Using 'rimpoly' with the original pbc polygonal window: C <- rimpoly(500,Y$z,Window(Y$pp)) npoints(C) par(mfrow=c(1,3)) plot(A,main="rpoint") plot(B,main="rimpoly (no polygon supplied)") plot(C,main="rimpoly (original polygon supplied)") } \references{ Baddeley, A., Rubak, E. and Turner, R. (2015) \emph{Spatial Point Patterns: Methodology and Applications with R}, Chapman and Hall/CRC Press, UK. } \author{ T.M. Davies } sparr/man/summarysparr.Rd0000644000176200001440000000157114012076542015215 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/summary.bivden.R, R/summary.msden.R, % R/summary.rrs.R, R/summary.rrst.R, R/summary.stden.R \name{summary.bivden} \alias{summary.bivden} \alias{summary.rrs} \alias{summary.msden} \alias{summary.stden} \alias{summary.rrst} \title{Summarising sparr objects} \usage{ \method{summary}{bivden}(object, ...) \method{summary}{msden}(object, ...) \method{summary}{rrs}(object, ...) \method{summary}{rrst}(object, ...) \method{summary}{stden}(object, ...) } \arguments{ \item{object}{An object of class \code{\link{bivden}}, \code{\link{stden}}, \code{\link{rrs}}, \code{\link{rrst}}, or \code{\link{msden}}.} \item{...}{Ignored.} } \description{ \code{summary} methods for classes \code{\link{bivden}}, \code{\link{stden}}, \code{\link{rrs}}, \code{\link{rrst}} and \code{\link{msden}}. } \author{ T.M. 
Davies } sparr/man/fmd.Rd0000644000176200001440000000375114012076542013220 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/fmd-data.R \docType{data} \name{fmd} \alias{fmd} \title{Veterinary foot-and-mouth disease outbreak data} \format{ \code{fmd} is a named list with two members: \describe{ \item{\code{$cases}}{ An object of class \code{\link[spatstat.geom]{ppp}} giving the spatial locations of the 410 infected farms within a polygonal study region representing the county of Cumbria. The \code{\link[spatstat.geom]{marks}} component of this object contain the integer day of infection (from beginning of study period). } \item{\code{$controls}}{ An object of class \code{\link[spatstat.geom]{ppp}} defined over the same spatial study region with the locations of the 1866 uninfected farms. } } } \description{ Data of the spatial locations and time of farms infected by veterinary foot-and-mouth disease in the county of Cumbria, UK, over a course of nearly 250 days between February and August in 2001. There are 410 infected farms (the cases), and 1866 uninfected farms (the controls). The data have been jittered and randomly thinned by an unspecified amount to preserve anonymity. } \section{Acknowledgements}{ The Animal and Plant Health Agency (APHA), UK, provided permission to use this dataset. } \examples{ data(fmd) summary(fmd$cases) summary(fmd$controls) par(mfrow=c(1,2)) plot(fmd$cases) plot(fmd$controls) } \references{ Fernando, W.T.P.S. and Hazelton, M.L. (2014), Generalizing the spatial relative risk function, \emph{Spatial and Spatio-temporal Epidemiology}, \bold{8}, 1-10. Keeling M, Woolhouse M, Shaw D, Matthews L, Chase-Topping M, Haydon D, et al. (2001), Dynamics of the 2001 UK foot and mouth epidemic: stochastic dispersal in a heterogeneous landscape, \emph{Science}, \bold{294}, 813-817. Lawson A, Zhou H. 
(2005), Spatial statistical modeling of disease outbreaks with particular reference to the UK foot and mouth disease (FMD) epidemic of 2001, \emph{Preventative Veterinary Medicine}, \bold{71}, 141-156. } \keyword{data} sparr/man/spattemp.risk.Rd0000644000176200001440000001535514012076542015261 0ustar liggesusers\name{spattemp.risk} \alias{spattemp.risk} \alias{rrst} %- Also NEED an '\alias' for EACH other topic documented here. \title{ Spatiotemporal relative risk/density ratio } \description{ Produces a spatiotemporal relative risk surface based on the ratio of two kernel estimates of spatiotemporal densities. } \usage{ spattemp.risk(f, g, log = TRUE, tolerate = FALSE, finiteness = TRUE, verbose = TRUE) } %- maybe also 'usage' for other objects documented here. \arguments{ \item{f}{ An object of class \code{\link{stden}} representing the `case' (numerator) density estimate. } \item{g}{ Either an object of class \code{\link{stden}}, or an object of class \code{\link{bivden}} for the `control' (denominator) density estimate. This object \bold{must} match the spatial (and temporal, if \code{\link{stden}}) domain of \code{f} completely; see `Details'. } \item{log}{ Logical value indicating whether to return the log relative risk (default) or the raw ratio. } \item{tolerate}{ Logical value indicating whether to compute and return asymptotic \eqn{p}-value surfaces for elevated risk; see `Details'. } \item{finiteness}{ Logical value indicating whether to internally correct infinite risk (on the log-scale) to the nearest finite value to avoid numerical problems. A small extra computational cost is required. } \item{verbose}{ Logical value indicating whether to print function progress during execution. } } \details{ Fernando & Hazelton (2014) generalise the spatial relative risk function (e.g. Kelsall & Diggle, 1995) to the spatiotemporal domain. 
This is the implementation of their work, yielding the generalised log-relative risk function for \eqn{x\in W\subset R^2} and \eqn{t\in T\subset R}. It produces \deqn{\hat{\rho}(x,t)=\log(\hat{f}(x,t))-\log(\hat{g}(x,t)),} where \eqn{\hat{f}(x,t)} is a fixed-bandwidth kernel estimate of the spatiotemporal density of the cases (argument \code{f}) and \eqn{\hat{g}(x,t)} is the same for the controls (argument \code{g}). \itemize{ \item When argument \code{g} is an object of class \code{\link{stden}} arising from a call to \code{\link{spattemp.density}}, the resolution, spatial domain, and temporal domain of this spatiotemporal estimate must match that of \code{f} exactly, else an error will be thrown. \item When argument \code{g} is an object of class \code{\link{bivden}} arising from a call to \code{\link{bivariate.density}}, it is assumed the `at-risk' control density is static over time. In this instance, the above equation for the relative risk becomes \eqn{\hat{\rho}=\log(\hat{f}(x,t))+\log|T|-\log(g(x))}. The spatial density estimate in \code{g} must match the spatial domain of \code{f} exactly, else an error will be thrown. \item The estimate \eqn{\hat{\rho}(x,t)} represents the joint or unconditional spatiotemporal relative risk over \eqn{W\times T}. This means that the raw relative risk \eqn{\hat{r}(x,t)=\exp{\hat{\rho}(x,t)}} integrates to 1 with respect to the control density over space and time: \eqn{\int_W \int_T r(x,t)g(x,t) dt dx = 1}. This function also computes the \bold{conditional} spatiotemporal relative risk at each time point, namely \deqn{\hat{\rho}(x|t)=\log{\hat{f}(x|t)}-\log{\hat{g}(x|t)},} where \eqn{\hat{f}(x|t)} and \eqn{\hat{g}(x|t)} are the conditional densities over space of the cases and controls given a specific time point \eqn{t} (see the documentation for \code{\link{spattemp.density}}). In terms of normalisation, we therefore have \eqn{\int_W r(x|t)g(x|t) dx = 1}. 
In the case where \eqn{\hat{g}} is static over time, one may simply replace \eqn{\hat{g}(x|t)} with \eqn{\hat{g}(x)} in the above. \item Based on the asymptotic properties of the estimator, Fernando & Hazelton (2014) also define the calculation of tolerance contours for detecting statistically significant fluctuations in such spatiotemporal log-relative risk surfaces. This function can produce the required \eqn{p}-value surfaces by setting \code{tolerate = TRUE}; and if so, results are returned for both the unconditional (x,t) and conditional (x|t) surfaces. See the examples in the documentation for \code{\link{plot.rrst}} for details on how one may superimpose contours at specific \eqn{p}-values for given evaluation times \eqn{t} on a plot of relative risk on the spatial margin. %Calculation of these \eqn{p}-values requires a common estimate of the space-time heterogeneity, which is performed internally by pooling the case and control data, and making use of the geometric means of the spatial- and temporal-margin bandwidths in \code{f} and \code{g} (typically, these are the same for both case and control densities anyway). } } \value{ An object of class ``\code{rrst}''. This is effectively a list with the following members: \item{rr}{ A named (by time-point) list of pixel \code{\link[spatstat.geom]{im}}ages corresponding to the joint spatiotemporal relative risk over space at each discretised time. } \item{rr.cond}{ A named list of pixel \code{\link[spatstat.geom]{im}}ages corresponding to the conditional spatial relative risk given each discretised time. } \item{P}{ A named list of pixel \code{\link[spatstat.geom]{im}}ages of the \eqn{p}-value surfaces testing for elevated risk for the joint estimate. If \code{tolerate = FALSE}, this will be \code{NULL}. } \item{P.cond}{ As above, for the conditional relative risk surfaces. } \item{f}{ A copy of the object \code{f} used in the initial call. } \item{g}{ As above, for \code{g}. 
} \item{tlim}{ A numeric vector of length two giving the temporal bound of the density estimate. } } \references{ Fernando, W.T.P.S. and Hazelton, M.L. (2014), Generalizing the spatial relative risk function, \emph{Spatial and Spatio-temporal Epidemiology}, \bold{8}, 1-10.\cr\cr } \author{ T.M. Davies } %\note{ %% ~~further notes~~ %} %% ~Make other sections like Warning with \section{Warning }{....} ~ \seealso{ \code{\link{spattemp.density}}, \code{\link{spattemp.slice}}, \code{\link{bivariate.density}} } \examples{ \donttest{ data(fmd) fmdcas <- fmd$cases fmdcon <- fmd$controls f <- spattemp.density(fmdcas,h=6,lambda=8) # stden object as time-varying case density g <- bivariate.density(fmdcon,h0=6) # bivden object as time-static control density rho <- spattemp.risk(f,g,tolerate=TRUE) print(rho) par(mfrow=c(2,3)) plot(rho$f$spatial.z,main="Spatial margin (cases)") # spatial margin of cases plot(rho$f$temporal.z,main="Temporal margin (cases)") # temporal margin of cases plot(rho$g$z,main="Spatial margin (controls)") # spatial margin of controls plot(rho,tselect=50,type="conditional",tol.args=list(levels=c(0.05,0.0001), lty=2:1,lwd=1:2),override.par=FALSE) plot(rho,tselect=100,type="conditional",tol.args=list(levels=c(0.05,0.0001), lty=2:1,lwd=1:2),override.par=FALSE) plot(rho,tselect=200,type="conditional",tol.args=list(levels=c(0.05,0.0001), lty=2:1,lwd=1:2),override.par=FALSE) } } sparr/man/bivariate.density.Rd0000644000176200001440000003022414012076542016071 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/bivariate.density.R \name{bivariate.density} \alias{bivariate.density} \alias{bivden} \title{Bivariate kernel density/intensity estimation} \usage{ bivariate.density( pp, h0, hp = NULL, adapt = FALSE, resolution = 128, gamma.scale = "geometric", edge = c("uniform", "diggle", "none"), weights = NULL, intensity = FALSE, trim = 5, xy = NULL, pilot.density = NULL, leaveoneout = FALSE, parallelise = NULL, 
davies.baddeley = NULL, verbose = TRUE ) } \arguments{ \item{pp}{An object of class \code{\link[spatstat.geom]{ppp}} giving the observed 2D data set to be smoothed.} \item{h0}{Global bandwidth for adaptive smoothing or fixed bandwidth for constant smoothing. A numeric value > 0.} \item{hp}{Pilot bandwidth (scalar, numeric > 0) to be used for fixed bandwidth estimation of a pilot density in the case of adaptive smoothing. If \code{NULL} (default), it will take on the value of \code{h0}. Ignored when \code{adapt = FALSE} or if \code{pilot.density} is supplied as a pre-defined pixel image.} \item{adapt}{Logical value indicating whether to perform adaptive kernel estimation. See `Details'.} \item{resolution}{Numeric value > 0. Resolution of evaluation grid; the density/intensity will be returned on a [\code{resolution} \eqn{\times}{x} \code{resolution}] grid.} \item{gamma.scale}{Scalar, numeric value > 0; controls rescaling of the variable bandwidths. Defaults to the geometric mean of the bandwidth factors given the pilot density (as per Silverman, 1986). See `Details'.} \item{edge}{Character string giving the type of edge correction to employ. \code{"uniform"} (default) corrects based on evaluation grid coordinate and \code{"diggle"} reweights each observation-specific kernel. Setting \code{edge = "none"} requests no edge correction. Further details can be found in the documentation for \code{\link[spatstat.core]{density.ppp}}.} \item{weights}{Optional numeric vector of nonnegative weights corresponding to each observation in \code{pp}. Must have length equal to \code{npoints(pp)}.} \item{intensity}{Logical value indicating whether to return an intensity estimate (integrates to the sample size over the study region), or a density estimate (default, integrates to 1).} \item{trim}{Numeric value > 0; controls bandwidth truncation for adaptive estimation. 
See `Details'.} \item{xy}{Optional alternative specification of the evaluation grid; matches the argument of the same tag in \code{\link[spatstat.geom]{as.mask}}. If supplied, \code{resolution} is ignored.} \item{pilot.density}{An optional pixel image (class \code{\link[spatstat.geom]{im}}) giving the pilot density to be used for calculation of the variable bandwidths in adaptive estimation, \bold{or} a \code{\link[spatstat.geom:ppp]{ppp.object}} giving the data upon which to base a fixed-bandwidth pilot estimate using \code{hp}. If used, the pixel image \emph{must} be defined over the same domain as the data given \code{resolution} or the supplied pre-set \code{xy} evaluation grid; \bold{or} the planar point pattern data must be defined with respect to the same polygonal study region as in \code{pp}.} \item{leaveoneout}{Logical value indicating whether to compute and return the value of the density/intensity at each data point for an adaptive estimate. See `Details'.} \item{parallelise}{Numeric argument to invoke parallel processing, giving the number of CPU cores to use when \code{leaveoneout = TRUE}. Experimental. Test your system first using \code{parallel::detectCores()} to identify the number of cores available to you.} \item{davies.baddeley}{An optional numeric vector of length 3 to control bandwidth partitioning for approximate adaptive estimation, giving the quantile step values for the variable bandwidths for density/intensity and edge correction surfaces and the resolution of the edge correction surface. May also be provided as a single numeric value. See `Details'.} \item{verbose}{Logical value indicating whether to print a function progress bar to the console when \code{adapt = TRUE}.} } \value{ If \code{leaveoneout = FALSE}, an object of class \code{"bivden"}. 
This is effectively a list with the following components: \item{z}{The resulting density/intensity estimate, a pixel image object of class \code{\link[spatstat.geom]{im}}.} \item{h0}{A copy of the value of \code{h0} used.} \item{hp}{A copy of the value of \code{hp} used.} \item{h}{A numeric vector of length equal to the number of data points, giving the bandwidth used for the corresponding observation in \code{pp}.} \item{him}{A pixel image (class \code{\link[spatstat.geom]{im}}), giving the `hypothetical' Abramson bandwidth at each pixel coordinate conditional upon the observed data. \code{NULL} for fixed-bandwidth estimates.} \item{q}{Edge-correction weights; a pixel \code{\link[spatstat.geom]{im}}age if \code{edge = "uniform"}, a numeric vector if \code{edge = "diggle"}, and \code{NULL} if \code{edge = "none"}.} \item{gamma}{The value of \eqn{\gamma} used in scaling the bandwidths. \code{NA} if a fixed bandwidth estimate is computed.} \item{geometric}{The geometric mean \eqn{G} of the untrimmed bandwidth factors \eqn{\tilde{f}(x_i)^{-1/2}}. \code{NA} if a fixed bandwidth estimate is computed.} \item{pp}{A copy of the \code{\link[spatstat.geom:ppp]{ppp.object}} initially passed to the \code{pp} argument, containing the data that were smoothed.} Else, if \code{leaveoneout = TRUE}, simply a numeric vector of length equal to the number of data points, giving the leave-one-out value of the function at the corresponding coordinate. } \description{ Provides an isotropic adaptive or fixed bandwidth kernel density/intensity estimate of bivariate/planar/2D data. } \details{ Given a data set \eqn{x_1,\dots,x_n} in 2D, the isotropic kernel estimate of its probability density function, \eqn{\hat{f}(x)}{\hat{f}(x)}, is given by \deqn{\hat{f}(y)=n^{-1}\sum_{i=1}^{n}h(x_i)^{-2}K((y-x_i)/h(x_i)) } where \eqn{h(x)}{h(x)} is the bandwidth function, and \eqn{K(.)} is the bivariate standard normal smoothing kernel. Edge-correction factors (not shown above) are also implemented. 
\describe{ \item{\bold{Fixed}}{ The classic fixed bandwidth kernel estimator is used when \code{adapt = FALSE}. This amounts to setting \eqn{h(u)=}\code{h0} for all \eqn{u}. Further details can be found in the documentation for \code{\link[spatstat.core]{density.ppp}}.} \item{\bold{Adaptive}}{Setting \code{adapt = TRUE} requests computation of Abramson's (1982) variable-bandwidth estimator. Under this framework, we have \eqn{h(u)=}\code{h0}*min[\eqn{\tilde{f}(u)^{-1/2}},\eqn{G*}\code{trim}]/\eqn{\gamma}, where \eqn{\tilde{f}(u)} is a fixed-bandwidth kernel density estimate computed using the pilot bandwidth \code{hp}. \itemize{ \item Global smoothing of the variable bandwidths is controlled with the global bandwidth \code{h0}. \item In the above statement, \eqn{G} is the geometric mean of the ``bandwidth factors'' \eqn{\tilde{f}(x_i)^{-1/2}}; \eqn{i=1,\dots,n}. By default, the variable bandwidths are rescaled by \eqn{\gamma=G}, which is set with \code{gamma.scale = "geometric"}. This allows \code{h0} to be considered on the same scale as the smoothing parameter in a fixed-bandwidth estimate i.e. on the scale of the recorded data. You can use any other rescaling of \code{h0} by setting \code{gamma.scale} to be any scalar positive numeric value; though note this only affects \eqn{\gamma} -- see the next bullet. When using a scale-invariant \code{h0}, set \code{gamma.scale = 1}. \item The variable bandwidths must be trimmed to prevent excessive values (Hall and Marron, 1988). This is achieved through \code{trim}, as can be seen in the equation for \eqn{h(u)} above. The trimming of the variable bandwidths is universally enforced by the geometric mean of the bandwidth factors \eqn{G} independent of the choice of \eqn{\gamma}. By default, the function truncates bandwidth factors at five times their geometric mean. For stricter trimming, reduce \code{trim}, for no trimming, set \code{trim = Inf}. 
\item For even moderately sized data sets and evaluation grid \code{resolution}, adaptive kernel estimation can be rather computationally expensive. The argument \code{davies.baddeley} is used to approximate an adaptive kernel estimate by a sum of fixed bandwidth estimates operating on appropriate subsets of \code{pp}. These subsets are defined by ``bandwidth bins'', which themselves are delineated by a quantile step value \eqn{0<\delta<1}. E.g. setting \eqn{\delta=0.05} will create 20 bandwidth bins based on the 0.05th quantiles of the Abramson variable bandwidths. Adaptive edge-correction also utilises the partitioning, with pixel-wise bandwidth bins defined using the value \eqn{0<\beta<1}, and the option to decrease the resolution of the edge-correction surface for computation to a [\eqn{L} \eqn{\times}{x} \eqn{L}] grid, where \eqn{0 = 2)\cr } Kernel smoothing, and the flexibility afforded by this methodology, provides an attractive approach to estimating complex probability density functions. The \emph{spatial relative risk function}, constructed as a ratio of estimated case to control densities (Bithell, 1990; 1991; Kelsall and Diggle, 1995a,b), describes the variation in the `risk' of the disease, given the underlying at-risk population. This is a technique that has been applied successfully for mainly exploratory purposes in a number of different analyses (see for example Sabel et al., 2000; Prince et al., 2001; Wheeler, 2007). It has also grown in popularity in very different fields that pose similarly styled research questions, such as ecology (e.g. Campos and Fedigan, 2014); physiology (Davies et al., 2013); and archaeology (e.g. Bevan, 2012; Smith et al. 2015). This package provides functions for spatial (i.e. bivariate/planar/2D) kernel density estimation (KDE), implementing both fixed and `variable' or `adaptive' (Abramson, 1982) smoothing parameter options. 
A selection of bandwidth calculators for bivariate KDE and the relative risk function are provided, including one based on the maximal smoothing principle (Terrell, 1990), and others involving a leave-one-out cross-validation (see below). In addition, the ability to construct both Monte-Carlo and asymptotic \emph{p}-value surfaces (`tolerance' contours of which signal statistically significant sub-regions of extremity in a risk surface - Hazelton and Davies, 2009; Davies and Hazelton, 2010) as well as some visualisation tools are provided. Spatiotemporal estimation is also supported, largely following developments in Fernando and Hazelton (2014). This includes their fixed-bandwidth kernel estimator of spatiotemporal densities, relative risk, and asymptotic tolerance contours. Key content of \code{sparr} can be broken up as follows:\cr \bold{DATASETS/DATA GENERATION} \code{\link{pbc}} a case/control planar point pattern (\code{\link[spatstat.geom:ppp]{ppp.object}}) concerning liver disease in northern England. \code{\link{fmd}} an anonymised (jittered) case/control spatiotemporal point pattern of the 2001 outbreak of veterinary foot-and-mouth disease in Cumbria (courtesy of the Animal and Plant Health Agency, UK). \code{\link{burk}} a spatiotemporal point pattern of Burkitt's lymphoma in Uganda; artificially simulated control data are also provided for experimentation. Also available are a number of relevant additional spatial datasets built-in to the \code{\link[spatstat]{spatstat}} package (Baddeley and Turner, 2005; Baddeley et al., 2015) through \code{spatstat.data}, such as \code{\link[spatstat.data]{chorley}}, which concerns the distribution of laryngeal cancer in an area of Lancashire, UK.
\code{\link{rimpoly}} a wrapper function of \code{\link[spatstat.core]{rpoint}} to allow generated spatial point patterns based on a pixel \code{\link[spatstat.geom]{im}}age to be returned with a polygonal \code{\link[spatstat.geom]{owin}}.\cr \bold{SPATIAL} \emph{Bandwidth calculators} \code{\link{OS}} estimation of an isotropic smoothing parameter for fixed-bandwidth bivariate KDE, based on the oversmoothing principle introduced by Terrell (1990). \code{\link{NS}} estimation of an isotropic smoothing parameter for fixed-bandwidth bivariate KDE, based on the asymptotically optimal value for a normal density (bivariate normal scale rule - see e.g. Wand and Jones, 1995). \code{\link{LSCV.density}} a least-squares cross-validated (LSCV) estimate of an isotropic fixed bandwidth for bivariate, edge-corrected KDE (see e.g. Bowman and Azzalini, 1997). \code{\link{LIK.density}} a likelihood cross-validated (LIK) estimate of an isotropic fixed bandwidth for bivariate, edge-corrected KDE (see e.g. Silverman, 1986). \code{\link{SLIK.adapt}} an experimental likelihood cross-validation function for simultaneous global/pilot bandwidth selection for adaptive density estimates. \code{\link{BOOT.density}} a bootstrap approach to optimisation of an isotropic fixed bandwidth for bivariate, edge-corrected KDE (see e.g. Taylor, 1989). \code{\link{LSCV.risk}} Estimation of a jointly optimal, common isotropic case-control fixed bandwidth for the kernel-smoothed risk function based on the mean integrated squared error (MISE), a weighted MISE, or the asymptotic MISE (see respectively Kelsall and Diggle, 1995a; Hazelton, 2008; Davies, 2013). \emph{Density and relative risk estimation} \code{\link{bivariate.density}} kernel density estimate of bivariate data; fixed or adaptive smoothing. \code{\link{multiscale.density}} multi-scale adaptive kernel density estimates for multiple global bandwidths as per Davies and Baddeley (2018). 
\code{\link{multiscale.slice}} a single adaptive kernel estimate based on taking a slice from a multi-scale estimate. \code{\link{risk}} estimation of a (log) spatial relative risk function, either from data or pre-existing bivariate density estimates; fixed (Kelsall and Diggle, 1995a) or both asymmetric (Davies and Hazelton, 2010) and symmetric (Davies et al., 2016) adaptive estimates are possible. \code{\link{tolerance}} calculation of asymptotic or Monte-Carlo \emph{p}-value surfaces. \emph{Visualisation} \code{S3} methods of the \code{plot} function; see \code{\link{plot.bivden}} for visualising a single bivariate density estimate from \code{\link{bivariate.density}}, \code{\link{plot.rrs}} for visualisation of a spatial relative risk function from \code{\link{risk}}, or \code{\link{plot.msden}} for viewing animations of multi-scale density estimates from \code{\link{multiscale.density}}. \code{\link{tol.contour}} provides more flexibility for plotting and superimposing tolerance contours upon an existing plot of spatial relative risk (i.e. given output from \code{\link{tolerance}}). \emph{Printing and summarising} \code{S3} methods (\code{\link{print.bivden}}, \code{\link{print.rrs}}, \code{\link{print.msden}}, \code{\link{summary.bivden}}, \code{\link{summary.rrs}}, and \code{\link{summary.msden}}) are available for the bivariate density, spatial relative risk, and multi-scale adaptive density objects. \bold{SPATIOTEMPORAL} \emph{Bandwidth calculators} \code{\link{OS.spattemp}} estimation of an isotropic smoothing parameter for the spatial margin and another for the temporal margin for spatiotemporal densities, based on the 2D and 1D versions, respectively, of the oversmoothing principle introduced by Terrell (1990). \code{\link{NS.spattemp}} as above, based on the 2D and 1D versions of the normal scale rule (Silverman, 1986). 
\code{\link{LSCV.spattemp}} least-squares cross-validated (LSCV) estimates of scalar spatial and temporal bandwidths for edge-corrected spatiotemporal KDE. \code{\link{LIK.spattemp}} as above, based on likelihood cross-validation. \code{\link{BOOT.spattemp}} bootstrap bandwidth selection for the spatial and temporal margins; for spatiotemporal, edge-corrected KDE (Taylor, 1989). \emph{Density and relative risk estimation} \code{\link{spattemp.density}} fixed-bandwidth kernel density estimate of spatiotemporal data. \code{\link{spattemp.risk}} fixed-bandwidth kernel density estimate of spatiotemporal relative risk, either with a time-static or time-varying control density (Fernando and Hazelton, 2014). \code{\link{spattemp.slice}} extraction function of the spatial density/relative risk at prespecified time(s). \emph{Visualisation} \code{S3} methods of the \code{plot} function; see \code{\link{plot.stden}} for various options (including animation) for visualisation of a spatiotemporal density, and \code{\link{plot.rrst}} for viewing spatiotemporal relative risk surfaces (including animation and tolerance contour superimposition). \emph{Printing and summarising objects} \code{S3} methods (\code{\link{print.stden}}, \code{\link{print.rrst}}, \code{\link{summary.stden}}, and \code{\link{summary.rrst}}) are available for the spatiotemporal density and spatiotemporal relative risk objects respectively. } \section{Dependencies}{ The \code{sparr} package depends upon \code{\link[spatstat]{spatstat}}. In particular, the user should familiarise themselves with \code{\link[spatstat.geom]{ppp}} objects and \code{\link[spatstat.geom]{im}} objects, which are used throughout. 
For spatiotemporal density estimation, \code{sparr} is assisted by importing from the \code{misc3d} package, and for the experimental capabilities involving parallel processing, \code{sparr} also currently imports \code{\link[doParallel]{doParallel}}, \code{\link[parallel]{parallel}}, and \code{\link[foreach]{foreach}}. } \section{Citation}{ To cite use of current versions of \code{sparr} in publications or research projects please use:\cr Davies, T.M., Marshall, J.C. and Hazelton, M.L. (2018) Tutorial on kernel estimation of continuous spatial and spatiotemporal relative risk, \emph{Statistics in Medicine}, \bold{37}(7), 1191-1221. Old versions of \code{sparr} (<= 2.1-09) can be referenced by Davies et al. (2011) (see reference list). } \references{ Abramson, I. (1982), On bandwidth variation in kernel estimates --- a square root law, \emph{Annals of Statistics}, \bold{10}(4), 1217-1223. Baddeley, A. and Turner, R. (2005), spatstat: an R package for analyzing spatial point patterns, \emph{Journal of Statistical Software}, \bold{12}(6), 1-42. Baddeley, A., Rubak, E. and Turner, R. (2015) \emph{Spatial Point Patterns: Methodology and Applications with R}, Chapman and Hall/CRC Press, UK. Bevan A. (2012), Spatial methods for analysing large-scale artefact inventories. \emph{Antiquity}, \bold{86}, 492-506. Bithell, J.F. (1990), An application of density estimation to geographical epidemiology, \emph{Statistics in Medicine}, \bold{9}, 691-701. Bithell, J.F. (1991), Estimation of relative risk function, \emph{Statistics in Medicine}, \bold{10}, 1745-1751. Bowman, A.W. and Azzalini, A. (1997), \emph{Applied Smoothing Techniques for Data Analysis: The Kernel Approach with S-Plus Illustrations.} Oxford University Press Inc., New York. ISBN 0-19-852396-3. Campos, F.A. and Fedigan, L.M. (2014) Spatial ecology of perceived predation risk and vigilance behavior in white-faced capuchins, \emph{Behavioral Ecology}, \bold{25}, 477-486. Davies, T.M. 
(2013), Jointly optimal bandwidth selection for the planar kernel-smoothed density-ratio, \emph{Spatial and Spatio-temporal Epidemiology}, \bold{5}, 51-65. Davies, T.M. and Baddeley A. (2018), Fast computation of spatially adaptive kernel estimates, \emph{Statistics and Computing}, \bold{28}(4), 937-956. Davies, T.M., Cornwall, J. and Sheard, P.W. (2013) Modelling dichotomously marked muscle fibre configurations, \emph{Statistics in Medicine}, \bold{32}, 4240-4258. Davies, T.M. and Hazelton, M.L. (2010), Adaptive kernel estimation of spatial relative risk, \emph{Statistics in Medicine}, \bold{29}(23) 2423-2437. Davies, T.M., Hazelton, M.L. and Marshall, J.C. (2011), \code{sparr}: Analyzing spatial relative risk using fixed and adaptive kernel density estimation in \code{R}, \emph{Journal of Statistical Software} \bold{39}(1), 1-14. Davies, T.M., Jones, K. and Hazelton, M.L. (2016), Symmetric adaptive smoothing regimens for estimation of the spatial relative risk function, \emph{Computational Statistics & Data Analysis}, \bold{101}, 12-28. Fernando, W.T.P.S. and Hazelton, M.L. (2014), Generalizing the spatial relative risk function, \emph{Spatial and Spatio-temporal Epidemiology}, \bold{8}, 1-10. Hazelton, M.L. (2008), Letter to the editor: Kernel estimation of risk surfaces without the need for edge correction, \emph{Statistics in Medicine}, \bold{27}, 2269-2272. Hazelton, M.L. and Davies, T.M. (2009), Inference based on kernel estimates of the relative risk function in geographical epidemiology, \emph{Biometrical Journal}, \bold{51}(1), 98-109. Kelsall, J.E. and Diggle, P.J. (1995a), Kernel estimation of relative risk, \emph{Bernoulli}, \bold{1}, 3-16. Kelsall, J.E. and Diggle, P.J. (1995b), Non-parametric estimation of spatial variation in relative risk, \emph{Statistics in Medicine}, \bold{14}, 2335-2342. Prince, M. I., Chetwynd, A., Diggle, P. J., Jarner, M., Metcalf, J. V. and James, O. F. W. 
(2001), The geographical distribution of primary biliary cirrhosis in a well-defined cohort, \emph{Hepatology} \bold{34}, 1083-1088. Sabel, C. E., Gatrell, A. C., Loytonen, M., Maasilta, P. and Jokelainen, M. (2000), Modelling exposure opportunities: estimating relative risk for motor disease in Finland, \emph{Social Science & Medicine} \bold{50}, 1121-1137. Smith, B.A., Davies, T.M. and Higham, C.F.W. (2015) Spatial and social variables in the Bronze Age phase 4 cemetery of Ban Non Wat, Northeast Thailand, \emph{Journal of Archaeological Science: Reports}, \bold{4}, 362-370. Taylor, C.C. (1989) Bootstrap choice of the smoothing parameter in kernel density estimation, \emph{Biometrika}, \bold{76}, 705-712. Terrell, G.R. (1990), The maximal smoothing principle in density estimation, \emph{Journal of the American Statistical Association}, \bold{85}, 470-477. Venables, W. N. and Ripley, B. D. (2002). \emph{Modern Applied Statistics with S}, Fourth Edition, Springer, New York. Wand, M.P. and Jones, C.M., 1995. \emph{Kernel Smoothing}, Chapman & Hall, London. Wheeler, D. C. (2007), A comparison of spatial clustering and cluster detection techniques for childhood leukemia incidence in Ohio, 1996-2003, \emph{International Journal of Health Geographics}, \bold{6}(13). } \author{ T.M. Davies\cr \emph{Dept. of Mathematics & Statistics, University of Otago, Dunedin, New Zealand.}\cr J.C. Marshall\cr \emph{Institute of Fundamental Sciences, Massey University, Palmerston North, New Zealand.}\cr Maintainer: T.M.D. 
\email{tdavies@maths.otago.ac.nz} } \keyword{package} sparr/man/OS.Rd0000644000176200001440000001170514012076542012771 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/OS.R, R/OS.spattemp.R \name{OS} \alias{OS} \alias{OS.spattemp} \title{Oversmoothing (OS) bandwidth selector} \usage{ OS( pp, nstar = c("npoints", "geometric"), scaler = c("silverman", "IQR", "sd", "var") ) OS.spattemp( pp, tt = NULL, nstar = "npoints", scaler = c("silverman", "IQR", "sd", "var") ) } \arguments{ \item{pp}{An object of class \code{\link[spatstat.geom]{ppp}} giving the observed 2D data to be smoothed.} \item{nstar}{Optional. Controls the value to use in place of the number of observations \emph{n} in the oversmoothing formula. Either a character string, \code{"npoints"} (default) or \code{"geometric"} (only possible for \code{OS}), or a positive numeric value. See `Details'.} \item{scaler}{Optional. Controls the value for a scalar representation of the spatial (and temporal for \code{OS.spattemp}) scale of the data. Either a character string, \code{"silverman"} (default), \code{"IQR"}, \code{"sd"}, or \code{"var"}; or positive numeric value(s). See `Details'.} \item{tt}{A numeric vector of equal length to the number of points in \code{pp}, giving the time corresponding to each spatial observation. If unsupplied, the function attempts to use the values in the \code{\link[spatstat.geom]{marks}} attribute of the \code{\link[spatstat.geom:ppp]{ppp.object}} in \code{pp}.} } \value{ A single numeric value of the estimated spatial bandwidth for \code{OS}, or a named numeric vector of length 2 giving the spatial bandwidth (as \code{h}) and the temporal bandwidth (as \code{lambda}) for \code{OS.spattemp}. } \description{ Provides fixed bandwidths for spatial or spatiotemporal data based on the maximal smoothing (oversmoothing) principle of Terrell (1990). 
} \details{ These functions calculate scalar smoothing bandwidths for kernel density estimates of spatial or spatiotemporal data: the ``maximal amount of smoothing compatible with the estimated scale of the observed data''. See Terrell (1990). The \code{OS} function returns a single bandwidth for isotropic smoothing of spatial (2D) data. The \code{OS.spattemp} function returns two values -- one for the spatial margin and another for the temporal margin, based on independently applying Terrell's (1990) rule (in 2D and 1D) to the spatial and temporal margins of the supplied data. \describe{ \item{\bold{Effective sample size}}{ The formula requires a sample size, and this can be minimally tailored via \code{nstar}. By default, the function simply uses the number of observations in \code{pp}: \code{nstar = "npoints"}. Alternatively, the user can specify their own value by simply supplying a single positive numeric value to \code{nstar}. For \code{OS} (not applicable to \code{OS.spattemp}), if \code{pp} is a \code{\link[spatstat.geom:ppp]{ppp.object}} with factor-valued \code{\link[spatstat.geom]{marks}}, then the user has the option of using \code{nstar = "geometric"}, which sets the sample size used in the formula to the geometric mean of the counts of observations of each mark. This can be useful for e.g. relative risk calculations, see Davies and Hazelton (2010). } \item{\bold{Spatial (and temporal) scale}}{The \code{scaler} argument is used to specify spatial (as well as temporal, in use of \code{OS.spattemp}) scale. For isotropic smoothing in the spatial margin, one may use the `robust' estimate of standard deviation found by a weighted mean of the interquartile ranges of the \eqn{x}- and \eqn{y}-coordinates of the data respectively (\code{scaler = "IQR"}). Two other options are the raw mean of the coordinate-wise standard deviations (\code{scaler = "sd"}), or the square root of the mean of the two variances (\code{scaler = "var"}). 
A fourth option, \code{scaler = "silverman"} (default), sets the scaling constant to be the minimum of the \code{"IQR"} and \code{"sd"} options; see Silverman (1986), p. 47. In use of \code{OS.spattemp} the univariate version of the elected scale statistic is applied to the recorded times of the data for the temporal bandwidth. Alternatively, like \code{nstar}, the user can specify their own value by simply supplying a single positive numeric value to \code{scaler} for \code{OS}, or a numeric vector of length 2 (in the order of \emph{[spatial scale, temporal scale]}) for \code{OS.spattemp}. } } } \examples{ data(pbc) OS(pbc) OS(pbc,nstar="geometric") # uses case-control marks to replace sample size OS(pbc,scaler="var") # set different scalar measure of spread data(burk) OS.spattemp(burk$cases) OS.spattemp(burk$cases,scaler="sd") } \references{ Davies, T.M. and Hazelton, M.L. (2010), Adaptive kernel estimation of spatial relative risk, \emph{Statistics in Medicine}, \bold{29}(23) 2423-2437. Terrell, G.R. (1990), The maximal smoothing principle in density estimation, \emph{Journal of the American Statistical Association}, \bold{85}, 470-477. } \author{ T.M. Davies } 
} \arguments{ \item{x}{An object of class \code{\link{bivden}}, \code{\link{stden}}, \code{\link{rrs}}, \code{\link{rrst}}, or \code{\link{msden}}.} \item{...}{Ignored.} } \description{ \code{print} methods for classes \code{\link{bivden}}, \code{\link{stden}}, \code{\link{rrs}}, \code{\link{rrst}} and \code{\link{msden}}. } \author{ T.M. Davies } sparr/man/plotsparr.Rd0000644000176200001440000001514614012076542014501 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/plot.bivden.R, R/plot.msden.R, R/plot.rrs.R, % R/plot.rrst.R, R/plot.stden.R \name{plot.bivden} \alias{plot.bivden} \alias{plot.rrs} \alias{plot.msden} \alias{plot.stden} \alias{plot.rrst} \title{Plotting sparr objects} \usage{ \method{plot}{bivden}( x, what = c("z", "edge", "bw"), add.pts = FALSE, auto.axes = TRUE, override.par = TRUE, ... ) \method{plot}{msden}(x, what = c("z", "edge", "bw"), sleep = 0.2, override.par = TRUE, ...) \method{plot}{rrs}( x, auto.axes = TRUE, tol.show = TRUE, tol.type = c("upper", "lower", "two.sided"), tol.args = list(levels = 0.05, lty = 1, drawlabels = TRUE), ... ) \method{plot}{rrst}( x, tselect = NULL, type = c("joint", "conditional"), fix.range = FALSE, tol.show = TRUE, tol.type = c("upper", "lower", "two.sided"), tol.args = list(levels = 0.05, lty = 1, drawlabels = TRUE), sleep = 0.2, override.par = TRUE, expscale = FALSE, ... ) \method{plot}{stden}( x, tselect = NULL, type = c("joint", "conditional"), fix.range = FALSE, sleep = 0.2, override.par = TRUE, ... 
) } \arguments{ \item{x}{An object of class \code{\link{bivden}}, \code{\link{stden}}, \code{\link{rrs}}, \code{\link{rrst}}, or \code{\link{msden}}.} \item{what}{A character string to select plotting of result (\code{"z"}; default); edge-correction surface (\code{"edge"}); or variable bandwidth surface (\code{"bw"}).} \item{add.pts}{Logical value indicating whether to add the observations to the image plot using default \code{\link{points}}.} \item{auto.axes}{Logical value indicating whether to display the plot with automatically added x-y axes and an `L' box in default styles.} \item{override.par}{Logical value indicating whether to override the existing graphics device parameters prior to plotting, resetting \code{mfrow} and \code{mar}. See `Details' for when you might want to disable this.} \item{...}{Additional graphical parameters to be passed to \code{\link[spatstat.geom]{plot.im}}, or in one instance, to \code{\link[spatstat.geom]{plot.ppp}} (see `Details').} \item{sleep}{Single positive numeric value giving the amount of time (in seconds) to \code{\link[base]{Sys.sleep}} before drawing the next image in the animation.} \item{tol.show}{Logical value indicating whether to show pre-computed tolerance contours on the plot(s). The object \code{x} must already have the relevant \emph{p}-value surface(s) stored in order for this argument to have any effect.} \item{tol.type}{A character string used to control the type of tolerance contour displayed; a test for elevated risk (\code{"upper"}), decreased risk (\code{"lower"}), or a two-tailed test (\code{two.sided}).} \item{tol.args}{A named list of valid arguments to be passed directly to \code{\link[graphics]{contour}} to control the appearance of plotted contours. Commonly used items are \code{levels}, \code{lty}, \code{lwd} and \code{drawlabels}.} \item{tselect}{Either a single numeric value giving the time at which to return the plot, or a vector of length 2 giving an interval of times over which to plot. 
This argument must respect the stored temporal bound in \code{x$tlim}, else an error will be thrown. By default, the full set of images (i.e. over the entire available time span) is plotted.} \item{type}{A character string to select plotting of joint/unconditional spatiotemporal estimate (default) or conditional spatial density given time.} \item{fix.range}{Logical value indicating whether use the same color scale limits for each plot in the sequence. Ignored if the user supplies a pre-defined \code{\link[spatstat.geom]{colourmap}} to the \code{col} argument, which is matched to \code{...} above and passed to \code{\link[spatstat.geom]{plot.im}}. See `Examples'.} \item{expscale}{Logical value indicating whether to force a raw-risk scale. Useful for users wishing to plot a log-relative risk surface, but to have the raw-risk displayed on the colour ribbon.} } \value{ Plots to the relevant graphics device. } \description{ \code{plot} methods for classes \code{\link{bivden}}, \code{\link{stden}}, \code{\link{rrs}}, \code{\link{rrst}} and \code{\link{msden}}. } \details{ In all instances, visualisation is deferred to \code{\link[spatstat.geom]{plot.im}}, for which there are a variety of customisations available the user can access via \code{...}. The one exception is when plotting observation-specific \code{"diggle"} edge correction factors---in this instance, a plot of the spatial observations is returned with size proportional to the influence of each correction weight. When plotting a \code{\link{rrs}} object, a pre-computed \emph{p}-value surface (see argument \code{tolerate} in \code{\link{risk}}) will automatically be superimposed at a significance level of 0.05. Greater flexibility in visualisation is gained by using \code{\link{tolerance}} in conjunction with \code{\link{contour}}. An \code{\link{msden}}, \code{\link{stden}}, or \code{\link{rrst}} object is plotted as an animation, one pixel image after another, separated by \code{sleep} seconds. 
If instead you intend the individual images to be plotted in an array of images, you should first set up your plot device layout, and ensure \code{override.par = FALSE} so that the function does not reset these device parameters itself. In such an instance, one might also want to set \code{sleep = 0}. } \examples{ \donttest{ data(pbc) data(fmd) data(burk) # 'bivden' object pbcden <- bivariate.density(split(pbc)$case,h0=3,hp=2,adapt=TRUE,davies.baddeley=0.05,verbose=FALSE) plot(pbcden) plot(pbcden,what="bw",main="PBC cases\n variable bandwidth surface",xlab="Easting",ylab="Northing") # 'stden' object burkden <- spattemp.density(burk$cases,tres=128) # observation times are stored in marks(burk$cases) plot(burkden,fix.range=TRUE,sleep=0.1) # animation plot(burkden,tselect=c(1000,3000),type="conditional") # spatial densities conditional on each time # 'rrs' object pbcrr <- risk(pbc,h0=4,hp=3,adapt=TRUE,tolerate=TRUE,davies.baddeley=0.025,edge="diggle") plot(pbcrr) # default plot(pbcrr,tol.args=list(levels=c(0.05,0.01),lty=2:1,col="seagreen4"),auto.axes=FALSE) # 'rrst' object f <- spattemp.density(fmd$cases,h=6,lambda=8) g <- bivariate.density(fmd$controls,h0=6) fmdrr <- spattemp.risk(f,g,tolerate=TRUE) plot(fmdrr,sleep=0.1,fix.range=TRUE) plot(fmdrr,type="conditional",sleep=0.1,tol.type="two.sided", tol.args=list(levels=0.05,drawlabels=FALSE)) # 'msden' object pbcmult <- multiscale.density(split(pbc)$case,h0=4,h0fac=c(0.25,2.5)) plot(pbcmult) # densities plot(pbcmult,what="edge") # edge correction surfaces plot(pbcmult,what="bw") # bandwidth surfaces } } \author{ T.M. 
Davies } sparr/man/fft2d.Rd0000644000176200001440000000250614012076542013454 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/fft.R \name{fft2d} \alias{fft2d} \title{2D fast-Fourier wrapper around 'fftwtools' or 'stats' package} \usage{ fft2d(x, inverse = FALSE, fftw = sparr:::fftw_available()) } \arguments{ \item{x}{A numeric matrix to be transformed.} \item{inverse}{Whether it should compute the inverse transform (defaults to \code{FALSE}).} \item{fftw}{Whether the \code{fftwtools} R package is available.} } \value{ The fast-Fourier (inverse) transform. A complex-valued matrix of the same size as \code{x}. } \description{ Utilises the Fastest Fourier Transform in the West (FFTW) via the 'fftwtools' package if available, else reverts to built-in functionality } \details{ This function is called wherever \code{sparr} seeks to perform a 2D fast-Fourier transform. Where available, computational expense is noticeably reduced by appealing to routines in the independent `FFTW' toolbox. The user is encouraged to install the corresponding R package \code{fftwtools} from CRAN; this function will automatically detect and use the faster option, otherwise will defer to the built-in \code{\link[stats]{fft}}. } \examples{ \donttest{ # System check sparr:::fftw_available() system.time(fft(matrix(1:2000^2,2000))) system.time(fft2d(matrix(1:2000^2,2000))) } } \author{ J.C. Marshall } sparr/man/BOOT.spattemp.Rd0000644000176200001440000001336414012076542015052 0ustar liggesusers\name{BOOT.spattemp} \alias{BOOT.spattemp} \title{ Bootstrap bandwidths for a spatiotemporal kernel density estimate } \description{ Bandwidth selection for standalone spatiotemporal density/intensity based on bootstrap estimation of the MISE, providing an isotropic scalar spatial bandwidth and a scalar temporal bandwidth. 
} \usage{ BOOT.spattemp(pp, tt = NULL, tlim = NULL, eta = NULL, nu = NULL, sedge = c("uniform", "none"), tedge = sedge, ref.density = NULL, sres = 64, tres = sres, start = NULL, verbose = TRUE) } \arguments{ \item{pp}{ An object of class \code{\link[spatstat.geom]{ppp}} giving the spatial coordinates of the observations to be smoothed. Possibly marked with the time of each event; see argument \code{tt}. } \item{tt}{ A numeric vector of equal length to the number of points in \code{pp}, giving the time corresponding to each spatial observation. If unsupplied, the function attempts to use the values in the \code{\link[spatstat.geom]{marks}} attribute of the \code{\link[spatstat.geom:ppp]{ppp.object}} in \code{pp}. } \item{tlim}{ A numeric vector of length 2 giving the limits of the temporal domain over which to smooth. If supplied, all times in \code{tt} must fall within this interval (equality with limits allowed). If unsupplied, the function simply uses the range of the observed temporal values. } \item{eta}{ Fixed scalar bandwidth to use for the spatial margin of the reference density estimate; if \code{NULL} it is calculated as the oversmoothing bandwidth of \code{pp} using \code{\link{OS}}. Ignored if \code{ref.density} is supplied. See `Details'. } \item{nu}{ Fixed scalar bandwidth to use for the temporal margin of the reference density estimate; if \code{NULL} it is calculated from \code{tt} using the univariate version of Terrell's (1990) oversmoothing principle. Ignored if \code{ref.density} is supplied. See `Details'. } \item{sedge}{ Character string dictating spatial edge correction. \code{"uniform"} (default) corrects based on evaluation grid coordinate. Setting \code{sedge="none"} requests no edge correction. } \item{tedge}{ As \code{sedge}, for temporal edge correction. } \item{ref.density}{ Optional. An object of class \code{\link{stden}} giving the reference density from which data is assumed to originate in the bootstrap. 
Must be spatially edge-corrected if \code{sedge = "uniform"}. } \item{sres}{ Numeric value > 0. Resolution of the [\code{sres} \eqn{\times}{x} \code{sres}] evaluation grid in the spatial margin. } \item{tres}{ Numeric value > 0. Resolution of the evaluation points in the temporal margin as defined by the \code{tlim} interval. If unsupplied, the density is evaluated at integer values between \code{tlim[1]} and \code{tlim[2]}. } \item{start}{ Optional positive numeric vector of length 2 giving starting values for the internal call to \code{\link[stats]{optim}}, in the order of (\emph{h}, \emph{lambda}). } \item{verbose}{ Logical value indicating whether to print a function progress bar to the console during evaluation. } } \details{ For a spatiotemporal kernel density estimate \eqn{\hat{f}} defined on \eqn{W x T \in R^3}, the mean integrated squared error (MISE) is given by \eqn{E[\int_W \int_T (\hat{f}(x,t) - f(x,t))^2 dt dx]}, where \eqn{f} is the corresponding true density. Given observed spatiotemporal locations \eqn{X} (arguments \code{pp} and \code{tt}) of \eqn{n} observations, this function finds the scalar spatial bandwidth \eqn{h} and scalar temporal bandwidth \eqn{\lambda} that jointly minimise \deqn{E^*[\int_W \int_T (\hat{f}^*(x,t) - \hat{f}(x,t))^2 dt dx],} where \eqn{\hat{f}(x,t)} is a density estimate of \eqn{X} constructed with `reference' bandwidths \eqn{\eta} (spatial; argument \code{eta}) and \eqn{\nu} (temporal; argument \code{nu}); \eqn{\hat{f}^*(x,t)} is a density estimate using bandwidths \eqn{h} and \eqn{\lambda} of \eqn{n} observations \eqn{X^*} generated from \eqn{\hat{f}(x,t)}. The notation \eqn{E^*} denotes expectation with respect to the distribution of the \eqn{X^*}. The user may optionally supply \code{ref.density} as an object of class \code{\link{stden}}, which must be evaluated on the same spatial and temporal domains \eqn{W} and \eqn{T} as the data (arguments \code{pp}, \code{tt}, and \code{tlim}). 
In this case, the reference bandwidths are extracted from this object, and \code{eta} and \code{nu} are ignored. This function is based on an extension of the theory of Taylor (1989) to the spatiotemporal domain and to cope with the inclusion of edge-correction factors. No resampling is necessary due to the theoretical properties of the Gaussian kernel. } \value{ A numeric vector of length 2 giving the jointly optimised spatial and temporal bandwidths (named \code{h} and \code{lambda} respectively). } \references{ Taylor, C.C. (1989) Bootstrap choice of the smoothing parameter in kernel density estimation, \emph{Biometrika}, \bold{76}, 705-712. } \author{ T. M. Davies } \section{Warning}{ Bootstrapping for spatiotemporal bandwidth selection for spatiotemporal data is very computationally demanding. Keeping \code{verbose = TRUE} offers an indication of the computational burden by printing each pair of bandwidths at each iteration of the \code{\link{optim}}isation routine. The `Examples' section also offers some rough indications of evaluation times on this author's local machine. } \seealso{ \code{\link{LSCV.spattemp}}, \code{\link{spattemp.density}} } \examples{ \donttest{ data(burk) # Burkitt's Uganda lymphoma data burkcas <- burk$cases #~85 secs hlam1 <- BOOT.spattemp(burkcas) #~75 secs. Widen time limits, reduce ref. bw. hlam2 <- BOOT.spattemp(burkcas,tlim=c(400,5800),eta=8,nu=450) #~150 secs. Increase ref. bw., custom starting vals hlam3 <- BOOT.spattemp(burkcas,eta=20,nu=800,start=c(7,400)) rbind(hlam1,hlam2,hlam3) } } sparr/man/pbc.Rd0000644000176200001440000000240214024073723013207 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/pbc-data.R \docType{data} \name{pbc} \alias{pbc} \title{Primary biliary cirrhosis data} \format{ \code{pbc} is a dichotomously marked \code{\link[spatstat.geom:ppp]{ppp.object}}, with locations expressed in UK Ordnance Survey Coordinates (km). } \source{ Prince et al. 
(2001), The geographical distribution of primary biliary cirrhosis in a well-defined cohort, \emph{Hepatology}, \bold{34}, 1083-1088. } \description{ Data of the locations of 761 cases of primary biliary cirrhosis in several adjacent health regions of north-eastern England, along with 3020 controls representing the at-risk population, collected between 1987 and 1994. These data were first presented and analysed by Prince et al. (2001); subsequent analysis of these data in the spirit of \code{\link{sparr}} was performed in Davies and Hazelton (2010). Also included is the polygonal study region. } \section{Acknowledgements}{ The authors thank Prof. Peter Diggle for providing access to these data. } \examples{ data(pbc) summary(pbc) plot(pbc) } \references{ Davies, T.M. and Hazelton, M.L. (2010), Adaptive kernel estimation of spatial relative risk, \emph{Statistics in Medicine}, \bold{29}(23) 2423-2437. } \keyword{data} sparr/man/figures/0000755000176200001440000000000014012076651013622 5ustar liggesuserssparr/man/figures/README-pbc-example-1.png0000644000176200001440000017262514012076542017632 0ustar liggesusersPNG  IHDRxa`D pHYse IDATxy|e񫪻 AD% n Q܏8 \࠸QGqaQd|Ed8A % ItVtԧR]U}7 W=m 8888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888'Ov!z5kc={]!h~| ; 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1^ 5~SN9Zu=歷EW&AJ}|;n©LJ:W^y 6.m.(]W)iw{tA#hn \s͆ $-پ믿۷EIqsm^fӦQ8~ M<~ڢE[vܸq+V0 r4h`qu`H17G3H꠿?[`Sqĭ\rѢEn>ꚏ⯽%=e0uj F4u!CtuҤIeSI+W:uj׮]sΈ $ݻwLvUr.%h@|_jݪUj<P1˗<8##H|||͛7o`vFFFzzzzzzQx˗{KUܡCoi1~$5Wk__l@1" b9rd(eh߾1c ֭[rO޼yŋϟ6|uEd@e7z#_Tꫯ>\bDٳgٳGҘ1c~)ST2HԩӸqVZ裏Jڹs9s"W+[ty}MÜۓc{eBgW{ B-[tҪ_iӦ_r"`/F}ی3* w nv4m2dȴi[|Wsa4ǎ:>}zVV֥UO7Ҹ0gyf׮]N0j "={|衇M;a„~w}MLLLHHHHH0 ####==}+Vꫯ~Dp{>տtСC-ZO̞=7AC O=T||ĉ~mɕ_&OL(֫W^z|ͥgffܸq)$۶~=cP/޽_6V6aVVꁖ!(8bIaXaI^UQU'#ooC:u:~޽;%%e͚5mO<9!!{uN@]7r#G 
t7&J9L{͛7wԩ>eh8/?/ԩS_'$$H7n޽{.P7s=+V8Sg4X$IoF~P3@]ApEͷgΜyZZ|ԨQf͒tȑ_~مu݌3^{؏>ٰ?.4{75YN"h8NRǎorO=zg->hez>0n!͎{7<^<^`핼)۔mJe7m{m,d:(iFF˽{.Yozƪp;Xpݺu[~֭[g}_< 'iM6=:hۏF#͞~4wAEC@Ap7=S+9m۶7n߾/w<DRFF:4I-;Le=~G{=餓"uAQt1--mk. 3g# ,ԢE+oaZϿ~b%mK/믿߿%K5kb=@q7~mN:wyvޝf۶'Op['7bjjjjj;m;///?}nݺM|5r~a_|pǤ1($YAIː ,BNOd@*hV-)߶%dH˔ z [_ɴmIwǟut'tok7o^#P}_~ᇒbcc~cdž/m`+==}ܸq#G+AC3gv~;RSSlْZh'ۅN:9s\}Ց.PdffI+N:y]Nj6s袋-[ֺukN`s ̜9n+Ql۾k9/3 >cǎ;X<'''''G;jԨ뮻/7,3fti佞#hpdU5l0|詧:2޻'K&w#55 /ܺu'_UygT'  쭷zg|wc%[^K>3Jd-L[kK2ے<%[RLI~YC%)= 8+;^ O=O?+ʮTUO7=S+9'4h(+++REpl?7|s)))v[H pbYbرc%pn~[#L-}f+N<ɹQp"h(NJ+R΄tMG>sO99s7Ϟ={Fw_3n-FQt:#--/^b $iɒ%~ Iڵda <лw{.|;v/rR!v; b'Aٳg3v9U1]3lذ^ve|u֭[ʀZiӦMF4i{キf͚[ouֻv:ujO814aCn喿o} Cݱc$׿.]Xzoֶŋw}gvnƶo1%%S\yͼx k7;bڒނז䉲$ysb 7%#/5_$d[=2=^}%MYt_[_^zX9[|||qPOUsHBmѐp¢ È4uN:]{Eo]'\`-Zu]APo /СCCy睵kJ:묳{W_~_{9sqIIIT8[nq4jעEaC;vA۶FE'o߾=؄$4FE`p AC_|Q?̛7O+2Hz۶+̚5+5ND .:u?~]q2eLՠ5Zu&h̜93wؑe˖Bt%:餓̙sWG\pACP|WzjI+wK. .HNN/#S'1m8}D$丆I--I11IQ I17H1%LI!)`]hYLC<55=>k.0<=믿_ht 9>cǎ&zGuu]~>/5hYl8R8 K.]x9p!"*{w92'}N/'Ή}eY۵' 6o޼SO=u&eP4k֬R$VteY @Y;wׯK/lRYr" :S[ov!81UmKC j b;wdɒPpPsUn=Vjժ -Zt9\]v_}W8Ju}7n -z4=z_|sΉ'>EoO>d+V_%K_Էo߈ 87^4ud+U7/C_(C,IfdwÈb_}hPO!NZ%0)a^[m§HmZ,S0%L: FluGbbbe@2$8iϞ=ZG5t7w}m[9wyg'y楦F06HOO߶mڵkWZw^+1'޽[)W]1wѢEoM7ի?YM6Vb  ׯ.]qƲ pue#G<]ĈT4DAR[Oiq%w};v͝0aBϞ=׬YxvuZU靀֭[ ҵkI&%''M$eff\rԩ]v:tΝ;#^&8 8y UTE;9ߪDWI}b.ްû3o(}>lرc#S'8vђvlLW5Tz'uǎ ._. tcm Ժ={|衇M;a„~w}MLLLHHHHH0 ####==}+Vꫯ^?~|=ܭA  yVBNT /xw㩧8qm;99999K|>ɓ'L &0uHʭ(h(E1-lfch?(%QCҀUkpl˰K`eeYFhe؅CB!_ܥQ+\rIj6 %?~wqG˖-+?311qر6m"epcD h0h@]ZgYo&''wm_|EN={Y֯_aÆ-[:t(;;۶:t%))4y . h1#nR6?}t7˖-8p?>n8BMfoڷD)i&%%%%%]8BSs8Fjdɒq_{999nDŽ@pL@ۅA-7G=xy;z'mO1j sgHKJJ~kbD{mۏ<>vQ4sm]{~37SRB;5Tq 5b¾ 5t/CH9ʻF eXa[ m$tުqQwE ?SvQ;{Ơ5pas8p௿:p?]Qt?s9t%MqFM8wAVVѣ@_jj]v>N*(;"xE%*gqKZm 4Q.oYɖ D̪Jr~Û |Z6o߾*ڨS'Akժe];.ڌؽ{nW@}G@СÇ~{R..kػ&vK~7+q4ZһwwϺcgT˩bf=?Ү]l&P.jl94#xf5e8j_`-;Cy=jw.ri6 ;1 0 ,l۴f  m>vmEVmEVm5=S;٣Pk4PnGy$ ^fT?qSN馛[V] P[F6"h?IlkY^ ^HLLw&=3tq',ӏz2B*oPУЕfkQ[G(ۖZ$H0!IA G%Mg6 P[4 -Z0 c?7hj[0uj N ^7:h . 
9/{ދt#"$11QRZZۅmƕ77=:]6j "!2}u] K7rwn@}:$zvEʶB(9B`v[(uN v _O(qZE]aKh`J2d dK%)_$mI/|j))0%[|Ð,&f[_̙3;w|m[j#Јz4DƩiWyGK{/_v9# DH\\\ 4s<铟?}tkATGCy`O?eݸ Rᤉ Vi?U/wJ 4lx$ɴ 荒$# IfВmIQރ%eH  JːofUepJ gUFmK:s*B5Nj F49Z"QҕW^v!# D 7 遟nRtelٲ{nׂ YAsw 0`?羝ߺ]Kݷ(+4MȡGc+rg˰5]agRxr{1 _wyxÅ2=,b$We;CwPIV CevQ+ _<۔ rC-dӣ~"]vO<{w~} v9u-{~Ԯ];k~!h /W?32.ْn|ATG# h L|7onSgݣ:Y VkG.8Ӧ̘ܽ ;m拒 ~4q( %oS;0UwC )I슾a kP(gD0`J ~SR~^mSRs34̌ 0`?.UR}g?;kT_K_o>A~)SӧYf͛7СCnݺwީSVZ5nܸW^'N\~ەpijy#cgv:]x۷[ouT6 [n2dH׮]'M|dff\rԩ]v:tΝ;#^&8tz?y8HaʜaM4GA|={~GEG۷o߳g++ٳg6mZtŋ{zj7gУ7zwz>FXcW0LJ]USC Loq󅢦 vѾ't ?9H/^Aa}"~ O皒s=<-༃-ׯ?35j됄v#GfddHj߾1c ֭[rO޼yŋϟ6|uEdp#p*)֌rKڸqۅbY5={={$3~2eJE)N:7nժU>;wΙ3'rp믺ꪬtxeBS'I-Զm۹sT0Lү_?I~5cl`f5ˁ~KYpq\xА#8,Kv"---m?n=/qHQ-_^40|m=ںu+ k%#4ԨQܹ޽{WժU;v녡KZnxI D/͛7wJŰFO#OC7n$hB)/ȑ#].P~vرlٲK8Wmڴ_]^@M"h r>|4ͱcFTpArȑog6Y71={>CӦM͝0a?ܻw}&&&&$$$$$+V|W~?t{np#F|mڴ kIf;gF7Xx^驧8qm;99999K|>ɓ'L & O>EK.=ŌwsL306oܮ7o|wlٲ3ǎi&R':F4PlJS48# {x|F $SpIE̊{4T>f^ *s O }΢mzWGCAw`~Tث 53,I }D_>C]tIJJ2M Y_*|לZu姟~ڸq#A"4ͤ$ Gh @ zgΜ%쮶nSߝy晢MCe#)O>SO=gF9v3 Iْ|[ǰ%F9Dω0Kx]ϒ&PWexU_Y]".<-|Ik@qIApi/%|DhyS'B呤hΟ|/%YhFK<)c_+Th1o{ xtv83!m޼9 z<_ ҡJƈx[nŲӧynb_۷] u#pҒ%KFP&?K^c>r FQP_yhkV1UR[%TFEW$TEC^C7RXu+Ú/:z(8,qhRa=aKcKnֆ N?r U#=`D|C>7+BiAޓ#n@ň+##gy ޷ B9^ۙЁvڍ9ZPF5G4XG?heeeM>}O>dVV֥^N 2]ϟq3f4hr3fx'#)I\W 1^Ch(hs [`#C3R ~Z{}\A3쵠߄mKKْCU@ە:[e3}ɂ CАhcڵwĈsPG4p<.\#H:CNjayZlӡ/x{9k#hx\tE111yycۭ.GW-kرݺus˨D@mAhӦ]w5sWl>yI&PѤ ;|~D- J(1K"%-LV;.sQo-):tI #GL-ɛoJLI@"S/rе{>ݵI&? 
5 Q 8N&Lhܸ'|nׂ [փH8qb-.ԬY8d?nׂ ~惙;w{ܮjmZaYm75z]ʗ7 fΜ.z 3۶/MRM976N 3Ք[aY;Y^OKQr[0T^o_Co-KU)*O=**Mg]XRfw}Gݮ8q=nl9+_l[%[R(BQ_I ӊa)1u,G]yo%J}';C5_;͒$#IiJ*aT^ۅ`~{4%7gF֨B?5@'L`ܹsBU+lܸ|0o<G=zCm۶sΜ9sݺuGٳ bccd"")СC?'m6j&j#EF݊ܭr-Y"o6 *('aP5;ϛ z2WV2#@ܺc{ x^].Vh G|H"4;R-ԐK1iO |՗IMgs-cU.U<2eʁgӦMcǎ-i@V֬Y3rHw:ujJDDiii.O"D߉`OQDTKj֝O?4ܵN\h={6&M|9D1vΝ;/OkyDD҈#^yÁG`orkzW<w9d ݘ@DTmpg׮]nVqRΝ;OKYDDTgyf/Ј н{:麾f͚Yf}G}ݑ#G]Q`FO~~>SYlNDDـZ166(Β@}3@!Nj(^1yt]Ps=fN2؆_5O ੒1XO| !#< dO،X !hХh@3j)gQPt:83J k0r<͢%6?9.*ըjTpDHj*^xaS.kɯuqAqW7kB*hiذ!u֝!5 bs匞J.j:ta\iii0H)Ν;?sUW%i#jaѢE .,Q[n6mp""Æ tDtpB5Rn4]#d3j~-[nժU>}}1R1cƼaB:CM>]o}/}vvv_|1bĈ'N(2hР*+ '|HDY&Α@uc" ZDv W"B\[ZGZBZ=m?#~c</@a<"-AZ/Z;)۷6l G+ԩSrrrbbbbb"333##رc+W\d2>|xv[?Bu[ąvB &M8p`wu.((xwCAD5KFr\R˗/_Cv1cFQ5ь3 %7`\NUftOZlܡw/ƍ.4Q Ō@Ç߶mׯ_Ƀ ںu+WLaaȑ# 'M#& g>ᣗ^z(y;vT]YDDW4p9L2޸q͛ӏ?'o޼y֭SSS+5DDU*2^lD\mD3?!Yn 0[ HKcp 6k.%7T[Ϟme4G 2TZ7' t%h={W!-.\ƽ[pe!ƴ(=B`s0mM ̄ _dqE%\rS[WXXشiRiҤ ܪ*qDo8z( @=52is&"7nZ Ҳe#G;v}s&DDTc= ܵPgM\U/܅-k z+ޛ)h5kVU"^P\QCDDUo۶m=]  tmݺ:;?|77nܸc?j(㊆.]TyDD mܸqΜ9 .ܲeKVVV[ڵk^ڴi 4]."`3k%z Aw5 0"3 } _:_Cp:bW 3R | Rp{]²yAXdScĨ,Lb69soFQp8u( f:7l 6߶Oj(:SZѐ_xB8ƍkժս}j/YԫWoСU_-QBC;v}ѣ۶m;w̙3׭[wȑ={nذ!66jK&":#̘1cPui蘀Dcg seiBtI ˶ )5M:adh3'I*-yv4Z03uүEqѹ*ϐeGo-(f=B x),1mty [;u6'(6 @)0$?㏂ if㿅NT/޲eˀU|6n&^5U&N2eʁgӦMcǎ-i@V֬Y3rHw:ujJDt(,,4?-{A8]7AaiӦ3gμ۸@DW}fϞ I&~S&!رc-[dɒ/seo%9tвeN@Cƙz+["R5?tNX"ӹsg-ZpaںuiӼQeIKKs\> l] *]G'znBC=(o?~Ce4gggW]uՉ'E4hPIDt&_똘{.ܵPmuvli߾aƏ_XX8bĈ?W^yeNǎ[r%K\.q۵kj )1'Wvmx’ th]i0`F68,I TK":`$X*9 ـ݆`<.|l-'uȠo'nD6°^~tio']%]U~RBF !ԉk]QpK/?j(%\|K?n3fĈUS!љ`Μ9DZzq]kuo[ze]r$$$o>UU >w&L3gå}בܽ{s=#":|u#&*"o3.LAJU\_W^y%G<U.4p9L2޸q͛ӏ?'o޼y֭SSS!DDo`A:]qt… /^|uׅ seuש̩ҥIŁQ)P"EQRSSSSS]љe۶m{3t͓D 8ݻe3X#8, *cuY D< } ܖkvtc7/_ﶱAYNS5`0fg+SmmΞ\!5ga}JшE[ tjbW4ԳG;"~p8z]w?)ٹn;666..AUDDU DDTO`?~%,vjnݺ Z;IB8k֬ʩRmܸqΜ9 .ܲeKVVV[ڵk^ڴ\DT?N>}֬Y֭įVx]rQ?t3z~%*َ;n.h˗/^ejժq]tEwqݻL"+BXrY322}?`ƍ}AK"J/Tj9Rko6['(HKƵHKw:B r ~klO3h[DA-['<wUo̐k׮믿~׮]۷o̙3FUԺu;5 jpB+/ntPq1*ʕիW/c!%%O>=zCm۶sΜ9sݺuGٳ bccd" ?Ujvq*n馛|k1.d;wc=jϼ Dž.mZ> ܵ$ d bʔ)ЧOM6;UZJKK[fȑ#޽{ԩUW+QBϲe>3g|7? 
׮]`W_}uK.駟SN5!"~A 3 z >???P2{lM4#""sbر;w_NN[TU駟S-?sv߅}v~_{f"O>Yx1Y Iw?!V-t--#-ͤ3@2<%5x75嘖a \$H} ։O3[JߛZ `vBʰ[X3,.(v_:jh (AOy'oif4a wkx7BHU /۸[o=ٮ=z,YdΝ."ӎW4lڴ 7l]evZ;wXe0\wu;v?WIDDӎ;~amTu0UƏr"ѩ19xyyy_QB x7G%ZK۷tVGDTĉpG5.8G4벳QMЭe44luN@FU~MDDU >uxbb"’4oKY#"!֮]ޛ4x饗'O>|pkZˆZXh… ֭[M=&bFyw^uE-m3+B2z+Ek"":Y|ovf=@S)`F3t5- 0F '5?b R`|aj$2xB ("Z(y1_E _>kr5!X# W*hEa~@h1~ 6iK6`(67T6tؤHyF3'5 䶸fzW'L|tڅzӧ~= 4(e/bĈ'NPeРAUV*QBO޽/^{QF= w߭XbʕW\qEQoSNUZ.Qo߾RJpCgˣ;+FSӾ}aÆ?pĈN:%'''&&&&& !233322;r%K\.Çk.2.4'O޺u /sΧ~K/pwr-~m~fϞm\xl"K~;vo^pCgOp 73U:ॗ^5jR._|bnj3bĈtBOdd~ 7;v?SSSFq6lHOO˻u떚zшf4h1{O/_PK$FO4 &Z'nۍG|S} mGTߕh0#`Hxf+*m`BQH_tMuvRz+((QΫ׭M .iP4H"R(aܫ}蹕NofpaЙ.w1!!!ܵPms9LO IDAT2޸q͛ӏ?'o޼y֭SSS1aDD5\rӦM[hQQQQI{naĈW_}uUVHDT H) 舺DU`Q>8C**YEIMMMMM w!DDBѣG=rssׯ_y{9F]pڵWpDDvٳP-ܵ}I_bŊsv=`\h(MLLLǎ@Dt' @MPWwÜg Ns!T:C9%ҚIN+N0Ksȥ X2 6r,.af4xNC1QUJl-}I )o=TJXU 㣳$G3IP- K2G  0|Yyf41bmn8fK 贐h5p8] Qa jѢu\9'pBg "ą"""" |Ks_[g7o^2'5r%Qą"""Cݿ@ zD T}uiZ;<B3ZhB*-b20]3H khK/*35JB;ZݮC""W4Qiٲe~GȟJH\bu @y2<#-t7`MgpM`Mg/-j@F)eX9زtGZZ -2B,xba~b~F.1RJOsA~/'5 !(!EH=`onUBBB˪*M!'s|ADD+~.ted4 {#!Ng0fӶa:UNb`P-֌?A h A\գp3R,Ǔ#Xɨzq jmz#- b7Uj?m(((h߾}rrrUiPe}QU DDTՆ VNEeZ,P\\v] Qą""j O?4-'&l=19[|kЬYuM0!ܵY^U\h "0xꩧyE2_tbv("ucǎݴiS!""y@DDaoUVvzǟ/eEp::7yC/"" DDT}nڴiƌ\pA?pEgeBqmذ!fuQc].4Q5e 6<. *{`3εmZ\\<` DDT)2YpHᄏ/B2CKHg0PV:b6 C.}Ss+jY_z_lM/5(O!pZdIk7A&X?+/O"@V|#'c {#$83,-,a38,+bnW{_Gxoc_vKG}X!UYTCDT}""z `6:E9ŇO')fd]6Uw\h "sαڋhk3Q "vskx']K% ݈B6ZXƋ(L#~w]/DDDaF ;[nYM e tO.b`E et JJN7B+%w{eW`F\`&w&>h3 h*Z7 n`0tKFZBFqeo$NhPW=Q&MJ#W,A2 DDT3t%!!! 
3ܵYv.ZB v[ncO8kTlBi# 8u`}O7=n袋wСyaNFD)+e]2;&B*rR%NJI=5㵡|7 Q[V/DDTa\h "K.UV\iӦ5k֬Y歷ԡCdN[4np^ZW9h}9h ""*.4QMү_?yyyWV\jժC͟?>@)-Eҥzlg5nM5ˮCn)[4رcGj^Kxt^@DTmpjN:uqݿۏ}&7VytkO5a-jcO7?.عs +""и@DDDf͚5kvw(**ZvU_un_g_ O\p/z)O|,?!'q4R `38%5, "ԛsLo}yku'9 믿OWSS0p<@ݺuO{+<@DDDǦE_̝[rK]6CwFI[е y*km0(P?v"f̥ HYN\fk.~駂H0 B0{3/pcc @h j]w|`îb6n~f@\*#q}믿6nخ]pWDDDTո@DD5~GH8ʁ2Xh^|*goaWújcӬGZc@onDvך5kP~B#F8q".h2p]]s.|aLc p]oo;}uօ""0B-[Xn׺&@W1 $MUhKP:.H`2 q07R(,?Zt_FC`...E K {_=%UG/r""MִiM?K] U% FNT>Vo TǑg.PPΝc"gً7o|ZMݘ@DT}pj3fP>įwˡ:r\Úܝ0pS>n~]I̪l ?mEUSh "檫Wװ3 (tٳ =-<kps[ Ŧ_s`ek0~4B+@Q*".%5 TFqvCpJP(RDDi47TKRh#@{2w3 ƽӻx@SjqP4D< QF:%[A5Hg`)Z)^AX^-s[^nj@`g9 cKD.PݳfQBKϖ{;{2T*  YpU^@DD5Lnn2lp) w-T!O췿i`lTZ]I2C7FHݡs<*NA~]ʞDDDLu[?KAø>ܵЩ۲r6K &V\-{\gX|c5;DwH7%g֯a$Ǐk2f4l "E{:fE"zpMl]Aa\o `^eZ(`K._z `Kc8B+$͙eiv!T (k~6Hv#pBI@ϳ|VfBQaNT&Y'|F1roӄ0'YM6QFLt -٪ռQҲwzOhv01v6;Cpxz^lvC~g7hll,(SÇ0@B Iˡ %,Ыia1ͭQS?}kf޴1 s߳I)}_;hi3,go9lW(G)FreMw+6lIWFDDTp5]Ç_pC%UWȐ T214鵯z/Ki /,-źqў9N=<=ϺXcvay^ߦ8mԩSᇓ933j .4Qvڟe8WO _d7>ud=ܱQ<7?u-9|LJ`:?._9u=?]w (tpC(mΊӭ*aDDD5 Zk۶!C QΛۼyC4[3w<%KhnܤA:t]Bw cx\ꐺYхm aܖQO1b(:VB7B(P@DD՚(G}!;EQ\uAY]|$D)ɞ'!?+h`fd)NEi=mfjݟ;*u0.p~6??SN}wT1?ADDׯʕ+SRRb1ev":uC#g?^ho~}nsixv=D-*f_s֤1+Ii֍W6ةê yHWM*@DDfHMM?:q?O>ïᮈNWF9uGx Ow]#25O􈻼EDGe xQ/,]ɹ*a!!PEvvvQQy*j.4QLkر9(,7tz!~o#4AB—R4b53v5xnodC9ϷB״Y ϋ7MB HM1o0LIv|֋/k7ອn)(RPPA|yyy*S~jKIDhF 7Kcs'##?>;M%|c3]7gHBq:]SMyw~kaGe0Ϛ$efnV귞Z)K?77q DDT{qj;wΙ3Gv)~t-tz $mK-/Z_9}˾ʗzWطF_ ߼7 w_Aqvƶ]C/ז?|&zgi83.:ADD5O^^^  7AkӮefӖ=<7?Wӷ f./ZwKLsʯolDO%R7{#}{֋yG6se].ב#G|>U 4Q s_zEr#.ߘ'/jǼUO-}[$oAy$ިN70pՀApT0f{ .7\ԐCinffD2B77B(7-4RPjC@-D&5( %MNhꑚ7سu>x}ޓuɃ{-)$"02%YvUӳЃ:k.O~¿\}дVh '0e@7b7x\PcIPAU}z6*̌4p!STN:U~Q!nI&j&""r޳> &\OZL33{/_3w4Y+V=|; 2s,XP%hXAAѣG@yQ[l٪U 5]E~*^RJ%5؞䮰F1ĖS 3A)筦GΩtW `P`43UiJhM0ԓ p:ff4m2KD&5 24aOo 7#ؓPD(A3^pɧڥ|_X6yf:O [4> Lg(6"Of:WdhXʥGr S Z8Eԙ}֭c"J׮]7lO?Ua]DDā6l0{?|Gz~:u0x`9LDDۿ/ѣLuJ ۷_ֿ̪d*hӽ^oV٧e˖("㭧~k7nW_}Uz@aaիsss;AU^&Qݲe˖y楤\NBQ=2gl+~[ĻGӞU?ҲxGC)e Pdgg8tP9޽pQMTʕ+ pakK 233333322.((((( ̙ryuj"駟6 [omRӵP5Uɦ9>X)8=i JAAA u>VŊ.%%%/кu.(A-ȑ# 
g}M7tuu5?9sf̘v믿￯_~ՖLDT'ݻw*K^K9bݞPV.}$ VJbAb+֌"d\ʼC-h?5`t]:eq riC@ E0\[JBRdFV >T-?5#1Xa3Jo +h³\ޛVPdl ΐ>wx@+'X俚nݡWE(Cݯ4*w:tOR ֖-[rrr9眎;s9Çvޝ;a„;VpWTyDD{pM7ܡCѣG?Cƍ'OVUDDuw} jdw >SI=$<h݊f͚e=UvССC֯_O̜9@VVȑ#Z"`FC؇~e˖SN-(ʄ z ?Y}o~s?jk!JNR]PyTY'Fկ_ۻc%%%r}ƍ;vtU2ϧVqDD;v r]wuK.ݾ}驋u駟߿ۍV=ҵRT[&l[D ]BWNXh5>m f?@h]BB {=ُkJKJhim9eR%{{E#rzK.4O$ rK5! 0+Pu(j eZK;!d*`@ԕ RJuDU}yyyrܹs֭۶mjڳg}g!B6mtr a@%r4hĉ+,X0KF˕-盰x<˧zNN7ܷoߘ!DD5['š5k`ڵF͛'&""2>_DӵQ9e!"%%%33zڪU3f 0 DT p!LF-,Z??wmٲeʔ)ۉ4ԩÃ0^k!"""2u";x7 økƍGΚ5UU1bDJDT7M0ѭ'[05r*1.0B]f=]EUX 2O-2|PBL3A:}JKXfy,=fփlp@NaYئ GNiKěeLhc|$۶#򋒟H[(Љ @iS Dmh "8֣Gz詧z?~.K/mܸqFFFFF(.((8tKnjjGyXpx_|uxނޢE g!"4Dx'4h0vX/ꫯrƏj*$"I&m߾}Sߝ88uT^⼣NoY9 ,k5QkAƌ?yM4)ƍ1b˖-e "2III< X*j_T=ҢiO?t-DDD;bhӦߗ_~yÆ 6mڶm۱cN8!HKKkРAv:uԥKUH QU$%$5胼RZ6lG0ׁpR 2@!J<*8P*CF5KGa: BW$.eK%pKP. Qנ[斀 I#Dj:""p7,3݄bTr7c?ΐ@q|"I;{/[]kTQDD4IU.]tB(§~ "ttxuڮuvt9T-\r%Vr "<0} t!D:^ڝm:B;Zg5T5ar"J 0kn߾4nܸ{7v("e޽ׯO+Z;] Q<ҡ˔_~'+W袋.(8zj{nK~{g󭍊+77K.B>L gkpY]KkňqSBGZEv a⸖Z[ʦ[A"hZ$.ޠ B lkco~?FyX5tJg'  -S%=a&XVahAh'I @&pyr =p22*J-)_ |^qx?@DD4@ݿ~}Q !/_޻wc>cUV'Q]6|=OCD^XGp.;(Ctֹ6o[/}u<EUn DDr-(Cuֶm={]vݺuBgdds=IDT@ީX6VyӠPbqFQhv=ѫ=W^y uUC45{J{@C,Y2{liii'N1b̙3GYPP0z7mԹJ֭[؅c^ B[˨A%cx㍱c8p@>}&NUl IDATXPPp~P???WN+RBUU$Єf!ZZ\ 3r7a OofI9(Z9 5`jG6P'^mAV{cs׬Yu=^}RvqNFݕԧODDDK5:u\˻;.2'M@}DDuLVH1 aSǎ\tE˖-[pa.]./"33?iE(IDDIֵG.Ņ XbÇaȐ!;w`*NjӦ q 4/y0^;w<͛W_ٓ}/%K4o|;<OJ'*׭j(uZ-j NT`߾}uVNd׮]7lO?%ԯZزe CDT95vEYx潗srrbs=wŊv߄ƔWhODs Zd}VVș/Xm6PMi/H!@uUvv@1KJ!.:GD5*Сrٽ{7zGU0yyyӧO0`@b!"7nVTT7 Xv,ZMbhԨѽԯRV4idɒ% Xbť^:wƙ}"+222 h@WX1|rJPRRxbA vV⍼ˎjVZmڴ)GQƒ8Ś dѣG7jNװaE={xgQ>}Ν#lڴInڵ+')# Ѷlْs9ts>| /{ &ر+ʋ%"ڴiiӦ8Ԯ쁆LDz9XC@v-~w ~SOUIII3f6lشicpotlDTW\k/˿k>_t]DD͚5z(ۡCCZ/_'9s&#GV}DDuPvv6Sf^4 fR%sǏWUC8a„vyR[o5j襗^|WݺTxyȆdPl0JT%˧{Ca/>*An2`&f"Ѣʩ\玊(:aQO+"SGy V F(– P:-3"V*UęPm3s"-Fկ_zxEa߸q}ǎ]v >󩩩U\0Qx<0!"~_ӰW]u՚5kO(/8nܸ`0xmҥi:YZ- ayyyrܹs֭۶mjڳg}g+iӦ'O8p`UKDTWeddL>[o70jP08&-{K.9g233ラJnL }V=jժ{6or(a\.Wvv@DTq!@ޚkx<zzNN7ܷo_QU>}z駟|ɟs=?݃w}wF 6Ϳ0wcyIDR][;s/ݬ?[Hݡ\ @n yX+!%AtoEb.)RQ;^"挔 #*˫S64~)ΰͣUH\W5g9sԵLw3C~ r"b=mժՌ3Ǝ{wڵ @JJJT*ڰaÜyT :-?ú6P:]Q@l7nܸq՟_~sݻB }kD2 
ZlYAAAFFTIDT1 f͚3gsw"""rҥ^MO?;]  ]]vY ㏝xGCի{Q/{g.(ҫWӔ7FDD'XgCKP5%2Y_ -EԺ*Q*_] MQ `&,Vm,=! f~[rRQ|=4+1lmRy Q(z`xzzSlʔC 4\D4kA} ?pذa'u"$qDD?ֳgϞ={^XXx嗏=:!{cU]DDT`<%̵kpǏ;] v>B [KޡCnG6lX׮]!Ə/:Z#9GM49{~:J)I"dܹNR EQ'(Sfffffy.(8P%K̞=@ZZڤI~iӦN2eڵ3f̐AMG޿QUSU?8] ڵ:]Qe0SN+yyy~{ԫ999BoxҤI "oy!JfOgЅ⠛ |1N'grւ ɞhP:U!*aA/tWdF="!"!V=@Pf4D~;ypyv\.%\̾a7kz|: }Cqۿagg{<ڵ8NDD ;*b ÇÐ!C:w `UZUW^y};~>NBę)33<… qЭ[7칝eXO?TueQ5`l?:] j:=TzQ։ {VZO˖-UUQDDTw}k:h+grF֞AXǑ [ӄQFCol=P$n:@ 9եª>et[˲g-M t`*"eB5etVsa(JugguWbJ8E7l0}>v%0ʾs2b#xGC:t}v s(^SϭѣK.u""SÁ ϟcPRRxb[ˆHJJR%C@St]VKA(䓝:ur"DbFCYfYOE:t:tx≙3g9rdWKDD_RQs-֍JDCD4Qn#w2Ȟ CfuqPPFEfF0$b R"`F6H+Πڧ4`jOyP`F! DA@Ĝ_rbZS@b$p 2A2xlpžѠf{Ͳ5zʕ+{.ΐMDDT}QFׯ}B7ZwѵkW9ʠ?ZQu w[xM6KDDT;8@DDTI|NB*Ͼᠷ]jkt9DDDaQ%ɁIZ3&qG,C{!z, I"ts~lPTFy0f蝜 P1o7g SmKeЛEKwG@d'BhKDj$˙¦P= .vOJyR{llzSZ [s&\pm{deej, "Zw4UFaaʕ+Mw3 N ? VtM#F/j(22-Z59t-T;[v]+:MOY+DDT8@DDT ,8]NGs} -w"""Ł""ʘl%ߔ8XdWfkì ]40Sҍ42!E>m2@7{Dhݜ#RWZq)9,$9euBP([ں g©K c$lF  `>ªS5)F sP'Xm$O Tp+T`aOz_yD{gqF_Hm:""S/ ':,%:wrZ.ȫkB@6mn6V_|i $">8@DDxÇ`rrikv}s+ɈnX 99ڈN4%i D@Au)P  0d-93Z-Mf7Bm[*2M"foa_2d  0 +Ax @CB(('vTBF>%S]Zy A-[ 0~pg1b9Sfu(5 V U4%_ߣGAAANƌ3x`@DDulw}4{5hj8hDDF_c޼P v@駟>wjֿ;"4><UZ6b/AY –`n$eH5Ham LgJ(j21 Jd |d]v2hq !wjt(!VRt-4Úڀ(~]?/\./|穣!ƌSϛ7/"" @Q?Vg͚u58]KݢigHJ}#+:%Yh """Js=j+V 45xꩧSUVsA"@DDx'N`0P E(; 0X J+P3mf4JghPBq ZF ` ~0` hu` NHTփDURzO*zdh(qTĎ29YNDD ā""{@kt!D5F(Q:ADD`{:] QU DDD C P[Bhdl@Rj H MfDdD9 fmJ9er"t\ e%aNiA90P(]"5 IDAT+5J5PTȚ ePb<5N.<4!{+ʩJD.]>F@""*))y;k!"""rh ""JK8paÆGMfa#Q-;gϞg}ѣGZ;A|IϞ=ab2F欖0s̉-"m2$[:LgД𔖚KDRa(0 4*Y0Xif,+C!*ANzdrؓh1N$gf >*Nq" @DD`sΛo(ʃ>V!"""Rh ""Jk U4&L߿? |NCDDDTELHDDtZ:}={n޼'A*TPOI3!/CI E $: M0[yA@3Ma )"r a3ŬJf(hy(WLd0DXG+2Af4C`5p :) Y "%xGR~?AwU4F:txr1!zQ ""k?6n8Fc5=- ]AZš2.!簔][ 9eh&KZnO(@C 7@A O"BJn {%RtuRDW\QEN(֌ 3ʥlM:*tR8%Q-;N8c(v"""ӈ DDD(SL~&P*/"{Qu"";t9DDDD 3(xp.W]uy֛C%43 .3. tْlW  Y'% HJ 3]!"P`΂iE3P؃aA;@7 T{"L]<0la~5 jy 1',pm$R39hUq] DDh "":W\q?.6lXI`%C=t7z٠8t9D3j 4U) ٭[7_p `3!QUW|p<~ׅ"Р.Y `ߨ1$Xa `:B aT=. 
a;Z(A 3Yf]~|e؁&%(u-!P!f=*U*3"8JM DDh ""r@V{=]7y?% qe=33t9DN"UǏ1FZvo=ct$Z'lKs+m[Vx 4!B|IG E; !hh&3U! [ km1`M3|!e0Oj4!A'GK|,5t&0'~?EIDuG j @DDɓ'wq Cp?x믍5jٲӵ9 DDDNW49yQcˡL?#nr9]Kܖ@Ú5k6 i}Ep*ˡJG:tӵ9 DDDΛ^i׿u_ W 2|APC(TCLgPm~0 U3Vfj"t /C5ki"2r>P:*UšNRJtޢ/6%E_@BGjp^^3\)A E~ب ૣ;JhѢW^E'w%Z9rzB9]G:t+""x/_ 1*N_ছnrKq=xCQ"""))>h޼6l#qR~\0 QT|`8E[jt-DDDcFQ5ҬY>wJW] ^5pT,r=AFftZd<ۆ*(؅ ~QVϨZ7΀/TQI sW1KCY`^߭[0 $F@4J6zD0NZ}7|T5`DNh ""^z/xS|:]NU;(ڹԩo}gZ3'NWW&]QlvU5>4i9 DDDm׿Տ 8t9UXwyk Oip›oyӦM{o@ÌU';]cZF~z˩D""8@DDT=s(O'NRE x?;]p?~0j' 6~#Fxޛn8]3@DDT\.=묳ck$f{ &0cq y TUc0C %z* V]!+1(נWxu֋-;짾t^ &N|RM4霫pp8@y"Y PC_Ax*X@>~c&i SYJ͛+tD3"w4US-[.Ft- 2wEᾳ:kgE2HO<_ݭ[o߾AqW[GQFzr}DD/t!Os.Xr|OWʂ ڴiS]v]jտĆˍɟz!R-o=%%Z3 c͚53g|7ΝtEDD""K4^O D4Q"+Q(mF#X|*`W|6P_dܣ4nxo:vXw}ӧ'1W3N@B=~(~sPC_|GЧ}[~XٮiڝwYRիW^zƍ_dȐaÆׯiӦ^z˫T"@Q%o/E#lY_}V999~iƍ,YҩS?`۶m?W^yaÆs>׎KS=^˿/ꈞ={sذaQ /ѣGG X|y޽{챪4`$Qծ]f͚۷o,˩]KvW,y6߳pG}hѢSeE׿uΜ9~ j̨=V?} WNp&ൟD|D5g,[n/zuֶm={]vݺuBgdds=IDTih "".Yfm/5t! [=m%"0<7wܹs߼yٳgSnz nλP\~ziP`-q&c[Xzz^ P/Ɖ;w <,2K,={6'1BQ]N3g9rdAAѣܴiS*%"Zi۶#\xGq5|1>ѣ]v 9WFL矷m~333srr^y۷o߾}wuʕ+.]ڷo‰'i|/`믏PTX@vvc0dyς 2"aQvy祧;^pMt9Q|k<ⱱcp=ܓtw>&z `קO 6̞=SN]tQT.K/tڵO=/J|CQ3nIӾ}t-^]nذ᧟~ºw4Ukj 1pS ; ^ˇ~>bĈWϼBSKQ S :Ϗ9`9g̘e˖C ؐJJKOO G]-[(** DDD5C""ZE=`Oj8IzTK❙ӷO4KZ.뺜3DT?4_c{d K _ (%*|[{ q=tF\ СC{n DD5(Ey7ZhQzoqϞ=NV矟GӵP׿1w())Yx1֭[WeaDD ā"""J_n=WnYYY^-TU|NRY'lْ3nܸw}wݺuÇo޼ݻsssc?vXyGW\Q%LODDDn}v (3LU.jrho̻nH\SOtF>}f͚o@7`m+r~~~}ߟw^0nz\@D|#tEEEfͲ*vvaСKׯ'fΜ ++kȑU_-QBp'-|b 4eTЧOb;XpX󩩩U\0QpKحG?ο߭h<[}P۶m[y "^8]׳4iҼyƍ2@i`TK'W~Ν;nݺm۶={w M6\6f"5 5iYHpƍMmbJs regg&,XOu]ɹrF"D@QPP, FQCtg}vc;CP 7"qPnHIIIIIjjƌCD@h ""m6c߻f708~*_M9c6mڴisA|}m? 
Cۄwٹo[z'/ֹszO* \yyy>pR@!hӰ0gzXPF^}BBмQ(iJ{]~{+:扶6Ͼo!:g?֭[+**w14@0s/znݺj!7VUUmݺuݺuӧO߸qcqqĉ;t谯 V?8࣏FώӾW+6JPOSצ|޲z7\QQ3-ZԤI)S\z饹~*OBm( EzI"!SO}H3_ P芊Bҿv2lС7|GYGn׮̙3nǫ6OMoYKoyǒҥC=t '|ѠVl޼*--Z  P讹;FB8SZZZTT|[QFM4i֬ٱ۫W,nׯ߷4qkSֺ֩֩uvڝuY-[*]BGNR!@&hBw98y\8䓟i7W(*)OXܴ{?^lٲ^ԩSݛ6m͛C={rDٳgڵkR_kW>s~f|ҋN/))oׯ_ 7?ݴiSUVW_}W\ѬYN9rdr=;)bQFR4ᔆ5k 8p5w[n3fx'[40n 9q&)CFw% :$K/ <2kO=S3g !t_3gΝwOW{  _!T*+C}guB8q+D4@|/b}PYYߟJlr7cu9$hV^|>} 6,_bٲeu24@|eee!= KJJ&LP9Aė\aɒ%{ЩS.,C͞=Vc/ ~^{mOc&Nرc…^nݺ[@.  !o~O=nǴh!O>N3Lj{ <㏿ y䑝 2d!{裏ۜwhO>dȐ7|3p… ~gCi=STTt=|\sM> 3 ':t0I&ua/Rw}wo߾;h\i޼qƍlٲe˖hbO#/K.d?˗/_^H9׽{ݻ}LF 6lذY@3I&y^E;vSTQh~...NR^ KUUUEEEZӒ CҒT8ksKF*Ғ FUUUAKwIKmv/xm>C;vzA}K_ !TTTTTT{-%w^TUUiɆAK6 t:߫ -`hɆa͚5k֬yV\ RdŊ s5W^zk~+.\xwq^ [d_|Gu]^ e„ sνﵐ+W{:t|ϭ!I&mc3.~38e˖>h~{M6vѣGCk.ʪ g44L ,-[lYu_KVB={TziӦ!2uڴiB8ꨣ^{wBX%mDE~Rk9ō7V!nݺs^K@4 A  ?Bn rKNh @4 AFD#h4h p@הaPe˖A)^ yIDAT"uldPZZZRR t:5͜9cm۶m~Yz[o5`|/su֭}^W_4hP*Z/k׮]N_xA{- ~^e˖-<̠A7n? _4h @4 AFD#h4h @4 AFD#h4h @4 AFD#h4PO>lذN:hѢYf]t9r̙34+L/۹\еkf͚iӦW^'O^vm^'|2iҤ}tAM4ҥː!CƏ}bvQɢZ^xKJJR3| /YZ24ʕ+O:= <:_>Ο?׉Ǐ/*M־}y6X8e˖v۞&fWu̝J% _EEaf0-YjSJ-Yh";ٕRKE}_m-FcǎҒ4gKؼy!C/^BhԨѩzGVUU-\pܹt駟8p/XVVVseB-Z8#͛7iI&R'|oܸqƌ+WO?_~K._gC7:k!Ν;۷cǎW~,Y~+ibvQɺZ#G{% _-K% M]VDs*RjB|&jvqeeeoOJ޺>'uƐ!Cs̩?cd]J-YΝ|S-YjYJ-Yh"SٕRKɻ7lذ]N6-9_:d\.I-[lʕ!;nРAݻw>}B6mZ`A͉^N?I)ThӦMGlg>z/!o|Ż8䓓_=3cNeWʠ% ۚ5k^YYٿ;oOôde),:٨|w=]eWo_WC9dgyfO1IYG=z8v;*hܸqfgvQǜʮAK#FZụ% \KdᩳceQf)elܸ11s':qsOӉ'fvj P->>OӘ䔇BN2;"Jz衷zk~9f͚uuԨQsq,YlOtǶlٲHjiĈ/^xСCw=}G}4Jٟ]E1+eВo5kVQQԩSw#N,p/eВ*E)hѢm۶XYZ2:w ￧K|O>dUVݻwO>KJJ9UVe/_|Yg__5oTɒ:==]۶mׯ_vڊ<0WE۷o֮];o޼g/R/>c2c:ֱڔ2hB5k֬n)0a„},dAK::ZnR[olM7߿pO>u'p°a9眝C%477n<쳓q̘1M6J>m۶jժdf26oxf,LYRK:TvԒ&߭[?]bsΛo^wuW_KƗf®6n8f̘^ӦM'Ocǎ8ՙ :,X ٙ9'bԩ{ػwdҥKK.Iؚή"_->i:6cƌniCx͚5kCZeW}Ғ&bE1v[f)e|M޷&M,^x/N:iҤƍZ2:wdO>/++$f̘#̚5kO4i|WTT|*9])kCK֙!3gSLI 803jԨd,4Y6dUu̻]K,̝v%={Ә:*%DKF %\rtr]+VLR]e.̑Gl$ҥK׮]רx 6t+dcӦMm۶ VDs*RjɆAK6ZeE1+,4!s]UUU%ɆN@nG?C_~}Nk~_-Zt1vB*:=={l۶sϭ[UV;M~lviپ/O<7زeҥK35UWW!ƍr!*9])dA)++kttٯJaYcYu̝K% M]VDs*RjBӫWƍرc{pd[nɆ/׈A۱cGB:tXn]-g=#?aÆv@iB۷oGN'?iJuu]hQ,s=;o~'N9唚:NvԒޯ 
%둽RK::NvԒh!T*5{]>:tגq ȡiӦ%oߞ رcnZ~nTgyh%d۴i?桉'&8txA_PWtʔ)5N:5sѝ'x*])d=AK#{),4u\u̝J% М9s7]vO=TCgn߾}ryHK%h 2;#Nڗe˖e&>Ùo1zq;:tnh˖-_;ʆڶmpd.KB޽;nܸ?裏袋v]E1w+/yOD-Y_코ZqE1w+,@w2o~>}..o߾JqUUU;Ғ ȡ䴥ZPsԩS۴iۑwqǞkV;W_}5/8qbQSS\\|u*])dϠ!%}RK::NvԒ .mERѣw:$CK"h 2Wt:aÆɓ'8k׮M49{uׯZj;?SNM6mݺWɓ'g9{_o֕W^9hР.]$رc/_UDs'RjW!%ڔRK::NvԒhܹFҥK͛7o~_xᅯgi(Rt ϞȂFD#h4h @4 AFD#h4h @4 AFD#h4h @4 AFD#h4h @4 AFD#h4h @4 AFD#h4h @4 AFD#h4h @4 AFD#h4h @4 AFD#h4h @4 AFD#h4h @4 AFD#h4h @4 AFD#h4h @4 AFD#h4h @4 AFD#h4h @4 AFD#h4h @4 AFD#h4h)_IENDB`sparr/man/CV.Rd0000644000176200001440000001766214012076542012770 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/LIK.density.R, R/LSCV.density.R \name{LIK.density} \alias{LIK.density} \alias{LSCV.density} \title{Cross-validation bandwidths for spatial kernel density estimates} \usage{ LIK.density( pp, hlim = NULL, hseq = NULL, resolution = 64, edge = TRUE, auto.optim = TRUE, type = c("fixed", "adaptive"), seqres = 30, parallelise = NULL, zero.action = 0, verbose = TRUE, ... ) LSCV.density( pp, hlim = NULL, hseq = NULL, resolution = 64, edge = TRUE, auto.optim = TRUE, type = c("fixed", "adaptive"), seqres = 30, parallelise = NULL, zero.action = 0, verbose = TRUE, ... ) } \arguments{ \item{pp}{An object of class \code{\link[spatstat.geom]{ppp}} giving the observed 2D data to be smoothed.} \item{hlim}{An optional vector of length 2 giving the limits of the optimisation routine with respect to the bandwidth. If unspecified, the function attempts to choose this automatically.} \item{hseq}{An optional increasing sequence of bandwidth values at which to manually evaluate the optimisation criterion. 
Used only in the case \code{(!auto.optim && is.null(hlim))}.} \item{resolution}{Spatial grid size; the optimisation will be based on a [\code{resolution} \eqn{\times}{x} \code{resolution}] density estimate.} \item{edge}{Logical value indicating whether to edge-correct the density estimates used.} \item{auto.optim}{Logical value indicating whether to automate the numerical optimisation using \code{\link{optimise}}. If \code{FALSE}, the optimisation criterion is evaluated over \code{hseq} (if supplied), or over a sequence of values controlled by \code{hlim} and \code{seqres}.} \item{type}{A character string; \code{"fixed"} (default) performs classical leave-one-out cross-validation for the fixed-bandwidth estimator. Alternatively, \code{"adaptive"} utilises multiscale adaptive kernel estimation (Davies & Baddeley, 2018) to run the cross-validation in an effort to find a suitable global bandwidth for the adaptive estimator. Note that data points are not `left out' of the pilot density estimate when using this option (this capability is currently in development). See also the entry for \code{...}.} \item{seqres}{Optional resolution of an increasing sequence of bandwidth values. Only used if \code{(!auto.optim && is.null(hseq))}.} \item{parallelise}{Numeric argument to invoke parallel processing, giving the number of CPU cores to use when \code{!auto.optim} \bold{and} \code{type = "fixed"}. Experimental. Test your system first using \code{parallel::detectCores()} to identify the number of cores available to you.} \item{zero.action}{A numeric integer, either \code{-1}, \code{0} (default), \code{1} or \code{2} controlling how the function should behave in response to numerical errors at very small bandwidths, when such a bandwidth results in one or more zero or negative density values during the leave-one-out computations. 
See `Details'.} \item{verbose}{Logical value indicating whether to provide function progress commentary.} \item{...}{Additional arguments controlling pilot density estimation and multi-scale bandwidth-axis resolution when \code{type = "adaptive"}. Relevant arguments are \code{hp}, \code{pilot.density}, \code{gamma.scale}, and \code{trim} from \code{\link{bivariate.density}}; and \code{dimz} from \code{\link{multiscale.density}}. If \code{hp} is missing and required, the function makes a (possibly recursive) call to \code{LSCV.density} to set this using fixed-bandwidth LSCV. The remaining defaults are \code{pilot.density = pp}, \code{gamma.scale = "geometric"}, \code{trim = 5}, and \code{dimz = resolution}.} } \value{ A single numeric value of the estimated bandwidth (if \code{auto.optim = TRUE}). Otherwise, a \eqn{[}\code{seqres} \eqn{x} 2\eqn{]} matrix giving the bandwidth sequence and corresponding CV function value. } \description{ Isotropic fixed or global (for adaptive) bandwidth selection for standalone 2D density/intensity based on either unbiased least squares cross-validation (LSCV) or likelihood (LIK) cross-validation. } \details{ This function implements the bivariate, edge-corrected versions of fixed-bandwidth least squares cross-validation and likelihood cross-validation as outlined in Sections 3.4.3 and 3.4.4 of Silverman (1986) in order to select an optimal fixed smoothing bandwidth. With \code{type = "adaptive"} it may also be used to select the global bandwidth for adaptive kernel density estimates, making use of multi-scale estimation (Davies and Baddeley, 2018) via \code{\link{multiscale.density}}. Note that for computational reasons, the leave-one-out procedure is not performed on the pilot density in the adaptive setting; it is only performed on the final stage estimate. Current development efforts include extending this functionality, see \code{\link{SLIK.adapt}}. See also `Warning' below. 
Where \code{LSCV.density} is based on minimisation of an unbiased estimate of the mean integrated squared error (MISE) of the density, \code{LIK.density} is based on maximisation of the cross-validated leave-one-out average of the log-likelihood of the density estimate with respect to \eqn{h}. In both functions, the argument \code{zero.action} can be used to control the level of severity in response to small bandwidths that result (due to numerical error) in at least one density value being zero or less. When \code{zero.action = -1}, the function strictly forbids bandwidths that would result in one or more \emph{pixel} values of a kernel estimate of the original data (i.e. anything over the whole region) being zero or less---this is the most restrictive truncation. With \code{zero.action = 0} (default), the function automatically forbids bandwidths that yield erroneous values at the leave-one-out data point locations only. With \code{zero.action = 1}, the minimum machine value (see \code{.Machine$double.xmin} at the prompt) is used to replace these individual leave-one-out values. When \code{zero.action = 2}, the minimum value of the valid (greater than zero) leave-one-out values is used to replace any erroneous leave-one-out values. } \section{Warning}{ Leave-one-out CV for bandwidth selection in kernel density estimation is notoriously unstable in practice and has a tendency to produce rather small bandwidths, particularly for spatial data. Satisfactory bandwidths are not guaranteed for every application; \code{zero.action} can curb adverse numeric effects for very small bandwidths during the optimisation procedures. This method can also be computationally expensive for large data sets and fine evaluation grid resolutions. The user may also need to experiment with adjusting \code{hlim} to find a suitable minimum. 
} \examples{ data(pbc) pbccas <- split(pbc)$case LIK.density(pbccas) LSCV.density(pbccas) \donttest{ #* FIXED # custom limits LIK.density(pbccas,hlim=c(0.01,4)) LSCV.density(pbccas,hlim=c(0.01,4)) # disable edge correction LIK.density(pbccas,hlim=c(0.01,4),edge=FALSE) LSCV.density(pbccas,hlim=c(0.01,4),edge=FALSE) # obtain objective function hcv <- LIK.density(pbccas,hlim=c(0.01,4),auto.optim=FALSE) plot(hcv);abline(v=hcv[which.max(hcv[,2]),1],lty=2,col=2) #* ADAPTIVE LIK.density(pbccas,type="adaptive") LSCV.density(pbccas,type="adaptive") # change pilot bandwidth used LIK.density(pbccas,type="adaptive",hp=2) LSCV.density(pbccas,type="adaptive",hp=2) } } \references{ Davies, T.M. and Baddeley A. (2018), Fast computation of spatially adaptive kernel estimates, \emph{Statistics and Computing}, \bold{28}(4), 937-956. Silverman, B.W. (1986), \emph{Density Estimation for Statistics and Data Analysis}, Chapman & Hall, New York. Wand, M.P. and Jones, C.M., 1995. \emph{Kernel Smoothing}, Chapman & Hall, London. } \seealso{ \code{\link{SLIK.adapt}} and functions for bandwidth selection in package \code{\link{spatstat}}: \code{\link[spatstat.core]{bw.diggle}}; \code{\link[spatstat.core]{bw.ppl}}; \code{\link[spatstat.core]{bw.scott}}; \code{\link[spatstat.core]{bw.frac}}. } \author{ T. M. Davies } sparr/man/risk.Rd0000644000176200001440000002204014012076542013412 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/risk.R \name{risk} \alias{risk} \alias{rrs} \title{Spatial relative risk/density ratio} \usage{ risk( f, g = NULL, log = TRUE, h0 = NULL, hp = h0, adapt = FALSE, tolerate = FALSE, doplot = FALSE, pilot.symmetry = c("none", "f", "g", "pooled"), epsilon = 0, verbose = TRUE, ... ) } \arguments{ \item{f}{Either a pre-calculated object of class \code{\link{bivden}} representing the `case' (numerator) density estimate, or an object of class \code{\link[spatstat.geom]{ppp}} giving the observed case data. 
Alternatively, if \code{f} is \code{\link[spatstat.geom]{ppp}} object with dichotomous factor-valued \code{\link[spatstat.geom]{marks}}, the function treats the first level as the case data, and the second as the control data, obviating the need to supply \code{g}.} \item{g}{As for \code{f}, for the `control' (denominator) density; this object must be of the same class as \code{f}. Ignored if, as stated above, \code{f} contains both case and control observations.} \item{log}{Logical value indicating whether to return the (natural) log-transformed relative risk function as recommended by Kelsall and Diggle (1995a). Defaults to \code{TRUE}, with the alternative being the raw density ratio.} \item{h0}{A single positive numeric value or a vector of length 2 giving the global bandwidth(s) to be used for case/control density estimates; defaulting to a common oversmoothing bandwidth computed via \code{\link{OS}} on the pooled data using \code{nstar = "geometric"} if unsupplied. Ignored if \code{f} and \code{g} are already \code{\link{bivden}} objects.} \item{hp}{A single numeric value or a vector of length 2 giving the pilot bandwidth(s) to be used for fixed-bandwidth estimation of the pilot densities for adaptive risk surfaces. Ignored if \code{adapt = FALSE} or if \code{f} and \code{g} are already \code{\link{bivden}} objects.} \item{adapt}{A logical value indicating whether to employ adaptive smoothing for internally estimating the densities. Ignored if \code{f} and \code{g} are already \code{\link{bivden}} objects.} \item{tolerate}{A logical value indicating whether to internally calculate a corresponding asymptotic p-value surface (for tolerance contours) for the estimated relative risk function. See `Details'.} \item{doplot}{Logical. If \code{TRUE}, an image plot of the estimated relative risk function is produced using various visual presets. 
If additionally \code{tolerate} was \code{TRUE}, asymptotic tolerance contours are automatically added to the plot at a significance level of 0.05 for elevated risk (for more flexible options for calculating and plotting tolerance contours, see \code{\link{tolerance}} and \code{\link{tol.contour}}).} \item{pilot.symmetry}{A character string used to control the type of symmetry, if any, to use for the bandwidth factors when computing an adaptive relative risk surface. See `Details'. Ignored if \code{adapt = FALSE}.} \item{epsilon}{A single non-negative numeric value used for optional scaling to produce additive constant to each density in the raw ratio (see `Details'). A zero value requests no additive constant (default).} \item{verbose}{Logical value indicating whether to print function progress during execution.} \item{...}{Additional arguments passed to any internal calls of \code{\link{bivariate.density}} for estimation of the requisite densities. Ignored if \code{f} and \code{g} are already \code{\link{bivden}} objects.} } \value{ An object of class \code{"rrs"}. This is a named list with the following components: \item{rr}{A pixel \code{\link[spatstat.geom]{im}}age of the estimated risk surface.} \item{f}{An object of class \code{\link{bivden}} used as the numerator or `case' density estimate.} \item{g}{An object of class \code{\link{bivden}} used as the denominator or `control' density estimate.} \item{P}{Only included if \code{tolerate = TRUE}. A pixel \code{\link[spatstat.geom]{im}}age of the \emph{p}-value surface for tolerance contours; \code{NULL} otherwise.} } \description{ Estimates a \emph{relative risk} function based on the ratio of two 2D kernel density estimates. } \details{ The relative risk function is defined here as the ratio of the `case' density to the `control' (Bithell, 1990; 1991). Using kernel density estimation to model these densities (Diggle, 1985), we obtain a workable estimate thereof. 
This function defines the risk function \emph{r} in the following fashion: \cr\cr \emph{r}\code{ = (fd + epsilon*max(gd))/(gd + epsilon*max(gd))}, \cr\cr where \code{fd} and \code{gd} denote the case and control density estimates respectively. Note the (optional) additive constants defined by \code{epsilon} times the maximum of each of the densities in the numerator and denominator respectively (see Bowman and Azzalini, 1997). The log-risk function \emph{rho}, given by \emph{rho} = log[\emph{r}], is argued to be preferable in practice as it imparts a sense of symmetry in the way the case and control densities are treated (Kelsall and Diggle, 1995a;b). The option of log-transforming the returned risk function is therefore selected by default. When computing adaptive relative risk functions, the user has the option of obtaining a so-called \emph{symmetric} estimate (Davies et al. 2016) via \code{pilot.symmetry}. This amounts to choosing the same pilot density for both case and control densities. By choosing \code{"none"} (default), the result uses the case and control data separately for the fixed-bandwidth pilots, providing the original asymmetric density-ratio of Davies and Hazelton (2010). By selecting either of \code{"f"}, \code{"g"}, or \code{"pooled"}, the pilot density is calculated based on the case, control, or pooled case/control data respectively (using \code{hp[1]} as the fixed bandwidth). Davies et al. (2016) noted some beneficial practical behaviour of the symmetric adaptive surface over the asymmetric. If the user selects \code{tolerate = TRUE}, the function internally computes asymptotic tolerance contours as per Hazelton and Davies (2009) and Davies and Hazelton (2010). When \code{adapt = FALSE}, the reference density estimate (argument \code{ref.density} in \code{\link{tolerance}}) is taken to be the estimated control density. 
The returned pixel \code{\link[spatstat.geom]{im}}age of \emph{p}-values (see `Value') is interpreted as an upper-tailed test i.e. smaller \emph{p}-values represent greater evidence in favour of significantly increased risk. For greater control over calculation of tolerance contours, use \code{\link{tolerance}}. } \examples{ data(pbc) pbccas <- split(pbc)$case pbccon <- split(pbc)$control h0 <- OS(pbc,nstar="geometric") # Fixed pbcrr1 <- risk(pbccas,pbccon,h0=h0,tolerate=TRUE) # Asymmetric adaptive pbcrr2 <- risk(pbccas,pbccon,h0=h0,adapt=TRUE,hp=c(OS(pbccas)/2,OS(pbccon)/2), tolerate=TRUE,davies.baddeley=0.05) # Symmetric (pooled) adaptive pbcrr3 <- risk(pbccas,pbccon,h0=h0,adapt=TRUE,tolerate=TRUE,hp=OS(pbc)/2, pilot.symmetry="pooled",davies.baddeley=0.05) # Symmetric (case) adaptive; from two existing 'bivden' objects f <- bivariate.density(pbccas,h0=h0,hp=2,adapt=TRUE,pilot.density=pbccas, edge="diggle",davies.baddeley=0.05,verbose=FALSE) g <- bivariate.density(pbccon,h0=h0,hp=2,adapt=TRUE,pilot.density=pbccas, edge="diggle",davies.baddeley=0.05,verbose=FALSE) pbcrr4 <- risk(f,g,tolerate=TRUE,verbose=FALSE) par(mfrow=c(2,2)) plot(pbcrr1,override.par=FALSE,main="Fixed") plot(pbcrr2,override.par=FALSE,main="Asymmetric adaptive") plot(pbcrr3,override.par=FALSE,main="Symmetric (pooled) adaptive") plot(pbcrr4,override.par=FALSE,main="Symmetric (case) adaptive") } \references{ Bithell, J.F. (1990), An application of density estimation to geographical epidemiology, \emph{Statistics in Medicine}, \bold{9}, 691-701. Bithell, J.F. (1991), Estimation of relative risk functions, \emph{Statistics in Medicine}, \bold{10}, 1745-1751. Bowman, A.W. and Azzalini A. (1997), \emph{Applied Smoothing Techniques for Data Analysis: The Kernel Approach with S-Plus Illustrations}, Oxford University Press Inc., New York. Davies, T.M. and Hazelton, M.L. (2010), Adaptive kernel estimation of spatial relative risk, \emph{Statistics in Medicine}, \bold{29}(23) 2423-2437. 
Davies, T.M., Jones, K. and Hazelton, M.L. (2016), Symmetric adaptive smoothing regimens for estimation of the spatial relative risk function, \emph{Computational Statistics & Data Analysis}, \bold{101}, 12-28. Diggle, P.J. (1985), A kernel method for smoothing point process data, \emph{Journal of the Royal Statistical Society Series C}, \bold{34}(2), 138-147. Hazelton, M.L. and Davies, T.M. (2009), Inference based on kernel estimates of the relative risk function in geographical epidemiology, \emph{Biometrical Journal}, \bold{51}(1), 98-109. Kelsall, J.E. and Diggle, P.J. (1995a), Kernel estimation of relative risk, \emph{Bernoulli}, \bold{1}, 3-16. Kelsall, J.E. and Diggle, P.J. (1995b), Non-parametric estimation of spatial variation in relative risk, \emph{Statistics in Medicine}, \bold{14}, 2335-2342. } \author{ T.M. Davies } sparr/man/available.h0.Rd0000644000176200001440000000310214012076542014666 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/available.h0.R \name{available.h0} \alias{available.h0} \title{Available global bandwidth range} \usage{ available.h0(...) } \arguments{ \item{...}{Any number of objects of class \code{\link{msden}}; possibly named.} } \value{ A numeric vector of length 2 providing the range of available global bandwidths compatible with all supplied multi-scale density estimates. } \description{ Gets universally available global bandwidths as represented by several multi-scale density estimate objects } \details{ This simple function merely accesses and returns the maximum lower limit and minimum upper limit of all \code{h0range} components of the \code{\link{msden}} objects passed through \code{...}. Natural numeric error arising from any changes to the bandwidth-axis discretisation resolution in the creation of the \code{\link{msden}} objects (i.e. 
through the `\code{dimz}' argument) means individual global bandwidth ranges can differ slightly between affected multi-scale estimates, even if they are all applied to the same data set. Can additionally be useful when, for example, creating asymmetric relative risk surfaces based on slices of multi-scale densities with respect to the case and control data sets, because the bandwidth factors differ. Throws an error if one or more of the \code{h0range} components is incompatible (i.e. all \code{h0range} components must overlap). } \examples{ # See ?multiscale.slice } \seealso{ \code{\link{multiscale.density}}, \code{\link{multiscale.slice}} } \author{ T.M. Davies } sparr/man/LSCV.risk.Rd0000644000176200001440000002252114012076542014164 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/LSCV.risk.R \name{LSCV.risk} \alias{LSCV.risk} \title{Jointly optimal bandwidth selection for the spatial relative risk function} \usage{ LSCV.risk( f, g = NULL, hlim = NULL, hseq = NULL, type = c("fixed", "adaptive"), method = c("kelsall-diggle", "hazelton", "davies"), resolution = 64, edge = TRUE, hp = NULL, pilot.symmetry = c("none", "f", "g", "pooled"), auto.optim = TRUE, seqres = 30, parallelise = NA, verbose = TRUE, ... ) } \arguments{ \item{f}{Either a pre-calculated object of class \code{\link{bivden}} representing the `case' (numerator) density estimate, or an object of class \code{\link[spatstat.geom]{ppp}} giving the observed case data. Alternatively, if \code{f} is \code{\link[spatstat.geom]{ppp}} object with dichotomous factor-valued \code{\link[spatstat.geom]{marks}}, the function treats the first level as the case data, and the second as the control data, obviating the need to supply \code{g}.} \item{g}{As for \code{f}, for the `control' (denominator) density; this object must be of the same class as \code{f}. 
Ignored if, as stated above, \code{f} contains both case and control observations.} \item{hlim}{An optional vector of length 2 giving the limits of the optimisation routine with respect to the bandwidth. If unspecified, the function attempts to choose this automatically.} \item{hseq}{An optional increasing sequence of bandwidth values at which to manually evaluate the optimisation criterion. Used only in the case \code{(!auto.optim && is.null(hlim))}.} \item{type}{A character string; \code{"fixed"} (default) performs classical leave-one-out cross-validation for a jointly optimal fixed bandwidth. Alternatively, \code{"adaptive"} utilises multiscale adaptive kernel estimation (Davies & Baddeley, 2018) to run the cross-validation in an effort to find a suitable jointly optimal, common global bandwidth for the adaptive relative risk function. See `Details'.} \item{method}{A character string controlling the selector to use. There are three types, based on either the mean integrated squared error (MISE) (Kelsall and Diggle, 1995; default -- \code{method = "kelsall-diggle"}); a weighted MISE (Hazelton, 2008 -- \code{method = "hazelton"}); or an approximation to the asymptotic MISE (Davies, 2013 -- \code{method = "davies"}). See `Details'.} \item{resolution}{Spatial grid size; the optimisation will be based on a [\code{resolution} \eqn{\times}{x} \code{resolution}] density estimate.} \item{edge}{Logical value indicating whether to edge-correct the density estimates used.} \item{hp}{A single numeric value or a vector of length 2 giving the pilot bandwidth(s) to be used for estimation of the pilot densities for adaptive risk surfaces. Ignored if \code{type = "fixed"}.} \item{pilot.symmetry}{A character string used to control the type of symmetry, if any, to use for the bandwidth factors when computing an adaptive relative risk surface. See `Details'. 
Ignored if \code{type = "fixed"}.} \item{auto.optim}{Logical value indicating whether to automate the numerical optimisation using \code{\link{optimise}}. If \code{FALSE}, the optimisation criterion is evaluated over \code{hseq} (if supplied), or over a seqence of values controlled by \code{hlim} and \code{seqres}.} \item{seqres}{Optional resolution of an increasing sequence of bandwidth values. Only used if \code{(!auto.optim && is.null(hseq))}.} \item{parallelise}{Numeric argument to invoke parallel processing, giving the number of CPU cores to use when \code{!auto.optim}. Experimental. Test your system first using \code{parallel::detectCores()} to identify the number of cores available to you.} \item{verbose}{Logical value indicating whether to provide function progress commentary.} \item{...}{Additional arguments such as \code{dimz} and \code{trim} to be passed to the internal calls to \code{\link{multiscale.density}}.} } \value{ A single numeric value of the estimated bandwidth (if \code{auto.optim = TRUE}). Otherwise, a list of two numeric vectors of equal length giving the bandwidth sequence (as \code{hs}) and corresponding CV function value (as \code{CV}). } \description{ Methods to find a jointly optimal, common case-control isotropic bandwidth for use in estimation of the fixed or adaptive kernel-smoothed relative risk function. } \details{ Given the established preference of using a common bandwidth for both case and control density estimates when constructing a relative risk surface, This function calculates a `jointly optimal', common isotropic LSCV bandwidth for the (Gaussian) kernel-smoothed relative risk function (case-control density-ratio). It can be shown that choosing a bandwidth that is equal for both case and control density estimates is preferable to computing `separately optimal' bandwidths (Kelsall and Diggle, 1995). 
The user can choose to either calculate a common smoothing parameter for a fixed-bandwidth relative risk surface (\code{type = "fixed"}; default), or a common global bandwidth for an adaptive risk surface (\code{type = "adaptive"}). See further comments below. \itemize{ \item\code{method = "kelsall-diggle"}: the function computes the common bandwidth which minimises the approximate mean integrated squared error (MISE) of the log-transformed risk surface (Kelsall and Diggle, 1995). \item\code{method = "hazelton"}: the function minimises a \emph{weighted-by-control} MISE of the (raw) relative risk function (Hazelton, 2008). \item\code{method = "davies"}: the optimal bandwidth is one that minimises a crude plug-in approximation to the \emph{asymptotic} MISE (Davies, 2013). Only possible for \code{type = "fixed"}. } For jointly optimal, common global bandwidth selection when \code{type = "adaptive"}, the optimisation routine utilises \code{\link{multiscale.density}}. Like \code{\link{LSCV.density}}, the leave-one-out procedure does not affect the pilot density, for which additional control is offered via the \code{hp} and \code{pilot.symmetry} arguments. The user has the option of obtaining a so-called \emph{symmetric} estimate (Davies et al. 2016) via \code{pilot.symmetry}. This amounts to choosing the same pilot density for both case and control densities. By choosing \code{"none"} (default), the result uses the case and control data separately for the fixed-bandwidth pilots, providing the original asymmetric density-ratio of Davies and Hazelton (2010). By selecting either of \code{"f"}, \code{"g"}, or \code{"pooled"}, the pilot density is calculated based on the case, control, or pooled case/control data respectively (using \code{hp[1]} as the fixed bandwidth). Davies et al. (2016) noted some beneficial practical behaviour of the symmetric adaptive surface over the asymmetric. 
(The pilot bandwidth(s), if not supplied in \code{hp}, are calculated internally via default use of \code{\link{LSCV.density}}, using the requested symmetric-based data set, or separately with respect to the case and control datasets \code{f} and \code{g} if \code{pilot.symmetry = "none"}.) } \section{Warning}{ The jointly optimal bandwidth selector can be computationally expensive for large data sets and fine evaluation grid resolutions. The user may need to experiment with adjusting \code{hlim} to find a suitable minimum. } \examples{ \donttest{ data(pbc) pbccas <- split(pbc)$case pbccon <- split(pbc)$control # FIXED (for common h) LSCV.risk(pbccas,pbccon) LSCV.risk(pbccas,pbccon,method="hazelton") hcv <- LSCV.risk(pbccas,pbccon,method="davies",auto.optim=FALSE) plot(hcv[,1],log(hcv[,2]));abline(v=hcv[which.min(hcv[,2]),1],col=2,lty=2) # ADAPTIVE (for common h0) LSCV.risk(pbccas,pbccon,type="adaptive") # change pilot bandwidths used LSCV.risk(pbccas,pbccon,type="adaptive",hp=c(OS(pbccas)/2,OS(pbccon)/2)) # specify pooled-data symmetric relative risk estimator LSCV.risk(pbccas,pbccon,type="adaptive",hp=OS(pbc),pilot.symmetry="pooled") # as above, for Hazelton selector LSCV.risk(pbccas,pbccon,type="adaptive",method="hazelton") LSCV.risk(pbccas,pbccon,type="adaptive",method="hazelton",hp=c(OS(pbccas)/2,OS(pbccon)/2)) LSCV.risk(pbccas,pbccon,type="adaptive",method="hazelton",hp=OS(pbc),pilot.symmetry="pooled") } } \references{ Davies, T. M. (2013), Jointly optimal bandwidth selection for the planar kernel-smoothed density-ratio, \emph{Spatial and Spatio-temporal Epidemiology}, \bold{5}, 51-65. Davies, T.M. and Baddeley A. (2018), Fast computation of spatially adaptive kernel estimates, \emph{Statistics and Computing}, \bold{28}(4), 937-956. Davies, T.M. and Hazelton, M.L. (2010), Adaptive kernel estimation of spatial relative risk, \emph{Statistics in Medicine}, \bold{29}(23) 2423-2437. Davies, T.M., Jones, K. and Hazelton, M.L. 
(2016), Symmetric adaptive smoothing regimens for estimation of the spatial relative risk function, \emph{Computational Statistics & Data Analysis}, \bold{101}, 12-28. Hazelton, M. L. (2008), Letter to the editor: Kernel estimation of risk surfaces without the need for edge correction, \emph{Statistics in Medicine}, \bold{27}, 2269-2272. Kelsall, J.E. and Diggle, P.J. (1995), Kernel estimation of relative risk, \emph{Bernoulli}, \bold{1}, 3-16. Silverman, B.W. (1986), \emph{Density Estimation for Statistics and Data Analysis}, Chapman & Hall, New York. Wand, M.P. and Jones, C.M., 1995. \emph{Kernel Smoothing}, Chapman & Hall, London. } \seealso{ \code{\link{bivariate.density}} } \author{ T. M. Davies } sparr/man/SLIK.adapt.Rd0000644000176200001440000001422014012076542014275 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/SLIK.adapt.R \name{SLIK.adapt} \alias{SLIK.adapt} \title{Simultaneous global/pilot likelihood bandwidth selection} \usage{ SLIK.adapt( pp, hold = TRUE, start = rep(OS(pp), 2), hlim = NULL, edge = TRUE, zero.action = c(-1, 0), optim.control = list(), parallelise = NULL, verbose = TRUE, ... ) } \arguments{ \item{pp}{An object of class \code{\link[spatstat.geom]{ppp}} giving the observed 2D data to be smoothed.} \item{hold}{Logical value indicating whether to hold the global and pilot bandwidths equal throughout the optimisation; defaults to \code{TRUE}. See `Details'.} \item{start}{A positively-valued numeric vector of length 2 giving the starting values to be used for the global/pilot optimisation routine when \code{hold = FALSE}. Defaults to the oversmoothing bandwidth (\code{\link{OS}}) for both values; ignored when \code{hold = TRUE}.} \item{hlim}{An optional vector of length 2 giving the limits of the optimisation routine with respect to the bandwidth when \code{hold = TRUE}. If unspecified, the function attempts to choose this automatically. 
Ignored when \code{hold = FALSE}.} \item{edge}{Logical value indicating whether to edge-correct the density estimates used.} \item{zero.action}{A numeric vector of length 2, each value being either \code{-1}, \code{0} (default), \code{1} or \code{2} controlling how the function should behave in response to numerical errors at very small bandwidths, when such a bandwidth results in one or more zero or negative density values during the leave-one-out computations. See `Details'.} \item{optim.control}{An optional list to be passed to the \code{control} argument of \code{\link[stats]{optim}} for further control over the numeric optimisation when \code{hold = FALSE}. See the documentation for \code{\link[stats]{optim}} for further details.} \item{parallelise}{Numeric argument to invoke parallel processing in the brute force leave-one-out calculations, giving the number of CPU cores to use. Experimental. Test your system first using \code{parallel::detectCores()} to identify the number of cores available to you. If \code{NA} (default), no parallelisation performed and a single loop is used.} \item{verbose}{Logical value indicating whether to provide function progress commentary.} \item{...}{Additional arguments controlling density estimation for the internal calculations. Relevant arguments are \code{resolution}, \code{gamma.scale}, and \code{trim}. If unsupplied these default to \code{64}, \code{"geometric"}, and \code{5} respectively; see \code{\link{bivariate.density}} for a further explanation of these arguments.} } \value{ A numeric vector of length 2 giving the likelihood-maximised global and pilot bandwidths. } \description{ Isotropic global and pilot bandwidth selection for adaptive density/intensity based on likelihood cross-validation. } \details{ This function is a generalisation of \code{\link{LIK.density}}, and is used in attempts to simultaneously choose an optimal global and pilot bandwidth for adaptive kernel density estimates. 
Where \code{\link{LIK.density}} for adaptive estimates assumes the pilot density is held constant (and is not subject to the leave-one-out operations), this function allows the pilot bandwidth to vary alongside the global. Thus, in contrast to \code{\link{LIK.density}} the internal leave-one-out operations now also affect the pilot estimation stage. Hence, the set of variable bandwidths changes as each point is left out. In turn, this means the leave-one-out operations must be computed by brute force, and this is computationally expensive. Identifiability problems can sometimes arise when the global and pilot bandwidths are allowed to `float freely' in the bivariate optimisation routine, which is the default behaviour of the function (with \code{hold = FALSE}). This can be curbed by setting \code{hold = TRUE}, which forces both the global and pilot to be held at the same value during optimisation. Doing this also has the beneficial side effect of turning the problem into one of univariate optimisation, thereby reducing total computational cost. Current work (Davies & Lawson, 2018) provides some empirical evidence that this strategy performs quite well in practice. Like \code{\link{LSCV.density}} and \code{\link{LIK.density}}, the argument \code{zero.action} can be used to control the level of severity in response to small bandwidths that result (due to numerical error) in at least one density value being zero or less. When this argument is passed a vector of length 2, the first entry corresponds to the global bandwidth (and hence refers to checks of the final adaptive density estimate and its leave-one-out values) and the second to the pilot bandwidth (and hence checks the fixed-bandwidth pilot density and its leave-one-out values). Alternatively a single value may be supplied, which will be taken to be the same for both global and pilot. 
See the help page for \code{\link{LIK.density}} for an explanation of the four allowable values (\code{-1}, \code{0}, \code{1}, \code{2}) for each component of this argument. } \section{Note}{ While theoretically valid, this is a largely experimental function. There is presently little in the literature to suggest how well this type of simultaneous global/pilot bandwidth selection might perform in practice. Current research efforts (Davies & Lawson, 2018) seek in part to address these questions. } \examples{ \donttest{ data(pbc) pbccas <- split(pbc)$case SLIK.adapt(pbccas) SLIK.adapt(pbccas,hold=TRUE) } } \references{ Davies, T.M. and Lawson, A.B. (2018), An evaluation of likelihood-based bandwidth selectors for spatial and spatiotemporal kernel estimates, \emph{Submitted for publication}. Silverman, B.W. (1986), \emph{Density Estimation for Statistics and Data Analysis}, Chapman & Hall, New York. Wand, M.P. and Jones, C.M., 1995. \emph{Kernel Smoothing}, Chapman & Hall, London. } \seealso{ Functions for bandwidth selection in package \code{\link{spatstat}}: \code{\link[spatstat.core]{bw.diggle}}; \code{\link[spatstat.core]{bw.ppl}}; \code{\link[spatstat.core]{bw.scott}}; \code{\link[spatstat.core]{bw.frac}}. } \author{ T. M. 
Davies } sparr/man/sparr-internal.Rd0000755000176200001440000000347714012076542015423 0ustar liggesusers\name{sparr-internal} \Rdversion{1.1} \alias{sparr-internal} \alias{adens} \alias{BAM.single} \alias{BAM.prep} \alias{bivden.LOO} \alias{checkdb} \alias{checkedge} \alias{checkit} \alias{checkran} \alias{edgeh} \alias{gethcats} \alias{identical_windows} \alias{K0} \alias{LSCV.density.spatial.single} \alias{LSCV.risk.single} \alias{posifybivden} \alias{processgamma} \alias{processnstar} \alias{processscaler} \alias{point_image_by_bw} \alias{taperoff} \alias{tol.asy.ada} \alias{tol.asy.fix} \alias{tol.mc.ada} \alias{tol.mc.fix} \title{ Internal \code{sparr} functions } \description{ Internal \code{\link{sparr}} functions, only meant to be accessed by the user directly in a life-threatening situation. } \section{Usage}{ \code{adens(x,bwim,bwpts,resolution,edge,diggle,weights,intensity,hstep,qstep,qres,verbose)}\cr \code{BAM.single(h,edge,BP)}\cr \code{BAM.prep(cases,controls,lambda,erode,res)}\cr \code{bivden.LOO(pp,h0,hp,gamma.scale,trim,resolution,parallel)}\cr \code{checkdb(db)}\cr \code{checkedge(edge)}\cr \code{checkit(h,str)}\cr \code{checkran(ran,nm)}\cr \code{edgeh(bwim,pres,tres,step,W,verbose=FALSE)}\cr \code{gethcats(h,breaks=NULL,step=0.05)}\cr \code{identical_windows(w1,w2)}\cr \code{KO(xy)}\cr \code{LSCV.density.spatial.single(h,pp,res,edge)}\cr \code{LSCV.risk.single(h,cases,controls,res,edge,hazey)}\cr \code{posifybivden(x,eps=.Machine$double.xmin)}\cr \code{processgamma(gamma.scale,pds)}\cr \code{processnstar(n,pp)}\cr \code{processscaler(s,pp)}\cr \code{point_image_by_bw(h,hc,points,weights,WM)}\cr \code{taperoff(x,zeropoint=0,onepoint=1,type=c("smooth", "cosine"))}\cr \code{tol.asy.ada(f,g,beta,verbose=FALSE)}\cr \code{tol.asy.fix(f,g,pooled,verbose=FALSE)}\cr \code{tol.mc.ada(rs,ITER,parallelise,verbose,...)}\cr \code{tol.mc.fix(rs,ITER,parallelise,verbose,...)}\cr } \keyword{internal} \author{T.M. 
Davies}sparr/man/spattemp.density.Rd0000644000176200001440000001716314012076542015767 0ustar liggesusers\name{spattemp.density} \alias{spattemp.density} \alias{stden} %- Also NEED an '\alias' for EACH other topic documented here. \title{ Spatiotemporal kernel density estimation } \description{ Provides a fixed-bandwidth kernel estimate of continuous spatiotemporal data. } \usage{ spattemp.density(pp, h = NULL, tt = NULL, lambda = NULL, tlim = NULL, sedge = c("uniform", "none"), tedge = sedge, sres = 128, tres = NULL, verbose = TRUE) } %- maybe also 'usage' for other objects documented here. \arguments{ \item{pp}{ An object of class \code{\link[spatstat.geom]{ppp}} giving the spatial coordinates of the observations to be smoothed. Possibly marked with the time of each event; see argument \code{tt}. } \item{h}{ Fixed bandwidth to smooth the spatial margin. A numeric value > 0. If unsupplied, the oversmoothing bandwidth is used as per \code{\link{OS}}. } \item{tt}{ A numeric vector of equal length to the number of points in \code{pp}, giving the time corresponding to each spatial observation. If unsupplied, the function attempts to use the values in the \code{\link[spatstat.geom]{marks}} attribute of the \code{\link[spatstat.geom:ppp]{ppp.object}} in \code{pp}. } \item{lambda}{ Fixed bandwidth to smooth the temporal margin; a numeric value > 0. If unsupplied, the function internally computes the Sheather-Jones bandwith using \code{\link[stats]{bw.SJ}} (Sheather & Jones, 1991). } \item{tlim}{ A numeric vector of length 2 giving the limits of the temporal domain over which to smooth. If supplied, all times in \code{tt} must fall within this interval (equality with limits allowed). If unsupplied, the function simply uses the range of the observed temporal values. } \item{sedge}{ Character string dictating spatial edge correction. \code{"uniform"} (default) corrects based on evaluation grid coordinate. Setting \code{sedge="none"} requests no edge correction. 
} \item{tedge}{ As \code{sedge}, for temporal edge correction. } \item{sres}{ Numeric value > 0. Resolution of the [\code{sres} \eqn{\times}{x} \code{sres}] evaluation grid in the spatial margin. } \item{tres}{ Numeric value > 0. Resolution of the evaluation points in the temporal margin as defined by the \code{tlim} interval. If unsupplied, the density is evaluated at integer values between \code{tlim[1]} and \code{tlim[2]}. } \item{verbose}{ Logical value indicating whether to print a function progress bar to the console during evaluation. } } \details{ This function produces a fixed-bandwidth kernel estimate of a single spatiotemporal density, with isotropic smoothing in the spatial margin, as per Fernando & Hazelton (2014). Estimates may be edge-corrected for an irregular spatial study window \emph{and} for the bounds on the temporal margin as per \code{tlim}; this edge-correction is performed in precisely the same way as the \code{"uniform"} option in \code{\link{bivariate.density}}. Specifically, for \eqn{n} trivariate points in space-time (\code{pp}, \code{tt}, \code{tlim}), we have \deqn{\hat{f}(x,t)=n^{-1}\sum_{i=1}^{n}h^{-2}\lambda^{-1}K((x-x_i)/h)L((t-t_i)/\lambda)/(q(x)q(t)),} where \eqn{x\in W\subset R^2} and \eqn{t\in T\subset R}; \eqn{K} and \eqn{L} are the 2D and 1D Gaussian kernels controlled by fixed bandwidths \eqn{h} (\code{h}) and \eqn{\lambda} (\code{lambda}) respectively; and \eqn{q(x)=\int_W h^{-2}K((u-x)/h)du} and \eqn{q(t)=\int_T \lambda^{-1}L((w-t)/\lambda)dw} are optional edge-correction factors (\code{sedge} and \code{tedge}). The above equation provides the \emph{joint} or \emph{unconditional} density at a given space-time location \eqn{(x,t)}. In addition to this, the function also yields the \emph{conditional} density at each grid time, defined as \deqn{\hat{f}(x|t)=\hat{f}(x,t)/\hat{f}(t),} where \eqn{\hat{f}(t)=n^{-1}\sum_{i=1}^{n}\lambda^{-1}L((t-t_i)/\lambda)/q(t)} is the univariate kernel estimate of the temporal margin. 
Normalisation of the two versions \eqn{\hat{f}(x,t)} and \eqn{\hat{f}(x|t)} is the only way they differ. Where in the unconditional setting we have \eqn{\int_W\int_T\hat{f}(x,t)dt dx=1}, in the conditional setting we have \eqn{\int_W\hat{f}(x|t) dx=1} for all \eqn{t}. See Fernando & Hazelton (2014) for further details and practical reasons as to why we might prefer one over the other in certain situations. The objects returned by this function (see `Value' below) are necessary for kernel estimation of spatiotemporal relative risk surfaces, which is performed by \code{\link{spattemp.risk}}. } \value{ An object of class \code{"stden"}. This is effectively a list with the following components: \item{z}{ A named (by time-point) list of pixel \code{\link[spatstat.geom]{im}}ages corresponding to the joint spatiotemporal density over space at each discretised time. } \item{z.cond}{ A named (by time-point) list of pixel \code{\link[spatstat.geom]{im}}ages corresponding to the conditional spatial density given each discretised time. } \item{h}{ The scalar bandwidth used for spatial smoothing. } \item{lambda}{ The scalar bandwidth used for temporal smoothing. } \item{tlim}{ A numeric vector of length two giving the temporal bound of the density estimate. } \item{spatial.z}{ A pixel \code{\link[spatstat.geom]{im}}age giving the overall spatial margin as a single 2D density estimate (i.e. ignoring time). } \item{temporal.z}{ An object of class \code{\link[stats]{density}} giving the overall temporal margin as a single 1D density estimate (i.e. ignoring space). } \item{qs}{ A pixel \code{\link[spatstat.geom]{im}}age giving the edge-correction surface for the spatial margin. \code{NULL} if \code{sedge = "none"}. } \item{qt}{ A numeric vector giving the edge-correction weights for the temporal margin. \code{NULL} if \code{tedge = "none"}. 
} \item{pp}{ A \code{\link[spatstat.geom:ppp]{ppp.object}} of the spatial data passed to the argument of the same name in the initial function call, with \code{\link[spatstat.geom]{marks}} of the observation times. } \item{tgrid}{ A numeric vector giving the discretised time grid at which the spatiotemporal density was evaluated (matches the names of \code{z} and \code{z.cond}). } } \references{ Duong, T. (2007), ks: Kernel Density Estimation and Kernel Discriminant Analysis for Multivariate Data in R, \emph{Journal of Statistical Software}, \bold{21}(7), 1-16.\cr\cr Fernando, W.T.P.S. and Hazelton, M.L. (2014), Generalizing the spatial relative risk function, \emph{Spatial and Spatio-temporal Epidemiology}, \bold{8}, 1-10.\cr\cr Kelsall, J.E. and Diggle, P.J. (1995), Kernel estimation of relative risk, \emph{Bernoulli}, \bold{1}, 3-16.\cr\cr Sheather, S. J. and Jones, M. C. (1991), A reliable data-based bandwidth selection method for kernel density estimation. Journal of the Royal Statistical Society Series B, \bold{53}, 683-690.\cr\cr Silverman, B.W. (1986), \emph{Density Estimation for Statistics and Data Analysis}, Chapman & Hall, New York. } \author{ T.M. 
Davies } \seealso{ \code{\link{bivariate.density}}, \code{\link{spattemp.risk}}, \code{\link{spattemp.slice}} } \examples{ data(burk) burkcas <- burk$cases burkden1 <- spattemp.density(burkcas,tres=128) summary(burkden1) \donttest{ hlam <- LIK.spattemp(burkcas,tlim=c(400,5900),verbose=FALSE) burkden2 <- spattemp.density(burkcas,h=hlam[1],lambda=hlam[2],tlim=c(400,5900),tres=256) tims <- c(1000,2000,3500) par(mfcol=c(2,3)) for(i in tims){ plot(burkden2,i,override.par=FALSE,fix.range=TRUE,main=paste("joint",i)) plot(burkden2,i,"conditional",override.par=FALSE,main=paste("cond.",i)) } } } %Evaluation of the spatiotemporal density itself is direct, limiting effective support of the Gaussian kernel to plus or minus 4 bandwidths, based on the implementation of 3D density estimation in the \code{\link[ks]{ks}} package (Duong, 2007). sparr/man/tol.contour.Rd0000644000176200001440000000342514012076542014736 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/tol.contour.R \name{tol.contour} \alias{tol.contour} \title{Plot tolerance contours} \usage{ tol.contour(pim, test = c("upper", "lower", "two-sided"), ...) } \arguments{ \item{pim}{A pixel \code{\link[spatstat.geom]{im}}age of \emph{p}-values, typically obtained from a call to \code{\link{tolerance}}, computed with respect to a test for elevated risk.} \item{test}{An optional character string giving the type of manipulation to be applied to the \emph{p}-values, corresponding to a test for significantly elevated risk (\code{"upper"}; default); for reduced risk (\code{"lower"}); or for both (\code{"two-sided"}).} \item{...}{Additional arguments to be passed to \code{\link{contour}}. 
Commonly used options include \code{add} (to superimpose the contours upon an existing plot); \code{levels} (to control the specific significance levels at which to delineate the \emph{p}-values); and \code{lty} or \code{lwd} for aesthetics.} } \value{ Opens a new graphics device and displays a \code{\link{contour}} plot if \code{add = FALSE}, otherwise adds the contours to the plot in the existing active graphics device. } \description{ Draw contours based on a \emph{p}-value matrix. } \details{ Note that no checks on the numeric content of \code{pim} are made. The function assumes the pixel \code{\link[spatstat.geom]{im}}age of \emph{p}-values in \code{pim} is supplied with respect to an upper-tailed test for elevated risk (this is exactly the way the \emph{p}-value surface is returned when \code{\link{tolerance}} is used). This is important if one makes subsequent use of \code{test}, which manipulates the \emph{p}-values to draw at desired significance \code{levels}. } \examples{ # See ?tolerance } \author{ T. M. Davies } sparr/man/multiscale.slice.Rd0000644000176200001440000000543114012076542015707 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/multiscale.slice.R \name{multiscale.slice} \alias{multiscale.slice} \title{Slicing a multi-scale density/intensity object} \usage{ multiscale.slice(msob, h0, checkargs = TRUE) } \arguments{ \item{msob}{An object of class \code{\link{msden}} giving the multi-scale estimate from which to take slices.} \item{h0}{Desired global bandwidth(s); the density/intensity estimate corresponding to which will be returned. A numeric vector. All values \bold{must} be in the available range provided by \code{msob$h0range}; see `Details'.} \item{checkargs}{Logical value indicating whether to check validity of \code{msob} and \code{h0}. 
Disable only if you know this check will be unnecessary.} } \value{ If \code{h0} is scalar, an object of class \code{\link{bivden}} with components corresponding to the requested slice at \code{h0}. If \code{h0} is a vector, a list of objects of class \code{\link{bivden}}. } \description{ Takes slices of a multi-scale density/intensity estimate at desired global bandwidths } \details{ Davies & Baddeley (2018) demonstrate that once a multi-scale density/intensity estimate has been computed, we may take slices parallel to the spatial domain of the trivariate convolution to return the estimate at any desired global bandwidth. This function is the implementation thereof based on a multi-scale estimate resulting from a call to \code{\link{multiscale.density}}. The function returns an error if the requested slices at \code{h0} are not all within the available range of pre-computed global bandwidth scalings as defined by the \code{h0range} component of \code{msob}. Because the contents of the \code{msob} argument, an object of class \code{\link{msden}}, are returned based on a discretised set of global bandwidth scalings, the function internally computes the desired surface as a pixel-by-pixel linear interpolation using the two discretised global bandwidth rescalings that bound each requested \code{h0}. (Thus, numeric accuracy of the slices is improved with an increase to the \code{dimz} argument of the preceding call to \code{multiscale.density} at the cost of additional computing time.) } \examples{ \donttest{ data(chorley) # Chorley-Ribble data (package 'spatstat') ch.multi <- multiscale.density(chorley,h0=1,h0fac=c(0.5,2)) available.h0(ch.multi) ch.slices <- multiscale.slice(ch.multi,h0=c(0.7,1.1,1.6)) par(mfcol=c(2,3)) # plot each density and edge-correction surface for(i in 1:3) { plot(ch.slices[[i]]$z); plot(ch.slices[[i]]$q) } } } \references{ Davies, T.M. and Baddeley A. 
(2018), Fast computation of spatially adaptive kernel estimates, \emph{Statistics and Computing}, \bold{28}(4), 937-956. } \seealso{ \code{\link{multiscale.density}}, \code{\link{bivariate.density}} } \author{ T.M. Davies } sparr/man/tolerance.Rd0000644000176200001440000001532014012423261014413 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/tolerance.R \name{tolerance} \alias{tolerance} \title{Tolerance by \emph{p}-value surfaces} \usage{ tolerance( rs, method = c("ASY", "MC"), ref.density = NULL, beta = 0.025, ITER = 100, parallelise = NULL, verbose = TRUE, ... ) } \arguments{ \item{rs}{An object of class \code{\link{rrs}} giving the estimated relative risk function for which to calculate the \emph{p}-value surface.} \item{method}{A character string specifying the method of calculation. \code{"ASY"} (default) instructs the function to compute the \emph{p}-values using asymptotic theory. \code{"MC"} computes the values by random permutations of the data. See `Details'.} \item{ref.density}{Required if \code{rs} is based on fixed-bandwidth estimates of the case and control densities and \code{method = "ASY"}. Either a pixel \code{\link[spatstat.geom]{im}}age or an object of class \code{\link{bivden}} giving the reference density to use in asymptotic formulae. May be unnormalised. Ignored if \code{rs} is based on adaptive kernel estimates or if \code{method = "MC"}.} \item{beta}{A numeric value \eqn{0 <} \code{beta} \eqn{< 1} giving the fineness of the adaptive bandwidth partitioning to use for calculation of the required quantities for asymptotic adaptive \emph{p}-value surfaces. Smaller values provide more accurate bandwidth bins at the cost of additional computing time, see Davies and Baddeley (2018); the default is sensible in most cases. Ignored if \code{rs} is based on fixed-bandwidth kernel estimates.} \item{ITER}{Number of iterations for the Monte-Carlo permutations. 
Ignored if \code{method = "ASY"}.} \item{parallelise}{Numeric argument to invoke parallel processing, giving the number of CPU cores to use when \code{method = "MC"}. Experimental. Test your system first using \code{parallel::detectCores()} to identify the number of cores available to you.} \item{verbose}{Logical value indicating whether to print function progress during execution.} \item{...}{Additional arguments to be passed to \code{\link{risk}} when \code{method = "MC"}. While most information needed for the MC repetitions is implicitly gleaned from the object passed to \code{rs}, this ellipsis is typically used to set the appropriate \code{epsilon} and \code{pilot.symmetry} values for the internal calls to \code{\link{risk}}.} } \value{ A pixel \code{\link[spatstat.geom]{im}}age of the estimated \emph{p}-value surface. } \description{ Calculates a \emph{p}-value surface based on asymptotic theory or Monte-Carlo (MC) permutations describing the extremity of risk given a fixed or adaptive kernel-smoothed density-ratio, allowing the drawing of \emph{tolerance contours}. } \details{ This function implements developments in Hazelton and Davies (2009) (fixed) and Davies and Hazelton (2010) (adaptive) to compute pointwise \emph{p}-value surfaces based on asymptotic theory of kernel-smoothed relative risk surfaces. Alternatively, the user may elect to calculate the \emph{p}-value surfaces using Monte-Carlo methods (see Kelsall and Diggle, 1995). Superimposition upon a plot of the risk surface contours of these \emph{p}-values at given significance levels (i.e. ``tolerance contours'') can be an informative way of exploring the statistical significance of the extremity of risk across the defined study region. Implementation of the Monte-Carlo method simply involves random allocation of case/control marks and re-estimation of the risk surface \code{ITER} times, against which the original estimate is compared. 
While not dependent on asymptotic theory, it is computationally expensive, and it has been suggested that it might have some undesirable practical consequences in certain settings (Hazelton and Davies, 2009). When performing the MC simulations, the same global (and pilot, if necessary) bandwidths and edge-correction regimens are employed as were used in the initial density estimates of the observed data. With regard to arguments to be passed to internal calls of \code{\link{risk}}, the user should take care to use \code{...} to set the \code{epsilon} value to match that which was used in creation of the object passed to \code{rs} (if this was set to a non-default value). Furthermore, if performing MC simulations for the adaptive relative risk function, the function borrows the value of the \code{beta} argument to speed things up via partitioning, and the user should additionally access \code{...} to set the same \code{pilot.symmetry} value as was used for creation of the object passed to \code{rs}, in the same way as for any non-default use of \code{epsilon}. This will ensure the simulations are all performed under the same conditions as were used to estimate the original risk function. } \note{ The returned \emph{p}-values are geared so that ``smallness'' corresponds to statistical significance of elevated risk, that is, an upper-tailed test. The complement of the \emph{p}-values will yield significance of reduced risk; a lower-tailed test. When using \code{\link{tol.contour}}, the user can control what type of contours to display. 
} \examples{ \donttest{ data(pbc) h0 <- LSCV.risk(pbc,method="hazelton");h0 pbccas <- split(pbc)[[1]] pbccon <- split(pbc)[[2]] # ASY riskfix <- risk(pbc,h0=h0) fixtol1 <- tolerance(riskfix,ref.density=density(pbc,OS(pbc))) riskada <- risk(pbc,h0=h0,adapt=TRUE,hp=NS(pbc),pilot.symmetry="pooled",davies.baddeley=0.025) adatol1 <- tolerance(riskada) par(mfrow=c(1,2)) plot(riskfix) tol.contour(fixtol1,levels=c(0.1,0.05,0.01),lty=3:1,add=TRUE) plot(riskada) tol.contour(adatol1,levels=c(0.1,0.05,0.01),lty=3:1,add=TRUE) # MC fixtol2 <- tolerance(riskfix,method="MC",ITER=200) adatol2 <- tolerance(riskada,method="MC",ITER=200,parallelise=2) # ~90secs with parallelisation par(mfrow=c(1,2)) plot(riskfix) tol.contour(fixtol2,levels=c(0.1,0.05,0.01),lty=3:1,add=TRUE) plot(riskada) tol.contour(adatol2,levels=c(0.1,0.05,0.01),lty=3:1,add=TRUE) } } \references{ Davies, T.M. and Baddeley A. (2018), Fast computation of spatially adaptive kernel estimates, \emph{Statistics and Computing}, \bold{28}(4), 937-956. Davies, T.M. and Hazelton, M.L. (2010), Adaptive kernel estimation of spatial relative risk, \emph{Statistics in Medicine}, \bold{29}(23) 2423-2437. Davies, T.M., Jones, K. and Hazelton, M.L. (2016), Symmetric adaptive smoothing regimens for estimation of the spatial relative risk function, \emph{Computational Statistics & Data Analysis}, \bold{101}, 12-28. Hazelton, M.L. and Davies, T.M. (2009), Inference based on kernel estimates of the relative risk function in geographical epidemiology, \emph{Biometrical Journal}, \bold{51}(1), 98-109. Kelsall, J.E. and Diggle, P.J. (1995), Kernel estimation of relative risk, \emph{Bernoulli}, \bold{1}, 3-16. } \author{ T. M. Davies } sparr/man/BOOT.density.Rd0000644000176200001440000002472714012077515014702 0ustar liggesusers\name{BOOT.density} \alias{BOOT.density} %- Also NEED an '\alias' for EACH other topic documented here. 
\title{ Bootstrap bandwidth for a spatial kernel density estimate } \description{ Isotropic fixed or global (for adaptive) bandwidth selection for a standalone 2D density based on bootstrap estimation of the MISE. } \usage{ BOOT.density(pp, hlim = NULL, eta = NULL, type = c("fixed", "adaptive"), hp = NULL, edge = c("uniform", "none"), ref.density = NULL, resolution = 64, rmdiag = TRUE, sim.adapt = list(N = 50, B = 100, dimz = 64, objective = FALSE), parallelise = NA, verbose = TRUE, ...) } \arguments{ \item{pp}{ An object of class \code{\link[spatstat.geom]{ppp}} giving the observed 2D data to be smoothed. } \item{hlim}{ An optional vector of length 2 giving the limits of the optimisation routine with respect to the bandwidth. If \code{NULL}, the function attempts to choose this automatically. } \item{eta}{ Fixed scalar bandwidth to use for the reference density estimate; if \code{NULL} it is calculated as the oversmoothing bandwidth of \code{pp} using \code{\link{OS}}. Ignored if \code{ref.density} is supplied. See `Details'. } \item{type}{ A character string indicating selection type. Either \code{"fixed"} (default) for selection of a constant bandwidth for the fixed-bandwidth estimator based on theory extended from results in Taylor (1989); or \code{"adaptive"} for selection of the global bandwidth for an adaptive kernel density. See `Details'. } \item{hp}{ Pilot bandwidth used for adaptive estimates in the bootstrap; see the argument of the same tag in \code{\link{bivariate.density}}. Ignored when \code{type = "fixed"} or when \code{ref.density} is supplied. } \item{edge}{ Character string dictating edge correction for the bootstrapped estimates. \code{"uniform"} (default) corrects based on evaluation grid coordinate. Setting \code{edge="none"} requests no edge correction. } \item{ref.density}{ Optional. An object of class \code{\link{bivden}} giving the reference density from which data will be generated. 
Based on theory, this must be a fixed-bandwidth estimate if \code{type = "fixed"}; see `Details'. Must be edge-corrected if \code{edge = "uniform"}. } \item{resolution}{ Spatial grid size; the optimisation will be based on a [\code{resolution} \eqn{\times}{x} \code{resolution}] density estimate. } \item{rmdiag}{ Logical control value for removal of mirrored evaluation points as suggested by Taylor (1989) in the theoretical expression of the fixed-bandwidth MISE estimate. See `Details'. Ignored when \code{type = "adaptive"} } \item{sim.adapt}{ List of control values for bootstrap simulation in the adaptive case; see `Details'. Ignored when \code{type = "fixed"}. } \item{parallelise}{ Optional numeric argument to reduce computation time by invoking parallel processing, by giving the number of CPU cores to use in either evaluation (fixed) or in the actual bootstrap replicate generation (adaptive). Experimental. Test your system first using \code{parallel::detectCores()} to identify the number of cores available to you. } \item{verbose}{ Logical value indicating whether to print function progress during execution. } \item{\dots}{ Optional arguments controlling scaling to be passed to \code{\link{multiscale.density}} for the adaptive bootstrap; ignored when \code{type = "fixed"}. } } \details{ For a 2D kernel density estimate \eqn{\hat{f}} defined on \eqn{W \in R^2}, the mean integrated squared error (MISE) is given by \eqn{E[\int_W (\hat{f}(x) - f(x))^2 dx]}, where \eqn{f} is the corresponding true density. 
Given an observed data set \eqn{X} (argument \code{pp}) of \eqn{n} observations, this function finds the bandwidth \eqn{h} that minimises \deqn{E^*[\int_W (\hat{f}^*(x) - \hat{f}(x))^2 dx],} where \eqn{\hat{f}(x)} is a density estimate of \eqn{X} constructed with `reference' bandwidth \eqn{\eta} (argument \code{eta} or \code{ref.density}), and \eqn{\hat{f}^*(x)} is a density estimate using bandwidth \eqn{h} of \eqn{n} observations \eqn{X^*} generated from \eqn{\hat{f}(x)}. The notation \eqn{E^*} denotes expectation with respect to the distribution of the \eqn{X^*}. \describe{ \item{\bold{Fixed}}{ When \code{type = "fixed"}, the function assumes you want to select a constant bandwidth for use with the fixed-bandwidth density estimator. This implementation is based on extending the remarkable results of Taylor (1989) (see also Sain et al., 1994), who demonstrates that when the Gaussian kernel is being used, we can find the optimal \eqn{h} with respect to the aforementioned bootstrap-estimated MISE without any actual resampling. This implementation extends these results to the bivariate setting, and allows for edge-correction of both the reference and bootstrap densities. \itemize{ \item Taylor (1989) does not distinguish between the reference bandwidth \eqn{\eta} and the target of optimisation, \eqn{h}, thus allowing the reference bandwidth to vary alongside the target in the optimisation. This is not optimal, and this function always assumes a static reference bandwidth. Hall et al. (1992) indicate that a generous amount of smoothing is to be preferred in the reference density (hence the default \code{eta} set using \code{\link{OS}}). \item If \code{ref.density} is supplied, it \bold{must} be a fixed-bandwidth density estimate as an object of class \code{\link{bivden}} for validity of the theory. Edge-correction must be present if \code{edge = "uniform"}; and it must be evaluated on the same spatial domain as dictated by \code{Window(pp)} and \code{resolution}. 
If unsupplied, the function internally computes an appropriate fixed-bandwidth density estimate using \code{eta} as the reference bandwidth. \item Finally, Taylor (1989) argues it is preferable to avoid summation at identical evaluation grid points in the expression for the optimal bandwidth, which is performed when \code{rmdiag = TRUE}. Setting \code{rmdiag = FALSE} disables this correction. } } \item{\bold{Adaptive}}{ When \code{type = "adaptive"}, the function assumes you want to select a global bandwidth (argument \code{h0} in \code{\link{bivariate.density}}) for use in 2D adaptive kernel density estimation. \itemize{ \item An expression similar to Taylor (1989) is not possible for the adaptive estimator. Thus, in the adaptive setting, the optimal bootstrap bandwidth is calculated by brute force as was performed in Davies and Baddeley (2018) by taking advantage of the multiscale estimation theory implemented in \code{\link{multiscale.density}}. The value that minimises an interpolating cubic spline of the estimated MISE on bandwidth is identified as the optimal global bandwidth. \item The user can pass either a fixed or adaptive \code{bivden} object to \code{ref.density}. If this is the case, \code{hp} is ignored and the pilot bandwidth for each iteration of the bootstrap in estimation of the \eqn{\hat{f}^*(x)} uses \code{ref.density$hp} (if \code{ref.density} is adaptive) or \code{ref.density$h0} (if \code{ref.density} is fixed). When \code{ref.density} is unsupplied, the function uses a fixed-bandwidth kernel estimate with bandwidth \code{eta} as the reference density, and if additionally \code{hp} is unsupplied, the same value \code{eta} is used for the constant pilot bandwidth. \item Control over the bootstrap is achieved with four optional named arguments passed as a list to \code{sim.adapt}. \code{N} controls the number of bootstrap iterates per bandwidth; \code{B} controls the resolution of the sequence of bandwidths trialled (i.e. 
between \code{hlim[1]} and \code{hlim[2]}); \code{dimz} specifies the resolution of the bandwidth axis in the trivariate convolution evaluated by \code{\link{multiscale.density}}; and \code{objective} specifies whether to return the set of estimated MISEs for all bandwidths (nice to plot), or merely the optimal bandwidth (see `Value'). \item The \code{\dots} are intended for any relevant optional arguments to be passed to the internal call to \code{\link{multiscale.density}}, such as \code{gamma.scale} or \code{trim}. } } } } \value{ The optimal fixed or global (for adaptive) scalar bandwidth. If \code{sim.adapt$objective = TRUE} for the adaptive bootstrap, the return object is instead a \eqn{[}\code{sim.adapt$B} \eqn{x 2]} matrix, with the first column giving the trialled bandwidth and the second giving the corresponding value of the estimated bootstrap MISE. } \references{ Davies, T.M. and Baddeley A. (2018), Fast computation of spatially adaptive kernel estimates, \emph{Statistics and Computing}, \bold{28}(4), 937-956.\cr\cr Hall, P., Marron, J.S. and Park, B.U. (1992) Smoothed cross-validation, \emph{Probability Theory and Related Fields}, \bold{92}, 1-20.\cr\cr Sain, S.R., Baggerly, K.A. and Scott, D.W. (1994) Cross-validation of multivariate densities, \emph{Journal of the American Statistical Association}, \bold{89}, 807-817.\cr\cr Taylor, C.C. (1989) Bootstrap choice of the smoothing parameter in kernel density estimation, \emph{Biometrika}, \bold{76}, 705-712. } \author{ T.M. Davies } \section{Warning}{ Even with the implemented computational tricks, bootstrapping for bandwidth selection for spatial data is still computationally demanding, especially for adaptive kernel estimates. The user can reduce this time by keeping the evaluation grid at modest \code{resolution}s, and experimenting with parallelising the internal loops via \code{parallelise}. The `Examples' section offers some rough indications of evaluation times on this author's local machine. 
} \seealso{ \code{\link{bivariate.density}}, \code{\link{OS}}, \code{\link{multiscale.density}} } \examples{ \donttest{ data(pbc) ## Fixed bandwidth selection ## BOOT.density(pbc) # ~20 secs BOOT.density(pbc,eta=OS(pbc)/2) # halve default reference bandwidth BOOT.density(pbc,eta=OS(pbc)*2) # double default reference bandwidth # supplying pre-defined reference density as fixed-bandwidth 'bivden' object pbcfix <- bivariate.density(pbc,h0=2.5,resolution=64) system.time(hfix <- BOOT.density(pbc,ref.density=pbcfix)) hfix ## Global (for adaptive) bandwidth selection ## # ~200 secs next line; use 'parallelise' for speedup system.time(hada <- BOOT.density(pbc,type="adaptive")) # minimal usage for adaptive bootstrap hada # ~80 secs next line. Set custom h limits; increase reference bandwidth; # set custom pilot bandwidth; return objective function; use 'parallelise' for speedup system.time(hada <- BOOT.density(pbc,hlim=c(0.9,8),eta=3.5,type="adaptive", hp=OS(pbc)/2,sim.adapt=list(objective=TRUE))) hada[which.min(hada[,2]),1] plot(hada);abline(v=hada[which.min(hada[,2]),1],col=2) } } sparr/man/NS.Rd0000644000176200001440000001313314012076542012765 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/NS.R, R/NS.spattemp.R \name{NS} \alias{NS} \alias{NS.spattemp} \title{Normal scale (NS) bandwidth selector} \usage{ NS( pp, nstar = c("npoints", "geometric"), scaler = c("silverman", "IQR", "sd", "var") ) NS.spattemp( pp, tt = NULL, nstar = "npoints", scaler = c("silverman", "IQR", "sd", "var") ) } \arguments{ \item{pp}{An object of class \code{\link[spatstat.geom]{ppp}} giving the observed 2D data to be smoothed.} \item{nstar}{Optional. Controls the value to use in place of the number of observations \emph{n} in the normal scale formula. Either a character string, \code{"npoints"} (default) or \code{"geometric"} (only possible for \code{NS}), or a positive numeric value. See `Details'.} \item{scaler}{Optional. 
Controls the value for a scalar representation of the spatial (and temporal for \code{NS.spattemp}) scale of the data. Either a character string, \code{"silverman"} (default), \code{"IQR"}, \code{"sd"}, or \code{"var"}; or a positive numeric value. See `Details'.} \item{tt}{A numeric vector of equal length to the number of points in \code{pp}, giving the time corresponding to each spatial observation. If unsupplied, the function attempts to use the values in the \code{\link[spatstat.geom]{marks}} attribute of the \code{\link[spatstat.geom:ppp]{ppp.object}} in \code{pp}.} } \value{ A single numeric value of the estimated spatial bandwidth for \code{NS}, or a named numeric vector of length 2 giving the spatial bandwidth (as \code{h}) and the temporal bandwidth (as \code{lambda}) for \code{NS.spattemp}. } \description{ Provides the asymptotically optimal fixed bandwidths for spatial or spatiotemporal normal densities based on a simple expression. } \details{ These functions calculate scalar smoothing bandwidths for kernel density estimates of spatial or spatiotemporal data: the optimal values would minimise the asymptotic mean integrated squared error assuming normally distributed data; see pp. 46-48 of Silverman (1986). The \code{NS} function returns a single bandwidth for isotropic smoothing of spatial (2D) data. The \code{NS.spattemp} function returns two values -- one for the spatial margin and another for the temporal margin, based on independently applying the normal scale rule (in 2D and 1D) to the spatial and temporal margins of the supplied data. \describe{ \item{\bold{Effective sample size}}{ The formula requires a sample size, and this can be minimally tailored via \code{nstar}. By default, the function simply uses the number of observations in \code{pp}: \code{nstar = "npoints"}. Alternatively, the user can specify their own value by simply supplying a single positive numeric value to \code{nstar}. 
For \code{NS} (not applicable to \code{NS.spattemp}), if \code{pp} is a \code{\link[spatstat.geom:ppp]{ppp.object}} with factor-valued \code{\link[spatstat.geom]{marks}}, then the user has the option of using \code{nstar = "geometric"}, which sets the sample size used in the formula to the geometric mean of the counts of observations of each mark. This can be useful for e.g. relative risk calculations, see Davies and Hazelton (2010). } \item{\bold{Spatial (and temporal) scale}}{The \code{scaler} argument is used to specify spatial (as well as temporal, in use of \code{NS.spattemp}) scale. For isotropic smoothing in the spatial margin, one may use the `robust' estimate of standard deviation found by a weighted mean of the interquartile ranges of the \eqn{x}- and \eqn{y}-coordinates of the data respectively (\code{scaler = "IQR"}). Two other options are the raw mean of the coordinate-wise standard deviations (\code{scaler = "sd"}), or the square root of the mean of the two variances (\code{scaler = "var"}). A fourth option, \code{scaler = "silverman"} (default), sets the scaling constant to be the minimum of the \code{"IQR"} and \code{"sd"} options; see Silverman (1986), p. 47. In use of \code{NS.spattemp} the univariate version of the elected scale statistic is applied to the recorded times of the data for the temporal bandwidth. Alternatively, like \code{nstar}, the user can specify their own value by simply supplying a single positive numeric value to \code{scaler} for \code{NS}, or a numeric vector of length 2 (in the order of \emph{[spatial, temporal]}) for \code{NS.spattemp}. } } } \section{Warning}{ The NS bandwidth is an approximation, and assumes \emph{that the target density is normal}. This is considered rare in most real-world applications. Nevertheless, it remains a quick and easy `rule-of-thumb' method with which one may obtain a smoothing parameter. Note that a similar expression for the adaptive kernel estimator is not possible (Davies et al., 2018). 
} \examples{ data(pbc) NS(pbc) NS(pbc,nstar="geometric") # uses case-control marks to replace sample size NS(pbc,scaler="var") # set different scalar measure of spread data(burk) NS.spattemp(burk$cases) NS.spattemp(burk$cases,scaler="sd") } \references{ Davies, T.M. and Hazelton, M.L. (2010), Adaptive kernel estimation of spatial relative risk, \emph{Statistics in Medicine}, \bold{29}(23) 2423-2437. Davies, T.M., Flynn, C.R. and Hazelton, M.L. (2018), On the utility of asymptotic bandwidth selectors for spatially adaptive kernel density estimation, \emph{Statistics & Probability Letters} [in press]. Silverman, B.W. (1986), \emph{Density Estimation for Statistics and Data Analysis}, Chapman & Hall, New York. Wand, M.P. and Jones, C.M., 1995. \emph{Kernel Smoothing}, Chapman & Hall, London. } \author{ T.M. Davies } sparr/DESCRIPTION0000644000176200001440000000230614024105406013104 0ustar liggesusersPackage: sparr Type: Package Title: Spatial and Spatiotemporal Relative Risk Version: 2.2-15 Date: 2021-03-16 Authors@R: c( person(c("Tilman", "M."), "Davies", role = c("aut", "cre"), email = "tilman.davies@otago.ac.nz"), person(c("Jonathan", "C."), "Marshall", role = c("aut"), email = "j.c.marshall@massey.ac.nz")) Description: Provides functions to estimate kernel-smoothed spatial and spatio-temporal densities and relative risk functions, and perform subsequent inference. Methodological details can be found in the accompanying tutorial: Davies et al. (2018) . Depends: R (>= 2.10.1), spatstat (>= 2.0-0) Imports: spatstat.utils, spatstat.geom, spatstat.core, doParallel, parallel, foreach, misc3d Suggests: fftwtools (>= 0.9.8) License: GPL (>= 2) LazyLoad: yes NeedsCompilation: no RoxygenNote: 7.1.1 URL: https://tilmandavies.github.io/sparr/, https://github.com/tilmandavies/sparr/ BugReports: https://github.com/tilmandavies/sparr/issues/ Packaged: 2021-03-16 09:19:29 UTC; tilmandavies Author: Tilman M. Davies [aut, cre], Jonathan C. Marshall [aut] Maintainer: Tilman M. 
Davies Repository: CRAN Date/Publication: 2021-03-16 10:40:06 UTC sparr/R/0000755000176200001440000000000014024074041011576 5ustar liggesuserssparr/R/LSCV.density.spattemp.single.R0000644000176200001440000000243414012076542017272 0ustar liggesusersLSCV.density.spattemp.single <- function(bands,pp,tt,tlim,sres,tres,grx,gry,grt,kt,inside,xyin,xys,sedge,tedge,parallelise,verbose){ if(any(bands<=0)) return(NA) if(verbose) cat("h =",bands[1],"\b; lambda =",bands[2],"\n") h <- bands[1] lam <- bands[2] temp.dens <- kde3d(x=pp$x,y=pp$y,z=tt,h=c(h,h,lam),n=c(sres,sres,tres),lims=c(range(grx),range(gry),kt)) sq <- matrix(1,sres,sres) if(sedge){ sz <- density.ppp(pp,sigma=h,dimyx=sres,spill=1) sq <- sz$edg sq[sq>1] <- 1 } sq[!inside] <- NA sq <- t(as.matrix(sq)) tq <- rep(1,tres) if(tedge){ nearedge <- 1:tres wellinside <- which(grt>(tlim[1]+4*lam) & grt<(tlim[2]-4*lam)) if(length(wellinside)>0) nearedge <- nearedge[-wellinside] for(i in nearedge) tq[i] <- pnorm(tlim[2],mean=grt[i],sd=lam) - pnorm(tlim[1],mean=grt[i],sd=lam) } if(tedge||sedge){ for(i in 1:dim(temp.dens$d)[3]) temp.dens$d[,,i] <- temp.dens$d[,,i]/(sq*tq[i]) } # temp.dens <- spattemp.density2(pp,h,lam,tlim,edge,res,outside=NA) temp.dens.pts <- spattemp.LOO(pp,tt,h,lam,tlim,xyin,xys,sedge,tedge,parallelise=parallelise) # temp.dens.pts <- spattemp.density2(pp,h,lam,tlim,edge,res,outside=NA,leaveoneout=TRUE) return(sum(temp.dens$d^2*xys[1]*xys[2]*(grt[2]-grt[1]),na.rm=TRUE)-2*mean(temp.dens.pts)) } sparr/R/processnstar.R0000644000176200001440000000273414012076542014462 0ustar liggesusersprocessnstar <- function(n,pp){ if(is.numeric(n)){ if(length(n)>1){ n <- n[1] warning("'nstar' if numeric must be of length 1. 
Using first value only.") } if(n<=0) stop("'nstar' must be positive") } else if(is.character(n)){ if(length(n)>1){ n <- n[1] } if(n=="npoints"){ n <- npoints(pp) } else if(n=="geometric"){ pm <- marks(pp) if(is.null(pm)||(!is.factor(pm))){ n <- npoints(pp) warning("Using 'nstar'=\"geometric\" requires a 'ppp' object with factor-valued marks. Changing to \"npoints\"") } else { n <- prod(as.numeric(table(pm)))^(1/length(levels(pm))) } } else { stop("'nstar' character string must be one of \"npoints\" or \"geometric\"") } } else { stop("Invalid 'nstar' type") } return(n) } processnstar.st <- function(n,pp){ if(is.numeric(n)){ if(length(n)>1){ n <- n[1] warning("'nstar' if numeric must be of length 1. Using first value only.") } if(n<=0) stop("'nstar' must be positive") } else if(is.character(n)){ if(length(n)>1){ n <- n[1] } if(n=="npoints"){ n <- npoints(pp) } else if(n=="geometric"){ n <- npoints(pp) warning("'nstar' character string cannot be \"geometric\" for spatiotemporal estimates -- using \"npoints\"") } else { stop("'nstar' character string only permitted to be \"npoints\"") } } else { stop("Invalid 'nstar' type") } return(n) } sparr/R/available.h0.R0000644000176200001440000000367114012076542014163 0ustar liggesusers#' Available global bandwidth range #' #' Gets universally available global bandwidths as represented by several #' multi-scale density estimate objects #' #' This simple function merely accesses and returns the maximum lower limit and #' minimum upper limit of all \code{h0range} components of the #' \code{\link{msden}} objects passed through \code{...}. Natural numeric error #' arising from any changes to the bandwidth-axis discretisation resolution in #' the creation of the \code{\link{msden}} objects (i.e. through the #' `\code{dimz}' argument) means individual global bandwidth ranges can differ #' slightly between affected multi-scale estimates, even if they are all #' applied to the same data set. 
Can additionally be useful when, for example, #' creating asymmetric relative risk surfaces based on slices of multi-scale #' densities with respect to the case and control data sets, because the #' bandwidth factors differ. #' #' Throws an error if one or more of the \code{h0range} components is #' incompatible (i.e. all \code{h0range} components must overlap). #' #' @param ... Any number of objects of class \code{\link{msden}}; possibly #' named. #' #' @return A numeric vector of length 2 providing the range of available global #' bandwidths compatible with all supplied multi-scale density estimates. #' #' @author T.M. Davies #' #' @seealso \code{\link{multiscale.density}}, \code{\link{multiscale.slice}} #' #' @examples #' #' # See ?multiscale.slice #' #' @export available.h0 <- function(...){ unpacked <- list(...) cls <- lapply(unpacked,function(x) inherits(x,"msden")) if(!all(unlist(cls))) stop("function arguments must all be of class \"msden\", arising from a call to 'multiscale.density'") lo <- sapply(unpacked,function(x) x$h0range[1]) up <- sapply(unpacked,function(x) x$h0range[2]) rng <- c(max(lo),min(up)) if(rng[1]>=rng[2]) stop("incompatible 'h0range' components -- check bandwidth scales") return(rng) } sparr/R/print.msden.R0000644000176200001440000000054714012076542014175 0ustar liggesusers#' @rdname printsparr #' @method print msden #' @export print.msden <- function(x,...){ cat("Multi-scale Adaptive Kernel Density/Intensity Estimate\n\n") cat("Available global bandwidth range\n (",round(x$h0range[1],4),", ",round(x$h0range[2],4),") ",unitname(x$z[[1]])[[2]]," (to 4 d.p.)\n\n",sep="") cat("No. 
of observations\n ",npoints(x$pp),"\n") }sparr/R/print.rrst.R0000644000176200001440000000054314012076542014055 0ustar liggesusers#' @rdname printsparr #' @method print rrst #' @export print.rrst <- function(x, ...){ if(all(sapply(x$rrc,min,na.rm=TRUE)>=0)) cat("Spatiotemporal Relative Risk Surface\n\n") else cat("Spatiotemporal Log-Relative Risk Surface\n\n") cat("--Numerator (case) density--\n") print(x$f) cat("\n--Denominator (control) density--\n") print(x$g) }sparr/R/BOOT.spattemp.R0000644000176200001440000002332714012076542014334 0ustar liggesusers#' @export BOOT.spattemp <- function(pp,tt=NULL,tlim=NULL,eta=NULL,nu=NULL, sedge=c("uniform","none"),tedge=sedge, ref.density=NULL,sres=64,tres=sres, start=NULL,verbose=TRUE){ if(!inherits(pp,"ppp")) stop("'pp' must be of spatstat class \"ppp\"; see ?ppp") if(verbose) cat("Initialising...") n <- npoints(pp) W <- Window(pp) WM <- as.mask(W,dimyx=rep(sres,2)) if(is.null(tt)) tt <- marks(pp) tt <- checktt(tt) if(length(tt)!=n) stop(paste("Length of temporal vector does not match number of spatial observations\n npoints(pp) = ",n,"; length(tt) = ",length(tt),sep="")) if(is.null(tlim)) tlim <- range(tt) tlim <- checkranin(tlim,tt,"tlim") if(is.null(tres)){ tcw <- 1 kt <- tlim <- c(floor(tlim[1]),ceiling(tlim[2])) grt <- tlim[1]:tlim[2] tres <- length(grt) } else { tres <- checkit(tres,"'tres'") tcw <- diff(tlim)/tres grt <- tlim[1]+0.5*tcw+(0:(tres-1))*tcw kt <- c(tlim[1]+0.5*tcw,tlim[2]-0.5*tcw) } # LIMIT OPTIMISATION? CURRENTLY UNIMPLEMENTED. 
# # set h limits if unsupplied # if(is.null(hlim)){ # ppu <- pp # marks(ppu) <- NULL # md <- min(nndist(unique(ppu))) # hlim <- c(md,max(md*50,min(diff(W$xrange),diff(W$yrange))/6)) # } else { # hlim <- checkran(hlim,"'hlim'") # } # # # set lambda limits if unsupplied # if(is.null(lambdalim)){ # ttu <- unique(tt) # ttd <- outer(ttu,ttu,"-") # mt <- min(ttd[lower.tri(ttd)]) # lamlim <- c(mt,max(mt*50,diff(tlim))) # } else { # lamlim <- checkran(lambdalim,"'lamlim'") # } sedg <- checkedge(sedge,v=0)=="uniform" tedg <- checkedge(tedge,v=0)=="uniform" # FOR FUTURE IMPLEMENTATION # if(!is.na(parallelise)){ # if(!is.numeric(parallelise)) stop("'parallelise' must be numeric") # if(is.null(parallelise)) parallelise <- NA # parallelise <- round(parallelise[1]) # } inside <- WM$m inn <- which(as.vector(inside)) evalyx <- as.matrix(expand.grid(WM$yrow,WM$xcol)) evalyx.redu <- evalyx[inn,] GN <- length(inn) if(is.null(ref.density)){ if(is.null(eta)) eta <- OS(pp) else eta <- checkit(eta,"'eta'") if(is.null(nu)) nu <- ((243/(2*sqrt(pi)))/(35*n))^(1/5)*min(sd(tt),IQR(tt)/1.34) # Univariate oversmoothing bandwidth else nu <- checkit(nu,"'nu'") # if(sedg){ # ref.density <- spattemp.density(pp,h=eta,tt=tt,lambda=nu,tlim=tlim,sedge="uniform",) # # d.eta <- density(pp,eta,edge=TRUE,positive=TRUE,dimyx=sres,spill=1) # d.eta$edg[d.eta$edg>1] <- 1 # d.etadens <- d.eta$raw/d.eta$edg # d.etaint <- integral(d.etadens) # d.etadens <- as.matrix(d.etadens)[inn]/d.etaint # epsilon.eta <- safelookup(d.eta$edg,pp,warn=FALSE) # } else { # # d.etadens <- as.matrix(d.eta$raw/integral(d.eta$raw))[inn] # epsilon.eta <- rep(1,n) # } # # if(tedg) epsilon.nu <- pnorm(tlim[2],mean=tt,sd=nu) - pnorm(tlim[1],mean=tt,sd=nu) # else epsilon.nu <- rep(1,n) ref.density <- spattemp.density(pp,h=eta,tt=tt,lambda=nu,tlim=tlim,sedge=sedge,tedge=tedge,sres=sres,tres=tres,verbose=FALSE) } else { if(!inherits(ref.density,"stden")) stop("'ref.density' must be of class \"stden\"; see ?spattemp.density") 
if(!compatible(as.im(WM),ref.density$z[[1]])) stop("'ref.density' must be evaluated on identical spatial domain as 'Window(pp)' given 'sres'") if(sedg&&is.null(ref.density$qs)) stop("'ref.density' spatial edge-correction must exist if sedge = \"uniform\"") # spatial matching eta <- ref.density$h nu <- ref.density$lambda # temporal matching # reftres <- length(ref.density$grt) # if(tres!=reftres) stop("'ref.density' temporal resolution (currently ",reftres,") must match 'tres' (currently ",tres,")",sep="") # if(!all(ref.density$tlim==range(grt))) stop("ref.density$tlim must be identical to 'tlim'") # if(tedg&&is.null(ref.density$qt)) stop("'ref.density' temporal edge-correction must exist if tedge = \"uniform\"") } if(!is.null(ref.density$qs)){ epsilon.eta <- safelookup(ref.density$qs,pp,warn=FALSE) use_fftw <- fftw_available() ifft_scale <- WM$xstep*WM$ystep/(4*sres^2) Mpad <- matrix(0,2*sres,2*sres) Mpad[1:sres,1:sres] <- inside fM <- fft2d(Mpad,fftw=use_fftw) } else { epsilon.eta <- rep(1,n) ifft_scale <- use_fftw <- fM <- NA } if(!is.null(ref.density$qt)) epsilon.nu <- pnorm(tlim[2],mean=tt,sd=nu) - pnorm(tlim[1],mean=tt,sd=nu) else epsilon.nu <- rep(1,n) xs <- ref.density$z[[1]]$xstep ys <- ref.density$z[[1]]$ystep ts <- ref.density$tgrid[2]-ref.density$tgrid[1] sqz <- lapply(ref.density$z,function(x) x^2) boot3 <- sum(Reduce("+",sqz)*xs*ys*ts,na.rm=TRUE) if(verbose) cat("Done.\nOptimising...\n") if(is.null(start)){ start <- c(eta,nu) } else { if(any(start<=0)) stop("Invalid starting values in 'start'") } result <- optim(par=start,boot.opt.spattemp.fix,sedg=sedg,tedg=tedg,WM=WM,tlim=tlim, sres=sres,tres=tres,fM=fM,ifft_scale=ifft_scale,inn=inn,GN=GN, evalyx.redu=evalyx.redu,evalt=grt,pp=pp,tt=tt,epsilon.eta=epsilon.eta, epsilon.nu=epsilon.nu,eta=eta,nu=nu,n=n,boot3=boot3,use_fftw=use_fftw, parallelise=NA,verbose=verbose)$par if(verbose) cat("Done.\n") names(result) <- c("h","lambda") return(result) } boot.opt.spattemp.fix <- 
function(hlam,sedg,tedg,WM,tlim,sres,tres,fM,ifft_scale,inn,GN,evalyx.redu,evalt, pp,tt,epsilon.eta,epsilon.nu,eta,nu,n,boot3,use_fftw,parallelise,verbose){ if(any(hlam<=0)) return(NA) h <- hlam[1] lam <- hlam[2] if(verbose) cat("h =",h,"\b; lambda =",lam,"\n") if(sedg){ fK.h <- kernel2d_fft(h,WM$xstep,WM$ystep,sres) fK.con <- fft2d(fM*fK.h,inverse=TRUE,use_fftw)[1:sres,1:sres] edg.h <- Mod(fK.con)*ifft_scale edg.h[edg.h>1] <- 1 GE.h <- edg.h[inn] } else { GE.h <- rep(1,length(inn)) } if(tedg) GE.lam <- pnorm(tlim[2],mean=evalt,sd=lam) - pnorm(tlim[1],mean=evalt,sd=lam) else GE.lam <- rep(1,tres) if(is.na(parallelise)){ bs2temp <- bs1 <- bs2 <- matrix(NA,n,GN) for(i in 1:GN){ evx <- rep(evalyx.redu[i,2],n)-pp$x evy <- rep(evalyx.redu[i,1],n)-pp$y bs2temp[,i] <- epsilon.eta^(-1)*kernel2d(evx,evy,sqrt(h^2+eta^2)) bs2[,i] <- epsilon.eta^(-1)*kernel2d(evx,evy,eta) bs1[,i] <- epsilon.eta^(-1)*kernel2d(evx,evy,sqrt(0.5*h^2+eta^2)) } bt2temp <- bt1 <- bt2 <- matrix(NA,n,tres) b1c <- b2c <- matrix(NA,GN,tres) for(j in 1:tres){ et <- rep(evalt[j],n)-tt bt2temp[,j] <- epsilon.nu^(-1)*dnorm(et,0,sqrt(lam^2+nu^2)) bt2[,j] <- epsilon.nu^(-1)*dnorm(et,0,nu) bt1[,j] <- epsilon.nu^(-1)*dnorm(et,0,sqrt(0.5*lam^2+nu^2)) b2temp.x.j <- colSums(bs2temp*matrix(rep(bt2temp[,j],GN),n,GN)) b2c[,j] <- b2temp.x.j*colSums(bs2*matrix(rep(bt2[,j],GN),n,GN)) b1c[,j] <- (1/(8*pi^1.5*h^2*lam))*colSums(bs1*matrix(rep(bt1[,j],GN),n,GN)) + ((n-1)/n)*b2temp.x.j^2 } } else { stop("'parallelise' unimplemented") } cella <- WM$xstep*WM$ystep*(evalt[2]-evalt[1]) dexi <- matrix(rep(GE.h,tres),GN,tres) deti <- matrix(rep(GE.lam,GN),GN,tres,byrow=TRUE) boot1 <- deti^(-2)*dexi^(-2)*b1c boot2 <- -2*deti^(-1)*dexi^(-1)*b2c return(sum(n^(-2)*(boot1+boot2)*cella,na.rm=TRUE)+boot3) } # boot.hlam <- function(bands,ppt,g=bands[1],gam=bands[2],tlim=NULL,sres=64,tres=sres,parallelise=NA){ # if(any(bands<=0)) return(NA) # # W <- Window(ppt) # Wm <- as.mask(W,dimyx=rep(sres,2)) # n <- npoints(ppt) # ppdat <- 
cbind(ppt$x,ppt$y) # tt <- marks(ppt) # if(is.null(tlim)) tlim <- range(tt) # h <- bands[1] # lam <- bands[2] # # dh <- density(ppt,h,diggle=FALSE,positive=TRUE,dimyx=c(sres,sres),spill=1) # dh$edg[dh$edg>1] <- 1 # dg <- density(ppt,g,diggle=TRUE,positive=TRUE,dimyx=c(sres,sres),spill=1) # dg$edg[dg$edg>1] <- 1 # nrp <- nearest.raster.point(x=ppt$x,y=ppt$y,w=Wm) # ep.g.i <- dg$edg[cbind(nrp$row,nrp$col)] # # dg.dens <- spattemp.density(ppt,g,gam,tlim,TRUE,sres,tres,NA) # ts <- dg.dens$TT # qtlam <- getQt(ts,lam,tlim=tlim) # ep.gam.i <- getQt(tt,gam,tlim=tlim) # evalyx <- as.matrix(expand.grid(dg.dens$X,dg.dens$Y)) # inn <- inside.owin(x=evalyx[,2],y=evalyx[,1],w=W) # # evalyx.redu <- evalyx[inn,] # GN <- nrow(evalyx.redu) # dexi <- matrix(rep(dh$edg[which(inn)],tres),GN,tres) # deti <- matrix(rep(qtlam,GN),GN,tres,byrow=TRUE) # # # bs2temp <- bs1 <- bs2 <- matrix(NA,n,GN) # for(xi in 1:GN){ # ev <- cbind(rep(evalyx.redu[xi,2],n),rep(evalyx.redu[xi,1],n))-ppdat # bs2temp[,xi] <- ep.g.i^(-1)*dmvnorm(ev,mean=c(0,0),sigma=diag(2)*(h^2+g^2)) # bs2[,xi] <- ep.g.i^(-1)*dmvnorm(ev,mean=c(0,0),sigma=diag(2)*g^2) # bs1[,xi] <- ep.g.i^(-1)*dmvnorm(ev,mean=c(0,0),sigma=diag(2)*(0.5*h^2+g^2)) # } # # bt2temp <- bt1 <- bt2 <- matrix(NA,n,tres) # b1c <- b2c <- matrix(NA,GN,tres) # for(tti in 1:tres){ # et <- rep(ts[tti],n)-tt # bt2temp[,tti] <- ep.gam.i^(-1)*dnorm(et,0,sqrt(lam^2+gam^2)) # bt2[,tti] <- ep.gam.i^(-1)*dnorm(et,0,gam) # bt1[,tti] <- ep.gam.i^(-1)*dnorm(et,0,sqrt(0.5*lam^2+gam^2)) # # b2temp.x.tti <- colSums(bs2temp*matrix(rep(bt2temp[,tti],GN),n,GN)) # b2c[,tti] <- b2temp.x.tti*colSums(bs2*matrix(rep(bt2[,tti],GN),n,GN)) # b1c[,tti] <- (1/(8*pi^1.5*h^2*lam))*colSums(bs1*matrix(rep(bt1[,tti],GN),n,GN)) + ((n-1)/n)*b2temp.x.tti^2 # } # # cella <- (dg.dens$X[2]-dg.dens$X[1])*(dg.dens$Y[2]-dg.dens$Y[1])*(dg.dens$TT[2]-dg.dens$TT[1]) # # boot1 <- deti^(-2)*dexi^(-2)*b1c # boot2 <- -2*deti^(-1)*dexi^(-1)*b2c # boot3 <- sum(dg.dens$Z^2*cella,na.rm=TRUE) # # 
return(sum(n^(-2)*(boot1+boot2)*cella,na.rm=TRUE)+boot3) # }
sparr/R/zzz.R0000644000176200001440000000131214012076542012560 0ustar liggesusers
# Package startup hook: prints the sparr ASCII banner together with the
# installed version number and brief pointers to news/help/citation.
.onAttach <- function(...){
  # Banner is assembled line by line; backslashes in the ASCII art are
  # doubled because these are string literals.
  s1 <- " _____ ___ ____ ____ ____ \n"
  s2 <- " / ___// _ \\/ _ \\/ __ \\/ __ \\ \n"
  s3 <- " \\__ \\/ ___/ __ / ___/ ___/ \n"
  s4 <- " ___/ / / / / / / /\\ \\/ /\\ \\ \n"
  # The version string is read from the installed package metadata at
  # attach time, so it always matches the installed build.
  s5 <- paste0("/____/_/ /_/ /_/_/ \\__/ \\_\\ v", packageDescription("sparr")$Version, "\n\n")
  # packageStartupMessage() (rather than cat/message) allows users to
  # silence the banner via suppressPackageStartupMessages().
  packageStartupMessage(paste("\n\nWelcome to\n",s1,s2,s3,s4,s5,"- type news(package=\"sparr\") for an overview\n- type help(\"sparr\") for documentation\n- type citation(\"sparr\") for how to cite\n",sep=""),appendLF=TRUE)
}
#\n*type vignette(\"sparr2\") to access the accompanying article [unimplemented]\n*type citation(\"sparr2\") for how to cite use of this package\n"
sparr/R/plot.rrst.R0000644000176200001440000000645114012076542013703 0ustar liggesusers
#' @rdname plotsparr
#' @method plot rrst
#' @export
# Plot method for spatiotemporal relative-risk ("rrst") objects: animates
# through the time slices, optionally with tolerance contours, pausing
# 'sleep' seconds between frames. The body continues in the next chunk.
plot.rrst <- function(x, tselect = NULL, type = c("joint", "conditional"),
                      fix.range = FALSE, tol.show = TRUE,
                      tol.type = c("upper", "lower", "two.sided"),
                      tol.args = list(levels = 0.05, lty = 1, drawlabels = TRUE),
                      sleep = 0.2, override.par = TRUE, expscale = FALSE, ...){
  # Capture additional plot.im arguments passed through '...'.
  ellip <- list(...)
if(is.null(ellip)) ellip <- list() if(is.null(ellip$box)) ellip$box <- FALSE if(is.null(ellip$ribargs)) ellip$ribargs <- list(box=TRUE) if(!is.null(ellip$zlim)) fix.range <- TRUE ellip$log <- FALSE mn <- is.null(ellip$main) typ <- type[1] if(typ=="joint"){ lst <- x$rr plst <- x$P } else if(typ=="conditional"){ lst <- x$rr.cond plst <- x$P.cond } else stop("Invalid 'type'") if(override.par) par(mfrow=c(1,1),mar=rep(2,4)) zlimeq <- c(0,min(sapply(lst,max)[sapply(lst,max)>0])) zlimconstant <- range(sapply(lst,range)) grt <- as.numeric(names(lst)) if(!is.null(tselect)){ tsel <- checktsel(tselect) if(!all(sapply(tsel,function(y) y>=x$tlim[1]) & sapply(tsel,function(y) y<=x$tlim[2]))) stop(paste("'tselect' must be within allowable time range of",prange(x$tlim))) index <- which(grt>=tsel[1]&grt<=tsel[2]) if(length(index)==0){ grt <- unique(tsel) intrp <- spattemp.slice(x,grt,checkargs=FALSE) if(typ=="joint"){ lst <- intrp$rr plst <- intrp$P } else { lst <- intrp$rr.cond plst <- intrp$P.cond } } else { grt <- grt[index] lst <- lst[index] plst <- plst[index] } } if(!is.null(ellip$zlim)) zlimconstant <- ellip$zlim if(expscale){ lst <- lapply(lst,exp) ellip$log <- FALSE if(fix.range&&is.null(ellip$col)){ # if(is.null(ellip$zlim)){ ellip$col <- beachcolourmap(range=exp(zlimconstant),sealevel=1) # } else { # ellip$col <- beachcolourmap(range=ellip$zlim,sealevel=1) # } } } if(!fix.range) rngs <- lapply(lst,range,na.rm=TRUE) if(length(lst)==1) sleep <- 0 drawtol <- tol.show&&!is.null(plst) if(drawtol){ plst <- lapply(plst,function(x) t(as.matrix(x))) tellip <- tol.args tellip$add <- TRUE tellip$x <- lst[[1]]$xcol tellip$y <- lst[[1]]$yrow tol.type <- tol.type[1] if(tol.type=="lower"){ plst <- lapply(plst,function(x) 1-x) } else if(tol.type=="two.sided"){ plst <- lapply(plst,function(x) 2*pmin(x,1-x)) } else if(tol.type!="upper"){ stop("invalid 'tol.type'") } } for(i in 1:length(lst)){ dev.hold() ellip$x <- lst[[i]] if(mn) ellip$main <- paste("t =",round(grt[i],5)) 
if(diff(range(lst[[i]]))==0&&is.null(ellip$zlim)&&!fix.range) ellip$zlim <- zlimeq if(fix.range){ ellip$zlim <- zlimconstant if(expscale) ellip$zlim <- NULL } else { ellip$zlim <- rngs[[i]] if(expscale&&(is.null(ellip$col)||i>1)){ ellip$col <- beachcolourmap(range=ellip$zlim,sealevel=1) ellip$zlim <- NULL } } # print(ellip) do.call("plot.im",ellip) if(drawtol){ tellip$z <- plst[[i]] suppressWarnings(do.call("contour",tellip)) } plot(as.polygonal(Window(x$f$pp)),add=TRUE) axis(1) axis(2) box(bty="l") dev.flush() Sys.sleep(sleep) } invisible(NULL) } sparr/R/print.bivden.R0000644000176200001440000000142214012076542014327 0ustar liggesusers#' Printing sparr objects #' #' \code{print} methods for classes \code{\link{bivden}}, \code{\link{stden}}, #' \code{\link{rrs}}, \code{\link{rrst}} and \code{\link{msden}}. #' #' @aliases print.bivden print.rrs print.msden print.stden print.rrst #' #' @rdname printsparr #' #' @param x An object of class \code{\link{bivden}}, \code{\link{stden}}, #' \code{\link{rrs}}, \code{\link{rrst}}, or \code{\link{msden}}. #' @param ... Ignored. #' #' @author T.M. Davies #' @export print.bivden <- function(x,...){ cat("Bivariate Kernel Density/Intensity Estimate\n\n") if(is.null(x$him)) sm <- "Fixed" else sm <- "Adaptive" cat("Bandwidth\n ",sm,"smoothing with h0 =",round(x$h0,4),unitname(x$z)[[2]],"(to 4 d.p.)\n\n") cat("No. 
of observations\n ",npoints(x$pp),"\n") }
sparr/R/tol.mc.ada.R0000644000176200001440000000276714012076542013656 0ustar liggesusers
# Monte-Carlo p-value surface for an adaptive relative-risk estimate.
#
# The case/control labels of the pooled data are randomly re-allocated
# ITER-1 times; the adaptive risk surface is recomputed for each permutation
# and the pointwise exceedance rank of the observed surface is returned as a
# matrix of p-values.
#
# Arguments:
#   rs       -- 'rrs' object holding the observed risk surface ('rr') and
#               its two 'bivden' components f (cases) and g (controls).
#   ITER     -- total number of Monte-Carlo iterations; the observed surface
#               counts as one, so ITER-1 permutations are generated.
#   parallel -- NULL for serial execution, otherwise the number of cores.
#   verbose  -- print a progress bar / status messages?
#   ...      -- passed through to risk().
tol.mc.ada <- function(rs,ITER,parallel,verbose,...){
  fd <- rs$f
  gd <- rs$g
  # Pool cases and controls; permuting indices into the pool re-labels them.
  pool <- suppressWarnings(superimpose(fd$pp,gd$pp))
  nf <- npoints(fd$pp)
  ng <- npoints(gd$pp)
  res <- dim(fd$z)[1]
  indx <- seq_len(npoints(pool))
  # Recover the edge-correction style used for the original estimate.
  if(is.null(fd$q)){
    edg <- "none"
  } else if(is.im(fd$q)){
    edg <- "uniform"
  } else {
    edg <- "diggle"
  }
  # The surface is on the log scale iff any value is negative.
  logt <- !all(rs$rr>=0)
  rmat <- as.matrix(rs$rr)
  # Exceedance count starts at 1: the observed surface always "beats"
  # itself, giving p-values in (0,1] rather than allowing exact zero.
  mcmat <- matrix(1,res,res)
  nperm <- ITER - 1
  if(is.null(parallel)){
    # seq_len() (not 1:(ITER-1)) so that ITER = 1 performs no permutations
    # instead of iterating over c(1,0); the progress bar is likewise only
    # created when there is at least one permutation (txtProgressBar
    # rejects min == max).
    if(verbose && nperm > 0) pb <- txtProgressBar(0,nperm,style=3)
    for(i in seq_len(nperm)){
      shuff <- sample(indx)
      rtemp <- as.matrix(risk(pool[shuff[1:nf]],pool[shuff[(nf+1):(nf+ng)]],log=logt,verbose=FALSE,h0=c(fd$h0,gd$h0),hp=c(fd$hp,gd$hp),resolution=res,edg=edg,adapt=TRUE,...)$rr)
      mcmat <- mcmat+(rtemp>=rmat)
      if(verbose) setTxtProgressBar(pb,i)
    }
    if(verbose && nperm > 0) close(pb)
  } else {
    ncores <- detectCores()
    if(verbose) cat(paste("Running MC iterations on",parallel,"/",ncores,"cores..."))
    if(parallel>ncores) stop("Parallel cores requested exceeds available count")
    registerDoParallel(cores=parallel)
    mclist <- foreach(i=seq_len(nperm),.packages=c("spatstat","sparr")) %dopar% {
      shuff <- sample(indx)
      rtemp <- as.matrix(risk(pool[shuff[1:nf]],pool[shuff[(nf+1):(nf+ng)]],log=logt,verbose=FALSE,h0=c(fd$h0,gd$h0),hp=c(fd$hp,gd$hp),resolution=res,edg=edg,adapt=TRUE,...)$rr)
      return(rtemp>=rmat)
    }
    if(verbose) cat("Done.\n")
    # Reduce() with an initial value handles an empty 'mclist' (nperm = 0),
    # where Reduce("+", mclist) alone would return NULL.
    mcmat <- Reduce("+",mclist,mcmat)
  }
  return(mcmat/ITER)
}
sparr/R/plot.stden.R0000644000176200001440000000341014012076542014016 0ustar liggesusers
#' @rdname plotsparr
#' @method plot stden
#' @export
plot.stden <- function(x, tselect = NULL, type = c("joint", "conditional"),
                       fix.range = FALSE, sleep = 0.2, override.par = TRUE, ...){
  ellip <- list(...)
  if(is.null(ellip)) ellip <- list()
  # Fill in plot.im defaults unless the caller overrode them via '...'.
  if(is.null(ellip$box)) ellip$box <- FALSE
  if(is.null(ellip$ribargs)) ellip$ribargs <- list(box=TRUE)
  if(is.null(ellip$log)) ellip$log <- FALSE
  # Only auto-title each frame when the user did not supply 'main'.
  mn <- is.null(ellip$main)
  typ <- type[1]
  if(typ=="joint"){
    lst <- x$z
  } else if(typ=="conditional"){
    lst <- x$z.cond
  } else stop("Invalid 'type'")
  if(override.par) par(mfrow=c(1,1),mar=rep(2,4))
  # Fallback z-limits for a completely flat slice: zero up to the smallest
  # positive slice maximum.
  zlimeq <- c(0,min(sapply(lst,max)[sapply(lst,max)>0]))
  # Common z-limits across all time slices (used when fix.range = TRUE).
  zlimconstant <- range(sapply(lst,range))
  if(ellip$log&&fix.range) zlimconstant <- log(zlimconstant)
  # The slice times are stored as the names of the image list.
  grt <- as.numeric(names(lst))
  if(!is.null(tselect)){
    tsel <- checktsel(tselect)
    if(!all(sapply(tsel,function(y) y>=x$tlim[1]) & sapply(tsel,function(y) y<=x$tlim[2]))) stop(paste("'tselect' must be within allowable time range of",prange(x$tlim)))
    index <- which(grt>=tsel[1]&grt<=tsel[2])
    if(length(index)==0){
      # Requested time(s) fall between stored slices: interpolate them.
      grt <- unique(tsel)
      intrp <- spattemp.slice(x,grt,checkargs=FALSE)
      if(typ=="joint") lst <- intrp$z
      else lst <- intrp$z.cond
    } else {
      grt <- grt[index]
      lst <- lst[index]
    }
  }
  # No pause needed when there is a single frame.
  if(length(lst)==1) sleep <- 0
  for(i in 1:length(lst)){
    # dev.hold()/dev.flush() buffer each frame so it appears atomically.
    dev.hold()
    ellip$x <- lst[[i]]
    if(mn) ellip$main <- paste("t =",round(grt[i],5))
    # A flat surface breaks plot.im's automatic scaling; substitute zlimeq.
    if(diff(range(lst[[i]]))==0&&is.null(ellip$zlim)&&!fix.range) ellip$zlim <- zlimeq
    if(fix.range) ellip$zlim <- zlimconstant
    do.call("plot.im",ellip)
    plot(as.polygonal(Window(x$pp)),add=TRUE)
    axis(1)
    axis(2)
    box(bty="l")
    dev.flush()
    Sys.sleep(sleep)
  }
  invisible(NULL)
}
sparr/R/LSCV.risk.R0000644000176200001440000005350214012076542013451 0ustar liggesusers
#' Jointly optimal bandwidth selection for the spatial relative risk function
#'
#' Methods to find a jointly optimal, common case-control isotropic bandwidth for use in
#' estimation of the fixed or adaptive kernel-smoothed relative risk function.
#' #' Given the established preference of using a common bandwidth for both case #' and control density estimates when constructing a relative #' risk surface, This function calculates a `jointly optimal', common isotropic #' LSCV bandwidth for the (Gaussian) kernel-smoothed relative risk function #' (case-control density-ratio). It can be shown that choosing a bandwidth that #' is equal for both case and control density estimates is preferable to #' computing `separately optimal' bandwidths (Kelsall and Diggle, 1995). The user #' can choose to either calculate a common smoothing parameter for a fixed-bandwidth #' relative risk surface (\code{type = "fixed"}; default), or a common global bandwidth for #' an adaptive risk surface (\code{type = "adaptive"}). See further comments below. #' #' #' #' \itemize{ #' \item\code{method = "kelsall-diggle"}: the function computes the #' common bandwidth which minimises the approximate mean integrated squared #' error (MISE) of the log-transformed risk surface (Kelsall and Diggle, 1995). #' \item\code{method = "hazelton"}: the function minimises a #' \emph{weighted-by-control} MISE of the (raw) relative risk function #' (Hazelton, 2008). #' \item\code{method = "davies"}: the optimal bandwidth is #' one that minimises a crude plug-in approximation to the \emph{asymptotic} #' MISE (Davies, 2013). Only possible for \code{type = "fixed"}. #' } #' #' For jointly optimal, common global bandwidth selection when \code{type = "adaptive"}, the #' optimisation routine utilises \code{\link{multiscale.density}}. Like \code{\link{LSCV.density}}, #' the leave-one-out procedure does not affect the pilot density, for which additional #' control is offered via the \code{hp} and \code{pilot.symmetry} arguments. The user has the option of #' obtaining a so-called \emph{symmetric} estimate (Davies et al. 2016) via #' \code{pilot.symmetry}. This amounts to choosing the same pilot density for #' both case and control densities. 
By choosing \code{"none"} (default), the #' result uses the case and control data separately for the fixed-bandwidth #' pilots, providing the original asymmetric density-ratio of Davies and #' Hazelton (2010). By selecting either of \code{"f"}, \code{"g"}, or #' \code{"pooled"}, the pilot density is calculated based on the case, control, #' or pooled case/control data respectively (using \code{hp[1]} as the fixed #' bandwidth). Davies et al. (2016) noted some beneficial practical behaviour #' of the symmetric adaptive surface over the asymmetric. (The pilot bandwidth(s), if not supplied in \code{hp}, are calculated #' internally via default use of \code{\link{LSCV.density}}, using the requested symmetric-based data set, or separately with respect to the case and control datasets \code{f} and \code{g} if #' \code{pilot.symmetry = "none"}.) #' #' @param f Either a pre-calculated object of class \code{\link{bivden}} #' representing the `case' (numerator) density estimate, or an object of class #' \code{\link[spatstat.geom]{ppp}} giving the observed case data. Alternatively, if #' \code{f} is \code{\link[spatstat.geom]{ppp}} object with dichotomous #' factor-valued \code{\link[spatstat.geom]{marks}}, the function treats the first #' level as the case data, and the second as the control data, obviating the #' need to supply \code{g}. #' @param g As for \code{f}, for the `control' (denominator) density; this #' object must be of the same class as \code{f}. Ignored if, as stated above, #' \code{f} contains both case and control observations. #' @param hlim An optional vector of length 2 giving the limits of the #' optimisation routine with respect to the bandwidth. If unspecified, the #' function attempts to choose this automatically. #' @param hseq An optional increasing sequence of bandwidth values at which to #' manually evaluate the optimisation criterion. Used only in the case #' \code{(!auto.optim && is.null(hlim))}. 
#' @param type A character string; \code{"fixed"} (default) performs classical leave-one-out
#' cross-validation for a jointly optimal fixed bandwidth. Alternatively, \code{"adaptive"} utilises
#' multiscale adaptive kernel estimation (Davies & Baddeley, 2018) to run the cross-validation
#' in an effort to find a suitable jointly optimal, common global bandwidth for the adaptive relative risk function. See `Details'.
#' @param method A character string controlling the selector to use. There are
#' three types, based on either the mean integrated squared error (MISE)
#' (Kelsall and Diggle, 1995; default -- \code{method = "kelsall-diggle"}); a
#' weighted MISE (Hazelton, 2008 -- \code{method = "hazelton"}); or an
#' approximation to the asymptotic MISE (Davies, 2013 -- \code{method =
#' "davies"}). See `Details'.
#' @param resolution Spatial grid size; the optimisation will be based on a
#' [\code{resolution} \eqn{\times}{x} \code{resolution}] density estimate.
#' @param edge Logical value indicating whether to edge-correct the density
#' estimates used.
#' @param hp A single numeric value or a vector of length 2 giving the pilot
#' bandwidth(s) to be used for estimation of the pilot
#' densities for adaptive risk surfaces. Ignored if \code{type = "fixed"}.
#' @param pilot.symmetry A character string used to control the type of
#' symmetry, if any, to use for the bandwidth factors when computing an
#' adaptive relative risk surface. See `Details'. Ignored if \code{type = "fixed"}.
#' @param auto.optim Logical value indicating whether to automate the numerical
#' optimisation using \code{\link{optimise}}. If \code{FALSE}, the optimisation
#' criterion is evaluated over \code{hseq} (if supplied), or over a sequence of
#' values controlled by \code{hlim} and \code{seqres}.
#' @param seqres Optional resolution of an increasing sequence of bandwidth
#' values. Only used if \code{(!auto.optim && is.null(hseq))}.
#' @param parallelise Numeric argument to invoke parallel processing, giving #' the number of CPU cores to use when \code{!auto.optim}. Experimental. Test #' your system first using \code{parallel::detectCores()} to identify the #' number of cores available to you. #' @param verbose Logical value indicating whether to provide function progress #' commentary. #' @param ... Additional arguments such as \code{dimz} and \code{trim} to be passed to #' the internal calls to \code{\link{multiscale.density}}. #' @return A single numeric value of the estimated bandwidth (if #' \code{auto.optim = TRUE}). Otherwise, a list of two numeric vectors of equal #' length giving the bandwidth sequence (as \code{hs}) and corresponding CV #' function value (as \code{CV}). #' #' @section Warning: The jointly optimal bandwidth selector can be #' computationally expensive for large data sets and fine evaluation grid #' resolutions. The user may need to experiment with adjusting \code{hlim} to #' find a suitable minimum. #' #' @author T. M. Davies #' #' @seealso \code{\link{bivariate.density}} #' #' @references #' #' Davies, T. M. (2013), Jointly optimal bandwidth selection for #' the planar kernel-smoothed density-ratio, \emph{Spatial and Spatio-temporal #' Epidemiology}, \bold{5}, 51-65. #' #' Davies, T.M. and Baddeley A. (2018), Fast computation of #' spatially adaptive kernel estimates, \emph{Statistics and Computing}, \bold{28}(4), 937-956. #' #' Davies, T.M. and Hazelton, M.L. (2010), Adaptive kernel #' estimation of spatial relative risk, \emph{Statistics in Medicine}, #' \bold{29}(23) 2423-2437. #' #' Davies, T.M., Jones, K. and Hazelton, M.L. #' (2016), Symmetric adaptive smoothing regimens for estimation of the spatial #' relative risk function, \emph{Computational Statistics & Data Analysis}, #' \bold{101}, 12-28. #' #' Hazelton, M. L. 
(2008), Letter to the #' editor: Kernel estimation of risk surfaces without the need for edge #' correction, \emph{Statistics in Medicine}, \bold{27}, 2269-2272. #' #' Kelsall, J.E. and Diggle, P.J. (1995), Kernel estimation of relative risk, #' \emph{Bernoulli}, \bold{1}, 3-16. #' #' Silverman, B.W. (1986), \emph{Density Estimation for Statistics and Data Analysis}, #' Chapman & Hall, New York. #' #' Wand, M.P. and Jones, C.M., 1995. \emph{Kernel Smoothing}, Chapman & Hall, #' London. #' #' @examples #' #' \donttest{ #' #' data(pbc) #' pbccas <- split(pbc)$case #' pbccon <- split(pbc)$control #' #' # FIXED (for common h) #' #' LSCV.risk(pbccas,pbccon) #' LSCV.risk(pbccas,pbccon,method="hazelton") #' hcv <- LSCV.risk(pbccas,pbccon,method="davies",auto.optim=FALSE) #' plot(hcv[,1],log(hcv[,2]));abline(v=hcv[which.min(hcv[,2]),1],col=2,lty=2) #' #' #' # ADAPTIVE (for common h0) #' #' LSCV.risk(pbccas,pbccon,type="adaptive") #' #' # change pilot bandwidths used #' LSCV.risk(pbccas,pbccon,type="adaptive",hp=c(OS(pbccas)/2,OS(pbccon)/2)) #' #' # specify pooled-data symmetric relative risk estimator #' LSCV.risk(pbccas,pbccon,type="adaptive",hp=OS(pbc),pilot.symmetry="pooled") #' #' # as above, for Hazelton selector #' LSCV.risk(pbccas,pbccon,type="adaptive",method="hazelton") #' LSCV.risk(pbccas,pbccon,type="adaptive",method="hazelton",hp=c(OS(pbccas)/2,OS(pbccon)/2)) #' LSCV.risk(pbccas,pbccon,type="adaptive",method="hazelton",hp=OS(pbc),pilot.symmetry="pooled") #' } #' #' @export LSCV.risk <- function(f, g = NULL, hlim = NULL, hseq = NULL, type = c("fixed", "adaptive"), method = c("kelsall-diggle", "hazelton", "davies"), resolution = 64, edge = TRUE, hp = NULL, pilot.symmetry = c("none","f","g","pooled"), auto.optim = TRUE, seqres = 30, parallelise = NA, verbose = TRUE, ...){ if(!inherits(f,"ppp")) stop("'f' must be an object of class \"ppp\"") if(is.null(g)){ fm <- marks(f) if(!is.factor(fm)) marks(f) <- fm <- factor(fm) if(nlevels(fm)!=2) stop("'f' marks must be 
dichotomous if 'g' unsupplied") fs <- split(f) f <- fs[[1]] g <- fs[[2]] } if(!inherits(g,"ppp")) stop("'g' must be an object of class \"ppp\"") W <- Window(f) if(!identical_windows(W,Window(g))) stop("study windows for 'f' and 'g' must be identical") if(!is.null(hlim)){ if(hlim[1]>=hlim[2]) stop("invalid 'hlim'") } else { md <- min(c(nndist(unique(f)),nndist(unique(g)))) hlim <- c(md,max(md*50,min(diff(W$xrange),diff(W$yrange))/6)) } meth <- method[1] typ <- type[1] if(meth=="davies"&&typ=="adaptive") stop("method = \"davies\" not possible for type = \"adaptive\"") if(typ=="fixed"){ if(auto.optim){ if(meth=="kelsall-diggle"){ if(verbose) cat("Searching for optimal Kelsall-Diggle h in [",round(hlim[1],3),",",round(hlim[2],3),"]...",sep="") result <- optimise(LSCV.risk.single,interval=hlim,cases=f,controls=g,res=resolution,edge=edge,hazey=FALSE)$minimum } else if(meth=="hazelton"){ if(verbose) cat("Searching for optimal Hazelton h in [",round(hlim[1],3),",",round(hlim[2],3),"]...",sep="") result <- optimise(LSCV.risk.single,interval=hlim,cases=f,controls=g,res=resolution,edge=edge,hazey=TRUE)$minimum } else if(meth=="davies"){ if(verbose) cat("Searching for optimal Davies h in [",round(hlim[1],3),",",round(hlim[2],3),"]\n -initialisation...",sep="") marks(f) <- NULL marks(g) <- NULL pooled <- suppressWarnings(superimpose(f,g)) lambda <- LSCV.density(pooled,verbose=FALSE) bp <- BAMprep(f,g,lambda,3,resolution) if(verbose) cat("Done.\n -optimisation...") result <- optimise(BAM.single,interval=hlim,edge=edge,BP=bp)$minimum } else { stop("invalid 'method'") } if(verbose) cat("Done.\n") } else { if(is.null(hseq)) hseq <- seq(hlim[1],hlim[2],length=seqres) hn <- length(hseq) if(meth=="kelsall-diggle"){ if(is.na(parallelise)){ lscv.vec <- rep(NA,hn) if(verbose) pb <- txtProgressBar(1,hn) for(i in 1:hn){ lscv.vec[i] <- LSCV.risk.single(hseq[i],cases=f,controls=g,res=resolution,edge=edge,hazey=FALSE) if(verbose) setTxtProgressBar(pb,i) } if(verbose) close(pb) } else { ncores 
<- detectCores() if(verbose) cat(paste("Evaluating criterion on",parallelise,"/",ncores,"cores...")) if(parallelise>ncores) stop("cores requested exceeds available count") registerDoParallel(cores=parallelise) lscv.vec <- foreach(i=1:hn,.packages="spatstat",.combine=c) %dopar% { return(LSCV.risk.single(hseq[i],cases=f,controls=g,res=resolution,edge=edge,hazey=FALSE)) } if(verbose) cat("Done.\n") } } else if(meth=="hazelton"){ if(is.na(parallelise)){ lscv.vec <- rep(NA,hn) if(verbose) pb <- txtProgressBar(1,hn) for(i in 1:hn){ lscv.vec[i] <- LSCV.risk.single(hseq[i],cases=f,controls=g,res=resolution,edge=edge,hazey=TRUE) if(verbose) setTxtProgressBar(pb,i) } if(verbose) close(pb) } else { ncores <- detectCores() if(verbose) cat(paste("Evaluating criterion on",parallelise,"/",ncores,"cores...")) if(parallelise>ncores) stop("cores requested exceeds available count") registerDoParallel(cores=parallelise) lscv.vec <- foreach(i=1:hn,.packages="spatstat",.combine=c) %dopar% { return(LSCV.risk.single(hseq[i],cases=f,controls=g,res=resolution,edge=edge,hazey=TRUE)) } if(verbose) cat("Done.\n") } } else if(meth=="davies"){ marks(f) <- NULL marks(g) <- NULL pooled <- suppressWarnings(superimpose(f,g)) lambda <- LSCV.density(pooled,verbose=FALSE) bp <- BAMprep(f,g,lambda,3,resolution) if(is.na(parallelise)){ lscv.vec <- rep(NA,hn) if(verbose) pb <- txtProgressBar(1,hn) for(i in 1:hn){ lscv.vec[i] <- BAM.single(hseq[i],edge=edge,BP=bp) if(verbose) setTxtProgressBar(pb,i) } if(verbose) close(pb) } else { ncores <- detectCores() if(verbose) cat(paste("Evaluating criterion on",parallelise,"/",ncores,"cores...")) if(parallelise>ncores) stop("cores requested exceeds available count") registerDoParallel(cores=parallelise) lscv.vec <- foreach(i=1:hn,.packages="spatstat",.combine=c) %dopar% { return(BAM.single(hseq[i],edge=edge,BP=bp)) } if(verbose) cat("Done.\n") } } else { stop("invalid 'method'") } result <- cbind(hseq,lscv.vec) dimnames(result)[[2]] <- c("h","CV") } return(result) 
} else if(typ=="adaptive"){ pilot.symmetry <- pilot.symmetry[1] pdat <- list() if(pilot.symmetry=="none"){ pdat[[1]] <- f pdat[[2]] <- g } else if(pilot.symmetry=="f"){ pdat[[1]] <- pdat[[2]] <- f } else if(pilot.symmetry=="g"){ pdat[[1]] <- pdat[[2]] <- g } else if(pilot.symmetry=="pooled"){ marks(f) <- NULL marks(g) <- NULL pooled <- suppressWarnings(superimpose(f,g)) pdat[[1]] <- pdat[[2]] <- pooled } else { stop("invalid 'pilot.symmetry' argument") } if(!is.null(hp)){ if(length(hp)>1){ fp <- hp[1] gp <- hp[2] } else { fp <- gp <- hp[1] } } else { if(verbose) cat("Selecting pilot bandwidth(s)...") if(pilot.symmetry=="none"){ if(verbose) cat("\n --f--\n") fp <- LSCV.density(f,verbose=FALSE) if(verbose) cat(" --g--\n") gp <- LSCV.density(g,verbose=FALSE) } else { fp <- gp <- LSCV.density(pdat[[1]],verbose=FALSE) } if(verbose) cat(paste("Done.\n [ Using hp(f) =",fp,"\b; hp(g) =",gp,"]\n")) } hhash <- mean(hlim) if(verbose) cat("Computing multi-scale estimates...\n --f--\n") fms <- multiscale.density(f,h0=hhash,hp=fp,h0fac=hlim/hhash,edge=ifelse(edge,"uniform","none"),resolution=resolution,intensity=FALSE,pilot.density=pdat[[1]],verbose=FALSE,...) if(verbose) cat(" --g--\n") gms <- multiscale.density(g,h0=hhash,hp=gp,h0fac=hlim/hhash,edge=ifelse(edge,"uniform","none"),resolution=resolution,intensity=FALSE,pilot.density=pdat[[2]],verbose=FALSE,...) 
if(verbose) cat("Done.\n") h0range <- fms$h0range if(meth=="kelsall-diggle"){ if(auto.optim){ if(verbose) cat("Searching for optimal h0 in ",prange(h0range),"...",sep="") h0opt <- optimise(ms.loo.risk,interval=h0range,fob=fms,gob=gms,hazey=FALSE)$minimum if(verbose) cat("Done.\n") return(h0opt) } else { if(is.null(hseq)) hseq <- seq(h0range[1],h0range[2],length=seqres) hn <- length(hseq) if(is.na(parallelise)){ lscv.vec <- rep(NA,hn) if(verbose) pb <- txtProgressBar(1,hn) for(i in 1:hn){ lscv.vec[i] <- suppressWarnings(ms.loo.risk(hseq[i],fob=fms,gob=gms,hazey=FALSE)) if(verbose) setTxtProgressBar(pb,i) } if(verbose) close(pb) } else { ncores <- detectCores() if(verbose) cat(paste("Evaluating criterion on",parallelise,"/",ncores,"cores...")) if(parallelise>ncores) stop("cores requested exceeds available count") registerDoParallel(cores=parallelise) lscv.vec <- foreach(i=1:hn,.combine=c) %dopar% { # .packages="spatstat" return(suppressWarnings(ms.loo.risk(hseq[i],fob=fms,gob=gms,hazey=FALSE))) } if(verbose) cat("Done.\n") } } } else if(meth=="hazelton"){ if(auto.optim){ if(verbose) cat("Searching for optimal h0 in ",prange(h0range),"...",sep="") h0opt <- optimise(ms.loo.risk,interval=h0range,fob=fms,gob=gms,hazey=TRUE)$minimum if(verbose) cat("Done.\n") return(h0opt) } else { if(is.null(hseq)) hseq <- seq(h0range[1],h0range[2],length=seqres) hn <- length(hseq) if(is.na(parallelise)){ lscv.vec <- rep(NA,hn) if(verbose) pb <- txtProgressBar(1,hn) for(i in 1:hn){ lscv.vec[i] <- suppressWarnings(ms.loo.risk(hseq[i],fob=fms,gob=gms,hazey=TRUE)) if(verbose) setTxtProgressBar(pb,i) } if(verbose) close(pb) } else { ncores <- detectCores() if(verbose) cat(paste("Evaluating criterion on",parallelise,"/",ncores,"cores...")) if(parallelise>ncores) stop("cores requested exceeds available count") registerDoParallel(cores=parallelise) lscv.vec <- foreach(i=1:hn,.combine=c) %dopar% { # .packages="spatstat" return(suppressWarnings(ms.loo.risk(hseq[i],fob=fms,gob=gms,hazey=TRUE))) } 
if(verbose) cat("Done.\n") } } } else { stop("invalid 'method'") } result <- cbind(hseq,lscv.vec) dimnames(result)[[2]] <- c("h","CV") return(result) } else { stop("invalid 'type'") } } # if(!is.null(pilot.args)&&!is.list(pilot.args)) stop("'pilot.args' must be a list") # # if(!is.null(pilot.args$pilot.density)){ # fp <- gp <- NULL # if(is.list(pilot.args$pilot.density)&&!is.im(pilot.args$pilot.density)){ # if(length(pilot.args$pilot.density)>1){ # fpilot <- pilot.args$pilot.density[[1]] # gpilot <- pilot.args$pilot.density[[2]] # if(!is.im(fpilot)||!is.im(gpilot)) stop("pilot.args$pilot.density must be a pixel image ('spatstat' class 'im') or a list of two pixel images") # } # } else { # if(!is.im(pilot.args$pilot.density)||!is.ppp(pilot.args$pilot.density)) stop("pilot.args$pilot.density must be of class 'im' or 'ppp' (or a list of two)") # fpilot <- gpilot <- pilot.args$pilot.density # } # } else { # fpilot <- gpilot <- NULL # # if(!is.null(pilot.args$hp)){ # if(length(pilot.args$hp)>1){ # fp <- pilot.args$hp[1] # gp <- pilot.args$hp[2] # } else { # fp <- gp <- pilot.args$hp[1] # } # } else { # if(verbose) cat("Selecting pilot bandwidths...\n --f--\n") # fp <- LSCV.density(f,verbose=FALSE) # if(verbose) cat(" --g--\n") # gp <- LSCV.density(g,verbose=FALSE) # if(verbose) cat(paste("Done.\n [ Found hp(f) =",fp,"\b; hp(g) =",gp,"]\n")) # } # } # if(!is.null(pilot.args$dimz)){ # dimz <- pilot.args$dimz[1] # } else { # dimz <- resolution # } # # if(!is.null(pilot.args$trim)){ # trim <- pilot.args$trim # } else { # trim <- 5 # } # # the \code{pilot.args} argument. This should be supplied as a named # list, with optional components \code{hp}, \code{pilot.density}, \code{dimz}, and \code{trim}. # See the documentation for these arguments in \code{\link{multiscale.density}}. 
By default, \code{trim = 5}; # \code{dimz = resolution}; \code{pilot.density = NULL}; and # Otherwise, the \code{pilot.density} component can be a single # pixel \code{\link[spatstat.geom]{im}}age (defined on the same domain as the data in \code{f} and \code{g}, and also matching \code{resolution}), posing as the common pilot density (i.e. if the selected global bandwidth # is intended for a symmetric adaptive relative risk surface, see Davies et al. 2016). Aternatively, the \code{pilot.density} component can be provided as a list # of two pixel images -- one for the case density, the other for the control (in that order). # The \code{hp} component is only used if \code{pilot.args$pilot.density} is unsupplied, in which case it should be a vector of length one or two giving either a common pilot bandwidth # or the case and control pilot bandwidths respectively. Either way, unless \code{pilot.args$pilot.density} is a single pixel \code{\link[spatstat.geom]{im}}age as noted above, # the pilot densities are computed separately using the case (\code{f}) and control (\code{g}) data supplied to the function for an asymmetric adaptive relative risk surface (Davies & Hazelton, 2010). 
sparr/R/bivden.LOO.R0000644000176200001440000000702014012076542013624 0ustar liggesusersbivden.LOO <- function(pp,h0,hp,edge,gamma.scale,trim,resolution,parallel,weights,za){ n <- npoints(pp) if(is.null(weights)) weights <- rep(1,n) pilot.density.spec.loo <- density(pp,sigma=hp,dimyx=rep(resolution,2),at="points",edge=edge,positive=TRUE,leaveoneout=TRUE,weights=weights/(n-1)) W <- Window(pp) Wm <- as.mask(W,dimyx=rep(resolution,2)) evalxy <- as.matrix(expand.grid(Wm$xcol,Wm$yrow)) notin <- !inside.owin(x=evalxy[,1],y=evalxy[,2],w=W) evalxy.in <- evalxy[!notin,] hsi <- 1:n loo <- rep(NA,n) qv <- rep(1,n) h.spec <- matrix(NA,n,n) if(is.null(parallel)){ for(i in hsi){ ppmi <- pp[-i] wi <- weights[-i] pilot.density <- density(ppmi,sigma=hp,dimyx=rep(resolution,2),edge=edge,positive=FALSE,weights=wi) pilot.density.spec <- safelookup(pilot.density,ppmi,warn=FALSE)#density(ppmi,sigma=hp,dimyx=rep(resolution,2),at="points",positive=TRUE,leaveoneout=FALSE) pi.int <- integral(pilot.density) pilot.density <- pilot.density/pi.int pilot.density.spec <- pilot.density.spec/pi.int if(za==0){ if(any(pilot.density.spec<=0)){ loo[i] <- NA next } } else if(za==1){ pilot.density.spec <- posifybivden(pilot.density.spec) } else if(za==2){ pilot.density.spec[pilot.density.spec<=0] <- min(pilot.density.spec[pilot.density.spec>0]) } pspec <- pilot.density.spec^(-0.5) gamma <- processgamma(gamma.scale,pilot.density.spec) h.spec.mi <- h.spec[hsi[-i],i] <- h0*pmin(pspec/gamma,trim) h.hypo.i <- h.spec[i,i] <- h0*min(pilot.density.spec.loo[i]^(-0.5)/gamma,trim) if(edge){ gxy <- kernel2d(evalxy.in[,1]-pp$x[i],evalxy.in[,2]-pp$y[i],h.hypo.i) qv[i] <- dintegral(gxy,Wm$xstep,Wm$ystep) } ivals <- kernel2d(pp$x[i]-ppmi$x, pp$y[i]-ppmi$y,h.spec.mi) loo[i] <- mean(wi*ivals)/qv[i] #min(,infvec[i]) } } else { if(parallel>detectCores()) stop("Parallel cores requested exceeds available count") registerDoParallel(cores=parallel) loo <- foreach(i=1:n,.packages="spatstat",.combine=c) %dopar% { ppmi <- pp[-i] 
wi <- weights[-i] pilot.density <- density(ppmi,sigma=hp,dimyx=rep(resolution,2),edge=edge,positive=TRUE,weights=wi) pilot.density.spec <- safelookup(pilot.density,ppmi,warn=FALSE)#density(ppmi,sigma=hp,dimyx=rep(resolution,2),at="points",positive=TRUE,leaveoneout=FALSE) pi.int <- integral(pilot.density) pilot.density$v <- pilot.density$v/pi.int pilot.density.spec <- pilot.density.spec/pi.int if(za==0){ if(any(pilot.density.spec<=0)){ return(NA) } } else if(za==1){ pilot.density.spec <- posifybivden(pilot.density.spec) } else if(za==2){ pilot.density.spec[pilot.density.spec<=0] <- min(pilot.density.spec[pilot.density.spec>0]) } pspec <- pilot.density.spec^(-0.5) gamma <- processgamma(gamma.scale,pilot.density.spec) h.spec.mi <- h0*pmin(pspec/gamma,trim) # h.spec[hsi[-i],i] <- h.hypo.i <- h0*min(pilot.density.spec.loo[i]^(-0.5)/gamma,trim) # h.spec[i,i] <- if(edge){ gxy <- kernel2d(evalxy.in[,1]-pp$x[i],evalxy.in[,2]-pp$y[i],h.hypo.i) qi <- dintegral(gxy,Wm$xstep,Wm$ystep) } else { qi <- 1 } ivals <- kernel2d(pp$x[i]-ppmi$x,pp$y[i]-ppmi$y,h.spec.mi) return(mean(wi*ivals)/qi) } } return(list(loo,qv,h.spec)) # qv and h.spec only filled in when parallel=NULL }sparr/R/SLIK.adapt.R0000644000176200001440000002141414012076542013562 0ustar liggesusers#' Simultaneous global/pilot likelihood bandwidth selection #' #' Isotropic global and pilot bandwidth selection for adaptive density/intensity #' based on likelihood cross-validation. #' #' This function is a generalisation of \code{\link{LIK.density}}, and is used in attempts to simultaneously choose #' an optimal global and pilot bandwidth for adaptive kernel density estimates. Where \code{\link{LIK.density}} for adaptive #' estimates assumes the pilot density is held constant (and is not subject to the leave-one-out operations), this function #' allows the pilot bandwidth to vary alongside the global. 
#' #' Thus, in contrast to \code{\link{LIK.density}} the internal leave-one-out operations now also affect the #' pilot estimation stage. Hence, the set of variable bandwidths changes as each point is left out. In turn, this means the leave-one-out operations must #' be computed by brute force, and this is computationally expensive. #' #' Identifiability problems can sometimes arise when the global and pilot bandwidths are allowed to `float freely' in the bivariate optimisation routine, which is the default #' behaviour of the function (with \code{hold = FALSE}). This can be curbed by setting \code{hold = TRUE}, which forces both the global and pilot #' to be held at the same value during optimisation. Doing this also has the beneficial side effect of turning the problem into one of univariate optimisation, thereby reducing total computational cost. Current work (Davies & Lawson, 2018) provides some empirical evidence that this strategy performs quite well in practice. #' #' Like \code{\link{LSCV.density}} and \code{\link{LIK.density}}, the argument \code{zero.action} can be used to control the level of severity in response to small bandwidths that result (due to numerical error) in at least one density value being zero or less. #' When this argument is passed a vector of length 2, the first entry corresponds to the global bandwidth (and hence refers to checks of the final adaptive density estimate and its leave-one-out values) and the second to the pilot bandwidth (and hence checks the fixed-bandwidth pilot density and its leave-one-out values). #' Alternatively a single value may be supplied, which will be taken to be the same for both global and pilot. #' See the help page for \code{\link{LIK.density}} for an explanation of the four allowable values (\code{-1}, \code{0}, \code{1}, \code{2}) for each component of this argument. 
#' #' #' #' @rdname SLIK.adapt #' #' @param pp An object of class \code{\link[spatstat.geom]{ppp}} giving the observed #' 2D data to be smoothed. #' @param hold Logical value indicating whether to hold the global and pilot bandwidths equal throughout the #' optimisation; defaults to \code{TRUE}. See `Details'. #' @param hlim An optional vector of length 2 giving the limits of the #' optimisation routine with respect to the bandwidth when \code{hold = TRUE}. If unspecified, the #' function attempts to choose this automatically. Ignored when \code{hold = FALSE}. #' @param start A positively-valued numeric vector of length 2 giving the starting values to be used for the global/pilot #' optimisation routine when \code{hold = FALSE}. Defaults to the oversmoothing bandwidth (\code{\link{OS}}) for both values; #' ignored when \code{hold = TRUE}. #' @param edge Logical value indicating whether to edge-correct the density #' estimates used. #' @param parallelise Numeric argument to invoke parallel processing in the brute force leave-one-out calculations, giving #' the number of CPU cores to use. Experimental. Test #' your system first using \code{parallel::detectCores()} to identify the #' number of cores available to you. If \code{NA} (default), no parallelisation performed and a single loop is used. #' @param verbose Logical value indicating whether to provide function progress #' commentary. #' @param zero.action A numeric vector of length 2, each value being either \code{-1}, \code{0} (default), \code{1} or \code{2} controlling how the function should behave in response to numerical errors at very small bandwidths, when such a bandwidth results in one or more zero or negative density values during the leave-one-out computations. See `Details'. #' @param optim.control An optional list to be passed to the \code{control} argument of \code{\link[stats]{optim}} for further control over the numeric optimisation when \code{hold = FALSE}. 
See the documentation for \code{\link[stats]{optim}} for further details. #' @param ... Additional arguments controlling density estimation for the internal calculations. Relevant arguments are \code{resolution}, \code{gamma.scale}, and \code{trim}. If unsupplied these default to \code{64}, \code{"geometric"}, and \code{5} respectively; see \code{\link{bivariate.density}} for a further explanation of these arguments. #' #' @return A numeric vector of length 2 giving the likelihood-maximised global and pilot bandwidths. #' #' @section Note: While theoretically valid, this is a largely experimental function. There is presently little in the literature to suggest how well this #' type of simultaneous global/pilot bandwidth selection might perform in practice. Current research efforts (Davies & Lawson, 2018) #' seek in part to address these questions. #' #' #' #' @author T. M. Davies #' #' @seealso Functions for bandwidth selection in package #' \code{\link{spatstat}}: \code{\link[spatstat.core]{bw.diggle}}; #' \code{\link[spatstat.core]{bw.ppl}}; \code{\link[spatstat.core]{bw.scott}}; #' \code{\link[spatstat.core]{bw.frac}}. #' #' @references #' Davies, T.M. and Lawson, A.B. (2018), An evaluation of likelihood-based bandwidth selectors for spatial and spatiotemporal kernel estimates, \emph{Submitted for publication}. #' #' Silverman, B.W. (1986), \emph{Density Estimation for Statistics #' and Data Analysis}, Chapman & Hall, New York. #' #' Wand, M.P. and Jones, #' C.M., 1995. \emph{Kernel Smoothing}, Chapman & Hall, London. 
#' #' @examples #' #' #' \donttest{ #' #' data(pbc) #' pbccas <- split(pbc)$case #' #' SLIK.adapt(pbccas) #' SLIK.adapt(pbccas,hold=TRUE) #' #' } #' #' @export SLIK.adapt <- function(pp,hold=TRUE,start=rep(OS(pp),2),hlim=NULL,edge=TRUE,zero.action=c(-1,0),optim.control=list(),parallelise=NULL,verbose=TRUE,...){ if(class(pp)!="ppp") stop("data object 'pp' must be of class \"ppp\"") W <- Window(pp) # resolution <- checkit(resolution,"'resolution'") # Handle the dots # ellip <- list(...) if(is.null(ellip$gamma.scale)){ gamma.scale <- "geometric" } else { gamma.scale <- ellip$gamma.scale } if(is.null(ellip$trim)){ trim <- 5 } else { trim <- ellip$trim } if(is.null(ellip$resolution)){ resolution <- 64 } else { resolution <- ellip$resolution } ## if(length(zero.action)==1) zero.action <- rep(zero.action,2) zero.action <- zero.action[1:2] if((!zero.action[1]%in%c((-1):2))||(!zero.action[2]%in%c((-1):2))) stop("invalid 'zero.action'") # optimise/optim # if(!hold){ result <- optim(start,loowrap_nohold,pp=pp,edge=edge,gamma.scale=gamma.scale,trim=trim,resolution=resolution,vbs=verbose,parallel=parallelise,za=zero.action,control=optim.control)$par result <- as.numeric(result) } else { if(is.null(hlim)){ ppu <- pp marks(ppu) <- NULL md <- min(nndist(unique(ppu))) hlim <- c(md,max(md*50,min(diff(W$xrange),diff(W$yrange))/6)) } else { hlim <- checkran(hlim,"'hlim'") } result <- suppressWarnings(optimise(loowrap_hold,interval=hlim,pp=pp,edge=edge,gamma.scale=gamma.scale,trim=trim,resolution=resolution,vbs=verbose,parallel=parallelise,za=zero.action)$minimum) result <- as.numeric(rep(result,2)) } names(result) <- c("h0","hp") return(result) } loowrap_hold <- function(hh,...) 
return(loowrap_nohold(c(hh,hh),...)) loowrap_nohold <- function(h0hp,pp,edge,gamma.scale,trim,resolution,vbs,parallel,za){ W <- Window(pp) if(any(h0hp<=0)||any(h0hp>100*max(c(diff(W$xrange),diff(W$yrange))))) return(NA) if(vbs) cat("h0: ",h0hp[1],"; hp: ",h0hp[2],"\n",sep="") if(za[2]==-1){ stopper <- density(pp,sigma=h0hp[2],edge=edge,dimyx=64,weights=NULL) if(any(stopper<=0)) return(Inf) } if(za[1]==-1){ stopper <- bivariate.density(pp,h0hp[1],h0hp[2],adapt=TRUE,edge=ifelse(edge,"uniform","none"),trim=trim,gamma.scale=gamma.scale,verbose=FALSE,resolution=64,weights=NULL,davies.baddeley=0.05)$z if(any(stopper<=0)) return(Inf) } loovals <- bivden.LOO(pp,h0hp[1],h0hp[2],edge=edge,trim=trim,gamma.scale=gamma.scale,resolution=resolution,parallel=parallel,weights=NULL,za[2])[[1]] if(any(loovals<=0)){ if(za[1]==0){ loovals[loovals<=0] <- NA } else if(za[1]==1){ loovals <- posifybivden(loovals) } else if(za[1]==2){ loovals[loovals<=0] <- min(loovals[loovals>0]) } } return(-mean(log(loovals))) } sparr/R/pbc-data.R0000644000176200001440000000232014024073673013403 0ustar liggesusers#' Primary biliary cirrhosis data #' #' Data of the locations of 761 cases of primary biliary cirrhosis in several #' adjacent health regions of north-eastern England, along with 3020 controls #' representing the at-risk population, collected between 1987 and 1994. These #' data were first presented and analysed by Prince et al. (2001); subsequent #' analysis of these data in the spirit of \code{\link{sparr}} was performed in #' Davies and Hazelton (2010). Also included is the polygonal study region. #' #' @name pbc #' @format \code{pbc} is a dichotomously marked #' \code{\link[spatstat.geom:ppp]{ppp.object}}, with locations expressed in UK Ordnance #' Survey Coordinates (km). #' @docType data #' @keywords data #' @section Acknowledgements: The authors thank Prof. Peter Diggle for providing access #' to these data. #' @references Davies, T.M. and Hazelton, M.L. 
(2010), Adaptive kernel #' estimation of spatial relative risk, \emph{Statistics in Medicine}, #' \bold{29}(23) 2423-2437. #' @source Prince et al. (2001), The geographical distribution of primary #' biliary cirrhosis in a well-defined cohort, \emph{Hepatology}, \bold{34}, #' 1083-1088. #' @examples #' #' data(pbc) #' summary(pbc) #' plot(pbc) #' NULL sparr/R/summary.bivden.R0000644000176200001440000000210114012076542014663 0ustar liggesusers#' Summarising sparr objects #' #' \code{summary} methods for classes \code{\link{bivden}}, \code{\link{stden}}, #' \code{\link{rrs}}, \code{\link{rrst}} and \code{\link{msden}}. #' #' @aliases summary.bivden summary.rrs summary.msden summary.stden summary.rrst #' #' @rdname summarysparr #' #' @param object An object of class \code{\link{bivden}}, \code{\link{stden}}, #' \code{\link{rrs}}, \code{\link{rrst}}, or \code{\link{msden}}. #' @param ... Ignored. #' #' @author T.M. Davies #' @export summary.bivden <- function(object, ...){ print.bivden(x=object) W <- Window(object$pp) wt <- summary(W)$type wx <- W$xrange wy <- W$yrange cat("\nSpatial bound\n Type: ",wt,"\n 2D enclosure: [",wx[1],", ",wx[2],"] x [",wy[1],", ",wy[2],"]\n",sep="") cat("\nEvaluation\n ",nrow(object$z)," x ",ncol(object$z)," rectangular grid\n ",sum(!is.na(as.vector(as.matrix(object$z))))," grid cells out of ",prod(dim(object$z))," fall inside study region\n",sep="") cat(" Density/intensity range [",min(object$z,na.rm=TRUE),", ",max(object$z,na.rm=TRUE),"]\n",sep="") } sparr/R/processgamma.R0000644000176200001440000000075414012076542014415 0ustar liggesusersprocessgamma <- function(gamma.scale,pds){ if(length(gamma.scale)==1){ if(is.numeric(gamma.scale)&&gamma.scale>0){ gamma <- gamma.scale } else if(gamma.scale=="geometric"){ gamma <- exp(mean(log(pds^(-0.5)))) } else { gamma <- 1 warning("invalid 'gamma.scale' value -- assuming gamma=1") } } else if(is.null(gamma.scale)){ gamma <- 1 } else { gamma <- 1 warning("invalid 'gamma.scale' value -- assuming 
gamma=1") } return(gamma) }sparr/R/posifybivden.R0000644000176200001440000000025114012076542014425 0ustar liggesusersposifybivden <- function(x, eps=.Machine$double.xmin) { force(eps) if(is.im(x)) return(eval.im(pmax(eps, x))) if(is.numeric(x)) return(pmax(eps, x)) return(x) } sparr/R/NS.R0000644000176200001440000001361614012076542012255 0ustar liggesusers#' Normal scale (NS) bandwidth selector #' #' Provides the asymptotically optimal fixed bandwidths for spatial or spatiotemporal #' normal densities based on a simple expression. #' #' #' These functions calculate scalar smoothing bandwidths for kernel density #' estimates of spatial or spatiotemporal data: the optimal values would minimise the #' asymptotic mean integrated squared error assuming normally distributed data; see pp. 46-48 #' of Silverman (1986). The \code{NS} function returns a single bandwidth for isotropic smoothing #' of spatial (2D) data. The \code{NS.spattemp} function returns two values -- one for #' the spatial margin and another for the temporal margin, based on independently applying #' the normal scale rule (in 2D and 1D) to the spatial and temporal margins of the supplied data. #' #' \describe{ #' \item{\bold{Effective sample size}}{ The formula #' requires a sample size, and this can be minimally tailored via \code{nstar}. #' By default, the function simply uses the number of observations in #' \code{pp}: \code{nstar = "npoints"}. Alternatively, the user can specify their own value by simply #' supplying a single positive numeric value to \code{nstar}. #' For \code{NS} (not applicable to \code{NS.spattemp}), if \code{pp} is a #' \code{\link[spatstat.geom:ppp]{ppp.object}} with factor-valued #' \code{\link[spatstat.geom]{marks}}, then the user has the option of using #' \code{nstar = "geometric"}, which sets the sample size used in the formula #' to the geometric mean of the counts of observations of each mark. This can #' be useful for e.g. 
relative risk calculations, see Davies and Hazelton #' (2010). #' } #' \item{\bold{Spatial (and temporal) scale}}{The \code{scaler} argument is used to specify spatial #' (as well as temporal, in use of \code{NS.spattemp}) scale. For isotropic smoothing in the spatial #' margin, one may use the `robust' estimate #' of standard deviation found by a weighted mean of the interquartile ranges #' of the \eqn{x}- and \eqn{y}-coordinates of the data respectively #' (\code{scaler = "IQR"}). Two other options are the raw mean of the #' coordinate-wise standard deviations (\code{scaler = "sd"}), or the square #' root of the mean of the two variances (\code{scaler = "var"}). A fourth #' option, \code{scaler = "silverman"} (default), sets the scaling constant to #' be the minimum of the \code{"IQR"} and \code{"sd"} options; see Silverman #' (1986), p. 47. In use of \code{NS.spattemp} the univariate version of the elected scale #' statistic is applied to the recorded times of the data for the temporal bandwidth. #' Alternatively, like \code{nstar}, the user can specify their #' own value by simply supplying a single positive numeric value to #' \code{scaler} for \code{NS}, or a numeric vector of length 2 (in the order of \emph{[, ]}) #' for \code{NS.spattemp}. #' } #' } #' #' #' @aliases NS.spattemp #' #' @rdname NS #' #' @param pp An object of class \code{\link[spatstat.geom]{ppp}} giving the observed #' 2D data to be smoothed. #' @param tt A numeric vector of equal length to the number of points in \code{pp}, #' giving the time corresponding to each spatial observation. If unsupplied, #' the function attempts to use the values in the \code{\link[spatstat.geom]{marks}} #' attribute of the \code{\link[spatstat.geom:ppp]{ppp.object}} in \code{pp}. #' @param nstar Optional. Controls the value to use in place of the number of #' observations \emph{n} in the normal scale formula. 
Either a character #' string, \code{"npoints"} (default) or \code{"geometric"} (only possible for \code{NS}), or a positive #' numeric value. See `Details'. #' @param scaler Optional. Controls the value for a scalar representation of #' the spatial (and temporal for \code{NS.spattemp}) scale of the data. Either a character string, \code{"silverman"} #' (default), \code{"IQR"}, \code{"sd"}, or \code{"var"}; or a positive numeric #' value. See `Details'. #' #' @return A single numeric value of the estimated spatial bandwidth for \code{NS}, or a named numeric vector of length 2 giving #' the spatial bandwidth (as \code{h}) and the temporal bandwidth (as \code{lambda}) for \code{NS.spattemp}. #' #' @section Warning: The NS bandwidth is an approximation, and assumes #' \emph{that the target density is normal}. This is considered rare #' in most real-world applications. Nevertheless, it remains a quick and easy #' `rule-of-thumb' method with which one may obtain a smoothing parameter. Note that a similar expression for the adaptive kernel #' estimator is not possible (Davies et al., 2018). #' #' @author T.M. Davies #' #' @references #' Davies, T.M. and Hazelton, M.L. (2010), Adaptive kernel #' estimation of spatial relative risk, \emph{Statistics in Medicine}, #' \bold{29}(23) 2423-2437. #' #' Davies, T.M., Flynn, C.R. and Hazelton, M.L. #' (2018), On the utility of asymptotic bandwidth selectors for spatially #' adaptive kernel density estimation, \emph{Statistics & Probability Letters} [in press]. #' #' Silverman, B.W. (1986), \emph{Density Estimation for Statistics and Data Analysis}, Chapman #' & Hall, New York. #' #' Wand, M.P. and Jones, C.M., 1995. \emph{Kernel Smoothing}, Chapman & Hall, London. 
#' #' @examples #' #' data(pbc) #' #' NS(pbc) #' NS(pbc,nstar="geometric") # uses case-control marks to replace sample size #' NS(pbc,scaler="var") # set different scalar measure of spread #' #' data(burk) #' NS.spattemp(burk$cases) #' NS.spattemp(burk$cases,scaler="sd") #' #' @export NS <- function(pp, nstar = c("npoints", "geometric"), scaler = c("silverman", "IQR", "sd", "var")){ if(!inherits(pp,"ppp")) stop("data argument 'pp' must be of spatstat class \"ppp\"; see ?ppp") nstar <- processnstar(nstar,pp) scaler <- processscaler(scaler,pp) return(scaler*nstar^(-1/6)) # was: 2*nstar in denominator prior to adjust } sparr/R/fft.R0000644000176200001440000000347714012076542012520 0ustar liggesusers#' 2D fast-Fourier wrapper around 'fftwtools' or 'stats' package #' #' Utilises the Fastest Fourier Transform in the West (FFTW) via the 'fftwtools' #' package if available, else reverts to built-in functionality #' #' This function is called wherever \code{sparr} seeks to perform a 2D fast-Fourier #' transform. Where available, computational expense is noticeably reduced by appealing to routines #' in the independent `FFTW' toolbox. The user is encouraged to install the corresponding R package \code{fftwtools} from CRAN; #' this function will automatically detect and use the faster option, otherwise will #' defer to the built-in \code{\link[stats]{fft}}. #' #' #' @param x A numeric matrix to be transformed. #' @param inverse Whether it should compute the inverse transform (defaults to \code{FALSE}). #' @param fftw Whether the \code{fftwtools} R package is available. #' @return The fast-Fourier (inverse) transform. A complex-valued matrix of the same size as \code{x}. #' #' @author J.C. 
Marshall #' #' @examples #' \donttest{ #' #' # System check #' sparr:::fftw_available() #' #' system.time(fft(matrix(1:2000^2,2000))) #' system.time(fft2d(matrix(1:2000^2,2000))) #' } #' #' @export fft2d <- function(x, inverse=FALSE, fftw = sparr:::fftw_available()) { if (fftw) { fftwtools::fftw2d(data=x, inverse=inverse) } else { stats::fft(z=x, inverse=inverse) } } fftw_available <- function() { yeahnah <- requireNamespace("fftwtools", quietly=TRUE) if(yeahnah) yeahnah <- packageVersion("fftwtools") >= "0.9-8" return(yeahnah) } # The user is encouraged to install the `FFTW' tools # from \url{http://www.fftw.org} and the corresponding R package \code{fftwtools} from CRAN; # this function will automatically detect and use the faster option, otherwise will # defers to the built-in \code{\link[stats]{fft}}. #sparr/R/LIK.density.spatial.single.R0000644000176200001440000000120414012076542016734 0ustar liggesusersLIK.density.spatial.single <- function(h,pp,res,edge,za){ if(h<=0) return(NA) temp.dens.pts <- density(pp,sigma=h,edge=edge,dimyx=res,at="points",positive=FALSE,leaveoneout=TRUE,diggle=FALSE)/npoints(pp) ## tiny bandwidth protector action if(za==-1){ dtest <- density(pp,sigma=h,edge=edge,dimyx=res,diggle=FALSE) if(any(dtest<=0)) return(-Inf) } if(any(temp.dens.pts<=0)){ if(za==2){ temp.dens.pts[temp.dens.pts<=0] <- min(temp.dens.pts[temp.dens.pts>0]) } else if(za==1){ temp.dens.pts <- posifybivden(temp.dens.pts) } else { return(-Inf) } } return(mean(log(temp.dens.pts))) } sparr/R/BOOT.density.R0000644000176200001440000001502214012100230014125 0ustar liggesusers#' @export BOOT.density <- function(pp,hlim=NULL,eta=NULL,type=c("fixed","adaptive"),hp=NULL, edge=c("uniform","none"),ref.density=NULL,resolution=64, rmdiag=TRUE,sim.adapt=list(N=50,B=100,dimz=64,objective=FALSE), parallelise=NA,verbose=TRUE,...){ if(!inherits(pp,"ppp")) stop("'pp' must be of spatstat class \"ppp\"; see ?ppp") n <- npoints(pp) W <- Window(pp) WM <- as.mask(W,dimyx=rep(resolution,2)) # 
set h-limits if unsupplied if(is.null(hlim)){ ppu <- pp marks(ppu) <- NULL md <- min(nndist(unique(ppu))) hlim <- c(md,max(md*50,min(diff(W$xrange),diff(W$yrange))/6)) } else { hlim <- checkran(hlim,"'hlim'") } edg <- checkedge(edge,v=0) if(!is.na(parallelise)){ if(!is.numeric(parallelise)) stop("'parallelise' must be numeric") if(is.null(parallelise)) parallelise <- NA parallelise <- round(parallelise[1]) } typ <- type[1] if(typ=="fixed"){ if(verbose) cat("Initialising...") # unleash the grid within inside <- WM$m inn <- which(as.vector(inside)) evalyx <- as.matrix(expand.grid(WM$yrow,WM$xcol)) evalyx.redu <- evalyx[inn,] GN <- length(inn) edg <- edg=="uniform" # get reference density and associated diggle edge factors if(is.null(ref.density)){ if(is.null(eta)){ eta <- OS(pp) } else { eta <- checkit(eta,"'eta'") } d.eta <- density(pp,eta,edge=edg,positive=TRUE,dimyx=resolution,spill=1) if(edg){ d.eta$edg[d.eta$edg>1] <- 1 d.etadens <- d.eta$raw/d.eta$edg d.etaint <- integral(d.etadens) d.etadens <- as.matrix(d.etadens)[inn]/d.etaint epsilon.eta <- safelookup(d.eta$edg,pp,warn=FALSE) } else { d.etadens <- as.matrix(d.eta$raw/integral(d.eta$raw))[inn] epsilon.eta <- rep(1,n) } } else { if(!inherits(ref.density,"bivden")) stop("'ref.density' must be of class \"bivden\"; see ?bivden") eta <- ref.density$h0 if(!compatible(as.im(WM),ref.density$z)) stop("'ref.density' must be evaluated on identical spatial domain as 'Window(pp)' given 'resolution'") if(!all(ref.density$h==eta)) stop("'ref.density' must be a fixed-bandwidth density estimate when type = \"fixed\"") if(edg&&is.null(ref.density$q)) stop("'ref.density' edge-correction must exist if edge = \"uniform\"") if(is.null(ref.density$q)){ epsilon.eta <- rep(1,n) } else if(is.vector(ref.density$q)){ epsilon.eta <- ref.density$q } else { epsilon.eta <- safelookup(ref.density$q,pp,warn=FALSE) } d.etadens <- ref.density$z/integral(ref.density$z) d.etadens <- as.matrix(d.etadens)[inn] } if(edg){ # window dressing use_fftw 
<- fftw_available() ifft_scale <- WM$xstep*WM$ystep/(4*resolution^2) Mpad <- matrix(0,2*resolution,2*resolution) Mpad[1:resolution,1:resolution] <- inside fM <- fft2d(Mpad,fftw=use_fftw) } else { ifft_scale <- use_fftw <- fM <- NA } # fM <- fft2d(Mpad,fftw=use_fftw) # fK.eta <- kernel2d_fft(eta,WM$xstep,WM$ystep,resolution) # fK.con <- fft2d(fM*fK.eta,inverse=TRUE,use_fftw)[1:resolution,1:resolution] # edg.eta <- Mod(fK.con)*ifft_scale # edg.eta[edg.eta>1] <- 1 # edg.eta <- im(matrix(edge.eta,resolution,resolution),xcol=WM$xcol,yrow=WM$yrow) # epsilon.eta <- safelookup(edg.eta,pp,warn=FALSE) # remove expensive constant (just when rmdiag==TRUE) from 'boot.opt.spatial'; supply as argument if(!rmdiag){ boot3 <- d.etadens^2 } else { if(n<10000){ ### need more sophisticated solution here boot3 <- n^(-2)*sum(outer(1:n,1:n,function(i,j) epsilon.eta[i]^(-1)*epsilon.eta[j]^(-1)*kernel2d(pp$x[i]-pp$x[j],pp$y[i]-pp$y[j],sqrt(2*eta^2)))[-seq(1,n^2,n+1)]) } else { nseq <- 1:n boot3 <- 0 if(verbose) pb <- txtProgressBar(0,n,char='.') for(i in nseq){ boot3 <- boot3 + sum(epsilon.eta[i]^(-1)*epsilon.eta[nseq[-i]]^(-1)*kernel2d(pp$x[i]-pp$x[nseq[-i]],pp$y[i]-pp$y[nseq[-i]],sqrt(2*eta^2))) if(verbose) setTxtProgressBar(pb,i) } if(verbose) close(pb) boot3 <- n^(-2)*boot3 } } if(verbose) cat("Done.\nSearching for optimal h in ",prange(hlim),"...\n",sep="") result <- optimise(boot.opt.spatial.fix,interval=hlim,rmdiag=rmdiag,edg=edg,WM=WM, resolution=resolution,fM=fM,ifft_scale=ifft_scale, inn=inn,GN=GN,evalyx.redu=evalyx.redu,pp=pp, epsilon.eta=epsilon.eta,eta=eta,nn=n,boot3=boot3, use_fftw=use_fftw,parallelise=parallelise,verb=verbose)$minimum if(verbose) cat("Done.\n") } else if(typ=="adaptive"){ if(verbose) cat("Initialising...") if(is.null(ref.density)){ if(is.null(eta)){ eta <- OS(pp) } else { eta <- checkit(eta,"'eta'") } if(is.null(hp)){ hp <- eta } else { hp <- checkit(hp,"'hp'") } d.eta <- density(pp,eta,edge=(edg=="uniform"),positive=TRUE,dimyx=resolution) } else { 
if(!inherits(ref.density,"bivden")) stop("'ref.density' must be of class \"bivden\"; see ?bivden") if(!compatible(as.im(WM),ref.density$z)) stop("'ref.density' must be evaluated on identical spatial domain as 'Window(pp)' given 'resolution'") if(is.null(hp)){ hp <- ifelse(is.null(ref.density$hp),ref.density$h0,ref.density$hp) } else { hp <- checkit(hp,"'hp'") } d.eta <- ref.density$z } d.eta <- d.eta/integral(d.eta) if(is.null(sim.adapt)) sim.adapt <- list() if(is.null(sim.adapt$N)) sim.adapt$N <- 50 if(is.null(sim.adapt$B)) sim.adapt$B <- 100 if(is.null(sim.adapt$dimz)) sim.adapt$dimz <- 64 if(is.null(sim.adapt$objective)) sim.adapt$objective <- FALSE hhash <- mean(hlim) h0fac <- hlim/hhash if(verbose) cat("Done.\nSearching for optimal h0 in ",prange(hlim),":\n",sep="") resultfull <- boot.opt.spatial.adapt(pp,h0ref=hhash,h0fac=h0fac,hp=hp, edg=edg,refden=d.eta,N=sim.adapt$N, B=sim.adapt$B,res=resolution,dimz=sim.adapt$dimz, verbose=verbose,parallelise=parallelise,...) result <- resultfull$h if(sim.adapt$objective) result <- resultfull$mat } else { stop("invalid 'type'") } return(result) } sparr/R/edgeh.R0000644000176200001440000000276214012076542013011 0ustar liggesusersedgeh <- function(bwim,pres,tres,step,W,verbose=FALSE){ if(pres>tres) stop("edge-correction resolution (in 'davies.baddeley[3]') exceeds spatial intensity 'resolution'") hfp <- bwim if(pres1] <- 1 qhz <- im(matrix(qhz,pres,pres),xcol=M$xcol,yrow=M$yrow) if(pres==tres){ qhz[!inside] <- NA } else { qhz <- as.im(interp.im,W=W,Z=qhz,dimyx=rep(tres,2)) } return(qhz) }sparr/R/summary.msden.R0000644000176200001440000000141314012076542014527 0ustar liggesusers#' @rdname summarysparr #' @method summary msden #' @export summary.msden <- function(object,...){ cat("Multi-scale Adaptive Kernel Density/Intensity Estimate\n\n") h0r <- round(object$h0range,4) h0v <- as.numeric(names(object$z)) h0l <- length(h0v) cat("Available global bandwidth range\n (",h0r[1],", ",h0r[2],") 
",unitname(object$z[[1]])[[2]],"\n",sep="") cat(" Discretised sequence of length",h0l,"\b: ") cat(round(h0v,4),"\n",sep=", ") cat("\b\b\b.") cat("\n\nNo. of observations\n ",npoints(object$pp),"\n") cat("\nEvaluation per slice\n ",nrow(object$z[[1]])," x ",ncol(object$z[[1]])," rectangular grids\n ",sum(!is.na(as.vector(as.matrix(object$z[[1]]))))," grid cells out of ",prod(dim(object$z[[1]]))," fall inside study region\n",sep="") } sparr/R/LIK.density.spattemp.single.R0000644000176200001440000000065014012076542017140 0ustar liggesusersLIK.density.spattemp.single <- function(bands,pp,tt,tlim,xyin,xys,sedge,tedge,parallelise,verbose){ if(any(bands<=0)) return(NA) if(verbose) cat("h =",bands[1],"\b; lambda =",bands[2],"\n") h <- bands[1] lam <- bands[2] temp.dens.pts <- spattemp.LOO(pp,tt,h,lam,tlim,xyin,xys,sedge,tedge,parallelise=parallelise) if(any(temp.dens.pts<=0)) return(log(min(temp.dens.pts))) return(-mean(log(temp.dens.pts))) } sparr/R/spattemp.risk.R0000644000176200001440000001002414012076542014527 0ustar liggesusers#' @export spattemp.risk <- function(f,g,log=TRUE,tolerate=FALSE,finiteness=TRUE,verbose=TRUE){ if(!inherits(f,"stden")) stop("'f' must be of class 'stden' arising from a call to 'spattemp.density'") gst <- inherits(g,"stden") gbi <- inherits(g,"bivden") if(!(gst||gbi)) stop("'g' must be of class 'stden' or 'bivden'") fse <- all(f$qs==1) fte <- all(f$qt==1) if(verbose) message("Calculating ratio...", appendLF=FALSE) fres <- f$z flen <- length(fres) rr <- rrc <- list() if(gst){ gse <- all(g$qs==1) gte <- all(g$qt==1) if((fse!=gse)||(fte!=gte)) stop("edge-correction for 'f' and 'g' must be consistent") gres <- g$z if(flen!=length(gres)) stop("incompatible temporal domains... 'f' and 'g' must be evaluated at identical timestamps") fn <- as.numeric(names(fres)) gn <- as.numeric(names(gres)) if(any(fn!=gn)) stop("incompatible temporal domains... 
'f' and 'g' must be evaluated at identical timestamps") if(!compatible(fres[[1]],gres[[1]])) stop("incompatible images in 'f' and 'g'... kernel estimates must be evaluated on identical spatial domains") # if(positive){ # fres <- lapply(fres,posifybivden) # gres <- lapply(gres,posifybivden) # f$z.cond <- lapply(f$z.cond,posifybivden) # g$z.cond <- lapply(g$z.cond,posifybivden) # } for(i in 1:flen){ rr[[i]] <- suppressWarnings(log(fres[[i]])-log(gres[[i]])) rrc[[i]] <- suppressWarnings(log(f$z.cond[[i]])-log(g$z.cond[[i]])) } if(verbose) message("Done.") if(tolerate){ if(verbose) message("Calculating pooled estimate for tolerance...", appendLF=FALSE) fs <- f gs <- g marks(fs$pp) <- NULL marks(gs$pp) <- NULL pdat <- suppressWarnings(superimpose(fs$pp,gs$pp)) marks(pdat) <- c(marks(f$pp),marks(g$pp)) hpool <- sqrt(prod(c(f$h,g$h))) lpool <- sqrt(prod(c(f$lambda,g$lambda))) pooled <- spattemp.density(pdat,h=hpool,lambda=lpool,tlim=f$tlim,sedge=ifelse(fse,"none","uniform"),tedge=ifelse(fte,"none","uniform"),sres=nrow(f$spatial.z),tres=flen,verbose=FALSE) if(verbose) message("Done.") } } else { if(!compatible(fres[[1]],g$z)) stop("incompatible images in 'f' and 'g'... 
kernel estimates must be evaluated on identical spatial domains") gse <- is.null(g$q) if(fse!=gse) stop("edge-correction for 'f' and 'g' must be consistent") g$z <- g$z/integral(g$z) # if(positive){ # fres <- lapply(fres,posifybivden) # g$z <- posifybivden(g$z) # f$z.cond <- lapply(f$z.cond,posifybivden) # } for(i in 1:flen){ rr[[i]] <- suppressWarnings(log(fres[[i]])-log(g$z)+log(diff(f$tlim))) rrc[[i]] <- suppressWarnings(log(f$z.cond[[i]])-log(g$z)) } if(verbose) message("Done.") pooled <- NULL } if(finiteness&&log){ if(verbose) message("Ensuring finiteness...\n --joint--") rr <- lapply(rr,fbound) if(verbose) message(" --conditional--") rrc <- lapply(rrc,fbound) if(verbose) message("Done.") } ps <- psc <- NULL if(tolerate){ if(verbose) message("Calculating tolerance contours...") vars <- tol.asy.st(f,g,pooled,verbose) ps <- psc <- list() for(i in 1:flen){ Z <- as.matrix(rr[[i]])/sqrt(vars$v[[i]]) ps[[i]] <- im(pnorm(Z,lower.tail=FALSE),xcol=f$spatial.z$xcol,yrow=f$spatial.z$yrow) Zc <- as.matrix(rrc[[i]])/sqrt(vars$vc[[i]]) psc[[i]] <- im(pnorm(Zc,lower.tail=FALSE),xcol=f$spatial.z$xcol,yrow=f$spatial.z$yrow) } if(verbose) message("Done.") } if(!log){ for(i in 1:flen){ rr[[i]] <- exp(rr[[i]]) rrc[[i]] <- exp(rrc[[i]]) } } names(rr) <- names(rrc) <- names(f$z) if(!is.null(ps)) names(ps) <- names(psc) <- names(f$z) result <- list(rr=rr,rr.cond=rrc,P=ps,P.cond=psc,f=f,g=g,tlim=f$tlim) class(result) <- "rrst" return(result) } fbound <- function(x){ fnt <- is.finite(as.matrix(x)) if(!any(fnt)){ x[] <- 0 } else { imr <- range(x[fnt]) x[x==Inf] <- imr[2] x[x==-Inf] <- imr[1] } return(x) }sparr/R/K0.R0000644000176200001440000000015314012076542012177 0ustar liggesusersKO <- function(xy){ res <- exp(-0.5*rowSums(xy^2))/(2*pi) return((2*res-xy[,1]^2*res-xy[,2]^2*res)^2) }sparr/R/LSCV.density.R0000644000176200001440000002667014012076542014166 0ustar liggesusers#' Cross-validation bandwidths for spatial kernel density estimates #' #' Isotropic fixed or global (for adaptive) 
bandwidth selection for standalone 2D density/intensity #' based on either unbiased least squares cross-validation (LSCV) or likelihood (LIK) cross-validation. #' #' This function implements the bivariate, edge-corrected versions of fixed-bandwidth least squares cross-validation and likelihood cross-validation #' as outlined in Sections 3.4.3 and 3.4.4 of Silverman (1986) in order to select an optimal fixed smoothing bandwidth. With \code{type = "adaptive"} it may also be used to select the global bandwidth #' for adaptive kernel density estimates, making use of multi-scale estimation (Davies and Baddeley, 2018) via \code{\link{multiscale.density}}. #' Note that for computational reasons, the leave-one-out procedure is not performed on the pilot density in the adaptive setting; it #' is only performed on the final stage estimate. Current development efforts include extending this functionality, see \code{\link{SLIK.adapt}}. See also `Warning' below. #' #' Where \code{LSCV.density} is based on minimisation of an unbiased estimate of the mean integrated squared error (MISE) of the density, \code{LIK.density} is based on #' maximisation of the cross-validated leave-one-out average of the log-likelihood of the density estimate with respect to \eqn{h}. #' #' In both functions, the argument \code{zero.action} can be used to control the level of severity in response to small bandwidths that result (due to numerical error) in at least one density value being zero or less. #' When \code{zero.action = -1}, the function strictly forbids bandwidths that would result in one or more \emph{pixel} values of a kernel estimate of the original data (i.e. anything over the whole region) being zero or less---this is the most restrictive truncation. With \code{zero.action = 0} (default), the function #' automatically forbids bandwidths that yield erroneous values at the leave-one-out data point locations only. 
With \code{zero.action = 1}, the minimum machine value (see \code{.Machine$double.xmin} at the prompt) is #' used to replace these individual leave-one-out values. When \code{zero.action = 2}, the minimum value of the valid (greater than zero) leave-one-out values is used to replace any erroneous leave-one-out values. #' #' #' #' @aliases LIK.density #' #' @rdname CV #' #' @param pp An object of class \code{\link[spatstat.geom]{ppp}} giving the observed #' 2D data to be smoothed. #' @param hlim An optional vector of length 2 giving the limits of the #' optimisation routine with respect to the bandwidth. If unspecified, the #' function attempts to choose this automatically. #' @param hseq An optional increasing sequence of bandwidth values at which to #' manually evaluate the optimisation criterion. Used only in the case #' \code{(!auto.optim && is.null(hlim))}. #' @param resolution Spatial grid size; the optimisation will be based on a #' [\code{resolution} \eqn{\times}{x} \code{resolution}] density estimate. #' @param edge Logical value indicating whether to edge-correct the density #' estimates used. #' @param auto.optim Logical value indicating whether to automate the numerical #' optimisation using \code{\link{optimise}}. If \code{FALSE}, the optimisation #' criterion is evaluated over \code{hseq} (if supplied), or over a seqence of #' values controlled by \code{hlim} and \code{seqres}. #' @param seqres Optional resolution of an increasing sequence of bandwidth #' values. Only used if \code{(!auto.optim && is.null(hseq))}. #' @param parallelise Numeric argument to invoke parallel processing, giving #' the number of CPU cores to use when \code{!auto.optim} \bold{and} \code{type = "fixed"}. Experimental. Test #' your system first using \code{parallel::detectCores()} to identify the #' number of cores available to you. #' @param verbose Logical value indicating whether to provide function progress #' commentary. 
#' @param type A character string; \code{"fixed"} (default) performs classical leave-one-out #' cross-validation for the fixed-bandwidth estimator. Alternatively, \code{"adaptive"} utilises #' multiscale adaptive kernel estimation (Davies & Baddeley, 2018) to run the cross-validation #' in an effort to find a suitable global bandwidth for the adaptive estimator. Note that data points are not `left out' of #' the pilot density estimate when using this option (this capability is currently in development). See also the entry for \code{...}. #' @param zero.action A numeric integer, either \code{-1}, \code{0} (default), \code{1} or \code{2} controlling how the function should behave in response to numerical errors at very small bandwidths, when such a bandwidth results in one or more zero or negative density values during the leave-one-out computations. See `Details'. #' @param ... Additional arguments controlling pilot density estimation and multi-scale bandwidth-axis #' resolution when \code{type = "adaptive"}. Relevant arguments are \code{hp}, \code{pilot.density}, #' \code{gamma.scale}, and \code{trim} from \code{\link{bivariate.density}}; and \code{dimz} from #' \code{\link{multiscale.density}}. If \code{hp} is missing and required, the function makes a (possibly recursive) #' call to \code{LSCV.density} to set this using fixed-bandwidth LSCV. The remaining defaults are \code{pilot.density = pp}, #' \code{gamma.scale = "geometric"}, \code{trim = 5}, and \code{dimz = resolution}. #' #' @return A single numeric value of the estimated bandwidth (if #' \code{auto.optim = TRUE}). Otherwise, a \eqn{[}\code{seqres} \eqn{x} 2\eqn{]} matrix #' giving the bandwidth sequence and corresponding CV #' function value. #' #' @section Warning: Leave-one-out CV for bandwidth selection in kernel #' density estimation is notoriously unstable in practice and has a tendency to #' produce rather small bandwidths, particularly for spatial data. 
Satisfactory bandwidths are not guaranteed #' for every application; \code{zero.action} can curb adverse numeric effects for very small bandwidths during the optimisation procedures. This method can also be computationally expensive for #' large data sets and fine evaluation grid resolutions. The user may also need to #' experiment with adjusting \code{hlim} to find a suitable minimum. #' #' @author T. M. Davies #' #' @seealso \code{\link{SLIK.adapt}} and functions for bandwidth selection in package #' \code{\link{spatstat}}: \code{\link[spatstat.core]{bw.diggle}}; #' \code{\link[spatstat.core]{bw.ppl}}; \code{\link[spatstat.core]{bw.scott}}; #' \code{\link[spatstat.core]{bw.frac}}. #' #' @references #' #' Davies, T.M. and Baddeley A. (2018), Fast computation of #' spatially adaptive kernel estimates, \emph{Statistics and Computing}, \bold{28}(4), 937-956. #' #' Silverman, B.W. (1986), \emph{Density Estimation for Statistics #' and Data Analysis}, Chapman & Hall, New York. #' #' Wand, M.P. and Jones, #' C.M., 1995. \emph{Kernel Smoothing}, Chapman & Hall, London. 
#' #' @examples #' #' data(pbc) #' pbccas <- split(pbc)$case #' #' LIK.density(pbccas) #' LSCV.density(pbccas) #' #' \donttest{ #' #* FIXED #' #' # custom limits #' LIK.density(pbccas,hlim=c(0.01,4)) #' LSCV.density(pbccas,hlim=c(0.01,4)) #' #' # disable edge correction #' LIK.density(pbccas,hlim=c(0.01,4),edge=FALSE) #' LSCV.density(pbccas,hlim=c(0.01,4),edge=FALSE) #' #' # obtain objective function #' hcv <- LIK.density(pbccas,hlim=c(0.01,4),auto.optim=FALSE) #' plot(hcv);abline(v=hcv[which.max(hcv[,2]),1],lty=2,col=2) #' #' #* ADAPTIVE #' LIK.density(pbccas,type="adaptive") #' LSCV.density(pbccas,type="adaptive") #' #' # change pilot bandwidth used #' LIK.density(pbccas,type="adaptive",hp=2) #' LSCV.density(pbccas,type="adaptive",hp=2) #' } #' #' @export LSCV.density <- function(pp,hlim=NULL,hseq=NULL,resolution=64,edge=TRUE,auto.optim=TRUE, type=c("fixed","adaptive"),seqres=30,parallelise=NULL, zero.action=0,verbose=TRUE,...){ if(class(pp)!="ppp") stop("data object 'pp' must be of class \"ppp\"") W <- Window(pp) if(is.null(hlim)){ ppu <- pp marks(ppu) <- NULL md <- min(nndist(unique(ppu))) hlim <- c(md,max(md*50,min(diff(W$xrange),diff(W$yrange))/6)) } else { hlim <- checkran(hlim,"'hlim'") } if(!zero.action%in%((-1):2)) stop("invalid 'zero.action'") typ <- type[1] if(typ=="fixed"){ if(auto.optim){ if(verbose) cat("Searching for optimal h in ",prange(hlim),"...",sep="") result <- suppressWarnings(optimise(LSCV.density.spatial.single,interval=hlim,pp=pp,res=resolution,edge=edge,za=zero.action)$minimum) if(verbose) cat("Done.\n") } else { if(is.null(hseq)) hseq <- seq(hlim[1],hlim[2],length=seqres) hn <- length(hseq) if(is.null(parallelise)){ lscv.vec <- rep(NA,hn) if(verbose) pb <- txtProgressBar(1,hn) for(i in 1:hn){ lscv.vec[i] <- LSCV.density.spatial.single(hseq[i],pp,resolution,edge,za=zero.action) if(verbose) setTxtProgressBar(pb,i) } if(verbose) close(pb) } else { ncores <- detectCores() if(verbose) cat(paste("Evaluating criterion 
on",parallelise,"/",ncores,"cores...")) if(parallelise>ncores) stop("cores requested exceeds available count") registerDoParallel(cores=parallelise) lscv.vec <- foreach(i=1:hn,.packages="spatstat",.combine=c) %dopar% { return(LSCV.density.spatial.single(hseq[i],pp,resolution,edge,zero.action)) } if(verbose) cat("Done.\n") } result <- cbind(hseq,lscv.vec) dimnames(result)[[2]] <- c("h","CV") } } else if(typ=="adaptive"){ ellip <- list(...) if(is.null(ellip$hp)){ if(verbose) cat("Selecting pilot bandwidth...") hp <- LSCV.density(pp,verbose=FALSE,zero.action=zero.action) if(verbose) cat(paste("Done.\n [ Found hp =",hp,"]\n")) } else { hp <- ellip$hp } if(is.null(ellip$pilot.density)){ pilot.density <- pp } else { pilot.density <- ellip$pilot.density } if(is.null(ellip$gamma.scale)){ gamma.scale <- "geometric" } else { gamma.scale <- ellip$gamma.scale } if(is.null(ellip$trim)){ trim <- 5 } else { trim <- ellip$trim } if(is.null(ellip$dimz)){ dimz <- resolution } else { dimz <- ellip$dimz } if(verbose) cat("Computing multi-scale estimate...") hhash <- mean(hlim) msobject <- multiscale.density(pp,h0=hhash,hp=hp,h0fac=hlim/hhash,edge=ifelse(edge,"uniform","none"),resolution=resolution,dimz=dimz,gamma.scale=gamma.scale,trim=trim,intensity=TRUE,pilot.density=pilot.density,verbose=FALSE) if(verbose) cat("Done.\n") h0range <- range(as.numeric(names(msobject$z))) if(auto.optim){ if(verbose) cat("Searching for optimal h0 in ",prange(h0range),"...",sep="") h0opt <- suppressWarnings(optimise(ms.loo,interval=h0range,object=msobject,za=zero.action)$minimum) if(verbose) cat("Done.\n") return(h0opt) } else { if(is.null(hseq)) hseq <- seq(h0range[1],h0range[2],length=seqres) hn <- length(hseq) lscv.vec <- rep(NA,hn) if(verbose) pb <- txtProgressBar(1,hn) for(i in 1:hn){ lscv.vec[i] <- ms.loo(hseq[i],msobject,za=zero.action) if(verbose) setTxtProgressBar(pb,i) } if(verbose) close(pb) result <- cbind(hseq,lscv.vec) dimnames(result)[[2]] <- c("h0","CV") } } else stop("invalid 'type'") 
return(result) } sparr/R/OS.R0000644000176200001440000001244414012076542012254 0ustar liggesusers#' Oversmoothing (OS) bandwidth selector #' #' Provides fixed bandwidths for spatial or spatiotemporal data based on the #' maximal smoothing (oversmoothing) principle of Terrell (1990). #' #' These functions calculate scalar smoothing bandwidths for kernel density #' estimates of spatial or spatiotemporal data: the ``maximal amount of smoothing #' compatible with the estimated scale of the observed data''. See Terrell #' (1990). The \code{OS} function returns a single bandwidth for isotropic smoothing #' of spatial (2D) data. The \code{OS.spattemp} function returns two values -- one for #' the spatial margin and another for the temporal margin, based on independently applying #' Terrell's (1990) rule (in 2D and 1D) to the spatial and temporal margins of the supplied data. #' #' #' \describe{ #' \item{\bold{Effective sample size}}{ The formula #' requires a sample size, and this can be minimally tailored via \code{nstar}. #' By default, the function simply uses the number of observations in #' \code{pp}: \code{nstar = "npoints"}. Alternatively, the user can specify their own value by simply #' supplying a single positive numeric value to \code{nstar}. #' For \code{OS} (not applicable to \code{OS.spattemp}), if \code{pp} is a #' \code{\link[spatstat.geom:ppp]{ppp.object}} with factor-valued #' \code{\link[spatstat.geom]{marks}}, then the user has the option of using #' \code{nstar = "geometric"}, which sets the sample size used in the formula #' to the geometric mean of the counts of observations of each mark. This can #' be useful for e.g. relative risk calculations, see Davies and Hazelton #' (2010). #' } #' \item{\bold{Spatial (and temporal) scale}}{The \code{scaler} argument is used to specify spatial #' (as well as temporal, in use of \code{OS.spattemp}) scale. 
For isotropic smoothing in the spatial #' margin, one may use the `robust' estimate #' of standard deviation found by a weighted mean of the interquartile ranges #' of the \eqn{x}- and \eqn{y}-coordinates of the data respectively #' (\code{scaler = "IQR"}). Two other options are the raw mean of the #' coordinate-wise standard deviations (\code{scaler = "sd"}), or the square #' root of the mean of the two variances (\code{scaler = "var"}). A fourth #' option, \code{scaler = "silverman"} (default), sets the scaling constant to #' be the minimum of the \code{"IQR"} and \code{"sd"} options; see Silverman #' (1986), p. 47. In use of \code{OS.spattemp} the univariate version of the elected scale #' statistic is applied to the recorded times of the data for the temporal bandwidth. #' Alternatively, like \code{nstar}, the user can specify their #' own value by simply supplying a single positive numeric value to #' \code{scaler} for \code{OS}, or a numeric vector of length 2 (in the order of \emph{[, ]}) #' for \code{OS.spattemp}. #' } #' } #' #' @aliases OS.spattemp #' #' @rdname OS #' #' @param pp An object of class \code{\link[spatstat.geom]{ppp}} giving the observed #' 2D data to be smoothed. #' @param tt A numeric vector of equal length to the number of points in \code{pp}, #' giving the time corresponding to each spatial observation. If unsupplied, #' the function attempts to use the values in the \code{\link[spatstat.geom]{marks}} #' attribute of the \code{\link[spatstat.geom:ppp]{ppp.object}} in \code{pp}. #' @param nstar Optional. Controls the value to use in place of the number of #' observations \emph{n} in the oversmoothing formula. Either a character #' string, \code{"npoints"} (default) or \code{"geometric"} (only possible for \code{OS}), or a positive #' numeric value. See `Details'. #' @param scaler Optional. Controls the value for a scalar representation of #' the spatial (and temporal for \code{OS.spattemp}) scale of the data. 
Either a character string, \code{"silverman"} #' (default), \code{"IQR"}, \code{"sd"}, or \code{"var"}; or positive numeric #' value(s). See `Details'. #' #' @return A single numeric value of the estimated spatial bandwidth for \code{OS}, or a named numeric vector of length 2 giving #' the spatial bandwidth (as \code{h}) and the temporal bandwidth (as \code{lambda}) for \code{OS.spattemp}. #' #' @author T.M. Davies #' #' @references #' Davies, T.M. and Hazelton, M.L. (2010), Adaptive kernel #' estimation of spatial relative risk, \emph{Statistics in Medicine}, #' \bold{29}(23) 2423-2437. #' #' Terrell, G.R. (1990), The maximal smoothing #' principle in density estimation, \emph{Journal of the American Statistical #' Association}, \bold{85}, 470-477. #' #' @examples #' #' data(pbc) #' #' OS(pbc) #' OS(pbc,nstar="geometric") # uses case-control marks to replace sample size #' OS(pbc,scaler="var") # set different scalar measure of spread #' #' data(burk) #' OS.spattemp(burk$cases) #' OS.spattemp(burk$cases,scaler="sd") #' #' @export OS <- function(pp, nstar = c("npoints", "geometric"), scaler = c("silverman", "IQR", "sd", "var")){ if(!inherits(pp,"ppp")) stop("data argument 'pp' must be of spatstat class \"ppp\"; see ?ppp") nstar <- processnstar(nstar,pp) scaler <- processscaler(scaler,pp) RK <- 1/(4*pi) d <- 2 V <- (16*gamma((d+8)/2)*d*(d+2))/((d+8)^((d+6)/2)*pi^(d/2)) return(scaler*(((RK*d)/(nstar*V))^(1/(d+4)))) } sparr/R/LSCV.density.spatial.single.R0000644000176200001440000000141514012076542017070 0ustar liggesusersLSCV.density.spatial.single <- function(h,pp,res,edge,za){ if(h<=0) return(NA) temp.dens <- density(pp,h,edge=edge,dimyx=res,positive=FALSE,diggle=FALSE) temp.int <- integral(temp.dens) temp.dens.pts <- density(pp,sigma=h,edge=edge,dimyx=res,at="points",positive=FALSE,leaveoneout=TRUE,diggle=FALSE)/temp.int temp.dens <- temp.dens/temp.int t2int <- integral(temp.dens^2) ## tiny bandwidth protector action if(za==-1){ if(any(temp.dens<=0)) return(Inf) } 
if(any(temp.dens.pts<=0)){ #was: return(t2int) if(za==2){ temp.dens.pts[temp.dens.pts<=0] <- min(temp.dens.pts[temp.dens.pts>0]) } else if(za==1){ temp.dens.pts <- posifybivden(temp.dens.pts) } else { return(Inf) } } return(t2int-2*mean(temp.dens.pts)) } sparr/R/tol.asy.st.R0000644000176200001440000000471714012076542013755 0ustar liggesuserstol.asy.st <- function(f,g,pooled,verbose){ flen <- length(f$z) RK <- 1/(4*pi) RL <- rep(1/(2*sqrt(pi)),flen) nf <- npoints(f$pp) ng <- npoints(g$pp) if(verbose) message(" --convolution 1--") M <- Window(f$z[[1]]) inside <- M$m pres <- nrow(inside) res2 <- 2*pres resseq <- 1:pres xcol.ker <- M$xstep*c(0:(pres-1),-rev(resseq)) yrow.ker <- M$ystep*c(0:(pres-1),-rev(resseq)) kerpixarea <- M$xstep*M$ystep len.pad <- res2^2 Mpad <- matrix(0, ncol=2*pres, nrow=2*pres) Mpad[1:pres, 1:pres] <- inside fM <- fft(Mpad) qb <- function(oo,hfac,h){ hfp <- hfac*h densX.ker <- dnorm(xcol.ker,sd=hfp) densY.ker <- dnorm(yrow.ker,sd=hfp) Kern <- outer(densY.ker,densX.ker,"*")*kerpixarea con <- fft(fM*fft(Kern), inverse=TRUE)/len.pad qhz <- im(Mod(con[1:pres,1:pres]),xcol=oo$spatial.z$xcol,yrow=oo$spatial.z$yrow) qhz[qhz>1] <- 1 qhz[!inside] <- NA return(as.matrix(qhz)) } if(verbose) message(" --convolution 2--") if(!is.null(pooled)){ h <- pooled$h lam <- pooled$lambda qs <- as.matrix(pooled$qs) if(!all(qs==1)) RK <- RK*qb(pooled,sqrt(0.5),h)/qs^2 qt <- pooled$qt nearedge <- which(qt<1) if(length(nearedge)>0){ qt2 <- rep(1,length(qt)) tgr <- as.numeric(names(pooled$z)) for(i in nearedge) qt2[i] <- pnorm(pooled$tlim[2],mean=tgr[i],sd=lam/sqrt(2)) - pnorm(pooled$tlim[1],mean=tgr[i],sd=lam/sqrt(2)) RL <- RL*qt2/qt^2 } sig2 <- sig2.cond <- list() for(i in 1:flen){ prefix <- (RK*RL[i])/(h^2*lam) sig2[[i]] <- prefix*(1/nf+1/ng)/as.matrix(pooled$z[[i]]) sig2.cond[[i]] <- prefix*(1/(nf*f$temporal.z$y[i])+1/(ng*g$temporal.z$y[i]))/as.matrix(pooled$z.cond[[i]]) } } else { h <- sqrt(prod(c(f$h,g$h0))) lam <- f$lambda qs <- as.matrix(f$qs) if(h!=f$h) qs <- 
qb(f,1,h) if(!all(qs==1)) RK <- RK*qb(f,sqrt(0.5),h)/qs^2 qt <- f$qt nearedge <- which(qt<1) if(length(nearedge)>0){ qt2 <- rep(1,length(qt)) tgr <- as.numeric(names(f$z)) for(i in nearedge) qt2[i] <- pnorm(f$tlim[2],mean=tgr[i],sd=lam/sqrt(2)) - pnorm(f$tlim[1],mean=tgr[i],sd=lam/sqrt(2)) RL <- RL*qt2/qt^2 } sig2 <- list() gadd <- RK/(as.matrix(g$z)*ng*h^2) for(i in 1:length(f$z)) sig2[[i]] <- (RK*RL[i])/(h^2*lam*nf*as.matrix(f$z[[i]])) + gadd sig2.cond <- sig2 } # print(summary(as.vector(sig2[[40]]))) return(list(v=sig2,vc=sig2.cond)) } sparr/R/LIK.density.R0000644000176200001440000000745514012076542014036 0ustar liggesusers#' @rdname CV #' @export LIK.density <- function(pp,hlim=NULL,hseq=NULL,resolution=64,edge=TRUE,auto.optim=TRUE, type=c("fixed","adaptive"),seqres=30,parallelise=NULL, zero.action=0,verbose=TRUE,...){ if(class(pp)!="ppp") stop("data object 'pp' must be of class \"ppp\"") W <- Window(pp) if(is.null(hlim)){ ppu <- pp marks(ppu) <- NULL md <- min(nndist(unique(ppu))) hlim <- c(md,max(md*50,min(diff(W$xrange),diff(W$yrange))/6)) } else { hlim <- checkran(hlim,"'hlim'") } if(!zero.action%in%((-1):2)) stop("invalid 'zero.action'") typ <- type[1] if(typ=="fixed"){ if(auto.optim){ if(verbose) cat("Searching for optimal h in ",prange(hlim),"...",sep="") result <- suppressWarnings(optimise(LIK.density.spatial.single,interval=hlim,pp=pp,res=resolution,edge=edge,za=zero.action,maximum=TRUE)$maximum) if(verbose) cat("Done.\n") } else { if(is.null(hseq)) hseq <- seq(hlim[1],hlim[2],length=seqres) hn <- length(hseq) if(is.null(parallelise)){ lik.vec <- rep(NA,hn) if(verbose) pb <- txtProgressBar(1,hn) for(i in 1:hn){ lik.vec[i] <- LIK.density.spatial.single(hseq[i],pp,resolution,edge,za=zero.action) if(verbose) setTxtProgressBar(pb,i) } if(verbose) close(pb) } else { ncores <- detectCores() if(verbose) cat(paste("Evaluating criterion on",parallelise,"/",ncores,"cores...")) if(parallelise>ncores) stop("cores requested exceeds available count") 
registerDoParallel(cores=parallelise) lik.vec <- foreach(i=1:hn,.packages="spatstat",.combine=c) %dopar% { return(LIK.density.spatial.single(hseq[i],pp,resolution,edge,zero.action)) } if(verbose) cat("Done.\n") } result <- cbind(hseq,lik.vec) dimnames(result)[[2]] <- c("h","CV") } } else if(typ=="adaptive"){ ellip <- list(...) if(is.null(ellip$hp)){ if(verbose) cat("Selecting pilot bandwidth...") hp <- LSCV.density(pp,verbose=FALSE,zero.action=zero.action) if(verbose) cat(paste("Done.\n [ Found hp =",hp,"]\n")) } else { hp <- ellip$hp } if(is.null(ellip$pilot.density)){ pilot.density <- pp } else { pilot.density <- ellip$pilot.density } if(is.null(ellip$gamma.scale)){ gamma.scale <- "geometric" } else { gamma.scale <- ellip$gamma.scale } if(is.null(ellip$trim)){ trim <- 5 } else { trim <- ellip$trim } if(is.null(ellip$dimz)){ dimz <- resolution } else { dimz <- ellip$dimz } if(verbose) cat("Computing multi-scale estimate...") hhash <- mean(hlim) msobject <- multiscale.density(pp,h0=hhash,hp=hp,h0fac=hlim/hhash,edge=ifelse(edge,"uniform","none"),resolution=resolution,dimz=dimz,gamma.scale=gamma.scale,trim=trim,intensity=FALSE,pilot.density=pilot.density,verbose=FALSE) if(verbose) cat("Done.\n") h0range <- range(as.numeric(names(msobject$z))) if(auto.optim){ if(verbose) cat("Searching for optimal h0 in ",prange(h0range),"...",sep="") h0opt <- suppressWarnings(optimise(ms.loo.lik,interval=h0range,object=msobject,za=zero.action,maximum=TRUE)$maximum) if(verbose) cat("Done.\n") return(h0opt) } else { if(is.null(hseq)) hseq <- seq(h0range[1],h0range[2],length=seqres) hn <- length(hseq) lik.vec <- rep(NA,hn) if(verbose) pb <- txtProgressBar(1,hn) for(i in 1:hn){ lik.vec[i] <- ms.loo.lik(hseq[i],msobject,zero.action) if(verbose) setTxtProgressBar(pb,i) } if(verbose) close(pb) result <- cbind(hseq,lik.vec) dimnames(result)[[2]] <- c("h0","CV") } } else stop("invalid 'type'") return(result) } sparr/R/plot.msden.R0000644000176200001440000000177414012076542014022 0ustar 
liggesusers#' @rdname plotsparr #' @method plot msden #' @export plot.msden <- function(x, what = c("z", "edge", "bw"), sleep = 0.2, override.par = TRUE, ...){ wha <- what[1] ellip <- list(...) if(is.null(ellip)) ellip <- list() if(is.null(ellip$box)) ellip$box <- FALSE if(is.null(ellip$ribargs)) ellip$ribargs <- list(box=TRUE) if(wha=="z"){ lst <- x$z } else if(wha=="edge"){ lst <- x$q if(is.null(lst)) stop("no edge correction present in multi-scale density object") } else if(wha=="bw"){ lst <- x$him if(is.null(ellip$zlim)) ellip$zlim <- range(lapply(lst,range)) } else { stop("invalid 'what'") } if(override.par) par(mfrow=c(1,1),mar=rep(2,4)) hv <- as.numeric(names(lst)) for(i in 1:length(lst)){ dev.hold() ellip$x <- lst[[i]] ellip$main <- paste("h0 =",round(hv[i],5)) do.call("plot.im",ellip) plot(as.polygonal(Window(x$pp)),add=TRUE) axis(1) axis(2) box(bty="l") dev.flush() Sys.sleep(sleep) } invisible(NULL) } sparr/R/risk.R0000644000176200001440000003406314012076542012704 0ustar liggesusers#' Spatial relative risk/density ratio #' #' Estimates a \emph{relative risk} function based on the ratio of two 2D #' kernel density estimates. #' #' The relative risk function is defined here as the ratio of the `case' #' density to the `control' (Bithell, 1990; 1991). Using kernel density #' estimation to model these densities (Diggle, 1985), we obtain a workable #' estimate thereof. This function defines the risk function \emph{r} in the #' following fashion: \cr\cr \emph{r}\code{ = (fd + epsilon*max(gd))/(gd + #' epsilon*max(gd))}, \cr\cr where \code{fd} and \code{gd} denote the case and #' control density estimates respectively. Note the (optional) additive #' constants defined by \code{epsilon} times the maximum of each of the #' densities in the numerator and denominator respectively (see Bowman and #' Azzalini, 1997). 
#' #' The log-risk function \emph{rho}, given by \emph{rho} = log[\emph{r}], is #' argued to be preferable in practice as it imparts a sense of symmetry in the #' way the case and control densities are treated (Kelsall and Diggle, #' 1995a;b). The option of log-transforming the returned risk function is #' therefore selected by default. #' #' When computing adaptive relative risk functions, the user has the option of #' obtaining a so-called \emph{symmetric} estimate (Davies et al. 2016) via #' \code{pilot.symmetry}. This amounts to choosing the same pilot density for #' both case and control densities. By choosing \code{"none"} (default), the #' result uses the case and control data separately for the fixed-bandwidth #' pilots, providing the original asymmetric density-ratio of Davies and #' Hazelton (2010). By selecting either of \code{"f"}, \code{"g"}, or #' \code{"pooled"}, the pilot density is calculated based on the case, control, #' or pooled case/control data respectively (using \code{hp[1]} as the fixed #' bandwidth). Davies et al. (2016) noted some beneficial practical behaviour #' of the symmetric adaptive surface over the asymmetric. #' #' If the user selects \code{tolerate = TRUE}, the function internally computes #' asymptotic tolerance contours as per Hazelton and Davies (2009) and Davies #' and Hazelton (2010). When \code{adapt = FALSE}, the reference density #' estimate (argument \code{ref.density} in \code{\link{tolerance}}) is taken #' to be the estimated control density. The returned pixel #' \code{\link[spatstat.geom]{im}}age of \emph{p}-values (see `Value') is #' interpreted as an upper-tailed test i.e. smaller \emph{p}-values represent #' greater evidence in favour of significantly increased risk. For greater #' control over calculation of tolerance contours, use \code{\link{tolerance}}. 
#' #' @aliases risk rrs #' #' @param f Either a pre-calculated object of class \code{\link{bivden}} #' representing the `case' (numerator) density estimate, or an object of class #' \code{\link[spatstat.geom]{ppp}} giving the observed case data. Alternatively, if #' \code{f} is \code{\link[spatstat.geom]{ppp}} object with dichotomous #' factor-valued \code{\link[spatstat.geom]{marks}}, the function treats the first #' level as the case data, and the second as the control data, obviating the #' need to supply \code{g}. #' @param g As for \code{f}, for the `control' (denominator) density; this #' object must be of the same class as \code{f}. Ignored if, as stated above, #' \code{f} contains both case and control observations. #' @param log Logical value indicating whether to return the (natural) #' log-transformed relative risk function as recommended by Kelsall and Diggle #' (1995a). Defaults to \code{TRUE}, with the alternative being the raw density #' ratio. #' @param h0 A single positive numeric value or a vector of length 2 giving the #' global bandwidth(s) to be used for case/control density estimates; #' defaulting to a common oversmoothing bandwidth computed via \code{\link{OS}} #' on the pooled data using \code{nstar = "geometric"} if unsupplied. Ignored if \code{f} and \code{g} are #' already \code{\link{bivden}} objects. #' @param hp A single numeric value or a vector of length 2 giving the pilot #' bandwidth(s) to be used for fixed-bandwidth estimation of the pilot #' densities for adaptive risk surfaces. Ignored if \code{adapt = FALSE} or if #' \code{f} and \code{g} are already \code{\link{bivden}} objects. #' @param adapt A logical value indicating whether to employ adaptive smoothing #' for internally estimating the densities. Ignored if \code{f} and \code{g} #' are already \code{\link{bivden}} objects. 
#' @param tolerate A logical value indicating whether to internally calculate a #' corresponding asymptotic p-value surface (for tolerance contours) for the #' estimated relative risk function. See `Details'. #' @param doplot Logical. If \code{TRUE}, an image plot of the estimated #' relative risk function is produced using various visual presets. If #' additionally \code{tolerate} was \code{TRUE}, asymptotic tolerance contours #' are automatically added to the plot at a significance level of 0.05 for #' elevated risk (for more flexible options for calculating and plotting #' tolerance contours, see \code{\link{tolerance}} and #' \code{\link{tol.contour}}). #' @param pilot.symmetry A character string used to control the type of #' symmetry, if any, to use for the bandwidth factors when computing an #' adaptive relative risk surface. See `Details'. Ignored if \code{adapt = #' FALSE}. #' @param epsilon A single non-negative numeric value used for optional scaling #' to produce additive constant to each density in the raw ratio (see #' `Details'). A zero value requests no additive constant (default). #' @param verbose Logical value indicating whether to print function progress #' during execution. #' @param ... Additional arguments passed to any internal calls of #' \code{\link{bivariate.density}} for estimation of the requisite densities. #' Ignored if \code{f} and \code{g} are already \code{\link{bivden}} objects. #' #' @return An object of class \code{"rrs"}. This is a named list with the #' following components: #' \item{rr}{A pixel \code{\link[spatstat.geom]{im}}age of the #' estimated risk surface.} #' \item{f}{An object of class \code{\link{bivden}} #' used as the numerator or `case' density estimate.} #' \item{g}{An object of #' class \code{\link{bivden}} used as the denominator or `control' density #' estimate.} #' \item{P}{Only included if \code{tolerate = TRUE}. 
A pixel #' \code{\link[spatstat.geom]{im}}age of the \emph{p}-value surface for tolerance #' contours; \code{NULL} otherwise.} #' #' @author T.M. Davies #' #' @references #' Bithell, J.F. (1990), An application of density estimation to #' geographical epidemiology, \emph{Statistics in Medicine}, \bold{9}, #' 691-701. #' #' Bithell, J.F. (1991), Estimation of relative risk functions, #' \emph{Statistics in Medicine}, \bold{10}, 1745-1751. #' #' Bowman, A.W. and Azzalini A. (1997), \emph{Applied Smoothing Techniques for Data Analysis: #' The Kernel Approach with S-Plus Illustrations}, Oxford University Press #' Inc., New York. #' #' Davies, T.M. and Hazelton, M.L. (2010), Adaptive #' kernel estimation of spatial relative risk, \emph{Statistics in Medicine}, #' \bold{29}(23) 2423-2437. #' #' Davies, T.M., Jones, K. and Hazelton, M.L. #' (2016), Symmetric adaptive smoothing regimens for estimation of the spatial #' relative risk function, \emph{Computational Statistics & Data Analysis}, #' \bold{101}, 12-28. #' #' Diggle, P.J. (1985), A kernel method for smoothing #' point process data, \emph{Journal of the Royal Statistical Society Series #' C}, \bold{34}(2), 138-147. #' #' Hazelton, M.L. and Davies, T.M. (2009), #' Inference based on kernel estimates of the relative risk function in #' geographical epidemiology, \emph{Biometrical Journal}, \bold{51}(1), #' 98-109. #' #' Kelsall, J.E. and Diggle, P.J. (1995a), Kernel estimation of #' relative risk, \emph{Bernoulli}, \bold{1}, 3-16. #' #' Kelsall, J.E. and #' Diggle, P.J. (1995b), Non-parametric estimation of spatial variation in #' relative risk, \emph{Statistics in Medicine}, \bold{14}, 2335-2342. 
#'
#' @examples
#'
#' data(pbc)
#' pbccas <- split(pbc)$case
#' pbccon <- split(pbc)$control
#' h0 <- OS(pbc,nstar="geometric")
#'
#' # Fixed
#' pbcrr1 <- risk(pbccas,pbccon,h0=h0,tolerate=TRUE)
#'
#' # Asymmetric adaptive
#' pbcrr2 <- risk(pbccas,pbccon,h0=h0,adapt=TRUE,hp=c(OS(pbccas)/2,OS(pbccon)/2),
#' tolerate=TRUE,davies.baddeley=0.05)
#'
#' # Symmetric (pooled) adaptive
#' pbcrr3 <- risk(pbccas,pbccon,h0=h0,adapt=TRUE,tolerate=TRUE,hp=OS(pbc)/2,
#' pilot.symmetry="pooled",davies.baddeley=0.05)
#'
#' # Symmetric (case) adaptive; from two existing 'bivden' objects
#' f <- bivariate.density(pbccas,h0=h0,hp=2,adapt=TRUE,pilot.density=pbccas,
#' edge="diggle",davies.baddeley=0.05,verbose=FALSE)
#' g <- bivariate.density(pbccon,h0=h0,hp=2,adapt=TRUE,pilot.density=pbccas,
#' edge="diggle",davies.baddeley=0.05,verbose=FALSE)
#' pbcrr4 <- risk(f,g,tolerate=TRUE,verbose=FALSE)
#'
#' par(mfrow=c(2,2))
#' plot(pbcrr1,override.par=FALSE,main="Fixed")
#' plot(pbcrr2,override.par=FALSE,main="Asymmetric adaptive")
#' plot(pbcrr3,override.par=FALSE,main="Symmetric (pooled) adaptive")
#' plot(pbcrr4,override.par=FALSE,main="Symmetric (case) adaptive")
#'
#' @export
risk <- function(f, g = NULL, log = TRUE, h0 = NULL, hp = h0, adapt = FALSE, tolerate = FALSE, doplot = FALSE, pilot.symmetry = c("none","f","g","pooled"), epsilon = 0, verbose = TRUE, ...){
  # --- Resolve case/control inputs ---
  # If 'g' is absent, 'f' must be a marked ppp with a dichotomous factor:
  # first factor level = cases (numerator), second = controls (denominator).
  if(is.null(g)){
    if(!inherits(f,"ppp")) stop("'f' must be an object of class 'ppp' if 'g' unsupplied")
    fm <- marks(f)
    if(!is.factor(fm)) marks(f) <- fm <- factor(fm)
    if(nlevels(fm)!=2) stop("'f' marks must be dichotomous if 'g' unsupplied")
    fs <- split(f)
    f <- fs[[1]]
    g <- fs[[2]]
  } else {
    # Both supplied: must be the same class -- either raw patterns ("ppp")
    # or pre-computed density estimates ("bivden").
    fc <- class(f)
    gc <- class(g)
    if(!all(fc==gc)) stop("'f' and 'g' must be of identical class")
    if(!(inherits(f,"ppp")||inherits(f,"bivden"))) stop("'f' and 'g' must be of class 'ppp' or 'bivden'")
  }
  # Only the first element of 'epsilon' is used; must be non-negative.
  epsi <- epsilon[1]
  if(epsi<0) stop("invalid 'epsilon'; must be scalar and non-negative")

  if(inherits(f,"ppp")){
    # --- Densities must be estimated internally from raw point patterns ---
    if(!identical_windows(Window(f),Window(g))) stop("study windows for 'f' and 'g' must be identical")
    marks(f) <- NULL
    marks(g) <- NULL
    pooled <- suppressWarnings(superimpose(f,g))
    # Default global bandwidth: oversmoothing bandwidth on the pooled data,
    # with effective sample size sqrt(nf*ng) (the geometric mean of the two
    # sample sizes, matching the documented "geometric" default).
    if(is.null(h0)) h0 <- OS(pooled,nstar=sqrt(f$n*g$n))
    # Scalar h0 is shared by both densities; a length-2 h0 gives f then g.
    if(length(h0)==1){
      h0f <- h0g <- checkit(h0[1],"'h0[1]'")
    } else {
      h0f <- checkit(h0[1],"'h0[1]'")
      h0g <- checkit(h0[2],"'h0[2]'")
    }
    if(!adapt){
      # Fixed-bandwidth estimation of both surfaces.
      if(verbose) message("Estimating case and control densities...", appendLF=FALSE)
      fd <- bivariate.density(f,h0=h0f,adapt=FALSE,...)
      gd <- bivariate.density(g,h0=h0g,adapt=FALSE,...)
      if(verbose) message("Done.")
    } else {
      # Adaptive estimation: resolve pilot bandwidth(s), defaulting to h0.
      if(is.null(hp)) hp <- c(h0f,h0g)
      if(length(hp)==1){
        hfp <- hgp <- checkit(hp[1],"'hp[1]'")
      } else {
        hfp <- checkit(hp[1],"'hp[1]'")
        hgp <- checkit(hp[2],"'hp[2]'")
      }
      # ## Problematic doing symmetry by pixel images---trimming calculations inconsistent. ##
      #
      # pilotdata <- switch(pilot.symmetry,none=1,f=f,g=g,pooled=pooled,NA)
      # if(any(is.na(pilotdata))) stop("invalid 'pilot.symmetry' argument")
      # if(verbose) message("Estimating pilot(s)...", appendLF=FALSE)
      # if(pilot.symmetry=="none"){
      #   fp <- bivariate.density(f,h0=hfp,adapt=FALSE,...)
      #   gp <- bivariate.density(g,h0=hgp,adapt=FALSE,...)
      #   fgeo <- log(posifybivden(safelookup(fp$z,f,warn=FALSE))^(-0.5))
      #   ggeo <- log(posifybivden(safelookup(gp$z,g,warn=FALSE))^(-0.5))
      #   gam <- exp(npoints(pooled)^(-1)*(sum(fgeo)+sum(ggeo)))
      # } else {
      #   fp <- gp <- bivariate.density(pilotdata,h0=hfp[1],adapt=FALSE,...)
      #   gam <- exp(mean(log(posifybivden(safelookup(fp$z,pilotdata,warn=FALSE))^(-0.5))))
      # }
      # if(verbose) message("Done.")
      # Deferring to raw data symmetry below
      # Symmetry is imposed by handing *the same raw point data* to both
      # pilot estimates, rather than sharing a pilot image (see commented
      # block above for the abandoned image-based approach).
      pilot.symmetry <- pilot.symmetry[1]
      pdat <- list()
      if(pilot.symmetry=="none"){
        pdat[[1]] <- f
        pdat[[2]] <- g
      } else if(pilot.symmetry=="f"){
        pdat[[1]] <- pdat[[2]] <- f
      } else if(pilot.symmetry=="g"){
        pdat[[1]] <- pdat[[2]] <- g
      } else if(pilot.symmetry=="pooled"){
        pdat[[1]] <- pdat[[2]] <- pooled
      } else {
        stop("invalid 'pilot.symmetry' argument")
      }
      if(verbose) message("Estimating case density...", appendLF=FALSE)
      fd <- bivariate.density(f,h0=h0f,hp=hfp,adapt=TRUE,pilot.density=pdat[[1]],verbose=FALSE,...) #gamma.scale=gam,
      if(verbose) message("Done.\nEstimating control density...", appendLF=FALSE)
      gd <- bivariate.density(g,h0=h0g,hp=hgp,adapt=TRUE,pilot.density=pdat[[2]],verbose=FALSE,...) #gamma.scale=gam,
      if(verbose) message("Done.")
    }
  } else {
    # --- Pre-computed 'bivden' objects supplied directly ---
    if(!compatible(f$z,g$z)) stop("incompatible images in 'f' and 'g'... kernel estimates must be evaluated on identical domains")
    fd <- f
    gd <- g
    # Infer fixed vs adaptive from the stored gamma/geometric components:
    # these are NA for fixed-bandwidth estimates. fda+gda == 0 means both
    # adaptive; == 2 means both fixed; a mix (1) is an error.
    fda <- is.na(fd$gamma)||is.na(fd$geometric)
    gda <- is.na(gd$gamma)||is.na(gd$geometric)
    adapt <- switch(as.character(fda+gda),"0"=TRUE,"2"=FALSE,NA)
    if(is.na(adapt)) stop("'f' and 'g' smoothed differently... must both be either fixed or adaptive")
  }

  # Optional regularising constant: epsilon scaled by the maximum of the
  # control density, added to both numerator and denominator.
  eg <- epsi*max(gd$z)
  #rr <- (fd$z+eg)/(gd$z+eg)
  #if(log) rr <- log(rr)
  if(log) suppressWarnings(rr <- log(fd$z+eg) - log(gd$z+eg))
  else rr <- (fd$z+eg)/(gd$z+eg)

  # Asymptotic p-value surface for tolerance contours (NULL if not requested).
  # NOTE(review): the adaptive routine is passed 0.025 -- presumably a
  # two-tailed 5% setting; confirm against tol.asy.ada's signature.
  ps <- NULL
  if(tolerate){
    if(verbose) message("Calculating tolerance contours...", appendLF=FALSE)
    if(adapt) ps <- tol.asy.ada(fd,gd,0.025,verbose=FALSE)$p
    else ps <- tol.asy.fix(fd,gd,gd,verbose=FALSE)$p
    if(verbose) message("Done.")
  }

  # Quick-look plot; returns invisibly without constructing an "rrs" object.
  if(doplot){
    plot.im(rr,main="",box=FALSE,ribargs=list(box=TRUE))
    axis(1)
    axis(2)
    box(bty="l")
    plot(Window(fd$pp),add=TRUE)
    if(!is.null(ps)) contour(fd$z$xcol,fd$z$yrow,t(as.matrix(ps)),levels=0.05,add=TRUE)
    return(invisible(NULL))
  }

  result <- list(rr=rr,f=fd,g=gd,P=ps)
  class(result) <- "rrs"
  return(result)
}
sparr/R/spattemp.density.R0000644000176200001440000000675314012076542015254 0ustar liggesusers#' @export
spattemp.density <- function(pp,h=NULL,tt=NULL,lambda=NULL,tlim=NULL,sedge=c("uniform","none"),tedge=sedge,sres=128,tres=NULL,verbose=TRUE){
  # Spatiotemporal kernel density estimation: validate the spatial pattern,
  # set up the spatial evaluation grid, and resolve the temporal data/limits.
  if(!inherits(pp,"ppp")) stop("data argument 'pp' must be of spatstat class \"ppp\"; see ?ppp")
  W <- Window(pp)
  n <- npoints(pp)
  sres <- checkit(sres,"'sres'")
  sedge <- checkedge(sedge,v=2)
  tedge <- checkedge(tedge,v=2)
  WM <- as.mask(W,dimyx=sres)
  inside <- WM$m
  grx <- WM$xcol
  gry <- WM$yrow
  # Observation times default to the marks of 'pp'.
  if(is.null(tt)) tt <- marks(pp)
  tt <- checktt(tt)
  if(length(tt)!=n) stop(paste("Length of temporal vector does not match number of spatial observations\n npoints(pp) = ",n,"; length(tt) = ",length(tt),sep=""))
  if(is.null(tlim)) tlim <- range(tt)
  tlim <- checkranin(tlim,tt,"tlim")
  if(is.null(tres)){
    # No temporal resolution requested: evaluate at integer times spanning
    # the (floored/ceiled) temporal limits, one unit per bin.
    tcw <- 1
    kt <- tlim <- c(floor(tlim[1]),ceiling(tlim[2]))
    grt <- tlim[1]:tlim[2]
    tres <- length(grt)
    # kt <- c(tlim[1]-0.5,tlim[2]+0.5)
    # grt <- grt[-which((grt<tlim[1])|(grt>tlim[2]))]
  } else {
    # Explicit temporal resolution: evaluate at bin midpoints.
    tres <- checkit(tres,"'tres'")
    tcw <- diff(tlim)/tres
    grt <- tlim[1]+0.5*tcw+(0:(tres-1))*tcw
    kt <- c(tlim[1]+0.5*tcw,tlim[2]-0.5*tcw)
  }
  # Default bandwidths: oversmoothing (spatial) and Sheather-Jones (temporal).
  if(is.null(h)) h <- OS(pp)
  h <- checkit(h,"'h'")
  if(is.null(lambda)) lambda <- bw.SJ(tt)
  lam <- 
checkit(lambda,"'lam'")
  # fhat <- kde(cbind(pp$x,pp$y,tt),
  #             H=diag(c(h^2,h^2,lam^2)),
  #             xmin=c(min(grx),min(gry),kt[1]),
  #             xmax=c(max(grx),max(gry),kt[2]),
  #             gridsize=c(sres,sres,tres),
  #             supp=4,
  #             verbose=verbose)
  # Trivariate (x, y, t) kernel smooth via misc3d::kde3d; the commented
  # block above is the abandoned ks::kde equivalent.
  if(verbose) message("Calculating trivariate smooth...", appendLF=FALSE)
  fhat <- kde3d(x=pp$x,y=pp$y,z=tt,h=c(h,h,lam),n=c(sres,sres,tres),lims=c(range(grx),range(gry),kt))
  if(verbose) message("Done.")
  if(verbose&&(sedge=="uniform"||tedge=="uniform")) message("Edge-correcting...", appendLF=FALSE)
  # Spatial edge-correction surface 'sq': spillover mass from density.ppp,
  # capped at 1; pixels outside the window are set NA.
  sz <- density.ppp(pp,sigma=h,edge=(sedge=="uniform"),dimyx=sres,spill=1)
  sq <- im(matrix(1,sres,sres),xcol=grx,yrow=gry)
  if(sedge=="uniform"){
    sq <- sz$edg
    sq[sq>1] <- 1
  }
  sq[!inside] <- NA
  # Temporal edge-correction weights 'tq': only grid times within 4*lambda
  # of a temporal boundary need correction (Gaussian mass inside [tlim]).
  tq <- rep(1,tres)
  if(tedge=="uniform"){
    nearedge <- 1:tres
    wellinside <- which(grt>(tlim[1]+4*lam) & grt<(tlim[2]-4*lam))
    if(length(wellinside)>0) nearedge <- nearedge[-wellinside]
    for(i in nearedge) tq[i] <- pnorm(tlim[2],mean=grt[i],sd=lam) - pnorm(tlim[1],mean=grt[i],sd=lam)
  }
  # Marginal spatial density (edge-corrected, renormalised to integrate to 1)
  # and marginal temporal density (edge-corrected pointwise by tq).
  spatial.z <- sz$raw/sq
  spatial.z <- spatial.z/integral(spatial.z)
  temporal.z <- density(tt,bw=lam,from=min(grt),to=max(grt),n=tres)
  temporal.z$y <- temporal.z$y/tq
  if(verbose&&(sedge=="uniform"||tedge=="uniform")) message("Done.")
  if(verbose) message("Conditioning on time...", appendLF=FALSE)
  # Per-time-slice images: z = joint density slice (edge-corrected),
  # z.cond = density conditional on time (slice divided by temporal margin).
  z <- z.cond <- list()
  for(i in 1:tres){
    z[[i]] <- im(t(fhat$d[,,i]),xcol=grx,yrow=gry)
    z[[i]] <- z[[i]]/(sq*tq[i])
    z[[i]][!inside] <- NA
    z.cond[[i]] <- z[[i]]/temporal.z$y[i]
    # z.cond[[i]] <- z.cond[[i]]/integral(z.cond[[i]])
  }
  names(z) <- names(z.cond) <- grt
  if(verbose) message("Done.")
  # Edge-correction components are NULL-ed when no correction was requested.
  if(sedge=="none") sq <- NULL
  if(tedge=="none") tq <- NULL
  final <- list(z=z)
  final$z.cond <- z.cond
  final$h <- h
  final$lambda <- lam
  final$tlim <- tlim #range(grt)
  final$spatial.z <- spatial.z
  final$temporal.z <- temporal.z
  # final$tstep <- tcw
  # final$tbreaks <- seq(tlim[1],tlim[2],length=tres+1)
  # final$tbin <- findInterval(tt,final$tbreaks,all.inside=TRUE)
  final$qs <- sq
  final$qt <- tq
  # Returned pattern carries the (possibly mark-derived) times as marks.
  marks(pp) <- tt
  final$pp <- pp
  final$tgrid <- grt
  class(final) <- "stden"
  return(final)
}
sparr/R/multiscale.slice.R0000644000176200001440000001175514012076542015177 0ustar liggesusers#' Slicing a multi-scale density/intensity object
#'
#' Takes slices of a multi-scale density/intensity estimate at desired
#' global bandwidths
#'
#' Davies & Baddeley (2018) demonstrate that once a multi-scale
#' density/intensity estimate has been computed, we may take slices parallel to
#' the spatial domain of the trivariate convolution to return the estimate at
#' any desired global bandwidth. This function is the implementation thereof
#' based on a multi-scale estimate resulting from a call to
#' \code{\link{multiscale.density}}.
#'
#' The function returns an error if the
#' requested slices at \code{h0} are not all within the available range of
#' pre-computed global bandwidth scalings as defined by the \code{h0range}
#' component of \code{msob}.
#'
#' Because the contents of the \code{msob} argument, an object of class
#' \code{\link{msden}}, are returned based on a discretised set of global
#' bandwidth scalings, the function internally computes the desired surface as
#' a pixel-by-pixel linear interpolation using the two discretised global
#' bandwidth rescalings that bound each requested \code{h0}. (Thus, numeric
#' accuracy of the slices is improved with an increase to the \code{dimz}
#' argument of the preceding call to \code{multiscale.density} at the cost of
#' additional computing time.)
#'
#' @param msob An object of class \code{\link{msden}} giving the multi-scale
#' estimate from which to take slices.
#' @param h0 Desired global bandwidth(s); the density/intensity estimate
#' corresponding to which will be returned. A numeric vector. All values \bold{must} be in the
#' available range provided by \code{msob$h0range}; see `Details'.
#' @param checkargs Logical value indicating whether to check validity of
#' \code{msob} and \code{h0}. Disable only if you know this check will be
#' unnecessary.
#'
#' @return If \code{h0} is scalar, an object of class \code{\link{bivden}} with components
#' corresponding to the requested slice at \code{h0}. If \code{h0} is a vector, a list of objects
#' of class \code{\link{bivden}}.
#'
#' @author T.M. Davies
#'
#' @seealso \code{\link{multiscale.density}}, \code{\link{bivariate.density}}
#'
#' @references
#' Davies, T.M. and Baddeley A. (2018), Fast computation of
#' spatially adaptive kernel estimates, \emph{Statistics and Computing}, \bold{28}(4), 937-956.
#'
#' @examples
#' \donttest{
#' data(chorley) # Chorley-Ribble data (package 'spatstat')
#' ch.multi <- multiscale.density(chorley,h0=1,h0fac=c(0.5,2))
#'
#' available.h0(ch.multi)
#' ch.slices <- multiscale.slice(ch.multi,h0=c(0.7,1.1,1.6))
#'
#' par(mfcol=c(2,3)) # plot each density and edge-correction surface
#' for(i in 1:3) { plot(ch.slices[[i]]$z); plot(ch.slices[[i]]$q) }
#' }
#'
#' @export
multiscale.slice <- function(msob,h0,checkargs=TRUE){
  # Validate inputs unless explicitly disabled: every requested h0 must lie
  # within the pre-computed range of global bandwidth scalings.
  if(checkargs){
    if(!inherits(msob,"msden")) stop("'msob' must be of class \"msden\"")
    if(!is.vector(h0)||!is.numeric(h0)) stop("'h0' must be a numeric vector")
    h0 <- sapply(h0,checkit,str="'h0'")
    aran <- msob$h0range
    if(!all(sapply(h0,function(x) x>=aran[1]) & sapply(h0,function(x) x<=aran[2]))) stop(paste("at least one requested 'h0' is outside available range of",prange(aran)))
    # was:
    # if(!inside.range(h0,aran)) stop(paste("requested 'h0' outside available range of",prange(aran)))
  }
  # Discretised bandwidth labels and the corresponding stored surfaces.
  avail <- names(msob$z)
  zz <- msob$z
  hh <- msob$him
  qq <- msob$q
  hlen <- length(h0)
  if(hlen==1){
    # Scalar request: return a single 'bivden' object. Point-wise bandwidths
    # are rescaled from the reference global bandwidth msob$h0.
    slc <- ms.slice.single(h0,avail,zz,hh,qq)
    result <- list(z=slc$z,h0=h0,hp=msob$hp,h=msob$h/msob$h0*h0,him=slc$h,q=slc$q,gamma=msob$gamma,geometric=msob$geometric,pp=msob$pp)
    class(result) <- "bivden"
  } else {
    # Vector request: return a list of 'bivden' objects, one per h0[i].
    result <- list()
    for(i in 1:hlen){
      slc <- ms.slice.single(h0[i],avail,zz,hh,qq)
      # BUGFIX: previously 'h=msob$h/msob$h0[i]*h0[i]'. msob$h0 is the scalar
      # reference bandwidth (cf. the scalar branch above, which uses it
      # unindexed), so indexing it by i produced NA bandwidth vectors for
      # every slice after the first.
      retob <- list(z=slc$z,h0=h0[i],hp=msob$hp,h=msob$h/msob$h0*h0[i],him=slc$h,q=slc$q,gamma=msob$gamma,geometric=msob$geometric,pp=msob$pp)
      class(retob) <- "bivden"
      result[[i]] <- retob
    }
  }
  return(result)
}

# Internal: extract (or linearly interpolate) the density surface 'z',
# hypothetical-bandwidth image 'h' and edge-correction surface 'q' at a
# single requested global bandwidth V, given the discretised set of
# available bandwidth labels 'avail'.
ms.slice.single <- function(V,avail,zz,hh,qq,warn=FALSE){
  la <- length(avail)
  if(any(avail==as.character(V))){
    # Exact match with a pre-computed scaling: no interpolation needed.
    index <- which(avail==as.character(V))
    zres <- zz[[index]]
    hres <- hh[[index]]
    qres <- qq[[index]]
  } else {
    marker <- as.numeric(avail)>V
    if(sum(marker)==la){
      # V below the smallest available scaling: clamp to the first slice.
      zres <- zz[[1]]
      hres <- hh[[1]]
      qres <- qq[[1]]
      if(warn) warning("lower index mismatch")
    } else if(sum(marker)==0){
      # V above the largest available scaling: clamp to the last slice.
      zres <- zz[[la]]
      hres <- hh[[la]]
      qres <- qq[[la]]
      if(warn) warning("upper index mismatch")
    } else {
      # Pixel-wise linear interpolation between the two bounding scalings.
      marker <- which(marker)[1]
      mindex <- c(marker-1,marker)
      hint <- as.numeric(avail)[mindex]
      move <- (V-hint[1])/diff(hint)
      zdiff <- zz[[mindex[2]]]-zz[[mindex[1]]]
      hdiff <- hh[[mindex[2]]]-hh[[mindex[1]]]
      zres <- zz[[mindex[1]]]+move*zdiff
      hres <- hh[[mindex[1]]]+move*hdiff
      # BUGFIX: the edge-correction difference is now computed inside the
      # NULL guard; previously 'qdiff <- qq[[mindex[2]]]-qq[[mindex[1]]]'
      # ran unconditionally and errored before the guard whenever qq is NULL
      # (i.e. no edge correction stored).
      if(!is.null(qq)){
        qdiff <- qq[[mindex[2]]]-qq[[mindex[1]]]
        qres <- qq[[mindex[1]]]+move*qdiff
      } else qres <- NULL
    }
  }
  return(list(z=zres,h=hres,q=qres))
}
sparr/R/bivariate.density.R0000644000176200001440000004617614012076542015364 0ustar liggesusers#' Bivariate kernel density/intensity estimation
#'
#' Provides an isotropic adaptive or fixed bandwidth kernel density/intensity
#' estimate of bivariate/planar/2D data.
#'
#' Given a data set \eqn{x_1,\dots,x_n} in 2D, the isotropic kernel estimate of
#' its probability density function, \eqn{\hat{f}(x)}{\hat{f}(x)}, is given by
#' \deqn{\hat{f}(y)=n^{-1}\sum_{i=1}^{n}h(x_i)^{-2}K((y-x_i)/h(x_i)) }
#' where \eqn{h(x)}{h(x)} is the bandwidth function, and \eqn{K(.)} is the
#' bivariate standard normal smoothing kernel. Edge-correction factors (not
#' shown above) are also implemented.
#'
#' \describe{
#' \item{\bold{Fixed}}{
#' The classic fixed bandwidth kernel estimator is used when
#' \code{adapt = FALSE}. This amounts to setting \eqn{h(u)=}\code{h0} for all \eqn{u}.
#' Further details can be found in the documentation for \code{\link[spatstat.core]{density.ppp}}.} #' \item{\bold{Adaptive}}{Setting \code{adapt = TRUE} requests computation of Abramson's (1982) #' variable-bandwidth estimator. Under this framework, we have #' \eqn{h(u)=}\code{h0}*min[\eqn{\tilde{f}(u)^{-1/2}},\eqn{G*}\code{trim}]/\eqn{\gamma}, #' where \eqn{\tilde{f}(u)} is a fixed-bandwidth kernel density estimate #' computed using the pilot bandwidth \code{hp}. #' \itemize{ #' \item Global smoothing of the variable bandwidths is controlled with the global bandwidth #' \code{h0}. #' \item In the above statement, \eqn{G} is the geometric mean of the #' ``bandwidth factors'' \eqn{\tilde{f}(x_i)^{-1/2}}; \eqn{i=1,\dots,n}. By #' default, the variable bandwidths are rescaled by \eqn{\gamma=G}, which is #' set with \code{gamma.scale = "geometric"}. This allows \code{h0} to be #' considered on the same scale as the smoothing parameter in a fixed-bandwidth #' estimate i.e. on the scale of the recorded data. You can use any other #' rescaling of \code{h0} by setting \code{gamma.scale} to be any scalar #' positive numeric value; though note this only affects \eqn{\gamma} -- see #' the next bullet. When using a scale-invariant \code{h0}, set #' \code{gamma.scale = 1}. #' \item The variable bandwidths must be trimmed to #' prevent excessive values (Hall and Marron, 1988). This is achieved through #' \code{trim}, as can be seen in the equation for \eqn{h(u)} above. The #' trimming of the variable bandwidths is universally enforced by the geometric #' mean of the bandwidth factors \eqn{G} independent of the choice of #' \eqn{\gamma}. By default, the function truncates bandwidth factors at five #' times their geometric mean. For stricter trimming, reduce \code{trim}, for #' no trimming, set \code{trim = Inf}. #' \item For even moderately sized data sets #' and evaluation grid \code{resolution}, adaptive kernel estimation can be #' rather computationally expensive. 
The argument \code{davies.baddeley} is #' used to approximate an adaptive kernel estimate by a sum of fixed bandwidth #' estimates operating on appropriate subsets of \code{pp}. These subsets are #' defined by ``bandwidth bins'', which themselves are delineated by a quantile #' step value \eqn{0<\delta<1}. E.g. setting \eqn{\delta=0.05} will create 20 #' bandwidth bins based on the 0.05th quantiles of the Abramson variable #' bandwidths. Adaptive edge-correction also utilises the partitioning, with #' pixel-wise bandwidth bins defined using the value \eqn{0<\beta<1}, and the #' option to decrease the resolution of the edge-correction surface for #' computation to a [\eqn{L} \eqn{\times}{x} \eqn{L}] grid, where \eqn{0 0. #' @param hp Pilot bandwidth (scalar, numeric > 0) to be used for fixed #' bandwidth estimation of a pilot density in the case of adaptive smoothing. #' If \code{NULL} (default), it will take on the value of \code{h0}. Ignored #' when \code{adapt = FALSE} or if \code{pilot.density} is supplied as a #' pre-defined pixel image. #' @param adapt Logical value indicating whether to perform adaptive kernel #' estimation. See `Details'. #' @param resolution Numeric value > 0. Resolution of evaluation grid; the #' density/intensity will be returned on a [\code{resolution} \eqn{\times}{x} #' \code{resolution}] grid. #' @param gamma.scale Scalar, numeric value > 0; controls rescaling of the #' variable bandwidths. Defaults to the geometric mean of the bandwidth factors #' given the pilot density (as per Silverman, 1986). See `Details'. #' @param edge Character string giving the type of edge correction to employ. #' \code{"uniform"} (default) corrects based on evaluation grid coordinate and #' \code{"diggle"} reweights each observation-specific kernel. Setting #' \code{edge = "none"} requests no edge correction. Further details can be #' found in the documentation for \code{\link[spatstat.core]{density.ppp}}. 
#' @param weights Optional numeric vector of nonnegative weights corresponding to #' each observation in \code{pp}. Must have length equal to \code{npoints(pp)}. #' @param intensity Logical value indicating whether to return an intensity #' estimate (integrates to the sample size over the study region), or a density #' estimate (default, integrates to 1). #' @param trim Numeric value > 0; controls bandwidth truncation for adaptive #' estimation. See `Details'. #' @param xy Optional alternative specification of the evaluation grid; matches #' the argument of the same tag in \code{\link[spatstat.geom]{as.mask}}. If #' supplied, \code{resolution} is ignored. #' @param pilot.density An optional pixel image (class #' \code{\link[spatstat.geom]{im}}) giving the pilot density to be used for #' calculation of the variable bandwidths in adaptive estimation, \bold{or} a #' \code{\link[spatstat.geom:ppp]{ppp.object}} giving the data upon which to base a #' fixed-bandwidth pilot estimate using \code{hp}. If used, the pixel image #' \emph{must} be defined over the same domain as the data given #' \code{resolution} or the supplied pre-set \code{xy} evaluation grid; #' \bold{or} the planar point pattern data must be defined with respect to the #' same polygonal study region as in \code{pp}. #' @param leaveoneout Logical value indicating whether to compute and return #' the value of the density/intensity at each data point for an adaptive #' estimate. See `Details'. #' @param parallelise Numeric argument to invoke parallel processing, giving #' the number of CPU cores to use when \code{leaveoneout = TRUE}. Experimental. #' Test your system first using \code{parallel::detectCores()} to identify the #' number of cores available to you. 
#' @param davies.baddeley An optional numeric vector of length 3 to control #' bandwidth partitioning for approximate adaptive estimation, giving the #' quantile step values for the variable bandwidths for density/intensity and #' edge correction surfaces and the resolution of the edge correction surface. #' May also be provided as a single numeric value. See `Details'. #' @param verbose Logical value indicating whether to print a function progress #' bar to the console when \code{adapt = TRUE}. #' #' @return If \code{leaveoneout = FALSE}, an object of class \code{"bivden"}. #' This is effectively a list with the following components: #' \item{z}{The #' resulting density/intensity estimate, a pixel image object of class #' \code{\link[spatstat.geom]{im}}.} #' #' \item{h0}{A copy of the value of \code{h0} #' used.} \item{hp}{A copy of the value of \code{hp} used.} #' #' \item{h}{A numeric #' vector of length equal to the number of data points, giving the bandwidth #' used for the corresponding observation in \code{pp}.} #' #' \item{him}{A pixel #' image (class \code{\link[spatstat.geom]{im}}), giving the `hypothetical' Abramson #' bandwidth at each pixel coordinate conditional upon the observed data. #' \code{NULL} for fixed-bandwidth estimates.} #' #' \item{q}{Edge-correction #' weights; a pixel \code{\link[spatstat.geom]{im}}age if \code{edge = "uniform"}, a #' numeric vector if \code{edge = "diggle"}, and \code{NULL} if \code{edge = #' "none"}.} #' #' \item{gamma}{The value of \eqn{\gamma} used in scaling the #' bandwidths. \code{NA} if a fixed bandwidth estimate is computed.} #' #' \item{geometric}{The geometric mean \eqn{G} of the untrimmed bandwidth #' factors \eqn{\tilde{f}(x_i)^{-1/2}}. 
\code{NA} if a fixed bandwidth estimate #' is computed.} #' #' \item{pp}{A copy of the \code{\link[spatstat.geom:ppp]{ppp.object}} #' initially passed to the \code{pp} argument, containing the data that were #' smoothed.} #' #' #' Else, if \code{leaveoneout = TRUE}, simply a numeric vector of length equal to the #' number of data points, giving the leave-one-out value of the function at the #' corresponding coordinate. #' #' @author T.M. Davies and J.C. Marshall #' #' @references #' Abramson, I. (1982). On bandwidth variation in kernel estimates #' --- a square root law, \emph{Annals of Statistics}, \bold{10}(4), #' 1217-1223. #' #' Davies, T.M. and Baddeley A. (2018), Fast computation of #' spatially adaptive kernel estimates, \emph{Statistics and Computing}, \bold{28}(4), 937-956. #' #' Davies, T.M. and Hazelton, M.L. (2010), Adaptive kernel estimation of spatial relative #' risk, \emph{Statistics in Medicine}, \bold{29}(23) 2423-2437. #' #' Davies, T.M., Jones, K. and Hazelton, M.L. (2016), Symmetric adaptive smoothing #' regimens for estimation of the spatial relative risk function, #' \emph{Computational Statistics & Data Analysis}, \bold{101}, 12-28. #' #' Diggle, P.J. (1985), A kernel method for smoothing point process data, #' \emph{Journal of the Royal Statistical Society, Series C}, \bold{34}(2), #' 138-147. #' #' Hall P. and Marron J.S. (1988) Variable window width kernel #' density estimates of probability densities. \emph{Probability Theory and #' Related Fields}, \bold{80}, 37-49. #' #' Marshall, J.C. and Hazelton, M.L. (2010) Boundary kernels for adaptive density #' estimators on regions with irregular boundaries, \emph{Journal of Multivariate #' Analysis}, \bold{101}, 949-963. #' #' Silverman, B.W. (1986), \emph{Density Estimation for #' Statistics and Data Analysis}, Chapman & Hall, New York. #' #' Wand, M.P. and Jones, C.M., 1995. \emph{Kernel Smoothing}, Chapman & Hall, London. 
#'
#' @examples
#'
#' data(chorley) # Chorley-Ribble data from package 'spatstat'
#'
#' # Fixed bandwidth kernel density; uniform edge correction
#' chden1 <- bivariate.density(chorley,h0=1.5)
#'
#' # Fixed bandwidth kernel density; diggle edge correction; coarser resolution
#' chden2 <- bivariate.density(chorley,h0=1.5,edge="diggle",resolution=64)
#'
#' \donttest{
#' # Adaptive smoothing; uniform edge correction
#' chden3 <- bivariate.density(chorley,h0=1.5,hp=1,adapt=TRUE)
#'
#' # Adaptive smoothing; uniform edge correction; partitioning approximation
#' chden4 <- bivariate.density(chorley,h0=1.5,hp=1,adapt=TRUE,davies.baddeley=0.025)
#'
#' par(mfrow=c(2,2))
#' plot(chden1);plot(chden2);plot(chden3);plot(chden4)
#' }
#'
#' @export
bivariate.density <- function(pp,h0,hp=NULL,adapt=FALSE,resolution=128,gamma.scale="geometric",edge=c("uniform","diggle","none"),weights=NULL,intensity=FALSE,trim=5,xy=NULL,pilot.density=NULL,leaveoneout=FALSE,parallelise=NULL,davies.baddeley=NULL,verbose=TRUE){
  # --- Validation and evaluation-grid setup ---
  if(!inherits(pp,"ppp")) stop("data argument 'pp' must be of spatstat class \"ppp\"; see ?ppp")
  W <- Window(pp)
  # A supplied 'xy' grid overrides 'resolution'.
  if(!is.null(xy)){
    xy <- checkxy(xy)
    dimyx <- NULL
    resolution <- length(xy$x)
  } else {
    resolution <- checkit(resolution,"'resolution'")
    dimyx <- rep(resolution,2)
  }
  edge <- checkedge(edge)
  h0 <- checkit(h0,"'h0'")
  if(!is.null(trim)&&!is.na(trim)) trim <- checkit(trim,"'trim'")
  n <- npoints(pp)
  if(!is.null(weights)) weights <- checkwei(weights,n)
  if(adapt){
    # --- Adaptive (Abramson) estimation ---
    if(is.null(hp)) hp <- h0
    else hp <- checkit(hp,"'hp'")
    # Leave-one-out values are delegated entirely to bivden.LOO.
    if(leaveoneout) return(bivden.LOO(pp,h0,hp,(edge=="uniform"||edge=="diggle"),gamma.scale,trim,resolution,parallelise,weights,0)[[1]])
    pd <- pilot.density
    pilot.data <- pp
    # Pilot density may be supplied as an image, as a separate point
    # pattern to smooth with 'hp', or (default) estimated from 'pp' itself.
    if(!is.null(pd)){
      if(is.im(pd)){
        if(is.null(xy)){
          if(!all(dim(pd)==resolution)) stop("'pilot.density' image resolution must strictly have 'resolution' x 'resolution' pixels")
        } else {
          if((!all(pd$xcol==xy$x))||(!all(pd$yrow==xy$y))) stop("'pilot.density' xcol and yrow must strictly match coords in 'xy'")
        }
        # Force strict positivity of the supplied pilot image.
        pilot.density[pd<=0] <- min(pd[pd>0])
        hp <- NULL
      } else if(is.ppp(pd)){
        pilot.data <- pd
        if(!identical_windows(Window(pp),Window(pilot.data))) stop("'pilot.density' window must be identical to 'pp' window")
        pilot.density <- density(pilot.data,sigma=hp,edge=(edge=="uniform"||edge=="diggle"),diggle=(edge=="diggle"),dimyx=dimyx,xy=xy,positive=TRUE)
      } else {
        stop("'pilot.density' must be an object of class \"im\" or \"ppp\"")
      }
    } else {
      pilot.density <- density(pp,sigma=hp,edge=(edge=="uniform"||edge=="diggle"),diggle=(edge=="diggle"),dimyx=dimyx,xy=xy,positive=TRUE,weights=weights)
    }
    # Normalise the pilot to a density; 'pspec' are the raw Abramson
    # bandwidth factors f~(x_i)^(-1/2) at the data points.
    pilot.density.spec <- safelookup(pilot.density,pp,warn=FALSE)
    pi.int <- integral(pilot.density)
    pilot.density <- pilot.density/pi.int
    pilot.density.spec <- pilot.density.spec/pi.int
    pspec <- pilot.density.spec^(-0.5)
    gamma <- processgamma(gamma.scale,safelookup(pilot.density,pilot.data,warn=FALSE)) #'was: processgamma(gamma.scale,pilot.density.spec)
    # 'gs' is the geometric mean G of the bandwidth factors at the data in
    # 'pp'; 'gspd' is the same quantity at the pilot data (differs only when
    # an external pilot was supplied) and universally governs trimming.
    gs <- gspd <- exp(mean(log(pspec)))
    if(!is.null(pd)) gspd <- exp(mean(log(safelookup(pilot.density,pilot.data,warn=FALSE)^(-0.5))))
    # PREVIOUS TRIMMING REGIMEN #
    # h.spec <- h0*pilot.density.spec^(-0.5)/gamma
    # h.hypo <- h0*pilot.density^(-0.5)/gamma
    # if(is.null(trim)) beta.h <- 5*median(h.spec,na.rm=TRUE)
    # else if(is.na(trim)) beta.h <- max(h.hypo,na.rm=TRUE)
    # else beta.h <- trim
    # h.spec[h.spec>beta.h] <- beta.h
    # h.hypo[h.hypo>beta.h] <- beta.h
    # h.hypo.mat <- as.matrix(h.hypo)
    # NEW TRIMMING REGIMEN #
    # h.spec <- h0*pmin(pspec/gamma,trim) ### Generalised below for numeric gamma argument vals. Trimming is universally determined by the geometric mean 'gs', regardless of 'gamma.scale' ###
    # h.hypo <- h0*im(matrix(pmin(as.vector(as.matrix(pilot.density^(-0.5)))/gamma,trim),resolution,resolution),xcol=pilot.density$xcol,yrow=pilot.density$yrow)
    # h.spec: variable bandwidths at the data points; h.hypo: the
    # 'hypothetical' bandwidth image over the whole grid. Both trimmed at
    # trim*G before rescaling by gamma.
    h.spec <- h0*pmin(pspec,trim*gspd)/gamma
    h.hypo <- h0*im(matrix(pmin(as.vector(as.matrix(pilot.density^(-0.5))),trim*gspd),resolution,resolution)/gamma,xcol=pilot.density$xcol,yrow=pilot.density$yrow)
    h.hypo.mat <- as.matrix(h.hypo)
    h.hypo.vec <- as.numeric(t(h.hypo.mat))
    # Davies-Baddeley partitioning approximation: delegate to 'adens' and
    # return early with its binned result.
    if(!is.null(davies.baddeley)){
      db <- checkdb(davies.baddeley)
      db.result <- adens(x=pp,bwim=h.hypo,bwpts=h.spec,resolution=resolution,intensity=intensity,edge=(edge=="uniform"||edge=="diggle"),diggle=(edge=="diggle"),weights=weights,hstep=db[1],qstep=db[2],qres=ifelse(is.na(db[3]),resolution,db[3]),verbose)
      result <- list(z=db.result$result,h0=h0,hp=hp,h=h.spec,him=h.hypo,q=db.result$edg,gamma=gamma,geometric=gs,pp=pp)
      class(result) <- "bivden"
      return(result)
    }
    # --- Exact adaptive evaluation, pixel by pixel ---
    evalxy <- as.matrix(expand.grid(pilot.density$xcol,pilot.density$yrow))
    notin <- !inside.owin(x=evalxy[,1],y=evalxy[,2],w=W)
    surf <- rep(NA,nrow(evalxy))
    ef <- NULL
    # we need only evaluate on points inside the owin
    evalxy.in <- evalxy[!notin,]
    h.hypo.in <- h.hypo.vec[!notin]
    surf.in <- numeric(nrow(evalxy.in))
    if(is.null(weights)) weights <- rep(1,n)
    if(edge=="uniform"){
      # Uniform correction: divide by kernel mass qhz retained inside W at
      # each evaluation point (hypothetical bandwidth there).
      qhz.in <- numeric(nrow(evalxy.in))
      if(verbose) pb <- txtProgressBar(0,nrow(evalxy.in))
      for(i in 1:nrow(evalxy.in)){
        gxy <- kernel2d(evalxy.in[,1]-evalxy.in[i,1], evalxy.in[,2]-evalxy.in[i,2], h.hypo.in[i])
        qhz.in[i] <- dintegral(gxy, pilot.density$xstep, pilot.density$ystep)
        ivals <- kernel2d(pp$x-evalxy.in[i,1], pp$y-evalxy.in[i,2], h.spec) #
        if(!intensity) surf.in[i] <- mean(ivals)/qhz.in[i] #
        else surf.in[i] <- sum(weights*ivals)/qhz.in[i]
        # NOTE(review): the density branch uses a plain mean (ignores
        # 'weights'); the intensity branch applies them -- confirm intended.
        if(verbose) setTxtProgressBar(pb,i)
      }
      if(verbose) close(pb)
      qhz <- rep(NA,resolution^2)
      qhz[!notin] <- qhz.in
      ef <- im(matrix(qhz,resolution,resolution,byrow=TRUE),xcol=pilot.density$xcol,yrow=pilot.density$yrow)
    }
    if(edge=="diggle"){
      # Diggle correction: reweight each datum's kernel by the reciprocal of
      # its own retained mass qx[i] inside W.
      qx <- rep(1,n)
      if(verbose) pb <- txtProgressBar(0,n+nrow(evalxy.in))
      for(i in 1:n){
        pxy <- kernel2d(evalxy.in[,1]-pp$x[i], evalxy.in[,2]-pp$y[i], h.spec[i])
        qx[i] <- dintegral(pxy, pilot.density$xstep, pilot.density$ystep)
        if(verbose) setTxtProgressBar(pb,i)
      }
      for(i in 1:nrow(evalxy.in)){
        ivals <- kernel2d(pp$x-evalxy.in[i,1], pp$y-evalxy.in[i,2], h.spec) #
        if(!intensity) surf.in[i] <- mean(ivals/qx) #
        else surf.in[i] <- sum(weights*ivals/qx)
        if(verbose) setTxtProgressBar(pb,n+i)
      }
      if(verbose) close(pb)
      ef <- qx
    }
    if(edge=="none"){
      # No edge correction: straight kernel sums.
      if(verbose) pb <- txtProgressBar(0,nrow(evalxy.in))
      for(i in 1:nrow(evalxy.in)){
        ivals <- kernel2d(pp$x-evalxy.in[i,1], pp$y-evalxy.in[i,2], h.spec) #
        if(!intensity) surf.in[i] <- mean(ivals) #
        else surf.in[i] <- sum(weights*ivals)
        if(verbose) setTxtProgressBar(pb,i)
      }
      if(verbose) close(pb)
    }
    surf[!notin] <- surf.in
    surf <- im(matrix(surf,resolution,resolution,byrow=TRUE),xcol=pilot.density$xcol,yrow=pilot.density$yrow)
  } else {
    # --- Fixed-bandwidth estimation: delegate to spatstat's density.ppp ---
    h.spec <- rep(h0,n)
    h.hypo <- NULL
    gs <- gamma <- NA
    dens <- density.ppp(pp,sigma=h0,dimyx=dimyx,xy=xy,edge=(edge=="diggle"||edge=="uniform"),diggle=(edge=="diggle"),weights=weights,spill=1)
    surf <- dens$raw[W,drop=FALSE]
    ef <- dens$edg[W,drop=FALSE]
    ef[ef>1] <- 1
    if(edge=="diggle"){
      # Diggle: store per-point correction weights.
      ef <- safelookup(ef,pp,warn=FALSE)
    } else if(edge=="uniform"){
      # Uniform: divide the raw surface by the retained-mass image.
      surf <- surf/ef
      surf[surf<0] <- 0
    } else {
      ef <- NULL
    }
  }
  # Renormalise to a probability density unless an intensity was requested.
  if(!intensity) surf <- surf/integral(surf)
  result <- list(z=surf,h0=h0,hp=hp,h=h.spec,him=h.hypo,q=ef,gamma=gamma,geometric=gs,pp=pp)
  class(result) <- "bivden"
  return(result)
}
sparr/R/print.rrs.R0000644000176200001440000000046214012076542013671 0ustar liggesusers#' @rdname printsparr
#' @method print rrs
#' @export
print.rrs <- function(x, ...){
  # All-nonnegative risk surface implies the raw ratio; otherwise log-scale.
  if(all(x$rr>=0)) cat("Relative Risk Surface\n\n")
  else cat("Log-Relative Risk Surface\n\n")
  cat("--Numerator (case) density--\n")
  print.bivden(x$f)
cat("\n--Denominator (control) density--\n") print.bivden(x$g) }sparr/R/tol.mc.fix.R0000644000176200001440000000322314012076542013707 0ustar liggesusers tol.mc.fix <- function(rs,ITER,parallel,verbose,...){ fd <- rs$f gd <- rs$g pool <- suppressWarnings(superimpose(fd$pp,gd$pp)) nf <- npoints(fd$pp) ng <- npoints(gd$pp) res <- dim(fd$z)[1] fh <- fd$h0 gh <- gd$h0 indx <- 1:npoints(pool) if(is.null(fd$q)){ edg <- "none" } else if(is.im(fd$q)){ edg <- "uniform" } else { edg <- "diggle" } logt <- !all(rs$rr>=0) rmat <- as.matrix(rs$rr) mcmat <- matrix(1,res,res) if(is.null(parallel)){ if(verbose) pb <- txtProgressBar(0,ITER-1,style=3) for(i in 1:(ITER-1)){ shuff <- sample(indx) ftemp <- bivariate.density(pool[shuff[1:nf]],h0=fh,resolution=res,edge=edg) gtemp <- bivariate.density(pool[shuff[(nf+1):(nf+ng)]],h0=gh,resolution=res,edge=edg) rtemp <- as.matrix(risk(ftemp,gtemp,log=logt,verbose=FALSE,...)$rr) mcmat <- mcmat+(rtemp>=rmat) if(verbose) setTxtProgressBar(pb,i) } if(verbose) close(pb) } else { ncores <- detectCores() if(verbose) cat(paste("Running MC iterations on",parallel,"/",ncores,"cores...")) if(parallel>ncores) stop("Parallel cores requested exceeds available count") registerDoParallel(cores=parallel) mclist <- foreach(i=1:(ITER-1),.packages=c("spatstat","sparr")) %dopar% { shuff <- sample(indx) ftemp <- bivariate.density(pool[shuff[1:nf]],h0=fh,resolution=res,edge=edg) gtemp <- bivariate.density(pool[shuff[(nf+1):(nf+ng)]],h0=gh,resolution=res,edge=edg) rtemp <- as.matrix(risk(ftemp,gtemp,log=logt,verbose=FALSE,...)$rr) return(rtemp>=rmat) } mcmat <- Reduce("+",mclist) + mcmat if(verbose) cat("Done.\n") } return(mcmat/ITER) } sparr/R/BAMprep.R0000644000176200001440000000245214012076542013217 0ustar liggesusers BAMprep <- function(cases,controls,lambda,erode,res){ W <- Window(cases) WM <- as.mask(W,dimyx=res) yx <- expand.grid(WM$yrow,WM$xcol) if(is.na(erode)){ xyin <- inside.owin(x=yx[,2],y=yx[,1],w=W) } else { ewin <- 
erosion(W,erode*lambda,polygonal=TRUE) xyin <- inside.owin(x=yx[,2],y=yx[,1],w=ewin) } fdd <- nu.dashdash(cbind(cases$x,cases$y),yx[,2],yx[,1],lambda,xyin) gdd <- nu.dashdash(cbind(controls$x,controls$y),yx[,2],yx[,1],lambda,xyin) inside <- WM$m res2 <- 2*res resseq <- 1:res xcol.ker <- WM$xstep*c(0:(res-1),-rev(resseq)) yrow.ker <- WM$ystep*c(0:(res-1),-rev(resseq)) kerpixarea <- WM$xstep*WM$ystep len.pad <- res2^2 Mpad <- matrix(0, ncol=2*res, nrow=2*res) Mpad[1:res,1:res] <- inside fM <- fft(Mpad) return(list(cas=cases,con=controls,xyin=xyin,fdd=fdd,gdd=gdd,res=res,ero=erode,fM=fM,xk=xcol.ker,yk=yrow.ker,kpa=kerpixarea,lp=len.pad,WM=WM)) } nu.dashdash <- function(data,xs,ys,lambda,xyin){ res2 <- length(xs) n <- nrow(data) result <- rep(NA,res2) for(i in 1:res2){ if(!xyin[i]) next xyir <- cbind(rep(xs[i],n),rep(ys[i],n)) data.xydiff2 <- rowSums((data-xyir)^2) kdashdash <- (data.xydiff2/(2*lambda)-1)*exp(data.xydiff2/(-2*lambda^2)) result[i] <- (1/(n*pi*lambda^4))*sum(kdashdash) } return(result) }sparr/R/ms.loo.R0000644000176200001440000000555414012076542013146 0ustar liggesusersms.loo <- function(h0,object,za){ X <- object$pp n <- npoints(X) requested <- multiscale.slice(object,h0,checkargs=FALSE) rh <- requested$h rz <- requested$z rq <- requested$q if(za==-1){ if(any(rz<=0)) return(Inf) } rint <- integral(requested$z) zpoints <- safelookup(rz,X,warn=FALSE) if(is.null(rq)) qpoints <- rep(1,n) else qpoints <- safelookup(rq,X,warn=FALSE) rzn <- (rz/rint)^2 loo.atpoints <- (zpoints-dnorm(0,sd=rh)^2/qpoints)/(n-1) rznint <- integral(rzn) if(any(loo.atpoints<=0)){ if(za==2){ loo.atpoints[loo.atpoints<=0] <- min(loo.atpoints[loo.atpoints>0]) } else if(za==1){ loo.atpoints <- posifybivden(loo.atpoints) } else { return(Inf) } } #was: return(rznint) return(rznint-2*mean(loo.atpoints)) } ms.loo.lik <- function(h0,object,za){ X <- object$pp n <- npoints(X) requested <- multiscale.slice(object,h0,checkargs=FALSE) rh <- requested$h rz <- requested$z rq <- requested$q 
if(za==-1){ if(any(rz<=0)) return(-Inf) } zpoints <- safelookup(rz,X,warn=FALSE) if(is.null(rq)) qpoints <- rep(1,n) else qpoints <- safelookup(rq,X,warn=FALSE) loo.atpoints <- zpoints-(1/n)*(dnorm(0,sd=rh)^2/qpoints) if(any(loo.atpoints<=0)){ if(za==2){ loo.atpoints[loo.atpoints<=0] <- min(loo.atpoints[loo.atpoints>0]) } else if(za==1){ loo.atpoints <- posifybivden(loo.atpoints) } else { return(-Inf) } } #was: return(log(min(loo.atpoints[loo.atpoints>0]))) return(mean(log(loo.atpoints))) } ms.loo.risk <- function(h0,fob,gob,hazey=FALSE){ fX <- fob$pp gX <- gob$pp n1 <- npoints(fX) n2 <- npoints(gX) f.requested <- multiscale.slice(fob,h0,checkargs=FALSE) g.requested <- multiscale.slice(gob,h0,checkargs=FALSE) frh <- f.requested$h grh <- g.requested$h frz <- f.requested$z grz <- g.requested$z frq <- f.requested$q grq <- g.requested$q limz <- min(c(min(frz[frz>0]),min(grz[grz>0]))) # if(any(frz<=0)||any(grz<=0)) return(Inf) # pretty strict protection; generates optimise warnings by default frz[frz<=0] <- limz grz[grz<=0] <- limz f.fpoints <- safelookup(frz,fX,warn=FALSE) g.gpoints <- safelookup(grz,gX,warn=FALSE) f.gpoints <- safelookup(frz,gX,warn=FALSE) g.fpoints <- safelookup(grz,fX,warn=FALSE) if(is.null(frq)){ fqpoints <- rep(1,n1) gqpoints <- rep(1,n2) } else { fqpoints <- safelookup(frq,fX,warn=FALSE) gqpoints <- safelookup(grq,gX,warn=FALSE) } fminus <- f.fpoints - dnorm(0,sd=frh)^2/n1/fqpoints fminus[fminus<=0] <- limz gminus <- g.gpoints - dnorm(0,sd=grh)^2/n2/gqpoints gminus[gminus<=0] <- limz if(!hazey) return(2*mean((log(f.gpoints) - log(gminus))/gminus) - 2*mean((log(fminus) - log(g.fpoints))/fminus) - integral((log(frz)-log(grz))^2)) else return(mean((f.gpoints/gminus)^2) - 2*mean(fminus/g.fpoints)) } sparr/R/boot.opt.spatial.fix.R0000644000176200001440000000663014011701614015712 0ustar liggesusersboot.opt.spatial.fix <- function(h,rmdiag,edg,WM,resolution,fM,ifft_scale,inn,GN,evalyx.redu, pp,epsilon.eta,eta,nn,boot3,use_fftw,parallelise,verb){ 
if(verb) cat("h:",h,"\n") if(edg){ # get edge surface for 'h' fK.h <- kernel2d_fft(h,WM$xstep,WM$ystep,resolution) fK.con <- fft2d(fM*fK.h,inverse=TRUE,use_fftw)[1:resolution,1:resolution] edg.h <- Mod(fK.con)*ifft_scale edg.h[edg.h>1] <- 1 GE.h <- edg.h[inn] } else { GE.h <- rep(1,length(inn)) } # do edge-corrected fixed-bandwidth spatial bootstrap for 'h'; no resampling necessary if(!rmdiag){ if(is.na(parallelise)){ b1c <- b2c <- rep(NA,GN) for(i in 1:GN){ evx <- evalyx.redu[i,2]-pp$x evy <- evalyx.redu[i,1]-pp$y b2temp <- sum(epsilon.eta^(-1)*kernel2d(evx,evy,sqrt(h^2+eta^2))) b2c[i] <- b2temp*sum(epsilon.eta^(-1)*kernel2d(evx,evy,eta)) b1c[i] <- (2*sqrt(pi)*h)^(-2)*sum(epsilon.eta^(-1)*kernel2d(evx,evy,sqrt(0.5*h^2+eta^2))) + ((nn-1)/nn)*b2temp^2 } boot1 <- GE.h^(-2)*b1c boot2 <- -2*GE.h^(-1)*b2c } else { totcor <- detectCores() if(parallelise>totcor) stop("Parallel cores requested exceeds available count") # if(verbose) cat(paste(" --optimising on",parallelise,"/",totcor,"cores--\n")) registerDoParallel(cores=parallelise) b12c <- foreach(i=1:GN,.packages="sparr",.combine=rbind) %dopar% { evx <- evalyx.redu[i,2]-pp$x evy <- evalyx.redu[i,1]-pp$y b2temp <- sum(epsilon.eta^(-1)*kernel2d(evx,evy,sqrt(h^2+eta^2))) b2c <- b2temp*sum(epsilon.eta^(-1)*kernel2d(evx,evy,eta)) b1c <- (2*sqrt(pi)*h)^(-2)*sum(epsilon.eta^(-1)*kernel2d(evx,evy,sqrt(0.5*h^2+eta^2))) + ((nn-1)/nn)*b2temp^2 result <- c(b1c,b2c) } boot1 <- GE.h^(-2)*b12c[,1] boot2 <- -2*GE.h^(-1)*b12c[,2] } return(sum((nn^(-2)*boot1+nn^(-2)*boot2+boot3)*WM$xstep*WM$ystep)) } else { if(is.na(parallelise)){ b1c <- b2c <- rep(NA,GN) for(i in 1:GN){ evx <- evalyx.redu[i,2]-pp$x evy <- evalyx.redu[i,1]-pp$y b2temp.a <- epsilon.eta^(-1)*kernel2d(evx,evy,sqrt(h^2+eta^2)) b2temp.b <- epsilon.eta^(-1)*kernel2d(evx,evy,eta) b2c[i] <- sum(b2temp.a)*sum(b2temp.b) - sum(b2temp.a*b2temp.b) b1c[i] <- (2*sqrt(pi)*h)^(-2)*sum(epsilon.eta^(-1)*kernel2d(evx,evy,sqrt(0.5*h^2+eta^2))) + ((nn-1)/nn)*sum(b2temp.a)^2 } boot1 <- 
GE.h^(-2)*b1c boot2 <- -2*GE.h^(-1)*b2c } else { totcor <- detectCores() if(parallelise>totcor) stop("Parallel cores requested exceeds available count") # if(verbose) cat(paste(" --optimising on",parallelise,"/",totcor,"cores--\n")) registerDoParallel(cores=parallelise) b12c <- foreach(i=1:GN,.packages="sparr",.combine=rbind) %dopar% { evx <- evalyx.redu[i,2]-pp$x evy <- evalyx.redu[i,1]-pp$y b2temp.a <- epsilon.eta^(-1)*kernel2d(evx,evy,sqrt(h^2+eta^2)) b2temp.b <- epsilon.eta^(-1)*kernel2d(evx,evy,eta) b2c <- sum(b2temp.a)*sum(b2temp.b) - sum(b2temp.a*b2temp.b) b1c <- (2*sqrt(pi)*h)^(-2)*sum(epsilon.eta^(-1)*kernel2d(evx,evy,sqrt(0.5*h^2+eta^2))) + ((nn-1)/nn)*sum(b2temp.a)^2 return(c(b1c,b2c)) } boot1 <- GE.h^(-2)*b12c[,1] boot2 <- -2*GE.h^(-1)*b12c[,2] } return(sum(nn^(-2)*(boot1+boot2)*WM$xstep*WM$ystep)+boot3) } }sparr/R/LIK.spattemp.R0000644000176200001440000000272014012076542014202 0ustar liggesusers#' @export LIK.spattemp <- function(pp,tt=NULL,tlim=NULL,sedge=c("uniform","none"),tedge=sedge,parallelise=NA,start=NULL,verbose=TRUE){ if(!inherits(pp,"ppp")) stop("data argument 'pp' must be of spatstat class \"ppp\"; see ?ppp") W <- Window(pp) n <- npoints(pp) sedge <- checkedge(sedge,v=2) tedge <- checkedge(tedge,v=2) WM <- as.mask(W,dimyx=64) if(is.null(tt)) tt <- marks(pp) tt <- checktt(tt) if(length(tt)!=n) stop(paste("Length of temporal vector does not match number of spatial observations\n npoints(pp) = ",n,"; length(tt) = ",length(tt),sep="")) if(is.null(tlim)) tlim <- range(tt) tlim <- checkranin(tlim,tt,"tlim") if(!is.na(parallelise)){ if(!is.numeric(parallelise)) stop("'parallelise' must be numeric") if(is.null(parallelise)) parallelise <- NA parallelise <- round(parallelise[1]) } evalxy <- as.matrix(expand.grid(WM$xcol,WM$yrow)) notin <- !inside.owin(x=evalxy[,1],y=evalxy[,2],w=W) evalxy.in <- evalxy[!notin,] if(is.null(start)) start <- c(OS(pp),bw.SJ(tt)) if(any(start<0)) stop("invalid starting values in 'start'") result <- 
optim(start,LIK.density.spattemp.single, pp=pp,tt=tt,tlim=tlim,xyin=evalxy.in, xys=c(WM$xstep,WM$ystep), sedge=(sedge=="uniform"), tedge=(tedge=="uniform"), parallelise=parallelise, verbose=verbose)$par names(result) <- c("h","lambda") return(result) } sparr/R/multiscale.density.R0000644000176200001440000004102314012076542015546 0ustar liggesusers#' Multi-scale adaptive kernel density/intensity estimation #' #' Computes adaptive kernel estimates of spatial density/intensity using a 3D #' FFT for multiple global bandwidth scales. #' #' Davies & Baddeley (2018) investigated computational aspects of Abramson's #' (1982) adaptive kernel smoother for spatial (2D) data. This function is the #' implementation of the 3D convolution via a fast-Fourier transform (FFT) #' which allows simultaneous calculation of an adaptive kernel estimate at #' multiple global bandwidth scales. #' #' These `multiple global bandwidth scales' are computed with respect to #' rescaling a reference value of the global bandwidth passed to the \code{h0} #' argument. This rescaling is defined by the range provided to the argument #' \code{h0fac}. For example, by default, the function will compute the #' adaptive kernel estimate for a range of global bandwidths between #' 0.25*\code{h0} and 1.5*\code{h0}. The exact numeric limits are subject to #' discretisation, and so the returned valid range of global bandwidths will #' differ slightly. The exact resulting range following function execution is #' returned as the \code{h0range} element of the result, see `Value' below. #' #' The distinct values of global bandwidth used (which define the #' aforementioned \code{h0range}) and hence the total number of pixel #' \code{\link[spatstat.geom]{im}ages} returned depend on both the width of the span #' \code{h0fac} and the discretisation applied to the bandwidth axis through #' \code{dimz}. 
Increasing this z-resolution will provide more pixel images and #' hence greater numeric precision, but increases computational cost. The #' returned pixel \code{\link[spatstat.geom]{im}ages} that represent the multiscale #' estimates are stored in a named list (see `Value'), whose names reflect the #' corresponding distinct global bandwidth. See `Examples' for the easy way to #' extract these distinct global bandwidths. #' #' The user can request an interpolated density/intensity estimate for any #' global bandwidth value within \code{h0range} by using the #' \code{\link{multiscale.slice}} function, which returns an object of class #' \code{\link{bivden}}. #' #' @aliases multiscale.density msden #' #' @param pp An object of class \code{\link[spatstat.geom]{ppp}} giving the observed #' 2D data set to be smoothed. #' @param h0 Reference global bandwidth for adaptive smoothing; numeric value > #' 0. Multiscale estimates will be computed by rescaling this value as per #' \code{h0fac}. #' @param hp Pilot bandwidth (scalar, numeric > 0) to be used for fixed #' bandwidth estimation of the pilot density. If \code{NULL} (default), it will #' take on the value of \code{h0}. Ignored when \code{pilot.density} is #' supplied as a pre-defined pixel image. #' @param h0fac A numeric vector of length 2 stipulating the span of the global #' bandwidths in the multiscale estimates. Interpreted as a multiplicative #' factor on \code{h0}. See `Details'. #' @param edge Character string dictating edge correction. \code{"uniform"} #' (default) corrects based on evaluation grid coordinate. Setting \code{edge="none"} #' requests no edge correction. #' @param resolution Numeric value > 0. Resolution of evaluation grid in the #' spatial domain; the densities/intensities will be returned on a #' [\code{resolution} \eqn{\times}{x} \code{resolution}] grid. #' @param dimz Resolution of z- (rescaled bandwidth)-axis in the trivariate #' convolution. 
Higher values increase precision of the multiscale estimates at #' a computational cost. See `Details'. #' @param gamma.scale Scalar, numeric value > 0; controls rescaling of the #' variable bandwidths. Defaults to the geometric mean of the bandwidth factors #' given the pilot density (as per Silverman, 1986). See the documentation for #' \code{\link{bivariate.density}}. #' @param trim Numeric value > 0; controls bandwidth truncation for adaptive #' estimation. See the documentation for \code{\link{bivariate.density}}. #' @param intensity Logical value indicating whether to return an intensity #' estimate (integrates to the sample size over the study region), or a density #' estimate (default, integrates to 1). #' @param pilot.density An optional pixel image (class #' \code{\link[spatstat.geom]{im}}) giving the pilot density to be used for #' calculation of the variable bandwidths in adaptive estimation, \bold{or} a #' \code{\link[spatstat.geom:ppp]{ppp.object}} giving the data upon which to base a #' fixed-bandwidth pilot estimate using \code{hp}. See the documentation for #' \code{\link{bivariate.density}}. #' @param xy Optional alternative specification of the spatial evaluation grid; #' matches the argument of the same tag in \code{\link[spatstat.geom]{as.mask}}. If #' supplied, \code{resolution} is ignored. #' @param taper Logical value indicating whether to taper off the trivariate #' kernel outside the range of \code{h0*h0fac} in the scale space; see Davies & #' Baddeley (2018). Keep at the default \code{TRUE} if you don't know what this #' means. #' @param verbose Logical value indicating whether to print function progress. #' #' @return An object of class \code{"msden"}. This is very similar to a #' \code{\link{bivden}} object, with lists of pixel #' \code{\link[spatstat.geom]{im}}ages in the \code{z}, \code{him}, and \code{q} #' components (instead of standalone images). 
#' \item{z}{A list of the resulting #' density/intensity estimates; each member being a pixel image object of class #' \code{\link[spatstat.geom]{im}}. They are placed in increasing order of the #' discretised values of \code{h0}.} #' \item{h0}{A copy of the reference value of \code{h0} used.} #' \item{h0range}{A vector of length 2 giving the actual range #' of global bandwidth values available (inclusive).} #' \item{hp}{A copy of the value of \code{hp} used.} #' \item{h}{A numeric vector of length equal to the #' number of data points, giving the bandwidth used for the corresponding #' observation in \code{pp} with respect to the reference global bandwidth #' \code{h0}.} #' \item{him}{A list of pixel images (class \code{\link[spatstat.geom]{im}}), #' corresponding to \code{z}, giving the #' `hypothetical' Abramson bandwidth at each pixel coordinate conditional upon #' the observed data and the global bandwidth used.} #' \item{q}{Edge-correction weights; list of pixel \code{\link[spatstat.geom]{im}}ages #' corresponding to \code{z} if \code{edge = "uniform"}, and \code{NULL} if #' \code{edge = "none"}.} #' \item{gamma}{The numeric value of \code{gamma.scale} used in scaling the bandwidths.} #' \item{geometric}{The geometric mean of the #' untrimmed variable bandwidth factors. This will be identical to \code{gamma} #' if \code{gamma.scale = "geometric"} as per default.} #' \item{pp}{A copy of the \code{\link[spatstat.geom:ppp]{ppp.object}} initially passed to the #' \code{pp} argument, containing the data that were smoothed.} #' #' @author T.M. Davies and A. Baddeley #' #' @seealso \code{\link{bivariate.density}}, \code{\link{multiscale.slice}} #' #' @references #' #' Abramson, I. (1982). On bandwidth variation in kernel estimates #' --- a square root law, \emph{Annals of Statistics}, \bold{10}(4), #' 1217-1223. #' #' Davies, T.M. and Baddeley A. 
(2018), Fast computation of #' spatially adaptive kernel estimates, \emph{Statistics and Computing}, \bold{28}(4), 937-956. #' #' Silverman, B.W. (1986), \emph{Density Estimation for Statistics and Data Analysis}, #' Chapman & Hall, New York. #' #' @examples #' \donttest{ #' data(chorley) # Chorley-Ribble data (package 'spatstat') #' ch.multi <- multiscale.density(chorley,h0=1) #' plot(ch.multi) #' #' ch.pilot <- bivariate.density(chorley,h0=0.75) # with pre-defined pilot density #' ch.multi2 <- multiscale.density(chorley,h0=1,pilot.density=ch.pilot$z) #' plot(ch.multi2) #' #' data(pbc) #' # widen h0 scale, increase z-axis resolution #' pbc.multi <- multiscale.density(pbc,h0=2,hp=1,h0fac=c(0.25,2.5),dimz=128) #' plot(pbc.multi) #' } #' @export multiscale.density <- function(pp,h0,hp=NULL,h0fac=c(0.25,1.5),edge=c("uniform","none"),resolution=128,dimz=64,gamma.scale="geometric",trim=5,intensity=FALSE,pilot.density=NULL,xy=NULL,taper=TRUE,verbose=TRUE){ if(!inherits(pp,"ppp")) stop("data argument 'pp' must be of spatstat class \"ppp\"; see ?ppp") if(verbose) cat("Initialising...") n <- npoints(pp) W <- Window(pp) if(!is.null(xy)){ xy <- checkxy(xy) dimyx <- NULL resolution <- length(xy$x) } else { resolution <- checkit(resolution,"'resolution'") dimyx <- rep(resolution,2) } dimz <- checkit(dimz,"'dimz'") h0fac <- checkran(h0fac,"h0fac") h0 <- checkit(h0,"'h0'") if(is.null(hp)) hp <- h0 else hp <- checkit(hp,"'hp'") edge <- checkedge(edge,v=0) if(!is.null(trim)&&!is.na(trim)) trim <- checkit(trim,"'trim'") pd <- pilot.density pilot.data <- pp if(!is.null(pd)){ if(is.im(pd)){ if(is.null(xy)){ if(!all(dim(pd)==resolution)) stop("'pilot.density' image resolution must strictly have 'resolution' x 'resolution' pixels") } else { if((!all(pd$xcol==xy$x))||(!all(pd$yrow==xy$y))) stop("'pilot.density' xcol and yrow must strictly match coords in 'xy'") } pilot.density[pd<=0] <- min(pd[pd>0]) } else if(is.ppp(pd)){ pilot.data <- pd if(!identical_windows(W,Window(pilot.data))) 
stop("'pilot.density' window must be identical to 'pp' window") pilot.density <- density(pilot.data,sigma=hp,edge=(edge=="uniform"),diggle=FALSE,dimyx=dimyx,xy=xy,positive=TRUE) } else { stop("'pilot.density' must be an object of class \"im\" or \"ppp\"") } } else { pilot.density <- density(pp,sigma=hp,edge=(edge=="uniform"),diggle=FALSE,dimyx=dimyx,xy=xy,positive=TRUE) } pilot.density.spec <- safelookup(pilot.density,pp,warn=FALSE) pi.int <- integral(pilot.density) pilot.density <- pilot.density/pi.int pilot.density.spec <- pilot.density.spec/pi.int pspec <- pilot.density.spec^(-0.5) gamma <- processgamma(gamma.scale,safelookup(pilot.density,pilot.data,warn=FALSE)) gs <- gspd <- exp(mean(log(pspec))) if(!is.null(pd)) gspd <- exp(mean(log(safelookup(pilot.density,pilot.data,warn=FALSE)^(-0.5)))) h.spec <- h0*pmin(pspec,trim*gspd)/gamma h.hypo <- h0*im(matrix(pmin(as.vector(as.matrix(pilot.density^(-0.5))),trim*gspd),resolution,resolution)/gamma,xcol=pilot.density$xcol,yrow=pilot.density$yrow) h.hypo.mat <- as.matrix(h.hypo) marks(pp) <- h.spec weights <- rep(1,n) if(verbose) cat("Done.\nDiscretising...") # discretise window isrect <- is.rectangle(W) WM <- as.mask(W,dimyx=dimyx,xy=xy) insideW <- WM$m dimW <- WM$dim nr <- dimW[1] nc <- dimW[2] xstep <- WM$xstep ystep <- WM$ystep # discretise spatial locations of data points ij <- nearest.raster.point(pp$x, pp$y, WM) # z-mapping zh <- function(h,hhl) log(h)-hhl #' z map zhi <- function(z,hhl) exp(hhl+z) #' inverse z map H <- range(rep(as.vector(h.hypo.mat),2)*rep(h0fac,each=prod(dim(h.hypo.mat))),na.rm=TRUE) hhash.log <- log(mean(H)) zlim <- zh(H,hhash.log) zrange <- c(diff(rev(zlim)),diff(zlim)) # discretise bandwidth values zbreaks <- seq(zrange[1], zrange[2], length=dimz+1) zstep <- diff(zbreaks[1:2]) zvalues <- zbreaks[-1] - zstep/2 kslice <- findInterval(zh(h.spec,hhash.log),zbreaks,all.inside=TRUE) # grid coordinates (padded) xcol.pad <- WM$xcol[1] + xstep * (0:(2*nc-1)) yrow.pad <- WM$yrow[1] + ystep * 
(0:(2*nr-1)) z.pad <- zvalues[1] + zstep * (0:(2*dimz - 1)) if(verbose) cat("Done.\nForming kernel...") # set up kernel xcol.ker <- xstep * c(0:(nc-1),-(nc:1)) yrow.ker <- ystep * c(0:(nr-1),-(nr:1)) z.ker <- zstep * c(0:(dimz-1), -(dimz:1)) pixarea <- xstep * ystep kerpixvol <- xstep * ystep * zstep # calculating tapering values for z-coordinate ztap <- rep(1,2*dimz) if(taper){ # get 'zero points' to lie somewhere in the middle of the extremes beyond the raw # zrange (zrange) and within the extended zrange (zkrange) zkrange <- range(z.ker) zlo <- zkrange[1]+(zrange[1]-zkrange[1])/2 zup <- zkrange[2]-(zkrange[2]-zrange[2])/2 ztap <- pmin(taperoff(z.ker,zlo,zrange[1],"cosine"),taperoff(z.ker,zup,zrange[2],"cosine")) } weights <- weights/pixarea Kern <- array(0, dim=2*c(nr, nc, dimz)) hk <- zhi(-z.ker,hhash.log) for(k in 1:(2*dimz)) { densX.ker <- dnorm(xcol.ker, sd=hk[k]) densY.ker <- dnorm(yrow.ker, sd=hk[k]) Kern[,,k] <- outer(densY.ker, densX.ker, "*") * pixarea * ztap[k] #' z-tapering inserted here } if(verbose) cat("Done.\nTaking FFT of kernel...") # Fourier transform of kernel fK <- fft(Kern) if(verbose) cat("Done.\nDiscretising point locations...") rowfac <- factor(ij$row, levels=1:(2*nr)) colfac <- factor(ij$col, levels=1:(2*nc)) kfac <- factor(kslice, levels=1:(2*dimz)) Xpad <- tapplysum(weights, list(rowfac, colfac, kfac)) # was: # Xpad <- tsum(weights, list(rowfac, colfac, kfac)) # Xpad <- unname(unclass(Xpad)) # convolve point masses with kernel if(verbose) cat("Done.\nFFT of point locations...") fX <- fft(Xpad) if(verbose) cat("Inverse FFT of smoothed point locations...") sm <- fft(fX * fK, inverse=TRUE)/prod(dim(Xpad)) if(verbose){ cat("Done.\n") cat(paste(" [ Point convolution: maximum imaginary part=", signif(max(abs(Im(sm))),3), "]\n")) } # identify scaling values based on range of available z-coordinates; # restrict attention to those requested by 'h0fac' ei <- exp(-zvalues) eiw <- which(ei>=h0fac[1] & ei<=h0fac[2]) if(length(eiw)==0) stop("'h0fac' 
too narrow for 'dimz' -- increase either one or both") avals <- ei[eiw] bwvals <- avals*h0 hlist <- list() for(i in 1:length(avals)) hlist[[i]] <- avals[i]*h.hypo[WM,drop=FALSE] # turn each relevant layer of the returned FFT array into a pixel image, # giving the raw (un-edge-corrected) result for that scaling raw <- Re(sm)[1:nr,1:nc,1:dimz] rawlist <- list() for(i in 1:length(eiw)) rawlist[[i]] <- im(raw[,,eiw[i]],xcol=WM$xcol,yrow=WM$yrow)[W,drop=FALSE] names(rawlist) <- bwvals names(hlist) <- bwvals if(!intensity) rawlist <- lapply(rawlist,function(x) x/integral(x)) result <- list(z=rev(rawlist),q=NULL,him=rev(hlist)) WK <- elist <- NULL edgeW <- 1 # edge-correction zeta.index <- which.min((zhi(zvalues,hhash.log)-exp(hhash.log))^2) #' find slice of z corresponding to 'zero plane' (as close as possible) for placement of R^2 (works slightly better on h scale) if(edge=="uniform"){ # convolve window with kernel Wpad <- array(0, dim=2*c(nr, nc, dimz)) Wpad[1:nr, 1:nc, zeta.index] <- WM$m * pixarea #' insert W plane here if(verbose) cat("FFT of window...") fW <- fft(Wpad) if(verbose) cat("Inverse FFT of smoothed window...") WK <- fft(fW * fK, inverse=TRUE)/prod(dim(Wpad)) if(verbose){ cat("Done.\n") cat(paste(" [ Window convolution: maximum imaginary part=", signif(max(abs(Im(WK))),3), "]\n")) cat("Looking up edge correction weights...\n") } elist <- lambda <- list() # Uniform-type edge correction: edge weights associated with pixels # The loop below cycles through each bandwidth adjustment factor # computed above and extracts the edge-correction surface for each. 
# Note that $\zeta$ is still required here since the R^2 plane won't be # *exactly* at z=0 in general due to discretisation, but will be close, # and is identified as zvalues[zeta.index] for(i in 1:length(avals)){ if(verbose) cat(paste(i," ")) # adj <- avals[i] edgeW <- bwW <- hlist[[i]]#adj*h.hypo[WM, drop=FALSE] kim <- eval.im(findInterval(zvalues[zeta.index]+hhash.log-log(bwW),zbreaks,all.inside=TRUE)) iim <- as.im(row(bwW),W=bwW) jim <- as.im(col(bwW),W=bwW) df <- pairs(iim,jim,kim,plot=FALSE) edgeW[] <- Re(WK)[as.matrix(df)]/pixarea elist[[i]] <- edgeW lambda[[i]] <- rawlist[[i]]/edgeW } # Construct a list of images, with names reflecting the global bandwidth scaling if(!intensity) lambda <- lapply(lambda,function(x) x/integral(x)) names(elist) <- names(lambda) <- bwvals result$q <- rev(elist) result$z <- rev(lambda) } if(verbose) cat("\n") # if(taper) result$ztap <- cbind(z.ker,ztap) # For testing/diagnostics result$h <- h.spec result$h0range <- range(bwvals) result$gamma <- gamma result$geometric <- gs result$pp <- pp result$hp <- hp result$h0 <- h0 result <- result[c("z","h0","h0range","hp","h","him","q","gamma","geometric","pp")] class(result) <- "msden" return(result) } sparr/R/adens.R0000644000176200001440000000511714012076542013024 0ustar liggesuserspoint_image_by_bw <- function(bw_cat, bw, points, weights, WM) { # bw is the binned bandwidths evaluated at the points # bw_cat is the unique, sorted list of bins # We want to create a surface per bin, with each cell containing the # (weighted) sum of points that use that bin. # We can do this efficiently by iterating over the points once, and just # adding each point to the appropriate surface. # Create a map from the bandwidths for each point to our bandwidth categories bw_map <- match(bw, bw_cat) # Create output matrices for each surface. 
We create them twice as large # so they are padded all ready for the FFT that is coming later im_bw <- vector('list', length(bw_cat)) for (i in 1:length(bw_cat)) { im_bw[[i]] <- matrix(0, WM$dim[1]*2, WM$dim[2]*2) } # Iterate over the points, using bw_map to map point to surface, and fill them in for (i in 1:length(bw_map)) { cat = bw_map[i] x = points$row[i] y = points$col[i] im_bw[[cat]][x,y] = im_bw[[cat]][x,y] + weights[i] } im_bw } adens <- function(x,bwim,bwpts,resolution,edge,diggle,weights,intensity,hstep,qstep,qres,verbose){ n <- npoints(x) hc <- gethcats(bwpts,step=hstep) hu <- sort(unique(hc)) U <- length(hu) W <- Window(x) WM <- as.mask(W,dimyx=rep(resolution,2)) insideW <- WM$m res2 <- 2*resolution resseq <- 1:resolution if(edge){ edg <- edgeh(bwim,pres=qres,tres=resolution,step=qstep,W=W) } else { edg <- im(matrix(1,resolution,resolution),xcol=WM$xcol,yrow=WM$yrow) } if(is.null(weights)) weights <- rep(1,n) if(edge&&diggle) digw <- 1/safelookup(edg,x,warn=FALSE) else digw <- rep(1,n) use_fftw <- fftw_available() weights <- weights*digw imlist <- point_image_by_bw(hu, hc, nearest.raster.point(x$x,x$y,w=WM), weights, WM) xcol.pad <- WM$xcol[1]+WM$xstep*(0:(resolution-1)) yrow.pad <- WM$yrow[1]+WM$ystep*(0:(resolution-1)) len.pad <- res2^2 resultlist <- list() result <- matrix(0,resolution,resolution) if(verbose) pb <- txtProgressBar(0,U) for(i in 1:U){ fK <- kernel2d_fft(hu[i], WM$xstep, WM$ystep, resolution) fZ <- fft2d(imlist[[i]],fftw=use_fftw) sm <- fft2d(fZ*fK,inverse=TRUE,fftw=use_fftw)/len.pad smo <- Re(sm[resseq,resseq]) resultlist[[i]] <- im(smo,xcol.pad,yrow.pad) result <- result + smo if(verbose) setTxtProgressBar(pb,i) } if(verbose) close(pb) result[!insideW] <- NA result <- im(result,xcol=WM$xcol,yrow=WM$yrow) if(edge && !diggle) result <- result/edg if(!intensity) result <- result/integral(result) return(list(result=result,rlist=resultlist,edg=edg)) }sparr/R/boot.opt.spatial.adapt.R0000644000176200001440000000341414012076542016220 0ustar 
# (tar archive residue) liggesusers
# Internal: bootstrap optimisation of the global bandwidth for adaptive
# spatial density estimation. Simulates N point patterns from the reference
# density 'refden', fits a multi-scale adaptive estimate to each, and records
# the integrated squared error (ISE) against 'refden' at B global bandwidths
# spanning the available h0 range. Returns the spline-smoothed ISE minimiser
# and the raw (h0, mean ISE) matrix. Supports serial or foreach-parallel runs.
boot.opt.spatial.adapt <- function(pp,h0ref,h0fac,hp,edg,refden,N,B,res,dimz,verbose,parallelise,...){
  if(is.na(parallelise)){
    isemat <- matrix(NA,N,B)
    if(verbose) pb <- txtProgressBar(0,N)
    for(i in 1:N){
      tempdata <- rimpoly(pp$n,refden,Window(pp))
      tempadapt <- multiscale.density(tempdata,h0=h0ref,hp=hp,h0fac=h0fac,edge=edg,resolution=res,dimz=dimz,verbose=FALSE,...)
      # NOTE(review): h0seq is recomputed per replicate but only the final
      # iteration's copy is paired with colMeans below; this is safe only if
      # tempadapt$h0range is identical across replicates (it is determined by
      # h0ref*h0fac) -- confirm.
      h0seq <- seq(tempadapt$h0range[1],tempadapt$h0range[2],length=B)
      for(j in 1:B){
        bj <- multiscale.slice(tempadapt,h0seq[j])
        isemat[i,j] <- integral((bj$z-refden)^2)
      }
      if(verbose) setTxtProgressBar(pb,i)
    }
    if(verbose) close(pb)
    resultmat <- cbind(h0seq,colMeans(isemat))
  } else {
    totcor <- detectCores()
    if(parallelise>totcor) stop("Parallel cores requested exceeds available count")
    if(verbose) cat(paste(" --bootstrapping on",parallelise,"/",totcor,"cores--\n"))
    registerDoParallel(cores=parallelise)
    isemat <- foreach(i=1:N,.packages=c("sparr","spatstat"),.combine=rbind) %dopar% {
      tempdata <- rimpoly(pp$n,refden,Window(pp))
      tempadapt <- multiscale.density(tempdata,h0=h0ref,hp=hp,h0fac=h0fac,edge=edg,resolution=res,dimz=dimz,verbose=FALSE,...)
      h0seq <- seq(tempadapt$h0range[1],tempadapt$h0range[2],length=B)
      isevec <- rep(NA,B)
      for(j in 1:B){
        bj <- multiscale.slice(tempadapt,h0seq[j])
        isevec[j] <- integral((bj$z-refden)^2)
      }
      return(rbind(isevec,h0seq))
    }
    if(verbose) cat("Done.\n")
    # Each worker returned rbind(isevec,h0seq): odd rows are ISE vectors,
    # even rows are (identical) bandwidth sequences.
    h0seq <- isemat[2,]
    isemat <- isemat[seq(1,2*N-1,2),]
    resultmat <- cbind(h0seq,colMeans(isemat))
  }
  #old return val: resultmat[which.min(resultmat[,2]),1][1]
  rs <- spline(resultmat)
  return(list(h=rs$x[which.min(rs$y)],mat=resultmat))
}
# sparr/R/BAM.single.R0000644000176200001440000000353014012076542013606 0ustar liggesusers
# Internal: single evaluation of the bootstrap asymptotic MISE (BAM) objective
# for the relative risk function at common bandwidth 'h'. 'BP' is a list of
# precomputed quantities (case/control patterns, FFT of the padded window
# mask, kernel evaluation grids, curvature surfaces fdd/gdd, etc.).
BAM.single <- function(h,edge,BP){
  cases <- BP$cas
  controls <- BP$con
  erode <- BP$erode
  xyin <- BP$xyin
  res <- BP$res
  kpa <- BP$kpa
  xk <- BP$xk
  yk <- BP$yk
  lp <- BP$lp
  fM <- BP$fM
  WM <- BP$WM
  fdd <- im(matrix(BP$fdd,res,res),xcol=WM$xcol,yrow=WM$yrow)
  gdd <- im(matrix(BP$gdd,res,res),xcol=WM$xcol,yrow=WM$yrow)
  # Edge-correction surface: FFT convolution of a Gaussian kernel with
  # bandwidth hfac*h against the window indicator.
  # BUG FIX: the kernel must use the scaled bandwidth hfp = hfac*h; the
  # original used sd=h, silently ignoring 'hfac' so that qb(sqrt(0.5))
  # (the h/sqrt(2) convolution needed for the R(K) term) equalled qb(1).
  # The sibling implementation in tol.asy.fix.R uses sd=hfp.
  qb <- function(hfac){
    hfp <- hfac*h
    densX.ker <- dnorm(xk,sd=hfp)
    densY.ker <- dnorm(yk,sd=hfp)
    Kern <- outer(densY.ker,densX.ker,"*")*kpa
    con <- fft(fM*fft(Kern), inverse=TRUE)/lp
    qhz <- im(Mod(con[1:res,1:res]),xcol=WM$xcol,yrow=WM$yrow)
    qhz[qhz>1] <- 1
    qhz[!WM$m] <- NA
    return(qhz)
  }
  qq <- qb(1)
  fd <- density.ppp(cases,sigma=h,dimyx=res,positive=TRUE,edge=FALSE)
  gd <- density.ppp(controls,sigma=h,dimyx=res,positive=TRUE,edge=FALSE)
  if(edge){
    fd <- fd/qq
    gd <- gd/qq
    qq <- as.vector(as.matrix(qq))
    qq2 <- as.vector(as.matrix((1/(4*pi))*qb(sqrt(0.5))))
    qq[!xyin] <- NA
    qq2[!xyin] <- NA
    qqi <- im(matrix(qq,res,res),xcol=WM$xcol,yrow=WM$yrow)
    qq2i <- im(matrix(qq2,res,res),xcol=WM$xcol,yrow=WM$yrow)
    rk <- integral(qq2i)/(qqi^2*h^2) ### now using 'image' qq
  } else {
    # No edge correction: R(K) for the bivariate Gaussian is 1/(4*pi).
    rk <- rep(1/(4*pi),res^2)
    rk[!xyin] <- NA
    rk <- im(matrix(rk,res,res),xcol=WM$xcol,yrow=WM$yrow)
  }
  fd <- fd/integral(fd)
  gd <- gd/integral(gd)
  # RK <- function(xs,ys,h,W,ca,xyin){
  #   qres <- qhzfunc(xs,ys,"gaus",W,h)
  #   qres$qhz[!xyin] <- NA
  #   qres$qhz_sq[!xyin] <- NA
  #   return(sum(qres$qhz_sq*ca,na.rm=T)/(qres$qhz^2*h^2))
  # }
  # Asymptotic expansion: variance term (zeta1/h^2) + squared bias term.
  return(zeta1(fd,gd,npoints(cases),npoints(controls),rk)/h^2 + 0.5*h^4*zeta2(fd,gd,fdd,gdd))
}
# Variance component of the asymptotic MISE of the log-relative risk.
zeta1 <- function(fh,gh,n1,n2,rk){
  return(integral(rk/fh)/n1+integral(rk/gh)/n2)
}
# Bias component of the asymptotic MISE (depends on density curvatures).
zeta2 <- function(fh,gh,fdd,gdd){
  return(integral(fdd^2/fh^2)/2-integral((fdd*gdd)/(fh*gh))+integral(gdd^2/gh^2)/2)
}
# sparr/R/tolerance.R0000644000176200001440000002070214012423237013700 0ustar liggesusers
#' Tolerance by \emph{p}-value surfaces
#'
#' Calculates a \emph{p}-value surface based on asymptotic theory or
#' Monte-Carlo (MC) permutations describing the extremity of risk given a fixed
#' or adaptive kernel-smoothed density-ratio, allowing the drawing of
#' \emph{tolerance contours}.
#'
#' This function implements developments in Hazelton and Davies (2009) (fixed)
#' and Davies and Hazelton (2010) (adaptive) to compute pointwise
#' \emph{p}-value surfaces based on asymptotic theory of kernel-smoothed
#' relative risk surfaces. Alternatively, the user may elect to calculate the
#' \emph{p}-value surfaces using Monte-Carlo methods (see Kelsall and Diggle,
#' 1995). Superimposition upon a plot of the risk surface contours of these
#' \emph{p}-values at given significance levels (i.e. ``tolerance contours'')
#' can be an informative way of exploring the statistical significance of the
#' extremity of risk across the defined study region.
#'
#' Implementation of the Monte-Carlo method simply involves random allocation of case/control marks and
#' re-estimation of the risk surface \code{ITER} times, against which the
#' original estimate is compared. While not dependent on asymptotic theory, it is
#' computationally expensive, and it has been suggested that it might have some
#' undesirable practical consequences in certain settings (Hazelton and Davies,
#' 2009). When performing the MC simulations, the same global (and pilot, if
#' necessary) bandwidths and edge-correction regimens are employed as were used
#' in the initial density estimates of the observed data.
#' With regard to
#' arguments to be passed to internal calls of \code{\link{risk}}, the user
#' should take care to use \code{...} to set the \code{epsilon} value to match
#' that which was used in creation of the object passed to \code{rs} (if this
#' was set to a non-default value). Furthermore, if performing MC simulations
#' for the adaptive relative risk function, the function borrows the value of
#' the \code{beta} argument to speed things up via partitioning, and the user
#' should additionally access \code{...} to set the same \code{pilot.symmetry}
#' value as was used for creation of the object passed to \code{rs}, in the
#' same way as for any non-default use of \code{epsilon}. This will ensure the
#' simulations are all performed under the same conditions as were used to estimate the original risk
#' function.
#'
#'
#'
#'
#' @param rs An object of class \code{\link{rrs}} giving the estimated relative
#' risk function for which to calculate the \emph{p}-value surface.
#' @param method A character string specifying the method of calculation.
#' \code{"ASY"} (default) instructs the function to compute the \emph{p}-values
#' using asymptotic theory. \code{"MC"} computes the values by random
#' permutations of the data. See `Details'.
#' @param ref.density Required if \code{rs} is based on fixed-bandwidth
#' estimates of the case and control densities and \code{method = "ASY"}.
#' Either a pixel \code{\link[spatstat.geom]{im}}age or an object of class
#' \code{\link{bivden}} giving the reference density to use in asymptotic
#' formulae. May be unnormalised. Ignored if \code{rs} is based on adaptive
#' kernel estimates or if \code{method = "MC"}.
#' @param beta A numeric value \eqn{0 <} \code{beta} \eqn{< 1} giving the
#' fineness of the adaptive bandwidth partitioning to use for calculation of
#' the required quantities for asymptotic adaptive \emph{p}-value surfaces.
#' Smaller values provide more accurate bandwidth bins at the cost of
#' additional computing time, see Davies and Baddeley (2018); the default is
#' sensible in most cases. Ignored if \code{rs} is based on fixed-bandwidth
#' kernel estimates.
#' @param ITER Number of iterations for the Monte-Carlo permutations. Ignored
#' if \code{method = "ASY"}.
#' @param parallelise Numeric argument to invoke parallel processing, giving
#' the number of CPU cores to use when \code{method = "MC"}. Experimental. Test
#' your system first using \code{parallel::detectCores()} to identify the
#' number of cores available to you.
#' @param verbose Logical value indicating whether to print function progress
#' during execution.
#' @param ... Additional arguments to be passed to \code{\link{risk}} when
#' \code{method = "MC"}. While most information needed for the MC repetitions
#' is implicitly gleaned from the object passed to \code{rs}, this ellipsis is
#' typically used to set the appropriate \code{epsilon} and
#' \code{pilot.symmetry} values for the internal calls to \code{\link{risk}}.
#'
#' @return A pixel \code{\link[spatstat.geom]{im}}age of the estimated
#' \emph{p}-value surface.
#'
#' @note The returned \emph{p}-values are geared so that ``smallness''
#' corresponds to statistical significance of elevated risk, that is, an
#' upper-tailed test. The complement of the \emph{p}-values will yield
#' significance of reduced risk; a lower-tailed test. When using
#' \code{\link{tol.contour}}, the user can control what type of contours to
#' display.
#'
#' @author T. M. Davies
#'
#' @references
#'
#' Davies, T.M. and Baddeley A. (2018), Fast computation of
#' spatially adaptive kernel estimates, \emph{Statistics and Computing}, \bold{28}(4), 937-956.
#'
#' Davies, T.M. and Hazelton, M.L. (2010), Adaptive kernel estimation of spatial relative
#' risk, \emph{Statistics in Medicine}, \bold{29}(23) 2423-2437.
#'
#' Davies, T.M., Jones, K. and Hazelton, M.L.
#' (2016), Symmetric adaptive smoothing regimens for estimation of the spatial
#' relative risk function, \emph{Computational Statistics & Data Analysis},
#' \bold{101}, 12-28.
#'
#' Hazelton, M.L. and Davies, T.M. (2009), Inference based on kernel estimates
#' of the relative risk function in geographical epidemiology,
#' \emph{Biometrical Journal}, \bold{51}(1), 98-109.
#'
#' Kelsall, J.E. and Diggle, P.J. (1995), Kernel estimation of relative risk, \emph{Bernoulli},
#' \bold{1}, 3-16.
#'
#' @examples
#'
#' \donttest{
#'
#' data(pbc)
#' h0 <- LSCV.risk(pbc,method="hazelton");h0
#' pbccas <- split(pbc)[[1]]
#' pbccon <- split(pbc)[[2]]
#'
#' # ASY
#' riskfix <- risk(pbc,h0=h0)
#' fixtol1 <- tolerance(riskfix,ref.density=density(pbc,OS(pbc)))
#'
#' riskada <- risk(pbc,h0=h0,adapt=TRUE,hp=NS(pbc),pilot.symmetry="pooled",davies.baddeley=0.025)
#' adatol1 <- tolerance(riskada)
#'
#' par(mfrow=c(1,2))
#' plot(riskfix)
#' tol.contour(fixtol1,levels=c(0.1,0.05,0.01),lty=3:1,add=TRUE)
#' plot(riskada)
#' tol.contour(adatol1,levels=c(0.1,0.05,0.01),lty=3:1,add=TRUE)
#'
#'
#' # MC
#' fixtol2 <- tolerance(riskfix,method="MC",ITER=200)
#' adatol2 <- tolerance(riskada,method="MC",ITER=200,parallelise=2) # ~90secs with parallelisation
#' par(mfrow=c(1,2))
#' plot(riskfix)
#' tol.contour(fixtol2,levels=c(0.1,0.05,0.01),lty=3:1,add=TRUE)
#' plot(riskada)
#' tol.contour(adatol2,levels=c(0.1,0.05,0.01),lty=3:1,add=TRUE)
#' }
#'
#'
#' @export
tolerance <- function(rs, method = c("ASY", "MC"), ref.density = NULL, beta = 0.025, ITER = 100, parallelise = NULL, verbose = TRUE, ...){
  if(!inherits(rs,"rrs")) stop("'rs' argument must be of class \"rrs\"")
  meth <- method[1]
  # Adaptive estimates carry a hypothetical-bandwidth image in $him;
  # both case and control components must agree on fixed vs adaptive.
  adaf <- !is.null(rs$f$him)
  adag <- !is.null(rs$g$him)
  ada <- adaf + adag
  if(ada==1) stop("case/control smoothing regimens (fixed or adaptive) must be identical")
  if(meth=="ASY"){
    if(ada==2){
      psurf <- tol.asy.ada(rs$f,rs$g,beta,verbose)$p
    } else {
      if(is.null(ref.density)) stop("must supply 'ref.density' for fixed-bandwidth asymptotic tolerance contours")
      if((!inherits(ref.density,"bivden"))&&(!inherits(ref.density,"im"))) stop("'ref.density' must be of class \"bivden\" or \"im\"")
      if(is.im(ref.density)){
        ref.density <- list(z=ref.density,q=NULL)
        # was:
        # rq <- ref.density
        # rq$v[!is.na(rq$v)] <- 1
        # ref.density <- list(z=ref.density,q=rq)
      }
      # Normalise the reference density to integrate to 1.
      ref.density$z <- ref.density$z/integral(ref.density$z)
      if(!compatible(rs$f$z,rs$g$z,ref.density$z)) stop("incompatible 'ref.density'... must be evaluated on domain identical to case/control densities")
      psurf <- tol.asy.fix(rs$f,rs$g,ref.density,verbose)$p
    }
  } else if(meth=="MC"){
    ITER <- checkit(ITER,"'ITER'")
    if(ada==2){
      psurf <- im(tol.mc.ada(rs,round(ITER),parallelise,verbose,davies.baddeley=beta,...),xcol=rs$rr$xcol,yrow=rs$rr$yrow)
    } else {
      psurf <- im(tol.mc.fix(rs,round(ITER),parallelise,verbose,...),xcol=rs$rr$xcol,yrow=rs$rr$yrow)
    }
  } else stop("invalid 'method' argument")
  return(psurf)
}
# sparr/R/tol.asy.ada.R0000644000176200001440000000741514012076542014052 0ustar liggesusers
# Internal: asymptotic p-value surface for an adaptive-bandwidth relative risk
# estimate (Davies and Hazelton, 2010). Computes edge-correction surfaces via
# bandwidth-partitioned FFT convolutions (qb; Davies-Baddeley binning) or by
# exact per-pixel evaluation (qd) when beta is NA.
tol.asy.ada <- function(f,g,beta,verbose=FALSE){
  if(verbose) cat("Initialising window...")
  if(!compatible(f$z,g$z)) stop("incompatible images 'f' and 'g' -- kernel estimates must be evaluated on identical domains")
  M <- Window(f$z)
  inside <- M$m
  pres <- nrow(inside)
  res2 <- 2*pres
  resseq <- 1:pres
  # Zero-padded FFT grids for kernel*window convolution.
  xcol.ker <- M$xstep*c(0:(pres-1),-rev(resseq))
  yrow.ker <- M$ystep*c(0:(pres-1),-rev(resseq))
  kerpixarea <- M$xstep*M$ystep
  len.pad <- res2^2
  Mpad <- matrix(0, ncol=2*pres, nrow=2*pres)
  Mpad[1:pres, 1:pres] <- inside
  fM <- fft(Mpad)
  # If case and control share the same bandwidth surface, edge factors can be
  # shared between them.
  symmetric <- all(f$him==g$him,na.rm=TRUE)
  # NOTE(review): scalar-condition ifelse() here deliberately yields a
  # scalar-ish value used only to flag a missing edge-correction surface;
  # do not "fix" to if/else without checking is.na() below stays scalar.
  fq <- ifelse(is.null(f$q)||is.vector(f$q),NA,f$q)
  gq <- ifelse(is.null(g$q)||is.vector(g$q),NA,g$q)
  fqn <- is.na(fq)
  gqn <- is.na(gq)
  needq <- FALSE
  if(fqn||gqn){
    if(!symmetric){
      needq <- TRUE
    } else {
      if(fqn&&!gqn){
        fq <- gq
      } else if(gqn&&!fqn){
        gq <- fq
      } else {
        needq <- TRUE
      }
    }
  }
  # Binned edge-correction surface at bandwidth factor 'hfac' using the
  # Davies-Baddeley quantile partition of the bandwidth image.
  qb <- function(oo,hfac,callno){
    hfp <- hfac*oo$him
    hfp$v[is.na(hfp$v)] <- max(hfp,na.rm=TRUE)
    hypoQ <- unique(quantile(hfp[inside],seq(0,1,beta),na.rm=TRUE))
    # NOTE(review): 'hypocen' (bin centres) is computed but unused; the
    # convolutions below use the bin BREAKS hypoQ[i] as kernel sd. Confirm
    # against Davies & Baddeley (2018) whether centres were intended.
    hypocen <- hypoQ[-length(hypoQ)]+diff(hypoQ/2)
    corrQ <- as.numeric(cut(as.vector(as.matrix(hfp)),breaks=hypoQ,include.lowest=TRUE))
    if(verbose){
      if(callno==1) cat("Convolving bandwidth-categorised kernels with window:\n --pass 1...")
      else cat(paste(" --pass ",callno,"...",sep=""))
    }
    qhz <- rep(NA,pres^2)
    for(i in 1:length(hypocen)){
      densX.ker <- dnorm(xcol.ker,sd=hypoQ[i])
      densY.ker <- dnorm(yrow.ker,sd=hypoQ[i])
      Kern <- outer(densY.ker,densX.ker,"*")*kerpixarea
      con <- fft(fM*fft(Kern), inverse=TRUE)/len.pad
      edg <- im(Mod(con[1:pres,1:pres]),xcol=oo$z$xcol,yrow=oo$z$yrow)
      qhz[which(corrQ==i)] <- as.vector(as.matrix(edg))[which(corrQ==i)]
    }
    if(verbose) cat("Done.\n")
    qhz[qhz>1] <- 1
    qhz <- im(matrix(qhz,pres,pres),xcol=oo$z$xcol,yrow=oo$z$yrow)
    qhz[!inside] <- NA
    return(qhz)
  }
  # Exact (slow) per-pixel edge-correction surface; used when beta is NA.
  qd <- function(oo,hfac,callno){
    evalxy <- as.matrix(expand.grid(oo$z$xcol,oo$z$yrow))
    notin <- !inside.owin(x=evalxy[,1],y=evalxy[,2],w=M)
    h.hypo.mat <- as.matrix(hfac*oo$him)
    if(verbose){
      if(callno==1) cat("Processing pixel-by-pixel factors:\n --pass 1...")
      else cat(paste(" --pass ",callno,"...",sep=""))
    }
    qhz <- rep(NA,pres^2)
    for(i in 1:nrow(evalxy)){
      ht <- h.hypo.mat[which(oo$z$yrow==evalxy[i,2]),which(oo$z$xcol==evalxy[i,1])]
      if(is.na(ht)) next
      gxy <- ht^(-2)*(exp(-0.5*rowSums((cbind(evalxy[,1]-evalxy[i,1],evalxy[,2]-evalxy[i,2])/ht)^2))/(2*pi))
      gxy[notin] <- NA
      qhz[i] <- integral(im(matrix(gxy,pres,pres,byrow=TRUE),xcol=oo$z$xcol,yrow=oo$z$yrow))
    }
    if(verbose) cat("Done.\n")
    return(im(matrix(qhz,pres,pres,byrow=TRUE),xcol=oo$z$xcol,yrow=oo$z$yrow))
  }
  # Assemble edge factors q (bandwidth h) and q2 (bandwidth h/sqrt(2)) for a
  # single density object.
  qob <- function(oo){
    if(!is.na(beta)){
      q <- oo$q
      q2 <- qb(oo,sqrt(0.5),callno)
      if(needq) q <- qb(oo,1,callno+1)
    } else {
      q <- oo$q
      q2 <- qd(oo,sqrt(0.5),callno)
      if(needq) q <- qd(oo,1,callno+1)
    }
    return(list(q=q,q2=q2))
  }
  if(verbose) cat("Done.\n")
  callno <- 1
  qqf <- qqg <- qob(f)
  if(!symmetric){
    callno <- 2 + needq
    qqg <- qob(g)
  }
  fk2 <- (1/(4*pi))*as.matrix(qqf$q2)
  gk2 <- (1/(4*pi))*as.matrix(qqg$q2)
  S1rzK <- (1/as.matrix(qqf$q)^2)*(2*fk2 + 0.5*fk2)
  S2rzK <- (1/as.matrix(qqg$q)^2)*(2*gk2 + 0.5*gk2)
  # Standardised log-risk surface and its upper-tailed p-values.
  numerator <- as.matrix(log(f$z)-log(g$z))
  denominator <- sqrt(((S1rzK*f$gamma^2)/(length(f$h)*f$h0^2))+((S2rzK*g$gamma^2)/(length(g$h)*g$h0^2)))
  zstandard <- numerator/denominator
  P <- pnorm(zstandard,lower.tail=FALSE)
  return(list(num=numerator,den=denominator,zst=zstandard,p=im(P,xcol=f$z$xcol,yrow=f$z$yrow),s1=S1rzK,s2=S2rzK))
}
# sparr/R/processscaler.R0000644000176200001440000000313114012076542014574 0ustar liggesusers
# Internal: validate/resolve the 'scaler' argument for spatial bandwidth
# selectors. Numeric input is checked for positivity (first element only);
# character input selects a data-driven scale: pooled IQR/1.34, pooled sd,
# root mean variance, or the Silverman min(sd, IQR/1.34) rule.
processscaler <- function(s,pp){
  if(is.numeric(s)){
    if(length(s)>1){
      s <- s[1]
      warning("'scaler' if numeric must be of length 1. Using first value only.")
    }
    if(s<=0) stop("'scaler' must be positive")
  } else if(is.character(s)){
    if(length(s)>1){
      s <- s[1]
    }
    s <- switch(s,IQR=mean(c(IQR(pp$x)/1.34,IQR(pp$y)/1.34)),
                sd=mean(c(sd(pp$x),sd(pp$y))),
                var=sqrt(mean(c(var(pp$x),var(pp$y)))),
                silverman=min(mean(c(sd(pp$x),sd(pp$y))),mean(c(IQR(pp$x)/1.34,IQR(pp$y)/1.34))),NA)
    if(is.na(s)){
      stop("'scaler' character string must be one of \"silverman\", \"IQR\", \"sd\", or \"var\"")
    }
  } else {
    stop("Invalid 'scaler' type")
  }
  return(s)
}
# Internal: spatiotemporal analogue of processscaler(); returns a length-2
# vector c(spatial scale, temporal scale).
processscaler.st <- function(s,pp,tt){
  if(is.numeric(s)){
    if(length(s)==1){
      s <- c(s,s)
    } else if(length(s)>1){
      s <- s[1:2]
    } else {
      stop("If numeric, 'scaler' must be of length 1 or 2")
    }
    # BUG FIX: s has length 2 here, so the condition must be reduced with
    # any(); a bare length-2 condition is an error from R 4.2 onward (and
    # silently checked only the first element before that).
    if(any(s<=0)) stop("'scaler' must be wholly positive")
  } else if(is.character(s)){
    if(length(s)>1){
      s <- s[1]
    }
    s <- switch(s,IQR=c(mean(c(IQR(pp$x)/1.34,IQR(pp$y)/1.34)),IQR(tt)/1.34),
                sd=c(mean(c(sd(pp$x),sd(pp$y))),sd(tt)),
                var=c(sqrt(mean(c(var(pp$x),var(pp$y)))),sd(tt)),
                silverman=c(min(mean(c(sd(pp$x),sd(pp$y))),mean(c(IQR(pp$x)/1.34,IQR(pp$y)/1.34))),min(sd(tt),IQR(tt)/1.34)),NA)
    if(any(is.na(s))){
      stop("'scaler' character string must be one of \"silverman\", \"IQR\", \"sd\", or \"var\"")
    }
  } else {
    stop("Invalid 'scaler' type")
  }
  return(s)
}
# sparr/R/tol.contour.R0000644000176200001440000000412014012076542014211
0ustar liggesusers#' Plot tolerance contours #' #' Draw contours based on a \emph{p}-value matrix. #' #' Note that no checks on the numeric content of \code{pim} are made. The #' function assumes the pixel \code{\link[spatstat.geom]{im}}age of \emph{p}-values #' in \code{pim} is supplied with respect to an upper-tailed test for elevated #' risk (this is exactly the way the \emph{p}-value surface is returned when #' \code{\link{tolerance}} is used). This is important if one makes subsequent #' use of \code{test}, which manipulates the \emph{p}-values to draw at desired #' significance \code{levels}. #' #' @param pim A pixel \code{\link[spatstat.geom]{im}}age of \emph{p}-values, #' typically obtained from a call to \code{\link{tolerance}}, computed with #' respect to a test for elevated risk. #' @param test An optional character string giving the type of manipulation to #' be applied to the \emph{p}-values, corresponding to a test for significantly #' elevated risk (\code{"upper"}; default); for reduced risk (\code{"lower"}); #' or for both (\code{"two-sided"}). #' @param ... Additional arguments to be passed to \code{\link{contour}}. #' Commonly used options include \code{add} (to superimpose the contours upon #' an existing plot); \code{levels} (to control the specific significance #' levels at which to delineate the \emph{p}-values); and \code{lty} or #' \code{lwd} for aesthetics. #' #' @return Opens a new graphics device and displays a \code{\link{contour}} #' plot if \code{add = FALSE}, otherwise adds the contours to the plot in the #' existing active graphics device. #' #' @author T. M. 
#' Davies
#'
#' @examples
#'
#' # See ?tolerance
#'
#' @export
tol.contour <- function(pim, test = c("upper", "lower", "two-sided"), ...){
  if(!inherits(pim,"im")){
    stop("'pim' must be an object of class 'im', typically arising from a call to 'tolerance'")
  }
  # Transpose so that rows/columns line up with the x/y grids contour() expects.
  tt <- t(as.matrix(pim))
  test <- test[1]
  if(test=="lower"){
    tt <- 1-tt              # complement: significance of reduced risk
  } else if(test=="two-sided"){
    tt <- 2*pmin(tt,1-tt)   # two-sided p-value adjustment
  } else if(test!="upper"){
    stop("invalid 'test'")
  }
  contour(x=pim$xcol,y=pim$yrow,z=tt,...)
}
# sparr/R/NS.spattemp.R0000644000176200001440000000122614012076542014103 0ustar liggesusers
#' @rdname NS
#' @export
NS.spattemp <- function(pp, tt = NULL, nstar = "npoints", scaler = c("silverman", "IQR", "sd", "var")){
  if(!inherits(pp,"ppp")) stop("data argument 'pp' must be of spatstat class \"ppp\"; see ?ppp")
  if(is.null(tt)) tt <- marks(pp)
  tt <- checktt(tt)
  if(length(tt)!=npoints(pp)) stop(paste("Length of temporal vector does not match number of spatial observations\n npoints(pp) = ",npoints(pp),"; length(tt) = ",length(tt),sep=""))
  scaler <- processscaler.st(scaler,pp,tt)
  nstar <- processnstar.st(nstar,pp)
  # Normal-scale rules: spatial h ~ n^(-1/6) (2D), temporal lambda ~ 0.9 n^(-1/5) (1D).
  result <- scaler*c(nstar^(-1/6),0.9*nstar^(-1/5))
  names(result) <- c("h","lambda")
  return(result)
}
# sparr/R/print.stden.R0000644000176200001440000000044314012076542014177 0ustar liggesusers
#' @rdname printsparr
#' @method print stden
#' @export
print.stden <- function(x,...){
  cat("Spatiotemporal Kernel Density Estimate\n\n")
  cat("Bandwidths\n  h =",round(x$h,4),"(spatial)\n  lambda =",round(x$lambda,4),"(temporal)\n\n")
  cat("No. of observations\n ",npoints(x$pp),"\n")
}
# sparr/R/LSCV.risk.single.R0000644000176200001440000000235714012076542014733 0ustar liggesusers
# Internal: single evaluation of the least-squares (Kelsall-Diggle) or
# Hazelton cross-validation objective for the relative risk function at
# common case/control bandwidth 'h'. Returns NA for invalid/degenerate h.
LSCV.risk.single <- function(h,cases,controls,res,edge,hazey){
  if(h<=0) return(NA)
  temp.case <- density.ppp(cases,sigma=h,edge=edge,dimyx=res,positive=TRUE)
  temp.con <- density.ppp(controls,sigma=h,edge=edge,dimyx=res,positive=TRUE)
  tcase.int <- integral(temp.case)
  tcon.int <- integral(temp.con)
  temp.case <- temp.case/tcase.int
  temp.con <- temp.con/tcon.int
  if(any(is.infinite(as.matrix(temp.case/temp.con)))) return(NA) ## pre-fail for infinite rr cells - both HAZE and KELDIG
  # Leave-one-out density values at the data points themselves.
  temp.case.pts <- density.ppp(cases,sigma=h,edge=edge,dimyx=res,at="points",leaveoneout=TRUE,positive=TRUE)/tcase.int
  temp.con.pts <- density.ppp(controls,sigma=h,edge=edge,dimyx=res,at="points",leaveoneout=TRUE,positive=TRUE)/tcon.int
  caseatcon <- safelookup(temp.case,controls,warn=FALSE)
  conatcase <- safelookup(temp.con,cases,warn=FALSE)
  if(any(temp.case.pts<=0)||any(temp.con.pts<=0)||any(caseatcon<=0)||any(conatcase<=0)) return(NA) ## tiny bandwidth protector
  if(!hazey) result <- 2*mean(log(caseatcon/temp.con.pts)/temp.con.pts) - 2*mean(log(temp.case.pts/conatcase)/temp.case.pts) - integral((log(temp.case)-log(temp.con))^2)
  else result <- mean((caseatcon/temp.con.pts)^2)-2*mean(temp.case.pts/conatcase)
  return(result)
}
# sparr/R/summary.rrs.R0000644000176200001440000000112714012076542014231 0ustar liggesusers
#' @rdname summarysparr
#' @method summary rrs
#' @export
summary.rrs <- function(object, ...){
  # BUG FIX: the range labels were swapped relative to the headers -- a plain
  # (all non-negative) risk surface printed "Estimated log-risk range" and a
  # log-risk surface printed "Estimated risk range". Branches now agree.
  # Also: na.rm=T replaced with na.rm=TRUE (T is reassignable).
  if(all(object$rr>=0)){
    cat("Relative Risk Function.\n\n")
    cat("Estimated risk range [",min(object$rr,na.rm=TRUE),", ",max(object$rr,na.rm=TRUE),"]\n",sep="")
  } else {
    cat("Log-Relative Risk Function.\n\n")
    cat("Estimated log-risk range\n [",min(object$rr,na.rm=TRUE),",",max(object$rr,na.rm=TRUE),"]\n",sep="")
  }
  cat("\n--Numerator (case) density--\n")
  summary.bivden(object$f)
  cat("\n--Denominator (control) density--\n")
  summary.bivden(object$g)
}
# sparr/R/summary.stden.R0000644000176200001440000000127614012076542014545 0ustar liggesusers
#' @rdname summarysparr
#' @method summary stden
#' @export
summary.stden <- function(object, ...){
  print.stden(x=object)
  W <- Window(object$pp)
  wt <- summary(W)$type
  wx <- W$xrange
  wy <- W$yrange
  cat("\nSpatial bound\n  Type: ",wt,"\n  2D enclosure: [",wx[1],", ",wx[2],"] x [",wy[1],", ",wy[2],"]\n",sep="")
  cat("\nTemporal bound\n  [",object$tlim[1],", ",object$tlim[2],"]\n",sep="")
  cat("\nEvaluation\n  ",nrow(object$z[[1]])," x ",ncol(object$z[[1]])," x ",length(object$z)," trivariate lattice\n",sep="")
  minden <- min(sapply(object$z,min,na.rm=TRUE))
  maxden <- max(sapply(object$z,max,na.rm=TRUE))
  cat("  Density range: [",minden,", ",maxden,"]\n",sep="")
}
# sparr/R/LSCV.spattemp.R0000644000176200001440000000741214012076542014335 0ustar liggesusers
#' @export
LSCV.spattemp <- function(pp,tt=NULL,tlim=NULL,sedge=c("uniform","none"),tedge=sedge,sres=64,tres=sres,parallelise=NA,start=NULL,verbose=TRUE){
  if(!inherits(pp,"ppp")) stop("data argument 'pp' must be of spatstat class \"ppp\"; see ?ppp")
  W <- Window(pp)
  n <- npoints(pp)
  sres <- checkit(sres,"'sres'")
  sedge <- checkedge(sedge,v=2)
  tedge <- checkedge(tedge,v=2)
  WM <- as.mask(W,dimyx=sres)
  inside <- WM$m
  grx <- WM$xcol
  gry <- WM$yrow
  if(is.null(tt)) tt <- marks(pp)
  tt <- checktt(tt)
  if(length(tt)!=n) stop(paste("Length of temporal vector does not match number of spatial observations\n  npoints(pp) = ",n,"; length(tt) = ",length(tt),sep=""))
  if(is.null(tlim)) tlim <- range(tt)
  tlim <- checkranin(tlim,tt,"tlim")
  if(is.null(tres)){
    # Unit-width temporal cells spanning the (rounded-out) temporal limits.
    tcw <- 1
    kt <- tlim <- c(floor(tlim[1]),ceiling(tlim[2]))
    grt <- tlim[1]:tlim[2]
    tres <- length(grt)
  } else {
    tres <- checkit(tres,"'tres'")
    tcw <- diff(tlim)/tres
    grt <- tlim[1]+0.5*tcw+(0:(tres-1))*tcw
    kt <- c(tlim[1]+0.5*tcw,tlim[2]-0.5*tcw)
  }
  # BUG FIX: the NULL check formerly sat INSIDE the !is.na() branch, which is
  # unreachable for NULL input -- is.na(NULL) has length zero, so
  # parallelise=NULL crashed in if(). Normalise NULL to NA first.
  if(is.null(parallelise)) parallelise <- NA
  if(!is.na(parallelise)){
    if(!is.numeric(parallelise)) stop("'parallelise' must be numeric")
    parallelise <- round(parallelise[1])
  }
  evalxy <- as.matrix(expand.grid(WM$xcol,WM$yrow))
  notin <- !inside.owin(x=evalxy[,1],y=evalxy[,2],w=W)
  evalxy.in <- evalxy[!notin,]
  # Starting values: oversmoothing (spatial) and Sheather-Jones (temporal).
  if(is.null(start)) start <- c(OS(pp),bw.SJ(tt))
  if(any(start<0)) stop("invalid starting values in 'start'")
  result <- optim(start,LSCV.density.spattemp.single,
                  pp=pp,tt=tt,tlim=tlim,sres=sres,
                  tres=tres,grx=grx,gry=gry,grt=grt,
                  kt=kt,inside=inside,xyin=evalxy.in,
                  xys=c(WM$xstep,WM$ystep),
                  sedge=(sedge=="uniform"),
                  tedge=(tedge=="uniform"),
                  parallelise=parallelise,
                  verbose=verbose)$par
  names(result) <- c("h","lambda")
  return(result)
}
# (Removed: a large block of commented-out, non-functional deprecated code --
# the old LSCV.spattemp.single prototype, marked "doesn't work" by its author
# -- formerly lived here. It is superseded by the optim-based path above and
# remains available in version control history.)
# sparr/R/gethcats.R0000644000176200001440000000041314012076542013526 0ustar liggesusers
# Internal: bin a vector of bandwidths 'h' into categories and return, for
# each observation, the centre of its bin. Default breaks are the quantiles
# of 'h' at probability steps of 'step'.
gethcats <- function(h,breaks=NULL,step=0.05){
  if(is.null(breaks)) breaks <- unique(as.numeric(quantile(h,seq(0,1,step))))
  hc <- as.numeric(cut(h,breaks=breaks,include.lowest=TRUE,right=TRUE))
  hcen <- breaks[-length(breaks)]+diff(breaks)/2
  return(hcen[hc])
}
# sparr/R/taperoff.R0000644000176200001440000000127214012076542013536 0ustar liggesusers
# Internal: smooth taper from 0 (at 'zeropoint') to 1 (at 'onepoint').
taperoff <- function(x, zeropoint=0, onepoint=1, type=c("smooth", "cosine")) {
  type <- match.arg(type)
  # cosine taper is standard in engineering (apparently)
  #
  # smooth taper is the pure mathematicians' favorite example
  # of a Smooth Partition of Unity
  y <- (x-zeropoint)/(onepoint - zeropoint)
  z <- switch(type,
              cosine = ifelse(y <= 0, 0,
                              ifelse(y >= 1, 1, (1 - cos(pi * y))/2)),
              smooth = ifelse(y <= 0, 0,
                              ifelse(y >= 1, 1,
                                     exp(-1/y)/(exp(-1/y) + exp(-1/(1-y))))))
  return(z)
}
# sparr/R/tol.asy.fix.R0000644000176200001440000000311614012076542014105 0ustar liggesusers
# Internal: asymptotic p-value surface for a fixed-bandwidth relative risk
# estimate (Hazelton and Davies, 2009), using a pooled/reference density.
tol.asy.fix <- function(f,g,pooled,verbose=FALSE){
  if(verbose) cat("Initialising window...")
  if(!compatible(f$z,g$z,pooled$z)) stop("incompatible images 'f', 'g', 'pooled'... kernel estimates must be evaluated on identical domains")
  M <- Window(pooled$z)
  inside <- M$m
  pres <- nrow(inside)
  res2 <- 2*pres
  resseq <- 1:pres
  # Zero-padded FFT grids for kernel*window convolution.
  xcol.ker <- M$xstep*c(0:(pres-1),-rev(resseq))
  yrow.ker <- M$ystep*c(0:(pres-1),-rev(resseq))
  kerpixarea <- M$xstep*M$ystep
  len.pad <- res2^2
  Mpad <- matrix(0, ncol=2*pres, nrow=2*pres)
  Mpad[1:pres, 1:pres] <- inside
  fM <- fft(Mpad)
  fg.h0 <- mean(c(f$h0,g$h0))
  # Edge-correction surface at bandwidth hfac*fg.h0 (note sd=hfp: the scaled
  # bandwidth IS used here, unlike the historical bug in BAM.single).
  qb <- function(oo,hfac){
    hfp <- hfac*fg.h0
    densX.ker <- dnorm(xcol.ker,sd=hfp)
    densY.ker <- dnorm(yrow.ker,sd=hfp)
    Kern <- outer(densY.ker,densX.ker,"*")*kerpixarea
    con <- fft(fM*fft(Kern), inverse=TRUE)/len.pad
    qhz <- im(Mod(con[1:pres,1:pres]),xcol=oo$z$xcol,yrow=oo$z$yrow)
    qhz[qhz>1] <- 1
    qhz[!inside] <- NA
    return(as.matrix(qhz))
  }
  if(verbose) cat("Done.\nPerforming kernel*window convolution(s)...")
  qq <- pooled$q
  if(is.null(qq)||is.vector(qq)){
    qq <- qb(pooled,1)
  }
  qq <- as.matrix(qq)
  qq2 <- (1/(4*pi))*qb(pooled,sqrt(0.5))
  if(verbose) cat("Done.\n")
  RrzK <- qq2/qq^2
  denominator <- sqrt(RrzK*(1/length(f$h)+1/length(g$h)))/(fg.h0*sqrt(as.matrix(pooled$z)))
  suppressWarnings(numerator <- as.matrix(log(f$z)-log(g$z)))
  zstandard <- numerator/denominator
  P <- pnorm(zstandard,lower.tail=FALSE)
  return(list(num=numerator,den=denominator,zst=zstandard,p=im(P,xcol=pooled$z$xcol,yrow=pooled$z$yrow),rz=RrzK))
}
# sparr/R/burk-data.R0000644000176200001440000000412114012076542013576 0ustar liggesusers
#' Burkitt's lymphoma in Uganda
#'
#' Data of the spatiotemporal locations of Burkitt's lymphoma in the Western Nile district of Uganda
#' from 1960 to 1975.
#'
#' @name burk
#' @format \code{burk} is a named list with three members:
#' \describe{
#' \item{\code{$cases}}{
#' An object of class \code{\link[spatstat.geom]{ppp}} giving the spatial locations (eastings/northings)
#' of the 188 cases of Burkitt's lymphoma recorded in individuals of various ages (mostly children); the spatial study region as a polygonal \code{\link[spatstat.geom]{owin}}; as well as the time
#' (in days since 1/1/1960) of each observation stored as the \code{marks} of the points.
#' }
#'
#' \item{\code{$cases.age}}{
#' A numeric vector of length 188 giving the age of each individual in \code{$cases}.
#' }
#'
#' \item{\code{$controls}}{
#' An object of class \code{\link[spatstat.geom]{ppp}} giving 500 \bold{artificially simulated} spatial-only
#' observations to pose as a `control' data set representing the at-risk population. The data were
#' generated from a smooth kernel estimate of the spatial margin of the cases. The similarity between the case point distribution
#' and the true at-risk population dispersion can be seen in e.g. Figure 2 of Middleton and Greenland (1954).
#'
#' }
#'
#' }
#'
#' @docType data
#'
#' @keywords data
#'
#' @source The case data were extracted from the \code{\link[splancs]{burkitt}} object of the \code{splancs} R package;
#' see \cr\cr
#' Rowlingson B. and Diggle P.J. (2017), splancs: Spatial and Space-Time Point Pattern Analysis, R
#' package version 2.01-40; \url{https://CRAN.R-project.org/package=splancs}.
#'
#' @references
#'
#' Bailey, T.C. and Gatrell, A.C. (1995), \emph{Interactive spatial data analysis}, Longman; Harlow.
#'
#' Middleton, J.F.M. and Greenland, D.J. (1954), Land and population in West Nile District, Uganda, \emph{The Geographical Journal}, \bold{120}, 446--455.
#'
#' @examples
#' data(burk)
#' summary(burk$cases)
#'
#' par(mfrow=c(1,3))
#' plot(burk$cases)
#' plot(burk$controls)
#' plot(density(marks(burk$cases)),xlim=range(marks(burk$cases)))
NULL
# sparr/R/OS.spattemp.R0000644000176200001440000000137714012076542014107 0ustar liggesusers
#' @rdname OS
#' @export
OS.spattemp <- function(pp, tt = NULL, nstar = "npoints", scaler = c("silverman", "IQR", "sd", "var")){
  # Validate inputs and resolve the temporal coordinate vector.
  if(!inherits(pp,"ppp")) stop("data argument 'pp' must be of spatstat class \"ppp\"; see ?ppp")
  if(is.null(tt)) tt <- marks(pp)
  tt <- checktt(tt)
  if(length(tt)!=npoints(pp)) stop(paste("Length of temporal vector does not match number of spatial observations\n  npoints(pp) = ",npoints(pp),"; length(tt) = ",length(tt),sep=""))
  scaler <- processscaler.st(scaler,pp,tt)
  nstar <- processnstar.st(nstar,pp)
  # Terrell oversmoothing constants for the Gaussian kernel, evaluated
  # simultaneously in d = 2 (spatial) and d = 1 (temporal) dimensions.
  rough <- c(1/(4*pi), 1/(2*sqrt(pi)))         # kernel roughness R(K) per dimension
  dims <- 2:1
  vconst <- (16*gamma((dims+8)/2)*dims*(dims+2))/((dims+8)^((dims+6)/2)*pi^(dims/2))
  bw <- scaler*(((rough*dims)/(nstar*vconst))^(1/(dims+4)))
  names(bw) <- c("h","lambda")
  return(bw)
}
# sparr/R/summary.rrst.R0000644000176200001440000000060314012076542014413 0ustar liggesusers
#' @rdname summarysparr
#' @method summary rrst
#' @export
summary.rrst <- function(object, ...){
  # A surface containing negative values is on the log-risk scale.
  onlogscale <- !all(sapply(object$rrc,min,na.rm=TRUE)>=0)
  if(onlogscale){
    cat("Spatiotemporal Log-Relative Risk Surface\n\n")
  } else {
    cat("Spatiotemporal Relative Risk Surface\n\n")
  }
  cat("--Numerator (case) density--\n")
  summary(object$f)
  cat("\n--Denominator (control) density--\n")
  summary(object$g)
}
# sparr/R/package.R0000644000176200001440000003654514012076542013332 0ustar liggesusers
#' The sparr Package: Spatial and Spatiotemporal Relative Risk
#'
#' Provides functions to estimate fixed and adaptive kernel-smoothed spatial relative
#' risk surfaces via the density-ratio method and perform subsequent inference. Fixed-bandwidth spatiotemporal density and relative risk estimation is also supported.
#'
#' @template version
#' @details
#'
#' Kernel smoothing, and the
#' flexibility afforded by this methodology, provides an attractive approach to
#' estimating complex probability density functions.
#'
#' The \emph{spatial relative risk function}, constructed as a ratio of estimated case
#' to control densities (Bithell, 1990; 1991; Kelsall and Diggle, 1995a,b), describes the variation in the
#' `risk' of the disease, given the underlying at-risk population. This is a
#' technique that has been applied successfully for mainly exploratory purposes
#' in a number of different analyses (see for example Sabel et al., 2000;
#' Prince et al., 2001; Wheeler, 2007). It has also grown in popularity in very
#' different fields that pose similarly styled research questions, such as ecology
#' (e.g. Campos and Fedigan, 2014); physiology (Davies et al., 2013); and archaeology
#' (e.g. Bevan, 2012; Smith et al. 2015).
#'
#' This package provides functions for spatial (i.e. bivariate/planar/2D) kernel density estimation
#' (KDE), implementing both fixed and `variable' or `adaptive' (Abramson, 1982)
#' smoothing parameter options. A selection of bandwidth calculators for bivariate KDE and the
#' relative risk function are provided, including one based on the maximal
#' smoothing principle (Terrell, 1990), and others involving a leave-one-out
#' cross-validation (see below). In addition, the ability to
#' construct both Monte-Carlo and asymptotic \emph{p}-value surfaces (`tolerance'
#' contours of which signal statistically significant sub-regions of extremity
#' in a risk surface - Hazelton and Davies, 2009; Davies and Hazelton, 2010) as
#' well as some visualisation tools are provided.
#'
#' Spatiotemporal estimation is also supported, largely following developments
#' in Fernando and Hazelton (2014). This includes their fixed-bandwidth kernel estimator
#' of spatiotemporal densities, relative risk, and asymptotic tolerance contours.
#' #' Key content of \code{sparr} can be broken up as follows:\cr #' #' \bold{DATASETS/DATA GENERATION} #' #' \code{\link{pbc}} a case/control planar point pattern (\code{\link[spatstat.geom:ppp]{ppp.object}}) concerning liver disease in northern #' England. #' #' \code{\link{fmd}} an anonymised (jittered) case/control spatiotemporal point pattern of the 2001 outbreak of veterinary foot-and-mouth disease in Cumbria (courtesy of the Animal and Plant Health Agency, UK). #' #' \code{\link{burk}} a spatiotemporal point pattern of Burkitt's lymphoma in Uganda; artificially simulated control data are also provided for experimentation. #' #' Also available are a number of relevant additional spatial datasets built-in to the #' \code{\link[spatstat]{spatstat}} package (Baddeley and Turner, 2005; Baddeley et al., 2015) through \code{spatstat.data}, such as #' \code{\link[spatstat.data]{chorley}}, which concerns the distribution of #' laryngeal cancer in an area of Lancashire, UK. #' #' \code{\link{rimpoly}} a wrapper function of \code{\link[spatstat.core]{rpoint}} to allow generated #' spatial point patterns based on a pixel \code{\link[spatstat.geom]{im}}age to be returned with a #' polygonal \code{\link[spatstat.geom]{owin}}.\cr #' #' #' \bold{SPATIAL} #' #' \emph{Bandwidth calculators} #' #' \code{\link{OS}} estimation of an isotropic #' smoothing parameter for fixed-bandwidth bivariate KDE, based on the #' oversmoothing principle introduced by Terrell (1990). #' #' \code{\link{NS}} #' estimation of an isotropic smoothing parameter for fixed-bandwidth bivariate #' KDE, based on the asymptotically optimal value for a normal density #' (bivariate normal scale rule - see e.g. Wand and Jones, 1995). #' #' \code{\link{LSCV.density}} a least-squares cross-validated (LSCV) estimate #' of an isotropic fixed bandwidth for bivariate, edge-corrected KDE (see e.g. Bowman and #' Azzalini, 1997). 
#' #' \code{\link{LIK.density}} a likelihood cross-validated (LIK) estimate #' of an isotropic fixed bandwidth for bivariate, edge-corrected KDE (see e.g. Silverman, 1986). #' #' \code{\link{SLIK.adapt}} an experimental likelihood cross-validation function #' for simultaneous global/pilot bandwidth selection for adaptive density estimates. #' #' \code{\link{BOOT.density}} a bootstrap approach to optimisation #' of an isotropic fixed bandwidth for bivariate, edge-corrected KDE (see e.g. Taylor, 1989). #' #' \code{\link{LSCV.risk}} Estimation of a jointly optimal, #' common isotropic case-control fixed bandwidth for the kernel-smoothed risk #' function based on the mean integrated squared error (MISE), a weighted MISE, #' or the asymptotic MISE (see respectively Kelsall and Diggle, 1995a; Hazelton, 2008; #' Davies, 2013). #' #' \emph{Density and relative risk estimation} #' #' \code{\link{bivariate.density}} kernel density #' estimate of bivariate data; fixed or adaptive smoothing. #' #' \code{\link{multiscale.density}} multi-scale adaptive kernel density #' estimates for multiple global bandwidths as per Davies and Baddeley #' (2018). #' #' \code{\link{multiscale.slice}} a single adaptive kernel estimate #' based on taking a slice from a multi-scale estimate. #' #' \code{\link{risk}} estimation of a (log) spatial relative risk function, either from data or #' pre-existing bivariate density estimates; fixed (Kelsall and Diggle, 1995a) or both asymmetric (Davies and Hazelton, 2010) and symmetric (Davies et al., 2016) adaptive estimates are possible. #' #' \code{\link{tolerance}} #' calculation of asymptotic or Monte-Carlo \emph{p}-value surfaces. 
#' #' \emph{Visualisation} #' #' \code{S3} methods of the \code{plot} function; see #' \code{\link{plot.bivden}} for visualising a single bivariate density #' estimate from \code{\link{bivariate.density}}, \code{\link{plot.rrs}} for #' visualisation of a spatial relative risk function from #' \code{\link{risk}}, or \code{\link{plot.msden}} for viewing animations of #' multi-scale density estimates from \code{\link{multiscale.density}}. #' #' \code{\link{tol.contour}} provides more flexibility for plotting and #' superimposing tolerance contours upon an existing plot of spatial relative risk (i.e. given output from #' \code{\link{tolerance}}). #' #' \emph{Printing and summarising} #' #' \code{S3} methods (\code{\link{print.bivden}}, \code{\link{print.rrs}}, #' \code{\link{print.msden}}, \code{\link{summary.bivden}}, #' \code{\link{summary.rrs}}, and \code{\link{summary.msden}}) are available for #' the bivariate density, spatial relative risk, and multi-scale adaptive density objects. #' #' #' \bold{SPATIOTEMPORAL} #' #' \emph{Bandwidth calculators} #' #' \code{\link{OS.spattemp}} estimation of an isotropic #' smoothing parameter for the spatial margin and another for the temporal margin #' for spatiotemporal densities, based on the 2D and 1D versions, respectively, of the #' oversmoothing principle introduced by Terrell (1990). #' #' \code{\link{NS.spattemp}} as above, based on the 2D and 1D versions of the #' normal scale rule (Silverman, 1986). #' #' \code{\link{LSCV.spattemp}} least-squares cross-validated (LSCV) estimates #' of scalar spatial and temporal bandwidths for edge-corrected spatiotemporal KDE. #' #' \code{\link{LIK.spattemp}} as above, based on likelihood cross-validation. #' #' \code{\link{BOOT.spattemp}} bootstrap bandwidth selection for the spatial and temporal margins; #' for spatiotemporal, edge-corrected KDE (Taylor, 1989). 
#' #' #' \emph{Density and relative risk estimation} #' #' \code{\link{spattemp.density}} fixed-bandwidth kernel density estimate of spatiotemporal data. #' #' \code{\link{spattemp.risk}} fixed-bandwidth kernel density estimate of spatiotemporal relative risk, either with a time-static or time-varying control density (Fernando and Hazelton, 2014). #' #' \code{\link{spattemp.slice}} extraction function of the spatial density/relative risk at prespecified time(s). #' #' #' #' \emph{Visualisation} #' #' \code{S3} methods of the \code{plot} function; see #' \code{\link{plot.stden}} for various options (including animation) for visualisation of a spatiotemporal density, #' and \code{\link{plot.rrst}} for viewing spatiotemporal relative risk surfaces (including animation and tolerance contour superimposition). #' #' #' \emph{Printing and summarising objects} #' #' \code{S3} methods (\code{\link{print.stden}}, \code{\link{print.rrst}}, \code{\link{summary.stden}}, and \code{\link{summary.rrst}}) are available for #' the spatiotemporal density and spatiotemporal relative risk objects respectively. #' #' #' #' #' #' @name sparr-package #' @aliases sparr-package sparr #' @docType package #' @section Dependencies: The \code{sparr} package depends upon #' \code{\link[spatstat]{spatstat}}. In particular, the user should familiarise #' themselves with \code{\link[spatstat.geom]{ppp}} objects and #' \code{\link[spatstat.geom]{im}} objects, which are used throughout. For spatiotemporal density estimation, \code{sparr} is assisted by importing from the \code{misc3d} package, and for the #' experimental capabilities involving parallel processing, \code{sparr} also #' currently imports \code{\link[doParallel]{doParallel}}, #' \code{\link[parallel]{parallel}}, and \code{\link[foreach]{foreach}}. #' #' @author T.M. Davies\cr \emph{Dept. of Mathematics & Statistics, University of #' Otago, Dunedin, New Zealand.}\cr #' J.C. 
Marshall\cr #' \emph{Institute of Fundamantal Sciences, Massey University, Palmerston North, New Zealand.}\cr #' #' Maintainer: T.M.D. \email{tdavies@@maths.otago.ac.nz} #' #' @section Citation: #' To cite use of current versions of \code{sparr} in publications or research projects please use:\cr #' #' Davies, T.M., Marshall, J.C. and Hazelton, M.L. (2018) Tutorial on kernel estimation of continuous spatial #' and spatiotemporal relative risk, \emph{Statistics in Medicine}, \bold{37}(7), 1191-1221. #' #' Old versions of \code{sparr} (<= 2.1-09) can be referenced by Davies et al. (2011) (see reference list). #' #' #' @references #' Abramson, I. (1982), On bandwidth variation in kernel estimates #' --- a square root law, \emph{Annals of Statistics}, \bold{10}(4), #' 1217-1223. #' #' Baddeley, A. and Turner, R. (2005), #' spatstat: an R package for analyzing spatial point patterns, \emph{Journal #' of Statistical Software}, \bold{12}(6), 1-42. #' #' Baddeley, A., Rubak, E. and Turner, R. (2015) \emph{Spatial Point Patterns: Methodology and Applications with R}, Chapman and Hall/CRC Press, UK. #' #' Bevan A. (2012), Spatial methods for analysing large-scale artefact inventories. \emph{Antiquity}, \bold{86}, 492-506. #' #' Bithell, J.F. (1990), An #' application of density estimation to geographical epidemiology, #' \emph{Statistics in Medicine}, \bold{9}, 691-701. #' #' Bithell, J.F. (1991), #' Estimation of relative risk function, \emph{Statistics in Medicine}, #' \bold{10}, 1745-1751. #' #' Bowman, A.W. and Azzalini, A. (1997), \emph{Applied #' Smoothing Techniques for Data Analysis: The Kernel Approach with S-Plus #' Illustrations.} Oxford University Press Inc., New York. ISBN #' 0-19-852396-3. #' #' Campos, F.A. and Fedigan, L.M. (2014) Spatial ecology of perceived predation risk and vigilance behavior in white-faced capuchins, \emph{Behavioral Ecology}, \bold{25}, 477-486. #' #' Davies, T.M. 
(2013), Jointly optimal bandwidth selection #' for the planar kernel-smoothed density-ratio, \emph{Spatial and #' Spatio-temporal Epidemiology}, \bold{5}, 51-65. #' #' Davies, T.M. and Baddeley A. (2018), Fast computation of spatially adaptive kernel estimates, #' \emph{Statistics and Computing}, \bold{28}(4), 937-956. #' #' Davies, T.M., Cornwall, J. and Sheard, P.W. (2013) Modelling dichotomously marked muscle fibre configurations, \emph{Statistics in Medicine}, \bold{32}, 4240-4258. #' #' Davies, T.M. and Hazelton, M.L. (2010), Adaptive kernel #' estimation of spatial relative risk, \emph{Statistics in Medicine}, #' \bold{29}(23) 2423-2437. #' #' Davies, T.M., Hazelton, M.L. and Marshall, J.C. #' (2011), \code{sparr}: Analyzing spatial relative risk using fixed and #' adaptive kernel density estimation in \code{R}, \emph{Journal of Statistical #' Software} \bold{39}(1), 1-14. #' #' Davies, T.M., Jones, K. and Hazelton, M.L. #' (2016), Symmetric adaptive smoothing regimens for estimation of the spatial #' relative risk function, \emph{Computational Statistics & Data Analysis}, #' \bold{101}, 12-28. #' #' Fernando, W.T.P.S. and Hazelton, M.L. (2014), Generalizing the spatial relative risk function, \emph{Spatial and Spatio-temporal Epidemiology}, \bold{8}, 1-10. #' #' Hazelton, M.L. (2008), Letter to the editor: Kernel #' estimation of risk surfaces without the need for edge correction, #' \emph{Statistics in Medicine}, \bold{27}, 2269-2272. #' #' Hazelton, M.L. and #' Davies, T.M. (2009), Inference based on kernel estimates of the relative #' risk function in geographical epidemiology, \emph{Biometrical Journal}, #' \bold{51}(1), 98-109. #' #' Kelsall, J.E. and Diggle, P.J. (1995a), Kernel #' estimation of relative risk, \emph{Bernoulli}, \bold{1}, 3-16. #' #' Kelsall, J.E. and Diggle, P.J. (1995b), Non-parametric estimation of spatial #' variation in relative risk, \emph{Statistics in Medicine}, \bold{14}, #' 2335-2342. #' #' Prince, M. 
I., Chetwynd, A., Diggle, P. J., Jarner, M., #' Metcalf, J. V. and James, O. F. W. (2001), The geographical distribution of #' primary biliary cirrhosis in a well-defined cohort, \emph{Hepatology} #' \bold{34}, 1083-1088. #' #' Sabel, C. E., Gatrell, A. C., Loytonen, M., #' Maasilta, P. and Jokelainen, M. (2000), Modelling exposure opportunitites: #' estimating relative risk for motor disease in Finland, \emph{Social Science #' & Medicine} \bold{50}, 1121-1137. #' #' Smith, B.A., Davies, T.M. and Higham, C.F.W. (2015) Spatial and social variables in the Bronze Age phase 4 cemetery of Ban Non Wat, Northeast Thailand, \emph{Journal of Archaeological Science: Reports}, \bold{4}, 362-370. #' #' Taylor, C.C. (1989) Bootstrap choice of the smoothing parameter in kernel density estimation, \emph{Biometrika}, \bold{76}, 705-712. #' #' Terrell, G.R. (1990), The maximal #' smoothing principle in density estimation, \emph{Journal of the American #' Statistical Association}, \bold{85}, 470-477. #' #' Venables, W. N. and Ripley, #' B. D. (2002). \emph{Modern Applied Statistics with S}, Fourth Edition, #' Springer, New York. #' #' Wand, M.P. and Jones, C.M., 1995. \emph{Kernel #' Smoothing}, Chapman & Hall, London. #' #' Wheeler, D. C. (2007), A comparison #' of spatial clustering and cluster detection techniques for childhood #' leukemia incidence in Ohio, 1996-2003, \emph{International Journal of Health #' Geographics}, \bold{6}(13). 
#' #' @keywords package NULL #' @importFrom utils setTxtProgressBar txtProgressBar packageDescription packageVersion #' @importFrom stats IQR density dnorm fft optimise pnorm quantile rnorm sd var bw.SJ spline optim #' @importFrom graphics axis box contour pairs par plot points title #' @importFrom grDevices dev.hold dev.flush #' @importFrom spatstat.utils prange tapplysum inside.range #' @importFrom parallel detectCores #' @importFrom doParallel registerDoParallel #' @importFrom foreach %dopar% foreach #' @importFrom misc3d kde3d #' @import spatstat.geom spatstat.core spatstat NULL sparr/R/checking.R0000644000176200001440000000651414012076542013507 0ustar liggesuserscheckdb <- function(db){ if(!is.vector(db)) stop("'davies.baddeley' argument must be a vector") if(length(db)!=3&&length(db)!=1) stop("invalid 'davies.baddeley' argument length") if(!is.numeric(db)) stop("'davies.baddeley' argument must be numeric") db1 <- db[1]<=0||db[1]>=1 db23 <- FALSE if(length(db)==3){ db23 <- db[2]<=0||db[2]>=1||db[3]<=1 dbres <- c(db[1:2],ceiling(db[3])) } else { dbres <- c(db,db,NA) } if(db1||db23) stop("one or more invalid 'davies.baddeley' values") return(dbres) } checkedge <- function(edge,v=1){ if(!is.vector(edge)) stop("'edge' argument must be a vector") if(!is.character(edge)) stop("'edge' argument must be a character string") etype <- edge[1] if(v==1){ if(!any(etype==c("uniform","diggle","none"))) stop("invalid 'edge' argument") } else { if(!any(etype==c("uniform","none"))) stop("invalid 'edge' argument") } return(etype) } checkit <- function(h,str){ if(!is.numeric(h)) stop(paste(str,"must be numeric")) if(length(h)>1){ warning(paste(str,"must have length 1; using first element only")) h <- h[1] } if(h<=0) stop(paste(str,"must be positive")) return(h) } checkran <- function(ran,nm){ if(!is.vector(ran)) stop(paste("'",nm,"' must be a vector",sep="")) if(!is.numeric(ran)) stop(paste("'",nm,"' must be numeric",sep="")) if(length(ran)!=2) stop(paste("'",nm,"' must have 
length 2",sep="")) if(ran[2]<=ran[1]) stop(paste("'",nm,"[1]' must be < '",nm,"[2]'",sep="")) if(any(ran<=0)) stop(paste("'",nm,"' must be wholly positive",sep="")) return(ran) } checkranin <- function(ran,vals,nm1){ if(!is.vector(ran)) stop(paste("'",nm1,"' must be a vector",sep="")) if(!is.numeric(ran)) stop(paste("'",nm1,"' must be numeric",sep="")) if(length(ran)!=2) stop(paste("'",nm1,"' must have length 2",sep="")) if(ran[2]<=ran[1]) stop(paste("'",nm1,"[1]' must be < '",nm1,"[2]'",sep="")) if(any(valsran[2])) stop(paste("all time values must lie within interval '",nm1,"'",sep="")) return(ran) } checktsel <- function(tsel){ if(!is.vector(tsel)) stop(paste("'tselect' must be a vector",sep="")) if(!is.numeric(tsel)) stop(paste("'tselect' must be numeric",sep="")) if(length(tsel)==2){ if(tsel[2]=stob$tlim[1]) & sapply(tt,function(x) x<=stob$tlim[2]))) stop(paste("at least one requested time is outside available range of",prange(stob$tlim))) # was: # if(!inside.range(h0,aran)) stop(paste("requested 'h0' outside available range of",prange(aran))) } tlen <- length(tt) if(inherits(stob,"stden")){ avail <- names(stob$z) z <- stob$z zc <- stob$z.cond p <- pc <- NULL result <- list(z=list(),z.cond=list(),P=list(),P.cond=list()) } else { avail <- names(stob$rr) z <- stob$rr zc <- stob$rr.cond p <- stob$P pc <- stob$P.cond result <- list(rr=list(),rr.cond=list(),P=list(),P.cond=list()) } for(i in 1:tlen){ slc <- st.slice.single(tt[i],avail,z,zc,p,pc,warn=checkargs) result[[1]][[i]] <- slc$z result[[2]][[i]] <- slc$zc result[[3]][[i]] <- slc$p result[[4]][[i]] <- slc$pc } zeros <- sapply(result,length)==0 result[which(zeros)] <- NULL names(result[[1]]) <- names(result[[2]]) <- tt if(length(result)>2) names(result[[3]]) <- names(result[[4]]) <- tt return(result) } st.slice.single <- function(V,avail,z,zc,p,pc,warn=FALSE){ la <- length(avail) if(any(avail==as.character(V))){ index <- which(avail==as.character(V)) zres <- z[[index]] zcres <- zc[[index]] pres <- p[[index]] 
pcres <- pc[[index]] } else { marker <- as.numeric(avail)>V if(sum(marker)==la){ zres <- z[[1]] zcres <- zc[[1]] pres <- p[[1]] pcres <- pc[[1]] if(warn) warning("time point lower than available range") } else if(sum(marker)==0){ zres <- z[[la]] zcres <- zc[[la]] pres <- p[[la]] pcres <- pc[[la]] if(warn) warning("time point higher than available range") } else { marker <- which(marker)[1] mindex <- c(marker-1,marker) tint <- as.numeric(avail)[mindex] move <- (V-tint[1])/diff(tint) zdiff <- z[[mindex[2]]]-z[[mindex[1]]] zcdiff <- zc[[mindex[2]]]-zc[[mindex[1]]] zres <- z[[mindex[1]]]+move*zdiff zcres <- zc[[mindex[1]]]+move*zcdiff if(!is.null(p)){ pdiff <- p[[mindex[2]]]-p[[mindex[1]]] pcdiff <- pc[[mindex[2]]]-pc[[mindex[1]]] pres <- p[[mindex[1]]]+move*pdiff pcres <- pc[[mindex[1]]]+move*pcdiff } else { pres <- pcres <- NULL } } } return(list(z=zres,zc=zcres,p=pres,pc=pcres)) } sparr/R/spattemp.LOO.R0000644000176200001440000000245014012076542014214 0ustar liggesusersspattemp.LOO <- function(pp,tt,h,lambda,tlim,xyin,xys,sedge,tedge,parallelise){ W <- Window(pp) n <- npoints(pp) ppmat <- cbind(pp$x,pp$y) qs <- qt <- 1 if(is.na(parallelise)){ loo <- rep(NA,n) for(i in 1:n){ ppt.i <- pp[i] ppt.mi <- pp[-i] t.i <- tt[i] t.mi <- tt[-i] if(sedge){ pxy <- kernel2d(xyin[,1]-ppt.i$x, xyin[,2]-ppt.i$y, h) qs <- dintegral(pxy,xys[1],xys[2]) } if(tedge) qt <- pnorm(tlim[2],t.i,lambda) - pnorm(tlim[1],t.i,lambda) ut <- (t.i-t.mi)/lambda ivals <- kernel2d(ppt.i$x-ppt.mi$x,ppt.i$y-ppt.mi$y,h)*lambda^(-1)*exp(-0.5*ut^2)/sqrt(2*pi) loo[i] <- mean(ivals)/(qs*qt) } } else { registerDoParallel(cores=parallelise) loo <- foreach(i=1:n,.packages="spatstat",.combine=c) %dopar% { ppt.i <- pp[i] ppt.mi <- pp[-i] t.i <- tt[i] t.mi <- tt[-i] if(sedge){ pxy <- kernel2d(xyin[,1]-ppt.i$x, xyin[,2]-ppt.i$y, h) qs <- dintegral(pxy,xys[1],xys[2]) } if(tedge) qt <- pnorm(tlim[2],t.i,lambda) - pnorm(tlim[1],t.i,lambda) ut <- (t.i-t.mi)/lambda ivals <- 
kernel2d(ppt.i$x-ppt.mi$x,ppt.i$y-ppt.mi$y,h)*lambda^(-1)*exp(-0.5*ut^2)/sqrt(2*pi) return(mean(ivals)/(qs*qt)) } } return(loo) }sparr/R/fmd-data.R0000644000176200001440000000503314012076542013404 0ustar liggesusers#' Veterinary foot-and-mouth disease outbreak data #' #' Data of the spatial locations and time of farms infected by veterinary foot-and-mouth disease #' in the county of Cumbria, UK, over a course of nearly 250 days between February and August in 2001. #' There are 410 infected farms (the cases), and 1866 uninfected farms (the controls). The data #' have been jittered and randomly thinned by an unspecified amount to preserve anonymity. #' #' @name fmd #' @format \code{fmd} is a named list with two members: #' \describe{ #' \item{\code{$cases}}{ #' An object of class \code{\link[spatstat.geom]{ppp}} giving the spatial locations of the 410 infected #' farms within a polygonal study region representing the county of Cumbria. The \code{\link[spatstat.geom]{marks}} #' component of this object contain the integer day of infection (from beginning of study period). #' } #' #' \item{\code{$controls}}{ #' An object of class \code{\link[spatstat.geom]{ppp}} defined over the same spatial study region with the locations #' of the 1866 uninfected farms. #' } #' } #' #' @docType data #' #' @keywords data #' #' @section Acknowledgements: The Animal and Plant Health Agency (APHA), UK, provided permission to use this dataset. #' #' @references #' Fernando, W.T.P.S. and Hazelton, M.L. (2014), Generalizing the spatial relative risk function, #' \emph{Spatial and Spatio-temporal Epidemiology}, \bold{8}, 1-10. #' #' Keeling M, Woolhouse M, Shaw D, Matthews L, Chase-Topping M, Haydon D, et al. (2001), #' Dynamics of the 2001 UK foot and mouth epidemic: stochastic dispersal in a heterogeneous landscape, #' \emph{Science}, \bold{294}, 813-817. #' #' Lawson A, Zhou H. 
(2005), Spatial statistical modeling of disease outbreaks with particular #' reference to the UK foot and mouth disease (FMD) epidemic of 2001, #' \emph{Preventative Veterinary Medicine}, \bold{71}, 141-156. #' #' #' @examples #' #' data(fmd) #' summary(fmd$cases) #' summary(fmd$controls) #' #' par(mfrow=c(1,2)) #' plot(fmd$cases) #' plot(fmd$controls) #' NULL #418 farms out of a total of 2813 in the region became infected over the course of the study period, #with time of infection recorded to the nearest day since the start of the study. # that remained uninfected over the course of the study period. # @source \bold{UNSURE IF THESE DATA CAN BE RELEASED WITH sparr} #\item{\code{$cases.size}}{ #A numeric vector of length 410 giving the size of each infected farm in \code{$cases} as the total animal population. #} #\item{\code{$controls.size}}{ #As above, for the uninfected farms. #} sparr/R/identical_windows.R0000644000176200001440000000045214012076542015435 0ustar liggesusersidentical_windows <- function(w1,w2){ w1x <- vertices(w1)$x w1y <- vertices(w1)$y w2x <- vertices(w2)$x w2y <- vertices(w2)$y if(!all(c(length(w1x)==length(w2x),length(w1y)==length(w2y)))) return(FALSE) if(any(c(sum(w1x!=w2x),sum(w1y!=w2y))>0)) return(FALSE) else return(TRUE) }sparr/R/plot.rrs.R0000644000176200001440000000206214012076542013511 0ustar liggesusers#' @rdname plotsparr #' @method plot rrs #' @export plot.rrs <- function(x, auto.axes = TRUE, tol.show = TRUE, tol.type = c("upper", "lower", "two.sided"), tol.args = list(levels = 0.05, lty = 1, drawlabels = TRUE), ...){ ellip <- list(...) 
if(is.null(ellip)) ellip <- list() if(is.null(ellip$main)) ellip$main <- "" if(is.null(ellip$box)) ellip$box <- FALSE if(is.null(ellip$ribargs)) ellip$ribargs <- list(box=TRUE) ellip$x <- x$rr do.call("plot.im",args=ellip) if(tol.show&&!is.null(x$P)){ ps <- t(as.matrix(x$P)) tellip <- tol.args tellip$add <- TRUE tellip$x <- x$P$xcol tellip$y <- x$P$yrow tol.type <- tol.type[1] if(tol.type=="lower"){ ps <- 1-ps } else if(tol.type=="two.sided"){ ps <- 2*pmin(ps,1-ps) } else if(tol.type!="upper"){ stop("invalid 'tol.type'") } tellip$z <- ps suppressWarnings(do.call("contour",tellip)) } plot(as.polygonal(Window(x$f$pp)),add=TRUE) if(auto.axes){ axis(1) axis(2) box(bty="l") title(xlab=ellip$xlab,ylab=ellip$ylab) } }sparr/R/plot.bivden.R0000644000176200001440000001622214012076542014155 0ustar liggesusers#' Plotting sparr objects #' #' \code{plot} methods for classes \code{\link{bivden}}, \code{\link{stden}}, #' \code{\link{rrs}}, \code{\link{rrst}} and \code{\link{msden}}. #' #' #' In all instances, visualisation is deferred to #' \code{\link[spatstat.geom]{plot.im}}, for which there are a variety of #' customisations available the user can access via \code{...}. The one #' exception is when plotting observation-specific \code{"diggle"} edge #' correction factors---in this instance, a plot of the spatial observations is #' returned with size proportional to the influence of each correction weight. #' #' When plotting a \code{\link{rrs}} object, a pre-computed \emph{p}-value #' surface (see argument \code{tolerate} in \code{\link{risk}}) will #' automatically be superimposed at a significance level of 0.05. Greater #' flexibility in visualisation is gained by using \code{\link{tolerance}} in #' conjunction with \code{\link{contour}}. #' #' An \code{\link{msden}}, \code{\link{stden}}, or \code{\link{rrst}} object is plotted as an animation, one pixel image #' after another, separated by \code{sleep} seconds. 
If instead you intend the #' individual images to be plotted in an array of images, you should first set #' up your plot device layout, and ensure \code{override.par = FALSE} so that #' the function does not reset these device parameters itself. In such an #' instance, one might also want to set \code{sleep = 0}. #' #' @aliases plot.bivden plot.rrs plot.msden plot.stden plot.rrst #' #' @rdname plotsparr #' #' #' @param x An object of class \code{\link{bivden}}, \code{\link{stden}}, #' \code{\link{rrs}}, \code{\link{rrst}}, or \code{\link{msden}}. #' @param what A character string to select plotting of result (\code{"z"}; #' default); edge-correction surface (\code{"edge"}); or variable bandwidth #' surface (\code{"bw"}). #' @param tselect Either a single numeric value giving the time at which to return the plot, or a vector of length 2 giving an interval of times over which to plot. This argument must respect the stored temporal bound in \code{x$tlim}, else an error will be thrown. By default, the full set of images (i.e. over the entire available time span) is plotted. #' @param type A character string to select plotting of joint/unconditional spatiotemporal #' estimate (default) or conditional spatial density given time. #' @param fix.range Logical value indicating whether use the same color scale limits for each plot in the sequence. Ignored if the user supplies a pre-defined \code{\link[spatstat.geom]{colourmap}} to the \code{col} argument, which is matched to \code{...} above and passed to \code{\link[spatstat.geom]{plot.im}}. See `Examples'. #' @param tol.show Logical value indicating whether to show pre-computed tolerance contours on the plot(s). The object \code{x} must already have the relevant \emph{p}-value surface(s) stored in order for this argument to have any effect. 
#' @param tol.type A character string used to control the type of tolerance contour displayed; a test for elevated risk (\code{"upper"}), decreased risk (\code{"lower"}), or a two-tailed test (\code{two.sided}). #' @param tol.args A named list of valid arguments to be passed directly to \code{\link[graphics]{contour}} to control the appearance of plotted contours. Commonly used items are \code{levels}, \code{lty}, \code{lwd} and \code{drawlabels}. #' @param add.pts Logical value indicating whether to add the observations to #' the image plot using default \code{\link{points}}. #' @param auto.axes Logical value indicating whether to display the plot with #' automatically added x-y axes and an `L' box in default styles. #' @param sleep Single positive numeric value giving the amount of time (in #' seconds) to \code{\link[base]{Sys.sleep}} before drawing the next image in #' the animation. #' @param expscale Logical value indicating whether to force a raw-risk scale. Useful for users #' wishing to plot a log-relative risk surface, but to have the raw-risk displayed on the colour ribbon. #' @param override.par Logical value indicating whether to override the #' existing graphics device parameters prior to plotting, resetting #' \code{mfrow} and \code{mar}. See `Details' for when you might want to #' disable this. #' @param ... Additional graphical parameters to be passed to #' \code{\link[spatstat.geom]{plot.im}}, or in one instance, to #' \code{\link[spatstat.geom]{plot.ppp}} (see `Details'). #' #' @return Plots to the relevant graphics device. #' #' @author T.M. 
Davies #' #' @examples #' #' \donttest{ #' data(pbc) #' data(fmd) #' data(burk) #' #' # 'bivden' object #' pbcden <- bivariate.density(split(pbc)$case,h0=3,hp=2,adapt=TRUE,davies.baddeley=0.05,verbose=FALSE) #' plot(pbcden) #' plot(pbcden,what="bw",main="PBC cases\n variable bandwidth surface",xlab="Easting",ylab="Northing") #' #' # 'stden' object #' burkden <- spattemp.density(burk$cases,tres=128) # observation times are stored in marks(burk$cases) #' plot(burkden,fix.range=TRUE,sleep=0.1) # animation #' plot(burkden,tselect=c(1000,3000),type="conditional") # spatial densities conditional on each time #' #' # 'rrs' object #' pbcrr <- risk(pbc,h0=4,hp=3,adapt=TRUE,tolerate=TRUE,davies.baddeley=0.025,edge="diggle") #' plot(pbcrr) # default #' plot(pbcrr,tol.args=list(levels=c(0.05,0.01),lty=2:1,col="seagreen4"),auto.axes=FALSE) #' #' # 'rrst' object #' f <- spattemp.density(fmd$cases,h=6,lambda=8) #' g <- bivariate.density(fmd$controls,h0=6) #' fmdrr <- spattemp.risk(f,g,tolerate=TRUE) #' plot(fmdrr,sleep=0.1,fix.range=TRUE) #' plot(fmdrr,type="conditional",sleep=0.1,tol.type="two.sided", #' tol.args=list(levels=0.05,drawlabels=FALSE)) #' #' # 'msden' object #' pbcmult <- multiscale.density(split(pbc)$case,h0=4,h0fac=c(0.25,2.5)) #' plot(pbcmult) # densities #' plot(pbcmult,what="edge") # edge correction surfaces #' plot(pbcmult,what="bw") # bandwidth surfaces #' } #' @export plot.bivden <- function(x, what = c("z", "edge", "bw"), add.pts = FALSE, auto.axes = TRUE, override.par = TRUE, ...){ ellip <- list(...) 
if(is.null(ellip)) ellip <- list() if(is.null(ellip$main)) ellip$main <- "" if(is.null(ellip$box)) ellip$box <- FALSE if(is.null(ellip$ribargs)) ellip$ribargs <- list(box=TRUE) wh <- what[1] if(wh=="z"){ ellip$x <- x$z do.call("plot.im",args=ellip) if(add.pts) points(x$pp) } else if(wh=="edge"){ ef <- x$q if(is.null(ef)){ ef <- x$z ef$v[!is.na(ef$v)] <- 1 do.call("plot.im",args=ellip) if(add.pts) points(x$pp) warning("object has no edge correction") } else if(!is.im(ef)){ warning("object has \"diggle\" correction factors as summarised above") print(summary(ef)) } else { ellip$x <- ef do.call("plot.im",args=ellip) if(add.pts) points(x$pp) } } else if(wh=="bw"){ ellip$x <- x$him if(is.null(ellip$x)){ ellip$x <- x$z ellip$x$v[!is.na(ellip$x$v)] <- x$h0 warning("object has fixed bandwidth") } do.call("plot.im",args=ellip) if(add.pts) points(x$pp) } else { stop("invalid 'what'") } plot(as.polygonal(Window(x$pp)),add=TRUE) if(auto.axes){ axis(1) axis(2) box(bty="l") title(xlab=ellip$xlab,ylab=ellip$ylab) } } sparr/MD50000644000176200001440000001262114024105406011707 0ustar liggesuserseb8ab2a139064fa7e6597d3308b4928c *DESCRIPTION 84495c3e9bc758131b2d23fed6d10f08 *NAMESPACE 1033ff9631dfeaa674a889a110c658ae *R/BAM.single.R 19093aaf3c5db83f91072f1c67ead8b7 *R/BAMprep.R eb99b45230bf32d65677e9193236723b *R/BOOT.density.R b0cfa963e8c29f9fe7eae386634b2c8e *R/BOOT.spattemp.R e3bc7c634b3494a1ef6f13228208f7af *R/K0.R 36d177fe3007ab524fb8762ff729fe64 *R/LIK.density.R 0866fc80378ca3e679fb6272a956d560 *R/LIK.density.spatial.single.R e150eeff0f7b7fc77a0e42185080075e *R/LIK.density.spattemp.single.R 0bdffd56e43729c4a88345ffcb9e1cbf *R/LIK.spattemp.R 862d2d15a1beec18144b7d11568d81c1 *R/LSCV.density.R d9730bac13a1f68717ce07225103c381 *R/LSCV.density.spatial.single.R 731d551d8d481fbffa3b3a1cce0db709 *R/LSCV.density.spattemp.single.R 36b4a7ce9336d61a7f77dece690ea38f *R/LSCV.risk.R 9a265baa300f27c4172614cfef9bbe26 *R/LSCV.risk.single.R 298fee1a75de14bc5c59c8ff32a2dc00 *R/LSCV.spattemp.R 
167b147f98fa9853fb0949c1c607e3ad *R/NS.R 80d5f2dd5c18cdc13b14001fbd8f877f *R/NS.spattemp.R f6c4c136430913e3011bf3339d6d1c00 *R/OS.R 51f480a6bf576bbb371bcdd464a3370a *R/OS.spattemp.R 2fadfb537dea9c8816927334f5ce7b0c *R/SLIK.adapt.R 96f7191868f1b60a8838ef39cda78e57 *R/adens.R 864c1ed1ba042f9aa98e7bb4c5b644e6 *R/available.h0.R f2f9ea02dd257923d8b45683b091f171 *R/bivariate.density.R 9415e4b3a1cb60712f6faf7dd2c50dc7 *R/bivden.LOO.R a2b1d68a025d973bbf0b9ec28fc3f019 *R/boot.opt.spatial.adapt.R 96c26a9515336cbf8b231f537889620b *R/boot.opt.spatial.fix.R 020efd2989d20c2226e90ea0741940a2 *R/burk-data.R ca87d7c58e5f7db5a7cf0f388e367dbe *R/checking.R ed56557c34dea276f7d8784679a3ee49 *R/edgeh.R 30bcd9da2c6f743b8affc4e07e1f0194 *R/fft.R 56a877fcfea23bce36ebd451f69e6b1c *R/fmd-data.R 7f3e259328854381094e723644c873e6 *R/gethcats.R aa736f2d3f186b1ea1a0decbb5850c63 *R/identical_windows.R b7d8ce59e38554dd864e9a971aa2909c *R/kernel2d.R 34a8e2c9da30ac1a7ac6ae0f8defa8e4 *R/ms.loo.R 244c72368aab002566c99ab5689b3ca9 *R/multiscale.density.R e0f6193f9d3c4343dd5a5978c8890137 *R/multiscale.slice.R d19cad5be30b7af92c2728f3b883d6c1 *R/package.R b13f093e77abd04fbd47c50a85ced8d0 *R/pbc-data.R b6a7e67dde7fe2fa869918a3e4309182 *R/plot.bivden.R b0ec59274fdb004e034843d4cf6f961c *R/plot.msden.R 198e7acfc4b901b8999a070c28a1dc28 *R/plot.rrs.R dd6eb478d233578876557ac578af17a4 *R/plot.rrst.R e9a7e3872e227ed23bd21fde9168f126 *R/plot.stden.R ce42113cd51e95bae438f68b7f620c6f *R/posifybivden.R b5458310e52a70f076d1933950527f08 *R/print.bivden.R 727904449100431261b7b2c7c0da6d80 *R/print.msden.R 889261c519b0a4e4d9e318c0729ab343 *R/print.rrs.R a13344f3a732e0b839b2654a149a9518 *R/print.rrst.R 445c57f04ff5aa5056fca5af9b007aeb *R/print.stden.R 232f3d6a33b477c41238941eda6f0513 *R/processgamma.R 46b5f8e5011ce7724138d597219aca5c *R/processnstar.R 890e3f8f04bc2a14bf64dd60954e1ef4 *R/processscaler.R 1eea8f6893f2944b7d9292ef86aa6228 *R/rimpoly.R 32ae4a9c69b4b8e76ddacfa67021d609 *R/risk.R 887e1267ec4a072d6f68336f6cafe573 
*R/spattemp.LOO.R c8f42724edfebbe367e5e054435498a3 *R/spattemp.density.R 0b1252671688f7ef20a094c8f90bfcd5 *R/spattemp.risk.R 72c3c69366c94a30c5c6b7e93e3b6cae *R/spattemp.slice.R 9cd20bac9a89f39df60e4c19c06d66f5 *R/summary.bivden.R 471de386ef827645f4a0b5f460c52ac5 *R/summary.msden.R 0374c39909d3a3cbf39168029422214a *R/summary.rrs.R bf0dc9cd2e51d32bd9159e0bdc07b926 *R/summary.rrst.R bfcaa2154f899cbd65854a736081f39e *R/summary.stden.R 45bc9a3c35318da5502622b826074959 *R/taperoff.R d06e28430710419079089399f179eb53 *R/tol.asy.ada.R 8769d0ed379d4ede019166f54a9c246c *R/tol.asy.fix.R 372bd3ccc02e69b35476c91c35697c07 *R/tol.asy.st.R 7fbf9e3d78c664adc8e53e056158bf65 *R/tol.contour.R 29c2cbaacb65a7f090052d6402be4905 *R/tol.mc.ada.R d001b1ddeeb36ffd567eb1169bacde19 *R/tol.mc.fix.R 329b07a8b59cf64613d02789cd8eb605 *R/tolerance.R 8d448f562a5a7d6d43461290152ce9ea *R/zzz.R 57f565f5ccd90ec23af7ea085e8cc5a9 *README.md f4c28b3ab897ca381388b9170aa449d1 *data/burk.rda 3e4b24510c37079eda8ac87e783353d9 *data/fmd.rda df5a81c6800576cfba360fee9284237e *data/pbc.rda 3dee728cfba1038a7e02cbc7adfdbe80 *inst/CITATION 6059ccf476312e2634007dcff839705d *man/BOOT.density.Rd 5f93f8984cb3c7efb12c657702003efd *man/BOOT.spattemp.Rd 740ae1054530098d2a22aec45f627766 *man/CV.Rd b39b89686c462b0691c0b37be1e0856d *man/LSCV.risk.Rd 55ba67fb5f80456e9887431b70c23681 *man/LSCV.spattemp.Rd 34c1144b90ed2046ae44154b4e7b0362 *man/NS.Rd d986b3d025be0f75a5c35738cd19ad4e *man/OS.Rd 425be56fb29e052682ef0578954f1fb1 *man/SLIK.adapt.Rd 4641aa5a75e414b0d0621bebea5988b5 *man/available.h0.Rd ef6785c6cc936412f27044a05f6d9ebe *man/bivariate.density.Rd aba43697b6a3016dfdf0271e36856240 *man/burk.Rd 9fff60e02b5fe322cf7b75c6c8f40a24 *man/fft2d.Rd 24299c817bb08983fc0697e292dcea0b *man/figures/README-pbc-example-1.png 08671eb68f582948e6e46df3ed5e75ac *man/fmd.Rd bb7be8a5a7113a7dc65b46784bd3da16 *man/multiscale.density.Rd c245b55e1425bd6c5f2f6d8f6222bae5 *man/multiscale.slice.Rd 18299e89a04e5e619453056117bb941e *man/pbc.Rd 
41adcc5563ca95f71acb69f542153cf1 *man/plotsparr.Rd bf934161dfd3dd269fec0a0ec589151a *man/printsparr.Rd 778a2942f5fa5c9259a07e19bc316843 *man/rimpoly.Rd 9adb3fcecc66ecafe2f84025dae13c74 *man/risk.Rd d204cc1fe622775e3b925ea23e6adc54 *man/sparr-internal.Rd 41c2e356e178daa471b9b8cde8f5bfb9 *man/sparr-package.Rd c2af10758687c94d38f440fea7d66a07 *man/spattemp.density.Rd e55b6a5960cc08839f315f7335ab79c7 *man/spattemp.risk.Rd 480f952c49bdb00d721dca4047832831 *man/spattemp.slice.Rd b11329b6d2fef22ab4111ae06f07a186 *man/summarysparr.Rd fdb65e43c437788ad4cd00214b560aae *man/tol.contour.Rd f4e597bb20273ba8c8c37c3638842912 *man/tolerance.Rd sparr/inst/0000755000176200001440000000000014012076651012360 5ustar liggesuserssparr/inst/CITATION0000755000176200001440000000127514012076542013524 0ustar liggesuserscitHeader("To cite sparr in publications use:") citEntry(entry = "article", title = "Tutorial on kernel estimation of continuous spatial and spatiotemporal relative risk", author = personList(as.person("T. M. Davies"), as.person("J. C. Marshall"), as.person("M. L. Hazelton")), journal = "Statistics in Medicine", year = "2018", volume = "37", number = "7", pages = "1191--1221", textVersion = paste("T. M. Davies, J. C. Marshall, M. L. Hazelton (2018).", "Tutorial on kernel estimation of continuous spatial and spatiotemporal relative risk;", "Statistics in Medicine, 37(7), 1191-1221.") )