dimRed/inst/CITATION:

bibentry(bibtype = "Article",
         author  = c(person("Guido", "Kraemer"),
                     person("Markus", "Reichstein"),
                     person(c("Miguel", "D."), "Mahecha")),
         title   = "{dimRed} and {coRanking}---Unifying Dimensionality Reduction in R",
         year    = "2018",
         journal = "The R Journal",
         url     = "https://journal.r-project.org/archive/2018/RJ-2018-039/index.html",
         pages   = "342--358",
         volume  = "10",
         number  = "1",
         note    = sprintf("dimRed version %s", meta$Version))
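A minimal usage sketch, assuming the dimRed package is installed: R fills in meta$Version from the package's DESCRIPTION and returns the entry above through the standard utils::citation() interface.

    ## Minimal sketch, assuming dimRed is installed from CRAN.
    cit <- citation("dimRed")   # parses inst/CITATION, substituting meta$Version
    print(cit)                  # human-readable reference
    toBibtex(cit)               # the same entry rendered as a BibTeX record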
dimRed/inst/doc/dimensionality-reduction.pdf: pre-built package vignette (LaTeX with hyperref, rendered by Ghostscript 9.26); binary page, font, and image streams omitted.
PP_' ]:!dS8 mPAڠ_4 uQ;{3e2X%QjB~TS.kۂSqsA\ƨΨֺX\@8=%Tł)REL'pRġu٥,N! .`SJ G+_u"/_1u= ,*Ĉ t}IP0);GVDj g|?vM e?Cq*oCRikЧ sy"Ԧs 4!ʹ*W}.ONHp`r *\4RJ3QQŇHv=]bm\0gC/q88y{"f/z_h'MO7S3F1!K$*Yqo֩t@-9 =p;]r]P՜ Vŷ y_8?]L&󞡉i0>AK*Rgk@u}%{QU<} ~bSE~ )g A;]4S\ wr|pj-GY@5 P*rf9MApR:ؚR(nP cn)'0Gy;4^T&r#'QcTm;FT>#6OUhTbcrq<9"ӈv1u=Hpa`%nY\\+*xpr,}wȕ;M7H@J0&cq)`},PH04n9^z،xu<e"+"A/ M\Ș\J!k׾7w%h(q5{JKK}Uىc_BoHdݤfTHț#gۈ=*$>#`s$3BMk]r];\r?ɣ 2ۍF)VeWuT#)3üJ=yC8=53C1fG$OBcW*q[M _yHd}+0\J G1$4b5a5-MWcBs@X3}A5B=EXFeKڑG83I,$P]db<-HvRG)wZsnjڐ؝~'O:dik;LϮ[mzg7F":+X@T@g:1 4{!)~?%Aa9j%쫎QbEE_0Au(Jt3WzAXV]ǃ{*ްJ(ؚi{Q7Y { ICAT_'>QenYaE!pXdux޳2.0chX0(qsT{STJ9UJ큔\'A_.6oCۻځn=PUsjO$BKBܞFґYVwih80 +BtP1~༆ma3M̹n®+BWk}M]H{t.*kwxvfFl‘)w!Ydە-ok"D;̿q1y+R,O#l_xL$2wA8&EλvW0[샑@uJ0m,Al(e} *ͨhϠ!@#0;c"5* ۼV\"0hEj>U$ۦ"BVԩv< s̵&jx[h_v{Rߺx,^ sQz촊Z<5~rm͠-K$ذ&]G\ Vcf:QRkߗ#*F$c+wdn2)|2ZZ(\C9$Sp8ʡm^B%o}3_VQGW #Y@D@MQdt2ϡ -ntwп穊K#ttjXeR&.~9fY U_߁u#l+Qj 9߄E"+0ME#};88etZM' ;m.Ξ^Ǝ^unhS !GAd2Q́3)tF>uvw]G,v3,GR(\ʀo|sx;A}CtbwWrb(T`j9г9i]B BV.XEWhG*RD\$b6#3pE#gjKvn64y*yrd-E{,n"oGxP=:f_=!c7qT9zSc)![e'@!D$yzo )a(ueIchu0%mOP8P0QpItB\eG,bج rf ҰZ3Dff32zgqDP~k9nJ$zxX' {JQT`bvsޗFC"."~+R@IcZsrvu ٰ "rEeE玥h(䩿Vǜ2 $ WfyMˢtL0LaCTT4E^1bh4~7}H -ˮjڞj;ؑGx.rFh aO}'5|S4A4WNl\{t}xR2͑șB!^S[u`q5(Gn(Y9W]%6Mkν)8Q8cD\v%/ɼ3J2堂;zԩSlӐfnӆuk3\;."XprhffV98Lf[ءwy>"}7j̍#(QW~fSg%io̲ ݡ:{_nܭ1( /Fa}tO1vx ^Ԥ[UsREޖ6Lkv|S >$S\̴D_XŠ=$tWo[F2S$v</"&:tJť$vlz[(WCw=Ջ=kry`OWLwTX2Tpc{ yJaI/a"e6rMb!Dk ʁ3k'&I Qh:PS&ڝL$<.8$yڝaoqFIl4ǩ/ "ns~eÏE`ΜHkuL;p ߹d֩5zxl ELЮSw%-r(lg=nNM 17_#aCh9D|甕|*"Pi9E2E4U8b^f9Bi&iu+U9/G` \̢Gkz7]6f Q?l*PL3)Jl5YB*`^ jF?:XOՄGi9ys~UYWkJybY4)ܚ;nnO PrD%!6 aJ'\L$g{$)ΣtXͨ8XdOQ%?P63=̰)`7].JrȚ)EH;ww\ 5W.rKD B=1A3QqضO}uHB#V1J0̵oX,s_4y%j HaI,o . PgIY DNxB88>U6 -},D(S-7#[5J،܎E(SdXlgv7R[o OB q)En ~<b!RHްVxImmQ{ɺ`+45TF>蝠aߗ; "d\r[BCs(JdOf]{u1xR`l҈XG 1017uӚV+hbbA`m0b픃bȨQ 4K~jҋB^TGpvvo+0 *'\Ikc)!NM اx%ǃK/i^=zx/[}PX%8Z f3jb f"ob,RڛB+Gdh}H׀59wlf7-XufR#|ST5AQ_@}Sc kfLc'X(PS$"rSJ!zC;*O_7Gmu#(lSDT2!"M-螡!Op꣘c_=BNΟ9j_b WXq+[T`|ɞz(!N- Y?vdžś(S~ X&c xGGg ;$pewb3 XSnscB!G5^hy,^#RcB1K`6pI=DB꾘B3`1ccOQԿ*6Nq rOc#<::4r`ᔩMxpQ}bqM,-lyփLQrS9_)JlOW]9:5vA:RDa\\F\:67u:)f{$E6Z@] [5)cp!X BDRx'ģLOڃwoSFv_fsSVcQ/B"㫟8wy Y4w/ }3q6ڮklRFmM9О V[׸oJF%<Ũ&7%׾R?-̌{(/bZCjkHV.hM+xa ]3fν\ Ϊ!)K)<$watyop($0߿X07zd.(RƃV3D'?(-{5 sW vV&S sKΜE x_iC^ÕI{/=89v5b|%0^_l^ NOZO:şPH??U/LOOv?So㧮׿+{掏Q?ݟΝ_\.Lcҟ_qSsxS*ˀn౜?9v OW?Uv?u~ ";w-;WӁd90l HF@-)ȯ5Z_ڒU[aweQjy9Jqi{RSCA}m1%X{ t]W3? =(/޾Ñ%FMbqTQ8z[_t=N 1DN3Ij=pNrb 6^k1 ze 8+1]szcz ]v(;'*C 2tx]H!3TPry*b!- #!y%j 6ce0ȥ褑Ƚ'tٟUН94Ħ1%)v9q =#Q~xhH.4zTͼEyqRYKeLPY2*IOe㶿$J~iE S||t!@I>ߪQݛqreI&8yIۏXKѰ1Lz"4Beǡ3 rEE)'r#>⡑Eb{;" JiMe]0jex+W2Hixy2멖C) "A[ 7@!aF i-[Le~p2K$kpnaC7`/C^pb{b|!nSmmhhN#ɼ:K'tsʫO8W@tP,2:CuuC)@ĉ\cyp CyuB]۰âz,$lX|BI!Id[1n+! _I\xDN*-=Hs_s7N17SfP!A)a칆S`́5:(?_E7fgBu,;?)resؓC'%fciTQ ưC~ZLopwho;I p6-@Gg!9_nLŢ>q0$5%k=Y[oU/H IN s̏&2vXJwΝh{XTVK8TʛGV=x dWsM:qb,V eiUbB \G2:57v[G9 lr[.17TƊ툶1Q `v3쁔]>-C`4q仃Lg $I*'IY(4v@+jq4m-VJ?m$7Kl'޻rABIl# U:1 R/|g&7uU'Kq82 hZa}1U_%`LF 1 zhjX LzL{W& u=b4@Z$堏xHOwxxˡKլs0&0R?`%vA7="?0yG s=U~ =#Ttg b}PpRE>EoVa&cT<Cl[ a.S*VL xԒNg2 +F00g# ^͒<3/uuJ |s-HL8Z(N#^{ (:|b$er;/(^ꅜ92Oǀ#{\uUj$ᨀ\/pLv=ZdL'm40F͇P]k,60nkͷx 6.wgr8ce-;aOsA4 uL ]b 0թkm!<H}ȫSq^VFm<.$#L`?I5AzjlǼ09$ٙEk۸(RέK5≆Ⱦ,t`K&n[X^K*9z2x b,, T?$8%&^ >\|5B"؉/C[ӾW8<8C(Ka\b$Zv]zDI!&C}+b9ߩ[P<A @Ss*:5Dۓc, >Vx3Qׁzq&Tg*$V؎$ȃ_ +jR4$V!joS ~]$dNO/ȌX<`Yc7sPG`tTw[8Q4r#Y1AvmQU(S;>ĵ;rY: W2! $支,MkhwJ»f 'ShO yA?۩ŪEm pCӳt44czwQrLٓIXq߮\ዎux2ui~B5Z% #X. 8z4GO(@!Xc9 [mM@DGhbL5 WTywjV77ZPIڦ;fv+C*!)%gDfΖCŵu( tTfsJըvczaP[ܽ[̈́:nZNJ$J!-l WoqKlzG! C;'A<%Vۇ8, ,E,I+%HXB|sʋg];Ai`1g'ZV'&}i`Qr$JUUPĉ0Eb8f Ms &:DdpD͔ q+2tXT@u$' 6<Ed(أvDҼV$! 
;TaxjS@w*pH!5Og$R5t$B @`d (7d]Qb >穩Jq nx{@؍)|I/wol7ykX,tE6pEMAcl[*|d zWkzh+U޽{+i.(g/' EJ0_*!Ad3EQwU'٦;c*ĠEuTR%(9"5<ˣD 8p"8wk<"e= 7ғ(׽RsMO_Uz=N<ҪCKҊm1 ^u6tf,ڼ+AZvTMn[([ kYnV@& Ho%*?%qo,hHA #l^m~yfŨ^hf D{tyׯǮڨEu6v5YT'XS!`"uz 0ASp hrуYټhQUbB/Վ=2+2X0Ԗ8`^OLj@Z*HQ'4$&c>}o/'U?TgAj5Rd]XpۥB,hSTNKsIp*{BaŐj;=yqNfybXe0SXQe,Ъ,j5eX Bl@Jww:ڣtC#yS>Zc ] v^.'-O>`AL' ^RP4&V'{{s̤UŎ 3Jzd0 @ƹ9f%fN-#^W9O@l0Abh 3@= 5L؟'y= Fb%&m{W#ũ&~9Qt!n.خNDG 1cjA3αO /V#`uۥ?AN7EHaE r/l<,Ҧ?QA-QJGL c(AB&v [[l'vQ̬8I* G^@3+]^c^ږƲOk׎ }Q4/uJHT5 bAAIo ,v2:kx\N킐BzHA U Y O fbcK#3eXZGK悢BhL9AԴ+z>EU6VRūT Y(IԔBXH]˴Ww3 Nɶ֓D_|=v:qmPw /pdV6O4,_QxښE¢$Nؙ?=mMf#UV '+@kȟD'JInhH[kxTbt_^ &guA6;';V C;9.zI-L~EQxGrs`)(9jSQaB /R9;&uMFA 1_T+ Ci=>3 {B2}c 7ps`0\]"nJ YSٗYH}=3H, イvV .n@ךWcO\ ޒ@cG0RM|a@9l0x(}I}z}:XBX H&z^9*1"(.J?Cpsf4whD췔dc/C\1 jK`XvⒻcSH7fD#= E1uV']%Tzts"sMG11:D3~0ёGO#x݈ɿ CW97{,԰lz6`$Nƭ/"Ɨ!X\ ,qaTbbUjv\ }/ymy1-ύC~)9&NLrjIdS1G("mw6-io;O{^ﯠ8!%͉8UpdcA-!4a,M5ܲ$%M:v"d!G9A:^VH^rjS(E('\%Ϳ'ؠ!-&ϵhF HUo9dgKY9dLqs N%i)[y̶oa Crzd\syeJs˫J7r;썽AMoC= Tx)R^ǰ0%8LZS&aN 0 DC.ÜUطxxa&9P?sQ~RDn+ݽEq%s $nJX} _Î3NͭJ3'P n)a'l/ "N?R1)elzPtfG;pQ hlU;Pud;#HdF[)4QߡVit Qz^m .H`hBrS#WC0!6M>DŽˎjwj Y `uZm9e 񿘮 j\Ћd/IbLy 8Î*V'Ƈ/O<HWsc)c @.yG)0iiԼtј4 AEZ>4Y^U.ΚVmP.뵬nBa2bUC@:5=% s_Ec $f&(ռ ÙE Թ0d\=XU'9/wɀ,KwMf6P1U>BFRcAc1CՊ=$PUXm2njg*}8*FW4T{|w-aOT m }x86a(uÐ"$&G ~$tavܼIDS"$wGR*TB0fh'.`*3>CH> KzsU& 6rr$e'4Q>3%o}Fd6("aND\;wH!bRh9F8HX%$97QJΠyjYw P ϳ'q(h۝m>a ?>VdM&k !H*G*DdX-wLp=B{Gb۾5a9A8L}o<а1,,Ym)`tmM)],Y 5W]>bb";unN[5#ֹp Pܗl;o5Ѓ~G|0Ɗ 4]<FjaO@W|0XHsDIWYӜ._'CO ! .*nxBҩ2I0 qx `&sq1O S of^#D L1_M@CňLT*|r IY v6Ei:S U ZAB[ [K}i)4-_;3.3:gMzT>*0Yjun-h-u:pӓ<=[nRLOTG6'",t]@1Lr "Q# '9^pqsK\UUL%Eh` ^~#I&C]69:3{;婞q*/'l 6FOi+ F׊HEoCdqT'Bf7!yj4{IX52>vKcWoTZLB;Un?^LzA蜭ԧ0#H#)Q6Bp .gS R 'O4C*tZ0IPhL_.<>W*{BgWG,XP6o]'_]3n[udMMhX\`auQ_PȑS/*dY vPT#C<%rp3 )i ce'SN^c~zk8۽X/By-S Ky Ic+ /ܵ@:!C̦C9e VDtZUȂ% HTj"Q_[[Sa'V$3[ Q`c>Tr3G+f0\9K=OGDv.}#\Y!d UߠYE xu"KHq]U"R͒/IѪ\OTulkF6PBb xQ Qҏnq܀Ⓖ_*e`ɿX2欀@4 *xz3&̳"l/;/@?o?k7CQ@x#X CD6;6W; =*ln ̶pMAhD%ά5)[D ^_\5v/XvQYpƊDe*ǕTn5E]7 ĤFLK^ Z؏# ء(WPU[ĵHNpmY9{Ui\;x%td]Pvj Sh`2VtUfG9,xqL!W#*'-=:Ef"-Ċ%W؛-?BzE,sw?xf}yL<  H־y4 z Szu1mu+c}*rkoIh"я*G_ RD^R엣Ti+߳0;}Q,~z 7u8FNuxcɶ=Jb]ft QCR q &QGF|u (\ws2;x2_w< 28wnj] \(RiU+҆QS.LX123'TN0ˆɰ iҀ`UmԻ.G,r$O(vB#'7Oi_sj.Mb$I@Ow\{V^(FC۽ʽǨ@`RML`ҢI{L^c L㏔@IXoIұ0_+N氨 BCk.sR;&eO.[sXeLk?6X[|WNv\5XYfst#@y8ed0ۥG/쵷\o\1aa%{hvdTbz٧y ElS!{:y"؄ITP楦OBdHZ62_G_l ЧN0AA4t5fXjV򼇧z(⦅a DKl5"rA;Fdݧ{Yã N;_ƅܸ*!ʲ5GXsue~"W|TST -:Õe(D9DO!A˜AFLf%g x!^:1Zў59bsAȪ#cR FhC3=ªHMjx)^%ތCM' UeP!͜ $6mQhӥĊ1{KNVւ1W`s0j(WbE $K^3vjtt/,gR ><=eLy07!o7ثb1#4-[\v5%hS}LIGT&BEq F*pu*8~jiPR/wRtr6y٫bM! 
lijRh5 OŐ$I u#4Tk =|pEIzr\.ر1XR͋\\!NA9(Y< ʡ CEIJJM{Nh'\u`Nr&.9jFZs: sKsZi㈦ߑL.^rMزAZU^pqN.)Ń1|&0;5/8#5O`- UBH@3!r NAN0:W$/i9 /lw!גC)5`vw[q2`vpYĠqI0'DC[AMU9i@N:Shem!(gk8`i2V/;DU 4Z$c[`"=̢H%^73'n%yE.F,HGVPDVƉml-g_c~杣m͒wʉYMe0_F@R}/~:c*Ԡ/3 zoTH{; 9)#4 [[r2bѻ+h|dl.L1%)RO"HǙ PŅHʦ~AP*bN'`&INdou rw/IQ 72E 13}pD~nYcYl{]Kf)"a{Gz]qZ`s"t<&މ!6Jˁ!}Pb\*v˥<ÞM@2L SmmNi1& #~ʄV@ ;L6KPab]mnm |W4 L $ueէ#n *duz~}bD{ orh+&okT+n!jĞRaNwgaTy֙>2p L g5;|壉%Ad'HNTKU| .5h/OQU}ߢ5yd.1R}/%\hl:nVXp>LgA&u2HaI8&N%DG/#Sam $j#j0=w z|yXM/W咉 9aoHp;<4^҈םjv|q?rcG,AQZG!mi V<kkˑC=CuB"[E%D߯T8z-`\V 3$5/yr;6_V;>{ df: 2SJ%/Vi912(r,֯Ά'mٕhrYP$s g QiT ??TGovIY I"ԏq4$*Px+9/6yIU_'hTAhMG8=kʱm-YL\|3N|ȪAkZ6kb~`;,7ʹT9$ϭCNX:nCa!]%$`;*$|:??ڻ+r b[CjnV-օ9^= 0K:˚㾏Hs;6&8; NimR)Q";Zu3 )x7c0jҐ| $mϙiPloIs89*T1q\ 3¢qDez3HTPXw_E|U l}`QCn#*(P䙊*UaYā"T4k ]ե\h {^fX R 4BkǏŅ#-HNJZ>e"qdM杩 717w\oWSfN_w9dqrBBy*3<%XGN~ucoa` {Q@&"/|a/m_KK˵7,_?J3Ifϟ5zؗ[yZ^`QjyUAjx9Fq/Ep\u.}0`l>Vћ{vo=ը,gãa޾a~3Uˊ2Y*=ρuѱ(/L# U|&8ԁ7 Z> O~ H'pLzП}s+{ſ3\})bYc{Е& > ,]z큉^k",fMZ:ڤÂðᯢ>],fBZ )v ,>Ri;^C.5 r:/.DxaciCA/ X&h_Js[]˚'9!mS"y aG¨ 4R,'rr(YweR~Qn|+,IRa/ U&"~bXOuEAgH: ʥA`J 4J˞#r%` 9i_E^ z1gCPXasR;-Y2ya"cT ;->X8/B~3S^*/W_w䵰G!-2le").* _F#},`3-\cHج >.œ+v/H]i?*M xI bQh%k:=X8Ҿ"79>yd2y7 Mx`HHςai8VaNKW.N!ZޯF"hxt}(pYh 4,i.}(2d=LsTR!]{Ê3-'ރ:0jr$K" xBweOЇ>mY&q,W1 {{eCfS_G(ɗ}=9.=MЙjITmKCNqy^-|B(WoxqO^-UO,HP4.Y$2Qe3`| + $#%c acěj?0Z8dZOdߐI,mW%3ˆϹgŌ' ,9 wiRòRK}<;\{o/wˉ3*o\d3.&f.ђS#WrH\ KQZDMBAz|JeWcyΒGz[yLXj{c< `vmxg{5^|υZbӱ8ݔ9gݘeQJyq-'.?gBzW dNh/Zd)R*\]l $6g[ e>WCa-j(|bӰwJC8Gj\I+ <௦ xEj^KM{957tJR.r:7E[c]MW -xqo})+qw&@ujnsSG>y$STuK,6VSW,F38T.Gn""ޖdqoo@ժҒWK?^n|kl,G\?XZ8 v9PO43#yΒ.n/DKM%s@Q_yx;K~ZM%5RQߪEg9";pQ,{/=fh@:$4`T^z*Gx pE_ĉ6B%d$@W}U6[3-:pt aF\7qbZ (0Y;*X$^IE%ΏX E10CI\,FP`4OL) O3S:.Z761WQC sy!zgM:4Zt0H0Bڱk>H Cqtn>mlд) M:ݷc>~np!G0Y:7ԫoE]XdjBJ>3/͔YX cy:i2fV o!isu74nI.}2g9"HIXnt2! XB 1",+洮7t>Bk<*mZcGhp~[tU ),2QffT"Y\0*\QaaQj(&( xm7RdZVS<[i7ZX([dYTJfpe _v[zV4="ZE^a,d ׶T-/"ۍka1{SBX(-jp:-:5AMS/޼X-K%6h$lU{#hV>: f+,1 RNU,gG*$0JAxe2G.uRu09ij jgq@oPqa+kɄ:FaN*u3zN:Dml5հlVaс!%e"o8i}[2pjR__)O,Icd?}Lc1mK:K/‹ M#0{nyJ]@=)>إnIB1 `gUnS3 N~n{:zyo2uv,Y׳m)PN^-Ux,UmK3K1S v] Z- $,,rYz.@]*ٴ kӁ P_x\1!jK] ge¡!ta ?Ң(ҢH|SeZϝ%CJbG53g>C6gi)4 YVuofa#ّeꐩ"~miEaV?? 6nn)CC;,j`-_f63%^jXtC L~n1Y2/e-u:vA4ӕ)3 R Qz$=v]rC.%s{ :4-Kx~. 
4RiZUNQ];SӔgBwp?MYdjI,M}ZB'sT KSW9{X8i嫞q\;wcVXݤ jJ5$䬆dEFegXdźu|SےEA"o:워^MXM] {_ ze)U 2fA{vao-If[CX1V`jCd X&kEiXɼ8$/W@k 7lնTPa;,E~t}=ꖍ Vlz+s]Iؘ'ܖ/X 1RQe#}e n.Ӳ2ԃo%na+`xW$4_6L@xso/TRm_KnKFtUg1'J5r<)nC=h:o["m5,Fzo.j~*OpnY0w$s>%bX>6]*xІZ]f[Xi} $xy[Mt_[,c(뀷SIuºu'=LE-A,apj OyYѬ1_Cd#/f[3<Խ 2C =`$tU<3Kv ϯH 5$"; XP۱W*Ț0ėMbt,jdsaU/a҂٨F@4p&Z4 mf*A(E\&|9!l9DDV`lstr=ܲ@Hՙ@Yچj]]rxN,]?]b=$Qf43f:^'Lgc,Q1D\f iA0ܡՂ}t ]3n`ޛY]&v"|d| zuwti@7lW༎Er]r%F H ЃFfF*Vc0"W`tyt_d$"jb XD`B8-@PMB乓a/tFjn aba 䎽, 5XR 3b], G/Ԃ6<cG G3|[vE" 2.D.VCՂ5bdA\tԂU@re=X3|7yID," !*\&f^g!Xi2yKf~fA8IEa7ɼ#d^x0/NҼI1/5M,Τa^SEzByVt4yQM1/y@y nֿ :q# RMkJ0/(]dcb^PސhRu5yü psy.b^Z$b^8 "1o sqUNy.(I1/p$'ż`_c^0)e"IIUݶv,M[f-Uvm-VF4sq2_h2%L,)|P˫)|[,'4]f-ӈB3s2J3ߖa\o2zMF*|[,E4_(|[ }4m"*|W q!ݡ0oCG1Li;ޱDi2)Y*$1oC;H d[ 1ݫ@QJyAHQܖY ͷkJ4.h-3vJ)?X| h⤩%K#\k3I3 #]i+4jVWhO h^T wh-Q#?Ds0x zLeh?ky2^>bLmF'|/uoCLmQ۲O"e&bLmJbneKJJBd.KIl l0Iwܻ3Zy̷p3T\Cg.:R5RW$x%̷-KP y]&-M*&\Je \kv5..J6gY?|:x%[SZ 2rZ[3GK"ds)p W,n$og(DK/dmIRSԫGt^QRj2cm9WEXל\s)wY6۰i#ޛI5ȅj-ZVuuLs+2zW9kOI90MrEK)䚳:J  gj[ TsaX~`5-Ts['\j&\T}뤚Kj-SKyI209yŚҽ338dF^hڅqAh]jnaWLsDʌV9h4Ƌu|6c#DxQQRME<)R5=OыwͶ;(ȨؙI5NiTTj_7EY71ʨN) TC29Vڍ-odsXlfQ֖͑Sa1wU~Xi\sIQAvw ꆄ79,knW%W9Bz5eA8\U49# E6mK7RBl~ĄͯdsdZu)\zm1lsX %ma{aâ6519.lsd kZ氌9r?:U~V(:AWաttWlKBoګͯ29)\,âX duD=mCaâoGq`a_4:j)nT6&˂b%S%=]qm.SY3VmN4e_\ E:4?#tstUm1uD[n~ōn~ߠ_-P?-+^\qq?nPco8Fǹ ?,mqrC9?C9?TV߇%nK՟Ηʍsn9?x#CW%{HC:?!X4J59&M1KOϙfsdo|7˼Ꜯ,b3|ZαZaٍ[hٍٍhi᝿Xl|nt&Ώѡ#fl+v6ڹsn*7aQҹ ߤc^J2d!C1<#C14qadPA<#ϛӭ|αwppH++jg:|ΏsrHGB8338|^F~Ftby<40!C3X֭% ޮ8=K8 坏iSxcxpI;=Ps|{e+ֹӌr>LaPGϢbr>(۞`s'{cp~"C8Χ|^ p~"C8?+Oֽ8;[ p~C8Wx#b#(1U\_'9ɯ#fxH'h82gtCVJ/W6H砏Ѥ12,!tt4 5S(|CW9,Me)H:T%9O`sX쥒ta1trPtWʹXұp]-B9-,e\Ǯ@⤜?7Ē->֎E>cUg]k-sSD9M6GISC1MI: ݥy(v7H:NryWghL<(d)\,v;A(\9}xWwfN"e>)t[Y聪% q.&x:(f+\\cX'ST;NA b1]Ͳh]s}TJZqpXbstU@LN b1vbS[@CC//*\&m0)pETfam囋ލ_O9C%t [=5λIW?74a`؟f-$N€|s)ѻihX˘Z0&:$=fWYt`Vֹ[cgclؚ 4ٽơk=*|.Y~,S~e&+ieN":O[HJDj-Ȉ&4ϒCT L+OSƱ,3_YPT̟ҩܖLT aK)]6Bf1 imd% DKdyx=> P$?>~xBq+~.XdN?~(UK #qhJ?0sy]sIca46YJ?\8)6"ԌLNe'mR7YD}Hf}!}#VoIB`G؉I@H0&~v奓1FR'T:"?Kd?ń H@_w u8NwcsDэ,qj+1tT5+F$,UČƞD#$sҦ=Vu fi}h;c B]P?k@`n ׶IJVD+_I3#7dZ1LX;jR&cI+Ref9 sH!.MKZ2zheF+=v4a<0-I, kŋ,k M_1=`%abɶA TgPR,3&yԍŰm)/9V=!jmF_nc;RmitW>2Ba(`XP{"35e (^L)3"am3ag|cm3f Wz;SKWü4"yafh5JOYаYX_GRڪ6W'mRq)=DIXyt)K& b v(@I+IZ{L->Jj#2Q-}G%k7W ߾Io~ޖ=rqG DFxysu,ϣTqYG6}Ա£T/qYR QfyEaAjxcZ;AX2cy/ru$UQ~GqYv 5<1cy:gz<>96iBwb:Oό j,dQ'} $+_>qnw~|k{/Gq|%m_%AfOc# % qpV!O 8p¿x>?@ܰ7߮g/lNt/sG#=)$ xORqi{f{C)|*6eK?S  'z۷r5?l箩?'*>^:Ps^ '/_^k/_񋬷 |5 2;H?~ \d~nK_?WM_+/7/|¯^_}_}G~?E_|/֤9ӗyͿ&f_~?ᅮڞU7roQe_y}?{~>ٿ{̯}C/95ُO_Og<u=oEF~#7b?}}rf_E+“xpn?= GQ_~CY g^w1k No~Od.喟΀0̢پwk*_?w?_Y=]s|%& ~{թ?߰5@._~ԋy'; ֤}W?~Yk̶Po c )CcڞΏGkeUz+wSY{O{7=~dO ʮݷ~L]du~C~,`~ 2cOr\fދ^s'/'koq}`&Ǎ{`,db` 59'K]=J?||endstream endobj 253 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 3512 >> stream xWy\WET:%t\TYdGPdQ:.1ꋊ(5 " HClQG͜omH^~UU|+2F4m蛜?E47 ]|7D#hdplSD7L6aK&Ob癙4E"_? 䜴 $rL JSXOREM_43;{aNZu ֯H ,H J^:"]4u)ʒ fS!5JM&Q_T5l%Rʃ(OʞhʛrR>TʗFQΔ?BP3@*IͧFS1ESc)s$5@MPs jYyk$HSR&h DZ3L sѰl}MVqo%[ ?;Pn8pqk#Fۈ7m3v6d\gLb D#e@Њ3,ާU2؈?)7ld2]X0nՊ%(%Ĵͽ vuI5RAWW9iÿ_4^M t"_7I&urUM@0X\ 1:Cq ܃h|%;!6bH8(`gK셽V0lz?|ɋ܅:w< uwtk)Hi&ڕ*4oj$Z[$ ؎]`?+بº^YDX(}}")춨-i2in"':X-8-?}H1ٶAc&m3.8^iy\k: rK [?v xDia0Fէ CݫA41p`@  yVB@a>OmqgzUKUtF[@T 9t;?)QJ R%KL̤V\ޮnIvnV"B\:ڍL^5ix n'rP-o G&B{1dnI0J)o_cyJe<͓m:ա37xخw׬]-Xa^}tӣwLLn m;%0gak/lŌrY3 D>eKq N`7KN $ܭ*pn49\ӗ%JiC'j&]ƠYV)½].(C, /ԫM>YHC-_Z`Iٯh4\g +9])3tk 8 ^bFvD/:;9pd\ϾCPS}vJja7_E>C'`=hmR,'k%|KE(Z0%Vl!՛֤NJ>]c𯍵˾8{G^ܿ|' fn+MK_2O>S{byAyû'W^FWyA t =TA^SM!c@\¼bb 2 Ҿ;NM2A'\b>M>#{. 
I)&RA3ԐnH"*-WBw H(Ju jx'|}kuRNy[:a` gƙDqq+;4__QLNjG-Ac V~ӵ#LQ#`!QdB`}Zx@uF|ނ9 3D&^Lf266ܛD;n+~idC\a\l*ơΎALrC|9mZ Y>zk]]#Ɂ3ad\ mfqȆ1[gu.]T)ɾ#(V_o8HZb嵲6 vm[bW/Wja$O8'PP,XtV೚/u Kt̽5}5"[=^Slz oe]'V}/cGk|=re{F7Gts'$@ OlP"+/WC@pw₸Edp7ɽ*ZA x$lgETy7cljٗ'(W]9W-v`X_R( `7n[w<*q9dCFBzh5]tVi0Ӳ%'k5&گAfw_Oq$tN:[~њ++0s1)/1}mF^U28?=$AUCVl*А~>@6E+.ʝ"y `*X([YrW ^ݰK)bB4Dx5Y%pXw@M ұd<._E/P}G g?{m7wo={9u |+N}N oO(;v'kE~[誨cgf҇R"y>3cX/lVN1տ~%''8 ǩLW53N˥ݍ'մ|b l5٢뻏?Zrz]o4?=@eA`x S Y4n?p6 Ьvp졑+#K%膱1sJu"rbHʎ<5#~N A[pxE%nR2n䑄 .ŷBkƔ&z''ywM_[t)a:%`1&~,^&2Y^>uoމkydd ݵS|`ҫ7$42\n&oJX K҆<=T>vӊ>>($f3ZxRu򌸐s1XjKL~ΊG/alGb+2扰ތY`~tUsͪV=g:%$izp}׋{zitJ]7*bMrCv`wsC͈]FFF݂endstream endobj 254 0 obj << /Filter /FlateDecode /Length 161 >> stream x]O10 Ԫb C 8QC_@g|wYu`A>E0uŭ F,m1,O)x@I>KWu ӴxO$ڪZc:A=0y.9aJM5☛&e=O) nSendstream endobj 255 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 715 >> stream xkHSaߣ<ڼt`^Kc$it 8Ak.9.˚4kް4E.bj/}"#/A}{.x!UZ^Sn*> stream x%}Luhε)jR_ܝd2 QC6lNԦ= J9<{}[J_`k!u2ti712311]wCz'yj}=[5^zuNށUz;qv db]Cn;GXkƦ Mn'vX-=`!q]gP 肌pekE\7&NǓ"q i1fQ}}ݍ@Jx2]uƯԻIL%ޗ4Y*'&%@D=~lbf+1j.\ ̤0B.;MPߠ&#w> sT tOoק2b($ofH^*$>J%W32Z귟]Ӡ3(<\kP#O!›ckx;{d&AM~̉0D#^\_=L?H 1"/EFoĠ'^6~"~g!)}t!W,8vG(ĤpL)&*l%ӃLY0 +tz9JՒ\ܢ9-1?`endstream endobj 257 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 989 >> stream x-[hf$͔u{k,AYBHR50:(llR%/{"bl9vr4k=l{Þ6ZtޤDfLN}p|`0tONM}4־oԳ@'hù+7 `>kin/i0|I'ui8:fl.rF){Aae;M{.a7νa :h;yQuɛ6'E}st;=~S{ׅaXם7'0lF^ 2 vױyN͚/cUylPT3Jߓ',5 |;.,sX6 2D"R3A}q7̣Z4BEDTY! mMOŸ~Y4v Ev 8On m`$xADAVk6ϕO)-QH Q{6Qk2+kQraA*P)o+[dABؒn a[vxX1;c"+߈~5^^ɔė0:xo vkFPxEB eqкf֊8Ԣ tjdXh$s"kY^Bx-`Osz>l W*Q=NWP׭ lVo헢luܤzS>_A\dH&dҌNDkUP`CTѭݭsW'ƞhİ4endstream endobj 258 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 339 >> stream xcd`ab`dddu 21~H3a!#,9|=<<<,k*={3#cnas~AeQfzFFcnjQfrbobIFjnb ZRaQRR`_^^[_nPYZZTWrp-(-I-ROI-c```4d`b`bdd lKo} .ɗ1JHg_}r?سR|нP®> stream x]-7ry~\ sPtGanj#$1/nv.R"- 3j`[ :IPD"?# vۗ_K?};w]A,[u~_ou[eǗ?ͷ<~O7C%ß~pw?||__Ӿ} x߾}?}x?/?C`W.32[~my"k^|{T?oc996ǟٔkv~,b?Dz>߾YlWO.>-=5Ttby9۶x؊-voGѰE?li{m_P׍(_;z.'GQy6KdqʟыF5euZ~Wϵsȳ67e[Tvs8Le>fnjߟ1qPWuZũRFH>i//1Ka?Zޙ2,Z0?漕? Svϰ?(tyy]ₗ6bǷOp-LRl]Dz=laXl^wd/{(J"&/e)|w_mS/1)h܏鸜|=1aYPL4d_.>;_"(mm}8X X6x}I0/6栱l^D]7/u׽,{QKe,sQ+X]5U7_^Z^k^J+3߀cz+Wa왊1?<\lZ|*Fr߄c"WE ka:m7q//ER쁗z6o:_5y4]OMuֵ[9dt/^(]}ᾷs/<VF\fT>Uަҗ2IubXnL-\aeg7,Gl`@c3AS*蒦fzr!.е'辞롨O+h{?@Ŭ7 :~ _A7PQi>A_id*f˧?>çL2r*7+_=b6Kqw*&ڔPHEs9Hl-GZO;aϩ_?K.vАoǿyz[f'OLjh,W˟˓֋0 ͫ4 (Tb2WOzy[|dwϽ1E_/\mb~r̫/jŖf}&v@|˗W? S_q6onWz1uޡx_ٯ~Qv]`痽ӱ/m{< ȿv}y=k?\lS-G)hsѩsC)ھ^9 ݖv;y9OkX 3o񽹺RK{]nǏx3ǞOeSM,_[ɬlF]Wlޱ"4GKYyշ7ͭaSikbv+XfI0o!{7lno=jc;ЇEIМ_?bEiJK:9nk#ek}z{{9Yas웹tOa͋ cyk#GDP?Կ^-lӉ_DzS6[-Nkܥ̾t[?[q3h|Xg_= ?BFͯK1jǡ;V#.F+e8Olz'QL=[$wܳ/_Fn5,*?:UN'Xn7ş%o߮*'ћkf_!7=攝vdoſ禕-mXpEZ6}j/kށLL/Vg\sve̡Z8o.\=aـ~<+bf5Ѿ/nl?ս &k]E=uNqZ^x֨\&fo3Y M>?-ܾ\K( \H흸vY> /Cz_GSѾ-sm-l0.q-j^܇q$]hT]Wr9o+8nQ37g*z+vι5eϚo?ܥ&m.[fP]nZ؃$&=f5g-S^PLoTf3,0{? 
6,NNBr{0yl[3؍?}lkgo+4VgQ6ۑ;};g~hGlVse6=2 r6xQpDm˜-٬y/.ұ|L=.b旧3].^qڗe/Q@_{Ə٣~F#bFa =WKg=o}~R-X쟅>xwOi:|~dX{Q6?Ez7W87˰ >- "$qr nδ~Wk XĜUOоs}-EpƧ5 sd5-h9bsJjX,d_/N||zcy,~$C8,_lrl%O,Hۖ*ϯW LvʷŽ{-- fCmь}=?b>3rnOqjY7W;S/ћX3#m:}ksUn׷LM3Cas nӒI쥙'ftM IE<`:ZMXP.u}>΢{vrGЯ;jos6'Ȁѐ\v0˵ToOx+#} =['<4 qO~$n Of} m F5C5Gxkw +h,[VcW\Yv̮o!*bC!j5{n7gx6P\,7t&\^"؛˶ћHi X`V=+rkXNYp"iG@]%Y76~}s'7X~?:Et5/챗SkI"ѰHzr&{1Пԫ4r!"u'nllK &OՉ_{-_"{F ~0=$J?ٛF,TsW̮ 7On$K;H8H:5<Ч`A '@O8:="Zq-qKjyjvdEemKo+]YȲa \G\C{#~ WV r-|x{r<7?2k"ţSOKpy^Aa)dډ{e-pp~ɽC;AEkJY` nYa FBn8V "#GuTTvͮd89SmW BӑSsC|Δ qe}V+M[|(GK?jFȊ>A(ɞu=.o_N4^{|rGB+D'YO3s#@"xz\-2 i UlhWoyp=|z*%8ٌ27 ӓP?z7%Kؠ!LYfw֌b+l>ܒMwv>+t1=xv<~^afC%{j'8mJ̀7*~tӞܚ{tkXb|sg P9V~T$ٱH8TW$ﵖ7YAIv\qVM[q.Cg]R}̙bp6U6:~@wZ-U\"x Qەqjٳ fh1ޓӾMx3UnPW4w1.t (Yԙ@3xpY2ÊSk͞k\*)0]>O*:j>yz] w83[ʐ'AcL_%֬o=a[}A2, 4w|q"=G18}7[†9ҥ#)`VX VʽwlAAr\PjtL}yB/'( gLF`gHb9/Rƨ uWn>u37H< 28Q"9=!"[G7/&/ňl~)^(6E̬lk:?+мvwޫj>'ݹufmA߹8#v7wՌ[a~$Rb=QiY)d j)YZ١v-TY{W CX k+z8u{f1:9*?14  a BHߜcĜPGf\w3 5!Z˾IIMWB_]!~O⦞dqSi~r¸#ܳK^V[3 5\FXB)O7Į7YR,Ay fсGQWN7ywd'VKQH[#mZ(W9gN8jy+H#ѣpMA24g0avXh0GVb}*Ogz$wV?F>q;#VsC/1^^QѦ6@ե1zDP#[ܴr tlrx3^J4S 騘cva4EX'aĴ+y|ڦzƃ)GnXdȇ!zwY9wH'LO]i>SKO0NȦyVIva嶄#s @ )=$ {Oi/}g>$gΒV#52sةY=Y湰 ->eIgWi\c15b6#XzN C8sƦ97Fiz d-kyBe=}tŇۣxX^:K#{* b#/wtKɜ`j8ܿ]79ݻ:'G7(=UAflAj ,H[Sʤ{b8߸}@6;$ɝfpSQ1EmHB3xM#­[sﻚ3j{M~=:@W:&l/]9z=уq1~S\,lB$>>ܿTYDjA]yc ,X=\ =Vy__&J=59>XnTTu/nkgs\ㆣˆڮҚby@"9K_ϔv} {,RGK4$\0EyBVw dӠU~A2<)ran[,%H<M|DZ]}h`jY]Hm澣kD;UB`xD{2_X#Liu:h^ #Y惔ec0U}a >Ùޗ=-K@sNŻr]GPBn!9qwf?MZP^. ƇeV_l'F)UJPo9׋"i|MQsكɨS^x9^Bgu=-At+? 8QӰX琷yܝ$ޒ]zLq@ƹ:տ6x6r9}|+3,Vw[U#ϖKc_)vjݧjǗe~`:ʮ4_Q/O8ةx1_^P\_gD}^R21ZEh%Rly Hj@KicA]kC`+q1rc}:>e_T*>dN;OC(k>j-԰yxj5pvˏe*r,5"k16i׏Л2j~@|ipj E:_Q4ReN`Z9hZ{GMQi0*ƀ9RR9-TXEN 󘔍z%e[ι1`byFȘl:ڹp@qm>V@ЇbWžZ6`Z}k>'K3%ay״Y[EB͖emY,mU87P\^Uis˧̚(@dkna_26V̨rOfO*m8u.”mV5_E(#ò7Em9yr3U4QFa2vc[#X*0{ T F%f`k?}:/n󸉲+NSP%0 1 Oٺδ|>#泝:?wM^8\uB}Kl@Y71ʼnwZ$kpO꽃ۨdfn=1] dpÞZ&2Bۨg7MDmsMek#Pk^Nro/mcj6Pʟu PGFOYMM5;ز~>{ BnqBܴKRQ,AAh@K[)KPe}|Է}5*רjEF7d6ynJ!y 5 X(CՕnWDMUdekQ~mC%Ew Fݞ7#O֭Z):1?DYIżN (_2Qƻ= H jt}b,Jaq7/ԠrpޯPAqS h=0.9|stlgښHE 5QT͂mQ׬Q@f%'9\Et/lwrEYd| rGmF%sz/J$ʏ~3.mQZĄrT[U~=d_ǎ]WV4EH@-G2mO謁FX;SQ+!ά s H眢9sLKsniYZ="ݮ%Kxk- >c *?lbP^75'ܓ(0%y]:Pv?18]Q56-mYP/%*w|s<ˉA1#q\U*qTZX:-T6iN .ri<}4Iqnʚ OADڌ7=0|~"8ߘUs&3#:M2jZNCP9RѬ 'd_~E+ȕ`"?"CB_BfY_m^m9SҹҤ1u6oEnkrllA߮Yx)XS/[K*k'HZT66Akhrӑ赜Sc|f[[QrcRuƭWB?noR~{Q E#Țg-v=u!I-}:Š,EQE"K@XkVUE#HARwP.(? mAgYF`0O)ˇ.PT (p5z62[(Yĭsx^ |ۿ­Ϣ)DѼ<74F6ay¡E=$B ɓC,fZZ)GꎥMY5ݬyIuKxԩ-кĵy1E :?mYh>b>9EQQ0dI\Fa:\E?y5;|C\D5:DJu.ԾEQQQyq#ht^QPp^ Pb9@8EʴIUmx|}{9_( y@a譊舞{; =Æ~ju%.#8j{Ω$hZeRbh4:>Eg5ձ{[Vή=9\]#ۖEQ(ʸY4J.=(෹'?$ǶJ>6y0++J9j?BIbq0=v3a߹ f6W @ bc-\S4F~^UTqJ`ڎ^BT7ڒǝ Mb ) <ш挄Фv{HXo7EQ(F$KL"?ȂpE FG.joFESAT#(*Yp9YPXٺ!-Qǹ_.=Nek։((:yAѩGLҕZ߯ӉQ(OQq^)kyUhSI "%!(rYÈ$H* Afl)n1U4_gȸPAvR1SXc$"LWf?8O1QBJ8%@P朂QZ'm$xfWFK_Z]ϢrՌ0pȹ'?g (JQ#Y)B{mªP^1F7βDq@QhWQtF( Eg^Έ*! %@ 2 T,V H7Kp[ (,/PYdU( {AQ`/s%A[ œ7QЬ7NxiK[Ma:8@("5! IMjш'9YgcVؙle,̄yN^Yv}1nXxlsG(pE;BP#E475R`{sDU%<$! QA$($+|t@Mh?QжPRFE! 6Rr^VFEP" #PZ @#.r;HQn Bw) rF jqk V܀D@j~#@dx_Lk {bQK7AهSΓ9u>"DϏPVwȗ(에O/t7v5Qw(;RT6 Pwhg&MKI_j[szB_s%<]R^ɄTq@{5YfWRV<[@X(ذQ[%Q - ګCPZQVYVi@ e#j(RqU(Z,a-BQ,QPbmB2^|;USX(Bv$19`75ITsWTCl&j ؖ/&D)==o1 B5XSA* ʱ*,泅r&ʱuZR]:(N  8O(8,-hԏ "vgR(!* <,E/0G?Q@P͚} +krT. 
,m**EPAGr#*\wQ;Ih_"mX4$yr}zaN;sr=YK]S(J**TR 'EYAhE5݋FFQlGP }"y O&/hY tiE,_';]Ӵ& &Zl96vDBqOmH^y!*ޛ,?ۢ @DgXk9^4}@)9:hKs0XhƠ6yޜQn vܟQW@QlEJ Jn99(*`NpZˆpZ{:B G%7<,SMӵ9If6 70Q;SUgHP,A iBعv-?bo63oO$2laNi+*:)?:6#޳Z]χ^P\FEP~8AVȸUOڿ[OV*-Z(l((hƇ*(Me"-7 .(.-YiQ܉{#nݨAһPлPKuow=o !( MGʚv7,@A`$""Z3bH3խ uAs*)e -D(Xyv=4ǔ?R0Q#Yc(CFҋ ceoW̭%~!oI"f23GeAE`uJdQVM.h@R$*J%Jvbd8KPDiq Բ8(@9UT,@fk A:ÃE@AAQBJi eud,un(9rrmqCM3(҅9陫 Ʌj:[+@A7(iފRV]\u~0weI?Yлd)~hd=*.ۂAmNza {vv'B55 2?ľ~{\ ~z hk{$ )w EluSIG^RBQW>EQ Ȃ- aDKSlo5;ZHM! HAH_! jP$)yHOJC 4 MK:IĮFqĮFQj;E9r{D UU-(*+"^y}DOJm?8v!C$Ԡ%Ғd.O=`1eDUalI( 7G=KH )J :q#NO="4M"REA/NK8Z4ޚe i:-*#6&E%׼hDh(AC YXVgDI0aNpsK4Jj  [>cPh@ #5]k %P+:c+xAq,`̆+{Y~QU'KB $⋩1t')t) ⹯]raȭ !^SCnPX=)H9+W_B7*lz)͌AAeAB0KB5)MzpBVQPR UL|yhhDCT5 ~cN%JՅfeb6ߣͥ(l>mYvshVԿ[Pe6؞-1eiFWf+Hm-]%Wcp*2Arn;r|88sU][ l\-o}J'ւ[[<w4X ' !?S!|&yhIݢ.Qj !ڣ6#Z 2PA ffӝ¹L KInyf4P~0yb~7yX=eN&aSE./mep <4pLT[S Q#{L )>*R|t^#jNE/+9E}`DU+h7@5ã8!FەE, %Bw]E Ky'Z-x,ԍ"o*\PvkoU]&(STo="7P _MP\;Z5EBT QcVhJEQ=ȂPȂ8EA :d Ku5_r'zE&AQʆj#IQ\ *°EAVQ:1 Q9dFNIfKnKNJVJQ JrFpC [( (`Tu{U_* -+j p|E ⾈:"urO- )H3r0X0-(EkiMU7:-/>Y/7A" Rs LGЛm>ה>}8IK&Fҽ*^A3E!_v&2JB^h*J UU5QPJPZJ (%"᭐㞠ʺe)5e]8JQٿg=8PAu:#T?ߐI`z:RAxH j0GsdVPAQ[&-EL0/R(0*juHMNA(E6Ħ"λt&joe FrM=~6b틪}#fe hq(jq ۞%4'*Lv$إ*ݖ,q҈dɈh) ؃aDuT?,M 뇢 PpP7)H.u~g\D+PUiq+EQ܊d&sutMPitD, 2[AAQPB{DQ=" :S`ZPrWBQTU(: @hyEo-K]Uj:CP [AkakAQkQkP+ 6hӀ,2 ; Bww'ŎIEcB55uDtL pL xsgn+ZrAd (e0Ms7B@QD(Pj8- JuͰT,{o : k)s]WgK 6l+tJ(F>eDRT#}Y[KiDamG@Q`!`<ERLJ7E4nEpPQt8 %`$A@ٜ 4R8DmHd8mER,NEEH/y@]`5<ڜVaJeggM:K)O ō+Rl^وQU-CPHޡ#y-T(;`y48XE, 8X5xE$;yW\iԅ̥#= YQT(8K!LK*+.ADD.I5#Kg$ #fmJ*nϤ *`{pɱNQ|( Z!ˀXEZGP-uTu]ր5+7&Jdg-> `p?UQ ()V 5nIږDA6[QPY9?Z؊zlEA=[QD( /ܫnEvԡ"Ckн,-ICeN@ ) ynߘE7&]FU}nG-Yзdjxvdnڊ"EE(h(jԐM[Ap;75k !9$rao P#Bj5aF  D[[ZIe+ (LpFի(w@9Q/B[L'AܟB5ղC])a4iWN-|c>/u|AA/5"E@aݽr =βaB^ Z+J-;;P(- SC{PT$LAKzVa@0;` 8YHUY 36 Ufm^+~xE@} UHRP & HrIv_^J@TXh^/vFY6֧Ce^۳4 FLS5|4:) Dm1k59AWLM]ӑ}'YttZ[ன- ^H4o>"{N}AY^9Q?Sڙ#qH#ԭ#d uO5nʂh@AN{{qOw<̊";JPY`"Pt%Ek* @(2hD|4~$ r%q5G?`v&+ ;,@)BQȢήky~͓yD@AR" 4uQA TTuPd= k棂~TU[e(0E䊢c@A1u0 2evw6 MȰGXuP@5ȨujJ(-0L.ˎz5n}*ݹsc@a r2omEΊD@.Ew Qa^UQYU&P1Â(eEQpQ݋lAvYˀTy\EfT lN-U-S Te?UQO5d)|͹9+zlMb{2m I5iBoLQȏ-(VB,aR}5h) *E4wwn5 u :.j6Ɩ[(jqyɷV[ r$y Qucfa(Z+:p(zk-}>٣܎.JY4>?ũ"c6'Yۛ|76/f ˥ziAٽ^UOKO (EIE$&HIp7]NudH'+gZ(b-~E k[KzQh{S67Q\GEK3SADdk@A€v\?*^h)PUb /'$2ҖE}n̖SVAR8e $ DNYAtKGӾK s8RaB(4@QnϚw@Z 3AؙuxP@dAG2EFЏ4.RZ6AQP YIztw[wW!I_mR*[EWQD/(PW FAX@Aˇ^sAEDU5wE~(AQڽ[#Br>R?TG&RTE qNOEH(N >2@G(j) CPqZgkDQTj׺ܵH@XrWEiŠȮV(oOj$fu[ ]N0AK> ly[+rTp չ(mBz.!kMDFtSIBZFA|sg2c|{"(Z *0͂vvf 8紀PAe~3AdiRsdg'T#}/%L@QL@QQq@PqdPx4X5-o=#B`["u7zò篍C(J R-mmYg>0=B/^Y '@ŭ( #BCSDUĨ4w(raVqVDPE}EYeAn4*ezj],*YXt%{TsD*YZtU7c5:,3}8>3DʀXw"ږ ˭1.P_NIhu`DVcV&+"fo>[n-&MvUc&AqM 2t$("Z>4.0>if`?iۨ=`әM"-N[ isxmh Q8t.M]AȰ T#H=hd13reU* bT6X='rUR\]:ѕ[QTV (Z)J U-L g5֊R k(pÍ7VMK0%O'JQ@M %/ʷ>Ji@$~ejQ B:#Q °(_(GTOԤ(ZWÈ!( [sPOx4*E.2E4Ȃ$EQ<ykGZ=tT%B D#”4?٭Ṷy Tjy¡_*J&&S[{2t*O@z8AN4?H η;  PL(] EG:NgD㜎V>c5DeKӹr$YwD[èM8D!J6EJi >"eI뻸z[k!Kt֊t(Glܸz7] BWBb'Ⱥ_?yH#01PJODET +C+?CQޤ(PԽ\9/p$(i=롕! JepZtK`d6[={G&?/bo  5D+y@%rsJҌ;h.(* )Q[Ђ'Ҕ0nηPUV(uߊ<( l(d7cv-mjh)2foFT$1ϑ-`Us0R/ZvGdy7{7IP@S5+ PxvcB7ᆢdݚk4Qn.TA'sϠ-4Y0T (YeAKJQ%(.*7+N^ij^S"Z!(Pbg B<$lB-E[z-_KGi%s>;@!Z:KBnL*FwM3'l_֙[((Ti>Ny3&Lܗ +02pmTfT\5C+, GR# `Z@V%Ueȩ(p*vo(A3 vH]mTZ| FRH ( < ORn?,(}T2?-{Ճ֐bLQxd=iE6a̽/;Z4Qݭ(qP@7 |lYOC [BQ-sn  7N@ ZD t:%Hhi4jFv!4gQQpoˆԜ Fwjr"0/ޮOAtV]EoEPpyI˺BQvV2RuPުUh)U)a@ Akg0*u\*K@ (81E=%E+$xu)N5^lNE D4X硂 @TgG8Y,EiH%c4Qš j7cmt<]-(*T) H$¼49PAМҚSP(r)N2SEdr7AXS* ʆRG H]%n_9ڈJ0U oKܮY[$IBUh)JEFf#@ ' k[@t%=QN%A7-FPܪ%(plʵfv!bP) <0yJ#TMٸ<(z "sE9\A{. 
=cqF( uؔȂd䨨I=-%)E (% Ywu#UST4V do(YPFKwzG!yU "n  QT Uk`%bָ6"ZtRN"F@(2Β|8̄ P ɠ(r+0"8A嫜G+ZP-(LVyQ4X7Ͼ|(60C{D SUUj{G\,v+ĮŮz@,*4}S8*PPJ 7Խ3B EURSѼY oo ;N * KHV}G;(ߙ,7gѕUAA\KE`kD"RxEQdA%T6G"-r0 YT1g.ДE-:/uE*<[٣X" b,AX(b,D@Dּ&U E]sEdAgA^n pEl!]@Ĉ˙Ȏ+&++Aaᕢ dA(W[pެ٧ڸ.Q4{bڱ=8 n0-dr=A Fϊ&΄.΂Jёw)6ږUPo镹:С8*t@Q[ Aa[ EQ[ AQRȢuGOQRPشRG~ 6´ޖU裑+2̞o+ IkYV-4F *H{-LIIʁv:g'(lâ(j- (3i}ٯx ׊BA MJ͵]Y)RYʓ^ߕ/gl2Aama}YN|EPx-&`0 a#$_XB((&PE؟&\ךbd@KT_ŬX@~\ 1.yAx.Qʝ(8E'@AWQ{V5F dr,Z AJ(VBGTg?Fd1l-g-d| 5z͔@corθ*6(n}1죦 ʞ0[FQ\IQpiyVe<ñm^Y蝂 x' \H D;ъߓF9sAQ͎,W_"3%Dhp( ]4"cu|qHІCQԆCQԆCPUELiS{x7Hu@4\U߯FP P^PLQtR+ ȫ],PXgD}>Ooi&H&Z|X෾^~Rks?EzUFzDv(jSVKzmofƽeJ)W) Jw@p|)WJ uRRt9Ew%FAdlyTo5V~ٞ9"e??+sbAFQ *zK` ȡ!&E#-&ȴ [י<"E7i]ByN~&(,RN K%*ɱXۑ-YQ(p"HLlE)/P߬yE5HvϚs"ӂỷ5J +AQZPt!Yp6X_"@=!6=׊V)cEKBiFYĢp^/pHGNLQ.7\@TI( ^,`7P8+: EEaIG=(l Mܱ"w"cn:R{Dp*J Iݱ 5_mۗUo$g9~7~x)]QP%-u@E~zo//SFXʶ,,'L!_?9?6&U(wo=.v *c>XsԒiZҬɖi]RvRX>+Xmuq>^D+h4-jLhgH|mT.n(^ޣRQ 7=`ǂK`|eE# X_B,ck:zmHQՖg}͖H WE&{ [+3X%c:sHYK{F2VL2[}:UXUd] n=dndl*vuݐDJ gAddAt2 VZjq}lcCR$,VZ5,ʡ[a+%kߛl:ߒZ-y7y'E2%mdѾm`7ȫ)FZF6G,pżv(M74K1K.Egij֧RI;/gEw|I) %FڳU4x^Lף,}Vuh,JUjs] ;J;u}c= [ = 7ϴ*I'mTг)4mhܫkqwepv,e7^b;u)dA^r.>M2"jcdqCcfUX\ZY`]oScs5,뼦v1Csx-]'*ԓSߔ`%?!G.R|ez)~y׳/~ˮ{ueHnN<ato%斪[66I&$v 1 GQ7T%l1luN/Y/ߙnh6zK*߯2~5GzG~0 ikLP}OyĞ+9^ O^ .ӐͰ/PsE԰s1zV,sݝvqZIge{{wFmԏݹwrҠs5T>!Lȕ`AW7.1ryӇ4d-^!}cOœߴȴꉴpvƠx9.u3m{aiqځz`~Dla%lZXξ>a,SJ!V |Is^d9Py)d46ff$p񇳥A;|wk'$Ma[ l p~LR vp#Ә٥zN}#&sEKaYXbʦ7#QC M> njmZL~.[͒L?P~ŋg1;@#-֙^(9Hc?X$0$sGϨ~v396 %)>Ie3;m!ӍTqܕq'E`,j0,:b,cnjeЅI^x tt&~xG"|}j^hVx4%߻JrOƠɋ{a\fgCvb|9o='/q[a1܅V?Bך2\U|9+;a̱=p pD<蹀 {m>q*szESY{Ǣ<Ȟ}#U;A፠qt%x{ -'7C>OQ]7Hkːi1ݞ#'xrm;;S31Iw Nd_y7-Nu!n2_M@pe!xӸĈga C5X6o~C(E¯g߳c|,{p9Hƞ<%; qy_Ab>;WI >7?m[amgsdīm#c檚yGa1I6p8 pN畆1_8ϖ 7Zu#fTl[lɅ-^-D=8UcY5=FPCLSxv78#UwHIwsiνgM[<^ >TIWl*XvJͮ$S{,orF7 ? |Q d~9G<{y&NX婢c7ո2d Y 9;Q9RQ7ktPJM _d$H ۢC <^̎\.Q XɁ2ja7۔殣𤀅2x-)X)ͤAb{VJ4Jů)qc<-7gq : Sh-g>gX"g1QqBn4cO2Lyw#:,4gdeF(I1;?_߶e39"KEjBfY\>,)yޝpA-?J]FBl|uɖNƠbFκ~I-?l$>F(\僈4XRf?𔉦еYqT}5NȋmOOq\ *.BJ+iv9-;+ DlAn>գF'mc@p ` 560\ buF9ΉQ=܄p= `ň?_`OZ3` >%4Vc:"pR'=+ lI[j`sFzx^를x%BgO9-3Ԛڜ:)pᜩi\)͌GOX_!1Ggqcȩ&fVrl*.й\gr&e[a"p8ǹzf6tf]&`'Z&/q\eٖ=F쎃|sǞ&Zg㝒~!0Z4p!GrP'LyΌP 7\]!04?sl& %s-'<?Vu 6zV ˜ Sef2KcϛxG ֙2gBT>NV[(gEA|8P=;=lc EbGr?aO`Iy f.␤xtCL0A_^VB;;/q"Żs0*g sG~u1(Y,(ȡWn0Ê<<ȓg"7gnLwtn^"2ob2Q }=P,vy=rzE$M6رmwgpG.:ZйF6#,x68MȋgAji{JR) L=TL`*&%XT1M0?+t_bۛ4*<,fhK=#^IÏx 'I]Gi _>}B?b0c;/-od鑄 m,ťJhtEmsZ3-! Fł 0ۙv| Dn=<2$lq 1ND@WR^l;CylEVvp(w3rv悥 {,Uz1LQdlTd-QYi *;mo}]CsAg^mh ܖzϑ8T %՟=4 SOʽX#{9F05nql7aq5'9o1*0VcT-sH,"$2< y64HC/JUFf$*G j'+{HrMS`vWb);n=A(d ^VDz xU}E%;KywzL5c(<xgEDMzL5`"voO9nS{4lS+/ߧ?~k>0"*|Cׂxoz@`r54T߅Gr5n!l5]79F%šI#a-5'I-vZI#[3H>ԦG$#zO*XөnҰ^YkͶG˾{u%_1(6 /3I9YB4&>bOHt?o bڇ/H,WX7u?ǎr5!`^1#2uK0`<1LӉq惦UC&)SGpLM^D ;/"$9)ܣⴹw68K_yӜƕlYd[)Ll`l0){Y:a¨ITg1%;,EOEz$brkG $ wyZcfXrAĵ#3NA%ҲgKO*ͤ[a"Y~Ч}N pb>X5΃G{%?82GAZ4..3~$ltNB"dO0=9Qhu. 
/3XEmmjs5= bRWl䦠GFYڭ3d7ɖ;Žt(ȄOp8&wuB]P[q)NOD¾Vc~n J7(~փ2w1"HGvw%+&byW(Zs<X3hDNQ dră͵"`\<)+n1'7H[= \>BVj}l(%7Sg pՈ¿_]#Y)dDJufQmǏe{&է6P l!_?}s5l+?u0ۯq믔M6aRҊva5:/7*}=^@ze'oI~j||&/#?bR~5auǾd'Vo$+j:/nd:@ȟ/Xole3ocYp}yM&"G~,w*kQ1Lj"{8~f=ޟf yWL3ҜK"{x,$ty3v[R{^ozZ*泿puP/-䝺mƹgPc9= }W+>jЫ~{D>T=:~Y|G$iێHk\u jާ[gc4M-=Y֭`f[U-O7^k-ޱvQ7l7׺ER{wywf1ǵԞ/wjUֿ_1ߘ~KmGcŊj F*흽 (Xj?mM"H7<`j%Tfl\mz[LEOêWLnKV;{JTdfz~/̜Fguiؾ-OZT wH-n/fuv짩EKǼ]h@DU[uQRӳkOX H+go=c_=.k~+;kej?|4_Xy뜤g;fPk6;L}5zte\B>j$ZiwY[m0G,i~ŷ# 0pXخMmM&`5{÷B5MyVH={Eej6^kx%MU =y_X=#~"ΰ>S4˜ΦXĜ/ڜyM÷5DX+G[]tvݿ1ڀ,o5plv=EW>DnEBQl6DɗoP SDVx-=TZ /e YM BK_%Qt{`y=t3^Cl9*ǙWtQ/nRL׏iOdGnA|eb외^KsZ x U;0W@o_[rGSt=0kvK`Qovl!CTcNE@b_|{L[kn^mv`j`z@ɡ?v-=mS*[O"ot;TQEF Ω T6s5mE"\~;K&sN~}W}3VU:qtVea5qXQ4,o{p:]j~[\K {\M|#\Q4cgaN~-FaYxKN͔N6t4LHۉ_@Մ4a'{RJ@{]ÙejZmw==ItAY\Q_䥎.;V1iG4[(mFb6d~_R8 z|Ԧe"VMP|l(ug6]RP/̙'"/n*I[׶"mS5BJB{/~CV5SAT_o9J#GA.jq߲J-Z[:㉰mZeMߦd?Ž_P95<⴩vҟ ٘492:!/sm@~Zm#^뗩Op0וߓ;I__jG[W6v=[LS e=nf!Mo$K+<9ؖFꛪZZ;J=z9:;>MyFGmQP-< {uG\6/ƞ$[JIۓK[?z<.wp{I(( )BzE!=EAH!=hICz8`Ti`(NmAHڒ66m,mόytoگu_Nɴ^)@~9  ?(;ޕrwzC&O5oL㉹-(n/onݡɳPt6vY4]&K&jz8wUki#ƞ~Lyx 2^~7@U qECTz A4.h|EGO@gVnOλpc:md!D3ZP(=h J{cƥ%1 E9N%M_Ri@S\2GHlI~So (ţt dFPE\? a 閧ْmʣj)Mn@BeG} #*kb?ymJSj/MCϵ3PẀg虧(<`(x{l>Ÿ6>`?y7ln!(~ӶHMP(&(tӶ@M@$& tQXr<#}C)HEo4*=o 7U#PD*cL|-կ贵Ek邲)2_iTjU6qd7Z}]O~of[og"wP^xd|S;pGr45mrbo"JAkDv"J+ys=CA`a"AA ^nzꮽup9 H 9/2YcySIc?^!CҖ^BIHj$$5$$=Qtð: ItuSBR]'!IfrUoZz&WWM˥? &Y,a'BZjxLɏ("C-C @#(Z[VH? kV_f(0Y$oHʵBMZ 0.J9ؔt櫓5ׂL~ԯ[MW1 U}Q ]N0-hMQ\a2`*أ0lHIl:ZV5@\TSI`A VgyHyߌpu+vv[uN0!jsDlIGYsP(Ot zQ;$.Zw.foKPvZbt3i@F 09h8@RK'{~JӇ'oo _jfl9XYu<Ҙ_"A8 %6+u (:CCiK~Q)B~dz[k}KQs>D=*Վf>1?j+r?Q~$$hDEDfB:!-(U$)]@APa(P̀YiFo5CA\ \l )`>AK"(=.@iNAXbu؝h@wу (X"E q?EAE?`!(5vK - m ,Z"KCN~ Y:x4tJIB{TDGE8~ LwHԡ4@m'$?Ō6.EyM8.k5Ad k[ kREH0ql曬ڙI4R"h5YF؎pô&gH%AP ilv$ @H[G{M+k ~DP(\=!d4C"m+`42E/5UH2MQ(G(mA!DEQ!DEM^U~[d$\HMQi)wQE@eh\?zp?tG\"V4ikϷۢHb-DxE]@>q,XŸ/7qSڟcqI-PCnT};|q]&Wl{C D(xy\GA=qMj[&F:ԣV(s{CPpJ]9ti C)Mm/ar90LπWAMےJЊ"FQd@[`@[`U6@P:kDQtԃ9N(e (f4`@[1q-6OmȡP' aBQ_! )  AP(LIP΂`tf0E;{qԦ(Pep@[m@݁UeE% )HPЦ("ʀB mRB6%T5nBb PEd()~S? cOE`t~nÂPh CE 0x))BQ"))$~PdxMjlH%^PHW# ba"v=zE%&B)0,CKJ 7 ID8УRRuLBūكi6LE SŵmͷS mZfN u E"G-ęo84n\s&2E f:a\։k-*loC= ˽d@Z9" 8@mwZ" [BA=!qH@jAHE#~D,!8Ѐd*  -UЯNqQE)(0%(QOQdJ(L E) KM hIM XpWҟwSmIR;/0Y` @i4M{V-DD;Twu N!?}-K"Xͨ(_˙Lx4! 45J m0X;8omABiUZjKehJY6^*$Oh 7E!շ@ Н[Ao ҷI}y[|[=}U "I >,z@,]UUԁ1 7.<\R-0b©̒jx4j0KP s(-)TEUl+ B*dE!a"T:/0, 0)!tUUqT5Sd3JZ$nAQEA%E ( 54zUz݊"%n PzN=󀶠l Rֶ@[AM_(q? '`ȇ! rahK90%_H"y-BY8.to p[BN ݘh==HEA1^Q"(G QGGQ(Qt&HEQtAE;$}]lec*"((r HJ+:oȯHo. ]+T.hǂBڱv,( ށ#GW9ǀ" -b' -yږowuu^ﰏ:ހ"v x4]N9YA$ւPk\Fj%Ȑ4b ӧRtFB%қR2UD@$cj8GB!Okk54c'mX/FU- ҩ  P]PxBvGtꩩ cf?t @AVQx (eR j&j#R"=^; OEHOiGI(^`\ 3,RyVRȳ* R(DB t(ޣ*9 ̊ҽfv(|z@Q!GAa!GAa!GEQ!GT*آ:!75]5hԉad.;CѨ{z(*9>== mPPQ{jTr~Oۚ/aPŎls >_EA*4Ze CZqR1V?6:d =;˥\XOeuʞR/tzp/QEښ9]~@k[TlFƅff-ϊw {/ͩ?z~wћi^'5]yr?K;c>ϚIWBoL+GYvӇ~YIl/Y"rLjL 6Q-&JLJSI{7b0駃D5%(-~[IKόW'TPF(J#Qe(2DE) EQ|f(2D%@A r?` P M&fdsG@<E E=iOqMz"Z ^GT뎀 VQ? |0,}R `ɕLlg_#__@NQXpO%j{MXQDh:,e%$+ P-ez( }E>aEm?Yo㷣=XGOc葒-k[9xq9Vxc)a|տ{K7u7lhxb9({`cP[ R /0x%PJBio$B ie'm\`mQ&ʳR|R-ԭ7K?4a5*(4Gu\dj[` HI&&Y: im٪(%p*:[}e‰l ]7 BiU#d7{Z,nLO^QTPP z倂Z䀂Z:.D3 RUrK|ugG$/(D=I۝M=,TiQl?O/I-&YZPZq\R=УCS (< :*>Q'0Yp@[zꀇgK.Xp_@[p̃RFsvv\@=uP/#SյSUxVUozyUSȣyȃy:l8{AD=E粠\nQ|.kp.KSt.+eAZGEvH粠\ɢsYۂsYWe-=uu\ֶ\֙sYsYQгN%%-)M@\m +<-a';IKTީa&P٠!*$ )MABZ&! 4dUrdoc`ƁD22! 
&@$0$ ҖH7QXH@>\0avGZ?HhK۬g;dJ>x CzA  MM>X}ػrdAQBiHabm 99-6qƽ PqS6 أH+5Q$aQ(oo7__{vO9籠Ԧ:x:'7ֶYY@x4+f\z8O *Т6e% (Q%Aa)EQ-Aa1]mQ9Aa==T!b3OurN/(H?p@M@܍v?| l-%݁;wcYTQMAxU5dtx~e@oW% klRi6d0%~8Td=ጳ!u)[Pr+&ݧ`~d8}~M% P(?P\ۢ&˲mN(kՁsP9Z=/별l͗éx੩]"<-&ysv++]QJ 'OQIߤA;M^"Pe QCn -΃Y3G)24WeWAQ Y}"A%p(T(أ@J_]y/{P,zDċ)&2=]C6W v.~& %yխ;;f7W{7XУI5W@k@|4ZݛA :qjw˫';ZRr~~x~` 1W)- *қ 6{DLE.E"Mhi*4a&f@qq o7m.'T>TrD7/P܄RMm⦢HqSP)  EZB-MAHKS4[kiҸDKS'4ua7u"HqS7uw^/H+ *QSW|FuV PaXjd(5 pJ*@?Q B=ԼJsR52+|5iH6з8|#OKX%KQ(*=RHEAH*GjSɗ97sz}.l U":a-⋑r( 5ߞ&LbCIhky=x2O+z@FfTQ6AW'Gĥ""hw R/;EQKwЇ(*DEaTaƥ@XlB ْ_=Eϻ~[E7n n0.x)nأ<7y٘VǥVmÁto;0*$-qe(h"W{]Qq>BI@􃀉( },=bCaBa+,V@ ^C|P93CEjh8.2"(yhPEAW=%) R`T~(JS%lA I0ˇ\>yڂ\>7J*?0vy/fх[nk@=ʷ@nuc 6~yhX E_ݒ[Pq&eGQi |A@*$HHQ(* mqRqoyACM'F?n%ѩ1iI(Xe( `}, F hK9~;j0K3@a`*--tIK܂J,'&q ""L%e@Y)Sֆ^ b{@^j9"U":$_(1iXOB{E|P^G!'|D8{BtfnG5 /Z/vQ"D^,.ֿڦ!z,Cl?ҷЯk\e-rYA?ҶFJ. m \4vS1k<D E7EMQ D;bY e@f0WX@.J|0On}פտ:"AaOEL#hK`𠛡( O Qe`"BDIO#lH`D$m@}{)^xRY^[M*7 toAh 9)S=EO=<`!(zAU/^LPP'e95mug95 "h@@PT'cJۢ\v ՠ:Pm[ NQf?ӗڦ8ɩ]kNrjP@vXSqzG=E" EAR ):h .EI!t.H Fȡ aiHP_I!ZM3:EQCQO :TT oEaCPTU[AQh-C8x)mAA&{ 4mާbfJeԣUGVH]WOOYhJh@۠ C)aA2L4eUWm5zIJǑt-Ң%<+:|t{-@VCnbMq ղRHYǵ~dMԮ!"<ȭ9KejQ%Ϸ\HEEztQ*Y RT=3\`)9%1"hKň4^JޯRQ,i"Q,]E0ov'}]DQ,96/ܵ'SUVGUA z( eT{QT-@Q--J4(zA[ 0Жro3Vk_A%|{E#ӸTϞPgªږ J0.Q($-wt=-&KS[TO_(U#yMH$>ն*S>خ)Q)Q(/J@% ʊҁCRMDekKc(m t 4JW~-P KPMU۔6IYjtH+ *jU)Pk&?_}><8z[jJWZ-Jꖶoa\wz- etxڟ @=n~R;( MV'PiìޙE/g u ]ԖN^퐜B'ob'vN^:9yN^Bq+(t+mW@XP Vuӎ>̂V-d.:y4f(@V8 hM#DB(ߣ  A{3?!Dpo> Q۟hא6r%{Wknc߯}IɟFxT ufOe]({j 1nmo>כt*_ d#Z295=sיHvߘ=YG풗L -ӛ=RF!:zXg,L( de?_ ʇ?y\ж әP =Bv2E_mKs6zci1  ?pbLuLo憷_X|9h򜻨z7ByJwkvI @T0թPT(R2g룒ټK(Հo%f6J&r:Ƿ>)<ߊ$}נ6U)PC:N;? 3\8Lٌiڰ _]Vf2+ilPl@Vf!2Tm};xfe6+mY#+mYZ3+A1+]febVfOUqn[/W` T!YҤRmjQi=o6eB[i3&hf$3g*3ɫ퐜 ;6 $A3,c(8zuQ@ԁ5T@HѦ #UA灦 ?R4,PT P8D0Rpy 3 K? "٫LeZPE@@C@E?T6S5u>^wЩ8mVGG16("(-im4}Nui :Ζ{ |^iKɛeCM;v<O09=5y[Gk/ZAziɃϥ\!Ζ(Y1ܗow<:@OӲg \ˀ" P RDeh <a`P+8j3k[`4 gEݬsJ6F~&5ǸYfVPvSE[P%[^QoCEڂ? Da@SK #MOLsGJ$!Gmf˗m_~>0+$G}nELQD5Sy @SD[Ӷm 6E n@p z5Ɩw9l@.Ju"j9M;݊q^K̮|O)!k>2] @",8z>8ьSPTP%@Wv+vٱ4,䡨|Vy!THq^\Z8΅n2U㳗Ò RY˸SDQ?QO@S8$n:&x׵VЃT@Ѫ-OVsNQ3SzZI*i*EzXa{RamK;)RO H(URmq] @Ct#`EAE3Io D/P~ y$P(@}'h֓LK~fDI'M~!Ji8(g&7(Vo")HBo; ؗ[&KahK_4zPTA"M;It3=T"k#LT]ŚZKPFSэsj h"\#V2'|ex]mf_9;ŵ" rt6Iܹ?..|9HfKHkozE[GP[T\QT\PX\PςݙXszTs\'j밨jDPquN2]F*?UK~b(6:_Jϟ@9XTP)5wAu3ۙ!D0z,:?yAo'-٣ Ywgꂪ!~iT'0Tm%4&'z\ 2O;mz.HiA6N:EQ:E=+uPXNۢuuu:.^(Ɲj j^kjwwkڴhMK5N"(PTLPXL2RW Q=0Aa=0m4P=0Aa=0Ai=0dBwm0mi;P鋖XF%y-cB[KnQ {Auxm[Kn?yMSLn@ps{a@ ܠ: vhaX %M!Xg|.Z"@HH#P@$u@!<,C֠HjP'%iq@h\4/(\oI\_Qԝ38*ClWBenUzIUu!xFE8x <#" <#z<ؿJ"ucglpfPowɀ1+]ޠOR$d{88)uȟ m)?P VC}ihؐ䟊QhU BͪD.NuDdL$kՂP bQ+8x@ Mu7 +4!UTt2UR%3~-Z"Ic! *Z# ; )&"QHD@pdI!qdK!CMz|KگH(nLmMc.t!E:ɭVźږ/Z=R"]i\߸ػH`d C'#`A=ɒO2jң cCib!iA**A-Damy4+tE9  QQ~Ce(;TPH $5TL{uD4aEi::(MN$ ŶZ:.*ҰqwgUXo. 
KaL3_8e騀$ (% 7Q !; ;l\H.E@ KɥRDr)ȥRtTO^1*kR_M*Ԓh,Nh(4U n lf:h%-+t ;ɬE7ny;EmPݐ.2MU8%~b`PVFIw|?l ^+ Vgsۚ; =~}9 W}8d~c66VNwXI6|1P?1kE-o;xl+ RF'(( 7D@Ҁ3̼:)xJ; 2 3Mj+I,zq{~\;-Wܑ[AQ]rEQ5:EQpQڎEyw0_{Ԫu%邺r]?Wv0.-L3E`LS-pqkV[;(ej{]rB]@- ZCA9;-֐Ƕ4ƣ2Ο{e*򷏏ljW1m]9JL QӤj[˅}4To 3~RDEnKjЖro5@ApP]C]MQD7:ְCh?QHi0.`'jXZhBqk*^ EQ(DQPPmAn0z$v|O@ۂ vr;`!6NہA<⪠!IHkf@tQd]+ \}S&].rPP(*(2v3fTa=/㸕 D5 ^kX0a=mƍ+(rF FFGSOk["WB ] !k>YVYWF?qGYאVQUU/^k4dG^$9Ή ."&L챭Hc/r(\؅t M)RNJmwas!%(|r+tK$M (%(`7?H IQVlQɧmQ\M@YE/JQr+`((9M9#Y``N͎ԟH4E&ޓ4Yk_!tn:|,|T}ɵOT Vgzb4`[~S;]@?&mӼ<NRZT>~mҟ#Dv^o/hcNXOD,cLQ$F ?NESPӱJ_0,U,Op"I ((| ^w םumNO;AQFQaEo@FzPހ7ހo 6Cif@Έ(TQ(͊c^AdJӢ:h)X=ԔrĆRoǶ[VfKfDٌ(1q"l& 1۴k+ -HUsڒD >EVDQR=YQTP[N4Ls{\|TNM 2h ;d< ("2C[PAQDQV1)J# Ê*_@ h0@+Њ`\@>D& ePi (ƯNfHKQ$(JVg%QPMݶu قP ]TlXPa%؊Lk\Anh^ZNR@Z1P% JKii9T"Ж-9_wŎ lݯ/> rI)<.t45 @ZDy| 97(JWOBǸ9Ǹ۶$tmPQwtn'!Ny}W'/J@XFQiPe[nAXFZZ2-H4i^ JMÚhiQiH˷QiQe>CRgΫ`0%m3Z YS-DN23OD'd }ݡD)Y;ʗ$ׅ8׵3\6/8XZҗw<$/.O)8YԒi!=G0ʿiHC Z*q16wwya&6H9νML#{dUD! l q62聀8 %s>$ hfowHwlo^hz HmJnI}^QDWEyEQh BEyh a>(${a{${5-z MWf4Qد*BZ}M9FA`EF)˨(*TBlnz|[g^/o((ߧaPu)t^By!AՈLEEA:tR )TIHP(/0Q+EeG-ѵO}^e9tf?YpEY=HBQl(* Aa)i "1  +0vH6U%fj)DZ(ĺl d]{) E"V zASchT!a + i:U_H |#x`\2MⰄ$ݧ(,'bwbwq[MQXNeDeC,cleh2J;(v <v 鳯&dD+(BȰ֊"N:ւBZQdXfoAooEaSOf}wɵSCcTVo P,v (9ڍT(T<5 E5`&vU<1Ggb%ҩu2{AG2/9RK PTAPɌ/c/ ϝ N_FNw(/2ޓ. t=ڪ?=[ڄ"SX{$SƥfKv\4_`*^-wIJ2<㈧Kl\?Ѳ~RXַM.EzU %+-NZBaj-7[7OP4{PqӓMPSTzLcLg6*lThK]S5.8/J"zf 0!4j0,BiSDjz<-P7fT.ieV[ RUr1!>vHV zN3# (RG?OuQ:*W)Jz_oz) F ? @yk+GU@6Lzp"Ŭ&( mA?5S j:S&GH͠!(J$+ AhV5ӃF# (BmvI}a0[AdZCK̆6ly/_!oB)zGWa\̦%M f ԄajPx7%v9rXMQNIt]23Úm閭o2SED-%(%%T *|M|f7B& o=<]ui¨ϞLMidYoG-gˠ*Sꙏym?rٱYfM&a*ػ/sL\~삨6;tYl%.;eS7Q=fXH/j'*J#X#)-_u]ƣ?_.J$Vک%Αj5EػM}V( rU<뷡?MS;<7ԗã "9~>Û6 ?U]3¦F{is(;D. ҂R o-' UWØ4S&sХ2.ꮑ# %H40M~!;K\Z v2:Tճ!b7<[uhAX-*o 0&)  0޷Qi}!I|DMJ9- 'VM3Ȩ"ʄ2gۖZE/b|#p&-L(x:ؼ0tq!~#jNWٷ2!5p؜< 6 ֋x(Bwj(YPQ,BhKa=UT'qN%^_ɂ8xI9 b=ru-D~dX#߯E_EAR(*~aN ~aԭ*LE<% 2/Q WJ`J}٣(jJ ]3W5ةmgwj]0jI@CLP)0)p~-@9ޚ]uC2)?#];hI9-Zj8Iۻ,FQU9E Y@AF !(l, % (gȾ(*@E@`VACR ن> *)EOjQp>--M GRetI3<'(\Wb(*̠CM*ɞO[O@Dl1䔆)C"g /3ѓK?H-蠂6` Ө\PFJK'D O]))ZWG[Q$ (% "DA=Pwf^z sJ:H#t"F vNWP,X@ ֆ4,g@~Dx%5,+ (3 1Ym qpvCuXZ^akK)?ξm1䍀*ՠ- ]:Ϸn!uA?{AUW` ۑ>MnGZͨPV"*> (P{irCSTq,I"a b(e>_ETe} q:]op1?b=>-/ƀ.(P$/m!I^P`R[DqQDQHAPDq4. +BM,y!KY;E=% GPX(h"Aa%^m" @ABQT =ABQ&u9 30_@ 9-6(`TŢ[JHA[$=(H uPq"B@@!̗ |\EjЖ v!*j04jENVԾˊ"#(+"4oG;FpZ&= 8xPhͪd a a&'Rξdz9ܮ_ע0:EQ~Kv86NAc(VقT<Ńai{l13f E'3My=<-_2QK&J =$(:&J 8G+V/-s<ʲjH0[^Դ}% u[sׯ+UrՓQCh/V?;PF),-@ijg_DIКSPYI$UPSe\Ĩr9% u(/||m]j)aG\%y7juyzZ8ćU6d6JG@hNhRd7 sh$qQm8j:*%f8$8θ0i 2ZB(%$h e@%$h Uh ZBRZBxgPB!mQFJ( )Slfd(%H6RMh$@)ɦ"ɦH)n&J89lRlR/#b&QFu4Me$ܩU((^@jVˈ RՇtCT Es:hy!JFohώb)іzDQT?-2  Aʲ PJ~@iЏ#ZЏ#7Q,4H-4hVEh #Vȼ9A ҤU.ȟv%uR2PaQg\Գ TvBg»B5jkKgjiw-IK1ϲ-eZyum/QYܺaL eDI9QV%JPVOˈROhPd_Gˡr([lhCK]t"+@D8fqe;*QRZҏ_9D))e +\ BO[P;nmeǐyMwcʓ6L4kCMj(IGo aʲ0b qh( P- PֳVKUQ%"Z%1*[و *ҝќϙ")՛e ԝQܝ͝aS;z׵vgPssgp4sg ;D;3ww a ( u^B+Qu6QAecJ,13DI(/}D푘K;Bbh.U{b+IեOFdbODT5#IDV}Ɖ;QᷕO8@kyDQmMZHkmEt ^kk.mYץgF@K׮n)N4i[ԝ!2Ԕ\A^̗/*[[TC|eD,:K[JU e:'k<5nkSP]BXlJ8>^g #-X`A.jEr{)sĞMu#8*b'EJkp7~M[~56K2w`1X h=U踣`,dVABT8cRX J(/e ?j:eϵGDFҷ-ޡ rЄ(0sDgO>pY(")V!T||( ѭ0 nQc$>|^ ,(T8a:AԫJp|ze)@P%/' kKuAI (֒!撖"K4\QgOӓH'7בED +5(#,Y[Nq96D؊2"A [H&it3[,!JyDyxeRbHڲS,!Ԝ2' b-iTr*E$.\Hۛ)(׭dNaѽZnge!9t~e5QRF5.| VPDDv3ڵd#z{77AFk{oz+!ro5G/7zv+uVR [I+n%nÉP!,] #?_7># "΃! 
bzBW0A=0Ǡ{Uf5gk h b[=<:@;Bg5H/I.I 5S₎2;\Q9AQ['Q9;R޸ $FIRU*#QEAA'M@V ֒?DLTG( ؔ, "#t۠:B*bZ'zѥ`E8*oxEIȜ^F1mL8N;Ig'B$ L ӢJR*`(%JI~9('R'# MJ #G4*` mT8*`.VS%0?n(9EDYvQvzˈF"KpЕ%(ϒY%G%Gx4()*_ћ( f&z3e":8δup#ZL9Z˺Ad5g9932۳EꨚB(P%Z ,̈>nMZBS%TE+I[dPA JIE 7۟ꑛmS js9_l3o&EhNo'rGe -hƶx)jJiW!yX:!M0;$t5tGP, A9c,rPYq -r$rY]Wl8&@ip],(vJ|S5J|jIoJ\k)76i۶SPtۈprG~fܟ\>l'ja)PIX:}@RIτ)xa)iB%uSsr(2P@g+Aq cq q5j?۳SW.0k*)S}B-w8~Պ5f`im( u@iPZA^ .j'(3J l AZgAͭԢ:j * LAIn2(ɭ'f ::^PCOf%DY=QVD8E;wV`u;H=Q>De9- ۼ%?}Q"K~"KrŨV< \` 1P*drF >[XRPNv _JpF-gk\; dAZTBYRp3hVw#Too# jƒH^k<-VmTAVQx㥢5F+E! EN gIe'N\pOMIR m ![:I'@Rɥ ?X }Rgl#:撘2̟@Qe}>J)n/wPZ0TȊnz!YB:ժIH>bx|iTF? 'ºW%$|@Vɺ:"-km|FP]NXͧt^VR  + 6xR VEaG4ZJZ% W!Q7* .\by8?D{Oz;Jr2s;ibQy LL* d;H"ibC.e±V)>Q]3qDE MVTk[>!]*WY(7fe^c%`A?X*{Kqj#?-sI}CZ?>:RDnCkJZJyŽlnnT+VN5dUYŎ/ZOUAe2]2v${<]۲7;Kxq g^^sXFD>gEϔ1CA#)D9uE 9kwFA5A]bzRFKA ͥ%d"k"ۨǯz߉˽~> @$JCixm|$Ess*)qڀs.kߺ6-錿BU|SN*_1"DY31PNZ?1( h(YR Gb޺A֣Vs@k+)Z_1e0u㎰b\k pTD ]b(y)qw# ϱc z1aVYLԇ;35XYS(Sd}=sII-75Ե2M 0 C$e$C5JYd@ 3D Ϊe|DD AF6dv؆tTEu (wQF0,BP2de0 J"@YXD@SeaURG ¸S""6Z6SS't1RvN(/A1WQDY^AOY±b*{%,DY$-Y{YDҲd@ޢSD"݅TReEh?U '$)}*0a@>ޠ H3"aT(΍Dİ,cIU@Ƃa Mj$ʖTNsʂ#E #Fz:A, QӺhYGreXΖ 9CY5o(/:E&Di5QdIJ 1!I 2YuѴDTꚲDXߤVQQ@:Q࠲ OYL( 9D*8%VZY!k~DE}} G8GG** v DMYκG4ޕQ! j@ʕEs\?/7C^z]irOoi%7Ŷj, d  1֒p|,Fcob*GXœd1AY( Ld2kh,QS沤ePUg!Ea}4&ʲA22Pe9DI($c')2y J8eDIYLT|"A D3dZ~^YG~ 8횵"~K$:Ckh岜﫢 !;zIvCh(EB 9J%(yQ%4|K }Bcf̣ۆO§/x/49G\OO#i Y IYjBrf$ZKJ3ALCV ͜tPB)gGf4F9GH^4g%iI),w?e bi#[$5D:ɑ]Ka a I*Y؋A# b]HSV]JJEn TeNۚg<tO/%3۱}% Yy6QS+_=3ً,'J.c$z%(az?E2EUxRTMU6j3ffc a=Q׷#a_LzM-^;dx;h,e&?%fxPYT&DYՓȊ4c2T< &u\%r)yWh%HyaBW6 ^Rd8c,,t(uCDiSͰjc3,IQq\dU{~&Or_ ULۈ)KzP,ۃ9>Ȓ=HtteAmGcրϭT=eDUdwi_OK8DFiPBEԶ9) %\ WH Ґ(4$( e~g(ot%QkBP3ăviT!,jUYUFH~yTH[e@(mBļL|,_ a C̦_=6@.OajYj=ƫZ@A_U%tb n.I0F HDki%4JPge%FbPix@"eHlK)ItuߊNuH;+Swb8d5HƩRBq*ᄳzg +A [  7 Z "A:a*շGcYoY1Ae &CeQ!IPXu>0H1cI1uf1מ 4dTmĬue\GRdFȢ+WPveƂ~cj/tH%ʨu5 ic0^뀠'U#SFzBQ>f[kzMD9sLߍ"̿6.( ҆ @EIŶ,m@T^H5NF5eIOiIG{aJ#ieמdOBگydzBk20Pa|D&\D efQK̔5"}lDq0ec%ĝ8)K\ױ(ڶdSX֚>:VWQ*G3r7n(ZHk7iU-$,f2P6EQӭVZjY(0v^Q 9V:R`sMݰ(DM JnDMVQF- z SYVQβˬG12^;\A7sMd @BDu&8dD~EURV!MrMXfIKҼjr7WV~^EKQkq4!mG.Isnmfy<bh? (&ZZMQ!JkN(K~>|qť~AQ2[*S(KZQa埙 $Z" Y]-/ZS0Д I& @ַ ƴ0!$1CH0L *IS՗: yF5W^Ǎ ^:_&:,ta@T7Xlz5D)mv6g,F ii2v+D{VZR'bJ#JNmȎ(9i (GY$̔ <yR&xj,dlpo JNmUȪOm<+]ݜo4$YjQ` P9#aw_Z K$Ed)u* <5uڦdAXO 1M ^"̺6U81NEX.mλ1Nw@E.T_~XPG:?mP CS[D!mkpfR=_DtgA] Xro`5Y !5~4B#% .%"Kr>V*0&Z)(?<"P︕ D˲/';aDI<(5LDAG<˦ Fȶݭ(Du_񈍗/s{XS׌? C7Q첪mJmSUoSp27Zdz9 %۔(ۦDYU(/[,ݖwi/Pm@a?x].Pq=w].Gt%M $myer'!N.#R.{ A^Yeik6Qk(6JaZ:Kˍ68ӈZÞ$-o|M<֯࣠UHI/13ˍ(seXؔ01یeqf8ddXO7ٖ%.!]*s/'䶵QK<_DY㍌G:ZXI%?_p P7hG>8#k`L'd5+C&joEZ޷U[o4S ~'| 7_Sh{e3+ЗsR0|xOFRUmetDY7nSPJt"ȺVB) ١ST!2"覹wZQT[Yn1P[\<$ -ƀ[LQ!fdj. f*$ d@A44e4Pl3#"Ie{x.AzSRވkղO$ŝ RyG(eGm(+$hϋƃ(N,,1qs@㚷+A{5Ѫ<im TMqqe\܂.nE[.nUsq\:Apq *Ok)>X[(&ʸqq VayZˎc@T,MYǒ^aI0ڨFI0>a . k1Qĭ5EbS%6 AgO#A,&QDC(M\D'APJKę oĝ ( 2J%h/ % 5ˀwkd@Yh%%[ePEUR5%J:SST un6Ei0*KɩUڐBVӼ+ cv ގ۱x;v;,܎kߎm%cv1hc,%˺whK:,YDiiz7_fNfnw/KP/L-_i?IZ%A4!5!lj2 t!Ar[9@h%\-X|4E,2:Je\u~ha##J3Aյ]МFt:8^g&.Cg$%MUrC-%vDTpi !J\ ƶ_>kԜu6ԼW*AYܖmbי= 2U~5gy/Z"+([]| _$5S͜~qmDl:K7QNYrBe'Ȓ* *DVUƂBPtUd *TyR*tu[P\tU|U4ןĥʛx",벒qLynj W1e}Y>V5BzHe`r5J91(R5+ס)ͬdH sm6Z>Zz<BHG$wNd-ddReœql)5sI;YnDůmZ,Йb!ןcjY>f.*k؁njaGCt;e{(ۃKMEllڳN9Gq,vdT1ň,(zo(mHvBrY țQݹU+l_ DY(y,bA"Kb"I( 6J1 wc/Yt6 T:opgq ȟ()'*e@'*U+ eROU,,#ZG{,DqùԽDʸg(8 ,DZM n! 
0$͜Ύg~Y@ g]l Ipi9 8]E)"ɖ& "lBӥ,YPWF$C@6( ʈ5m$<$0HA(nV/Z+xbuªD}jOև>!VTEkC+HehJ }B&R*BUznV ZIZJOT>sU@YȒbP9d@Љ,+ *1% UņXAJ6k6R!}PRfD ]OFStn$@'yB;qJf?"+!BRX+U&& 8݊ ʎ])jd"x%ŮHCJ&xQ[#RՈn(Uedj"TI@N1SMtfDYQF5%#JޘbޘP(d@ 23&v ;g=7?i[Ue۴ e,Y۲,钨y)nϯ7,QF, ~QIPSȜ'r+(i$V֐WsZ6K#3rh%NjU\?NʠWn4,IuϲFQVfX&MdbI酒8IͧX(#O݅LժlKc/JbZǟl|4 1Qy6 j[u(/e,r@E1_2~`, #555h^4bel%,aQOC_'/x]ۖHZ>Ǎ|ص|5H]GO%#IBCPyM[:@P.UI\4_?HG3O|ᱡg1AFO1Q$:AEN< 2(2 IAE;`*dN<~E'bqnq5;!znKI[&uy' (J?-IE+8d~wx /3PtsY> EW["57CS3YUAy~4,O(4YRYLd_,V ȟ$PJ2( 5=r(hTW33XFbt?ͶIMCHZ-Οm)u_'X6j>P ʊ=|3Ԙe;BPYHg[!r󶿹 5%=,3|*)|ɢ֙5Q 6X>۸~2{]Z t8ۢC\xDֱ@TUp_Ϣp ϳAD00IAH*HA5̱V"%NNS_a+$b, IPhH[/");7GS(9W/ߠطw#IAg&!\wKdѾtM證:.\4גd1եm+1wz-xjA4dE= >YQ4Ȉ6}t`SN1-M Y' H:}rƞ AN#Ɗ?\V?^XPl!+EYec,)uՐOȲ';})J%EJ1I} uDP%O'()e}S5j'i$%9ivG@Ǧ(9,s, 0奀Y )bS)sQ)$1W%mb.Ҥ,CH H+*9ʂk"\\Cy%pWPKcbʮD!;*.Gubf(`=Pש:GK%ȂndY!=[j>c(a݅]lB_^#LW ٯAZ_bI dUC ՗eI+kcvNA[>@rS')MPUO#(K%&jjȪr \bUÁScƱ/1Q$71QƍI^,d^Iαs\0x=AVuhYVOT('ʲT{Q^㈲6%bz$6ˎ{db%5֣\?Mo hFib4%¸A;kkz8cH( J$naJ;83KlX,eJC%Or̵\Գ\-Kұ `d@m r4AFjU&"I_W֕z +@PadSqaa53:֗Dԛ?`ї6+>4x>'~y@Dm /aؚTjo,p-s%%mA1x"gG(!c,,fV(e,lہ'3ĨLzJ&JdDyeEz {(>De|=Q,K]."^:EusuHY^^y*?KZ; %$)%6)@VH m,'eN6ARDJlPSJH9;fz#H9@V=jx$յ4AUǩw|Q.$J…YOPnVAIDy!$ƧWb%B*bUIPY߉^Br$ǚ(nYB.bXWҡA4|6 '\" "/%`3)% fxUsz9_9Nuy_Rl7 |SH{nq]_ç)e{yj(D^3QֵYXxkgna{;ZY !%?K6؏YMϻ ~:Ⓐ`JZ LIÝsMcZŢ"Ω) >l}$>l- 8e3#`FhIrLYXό>UqiEL$IPtYl(փh77NU\K13 ԴO#'( e<,'#J8(p^Yt9J%'zI(^қ`zI[~^. Q?ў \W/QU&Ǐ'dCYJe>D$Q$Y-!$%(K 5*}tA!MeD- UԱ?!h2/!(( ,@Bq+%c"#DFRd#- ($Z@J聯bҍ!>}0lt_jI g)YJF Ҕ)$)b&I@Xnie*6P?h}WyG[`,OH L H`PW[GՁ,0B<% q/%R 9XBB"y%ˍ`׷i|s̑γ2ݥ=? H`ƛk}g֬kV65oop Jx%jj4nZDQ;8e8M"K̪BF(;fl on' UV\/?SA =PB_(e,aZRb_(7_RzhU@UOӓ[2z(d4PDi,!άAʍIƍIT薦Bƞ98c$XcAq^VXΊ"z7mIL3%9a"He4 qVTU#ȼjD%JrɊՍ ]%D1QAˈBL?FyeY"q>ڲ,C !l"Kr8\!( ,븅bVMd QF#񍲄M@&(!}MPn) Q `QQB&C64و(K#EToM3<[hň $7FJHn#F|nf8j~ѾqE sڒH5' wkIF绯 rD(s3,q3eBD3BP"<b.Xe YX <2G"\ne!BX{骂C%HPEe|eDY&Q+$rp)bJjG QUfQY)k(ٞbT_XOx\Fy??o_Ͽ?\u3̿1Ǥɹ>_?r^Gu!˳F~ƙU{UMK|cǸH=׵Y]Wΰ6K)<yW` \{:FHҚp\sz-0p˻* 1ua}zn}s8DsAEvGD:)vz!rJ!sVڷϔ{i5_8|\1aea9Knz[HY<麞}jibs]_k YgcQ86YU3Xsm92v <{ %!~f[,>Cum4}Fʼ$&kGuM/>=V0]HCm]_a/Hhbr=ћF;pY+}sl5U>G1v,%9}߄36prʍ:>ڎ5Usr#ﴸV!]\/΢H^2:hMTkoGZ;D~޵đ*gєamzt^ ~Wˎ-cJ9+d8}5dc'3ٻYgYxw9DAh>%WW׹ݳ =;R0y rlNv82S:){.?hCx`峐0E\eP;屙wu5O= 3./\չ|yGݦr[\ER~d(hOf^`' ^/>̑]z$uap`=,>(ry$}?x$Db|9MrC!>(ߝӢL0i;^nޫ(Ğ2坚>;O>2YZ9 ~ӨeG_HeU>s"C/Nϑ|NrSq^߸˙SX[}ݜg,ywox'aW"jz`IofZˉ{7n/\VރJy>˫pZhtA#;NK⦝Ș7ds/?ލ Vq[s ])\5UEJ5a2O{BLCa-ۻ%YU-ίg(VŚ^r4u5F"_$ȑ+s)ǧrȍBhiyWYޝa2F-wD_>=}HZ꼸Kr /E]yNA7'l'CF;Sstd`'s'T$m)kG˜uA#N(y;dO%w ]S`[sw=y`(Nui^|KmGyG(RDMoVED+y2G)z?ힺsù4^R93yk^."3!h\"{gP!3(}uM+/)_Ysܱs㏸!itGi!.d6uG'ݑ;|-m]Ρ{y^k={'k;8bLYbx>H_8;F)g^Yn%GH| %gGCW|kpytF.AaB&eM";߉D|%C>>cb;|ݶL 7ʳ0mwdH;-^rg+Pr͖Nz!NyFgjg^vERqb#(s7X,NEislq$]>%`̮!>7sxrtt|N9w8_! BFk嗟Ç7V.CM-$!DJO 7RbU(?q=}Ÿ796msD'A􇨎̑bf%Y6Rpz=;%qăwD>)3HֿӾNٷ3}^>[yEyWu^II>lvmdݙ3)T Ìi8Jmv{W?>mP:5M;E3r Y? 0]78c;/?|$^<}LԎt=yߛeLO5I;7AgI2<ƾd6?v<}`>Ukv+/:`b±̂T/|*`>-*s}_K$w[ms G2Lz/GRy]tWꐍZ[~u%4?|,/<~7^QSwhyI$>*I{/!yeB5RV ^[vGsǿI+qfTcZnSE#N)Eߙ}C OoH.['̈v }6<]* o_s^5X1oN'VԾ@Y\y꿼ģs/,YO’uË! 
'N؞nUMZ)9Oh?Xw&D޾\ +24RHPR;_>EdqȯNg<6{h;J_}A!54![޷ueq7E2LBa|;:GҬdwxz 2xɻkKvoxv'QWZ;㨈HmPٯ /`ߢżNŭG9(d}(V1]`+BIbK^G._G`=c%6>_t)\k3JBdM%^xGpE"JT)Nqq3OvίV2I<{E'R QR}Uukayr+ i6u4Dr^g?8UG+LwJag_+I)J>m3/G\rl;k*_ZZj#a+.DL dP9ws-yR_fctwIuDnʞ17#$6CG2DF:j|D̫=bV r,y}h >ߵ=^Cˬ][j|砯OsyF">`{ 5睟 gQ"qŘ:w\:HY@dޝ:-!bqv\<^EJ 5R_Küy]?5Yl-.0|5ocWR QM{9PW-y/%!oTa;YWAcKk_^oFJEKX ߢu]!zY`.W5pk#rZJN{s͝~`d,I8_Hp'B24\Xt0Rl'59d!Ǧ|ZDavCHuCtHb}zI!┪͂xʓO:_[%٢%ѯ^ 룶MK`+eƽs2Vn9/=b&R6u筏q˯P-߷O#Yt>^k(wck$QE>*7|t~:uy԰~ĝMJJܷk-{zױ#YCaMe;1Z2,}dS 7!Yŕ6WL*}9+Ң#ju&0fqdӃ;PbCڤ\-3ᝳZNԬWJ[ O7u=\$[|ݝ^ =lXm^_x} PE:#e=>9{vl&p<墽υ1gsqgME͸^tϧ!+uLptTq\.$߮z.}A0\Uϯs-JToN/ۖM~sj>&c6%[$lwMW12G)Y^{juޅ&-r}ZO4U6O[>KUy9 '(I%<'U2F%%܉Tݝ͡7]-By;3Gb> M$`qؤ(*}T2_"l>Juy71+,?zny" m+^Jn ϥNlahO$o۲w[\<Ώ-0Ȼl}&?t_H_.gjY"QT:j׵aqޑJ˧T6)})e`%=ߢH*J2uxQ\_q49_\gPFvD^"P\ެ,ֲ.̯ܹ>ݨ딺T"0Q6o]8"7py%JRPo}ѝ:C3܅D_̄zTy;Kv)#(<1g:|d}2 ~4Ͻx',F`io1AT䈗;s$!~8i[J؎H}82d=Jv!7;_% ؖRgC`Β|Ee)9s˙\0ulc|;V9/nƂsD/j1 Q_'ZkS^+^å+g>,Q}_qcH{fV>ӭ©₹޷u|ci09.?튞K%Ϟ2 \ 2*]BVyt5}߅Eu7z% _`&bheV. w-<|˾ Dή~Ѿ.Mmg}323 ^WЎwT7wi)={=_a=$_d:IP^߅pBm.?8B geM/l?QaIAdAvf g!)@BVse8>{M_MqQ<5 0ՍtOۨmVwgk<Yb{7>̏D3K]Ew@qx75]j7,&XM<*Z{Q e%ǧ/򦬣d<ve{y.Z<7==^_u]rs7)_6rCË6}[O/ӿx'}Tx!qtyyCO+I'ޮL7/S}Z)lg7&wXnq_QE0 6A?E AfIؗ=ojy~2DA|ȾNâYM~f}4{$ղk~ qCA\6 _5MҌ9u@dڳL"¯bD{\<(GJI0XBz /5RM䕳QﭯS%+~"ivO$:_'tDt3qkmH Rѻ,%]4m?&SiO_wo&=.J%#08nW~22Pގ-!_G/QDG4ze{^&>y % s4m磽wq;)UQ`To~*%=c?[1i݄ ,?[鯧EeKa O[D.bh_ǹ[M9mbse-? 痦: mj/޿|Z"NUߩ޿|?릥ۭGȷ ( rZ_v5w<ڎsM4ί??]eMMV6gHt6FOT_~PſrF`]w/ߩC ; ؈0bly/&4?55kʿfdo{ɿhn?xL2?ubUv M%/o_sg[ \jGN_ǒwӂkӳχݵWwy>Su6{<!\{޹U[>שq=7cw5+.f˗6iX@+4\hڢrmeZ[B}GGGќXI-!3&~։g2PtB[Sp63eOR֣iZR @ ˑ7/wCva:m+Js&gJҡo`B5RWk-rѻ)OKs͇+cHG^DZfmqePZ\M=b{S AxbqUnS!͜8]w-q#WwddFJ6+̈zOpjvG1Lh#^]jn}`w9`JKo p6_ &R&6m?]53 5h0i NW:LiTGڤWʌM-7@K9K5Gɺ[ 5L (Cz|+L-xB$e~rϿidX^@͈xK+kz`1״3hv6}mȃФg#a{4KWB=ˡߡlMh|;ᛎyfĞzZa[њ]sn5"FH಼=#CӹV[b*וh|ߗKXLo3W՟[אzJd~{Bd!Ə2츶ONDz=LԸ]" ͼIδ D3^Y~o~Z):s>ֵh˚A[z!,ZD))ks0dLzR<7}~ufgvNƶ5'{}ך> vQ&o˓J,ĮwNOo\%{g9"\7ۙgͬP+^kt|[͔w~f4=r2TC4 `{6,u]UWHDq jSp ˸e?7]k#<ѸXK}=zJ`[1Y}>؋G\#r?0>&yGuI&c=iyA!)s͉LlL湞O2L5b-A70η)|3iz|O"سQ ǃxy%3~c72Vƒ0PIRV/Pk32 jT ^ʌ ! 86!b]]SǤSg\S!m%CeaS$)3,\hTa,:&4tJb;te]mĹ*q B/:פ2Zf6]p#I>}nt;Ӆˌ $l37zs,hm(&tZ˪$+T;v7i^>vLP!⹶vtM ҕEѕ7.EZWcVE'ݖEƢm SR"4ՏW1j\m7?o!%b Zfe!X*{(RY3 ۺ ]Xc=nX; ڍ,f\u߳2<{ :MUk|b| ڣ;c&Oow#_`s9۾x@Oo|ZϽU%Dm r:ɧH/~B0m̹=NmKn˹z$V_n*B~^5KK"fʒH2eu rݡq>̂wzM6Uӽ$r2"?.Q.g9]Ե&Mgǖ[׷Q@"y")eHoY?{wP b?2缱&{@mF]q  (4@ZDg$Jgi.L I}cu;tzzMpVWwEW,+/xH߼s}9+~֥-D '|暮;j>Y/$9W兛H>P,50Շw%^/R7ګ $BN?sSKcV%q5I>n}Ui^%U1q۲Uplaٷ-O^#ʋ7*ʳ+"['UD#jla׼ Kչe/ñ\9~)|eӡCP]g6§-R^*~)=-ƧXe}g\wa(祬g>RwHB$E$*RPM"Q~*TR*x7CDDwͯoBcmidۮwlt7J|c/S3B!|㜏\c!..ԙʪ|>}TaDt"jVeB)ZUz'"U\{}Sg/=캒RR:XWZrME \2,Bw=չCӥ"V.+]PǺR5GRwcRY?X>\R]R~yQKz^Qb 5!Pd7LELT,A3uI |4Vf`Aamt[by~zT@ާjSjȣTBũZcɩz~P,N&1DKkN1@*XB C)OlQ:gB*Vq~7^RB8"ڌ@e6!zA2Β+Tk"_풽H "zM-_%B`|U,^kN E< lTDOlntg@"A*3bkcy`DX?ߐwmkyWȻ~0b!+Dhxӣ >P >ހ2y#> E|^:NA|Mה@|m(r#PEx0yN>sH_DB'ׯוfuA|m]#o ک&<*c k&6k0Ǜsķ_ׄvNyv-:5$1|#?7׏ψk B. H~ue3bځg|Xw&<`Dx&)|)E 6!j[-#k " E_4ݒ *x?#iDQȏpDq&bMb. 
I\  " $.ˑ$,G+iP*_&YY<ܶ@+L$0+$2J ', ITGW:rט(Y(1h^JM%DV‹&ɵۣr "^bMĚKQ T`_ގ ̈́'&6-z8:g5hrIK3iĪ- " {RK ضLHIEF5D\ԮIjduo9 rՌ@pKX8il5ETM1JNJ ,!R/K'Wb䥄J)!PmD@yr%j ϊßO%i#.fȧ)ܼ?2ِ+W)ç$zJ5Oy!؟FNIBĈ(Wx$2wT "uHe]tJAPtɢ$MT XX 80W N!+$-/P1* E"5$ќCQCe|{ə7Ry?RG#')'C\IRgR8/YfI3<ʌ8Ã@t99\yzqdUm .qfɼ,qyE+HVqhBʯ*F%rMKncgr V**]BZxbSJ¯W7FVx*opaE7+UXx{8*R唉* ~jɯ⩪UU[}U99NGoU;:b]7*ftGڤ?F R9a⑊("Wt\Sy!4"8JF&;*XX,D N[ԕ뫒Ƃ'YB%Yܜ=!>*^5Zor.(Zt]m#!\М2eK(~a'̥ok?Oœ=ߗT >ʢ^qcTo`W˝ {3z* ˡ-xʸQѿ1bzl7xwSj~LUdL|4{E2x8 VSc:gyGm[!鮦}z9=Ln^5M(ɵ ndz*$}U.}I_ketItIUkQ^;R:E [FXsqgOΕ_*to= Xe+*q8eF)iU;9[/ [~,iF&dI/H_{®Y_w+ WQ\[ _,i5*:FEgؾΰ~,12x5%uQ%.}#ZeFqm,: :vhY 8P}PRFKF_mknJZf\΃U^)ɵͦ-T,u7SwM'{pm,H'TmK5//Ta]}S}̤ <~gX3VoO+6*>0h T~ Kd$eΆ`Uࣛ`v?hJqMX>jGg+Uq|Յ4~߫5Y$[}Gݭz̮sYپe6j4c#pMucIWX*GuX"8Ț̳տ15/ёEu6Gm3ܿv'cQoc)oGTSĴ؞*8nb{RYRCm(i##CI/B.F.4n!Q4Ź?1"sG=E՛6gf&T%ǷcdRi"k./TY/Kt1b;F#cICtEc)oKڽȌӑaSXJ3Ⱥ(AM&a|ƈ\=sE%FMG}YwS|=:6oޠ;qgqZJc`WO?ck%p9 ةeӼGhxԼcmWA>-U\ɍ=Uв iO*+fw]$^fo*,nn=XLT_ŶGцŶPz}wW^jD#>N;`>֛;1vOqYl J")rYtAIjԳ_jOWcAdX"M@`I/i$ 0K~"G H5 03 `@rI$d@r$II-|ȥIZ-2x\&b:H{KKX(@r;)|?Wmܒ_r]^nɔ29@"v"A@k+.]nrmA˥\*Br Aʥ\utۣ`rnA H.Ob$5]tǥp <.K&e[?x4'_2aȠx9ҖG'k{X!@#.EWuAd&Kcvcr/Mx"T .wy(|2)ػ75qۻXڂ 2V~? N`wu 4]S9Y][ɂdʋHv%Pw$/?Nvy $W2'FT^ۺ\Z kӔ` `ofErŔvݴ=%*'{Mn'$VeHX#bh&TQ0;f3eIommN#c꣝&Z^\|8 Kz X1,B_%B5 G$'[Yy$Ze*cI6IJ:0q^Ȕ:wbp֨z\X}L՟}]Mo_Te]{dֳYk_y[bReI/MݼjX /p И_sߐHehL21n/]zh1 '$Tc8X$l˜p+to(^Z  &8Dٌ,Flr*lӼE4<;ߩu&^lh~p֭9KMln?s ʠL,ˑh=k1"qzkMO- cF%o{=\m%L#P{dޏMSXWSO\e;Tv{^;}<0PL3T`m ZS!ⱾW ԹR5 X#r#BP#IXm5c>y_ی;Cu6AJ2QRFFI]Gw_eNIeGv*Mڏ~#KS6R]:OFIGHG *C!㸘:Ȍ wX*Y[dKeXtb[:;KY#W2!KQ E&eXdjg1_ 1uS7B%v~PҌ zkd{vaP8jO~AF|J"[:wYsI%5,e>)5γ ]u8t1iYKzDV/zTRϞ'\˼uqb6VWo@ew 9^D#㨇;N8BVႴ>.OJ҈.횡}N5wvlW9R]e&ˣ!mpDWq4QQR3ULW})YpI5;2ߖS@hYz<ȼ5Qf9kY6〵.ˑeIU#G+$4ћz ZS{TG1l\xcX?\Fgi# WMk-kxܲo"@wէ#bÕu:]y4ʤaW7ЙF )Vg),5l7.#.7­;o+?UHTRӍh!U!UBB5ֻC!eYD@DYDU8*Ur"QQ^2VPkPIt,Dz,aYFp)-+%1\RY WpJ WV/1\p(-T"*cWΙDz-˱$+L",JWH/""ZcW$Y"z*%Ez(IWn DzuFDzʍ!^?FzEIWT"\EzIeA\W$+װg. ϒ³n,^-L<q9q/ ^KxV/1p9pJb"1\eqW]EJUΫ]Q#UQQZDcYDUQ;jG2!:T,zr0*&]&k4]ECJUCh4iؖ3CaRI|SV'H2DAIeQPY Ib"jX>QB5{yyC ˪$4k,e<՘EdYT ˺$+ܒ(ո&͌󺠎q^[=cUfMlv)Y$8kLdc=C2ge-8kଋR5'RŐ` -ñ]5- AW'3@x]w n۱f2gy;cځe' e:P.cLEk TPHq̮R2;WG,e/53XB%XnX&R}#.n&e~Wvm^ܗ;VM+YSpCՎ+#rR6a`LU.pQ-YI e;?v?c8!d[PXD5 TGɨ xB%8JBE$Ix8o$2$RHfL'Eu~۲W$(xE9cFFET#)3~y?6uKJd]WD*23u zI2k*r58b W1dJC1$![qjlrcg@v: 2 21@rFOb'Y "c|qlfZp 8C Е2bYqnᲱdnjnqn 2c=lD\u &7nXJlrܦܘ˳d N7". 牥d:)g$݌R&8b)gLof}3[qlF2ّ=zvm&*ћ\Mf]ŭO2 .Oوd.I={8O_7?x=X$TDlTDolT3Gb]s!=XH$=Xy` =fDzrǢq*OdR!بJr cb$bK:J  ޏfP{#~KFH>Q1|⼢-n#ZِvBA4.TDxrxXȲ90u462X.2 5F?Q:ȁE>.*GVf#[*TCZVyGrqvebֲ-Yz kY/;d-+e=H56ubâ}:z.|`1l|`ҲQ1iƎ;jRI4`"*D>,TYי`<ɚ2j@RТJ`[eI&@7ɢϐוoKkc-h{Kx2*D⻊8“%MvAe$%Vu+UfY޲Qm맫 QJPd55r=_dz&&o_j'*I+WiЗ4nfqd#cC!"ly:2Ȅl_VRSpW/@[jdnjNz)Nɴ3a)s6MIM4=є$T8:.F\yQcdK-b)KMmd3veFA@*l3n'ɭ/ӃW)%1Fh^OJ&m7Y7i7'lY9];WF|l#'_Lk Mv-JLaedoy+cPשH!cI^14cXf$87yBżb#b^Mx#}T'ajgB,;6Ŷ|&lU `nQ1ب[lb_}8Yj2ܽ!Sod(y6Sbk.c*qS$OYmZȮ%>*ey}ŗz563mJfγq5/%Z/Ib1@ҧmGڝwG*#3B&5썴KG)Ońf!َ4lTL{6y`}`&To$! 
U0k v-8hgH5iC76QVFSfӋ fH v >&ͬSxeka+!Wl_|LVJdz %%ؤQcw Jzx=G]bTe9LD-,im7Yc/*k_tYqWOy YR*l1 /1{T]lbaBBi^x2/Vq tBJJUe(:gA,)XӧޓDV6H$_"STN,@1S՟#MԚVV]WG+eRQ/+dWbvϒm(G\BhAy:֪PY)S-r3G&L{kV_̦=R/K[ BƲNmFk+cIXZXGlDwG*ݿInnrNMVV/d?fU32 }zztgWϏt(b9ֵ>p4RXNT wVKY.)55-Z`Ss)1헻xe]V⊅DwWT*jVTVeQ)VOi❪묬ƮuV% eVQZʁۻ GY(kǂfHXHVܨ (-K*I#,4beiĤi3t˱"W!.r*K&%JAF,(iUI#*I#*I#2RG(IB%2t]טK"Kĕ Е,]WRl9*b+J`cŰo1]W6[uLUt]=c"]!}G0^pYa+zq__p9wP+T_8~M3k__Dk,bDP9t] Xu@Fk nf tX3c~y9r^%ڇG*ܘݬ"U ƌd ƌd~mUHVE\c9=lIl+FZ S$72'iH#Vyֺ78KaOeAIJmdnDbFDXFwǯcG"Gbq$rG2O#k_BX%*\5 "o/%ǭ Vj[Ad 봰UκVdeXUJL&0>tk@Z8)*FRM-Y8֌ʮd".x\8u(=X)8.xXw* `Xr-{Ӭ0U=+Nq0{bb:zn֚ǵ-a9*"RqJy'@⒒⑒KV%?Jaq$FMUU|Q:BI")U/(2DѠ.֌zWضFwutQƊ˖"VQ9NlR# N%yd7LF0U4+N,8׉;i͢T:֤ _8\I:VBБ4ɏ Xb묲U Uĉ`*Rͣ*̑B]*P~PYNQLú la]' iKơuzSgt<Oir\u@^ƺ2(WoDMTMUz2o%=WMU8!ThHB*B7UT HP!7cgV(PB2HWQ 2n0 AJd8,d}`9墬)Al ^ PB,kˡ , ^MsJ\ji ^HHe]嶓rV p$˲..tU-.r $,vx%Pɨ$@57#s!bŨn$TSVGcim=tջbKXtgXIe}`U>rJXY ^Wg-^EmYU]r}YTH_V/hS3gxj꡾G1uT0V4eK972P‰2qBpxp]8@U3Kvi.T4R9m7إq,6xYVe94h92Ap8h;!kpPӋW-!zїUOF 3QF Si tjr8(8Y/[d hԸvGzddxPDbȗ ǖ,X4T0*K<$yuqQ] ,7'FypQ/VogBe]W"L>SVhN?I@6*j4j2G7ʱmlczK}Ŝd$#eIulׇɷGl-٠:շ"Qo~庮U*762gj˱2[uuzgzۺ>$={z^ڲ,%)&^Sl`DzWzI\v!=/S,#yv.G,b*y~>T2 m6/2ӽkڦZߚk'D a>l`Ms,:6v8fm=dۄ#v_^ {n[Id)LvkkL6! cY*YnL%FmCWwKxll~'<'TM1絹~+59/j뜻K3_#BuxVv[5m-B5_5&z82KJL5_f XF4a ꌋqЦkti\K9/.z=ƖJͫcJiXMgzFtz]ǖ&W@&ÌJkRsdJX;ZkƻgzO3m2׵5f+mӢG~%LLj&nzUK}/eȄ,ELh&ۻT/c6m.V+uאz SQ59V<>5˧{SJo?&'[]ͅV,{[WLz1}5"c}|%7NRoc+Bzhy_.=6Y-9ݦaK{?Sftt}p$|QLld7}=Hj4#he9HmDCW=MO ȐI{k畞ɦ|XȝkLC2ԁMo}F*wPGsaZrWEem黚DŽj{rrTg4e6V>gɭ9SWR]L|bd/˟ گn-ʱ܏ᅯ[$ KԌ.˟ .f/;Ѵ5l\}vFjS=^RKvKIg3$( * Xr, ʂJ}^ӃfϘCYCYcID %rKn;c!rK*Dntx(r,eeBbY.W B%], $+gG\w9mIea[RYVB63"@>6c! Wi}vnۺgw)v5V=1@ۚ$cӓ2VXF"]@8~S~7&''.fM,#62 .8rl`c% ]\Wp ,kk(wOAl`?4tqK`h"lӍbR;Ol6&6JSBdh,:) ycDg8_([fzE]LfLvFܸ{/[sϷl2E[ *0H;Y7O($zevJ0Ͽ78ȕm'u^?^@®f|tX|+} twX?O {{=;M%PӚ,,%zb@s5ɸیӷQ IcG(X ٖؤ(*?1nԻGua$[oH"yy y@c)D?0f_w3yn$yo0C^ 8[R;_F.PE*J*Rd3n=VW)%{@,ú_-T|1!rPe풃 *uˌ,K3K3ჸhXG/?wI-_˅_dDg;U{LU5NЯßu4~LU5~>X&tdk}{SR*j_ˎ,RUmLjKX% $UWM.ޚ6_ 4^mYtd(I{uF~}"\=|화2><:J<6;=nZ=ggW;P;OfTUWcX#VHeB ]?h%3Z]p=ʍS~^X: ~Up/$t4旺;ve1 t8ƪ3<9דIԬtE.D5y3U-?1r+RBsL"qEr9H9VyNUFWNg^VX Hhr*qTE訊ʥ*ǖqAeAU7K[`QpQUy}P$>(U'+O%+SUy~]rYһ[~ #2|B8(FYFYނtիiDu[hytAE肊ҥ 4rr.qA4SB5J"qA0c"s)B PP7ҥ(;@\-E&{F_ՏWţUfd7ţϑ >3}5"g" |xFEwU%(>6C6QQ]mR Efs/+KKCz- % e6R$TZ@jr]V@JNX)9aX@b)e H!Z@c[x[Zf >h) ed)bTL-35g)Ve,+ VI['ҢO E\sRK}rNsrBxwv jN=VSG|"au|b5'ώiVŇ޽J9Ie՜DqF|jNe愑ԚO<=U/XSm54c3};jeCe DX?~ՏrYV?jWG++ c{'ɌV@JeP2Ԫ>jVJjCs6g.v( &6۳oIeaOKRkI%J B$JObB%qFb~1(H)2# k %(Tf+TBk[x1b%2glgNf;^Z eHe9, M oP%@43t)Ĺݯáf`Qf[f M1vIC/ 6?}cO[H/Tk] 4u1M?tp0i*M=X o00 F/M3uxO\6ۢ6׺oȣ1h矱GU&_W]p1V$-1vjbτZ3ܟaQ-1VFYva"Somg5_s&Qfӎc~͎MY.gVf<U5gԶ&wh/cIٵ;aUk`nhmۅX85v1+ר2<cD3 t_"௬ ʜ]RYn.'+Y/`ٹ\Ro{˱8d[KUN(޵TyY o-_hɼ"[\z} uI-9!IAˡ,TC+%ITɢXF˱֗S|#+4+]EUAƭ+>{dӊ@Kb(C *uW} J},d $W.d_-WT^պC&ɼze QWD^2uU\]RY9~Ge,%% %cW%RvXKή/LY_gд/okqjA*@L,|g6l }\l7M'T@ѲeF̅8͹u_X|zH=, Br K.{,B籽_A2WoIUBV`LͦcmC%T&g}})v ,Y rEI.?ЭOlbdЫIB%}AIdAG*A(G(g6K;bS;cr,k)Տ-od kZ?B4|6G-YSF"T2+QڙrBh*|(Dim*z%lp*Gzu!ML)ǔcIگ+U9fn|*N4?knƻ-^bCIT:36M@7UtZ ſ5J"H$CItV&[;ZU.qH2mB x6ó6MY=c 0i#-gdk\hc 5"FNЎ#-hHe=hDK=B#";̐]bFԍ/Mf`HfFQ̨d]LGS6!5ѹFԍ&k I0FǍA4!uQSiC(}ndlt#N7yb#leSm!P1.u1:r0d_GtQ+q8FHsf:"GxEG$յS{tɑLye6=ܨe^ҙCHcߩ%:ް}d ˷>Kڢ#.:.ZAėiTɥS<f[MtQfL-ߥLޞcnf;*_j'^hFCg;!kŮ_6ˡeDGDwLvE$$2O]{|af6[b-nNS3|v^%"sy}y'ϐeQ<ޮ}~\kgps u;6_'͕*Su ȟlVI#Q\|z4Sl>~};E#eyA$P]-U[JK[ɯke~,xp,|ɄAމ\23gl\Xӷ`W6aB6^3n҃νs߶o=j4^lo5%fs_-/A9 W$PNrY3ëRRM9i\ٺ戳KIC5MQt[jʔ0"`o)麕Y7qMTD Ph MQоBŇM$& T 2Aeo,To *7&&F %Xƒ|c^L$жGG#$wTRkr#DqS!ީcI gm,87fc!Z"1btm(BP{<ݿP'POř:4BnŮaFz6"HPG*0l4"yAI Qō0~7fdo]RFĠ]DZ~D*g6D*FWw$!ւ у%zD]UX~F)uTBSjŸQJg T?c6# 5?@=^,P[OB3z_8Fku(B 1B=jC{Z@UBu\PC؞0! 
JU N q;U!p!Bӡ[ p2f7JXOk~b='Uq*t6 9:fݶ;ƻfڿ}@͡֏cI[Yu=6=$ǭ\.B&T1i *_;}wGkQ-/+#eE#2* T)Yozux4)K\Դ@ wUƲ;:d,ʱ3m2XUp@Iɘ4*[D9?}]tg|1zŲvvz#RUT=qb$ɑv‰l Txe{}%V`<ҦV .`͑xZJB jaj>d>V~=\[*|V mee>4N-jO벖1̮ޯ=|#Yf_yOsS9럗\:P:ƒjaaϹin|g0:R ܯx7+\xKЮԆʦ6@ ԛ)XrV V5I 8QܝFvd=o:ݟ24yUGWD6ygW+~Y X%X9#kf#*32ĪBU&Dem,bG*5"V4rYXUvIW$+TDֈXYkZ)*k ",_(q_E*X-, K*VڲX[VBmc|(UN0 QXJt1ac)W \U $uHp(ҵP[paS EcnCQHWXCHWtBnse(-P~[SĆU 巅D ,JFŪY=UNj~xEGfjpGs,VV&imDլ^eʽY=cլ3fBmr^z4U8^ 'P[[VU*jku B zdQ[p!bk-PhH=aUp$vP5[*b򍬇u*sc0 s 03a%թX` `sYRvj,%0'4aR0'4aR0?QNe,&hb8՘z1f Ʊl`º2a0EXvG09a7 1nDzӂK%`k2/ĢTK,;$\X(SXsYR,jb/ĢX@6 4\x)o$uc9`,'HE 1341 X%2@ʠMĢqX@MhbX5!Ie &(IJzBXn*vmXG`J`& $²Ռa;u;, f3k,ò(,*r%0l 72[z;Hp*!1`UFmؤbb1,T%cIb%pLzI,*I,X,c{WTL,"I,vIbS)B5 D1l e.P (}1lc16B%&uI.H2d X$cX$cŌaRYư(ö6~HQI%H2gZZ I+uIZ0U@lFC*i jɆB&I33U P Ԓ1_N,l*6AE&CVtaSLE1]ؾ¾gɕdb"-6e]l RRWIE)pLcRӅ]G8&=>@RI͈$äb"#L=eIPdI;?pZ~^w@&*"˲4~Hk7a)p3+sB Vmda~NjWuTrkFH'{SzT]b*OMᲒHJ\ViPiPiӝJ_RikrZZTu;Q|[Rʺ`ⶊH~ӡDX;~5!5!筹6VZ@͹K,[|[DkC8Alnc"1616ѭFsem= O`-p옱 Dz6TnOS7=px=pbRIy3fmٸrjkLNْז4Җ4vᶪ4qY-h\UYDs[3j[<г ],s+I;lc*rLհQ-rx0Qm&Ir$ǵ0:U_EwQEH:.pNGBG:d:ӉDլ]zd$uHhGFJCNV9Yd$MV7 ]o*75סM o&5K'Bv!5MUV7;ԥ] {>7Yd$4q‰:aN5>8hmQIMp:thn#kk tFcԾ6p馠MF7=m4RZգn4Mg+<tv̦^6RRZ5MGB#R"ӡMrhb'yijtS O3= BmrS77^:DhpӋmiho }kqh[ҵ[#|:t:4צ=ޗFn'ot 7 F3motצ7=y#-.OFhJ RBJ!uXx'R{{_Oі^'xXp= jB^dw9VY=o$UpVRg>=h4~X!m` z'}cZ7Z'hx@oHi^(LmT^w_nR>;ڠBvss 6kYEAC]78f/k\.n{*mkw5r^Oku .:XM}6/xZyb^AqⳲ">d3j5̭cӷAoF!G^ozy[x^foE8nş?v?~fۮ뇋@_pZ|x..vg3 z\?,5O+n x1CzPwrojx;h#]zzF8xh޹ ?9ޞflv6!+ñhMצ19mO/<Ri[ȶN#~@Tu0Ry1?Nu1#=k~}hMa[M*C؎c5%C)PM'&Č:DQgaH!%Ra27"6bSH!6&8,"6Ō'!H3"RhUk'+H6c)'RHhORLh"&4/6hMaM=ۗ$c6GPV}rB MhKB'L0١l]%PM{f9f4 &YC56f*K󈅱*˸1w,*$sm9Y͐B1``-`}Α0brm]sV5lA36A3ui8bl4|߉QuB$ #DYD"QpPDQ,,DD]#%2%%:|g!f@\5(8R$ I\e(R" X% #E3;{^EoQpw[D}zW" Qp% W" (Qp,SW" Qp"QpVOXTD2ɋ(%`]wϙ 3ZDp͋XWF97d|F a>(|‘"po) kYOX'O|HM',V>'eOXW>a'\ŋOXo ɋO'_|UA1OL|n^pj ˞'z3!ӳ|}Y*86S޾ ӮT D\{UpwLG+h2Fg!w$$CyØiIqwMsDarmR7|?VTfjJJ.Cڑ =5*(|mA =5›TVh!t_.!,JrẑZxcaxؖw$l&XL$a|b !ȳZjZ/є WT_T];dfp5OWBәeXu |횓ʫ벑rgkZa+nQ݆XxQ>ΨYj[aa(5ˇBNm q BƄxWH^)زS5WQ +5n{YiXζ$==]wcݫ_xڸۑ ӹ: \MNտZWf)~_Xۀ7b5]'R2;v❟R0߇`# j ޟ-(1Tv$hGNX {]@8)|?,G~+71 O>W_w#t71q/#o-C޲Cؕbt{BU&B#yg,_WN#L/BwicM?tp(׽`BHSPjv4g˾U1UkFRsR58@SU)0UtYzX [,-WfZ$7Rr.K8mVBj AXM>.0VnhlT<MlWD5Hh_;/㶵;Vk_p\k]{I u쪫kf$ZEyx*^J^]Z3*]m&xu"~թ-BX{ARUަ=jxgV]3<+0cG(!R*T*oH m!$PӆX;BbH 3 )DƦk]w{ĻI?~"_>ƠniuxkQ߱[PtLXFَwՎ .>NkXw P: Pe Pחؠc|~#47ؕknk]̈`Rv%0RcC ce^ cufZs詋NcCOH!3b=}OGZ}BO)5L{qϯ{jr$b۝cOjcO9ƞ2q<OAJWeQ"՝PBsP){ 0; kF%:qX7"&t,5Xhڱԁc,Δ9Zؽ4>5XWpS ѧJw*BQ+n6Fi@HOMW>5\W@Zdt_^Vic498}THs*8ʓ}{g6]v:peۈVۈ7j{/ <7UMov1C;V.zNktZ:B賃u?1}vЁ M[qeRhK/B hzA4Jӂ3 ][yKͽ^`YB='.gzy3wm00=I֯Ac^et2xj*ejIU:KX1!еm w'ljJGv6괳ͣQ 1][9cZ`,􍁯DKV @_.:eoI40d vh;XF#ٌH6cjfBP@3"ղvBYVJT? 
Q.F4\BԬdu5DX Q.Ru)`XaP {dpc1iTXр1c1`5`A*b]hmLYPY#/iszh/U4@EUi(F#T@I9]ը'[; =;"ƺeZB]h E(EMjgdS:mRƨ]Wڤ&R37ע:VJW1:M|M|%UhWhV8V;ql- ]RvıHP xRCF=+>tڀ3)maJ8l+# ISlZ"n"TBqT.^py2$&e/l{x?%8ARrtBWq<`*#OU{Z{;/RGJ;kx}qo7c-:TڟE^r`z5\zy@IvRhKUї{ t]kڻAއLx$n&Ú`U>?e=s?='|-Ǻ}'oMGXYdޡ?40*@HO\;'@R?wEV3_QWJKZ*_>>n3:cѭSfoܓn͏\nӑYڡl[ӧ N};:6/K&Wk6oVxS{ |ktZn{P#{z۞q^_w\ g5e,~?Y^reϏĈr^`?,~zek=ƏECvtU)IZx-YSe6/'>+9~AXA`8/]>q:vD룱]#8jZه}e5Vu|ur7-q~y@sw;Okon_}G.Y]v| |}֘<|>~8^b{'?-5G5zݼ}}̶xQߑ-~~zn^i5~>8+{4ynyR~گC]F2*|υR,a lLܑ  aoW;:ؾ*Q[V%XI`0i o`ΑcdDԁ^Fi9 N(F0Vqj8BcU)\K*QN H;t ]P̆Bukr;耨ˮ e7vMa0~fde.2BB- .qFvPx(G`KCG8F<&}QBcǜBp`Unp6:f̙akE SU>k0_5_khWukN e(;%hcG!0vL%n5.FMwZʞa4 hiX`q aiO!#Y!USqoy]R>.[)1C ,X=v!|\X`IO3"Bn/jn/~ 9!ZNמsg92mвB jvٱ2bylׇ ČȎXC}B-Cn V[)bkG&+n1RnF0.'T`+t\mPq!t\ PeM~`UMղjU\=@xM<Ǽ_#'W$~@*y)$bYMhm7{ o}^fks囲ɸ؞N綝ˇ JMf+7VojV߰ԞdN]ӛfMv͟d\xs2>!ԕ:l,Y͒i29R"9R"Q"Q"GGZVy:xI[Ջ3(ޣJ7'Uޣ%ޣHwz#UJH]ؑzD+C)CC)CCR"RO8tʡȡm,R8:ؑAaGIZ;j&J@@4Q"kuNՌͽB (pTZ}JR<]5B X܌UJ_*!3Ԍ׵ԜVQݼ^9˂DC9Wc5xfXP~s/G36yWRޕu5}Bu:'ɹYsx}IfJHBXO|_3nƯW?&jXMXHufS:+_]Re\RHՑm6yz5QXjf,t+'Q#iZ׾~x}'Up ,)Ā& [[۝1u&|-[FHlG"[!@H+>.8+4ì(+4ЮS}m[/G΀ >#Ρ*])Uہx}j.}Vq5`1Q xn+(uDžPa\)leVj snm&h3[7·=u?3Yko١ }O(4x2sß֮Y(]zBG*OSr@E)@(mGCCF)rjQ4$ ˜B߅FyPfduWZ>f|9sRIjQ[jU:I-jW/jW'E@po~V6Tp tp+>md& F IbL";^'_xخ%;KQ[Dɓy<. DO" R*B>iCN3AQĿEۈ0wRҙgED IgT3rd:ON%'?ܿB\y6 GuĪ{!KסB-tB% !H>wB-Fj-Fjm>~ޥ ) T k=5zj)Fj)+8Ro:zs ?W78lt:f'jϟ}}eZ۸ @Ņd0Q>~2z}09#z:9#e5ّ$`Yu^/˜B;;լP}O{4&[ gU\;G㴧,mNqZ=TtQݎ.7kCSu廙W9'B(ŤA-}/+oN}0f}2L鉷Uf-nvȄߴ|(4'|WS+GJixrZ:xk4k¼[HGwn)c}.^|dxB@G1Rj1RTiP!T2O5[LQQj;1]N)Pab0!SZ)50ZwK )zf2.!TjB\e [N ho̲kd)b)]΀}DT 5AiJ3 ")ΔcL6(X9A(Hm2i ˜rp3b S{lR4i?!0io2ᒶ Ԛ&TR;$IX&Do@R2xL pbh$F* ʨ;nuUc!!FYnF2Re@H H_)OeG>ZOieNFIN6LV `9ֆ9߀beB@/!R/+%lR'E'%Ux3TJX2RW.UR~Ymx4jwC|;GJP)EPHXQ]QJ,2l(ZQ/(uh4j5cFkeEaA4|GXJbFqB:uZŸ/|۰x EgXNEHh$>qN.hظ\W-^R7,z7uoB8彋pB|HRϗ .n[eBmJ_C}S7)`E ;P8TfP- bbrI(*UmEu*mW`Tix~nP'iFh<:K 3 EV 8*bB DAZ)ՖT,pjg uPծjv]!*TRUq~bZ)A=VvUU5YVX V(+H@֪ (+O BY+$ZJ/ UmP[ ++?0ktP@]qv5ʛ +=sWXWZIUDj૞pX;Vl VvXq_defuJcJWb`T|#pTsT8A xJLc8Z 5"DG#*LVPTX \X`T߼.;_΋hB[T?;*L+JƴiBqhg,0OU`Y("H1H1HvzYN˨ecP&ʱR vW7RogЈE]T*%V]HU,ԅZЎuiHk[z<22$R4JSNkۡڼTP}D-'VBMv|Ujb6fMKCY^4 kGFe}ȚEeك=ѱ|f}_d,nks7/Ty:Rۇ)įx\ Vpk5GMc*۹zPxzwD ԼC(wsCtEXw"2odj_G]v_ O_<[Ҁ^k]ӂq\_7@k8 o;`Ɯb+e?]_k !oԬ m/ܾ'TكOh4%NaH Bx )mlfLé&l/˷ 0Rjc!@!f7RDH%hYRmc&,Kmvi|s}aHѹuXS !DԐB9 Ǝdncf662D)m~ t:ۡƷܡFަkʄvьvn0TxwZCۍ>7Ƚj#dmQ#3vsʹ7huy6/W6/VT j3Ų~gSϩ)yvwS`j<VFwAN1FB'^+ije}e2J+3c51%/ncP;Xs1ZV;1Fv7JI4J?69&B2ЮKY$`#E6"uX U6/;F `;`3jAlVJl>/+)L´K0m$Ji´0m-U0m I,Lm,L[ _V[J ֝ O(V0m/(L=L[{LS6Ri(´WL[)0mϡ` 0m/2´xi{.Vi{ F,_?`X,˧lZlG]f>[3ۇ LHVz([o*U‚ = 6K'+6*غoB=`{Y՚ fM`u ֱo1 zkg1^@vzTJUvְs#~DDŽ-2|+f/aP(EŪ6(]zO:ϥJoߥ^2M,k7=tH/kmml|k{8xn6)ӣ@nW{\>|vx׻ Y㨐{;H]_ ˠmQ{]};sowCW$b\w\;kt9JZeZ-ic'n՜KZán*uzuט>CLQoβOVTcQc )Ԙj]V UB }+C{u0\L%揎rӛ8ӵkx:)tc<᎒mVx[v es9}h?\ M_~c֗i.Ǎv^F;0ϵM;o7>>8ygv|ʜ[f+h?{X6#<=^냽EP1GzUWW[[*QM8e o)e81@c Or@D x@Ņ'He& c '\AMi#?3_6MԥeԀ`BJ }af5؞=Lf1N OzxHׁ̝bGmv&ć7 BD MO{#Lԙ& p{hYc; /عoЗYA %͎.]Bބ.;zˎ OƦ9J X_+CZPM!RCRD =IBM~X- )' 8##"R{NMXSR(PHiźGDܼcmC+y.K) x,xMPxlo5z|m/x ["ME:E`"થfpU쾰oaSibBgKlf,"j'>#:ED=y`oc TIxQ Aj^ =m R #D 6P`^obaF/J2TxRwqf!ޅOEˣHBZu)FšE o֮%bJ]o)kW3"ޅ&&šE( B(+Vv] eLVJ,hX}T=O ?A?a8Axv|$HXRq9ގ+~p+J"kWbYbYƉe;bY(,օXt>~2uk* "1C @]9v&BC M ]:¿cYl6TgR럇~~4 n)/B]xa'ݤ0B'F}\;q{!ޮ ^+nPbY )$v]@+%W _|Me.VGOX~xź ϻc%U}#v-cE1Zbz@pgD(^Io.FYZnG1v9'm mc+d|!ybS\)enĵC+ƫMW+Byp5Ί@1&Tv/>=\õ5N|L[N׮^=\+9/9;kԝ\Vr鵒ܽoǾj?< tbLtbtzpgK;Xj!`{?)3Sz:OW?eX o.d8lVGm^i{qyt(e9$˙>7XWkLnU]=?>V\] r)؟& ~&~3Y+8:W쵔J]1`VHAQHŌcj#p\!*T k:(Ɗ5c!;W<ޡfYeY1#@Vx>2ZWQ`RFڌѾz\QVTrv~}Dl + gAVHdO#ڣ ,'Q\B̩edZa]º Ju1ܼڭPP >OR VvS k@ hb3bXe]Xf /]X?f$V;T$NH?`k 
ZUV:fk=ie,UWv1V-ZUWce]X=/X$V/'NXNhӳj:+03`}7{Nz.c3c**uc=c槬v>})5Xc0c[pێT&85XvX$qZp\HJ#-'->L6סZ{%F=TDA(ҨߘF=Z}rƹQڂG8~ܟNA S6Yx溶mIj=brS/M|^up3U?\N<|·'6z4;.X#A n`94޵ܟ~aBr!Lmx|w!ִʁMy`4ݥIYWSO}}mJ#MW҃GO6Wҁ(/M]%5J^s" Tyo.mPں.^S+V:)_+St4J%?&QQVa.M:j832I iJ5<kN:q`WǢ+dG:-vGMLrZ#י[VJ-_3#[v]ju%~Xm 6"X r5:AJUYl6:Bh )}R9i^,|.4Xjm5Ōm[u9lRsX!VJaqgv9ۘDD+LL$fUtqvؠ4&2&-"rI;Z~t팍_zTW BZ=L?ZX@G hy\ӏө,f1cIN1f1c{v(lU8sJKd+a;VJav]k O.3MdyMdyMd;B Md+޼ j3,Vi5SV\{/ iѐ%zţ,DkFsjY,4^2@ұ;,LNuGeR1BlTPŷ VR-XGje-$Jc(TJڑt-=c!5[c:c @ /B \}cSHJ_NK}ms/5Ck{Z )[ WJ̡uu2fcFTc,j[k]u[ey{6W_*-\i>sխ.r4k [iW Uҟu[P+ZWJeKRYZ*`.ձ(A p.X( jje5Tke!ZYmPkee]oթnezmxUcJ W{\ = Y3\ʋzXٴUNreS \uZF#6l;XH f? W$ leC`[+2Xb \uQ7YVaGSL}ㄯׯ0F~8??la, +eۿq!Rh#R|\W\/~r58ev~X B_!{1YXVz)& J]g܊7>S(oҕ9Vc!ct5cбб@ҕ8A( ؠ`%{}g'k3akBB Nkiz| gؘ'9RNȱ^TXxՑ-ޫ. ) |pto_&XW&8:c_G{%UTw]v{LjӇS܃ϯ ev;Ļ>Ioq5I\Uń# 3y33*,u~^OQ`U_jWs^q0!0ZMߕ_N"\Δk-{kn[=²x]Y^L3iJIk,0,K-0,m=N} M\͹ƿ.`g=ud'w;R,͌,eFZJj.rf,vxmiRB $$KV3WPzglJYH6zjFjP[)V_Gֺj̱P BykTފVŔVJX=[ p.WjRRQjӎjS զR)զV+DJZ/p[{~”Qc1J껫SE5:R uqJs/$rQ J/q^_k X/E]H:xNdBJGl߄dI)՘g e<.6LcIR9_,׮uՃ|R[).H-5ny'@ɋR7Aj`(T⭋VxVb36&Y J`A ,< J`%pFCX$A[D=?nl]p+Qcظc=Gp R?ud9A.=-,b %&JԔ۲fߔY(2dL:(j?k^,kьR]E\)Q " [s 8]X dHINgZ bZqƎum®~DmzE uyKCK$&Jj~Z~8P)w]RT+%czC_ 6ƹq5~3r}Fuof#*-.xGKAt@ܡExl-8Xۍyä̱}k-c+9~Za)c%R9"c| ȏ1;~~pH'3uRvʻ?\>}ŇָR5%Ɛ.o|}x{VӴ^} @=3zm=n؊nn2;pjiuuikOIblЁݍo;tgO1u-ĊQN, "TG8Ŋ69V+bGhFjDe9P)i9&օcSF?*`,DF6m΃+&vc%QJu+J4c#7K͑fDP&o\jH|.8)ԁ<T޲+Gc 1q\#INc<,mp9uzK9^"AsahUSmLPPTjTj.2]p4VJ6xQVxї\A].ej]m#=4iXKaa5: +emV9ZWO]T-Q|J_ >Ӟ~>e^-wmrW)Y9h4@v"׌NWSԱՄe Rjxp5E\MJ\M)vq5AjXjXjXj&Ա} & XUv;,L0A`aX`a„e  · %& jXjWMkjLXlO_ &&h\MX``amc&\jD_& p5QjM& p5ԱT)q5a3s5a(p5UJ\M p5 \M8kmՄՄO,WHxۅ0#x݅ɏwF' 53: ѩӉщ1N@1:S%FJщ W`etŃ z.0:5 XF'\`tщ14kFkRe!|H)|FkR "|"0*0XL;~ӎ@`oD` B`+0ID6G`Z)@US,)6iS BȉAٱrb rh2 9s SFȉrV He[m!ꃘ/abNZ@V _ѴG+7+NW4+Dĥ]J).eK1J).U\&+nWDtIK1VR,q)HR^vKjbFĥ4ǥ¸D\ NiKqҖXWDUW\^.ER je)z17zB^q zG]l;+l+/>_l2K>Y|||||2 wF0 kYebֲ4O$p4 p%jBLÕp4oV_%ᚗ؁ԂC;p؁g(4P9{!XvEv`mPف{8Z_ۑtŗHՇC-jK9 ;pW%v3فu#Qռ؁#EvT]؁e\vWJC?7A{%+au_/f_rSW;%N.pOLõe1 s0 wb:3 6(pFڟ-"Lra|diCiXnL}iii%]Lõ-1 kw4Fa2 k4[a}ci n*pgpmP՗8uCFrW_|{]Rtɫ@6R#X dg)KtZKA15pc5 w1!].+n51Sa#F7cc鈈pS!R͐B@\)XK1BSkuxJn:=v;b}< z]B]~uq$Q,>L_~nu, ..e:zDļZüNe-Ge%ž|4"R[oP0Sdqpg6.n/v(A1 A1cCLS 2lؼC!ܭNK\,JsDo`s:ӺЙN|;Bg3BFbh,,u@@*MڱJX}BT#%Ar0 e'o=kñx1]N@֐@N+!,}XnFP5Av:|띈)6Xj)hӱۻv9VcU5@h#5 LZ$z<+Xh#ylz5{H=p0!z`,t6Mw1fDu~ZѾ5Bxݳh]H?bR=;1xGmCzYܷFnidH)NEFkO ]d=os{ȰkƦ<z`Q5#w^32%>0=nsi} 뀭qFGwBX聣J#O5Riov_&2mt 4BK8~GѸؖ4 iz n|5#/k4ӑFU{d}|ԷBG)x~ʚ۷FG}kDz=qNO^t"5yQl|:5] )#k J_rޞOURE_%UnFBcݮԽ7r_v$/,+"B+BH ub i"On(ijkAK)͠*f=<ظM~B^үq|J#=h<+cTq%`)^pv|[7\"zIB UWnkm7"4=Ep# [zfuպ+W>PFs1x~W\d}bcjTe^2PR^-tԕ曩wsNi u8\_w3)MBM#(SWn'U<(|?\-T<~umw95kኽz7'6X,3!aN!a3FkSWii :ǎs:_9U:PC Rr! RLT)[)0(4c) dUcIc7vzY^@SH{ W'4T)$UT/ ab@tX:N=8+X3ݞ d%DX:KT)1t̄HWNc(D䏥QAJHfdj]!j'DM{N֌uJe:Ԥs]z/i>&aY`ٕl43dqn|}dƠ~LDMZUx@ˬ,j(Hʊ!X@WwCaZ-*0] Sv31bUvcIrZǫzgasm3XZ~SLiU_>]vbה@{=P6~|Dt ہZ_6up01Ca@}LȰ;~{sS.»~vDTBQZ}_#4R M#trh) VJ,`, y1n7Rq;b܎7jh&9҃H1 R q+7R qk7X ;V3~u.wv,Y}2~SyՖYz^ }? e psʐ+* N+~_apV0836­Jʴ7Wƥ8X7YJXjbξ4:hHеU@bPڋEA@R@RHf $(r $&Hi5*Ir$xE2F|#%F(Fԫ1b}|su;,+66F7FQT(q_1b&I H22M$IfM $cXsn$׀i2* ~OI|xHG"<utƒ:D:%sx8RBdήp8kpT m8B"Pu#H#͉8_'2T qΦ< <8X8QG"s0Yظx3hae ζ48g0J)qXL/V-Ra稈,8Wp$L`1ݤSe @bm !qgST]G8޹09q #!n|ػ]>5dĹE'T9b"@Β0Lq__8&'za\xuCkp `+ Ux]0g6]pDlؒãWਠYʴP Ak=d:la/])< ? TҒQe fD]_L~u'_]<3XSM&>JbҶ84RH;0Ҏ%c %XII1cR.+H)&Te+' ,RlR x)4R@L!ȴ(ho jB )V)wh !$')Yӎ%J :Km1ncFAV׌bBEQq8*VR`8@SyRU+Uԕؕ ;wW~_. ),)OuP*>( vډqC޾>yJ`0R0xg3v/x͌VJ),~/eZ? 
fyYaZEZ JOh+l p+VWNڃVo @#+3Ts~8qsίT`*W>+~MVzG{Ǭ%ݶ^n xoOӛeb(0s]^#E:l.27,_1sarLjrdB n"wh{rCsq9J!ƽ\-u=.Xx!Gf;/麶o~.8`^^#>òp xEߝ){[0?&عV[t=JծD~y4wY3j9c5v1^DS#sy`E;~C^?r;8!qN<yQNwzocՙ6߼v]{h/5_Owk8X䠾kge_/AsPg-P!jŶo^1 CX< .1L@ΞzY"ȃ`7`'".6DDK]DD(7GL:Ⱦ^_"bBtDՃ[QfJb y:V8xpEjўGuE N#bzJY{˩cBvDRhT{8~߸^))d ^ȃ U 0V ,&HWuS\(`CcCkko/>^x%MW'LWK DDK\VBW22D! [~yZ3j\"C=_|RX=hxw֏&Yh@$S` Ҷv2DDkQ7QLDtoa"D ԏc]8*n |ex=NjRmuMG)uT논`+h2I-LR `+6R'w,-ӌ䵝b(P̋wIjfm`f6F s7bfribVJa.3:otKI sab w(PԽU̟WߙRFiM[,ftY`3;qwwa.T0a.zLR$N t`Bsy1W,7vEۙٸPߙyҜwfEG 4GϽb]],un9s<CJ=Ī<#"X۲LE6zTDX[SB[O"rjmRf'1GYtRYʿ#XnԚoeRâJ=~+` Gd2IݶI)"Xq-[,s6G j6`g!H s#/b,Dś29R66"v'kW5gKHL2Z|#\s#\]xsK]-e(rZ *˙6TQ\f1]H越W,&̆U"e̴kBW+M ]{ ,m`PO  LK:\5m̦>0_mb::vVjM[CZ#8/1iݖZ{.. ժQ PH<۲k~Á(X؎c-tF 7E#c\uO;TօR̈rPPYOj-\SRؔ9<҅mz__w-c]qs[JS_{e=7Խgo-IŰ#`i&v/'/_-:.Gu[A"RHC_ .\,Z7ֵ׎`|QU)ƖZjŌfVb0~~ގ_c%Un7oXsq|xS)Q+9a<|䃌kO̡ժ`U؂T՚T[/ETYn`Zu`ƖJfG97}T٫.һTcuS"˧\%+i>@_p##pIUZ8([JU^kZe5-k%&Z[ : rQ +MV3"^p.Q}tVU_VZYTo󠓻0׃}vmm 9*ħڮv-Fmmih~Pj(0Bj/:b~"cVW&_㇧OoRBHܮܮ"kKwk;LY2lRjox+.o1_X}A}|fZ]+{߮/=:}:d 6~$. FRۡ5Q~zE4pz TG-N7+ '1>1|;U(Ż  a~!_LRa+J S)4t]XUC5[CRo}d]eOOTK{P+_+)zw+XVU RbuʚSVVjzqk1#ZHZDJ);B`v_hp-j]kM6ƌuUXZU Vv_9W..@, Oj=_VZVN@ bp|G%~<j׋ _Ro|?Rv5V@]gؕ)5ח]\\9Ww{p_)}5cq_gWzs݊gtϩX\?H)cp4_PA]}s!?>>6+YWvS3ȯٜI,uM:{6}=fcj,N_ޞj;j%4sA޷˝`+J)Rj(DţR(p@T!B(^ŲP )v]hKOD]*vtI]蒰?K.R+T$ા˚hP- =Z3:օjYXD[h]NLIMfX`^8KMmĄhUjJU*&*I&.GzGU*ʻ%\]*"J/ cn#^9ޟ="K-EJjzVJ%5)KjKj*VJDOX,˳bY6$81vBZ{kj1jjSSLS R ᨢZO0F8¶PD%ZNp‘Wۓ\j-ÂTT:r# 2(5x=4."HI &LԱN<ϾoQ\O>Qb]hT5c~bhZitbYPW[W.#|[-\T1eKǺW%)A'tjK)r4_b#,IZ^і9҅~>® m6X޻x~}_?ޮcF*Xϡ0CH)=ܶz](}{h{t5c@^ҲJ.ksUuZva9><_WR XjaqCbX~Wk> !Gj@qpFJJIƚ1uRLݱfb Rf~ YRMWƌ%i5*w8 o+=lRKX-KyR]=(7*!RzDʰADX="e2"e+24Hn2 rR "5nŪJf,{*h\!1ƥcK 4wy'&ƌ;j~;x5nxr].a7Nk`H)GHOݎC}~HJrgTL`?ÁEH]}!rbU!uݡH~pq0+X2rC%-4ȕ'1)=RbR><7"bR, 1)ĤtկTwTW<+?8l;C`܈a ֙@tWۮ\-9p'#4)˷mȍw,. TY]&t?J蹣5i`h/ϐyUyxf{(kZ)@A97+td"H#4Ә60'C1N@;`ߚ!fOwM,@0cO9ܑ=ioU bjiL5BPj$QcMeŶ$},u/f^rvIޅ6y~t Yغ=]|]Rr\&⬴y o y lNd !BM2c&ιpםnBXYt<3q@Wq;]7Uof LqϜ0&7[a8)Dz k_ajìm0GR`mPPyZ.06wJݭ{D%Ke(M$"a|]yCM}^xФ]tİP vJRePyRPy5ayxB ZWyTywa՗ث؁!tCc7y1";L>A3;V_vw&뛕TXaH9݈h؆: K !FK4{l/DÚDǎ,FžpP^Wn0L^e7eX)||X;!>bk{GO,| |[3}Gg>byC#I1VI1H!aUaBh"DxH}$¶ؗD,*=Vz`XkKl>x_k_a_3gL"$~6$H36$[H;m3=_z`i3[j6؏Xz7{˿!RdĬ!+BUHUgR4 u1Yt58Uo귁B`oʚD4 T.~΀n+} h@ P^@i]Sԯ+|[#m  2q kHjǝq~҅zS6'D$p9Nf*&ȩߤ`)BbzP8 g֊:RʹTʥ^`\l"\ǕoI.>tK9ob[Ck'\KӺǔSƤ#0,Y5,u2,EHK-IJN £NW#e6OH`XbKt`:ǁϪx ޖ`9Qqc#Rq!8H.F]Sٍbzf@@K~Q.؏A~3|^lJ G_gVJc;b,B`X)bY db(}V?CC`H5XBM"lb,DRW@}b󀱍H 67"Č 6ya'fӁ (2 c$$C 1 )ڜc,Ğڜ6`e:Q 'ΒdY]Q#(;rϕKղ,L j(ɷ2؆8|1ZIq06BIĒBGQ`yˠCl) /L!:Lj eb-&n5U~&#IP!U@P! u.*hU:W n\)o0P4G\5,j*S7,&Y|!VCUs]a|PbK廦HT6Pۡ:X8"b]"_(#.DO)$՝_Vv Jʟ6,$ԭ^_Jw1g{ *JF"bQX4Sm/x#n˵kkԵkԵkԅk*uUXQl  T#*[#(; k dFHv\Z7|ب'$`#HF6Ž66 `%`u`~lx *| `HPJ<$  RR*RZR*R*J $l@@UhbP@RRBR'I X$.%$%h3,O@J61j`y2JLt[*_[QnRR7q*-uP$%. )JHJ|SFIb$u;a$# jZ$\H[H-$HGW z6@ m@6oBpF?\똹/saoT[Ap۵*J-T( i|-Cxl T[*Q䋵 [@=fmjL`;R!L h0xTEm}^_?T߱Kj,; |gxo=)pBJy(ߘ+, Xȴ!$pЮwZhiWsh|/_/ן:î#ȅ_ A \<^ y;6Z](RG(RQҪ3rZX>Eb~ }ATA#RH QOm-5ŵY|!͑&RN&sIY7:w8PGwi/|:֍gąH E~9}P@醣* {d~JA.=ֆv)]w..ꆶV6iTvCĿvF{iQ3s9€ UCd]1З #GR)~US⫩Vc6MV ̄OV**OLBQWLJ[cS<ODsJnWTǒ|j֖kxJT`z! b%k:%ga3 qJTH)ݲǯ \*5:!Kל2Ѣ 3ē$> 7f9Aa&I0501>-sP&lB3i"nZb"E;O8WƎP=qHcH_t#^ qFH|, oɋ`6< J੉օy JrGɵ9$:qyӨR습ʄʄWcW2a5XK9Y;#cʿj=1Jac\J")tRRODm2>ɢjmUX UD\d(WB2XkrCKS[2-Z|z!ȔDKu/i(BrJA;eZVֺjjjPZGݤ8%Uc)U=T3m׶9C4ڱ:BcCdb+i`}SKKEǺ[t QN,j 0vDu/P+VNu/urH)VVBR&]wd=.]`rmJSHt5͋/?TRa;9U*aTXDڱ2)*TXfRa=jZ*,RaHSVKAuL;w|o_P"A9a)4q KV9I숙R*!pH"*o)?yt<C†CƢJ!.{3bRC҆D<$ǯZ'B x" uhĀҹ,7^ӏTH#IAj ێr}L:DOda~x,J;-،uo}\KU>IbS**ޑou*^w B'/_.쟲[MT㧤ĶD`X@bIq:bA}Ɍ$ *b1HJAR>{c .CCztS wypK^x J! 
w˨f~nj}_{%Jq/.c@'!Vt1,uWwĶ;ὡi}ر}d+}ذ!j0UD*1Cea$A糽Mӭy>bAXP!߸^TtK5s`cV{4~E:l4h p4ݰ5s8lH+nC]P!-6$ϟ.aa]oNe}Q񾛣X(v䚻fqu%^ԽxQq?]Wu`d&V*zWAδTZRr ui[ds?~=~P^S4aX;91{7T͵[`W8&_\zR= I42&0TNdž Q.Rjw"">b8l1,q5۵ՆMvCvL QbpWPTJJ |#RZ(4q&T&&a+M&P8k1M\UXv6RUv{J톊]>RrJ퓵'"k[i)k[ޕՅЬdmk[튭vtRb-rrrem-6+6n)[*cuV|40X9ciV'}![ږ?IVJڕD]9ݦvu"SsTjkZGu4U_KCEg-Y,WP;>tx,yG*DUqՆUlUPaUG݆UP;}ipƎhk~GYCv61 >_ _S}k,B+vMmǚ :RH;_ɠ{:tEXP?֮\?*ceŎ_9NxL٣Q 4J{jYYS\㷛Z k:7j*b6~#D,҈vwtZs+%"Oc ';k_s4]:vJ'VE+hx+UU~,ZZ뾃O=| R_0>F /l\rS6ZMhF+clκmڱ *I{qu.wO>|cm{}LE#Ɠ4}=VrH{Թy/uus3 Tk)&Z1C .9gGFΡcǑ[R1rRCu}6~l6o>^.ȹl}A@<lur}?~;;2(Z  Z [pHkMŠsxۼm ˽¯~=+.<׳?TG&˯G^Av%>w&+tΎ k VCP1tGm\y)t]+tTG`{qApaxvu?PqsR\QOTwݳ(_vu6حbKR!_ׇ'bݪb~m_?~ Fư@l\:Yn/<⫡nGn%qU:#'^sPP 6ԕ)&74"e( vzN/R)Z@5ZbKET TbKu9{L B|oD|6l| k)λ?= u)XKpԚ9>뛶Y[qTcXFTU|;"@el;R}` XP!N4Ɩj{>ᄊ~ `7$MR Gwv%U(I4؅/-ڱ `W_#=?QX+!!N!L˞Cb5ĺ{=Лκ֟yz~-D@Ck35f= 3dlg\Ǹ|1w 1oC 6@ՎC c% ӓD"ToV6DZ`U V15űӔ-T85T95|ij,XYScv Ƃ Sc+/M9bj,$XW55X(!vR55kaj,+ݱfbΖ-fbC̖>cl4[^ei&-lYk- I`,=Nfҫf,ΖP3[ٲ̖-bllٮٲlY5fBZ y<̃B1y̚[G}wHQL_[05jj,6x+Y0sH?ߟBϞˊwF` 1"#/$`LwxSxR%ajCj= 㸠sdm> Ju4(M_+s )Sl oO/?l};@`{ s)6VMbsi綫ÛL!.zY7}VIKl7bM#R?}pCܪ`Vyƭ !n[Kz*j|c͔HuXxd\a-@9 X `%6s-B PLZs)xT*0|v>~0%1j;2a zT@WV!Qq>z76gm>QK>6Q~Y%|,9c[ I:1W/cY5ƠBA3򕌶W>Vjnuط{eӗ1Ý\,5Ѝ+J*p|xm5Wqb具{=Zz }?^Ž ~96J~Z;n) |)-u4vI"5Րθ#U}ͯ[M|~~3.%w<\-Jױ֦u[iض*vG.ԐZ*H`+ N4=M=B)0ԏՏJuH?jŠ~TRjGt ]P/0еJ٧;6ĎZ-QK~T|#:MCNS:M+UuBszJtG=!/b-t{ŵ=p9!Z!>G)8k-tfiʵi98vQG[ PEVSȡPS4v?iZMjj m(l(qm|mU숶U>Ҷsl|jY[*V[`TV+Tѷ l&݊ݭ8EtrtBTJ聅U/tb-tʂ/tB蔅T)KGNYZ) F, ;4t)K9=w1b<e5 T+ u%W5F_?5 bUrX[Z]JPOC^QpN`pւ_lzSt 2U|M ^ZI6YruvхLEߙ˵ ZQuH6!+CJq#؟RJ_g<ϟ(jܪ 1̃LE4~Ǚ>|ǝ;F].A"F2.5 R>J\~"q8ǘOY L(Q~+cL)ޘtFjdʟ>rɻUTbRt~?^RŽCXf*-49UkbpRJ7ܳ/ U/!Ԩ\Jxq׏Қ tUG.4Vjiy*:Uly\Wpw+0CXiZƫ}?kCXy<SօS7n/Ǯ̛ߺ$~d2(W}7\jS]q^ܘjעR2gvA5M]4Avaœk Ky=hg2&IwٿGcfl_q_[T{>#Dh @>keA6XɡhĥJ,C,B,B3r=0yuQk/HE\bh{nP\d%8 ]8J{v\Q&뷓mX e2׮7;ȕ 9 m}_QǸ~m=>w~UګUyUgUpUp?WBxo pmJs>|H֯u6~TU{ec`emՍ6Xu` cUU:4)2!Q *4h,6X3?7ZQW? ۸*cV|!/?/XfZ#B=2!O'[Rի [k%>^?ڙtlMr꧕MVBm?/Qz|?\>jwox[VcCtNݹo*#abͅfͽӛu+}[W_O}dzMS"v)v-J=pTQ< QKQ6Tl-j-eKFYQcxN3l;m_ͳ=b-۲R|j; m>rPcY_/׺ʻrej.}ahP ܵ ܵ ѣ5*\*Zs %Vh5J K 49ڧTmi x!Q4KqOb/n¬拓F/MNf`iYF6_p||ah-xARRE-]jnnt[O;-uuTjS)0ӂB/NknZ e6k ,-2 u,>Suɖsuɖ)uu.Y<;VJ.Ys,]QRiإ Z`6hsHF*E\R_,RB^|T,T!(vaj|kEբ`ة8+*EhƤ 1w`~c,ziV4kV4S3 fS3R\XポM͢jE3( [aL cFj!dKi˜;xRQ"ȘņS2kC^}¥TLI1\b̥Rbi0e`4ed>eTk/z* ^T`iʂ%s-0uW,*yl\?PZPꐹV c|WϖUۄfZ;Pq`l[aLO1y+>w0,gKp* ^x,X Ѳ`OeTHSoZK6]vKڕR^c&PqRKN(ZSYNewTLSLT0⿸3{{8oPzPT{*e4C T'y-T=I|L\?XS 6|x_əJ?Z*WR^x{eÞV\A]Yuy_z\g Ф 6Q '_Ŀa $^gc;͵K_'ZqYkqo~!u,֓P'Eq!;(NLZK}T7S?c*1+T{%L]1_|&ό$qI4BQ"[*bA,?5[sT͟jfF2$88 [ XU'J [Q5a 7-~u9PDW>HZ(v, j߈)*lj;$:BEGL,D{t>ʝo&Vo&VbkƲQjᜉRĊ(P<jEtl?Z-tDD. BĚ!6+A4+mn^wN%ͩ_sW;Ω_~!y~|-j&K!+gLj&šWj_D(ZFsҚ~[S&V$+jU$< DC Z Z !o9glڱY]Ϝcw%͹_vs~Ϲ_YlrxJisW쉦 _@s֢V9_Ԝ$ 45=!bOlROlROl ۥ*ĖwĖJ=BO,OTOl3lOlRO,DX =eKYR'TBZ{AiRi4;k҈UUuBqС RG)dR cV{O+;zOa>=SVcLkMXahccXhD]P;;gXP1i֖XZbq%Ə~W Aʌpp芅ʣ+FW,4]}bq1+vm~wBeX+m DOKTOaSK!<-STOATD OOATg\!-Wjܯt52~T@]LZ*!kr/MkMJ#w R R*Ri8)jbPJXcjǶ ՆX]}ݩj 1-/:NWytu0NJ:VQ uUNj;^PkӬtM950vJ^mՎmVhk$Z혠ҮrN=ZݰV_nXOa-ݰ|ǯ>y% Jk5+ya-U(}cdqmjqo; T )`x,O:cq: ,caA|%’ˎBp#NGQ8W$uZH֖JiXbRM|cUXTm0- :tmJ0Cf')x;Åfpcjˇi [5׵{\Xjs–{ͅ\إ~υŝ`kǐl!X鱼8R_5z? 
}H /HWP!^Ej˦: [VxN"S"99 xN <mԧ2/ԧPM]K L LQ L@@&HJXp􍝇cy8RT@5u)A5QՄcTSTWoRa TDf"5A R&& HMD 5q 5u) 5Q-<'ts|MT*9ԄCR>HMf 5u-!5#^$HM1HM@jD/.%R k7d] z t%?ftRaff`M=MÑf \ &~`j g&J4@M|xF*5aG5|Ԅ@M|)@M8l5&x5e)5QP@SΉ8'*t 8[ 5JYRϩkGx x(_B} PPPP ->w>u-daJ%ا~`p>EtBtjj*BhŎ}82M?-Tl 0X J%,*rxu@?J<"rW HTX\S[Mh$_n)H#i`@*OB= Q0hGl/zwУ`<@j5+ЌL HT@; cT˜УJ%(У*(J5OSGK_">AE|x~pVσ<p ukNpT*>('2䟡zi n/A 9~ uv\o3utNIعmJ\NP4T @n.Uh%5vW*ĀnR!B\:^ւW_jG;)?- ` MTiD)#FB%N /vT(N wL;iNJvRH0GvGLՎh%UE74mhf{q$/w1%iƕW=U_رj&>8s<"E'4A|@% G&o9jvVHGhwll#Atv6|nisɪLjP̧S>8v RHSn8OX5R\ dEPy S\<|SPd*_OEHOw޷v%bTźtXymFAJ^z!#å#T舅 BG,?uNOE%.],TlcGDťB*K!tȲbCؠB "j 8,&t t6tƆAYZk!t Bg"trRP - kJTgJq`4 5tT:C%J^ZU(JpP SӀX܀XlCHCJOjpm5և9)*$WCpԡդ}ˮKW |!h3Bե }pGkZZZ+@:F~ѵjt "S)5Ā쨬Jl#'676 65na7n2>Xjՠy*҇&dlBVACkm5K\ u?:HxJ<8]qBŁ8@HH2vpaNŅ9|@aa N7@!6i1DU#Pi NqʼTJcs6S{́` qp@[,zרF@uCzͣn< a N4ku N@=IW$4206~csp`l-6csp-vl\00@9dlN46Ɵ9 !vpRid:t;uxOMu* ׁ{ lm2[ive`}s&/LvV)͡.glcs\  !Jv:0]u j{::PX9Cag(,K~F:JT{:r7s%+J(6,tpR3-v4$rwmO,$i{)]Đv4|wݿ=-oK*;>ᝤ+Dym-5@0FXyoR+4>*J˘s^Tg=75̀Rv3Tq #wl3Tq\ @d{Lz(]ʵ% ODqwnN?i6;InZxl\߿Ɏ$^*u$ɂ l*;aF HT ?c=jHuп@4:r^'>?gUTd2_(L'vEX֎wywv<G=V#-BLPJfk5 }R#VJ\W^|Rl9\͠ȋ3;U$>U#OrU|'*"-lMѢ p2,gHSHHd9R(C4||S2tKS;6J0Pv$B礴qN *%_#kz/xoZufjXkxMQ$$8v,t2: ż Tfa`S2TR *u lX'\\.Y6y|g xaSslV&,u2sexO] xLZ&I>sA&EvH9C hce)Cj g޾Qf?TZYQ_Z%*Yl'b0%qZ.I?0:~7ؾ]G&9R'` 澶ETKGlJ2܋ *XXI[,K+ؕDpBuL]zAbi:D>Uvy홫MW{{ #@>Rvz}Xv@]o[G-9|:m]n.5\R9f.U(Qy3Lfh^ yA1+V a; w|` $iQl-U,=-S]WWoqV{vEu\/ |e +uL#FמER1w XJ սX +ulaeӘ %~k_Ib`ܱTD[hfH)`GF@ ''8ģj{<_E *:]x@57HXQ/No.ߣk}.gՑx1t厗}`SaM#< V'cu9>oO'c|R j˞׺?>hz `F䨄y,K~%# {mAu_5z_?-7AA omOcI3iiwԭ·3y?p0ovVk_߾Xtl/kwK;9#J"=~B|_*8|Z8NǏHN[=p9~;j.XZ{/Ƚ?ihOݣ)|nʉj-_>eT }:w#/Mڞ#gQp;~ݖlssYYYl2eQbW;'|'߈XP'T7Ek߈Rr?7B㷘ϚR(3>OaAh>>kktR-1Uul/ =U{,o+?_8W/SX_w:Y8aG9qmY-pm%iE΋#L,BG,Hy#K#JZ7 7 DlR,Mf 1|M5w{3u:Ӂ3@nkWoZ̵n{'?<[kOWAkWOlty>{zqBU.O:h>]t\ 1mY +m|3ZJ>͵͵4y=U8/MkFKIC,*Z]:8w݂* l|}Y!TS^hT[3cج$\)8vMxBnT,%0nKR&"ʂ j ,D) <8] W C.]}Ba9ĸUĥu: K[,TLLڅcsiq BF]mRru5-Wgط|#cNL+7VǛXnahKe`QrsHXC TO%ؒ/9h8>\opüߪs!^ Q$ P..ѝwb;wBmSHOYFԀTVx>S5<-'U'ĵJ{VBfYŘ2zp"zpRkcxR11,1 1,1qc8T c| cp1\*a w(AZzYkd(zg%(JBP5mD<@g)eCEKL /qv$=kD?Ǿ8lW?Rѽـ+~ۥ5l~*W?._?'X/qoM7Gȥ>UaS >n XXbprs b]R!b J*bn5C7PpC\-M-3/먃V,oG~!ЊЊ及V܇ЊЊ[B+ %bNъ^b먋i\Zhڱ]Ku- <x.~sc4^8bg^^)CPbB &D_wz&>&Jd(EvR$_he|!ҘhoƩ{ƣ|`Ҟ~h`Tf4`8*,030Ό +/4|.}ޗ2B WdTz\b=4B~B(O_ͧ&y) Z@MUQ /&*$  7Jd7^iZi Ԏ@sZnRqݔT Z| T|i'0J/2/2N()IOg2j6R3|Q2Ga-GaE)OA$GbRT >J\([ w (0`(NWW!!  V t΅|AO PO!j]]&&P ;J6`$ *@0L$X$(WZ Jz3ϰI&顑E6 ɦ?""(֑v,֑oHX#=iԎEDFii0(n$Z$)|)"[ MHED*"ҟ%r|X;y%5ZP ZߛT Z;{#pUf;T"Hױ`^7+\7:0266ZH<ŵBbiLد kaPWb,N@,u;R)6DA-|JZNqD*K*4-Jn[q;cL(7|&7VmC-7 6D*@kjvZyNbR)nR0BÍơYT7}P)JUj%Uꑒ\7ŷp7cm YH9ͻeF*n1YK ?RZ\+iaM-׵Of;FTC96'd%2y%VB ZdK!L:1xDp9`n_c?4_ +! 
Y:@!5굏gLg4~FbQud }*ZۿaTc}5|C^cښ{dZ=:Xy*5gv-|?Sk 5H,R]k8H53RuBq~K>j׸ߵi?Ns|T2 j72ϣ~p\#}ŀD3ίhyYk?w)7E e}ѭ/4A~@uKؼi^e9~( |ޮ ?~_ݵ_[w\Ҡ+<ƗWyp|!ğ|*RÈ_.d8j}>CG,Ք,$t>gmc!}:.6m+7m{Yڮv)m+yA5#i+mVH<}&m%&mmes:?c%ӦcVұ`kNJ M޼휟>J=DlV]J?ҭW$ZDLqO?u7e]%/Ҥ\`V9ik՚BVTM ITɾITC?]b2;(X?vȋ"W}SnVxi<)<~־y.LEn~=*AGlK=?))FpۑS9]?Ҧ=]cdB.Ҟ` cFSmpz}z#=?;є:bht^߾?lֶs??ki;??߈Υz{u[o.rHAc5^;6:Zz I|>`qmX]@ㇺ% 8 +o1n'j@u}t_҇Z#02}!ַI==ǯp}DPԁ Uۍq^| ;~nLǎ 끘t>!&Ӡp@U^~V4ӎnv#:N흰~) ޯ T ,F(p MӚt jkUnRr4^$#AͮN"YXy# UNu.+4WY\ynv`L#$l-OrV6noir+Ilr]y$~q?&;ҹ iZ\I4h1-ڂzܷ6#hWrdᾒk^ʹg *sftjm1Rx$GHqq[[/thTМVǦ ( A ZЌ}c9 RRWa(yŠN:3 έNX Npn@v=&< Dw%z.Q#11+Lpf*J%$r $^Q3̵cgL3bRde3fH_ "V$짘HPy"Am\$`zD*p58Ht$H1;wA]#Qr5~֏oO|S "KVrL/ņ_O0|%lr?"Že{}ЈsF1 |M-> P *:ci??>L ǴSN)y)LKbliW3e)0aKJ!zL"N!6pP08N:;la'*kxdfIf'N(=fru]\ b <kG({Ks@V}Tk]e^(>'V/DJ(QPu(kdޯ'翴'c) EU1}ϳSvxg|.Ѩ<=sOy+΃O,|T|afu*1dSF9Cy 7sfᤞG(^4l0nF!Lj(=!ƒVVwQimoJȼrJT:8ɐ?{edjCӱ!D7 -M՘6?ڶa/a)LehRIكQi ^PBuRTBQm$hG`eFƙ.`#U< ? Wч= @j6 -!3];؛ Hsl]vX˵X[6]e BQmTPmjV ~l[[<j߇teA+5%}Vrq] Dȅ7 Kaċ3vʠJV@m]h PtcPti? X8Eb]o;?D6@-sk7zq[|bo)ȷs"[*]BW(vDW(1(IX(I0(I(I)BL!+ց~T)Y5+*){ Q|"== &F#zR B` IVQ5}*}nW(HFThliX<2UhD8|D YR=E)% rm0T``[5U4_*M*/خP{+TѮPm;U%L 63s[(B*ߘB̽)_sb?t [|B I[$`>u:k Oe"4<#|yhj6E)Jɽͣ:6ZQ[k{a~TpEUd[@rq ${1kWK֥J@RT  1E 5-U9A}ZfFҥ᧤JȓNxQNA`2D:J%uևJ};=_)]T+*xjpSI/7]@wI)w:ݙpYM_sÖKJMUMT1Nq‘L,_Rj/ R,)3Ly? TPYg *l)iLc!t;N1OX O:‚< VXy\+GiviG,W@@'H'iWȘe~IwOeE(Q$J~ ;  l+1ug8(bI %_D'UL'IIicnNMRb3Rt&93)*?S$_AE$%$Rq Lvﰘa1bbň 9T*a4bz@S5/40ToHKc@1Kcjh@VT3V&DBV†+8VFvց1QK}T5VF$(MKg( 5jQ藂D %4H(MYҴ ^n*,4aǢ4,8{Gt)4JsAi}&xԢ4a)M!jN%9e XNPAZB|t͑J ~WsDl'J@9Gf(|"У:˼]ydΌ̨BhFM9B2 HXA|G e'|#hA|7ˉμ,'b]r' :);=FM߉6RN7j8*{w؈ITlOw^ōs3o|l!jyo-ۆ-߽mV1 y7w3{;]S"A&ŏZq1kK8Mq{Eܧw[am,s}HuCVkޓD=OyZJvc}ncIu;h2?HNx?~; IVO%ړ$* &Ir$ oW[I=Nhd=>XW8ZZmJϯ?k~a˫eGځק-#rW8^.wqSO?!:iדaC~^߁o+=&Yms%v%Mtx?#^ű?6yݖ(t%I[_oS|=%$Pm;GQl%Q5I+I|3.+%VnõaߤH}Kjzc‡ȶj!ҕW$Mad>#]"Imv؞]OskGڬq}湖sQ9Jzwos~~W:}l=xhN۷ӯbh$iIwaYhNi"2Sƅ㤯 SW*v SwS$ NKLeLL|2L9[2L!&S>NLeL%$apL81H`LMXL8@1NT"1UyS sa*K`\IW^0Mϒ+KRyRx$X*P*J* J^ wK&P@zMN20SL2X LMd*Q!@d' SmxL!BDl;C6Eb @ SVS0LR0I+g&]3S6STS_~ZZ-zIL}ywIg]FuK60z\QΈKr $~T mI%y`-Փwz|`P9n,ȱ|)t_SZ alfVZ_NU~"" UX8lDxEH|)bIWKBNp*^k!4N 6Z; <j}U;BiوPiSG7oLJS/jw™Sw\c"c^. VYR q*1d:.N'6Uƃ)U;UUk)Z\BW}P"zMTny+6TY*~̑gR{%'4>}ƘT:9L~S!j)Dx4RLӫW]hUt൓O so׿/`c4RFD\JA 0xҤQ +⣿~]ዂ|yMbl=^6u<9_b+_A$BzpZx9_8ƥ/}NBs'޿?1w5*cNֺLkuPkT I)GYY]@1u=~qɛPDQ^kpao{3nWxBƵ; H~! wި X*`5Gus犵ݵ7^F)ܿZ~WP3~\Sݎ'!յՉK%TcIc-+5OT.GuN3pӗ$j\曾^'ɪe6kG߻'2YwnӸZX}See/c|N?%2?h26Qdvڎ`}]D?b[ Sgq~|.jze櫟@3_mo=F\.G4ưWz-M}3_s!ů̏ךѣ'G-~`G_pgr[( _U^}]Aە~ q~GZ,ѰcP~lXm߮#X}y.̙>\?nDGS;p\働Ҭ)a9}Tv]u{ڬ|n^ëy\[>l:~ ,FS?l8mSg~ ~z/VRWr@ *b I h_Kk!hZ8Ckd} 4jC%?oR)'"hfÏOwwLh *FP%L_GoMݏz]p*@*<kcTs!d)󺷠~PnQCр2c-pu=춖|\| \a\q./ĦG-})Q d>Q'}sNj`rxT'i's<O+ē1 ۻ;jDTD x_X]1ՠvXNzeoP{:y%+i:J B I U#\;QuRul_t)Q'. 8>GxX]>M42*tZ@x ;+4A,² QQNZ4T8T5;b ʠA7]Kn*UDQn@Y7Slpbya 4')24ѩ Cl@b5Ħ(N#]P!Վ)LRi 1b /L_oĴ:LʴȾq160TTS*\C%H˕t to2T )F@0z|a '86|.\lu> Ng`<Pn2Wv0z0Fϔy ]i`0cC 0Dc-KmZlxo/)N`Vau- .`! Ab)` u:- Z RSޭ`à`p_U_>WM5Ȋ P+Umu?}?`eMu5XT Hl> H, jEsmAs;"O *iEռ)T[k9+L ȐFÅ d؅JAdofdZA}u@7XxcOu>.|F K^Ң^uA-#Ɛv@7- z1XPbi|quE$./+n1n$QO[jVaϿo*w-aBTƄ*0ʗ0ʗ0( cbBq`BaC@ `BU\AU  ' @(bHR1@UA T@**.aKa-`Kက-ERDaKu)aKj5}P6Dpc:AQg'0d>xP+#HODP'=N 'ZE p p::UcȫQ|œhQ.ΘBܥ-kRJ%h)fh)><-k%R8AQf8(   K όAAi3%'+ } bpB0Bp ~N@㤁C1]@CD 1< ``H^8yHkF= TQy Tbc^uFO%Ո $L^u`xV7~9\_?%eG&U 6~[2?J#*zQ2??jLϿ~D[>c6cD+b\Pj:rluˇ_-UǴJMLjmr}q{|mׯ \=o.:~bW)Tj(Qc/et9? 
u\y_/NuյF 5<TEkcKǾ]ך]ZǏoIkӯD㎪Վ{%ud.k2^7URTgmn)zF;?uy^-03BSQko.Y΃YKU{}V>qEB|a+z.mK" 2bQ(Xx65e!!߄J]XK`ŠX1vl(4pG|kkMZĢ*XOkstVQ5Iw@@` @X Đ;l|;\XzSpaP\XF**$Qb\'4L%\X cYZ =2@b ,* t"ܶg^dUt[mZ^O8O@?[%l ¥qЧ\طՎEZHUr b+4X֚4'z|׍ܻ@ b)R1>Қki)(R16A,8Ϙz=e1w`VbPX2}/⏟1]XVVtaIv|)A7ndMa幤04(*\~k%%b],֫qmYpuMMXOs+Z`.[KW, T "D` ݶRmݖJ=i˙mŎJWˋW;wa'b)ߋcDZ{AJdQjt+'kafԴݶB̺_eɕ$b-Zk܊Г *[+IٞPha?j_(Ŏ}T;M|C@+BjWti9pZhq^V_]m~hkX_}}mΓuG¶b)D˺\- z_n?J:tVm߮ȕ*7k#kŵJo[ɂ{Jo!XKyk)lѲ=-+Z)TkKyODLSlK^#- Sk%.4:Z\mAVDi}]fJ7[o/+:}'JA;X RD1ThjRՊ 1iTWU'툨Vf@{g`C`3 3tib#'zޟ"}B#ٲU%l6bB4=PgZCo(Ӗ1ݷ$jNCm$_-kR.)8G4ؠ :O=Z+W^q<$dCfIڅr- }~vD}QG^)H|XHZO,ZOlX3~; L4BVcVKs;)xNp`1XoK焐k))v9-ylFy=~F7H k TiŽdu4uvW">iNp5[+SoSo0=NCwxCZ?EBwd ?.xyxw *Qu WI. s,g~6$M΂)¼q_C,J_X)Q;J_T(K~AP@ U#c1u+</o1_>> zFIqDV$'hH&/#0 ' 9,٘4]?{Ų"9qݧ^yީ3Xod"&i;t8#w:7xw ]7fq/ѥZm^뱿._߾!Xt}W x:ZV?g*VG1ߟ[e}+,.؉}3_><;Qrdž^Y!􉅘<(~#xOr>Jr)n}%b/<{? $']w/֊ e|QP+*$q^mf?_ILJMo-1Bף͝Q)6g1fY E>G5pp7wPbLIΔ=sVHTVu,$q?.#[?-o%JD i%"SSVSV RRT),Hn·’Jܥ$إI"3XJU@)HHJ:)]`KeXK9;A-UN@ZRRjPYJXJX _ݡ4 !8et2HMU\5ӔTFS`/(W\MgBBg*߅]nB]tIb.*W `l&([+Xo"4- I-MIMJ6ElnpJX^kzOF5{)Dj=0u`m){ I"KU/^RY`/p0]E^`Ic|+9R-RhJw(ĥQ|?J~|JKRدZmGObmmT*տbǢxDbxDzQ֊G]'FR}\,>:U[3t[*i-(x?L| EFt HG|y{W펀-@@"z_k [T-Q?~(>~zrVmKn[p_$ɾHr8ɭZ+Ѻ $B h$M-$?_ZA0i+PwW;{}]&HmM=p^SZAJ.#\{U ,5E0E`:ۋ._?8qvuBo/*TҎh#!-8!ɯt:]?p7^]L <"oxmi =8rЂ, B- v, $XHݱJm|Q `%9[leh6؎VaUC]{UK^RWTU-zUC.P @*ЅՅ *tB^B_B-PPP9B ԅ A TBz 5TQq; .{RG+DVV|#:Z+{uBU ڥъ AVP:Z8KG+i^;Zъ ^ Wb^ahYTU {UyUYWr@*o%R*tU)W*^U|=3*Kw:Zt`=?nOG+U0XͪX ͪ6iV-YOUhVY^ͪͪҬ7ڬ G*$6T<##nݩCL*4@u'SϵSw:U:UiTzTMNU#TZTHѩO:Uѩ KD*RTrSaکXqT]bK\*0BƶTbGdlA-*1:THbC$VӺbi]mش.$ZӺ 9[l,B6VvQ5a+bETX}`S2bR⾩XT,Q7ɪolTQɾadeM{$FAĨ61_ϡ9S'>ߨ`QTk;6}*jbK51*lbTobTTMڂĨZrMꬓ7 IfJiJ5M4MOlR;6WWڱ H_zkRoܯ HPد5M))eIS}BKm AlSdBJ*ѩXI Ǽ,c R].B$DʲE~V-5ǼT/l LQb3Isע2$'98u8SYS *#8e%`3՝L 6S L}tLT#Z 7!/["eR2X 9R!G GÄd~HbOb?R(xGRTHʂIY5)O}G.?fn%ņ7mRg#U|n,6DTȶmȶb-d[ȣb卼F gPxnu^9YnOWs\[k_*uBͶJU`(4w$JJaFke4!+Oل|ǸOpS2fmJ\V!au}9tEx1pIxRYBmk,BZZjZZӴ̧]~VuZd}>@$}:k#Aw[{ԕ{hRW&uynU]܄j^i/]j9Pm:GR)Z]r!M{cEli]Q[ M ڥӞxj-TC]}7uqaZ٫tщMzB]]$a{]t{+tBW{tW+t]+_,=W-t+r梫.ͮ.˹K^n}W :Gv|l5;>W9sImp̂Wh]G׵=GWEm`&t=Z텦=Z=GPV3+t5EӲŀc^y1]5=ID];Q ۝6|ukwؐQ;u]˝9Xr*/.ԯZG~z.֪կ~!UWuvj.O]( *"RvGB[*.Jaq /6DOl[*R e!-QAjRJH *BSD ~BT{QAjT*Li#$_'|),5",ZOBX EX/ a1:W'pą76BMJ͵a"16*R%; ;;C;SXIc-DŰ2DP.DbRvD{63N3O\*|$vUb8FxR5*r!*"*!"*ƍE ]ܞwwW]X,] K..$H@$;'Y,H"YZb"Y|a#YX"Y o${~^F:˿hфZ(z _5 ^ד,e z;^m<'Ǩ^Nn䧏 ;tJn,!Tܞt` esz>.׶4%^iD":*N/ϣuwDiR7侪n-߳hr;}͢MϷwU9u6зbtgqbϽ(?ۿ2\m[qn_s[?_U֗^Qt?0mUeu>֠.G*ވi`6r݌F~.u)swuepVfk4_l(pG>9-x'K oo1%{C9f}ˡp!Ȟba=zJ}< d^o˟"tp4 7N>KgG\.};{3p}w e)QJ/MOOm6|wp o֟\'|nWbՀiTn5+L[>~ωyϽ}g߾|>tY5hAn#8y=KCS(R̼0/[{L]y8珗NeyW?TkoUOZ—;{aJ!&cb|Ǐ_sf~EcQFڰ(G< *B($Jifjd&}nhfI,i8߿/%qQzph~aR_6xckL(nh ^$|CG N m@Kh(a%654ukvD ,5R $Df} uɻ%R;B@YE;t;1O5eZAh_k$h3R+t8|ys|y,0ί`i"*P2/,/%/uh{eʺP/*K?RُY\ T)lXŶ_ *ao/>:_1~R `3,+аVО|q+kx{kq4uzwI~$Ԣ˪  F{*vWTa[O_7'-8N֊,,< $_@b"EUr8n>?7j`Bk°֮RqXkwD[+а */]>~B ՆUaU9qr) {S>gr8hj-cRX|KfˌN{4TV.ZYhh}BE *tт ] }$E[ ksl-GI{mk *MTk+WIƌ tjyѼEu<4~\-HV0|ۓV y_Q5oxJ ;T3< 4f4j}+5s@XWN{>/[֡XuU܎Db9:Vlݎ\d0Ub8WiҚq){mPWP|^?_fUyp[Mۮ>iioeHjN2~屷X̀}0;bԾ^>._7пkۼ;FIJS;K||=Ƒ XHr9lDTvOlU'<k¼WQuɚ֕gvnpF]ܷ |-6 Ѝ#񃲢ޱNuYs[7uܛґy׼Tcu/&ɨX]7\`7 oU LfW܁]D*!.U[8T#A8R_ bP8TIbRla0R~k~k~bhZ"C^Jg]ȥR%rĀcPT .ꂡ\5HY" =y{*9RMn%P2E. e|5\IżuZ,LJQN񔙲X BoL0a*MJmPe̋,رA 8v)'P\ZuJpq@-cd-W( nf>TY u{M1j^j^ZKj^(xv!W6DǏpnW[yj\ь@m)j+m m]%@m#wW8~ӝ8ңSewnەD!gWwꊳo.Z!t߀#DA-(Hv$$zQzk뭧(^o-xU\܀+AWO @@}#lztsvԷ7 @}B ^Wz{ >:")Uw]h?B-8%qj T_P f eĢ??>,|cU|c~35[y^BtcPjyyK%Lb| pv# 08´S '_ʿƼ 9N; T_2P}),=s1|PIZb^_~L4J?`z!ʜb^`>GBo 0lZSZHbN k-m@I yVHbJb6^6L6 M/ _> ݍN! 
Q ͢w^)Kt]] ZHnх2+_`%]WaJЬӕGV(t脝%I4Y}I1AY;/6au%AQqzJOuDH}*>.s]D&u57ouYAiY#_-C梩u!QGjhTEEEbua+7D\пZĝwJmo\Zu5tfneO粊YƠ= tV|"Fm~]IVĺuUON=D5tes@ -k}Sbޚ ŠC5.3tuO_sN@OD+ <[sr`<~6!"*j^B4tڏnO$#sE홫D%F'JzaK(O6BM\lm\<N#[\z$])VWkSvKMϜꓗu.=haJ/qV'`D^M0To,Հ9~]Tov>#t[^Jäz|w۹(Wbh>qFO+tg/7!VM5R,..҆ |b&H!>))q'T-Uu1;RՆJD`-4KJUg7|P:8ZjTOipig 6\;AEh @cu:ID+9}YM+|?j'oI}b7bIGȬoO?OFH해w>KOonk>.mk%PvlO%0D'Ա&v_?6l.cH$r1~~\x@o9iOTh\qJ_=rPoC?A_"6ʯ]rTeos=ySi+:mE.sT8qp _#Yޏ Wt>u_v7YqJ%&RﴲMLyC ޽ Dn؉5߱oכ&SwOp\f2e-ʐ srfB,Pٮ)ۯѰ)ڱ ղ)PM|w-*6Fl-j̎P[G+*5J]'MW48\ijW7 ъ\}ahEZlc-bkb#]Kĥ8¶9yn-6VTY+_]jha"EKk`6q| ˠ]Zת ؚtU6T쀭B lRlݖ`{C}x#xuy}}U"m`Yrjb Jkej,$4%]5\ ]^'WK{%mꬾJR.ViAT͟:JTIB2֥4Tㇵ^kUr&+i;iMv62W\kBGLݢDmj*/vJU oL:_nRX #HK#HARpX#HAڰE!.LZtQPE: ;]T.1]ZNbtQQutE.*tQ>5e1]T. *LNt$颥&|WS:N.*Z EMںN1];btQǑ}{),rA*mYke3Hu>A* ͮ3H:TwBg^gJS3T6+Z+3H% v SZZ3HeM)5*At R=-;T;vuN`s kA* # Ra)d: .[3Eu<.EVJ.;]TtQIEoVeul.jm^3]TnfgM~Bv:DƂ(I,-9V%Ū;K 2iD͛MAT\I U̪䝔)xJ.TN*,%*Q&j,yPwR" /kNҪ`-!bܦKj!g)'e)9K|\R5uRwuRӐd!$w[rX()HU-E9̢JbQ^ yE\W>>GIxu(OJIOڡ.I$'ܤ[u( +b7eIojPwR?7MJ$)5I+%)CISƻ&>e'{J+Д(ϴ1-L[%Gcn]6%ߩ;5N]sv1t,.ILiJs]sݚCȹ6%*K*TL_(KӧZŗ4: (DPġ'6S㱣r-B%P!QWH|:|z8T Z ֆ ֆ Q:Kݟ[Y7WgXD\}jtKRF7VTŧiŗ*k#q-V!bk6X -*$b`uk#|Pg!j*~{@LWnp Gtk__UK=WHyz7׷Ay.K =Ps|QsA\|@(hs- `IH {!=ڬ5g` SmqE+҆81CVt+oUy|s?Uo{ +Gjrnu\2ITEMڽ~R'_x I150 +xS_7Qc8.L~Pܥ Ou*ak1$8k̉!xN[ƫ76q@.5\eWxAhMi8U[wT\N x봂DM/P*Vv-U悪MЛ@"n%"_|a` RU(}=DnۣRoU۵T q=ʤP *vC-a-vy PA)-Fr(*kT*{K+dsV*R AiF7(![Q MV MPhj )B )Z [)ꏌ?RRX)4RQ#QKrT^+:E8 B1>Bp{e*oQ ɣdT$$ja+L D0HpIDMvM*M0MP`7yk ǃ0M VV&<6@eL\a4L87JT&8T2 T&8K-Eo f%+xKXxKT-A%Mok^IRCo,҂ĖJxK|SPyruTR)oDT@eGEX i]|# ~Ih|GVDEe T+40SA40FEfBu8̕tM Tx)Io7R_(@'cy$ T*< OoㅳKypP!wkk5NotBn&98GL`Iw]-b?75oև&߮?3SX7mX;m[RomU@&Rr݇#`D顇7jZ:G>{G9eΝ"o:ykIr8M &von;llֺ욐Zk:zJ5|dfk%jlQ$X$Ͱ aW"F!B[įb)kv!aIB ( ۰6Dm+ImDk8J{(}7 +oKJ)η2_rػ,x]p=)Z,_w;Zݻl w;4 eC!vKDPKB{li sQ["DK{ -! 
E-GX"qKp\W8Th 1ovS[EīOK(idk.!nkVVf%D55ĦU8%"SIS=P&,J7 7Qmj^=cU$ =Wo݄]h FE)"^k t.ffe (.9`Uӂ81차 ; ;>GaȬ?wk{eT J|-(Zt; RЗ)`K(UwDV|默NJXi!b*XHbc-r_~HWT_/ML`ury|E\7bv $/% Kg]\*}rVr)`R +1_l]]Y_\*lw9+.U&8TTDnq~\{V}iԭʵ$`j͊#.0Ƚ| |eb>ֆN^Q iWT<P^q9Z{i|K|u= ׬Ef'3I:`/GDަx<̑k.5xzK9>ƉoإWw]~7JVb6_w|cח9n=E8Az^=ǮGe P猦/[/->;TN:JKRRQMT;:J(Z;Wm:VQPR4s۩W;q +>*ooGâjߩԴ}DNe; X }BG)Q*+Z* Xo(.PiNѐ֊ *tQ*Kk\@ m7RWT|3Yn)2ʄ *MmdZf{YkָzY޾;TijSazz*9TjZfWY\TU1YخRծR/+{L싙lSV4dy9O7T=I[{O!vJ!/*E-α> 0.w\JmZEVow ĒDFVV(om+ŮPXUo/ ud9] aԽ?/כD:s$y3ZTs2yǻǓf⋯ϽB oD y}OI L;қϣ4|}Ӹ c0Z=k^1m/Qt>ڞ[=]s,fjvaicz߾sK͈o.KU}+IL[ UbpجR%bJݞ፦hF6_?nQrt[j8#9_B9Fg&8oR|/v@t:J=?wאZHK|}ukxƸ4.oɛb)~#k֌,aem4 }AC:DfOuvFve͢nދ 0ꒇ\Ҙx8~|y|j/o䁣H~\$0jH`F&}u+u<2ii:~ >׵T _7i@`4$^@?>ܫWͷG##Eu?G=_VkZ:2b|}ǰ˞y1&T P+A4$P3$=DZ1K,mo\^-6xؙ*mG;` jEZtbVتz?NǓj-58\8|' FS~-$X_e #Q&rKom*ҸTJ!K_7εXTJV2Yx| #||(u_y;ju, /x3_Ͻ}6XSk!~k/J_ﭰb+jjƥחDsiSƷW/ʖʗeF պB odKk˪[B,y:8](oA'B=ZkRkkbkb-.A^xlP`}V`;s~]|mX猪K#fJ%q%8R*H(b(%D %%PmPQCEr/EK(Jp%",*]ΎZK8kKXvV,r/,᪗P+U, Jp?(\+wV W7K DW~ hDaT"o]Ⱦ}˼}BZD),ȾݱȾB+\d߬%d,EdPٷk W*n ˥[/@A t=./aoT 5j֍6-{Dgua7 owLWn݊w]Ry4'/{뀊,.J\E < 1Ò(1솭%UbX.Tl5:-ET`Q5[ tE@H+R tW)Ĺb+i@7rXPA%UbfؘyK̬3K7Ĺ1q.v`/ehZa}ba~芨d=0pXk5Ef%R Ip֗p-0s@ü KIIOLмqK8Lgpx p;.0eaɴ05Ն ti tVݍ@ws tiA&m7'$$ Oy)')4)etccKxyy.Ɓ/) OcӍ-)|SފA dtm+QB e NR8a_.7ޙ*%%imV| [3&J0Q]K\+¥p y}X5ٻ]sN~=j!`dL$ՋT/ P=5 %_+Dv>~Ar`Q_lX^u!d8З_k}||>R7#K%p`6V$3 q@$c|x.8E@ :!8_x߹CݞIv)WyE`t{p AmRh2窝={*zW55/C; (KqfZJ Lӗz})}^WZ+^Նx~Un_p_Pa-s*O/uګoT0ͷ6X\e$Q3s("~/fjg;]gvފjӈf{iBޞp9?^ 熴|;wŵ^T{: <ޔoެYq>#'8VzxVlJ' t>뛮f 19Ga#5UBm~u,YT_ .Xu>7FU3/l6pRa 0܆leM0KaMpif̀/̚鎚5A` 2TpS ft-~agKwy&tKݨ}Ȁ{̑ _#~#FPqֲP|a@X_}z@q<#1Gn-pR#21pI,1𔙪ʃ_3k`YYaf T3kT43-2hVsmhl m̬J` ͙Fi4x/2ͬef >h`fJx U&3ن&mtdȫm~{߅+{M!irݨu+ v}PX76{ju@"e&MJV^Z4 mpdR18uRhF!)HQm.Ww`B*()4xeK*27'Z_Zi47me.{ˍ'hn۲ݬU ɵ77D c- MS/.8@n}c-[^ӄQB-2_ZzF[aIBV$[ V#t{;-ù[;o@f3UOeE\+x׍Z2wݦuar-gI^R[<֠.*2\-Z; ^+g-|$%iCy Rmk+뷼55Vc5'8*/ݿn. .6VTR*EhX B&6lЂFaVb)8]$tF6:s^^]`$vP- -! 7;b mvfڵn ;ET-[द #`-tx枿柶T֤"mno[.#JK!ڞ.B$R$owbq;m:E;vڰbOycտf-_Hq:şduH[ *tJ!YRJZ*I$1]ҷ:E؁,i LAJ?p>%tzk@)0WJ)pݲ ֤*DbG2.nষYgjԦ3Yd>팔BQGݙ,B:}cg;Eg&Zcl$-v)i[b#[HZ)|itp.?9CcWB':Dh-F@0L[Ti2L(XGTdb-$3V@HYBZ 8 2BTcB 0iEF&GǨl# GJum$*"5 Gv/b*NsҔG,_JiBMY#ٵ0"pUqTj$YMҮȒ3ŎURgXQejGYT TVrR:&UD|#yUpj#`@]]x54嵸dR{qoT3tmɨ}' 8fN!TOZSiDjTTǒ.&c (>}(*.BZC}vCgx•"odtKgIB|}rwo")(-)( Ra+kR)RPb-)(fLHBZ*E=E"WP!rZ+|ߖJ-TmR|:DV(߈(O|[Boa>o!-ķ ŷilk!SD)R)r-_\WR UpJJy%(ŎJaJ!pJy$( nVHҪHBHɲ dE@T $q $ $HfY@L5wH I?@^kE%Y ܄wh nRIn%lS^wx[Qf˱ Z9Ԧ`eM`?.!6gY *O`T5Z3]R{!:yOU=O UQˇ|E}D-v{̟']q aͮ,S~3HӕMW"4JTT")iLi4aHi04iDi Y GCɣ GE!ѪI4Yj_SIVK04DIZDt& SKv%,JY"M,,5敮DW4af4&IYHJ{a`NimcJ#pM)vR1iFi30mgQ.ѣ8L(5x4ih4vTF' -9ZL|P%cIv}a(iL~~GL$@rye Ha}5bތ"h JsHCZav i! fJ2\4lQ9ER4WƊVژ*Z1TLl+-!qu?=gwHsֱ,Q]f7ec}EqixC9zl%15a6Zn421"t˞F*rO{:=_σ_eX0]r"E|Sl񼏻8ƍt\}Zh//||c=êc^G8_r8w vty+>o? sy<{y=_k|rx{xg}/DsP[spy0͸UPZ9wxOs>#y{c=>9~Rh#<~q:ՃgÙw|9 b3=xƙ>i/јq9]?1]kּ0n[e4\N|C5.Q1=0ԭC Gm7W.z;Ux3clFf>-c2Z Oc+TtuhB._;RspwRf+Q2;~ 뗼#8Dnӕ[*bC;w ݱ^A4DJ Pb(J0J0Jp/씈 ;K2Lp~x2{  * oE (,}tGv]dh,)^0>xxv&+^0 1V`c_kh}75:VꬋZB ط   TlW+m% XYKm-5_k̬P:B4 , PWS%/(`WՐQYCFOѠEBa+²[ˉb# ĢJ91R(ak)U ItOO](7L9o9Z KR[*UR%r(ߛ8￱\+u`n[GKE_AKv]cmV pu-ʥ̖o{Itܧtr[Ju-'vCX#eƑ5U)Y:u:}7qn\__$wRRR˳Ym7/Ŷt)YmTm{mEuԪ̃|m|mi:#SqBIƉ,%_JrRKUNIdڒ\B o {kA`LKXT-Zto^k o.4eafU7Gopr8s 䘼%ǼRrLӤSLLY'b̓r=ooX<^?c7H19 oR&c|PO?dsx) \ 06DF2a~b iGm(C}yVxC5?yV%ǛRrLyxTL "TJ [X:aQNTHH_,cɴ%ydɡ%X fՒct3$s}#=ke%ooG*~(MN܎<k7x *`!K ^H[魕:\gH߶bWRZ ؤXs>ۦ^WU~D{I/5R&c+5)4ms]u>6RkO%Ҏs$ee7fwCUA7lAkVkTe)R- vu@zZ+l\V,.G ZܰOnؖZq!"[A+j <:[^knXZi]s^!g:,ƣ׺")mk.Ān&֠BWf|J-Օ,xau)uބtWǜk/r 6m%R1BPI\*ذ!'CȉrJ!'xG *j鲈KK:TA+DCMlfP_kYPK,T6%FX  eW$,1` AgPc@ڗ*ĀD%ɦZK%Ɠ2?EC+ݫ-ool3lyF>}`0uʣRZTTMpVSP!8aINP! 
gl tlP8ϛ8[lqZZ04" :F֢jnvՕY:ߘYOcfCcfYb_k J\ bl+mn[(-en6kؼw$fɴn2GοXW:Xw[(N١u ߵƜ9`kfy e;Y2G M>ǘj$&! }Eռ/:ݎoy y k9χ?A$K'2ac*o9aRcטrl3DOqx1b?QEAv?إ8h< =o{>^1q~t~zQ|t\3}ぅenbT>k9}(oɾb%9lotp55 Qv>&iμU&k8qwN3se%뜺yRIMIdhtvϝ*um=Pǜ ^XӜyf@:*^h8_R@t _6}jrsnn^E;LTv\Ͻ~z 6(՝oQz]zLzy* {ocBQc~h鰞We5㲻\=}hcD޼&buퟆ[v*{9̞!y5³̯o8;U4G|ު隈Sz~kNՎDMwKc}=_{y$oS{ee/cvDϕ(TX ]Nv=]~Ɠ7=:y,%7w>Z_FxA{b!UȂHšWP!vT@-J#K%aaavbWR!M Xà0D;[ ITp0ccW9` C@$i7FZSZlYQ!FiRTbhT*OTjJ26J2t Lu50Z_.e#x]gI@Jn)zӼvL(,l1 WK m | \AXXTca_Cyg,ѫ*/0ZAї߷Re^|c#͓k5<ݫHqeԍq%U K/laWg\0aWa^kĬ;*Q^ YcaH k7Y\L-A߻o4 ^W"+f-!Rn6Do#E!Rpn6DMBNÓߕoVRFU` (]FFL TtP'a<]q,nMԂ%EH@ixjy$L' LWyvSy+{V@|I*#7]e #H3!?ҽd 9)ya7D C%x#(4 Jq7Rx{$]9bh{@HFЁN즰.Q%,A%ƔQK1l:+%נS/Ĝp c#';+ɳk m20~Q$ޖ "s9) K !&Nl{a.R P>tƥ 1O&Ď-K&xU3 B1\z>:aP}܎Q~8zOh9خ,gVsB0*1x}"+`sk"=NܯWy sw8kuB(P Q qXLjt_·οSDB]KSHJ?QnqЧM#n7p5ׯʭPU7u~T}Z$aɯLz#DfYVP!EXz=M~Ӌ !\jUBU$aQJXTڜg4;~R5Vn Te>T}CU_vjv̍/SEGܭ٥6[e -v+ L] 4D5O)-!?fꙶzÜ♆zC5(UlX%X3B^[o ; כD#.xE D#n|fW:%^̡}SqٕnmKs|41L,5+ xws>yTMxLWO!0_v Օ:lDϽ*ݎ[? tp~`.rR =~ƿ~kfҁ?%?u:ިʒ_s;Q!9W u>4FշÞfђh["L^yO0 =/oVO3)n;%>]srsK3/s-K~"x;LslBLZ<ޟy8;meD.w) azYk2rx/཰,/-6 [A9BIY*X^]K|\-{={{K%^{ G^{+ ދ{tp-{ x/8BF\w߉t~~wT@u _< /( A{{!`B/_;)~@Ż8k BKA ~7/$_\f@ M4T̓JPFx^;2UZ*JCdiRߨtiBDJbCdL+TLVrJҴEִߧ(+ڵ⠑&Z)(aRP@X! $YaȲ/Y+UYX ֮Tkk~OIGT QYYҲ*򲕄JfTJrAn',<=!g˗򳕪Z|#6 DŽL.ԫ\,wBzv\+\f&+7 lߔ.r6Kօb! WH-dvR^]+兝-Jy\@L ^/nxK$oe.H^iv߇,gqHrdhHb^+olc^%|! d|Hh!^iX|&b)%\*!]pZA-UCR! -RX _ɴ`H bɡIX idU$dl0TMIcCd%0IZIK 1U*lvd~fB1BfI6CTH\K䛸ZH\c-$]M6묛lvn2͚!67m ߤSlBڶ%~ִΧik5Z֒BҺ^/+hBZwAX񕄴=j!6lqhN|V8'OV5!mYS͠B٦YڰhP!cl>ھyGC4{aG֌=׊'&kx "kX6/%`Y7*Hg,,Ā,5~pn|! '_ ֜|ssҚDʌ%fN四9,NN1J]^uӘZ5)5:Hc-d(ad({d(uЙ{2[}`jc0VNN]oN_y5)g5)QP!&NENYu$;:ոҚ[qM̛32qwsx2*UYJ^ՎfV$UQ]e`KVKev6VȮ~_^(j`K3P3D NҫI;g6vb)`9Qll'*3" d ]秈gH*2ĆH Cu& HZ5$Ê Ď<^xdߩ1E>|!%R2(s`QkE2PT5e&|e% dBBgzN>.#j,+e%eeZ|'dJ,IN| ~d>;:ӆKϼ'zA{N0?m?y풭h i=#0c1*1Eu[:je}|gC{v;|S^@q>=1` 7>8:; jܷ =<[۫c8Hr>j3~kkpxm{/\u_C\GOٙh6A;w]kI;՚Ӝ'ue#u\6</D[ڟ\5Be 9.EG|?N0\j܏ϯ}zp2+ݖb+zh ѵPHGx"˅M6l?wRuq?Y7~_#JͶ߯[ps/߸? k{…kۻn7΅okK)שT9|.Z8?#.V1xlWwuSs&M7*MWs>աwB^w/Q=.ijyc59'U4i=l8W6­[ù5\͆)nW30χ~y6* [\RܮۿZډv~~x_ʶ0=:SSGg\|wE~lWOGIz=,{[s?N^=FˀW 3I&;^:^JBkP5GkJK:^!St ˚_^֤Rjr\KF{v5Q#RR'O5wR_*`IXhA[,ʶXbs?5BFkW4TI5hJ A*B*&GD٨IK|ʌ3RoiKvk4F)kQL8[ok4Q]IQpX I~PA71kݰ͵]1F!Q^R_RRRudorwP+3,TTahSLͬ)5F3kfVE&-y eΖW!2 fVb6F3+)]i|av;ht,eJիd;pja_~~B椺˨ nU›ӝ"BfQeȌcݿRYJ芯 tA5^ϽLMZg(:V2+C>w0ls &c>g q.c/;z!,H^v~}ޱZP-Id+ImK:PumiD~]) r>q@m1޻Z1͕ teԒT|~~Ρ½{=uVIJ{+i\aXNԭg|~ʦuhbIՓD_ |?O=}l-b66|6}j[5ΨQ-ݮ40bl).k<2~ w܁ߎ}ΟH4QPb6f]4tQ{+h/Bsm58USP}noᵍ@|e˧P'_62:̏q]_zZ =}+}X_eԱ.-U;m,WƸD_(tDuW}tA~G@-t "M$Q,fLG%O>ś?Cߍ&:JBJJ&&RH Jl,l`[SJJJE+aq bR808vS8LqJ T]do%Sٕ6Eb7yB5yTL!ip)ʄDlyXIiXIcS0u8\iSI$򴩺HJ`)r2#Db>p~ n*.18-I޼-1?X2ri'HJN= nLEz7- RrWͅ "evTؕn/Һy!_kI)Kt2+w6%j#OˇN!Kz$m:ThYk r2ȩJ捊jj*.z4t#HӃSJȷF5)yl[V{% %>դR*OUDفjj*UKeo)BoiPu)ۧDms|cZb.x4 ͬD)QMUT٦ O GlO_ݸͿmlxdëeZ^Tb)|j++vD++&Um?Վ~:Wi?MZ ;T%hR ԆQO%h?O}~ *J ~*ްbl?DkSakKwoQ(*xBY૵ޥywZ |!>~u>t^)(i@{/ ߗoU:RCT [uo@Sӓu`rbe{}VZ w|}|ڝ~~cmc'sUF .Qp+j'`6l,Xn$:WXtnGWz=uեO~?߈.UX ضacOBMS]^sDÊ$()X?sHTiP}ӥ۷.S =]H{pM[{c5MƆm<nKXA|UQf=5} >.9(uUPjIv# r-]ƈ^ZY۫Ѯ]j^-.v>/[n=Gªycj_Q<~|rj=*N? 
MrIIIa$alIĘ$s0يIyMN*5#|aF0A}F;b/1"74D!_Z q1#T[<J鿴٘TcKx|N"%W 9D~5ٗ7 &[N6ɾPӉɾX*W5 vSW.c&B\ZKŘK ]3t[ N+N.nuԙRT}c#N2c@u/ŘK(qD_u/{ @L`/} XK_nLߊɾHod_<"hWvaf/3{(Q.(wV=Sl)>#?O N*bz/zR%$kRoK)KTĎ4c{6j(;5%@dU-xADViM"b-}c6Z +n6LVQY-/В)AW޺߾ Kқlʴ+n+vn+L[WA$"+vwnOD瘸^" v3ʭsOEr$seۊOmZ+rخ*t5])-.mA|:k'W!YVB>v'nF]eרNVV˴_h^[.7 ۪JV}cfJ\K{ܩ wcwa'jR{̕P ~TtWZkS{R4Jr$ΉF&Ы)UrHpWl՘Үw5_U&҈w%+UNE+ys'XI5!`O#Z\\{=J4yo_bEmj>[XԧIF6 TaVt&aa-uV*v&aJݰɖaJݰa/uBa{t| ̣G7,ݰBt| K4B7l.nXOtMݰݰW F7,ݰ)t0j7,ݰInX|aJݰaḲF7l.nX}d=\s }X }p[s >WX\q<"'&d4} #\'۲ߖ2* nX~`tݰVtBk k7lnXa5%+Al.YZfa2 @,-q("Nr%D".) ⤴TBc*nҹ &X78`.H\J06 H tj  v`7n xf$+#  o_Xnp&F`7<bG#pUA  J<2 OApTnr!EZzHsiR4ss sT9[sss*ie)xG)s 9Qs㉞SlS}avbCt ݤ& MM*nRkW&_?El8T X Bé4Nu:ppVRko+ihW l8Zp Aé'NucDbOJ*f+dcjsJ㳕T>>[IeJ3ǺT 4 MpSX6géU6bG4 ʆSPTR͆S;p* 2m׶T]ٖ*mB[@-:Tqm}{!mk[+R-UǓmJ-U6mm` гTl8eéT 0D ,֝m1R%lKՃ+Ru-U~J-AgiK}J,&\(W%تos/> 0XDr%rK½cζ_M!D҈(M{+_Onۅ$)DD: '}5蟽 x}Π:s{8*qZ[Q )C)g@)9d/vD ʩԮ[Qc{I#2ekNtSH v@T[O=l>}=ZQjl@ղ3QxM1aI!g&AJV~od3cSl"`}bD0j `%_Q/tw}`="XP!Ɨ e?|' 7lfqO]Uf1q"dR9jZ'cz6KOQ@컧ƯM[ыG~A0W7zr$*a 1\i+i.Ι:%8.3)k góTX,caq4ˇT{<>2VN7XF|zjy既m=e%ϏA>]s&2JXqU(P T޿k}q[1if& bDŽ|RYˋu Cd ^ȹ/tb궽KXbP:"J^k`Ņ(7况UT *==(׹s _eY+ogTBlӮqܿ_>&XI5~f>}$::|B$숈R%/i|k/2!LQ` eel;am!'rr> Z&l?TW.9[7v|~ZCbCXj%r}+ؼ [2?|c]G/k|GP)Rf4#YO]I!vT <WN0Ea {ЊėՋq햿 B뛸Qk+ @_VϘRkoz%Ɍs/;q>Qj^Ѣ̷JVA{,e5.+,Kk/Zf'[|*k%"1^*nqu xn`\| ZOoZI$lR!RI$lPUT- *eaHYXYX,lR!hZbQ@6+A6HZ5rI!V0QH*1 )E֞]YN:nܲ?Z+M%-# RoXaKT–(Iu)^HT&HTʁw=8r|6A5XImIlS}bbk~tZ*Ϝо-$]OLD¤jI*Lo/<ֱTKz̯WJ&U0Lu/0r *&U0rsiZKUX:Rtt*>;ο|jcb*n^MمzaRb_ rgsDK1W:U?.jUF\VW_}cNE}6/46 M)UvSYwy&y)|)Udc혭Ɯ[l:;f -cl]f'[WE?mGSuѺ*' 0Np:WLͫ2x/U;f_uW^6bm|(-TȌRWEmhK_}`vz.fW z7@_dٖ*ɶTURe>9BXȶTӁu@ټ*I<.^ټcҰ`p)W-ջ7XOpF,#[XtJbT`vNkjsʗ`+Ŏo"ۅ5N_G8~Z+[\uѼ ѼЖc<='kUVRPT;ɵmJ$|VR,W[Ѐ]҈xo OV+W%DoުyUReru\;>n;"6)[埳]V W >{jIVX0^Ze2{XU+UlH*BW:'X?8(ݟ2Q^s.lM{\ܛ@?>1'ܪPMjc_`[a/gwᖳwKUT jW+W_U6):\v뵦 y택J+?8Wv>X0U;ڻwUaϊp%VR%Gy^=NSV. JTXmUd{^m +ϜphUhWZeO9eTjpZhpM*9ҜX B)TDo*9D+i)ˉ f)BThKDrB r3ˉG 3*̅- =RI_ȅb-s$0U\4srIGNdתPP!*1sjfMULJT~7{I%jk/|")4Ҳ~ʧfӦ\nLڍ)CnL)Nf'ǔ 2,6jTa]R1f>T]:HjCu֙YgOZܩ"wDtͦMVC}զMFK]h)Rʴ\WMueTڕiSUӦLJoOTVM+β TչO\QRM՝v85k9X "rZu輻M"LsJăk6ylR 6k5!dKPJA l(HؠPؐز).¦F6" lau |i0HZ+acS 66ll*`cc?Ʀ fSlJKಱ#ecGL \6K +nll:掂ͳ)SzͳPX*^sAE&>DؑE^{'kP\%E'^,~uL Q(AEW뽎"֫^I\ 5jD/"l*aӍ/$Xc<-_lYyD Iвl -+!$l1eMNdY,˷b| -l^ہ\``Ʀ 3VrO4XE ^ VyֹJ !_>_(4ؼ`Sy ?F4M:4ͥ *ubG%tJ !gR1T&2 BF7(Qʴo ^i_| I."`Ҿ=` PyX0=XKZ 6,&授^ sDRH P6sZH5gkLj("@|.\Xb#S "SR6j fskئUJEf/ Yd^Y_>ɗt7M*e}@/Y_p\"`eHm ]#P,a>o"5Nc:s)Db#l) # " AkEVL-Ԯ:W&d.|sF@|q! 
`P@O- v!rHsmo *LjEG3Ő[9Z3U3[jZۂ'TI,[+gIL.[#zgowr|\,`^,;92޴LA~/)Z*Z'JCci\1+YutՕҭ9މ UX.sͯDMT787UBn2· wtX6!f ভ6a@2%2LmM:l*l=uE Mp(6a~ w6a} -zMk %Ԛ 50@;x5x|Z J`q MF 5jeӄ2&4 dE $"M oRTB~*h p-IB21Z@1)-vґMլIjV!Ys-UB}}Vj ӎRXasm ˔e͕kkP|TeN|JvqB(MIfce D1.Ÿ# i@HPFߘ02In[[I>!T5 RbEIuG01kd۽\kDq#m=mV׈{Í$׫W5ЙT2 i}F2˴ \ ɼbB1ӨT%LO=kF_ S= /hG -e8G ^B'P܋r4jRz\z\SSD.w]S]Uʒ\b[5FB^(IxTkdط Z[|bNN~g"՟8g̠b.3d+2I&V"+T[(_oFd0V, 55 DF4RNzYQyQhrn*(-(Rjf@ ,(>0, lULnN(h.(^3u% Dz&X )]IҤR$(iP|"PSBs-%C"J' QLYD-}|dNsCNs)eOJS *o5Q~BMæf@J9PH50\)o-!_ 3c kE!0VM!+dNqȝRJȱ,k ByVL+,rVm9t[qa#Tʺf5bJW0+LW>"K" K<,\2[nGB+`I섭*"X.4C~yQ91ېV )cR2c-Lu#br/0cW>1 `TJj s'fV\Qe P L`Xq.;kKFpJFp*0elEt:ٰDZ*SJƝJI?q' Ɲ\*N;EqN~^sDs00a0wJ;q"%%̥ˍ9v%YG2Uƹ\+%ROfq.O:k_2x"wQ'm'N:$ŊД'=YNբBj7Ń2N@Z+BӉ]Д7Oi M'ԈM'OmTO M'o2זQW.U9IRM#xPur%xNd%x2x-I"JRI#& D,9dT 0DX_5X V`H,H qTbCHJZ9X8`8RvU\'l $3uHP* *-vb>c- `-L$zeHNDWe6#I1ftB9نVfludsV-[5|20}5[,Gse\#n-%&U1i|TLFJA'Ll*p30f@PSGe9Nٗoh+6 KXHls@2b+&KV$#ǘary־#3y]c9fXGFs'LRi`oZlSh3@ ?Hs%,fUGȡ2'8_9W~9Ait@J6؈_kn5 jQ.<\^@%͕rj(.0\Q.dQ.][ #X(ca|bajr~XǜIZ2ba\at)ai&_ a8@| aATrBį.++nįˈ_q:.GxƯš;~M"įXiͺyͩzxԛ`{/Wz4{?ݒ2U Q.BTrWFh,QE{2>ca+WI+WPg]mc/􋤄/IT}B8_d ik~nyCvq(X `mj:D=N.v[5TaZA^'J({|j@9yLnU"gKR N%uKEg]S| Neks\ȫMe!\m&'K_ah#é1BWtԅ2f3E#!2EͯI@#ǟu%Vo۟>J&S?cPY'R9**V¤(얃b7͉xcm"ScS6=7@4F*SsB@aQQU DDMM kU\v1?ֹVb8V8BA͕8e![a JA\A [aiWpk#`Im1?}(&Z)}|N 03 g߿x v xUhe†LCG+Yg~5lgMO^y7HA(}j\&ؽZ;oGl|γ`s9D*z/_*߭s%yq&PW=e?1s wm}G>:4 /p'GTrp)FឋQu3ंm %vIڙKF*dQ%UY\)%YqHR{K?StטrަCO'߮NMJ\QR3n-Mw~xS):tPNiF9[)\=0Natk1V6k濝 nD-یmBk; q+=#{4='D݉D-kiaȌyb1%V,O'w{ KQ G+7ڊ3 $pOp-ܞ`趽칒xR^?Xd'+`Pq8GMZPl}ĞsczBI1&Mp!`1:T"פāK< FD7D `dҾ"L$ d~D@H"+Nh1%M&@4RФ&OϬk%@hdAo?-stㇸfPJ?,%LjgRu}$ɕ=8vךP1& 0A4)c JL%K.=z|RP0q> P3d\i&zhu+ Qt3PfRM2#c&`Ӓ~.k?dginLn7Z|ZF&W1c]dLK۟O4I$LZJ{?aZ"07K]syɤRb2wTf2BaQ LPerKEvBEz2w_t04fJTyd]IJ HP!+ * JIDe!=Ґ!󰕈Tk&2?QȤR:)3:yldu kOdҒYKP!m GLj%&%!W%SXK"1 Ȥ# YG#t yd^G=2-{L*%{eq k!K[iJGNg(YQtMɼ(yh(0RpȍDruѠQKh#4hDDl wCz~ZF]F:vh "Jh͕J_wT{mʷz@H-A)CBI]I&o>۵'G+l9vnMS1#QƕN]j|ܧg5E6>L=)ɥyc}mLSypOu[|= z;)OυhPTTl >6sH|bm7HZ7G*J#{3/5g72Uܳ2W;2QuLѫ1MC!;S(M^ƤR3c*K=Ƃ*Q12ԉd%x,EU>JyhL$XY'\DV*sEd*]eQR *'F"E \S~b,Rk%Ad祌rǂkJ-U[eHnVF[٢`+ RTB\K ѣ39OgOh *jꨳWS ͚[34T;5& *H[ԍGvL= F1^?}db>1RcNTSNRA?έԺ!0R'\OjdMRuTTM'fͣzlSJ"MÞŝʥIQiXKExEɹr!Q妬T+Q ) d^,&rU  ' ;鎀ѳgãy瓭S9?t-M]3}yӦ}Dž0g6MyB 'Er!4όXfyQ{\v6@96lw B#nII)o$wy þC 12߸ ;bpq_>b.u)p]>^b.F^P SƘ#vy5/R`X P,k!_*3)s6λ_a{o{ФR-3kk̛bE:5G(PLJK2 OD&zsـ/$l}fG!}@ Qޞ+c*-<*R u>KŎ^umқ íy`D:3B;fXg?Gخk׶XB,lVMJ ͙IZYO2#DᎱt~]X14~~)z9g8ZnWrþjrX&?kXkzتUàBf.&f"iʟHaʟfr׷AE&n)JD_ e?/v5,7 Q5B׺^ ʓbIK2c`/3r\i |RE1ZWbJ VUJZ*XeŪCL0Vp_Xc\/2ވ8QE&]n9 kQ%ު6İʜ/ĀI\%[|}:ĬᕼSUwlJ1TLYx!DV4U%L)_>Qg|Ք1SViu@oLT}ci#*8*TEĺ}Cա[Q[ XP&`-Ӓ- cDhWUрMAOX D@WP;iLZ$kUW}+f6Qi"A0&R0&mWØt>2e*lr7%ҮNg#gcFߘISPb-C}B5* ,V&"kq2PhD1dզL4UN4_9шj>*ӑ&T[Z1C]r1iy瘡4WH}4ʉT{Nud)sDs&$&1}&L*$둝ҿUD>Uu-gqYEPI*JZ1hX4r1F5WvIw: ?` mau.?X{VRL6QZg:Ip s%^uU󸊌IVϱUTZ`bpDQ ׈& iRAQLB4h&Fg㻽2RJ0(3n\z'OϹ1WD1W"xv9-WkR9P +ŬYH<^RΣ10/S(??|Xb-.`)rs@$cЮ3tup(N f Nmܾ?+c(2Z(@d+CD !#;dq#SٻX)cr xEv:Fz|!.yA0. J{}_(mϫ%\Lۅp2I>2Cת;B/ ҶFF0F6nj5SMHL;`h#H adI񀚱U](4Wl*03R@S V*c0T0cSpgvjL7L;2ׯ?#9~vp *B&QASI\W4iR8u2Z3*A{&_R^@h ]`Wp% +Rr rUhk wvP/`@\HzJh0D=G%5Z]r)C1+d 4 *aOa=')0^Qq@D@A T# TaPōU>uB) Tq؀P8 Ta KJ`>W@v R ( TЁB'W@B^a=, *ưIDaVİIK!:MV.M˸Z2Ёn@7Bt/<]P!M7Wwp>!KYklV+Х F;*ZtZ ti{rTBÐ})Œttw|ۏ#].>.?#]nn.HDa!azİp\aab)İpaJ1,.[3sBtabtu#ҥKHn$"].\3"ݔ"]ts-DTfIHbHut-UaO#U1,ާa#bXFİ1~njӿ5yFK.._¬37"]*}D`.L.1,E1,?bX>"$6 *a ~𸔲b>Alm)[*[3[T̰ƎboJR6Q֤R5;M:xWdXS\ѽIEU66d65uz([TI6UByXYPay>LJJ3zfU)yJܐ`ͷbcCaOT6MCiTBaEiXca"2 WaS^–QwyDpiBpa@pϵ! 
k% $: Εr~p`|0>@Uqlס!\CBpk%܎ P:7@ˬmV2@]Q4`@Par9 ǒu%LN"M"L974_K!ۛr@^fRV"b B6LJذ-r`o &O׋r.zlx +(J1]ʛu!KǓ0Pjau.Wwq7t"Ԇƀ^7sN?#gyp¦tn'dc sl(dCB #K:adX" `DtX) ã0anVs/T B- &PظÄ+WlXu9+ 5SlAN T _bu/ 3V Uwo3Iy\y3iEu.uJD)D)vD)يD)6DR$8TT* Ni$#;zq:7:ܣ7T7mTf"js_ q}k BVV ݧRV9q- |qBr4bGdo-{_,Aa# ye 2ISmneH*|jR"ͤrR@w/|!+T3qe&F&8B&],gafUN*e3U DVq<yyD&N725 F`g~u"_ |qR) {EΕWT͹52Z.l8R!".UOed>ʌ|*TT|SF>gK40e|*9pȧ b_ʫG>,]DfaGGo] @U,>Du,cv](4TȔⰑ)$2Sz1[gJ4W.L&C90xPX \)KA $(.s; }\vKqgΤ"g7&8Zh=39]4~7lа~[ @ַwr8ٌhH{z+6t{$p+o .u9Usfz5(Kɾ'_o*y].4pxm#33i@Z`_1]a"v5PA; EM$|W^᧕hL2c]mv=%K5y2w|4bۡ1kuYl`}!btP!Hgm%_$RT-t$b-D1WLjȰ+Ԯa"8P+) @2d-C[nm.Keկ0~'`8 `8՝U:`3|t #5|X^_ WBR5󑖝=j ʼnt*AVj&9AB!CsE|ȋ,DH"9BDp-Z?NYZ^Vke V2 Pü#ХF I PM{|Zxu~oP 9? +ѩ.T eDZ** Zv Ÿq5)hk?~HcJEkgm,o7 O!& rq(Ub6i>j9(I/\OO|_jQ9%)̵>,iW رzӚןSUƿ7>ԉuOBW˾{~Y*7էguTKIZY)DY ޟns+;eu8+ZUOYZmBGwOm])T*zq Is퇈}T/siof?FŦţPZk4rhsNZ,˺=4f4cю|~QBYnm" a|Scz6ݽ=e~ԪZHbGL5ek9X~(q~%_Zx TNP?xU-N^@'_wDO[9Y_+[IbٟSbVqnw?v5.jM3P+bjߧ^{~ ovz\=u=F;{N4EdŽo/]i{E[iM!Mɡ k"*BR}o~:Ufjݪ \WpUϐZSCR I}ՐTT# 3nfܪ VcĭhV}JQ}J_b`YJ 4b`P!6583[CVZѭ9W=2pŎ\'oJZʣf*#UO !pyt~(G᭟%zZ[_5,.oV^#p[ \"U"nWұ? *ő8;4+i-NՎ9vc-Z*F I i1\ޤ\}ƥČK5r7X)t CZ9 Ґ1jQ) &;b`pq`pvNG. qVK_R'b~`&R,yE?^91X~ QXv Nї_VEӮcvYJZK8IӔ񫇾SW{sqL%ZXv.39b('!L/,RϞ-k;f˓IZ 徠қFK(Vm RJ-{%9KvaJ݊nV4Y'v:-#&rfsf׍nVd:yĖIQYi@Jy7RGKL1泹VT|XNx>ϥJqDN-uХu)uUaA5F7%l==8dp*- 4o)4o 4(\^ B5=\&%yKIyKSļ%4- '3 d(r1Y[5h6j68l 4qqqx".;N&&dqW4_T&fayh/2A qgpnqdpq8rR!Q.Ka*029j6@'.KMLIL8dq\Q^398trO1K8 O~' B@[:۹s5~} N-yG\ɱHY۽%dW$:5Ŭ/`ohi} rԸo NNxΏe%y=s(Ǹצԡ7H?Wj=&DێEׅ&axObHN񝣖6~ 2=:Fx~yƻT涹ggkin߱Pmfzj ZQ1Uѓˌf(߉\*L.=F~ujѫь>> -/:oYלu<ӝ:ˊsW~6x&G ->ZJ+:F]w}{nkqcw;ݧ*w^EazkdEU푲O9wq޻^Ow[w\j =>eГ/]›}7Oiy8StS_yNZh_cS„/XeCZ|^Sq7O}_GygjZQ@_^}3[i@˄}Ru^˴5gzEhU.\ K Bgs^IS1/?~&X|۽ǜ|7-|o׵nl3k u3˧WT${bՠbj>դRjRD-OToiRgz2<P!R4BXh@M@Kb*t ɽZK{x5Kk45 A(ą\K]y@wy[ Ao)8FB  Ӡ@!N:mtТݥ8!tb.MEwi tݡVTyfA*N91hi}G*TFyLbaDw)}x.&zPHk4~[.xݠ4~l0SS@R~D*~y>T5Pax-᭣5WXX]涸5frqcjmW SDO+oiMq[5R*ąnUD%Pj+κ{6\^7hqMvĠLDMp6m!?iIa$qnUŠ RTHJ9JT9 ;"öX;L&PWkL&վRHBHJǂ UPaBS)/Wu|X~No&8W`[~KK_KU3+cvP8 3g*/GS=<)*%̘؇c (VĒjfM%՞[ %05$Wm5pֆ=Zc8FDAUcbIsڏ55:}]Qí{$CV?3U QMFV6rN릌?WBQ: *$jue*؝[;wHʤd3lM\8Hw}}2$gEV`ԪFV}'Wۭr =%L^߅%$Y =e?ϵܹdڹ#+2k_oBG닳cIkiT/)O9I9]iDoR\V'!l)M*EI40{8&bX1,掊a! İ1lR)ZabXDư)ŰbؤR A ODt *D*S)B Ns)kCDDĝN(NcĝT9 QcS2El ϋuXu4`!ؤRTBT`2ZT&X,`qYk]07Ҧ#o+)?O BSl)0BSzMi"4#4;)5+BS^%UCSxܬ7"蜻A'eA'50^A'}R|֠N A'5eAgRe ͈L!RD|HEd ׌6reƯ kںO[x`} ^Ujs?Ct'>Db?5d U RQ*0fl=~W*UR-}蹌ʳ IC{ƒVm͛QWë~_V=8j&U9{^3~^b~fj?cqI}}|󩼶:Tૅ4& {oĢ(YZ\<8e6c JxFU˾{)V۫ H _u|%:pWh臍Ƿe _jzr"D2#s ԗF3'Y:v[Z ϓFk4 IoI! H80EOS ^zoO(\~H1qf5u{ק7RF uϏ&xrocUpZ#~z]梣_;6}/r]҇1?/y>Hu\σa3v3(׮}6.lA_n~?գEڝXO;G u97un ϼ1}yӵټx[$CGF/k2!| `Yy^Uwz6q4 ]/$ h}ZuYz/<](~'7Os)5_~S^lԮ]"WƱA!;8#vﶴmrQ"+yྴڞbB [ g}.2W|"\1vzp:e\-<;IJ}jzYH^VzY  ڝָV9(pY5Vc5*JXHVJXUz,]j\99۬qJw_NNgZ JXyV?V;JXݯ٪oL^V:oϜ̕(fRHJ)XHUZmU9ָ~ЫֺO0IzezYH^O^VZ}euS/+?J-?>[UiZ 7\*;-XWF\_n,`u\[-P-V/M:Xr`L\mt`eɭn;-/ȯ:Xu2ZD+\e{q_^u6B `;؎6R `;!ΧC)VJiBioh  vBRhZ)0]=bEb2RϵRQVJ%,eWc)7=,ӄu):6":Tw!"J#"J"~s|گQB])p"8y.‹#\6"\bveݒEl8z.I:+pM0JA%=LÁߨ;1JzRPKJ!X}JA%^4R=j kF艻''5J+.ŕxo o_3>>YO}A{FŨly7~Co![4V B A*uɚtA( H(;A'u:?er4c0^7*8y83ŲV0f-}p$U0Un9\WcD1(7bXQCI~X^>ިs{dUgսJM"Mu4}O^?/l*';MulB`k"4!y8^ [u[+wf$gl/흷KsA] *M[~c{,Uei/. o"ΈJ"Nq/pF*6PBDo7N"q6 ^ 6lz4nz)57;"`gJƼcWBNjgj@[v? 
= @YR /a 72%u2w'F_k+@y\S ˜'G6f}z8x#jymxYŀ&8btw,panc-nd}h.&¼&P15$.<ӯtknxtBJ0#BRC!ߊI2j&UBͤJɑBQ}xH1!rBT[}+G@x]2Re<H>d+f{|t6*b >/,i5%ݙӨҨ OY`Sr6M}b<}lc ~-8Ci|E644QflRsUgyTj3͐J{x?AdH5VjS޲iM{9 6x֊"}bԙ-BښS)F{9)})F`SRsP#] ڠ$"5]kNWG4]n&o<ʇ|ƺM.ߧsRSVD9ňARH1j]M1J=׾Hy;HaͮMWz]7#&5u~Ժ=դ܉L_5RGAH2k'VlxQ~Uľǯ74JHd@ [_ r_w?8xAYjO2f-?\lijqC<%I8b _v헋xx!$CD9<ˈpxEO283knOdf6;xxq~YQT1xG|͗w.qUqZ;NpYNi}goE[t-O\X^NyʈHF5 Y~{;~:N2o N82@!\O ?M b՞SopOMVW#kuZ2čwin;ExgQ|vŵ2X|e+J+I,}x9~X ulm$n>9)S}~PPDC9P~yUռ*T@Q$Vx:|W,W}b/|ӯR}ӯ]MX)kTݔOSޡ9&X}Ī4є1)SmvΚ o$ʆ[S:1weFGj/Gh?s[Jmf/5 HSIZCբXVJǁ<-56QR(N3+.zRIG=Z)#PO E\R'Ia=]ک Je^i*Nm\**jJnnܡwڢL-R;t*z_ -uZleR{TU_i/]2} %ҧ7{%mny;/#cw.k> ;M_El=lS7qM֮#*M5_?1{N~?ʯRPMJ"7>v=63B(4-Ť/bR?GbṘ2QI]Ư'J,~lm(:ߎnz~rv]׭Ưq՜&4Nt`FSwy1uUCZ)9 Ӈ4%mv '5XxXSTz2"s#/ξrP!ETHBE4K1PeKK]ȗ*a*q*aB*A]o/UJKF/a _&@)/A KF/A JZ#e%KLR| ȗ:ȗpA%nPh`UDd?'$l#L…1s!Asex#o_!CU5!` #Xj=a0P`0‘7>DDpj(,c":Aᦞպs)2y,4aհՋw1q}aìD뚙gf'$\LP(g$48p&u}UYF8Lb'F8Jx49j {u,D6p!y@VGdg`͞F8Rw,#%oC [)z+%nN0RD1"fB}"]RB#| ؜фB,GX •Dpu?۱xF qP@^^%Y3\wF*a,s+%*I S*0a~q:LxWI01>`bZ9z`bHڋCON._Za^A_+%/_XIX06(JPbƁc%.gP `b%IoV|//z@:+_OtE%d/4Q$iߥ)d@ JNbfC1 ,lHĆtu !1EJiT4FŌHj]Mr]q0qǚ2 PbEI.1VĔ:]}5}LHR}Ҿ^7n\~s&e!Jէ Jc jՌMrdd9㜸PSqp޿Ŧ -BK]xy][cpH- PRd@kJ.<iq&n M Lږ, YdQ5c5*{:Or=MyNX,ZyJҘyv獇(}~mT\Hhj}O솵fi=:=u];-F5aIOI Y6*),$9 yݶ˳3jv".RħOrM0e!mB ӜkNrq_U c?j/D0)Y S z`9 lMGMOP5c`<Ȓ=iwXyo5sV)\,b:;J"vrlqG۾~0#ҵB^t#z9] F' _H! 2:Ia,a)>aLį_L/mx>ښ僺Dq1z<vP oBZЄtƄH5C fsSòQa,MCH6X^IdH!=,7=oDJWjJW;))y_Ky_}m:}TrTX찮~-n6֞ )dƦuG%OyG)QI\Ðڬ圵/iZYkYkme4g!_AsZYkJ꓏NCNCTüįLOrMkﺎ0cs)L<3WJmVA3r=imIjWTJPT ef!Uv$r+TnC&7#M{&' +?5P7@g:]d%IVȑBΆ.5 ~r!vP[t1HmkjBSv:ij.kjmv5Cm;@OsR>N7R3wXQRriF.t.9B9šJs$Gؤlrhs%;`&p3*jM-"ˇNI)eezͧF W .p$A-҄ҕ8WC=4=1]zt $=y60PM^6x|5w%! -1MC:q T?28MMsck"KjF9w57J;>H1847rg%&RjFΔE 0nR XdŌAj,isY βeM^%`H+Ջ*R[$@( }$IR 7ՃQcwFXt04Q%(vbvbh]vOymѥeEAt\)/gBrAtlNK;֎Q z²2Oɞ9aee11ooITǝ `%BJLm2vqbz,+ç?9,6qfx 8b#u| k2MU eN2}-3O ›@p/Na?fNaXj]" _eA4ޥ V{0wG60~;jf?? C1,#ɗ_c.} kQ~y|>i̠(E1^ :m@L &ͤpH B83ķRZXZ::*%J*@=j)BZꄢZjRDիj!52Aqg+R'RDRv,(qwBT}DRg$JpI Q"QMD %JD $JH{(Q*$J8 Q3H (lg%ZHpjA z$ZX Q7D X8\HphAR?Q$JTjZjR!QD %$Jؠ(a>(Jv$JE(%%=/~$T#n ?Rg?l#a]GR,J%F($^XpSk #͵.*(y.%Zr-DN\Kg>A6|C5K m_îF43|8O;?Bjxm񨼎JR73ο8g.gŲM,C})&zm._?:_^ :%O:oRA/>,$Y:,a,昘vUJYRb<mzN?9u|mdvoowIς?0}A~?, B S k9v3")r˟^EoGb:VGĶumg!>h.θteU2K\{iomv\ۿ|cЊ<*A+ܭapa_gM?F>wvV/3l`![~Z Lq꾌{uѭ L! ZwK-/{?7Vyuho3Q]3X*#''rV=U0FkP;%]qs5+{fM!]~۟ZTƏfIio/~pϻqZ̊^Yp{=ӭ3Z<pT|-A0-^uY>~Ɨ^dLebʠ ciT|Xzu]W,V̅|+]h5g1{-X>Ƴy9iA+m辶#RmܦUhF8ߎ%oPvY0R@Ch`hh1  e)Y)z;2Pל \Q&0I.KJ 3 f(pf{$j%p%pgTz 00mC@wJVJQ~4HC&¿+# Id!q:d.aڀT&qf_:\9^dx3dc2o+h`Ga9HEvBS_IXjn"iM> +D/T40CO"REIc 'BҚ@ 4pg1Xp~B&ggT^/^8&&\:Ol6])A/DBj$)57I>ҔEz JR%Ռbpo 7*7' 7&|ا"&_j4̾ҕ(()avu:ʸ/Z LD1.A \JKR?QGX=TeFjZ PbIX݁i=96hs?O#zaC)6cuBl|;.ޯ6S/iD_~(j731?PץkV#iFGAFr/v'lDfa{, keA^bj+xߵ,31並܏Q:yMxaS=o5Hݾ.qA.sX~|9~}CmIlo-{{GȒ}_S3}2Mtm}o;e޿D.N)H. e>nJ*z}@59e:g71KQ0H+p$NG4uya *XӸN'LpF[©gs^#4"[Wj#|#m;ЙAbd150.-4qm<]]NﰯBcg;kj+E0Bj*bM%P#U(*4E'oώG'{Rs mC};SDmKW0<S{v;QRC\,RC\9v18\\Ҟ4y?֧^eobƬ87^h|~|"I 2ǭ#syƃಽ9^vk -5lPF4Y1#L!c.?\ +yAOQeD. "/9Vzvǿ&H0}<8BokS9sxƏ{f Dt|ʎH.o~۞o}.mWQ+l(pb F9CRO;hB9 Ns`k{7E2e9 9ȜEtDa1\pjj$bFMqDg!$O+p5;p5 W(\)\j.fM W#p5T =%\*\bGpujUPxXyMDD:EpDaDcDEY"hI(|#Q\+Ds0fIDF1KDؒFHh")FDcDq,BDƖF((`#ј"\ЊD#Hwo"Q$%(^{Dq45MIK؛"Ѽf݈D3PӚtL WnQUh*ޖ WPlCH_7Sj(DcDEUbFPA#QXA#QmK8v?{QffBѡ^ . 
1[Q8^kmhiv]%ׇ!0RB__H1 q١EׯP cY՟˨^+UpP9+4ur@h)t֚UĨx@i\3jHQ;~].ORS ,"{e-s\I)kM&Xe7e)lC5̒k`'YmZuvNxQU k:lowG#E8GVG r#^ZvQ~8VMSZū.nu 88ŲPqZ:uZեBYSC,8=rg'N5No_?.(TrTbŵ Q=~VNrqk׶]Տcyϡj?+O~O~H'W˲A|ڨBxfA y[su;Ha#œuu~"*DsT3BGE{R_Mˁtr[M$(7GW9MqjSBG˜Hzc??ekHFHhLO-)O[GXn Xn큺 ڟ׺,v72v6r3`:OAf:$rk[kv _`wwyU@A||_vO˭&tIdsf {%v!mtWHH18ofaNt:%˟,#.~5]pF8w8mg nՌnF]噂˭6K }˗ѻww ̕.̭ PG*zW_nл:)6׍CC+iΥ>*w(5,%~]3߿L{>Ȅ%W!uP,YR,YBgL2BgHYoO4g0X XRhu ~0`*~0X@ `doD?~0~ImZ##l?9nv`ǸvXx4Ƹ~j ' \0zv(͋lm^OEt4};ΝlHFGl t>ΝltR[,,P;h]Q#էGv}Fq;KGCRaHjtގ~pFjv>wv#EۍnF>6:cn_~.7C)P jfP5zܴC᤭̌)X$9#FyFw.w.w;𻚰yBw3Ji-\!Q=!5 Ax/s+%tnEtnέJ oW/ of$c [76A og7 7Ce=-o/W^Hn0]0Txc*WH_ }"}kYWWP*q͌n@H\ : 8BG-jpώP:E g:kB w穽_&c59ݲG\xHTY| NPrAf(s6s*$h1Q¢xsQP&oݲPk5Sa}{ u [ַ5X߾CַRF7g/+ RȳԱqv,-q]!ZŒKĥtLԮIpq z]XgS%ΦJ ΦKMP*WH RqCHOP|vηz[xc}!`H!Y,PؚC )(xpŋ."|S |SVHb7Mo&f T<s eBbVJ&t)MqMu(MQ0Xe TpneP`s+n;Mo )|"xIN7LR5a$*ź7 J O\UxN8 l@ 'H1xsL+KaE,Y``vՀ *f':p6vM|ogm>MlԀ|Q?]0;C lOϲeDC gL@cY*`g0lLƠoco罳~4]R;~vYY&m7?Λs8~{KCP~cuh^K9+Hj^U=_!EvOq`.Ǻ WlYF:DZx.5o$^; %7{JJMo0l?X~ۃ7lzhX}=72|qϑ>^ke^<,5y…E~_x2^u=o̸tLNךd3ׁ~8e([ȈuzθCc\>"r {r[nt_7gصޓ9!N |>~|}0} ?~Xɝnmvr&8Ѓ_>@kZ_,B$蟇:_6˰?YiEU e uOO|8-&?J}:ÕK7x߿G=+Y=D\ޛBڳpG#@z_~~D?*^kb-tlWَ{aI>Ba[s;n{P?H7'N,#5l>#?$J_`fSS6̧>.OPw M|z3 *#RD@TfxDL"&L"&H R%bPp,Acq=X„X8d'Q.z]"21a(p,iYX„XGMb,01"sU?s,aXŒX89t4#$(vLLXm7(scYtwK3LcI?<8[tIf_&R6G{&cʳugIDZOBi{Q#Lw\c#iGkH&Q#.=.ٕWt-#I]2kkK-B_-n+Ⱥ x!oԾcIR%vҌ%vXC bkc 'IV OXb]*=RaOҲXeOˋ$uH̰Z40V_#U4̫8_HH3 S#in4\}Ȍo?̈ VwBC{sHc(PP Q2K 3ibYsq:Yh~a Z PX)}l72u+zFk-qJ~%G<3Oiz40UV40Q#IhKgo,jX"3O*Oғtl~ wb؞QEʟ͈^^\[xZ{mÓ_[@=e0S1*VOǸۇx' Wϣp,iMXbp4s;zaK/{|?RHiJa w6RJb#qy'GRMBj!l-_ȷudMyX8n0>֋|&?("yXhyX t +fO?G V}/ݦ?n?8I5*fq^cHƼKGVFsk kXۘi)_c3W[̩qwFBSr<sH:L6zda=֜_vםNɢ#_qvqaX~?,F$k-5'ku:%/}2qox9c{o=jFGo|=~/jU60 Zyqu>G ow!{_{^$j9kO2'W5*+iKx=-s?K9XߍsVo֫hrY~IXIWk_L VfU?q\}%f=r&|훕F3\݋lbuYjk*O>2n^Qg\͘l=}>8;|Ї|#s'qN )kʚS4KRĠT߇G߱8*[_?h}"x k8VR,0-UJLK LK LK@ˎnAEjRZEj>B {} RcO? &H SOpC);A wºzbݴn61OTgD[*x`8EL{&E~./6)&78*_>H' _(Da,0@UJN 7'LKPj-A̷g U>&v3u.Oq;+WƌζN/;1w_eቢ1'B[HM"lR<3_f3V6JS' OFxL PN$N\Tȝ` wy|9Jċ3M~ͼMjp-~mWi\K6麶?4sϙNhˡvs i3iSᵳJ\oˇM f&2?|! ~rgXQˣ[Ǖ8 oq!gGB4Bql $;fW" !ZB]7H;viXYjd;B#!{ ;iJcPrJWI-Ŷ5~ŝ uB {k}-bGBXEӡ@D~Zf+.U<s!lX,ULZYW W^q759h1-xkia]_^~p~[z\xx#AabMcqB>w.Oiu0KsUjws^}+uh. Ɖj4ҧ(M/2ZGÜY6čш1o6->'jdhh0&ž礅{2[<]z_iY5/Zb@CL*Y"L3ԷRcUv"RT)@}!/giƂx!>dJթȔ9lbRBRƌ[iP_(vzYLL*lXXذ4&†C!\XC `H7^~:+/rp \ 值cRe76Vذ\ țC`[Su\AuYjOpq;f絷wzD@=,lN)N)fnE[/OTV4)5VV2fNQ! 
U*YR0Ry5xQ U4]!X"XA3{] ]],nX(,P]g X_N&X_أD4~':kv>1L4p2D=L`߿?9}=ύs ܮI YH , LjRBZ*xV5BʺFH,\̦j׍jfSF5Bʨ֘ vtJ֚v$Zi~ZѵU7r\ L)5[EjۆlFRjۆL/6"̹ Ɋ(Z}#皅+{nJөUҩJ:,I.ҩHx#ڛ.RydJ{)Atj7ҩS3ҩR:BȔFHjRdJ )9@TO3=HFHIPP&Xx$.*V7or5.yKs=(]f..j.{"wr5%.lGV2#!R5ʦ.J.G-3ePXn g,B&I+jJN4R"'R&IńD1D1DIN3Z^nOT5iTM A5i?QդkTVJդP=I!jRl6I9cI/ԉv<`Ϩp*j]&uGhXŗWF:QDNuI bXG(u8 m?(ŤRhR&ʽI(t2ZDy-LR-AhT& U&Ol(PQ)͜2zArDxVJe]I1DdLcCnk@}֌J@y]J@0K@(7>b.Ĺ@qS.Nq'-0ŝq֜P>Pވ3Ͱ|#P8(KԿ떓"r(/k0 cF@+%h1bH4,6X-ƌsXWaʚ0Y:hf5׆3c񁈗(ƥ-[ )`u-&b,`9LX@ C dDqʕNY;T2vdKbH[bP)t@V-ƺ-X۾PW$bb}͑ X6Xl1&D 4}\MйR|bE?\XQ^,^ (uIo'"x5cN9e7 ͕6634׏x'MQ 4Wg!&_/Bi0Gz.1OJ +`XUg>s+EV.`Xc0lo3|];ě>3WP[)7Xl&S)5RBmb2cC7 UB#]dK&C2\Mi"Ŧ4b,6㌬asfTJ{M455BdɌ&Œ^)X'/MW4}ijKڗ&RK#۾4՗I/Mfd_Ψ3jǙFn{XJj9#Ж3K-g._U3չӷ,jFT&VٍzdU NۙFiz6sF*Rg*Qkz'v34Gui:V+>55kәh^MgtͶLY9xTjPә^gj'SQ;v2=fj'S՗Hri]j:#ۈQRәKMgSOἮ:SSRo"Л4ܛ/4?fc!$4q&ĕ8RD&ĝ8@+%1{!x +%d22&Q4q?Qhb|"ĕv4qGhb"ĕNNc' M'NO)Cm/a۸R"9'M 5!)zFGj/|0#p'41v^$T 1|D!tWJct)=C[_l>y}u ᓀ:PǝPc(c4c?Al:긋<Ęxb\xw,ቹy2'Xc('O)5voǏ~S꘷bP~tͨc|#83"=o<1'xbZ`钂'kwcB¼ȂcS/"81?Ns81Rc 'F6É)pH1۱RRC)۱ńH dJdڑAʠVJTl523* uMCbyńB ЮtLlgﯺs~wGX!Rɍ { M {u){}Dc%{Z)eO*=V=G3*{u+IBRɱV=7"/(B^R3ŭ%ɋb,dv w: W/׵UY}x?}O]rt$c!4ekw:">7u=Vj:]B|- c}8o#ݿZʃzߓIk{w)굼,Np\v+O_ouoSSc>>_gvjXv׻{nKz܏nuBqWu/ERXFW|,}s=9d-tz(tU}uo#-dۙx羖;p=>89dO7By_x -Gס_T?_w 2xm/{y{uM\ uDvߕ+\&z#Z@_Nןt,ki `^4 t5Q80^ko,r[v[o먋f~+$׺ uSV3E )H!$iqvrww^KʽܟG U,t:L9)WH!xh3OsRR[q,ׯF"nQ6sAl乻??o[յ}úu@h ~~xV}ֵOƀgέBYqN_.Mc(5sPmNǡ?pKfX;6_h4iO >`OV[:jxٍߍ=w&PGEv`[';4^vǩXxJx/^Ϗs qszi53acGjvEq<(p~rǪ?_ ?8n[Ik=DŽuRkSMD&? vIɻ^Zv-1w{npp1?~X|-OqO/z$ھ~y^w2]]#ĺ||h|`7~~q  ΣlnFqĈ.?ۅ9y}spv(~->t:q7=m_\7^?ގACOf9 !d_!*@T)_+ЮBVhVRMbFd`%Uv% [>&!e C!! )pa77ܳ+Xꑂˈպu$iV B6|. l|fsfsl=gs9+s>ve6Sc] GWjcy揥䏥o®$ ]ƚKkqJ*9Z!e%*\#_+sU;X#[Ô}sdʾ͠Y- W]cM|e!/FsU"XiW-+W0ԅgӯz6k\XII*@vqΔIWٔ𵏟:MJMF]LE!Ck5gh5c2Úղ3U`5VrST*_GsT1TSzC5e*L0=Ucc*-oAk/餿9&"E(ZHVCVC NDT)tkd|"CɗR/a _"_ȗ _XUԡĆe  u4|rٗlq)gRW5KJ0/0+Y33 $~Z8c*%f%z0+A`V†N=HSTqf%~~D'$$H3 /gR Xq/ʺ5A\% @??qg|Bcw  @:`;Kh 3zoGGi7FzA~'`s ?X+ef=KjwF6Mh:ׅ춒nm `bep_jcu 7?dKG1np!X^),]<O'wis \3m(|$i_5UQ;b>e tIhrZ¿[@t3^j"yt9T4D44Ưpqιpq<Ɂ ] Ps@txSyNvo.z0!^#Ulޗ#i])q7^7J!8*H3t(a(Qc>^>]2,\p2SC" Md&!bN2bЎ~2$t$tɚBlhZBYHA( }d*62B"'5[}_KD0^t}HUu UuU'!TtUa*hCb@ M560U&zt[BB1R.iBah3C~wx4 IiI7&0BvJYƎPScG9v@o꺮GRY!O}K[:k/#XkB}]zu.Ftl۱@ڱ+G/i߿\?E;-c I;ptlPBc Bla;ӱ[ڱCc <:TIrzFul&c >[`}Euߚq5}]KtD-c? 1Ru۹vg=1ۺϤK=L V_U662XU u靱Fxxl6Cŧ fD4G>uF^$mdq}?<}x?%62mdhi#ţA \?Z#T}gERi6 z}uZ?GؒϤڒfɍs6Et8HJ{8/U`<6?81yڐgXo'&m&4ZaA*|U rYcuc1y@륃"Ѹ[X =4ovׯ<ߩsU_I OMV8޶1_˱qks%0~*=1@__:즉/粉2 K{uZks? ?a#Xz \f&aHI>ufV$,'Q&a~pm>˰ *3~A#uYjUŖr^|>NK7,Dd NCk޶`c}OYR7=vK豆޾\ɾwnwyz ,u}.K]keRCU'X3mXL]%Nb_3't_Я_A3'qv[:}Y3[muy!u>:?~2rq&6|FJHTR {ƙjX\~ DX3g|%7J5l4dLvڀ;s̜ph]}_3Rb,V g08{s2CU )ԝBP] ſwrvb=7_} [~ V a|HqhE0څI1Z#=roŎ RVb-cNP28q)cTޗbl=,Vc)5U>R z+ҭN=nY,"TJXD]}k >Kcu fUCH??~E^#:u0Cu[+3kxv,!4Q (Wf0׺O^ +`XђT>5ڠpKË] C]B]OüP Յ2Pm"] xP t(Q`ۡG;P;]䯬^ygĸZ嗢.4Pwm~u'K@a=#Z>*u! 
bĺ3a) ;;VJdb1cb“x!%JR'koK0`H%R` V bCA )PbFP✁B)t+$ ]LXp5cBl`c,Qrf j\u\ؠ&Qpt_^tCZ:8{y*.n\pšFC ) Î O_x|ejmb!hbtNS s]1#PO]C1 q1\Tȃ@ 'R` `\WJ0C '< Kt5!{y0f y0L|pO B<ĝ<J<ĕ11B31<~<ĕ1zg~V|f+k/[14؊EV# bv!ȇ1Fxqa!b>C VbbB.l׻43zޛ>ҒJ?؃1@{5X+_I_!^ǺNkߛ |?^yUBhtq,pp[;ǭr^|`g7y>-"7zD~7F׽s\ F8u?J_߱f|W6(I_z/~lsߞzONj~/<(ǟ/TN}dhwR@XpF%Iq?0>k"7̓E1~gX_wH.:eMђ[wy L1^vG=Wn ~vc~z]\~eb},e-q=|C75>å?j?> (CgO7Y{n_?+s??-!xsr>55F.BX+2؎2J 3 O,fl,2XL2X.+e`B(pźPu,} R(` VHep;\+KV_*XH \Z!շVJ5 RJVnR ׌J'd*eyI͕8I\ 7Wxx+oR8WխjRl ,lխfTjP n 0UuԭWg )URn0Oj`qPKcX(5pE#Q) 'JY>#R)KSM,4_WnFWS]RBWj8uRwb_{ؠzBw]_ !E5X!׋cG7COpijk-`#C xd(3_ǽXXu__.AKpd|"Kpd0ٻ{?Tǵ6zm63cNi\n{N0I$Eܡl +GZ_x?Cj %rs0pL^ߝH㬵u||os5->xk +%$5H<=3IUI])6sY6̋l6cBY dNknh5,1ᵊl"?\aZ"d3߁ö3ƭRpoS[i,X7ݗ~D$JCí?Ç~ [BX9=o4_l0;P ~JFll`EEb}c9;#8rwBqkY,'8Rd#q5!6JRb#7h y%X-WJeH WJFhX8R$-@YRg,RK6fRWHA"-@Z,TbwUbGc&>#U@2u* IJK%$qE$\R,8*֘jp]k"y-_ a9;J7{0f pT5WiQ|h Ͱ. պHF,'9d=Fͯ+eԤU;wŲiV/!hW:7קzƹO$ڏV'H͏jZl/߃ӯNkSM Y]gMʉIZv^J8BBW|ÑR GJKD !%&J w<WuRAXAXAum.n= oœ8Rd8u]|>G{ stRũ c My9;'t";4aٚQ* 5)SBenu;pViāN%x*[xTNEc-1Uf$_gT6bgB?K3g4d?僮n;k&tOgsؙ ;s;s2Q*g,R*kY%K(Yrrű[)u}0Yt1ARŌՋ0P!w"!S#VCVbY`Yx06% nX;Ꮆ).! Reu^GaE/@ a=y??pCɑ)*HNFQnR)ՖbJ)ǧPFlR۱pF `b[QOT#J)RtjRS~BAgW3B/AD5 u5IAdtrtv,թAYCŎAPF~jndZ+25hR&M(Nʃ76'R u^Gv@Qϖ]Ŝ1bQ8Nj 'e 'ue̽wբ7.l\*OӸT׸Gqi_Ke8u%/©ըNQkϫBӥjʚF M'4Y^b-K̾ sTOǮܾRTs{"6J(p`'WHXb ,1JRcFRX)!0 Pa„„1!c# Ƅ c(!`l5V 74PX c,2‚ &nL},X;TZh ꚣ;{6u2 ?-XO U4 ,1XǺXbbku;ψcB ]K֌;!XQX]qS*YDZgfNh 8h g ) :E8kA8KE%kJ? *Y/*Cc5c;h-NOcf]uiɌu.`!LNF1:.;ИBXuj S wg@.Y-$Yb!$THl7) !da!,,v,ZbFXB ]knZ>G Vj jYa%ưXRXBh*E4TF4sXMp\U|պ hVhzreib/DVHm!H̭OU&jd -QmsKTH%*V&t}b焮<TABWcbsڄUOV6aŲh `jեNP ;-\BVPS)ګB U|~r͍Z&t$te )VKòVŌsج/&_߿cTҪru_:?'a*3C`NpԖ;yT0uhggc .к3N.9u'/u/뎩Y`F1Y'Ǣ ~۴pNQA&X:si\_<c(S}2 +*T<(Ƽiy-U@'xN_!.qPZ\x-}?_B-~]> )su.DCH09]Z=)Ԍ\?\8Bs cIѲ"i?mGu^ Pdm/}x#VVRw 9e}}}תH& <u)9uiYmmBMeүsr^C:|LJ ak:<;kF~M9E|}.4:]o'fflf&{mgf _eE fdWҬs!s UB@*Km-dDhn ~ t GG~nE2wiMOsɴCwvpMW6uUa|ZVbu{BU2-x_{9^IPfrMo=Qۢ \>*,0ߗvA67X0hMGt4 01drW2 2 1%ŌW 0yJ2ޘ0q2JY7BHHM)@ ~C@B XM)Ab]X ) 2V_$ݔ.\H#RM)ڦhS@H=I $Iͱ}H.w(@r{^m5hzB}$&!5c $a -0o[xT@,ږ2@ksF@mR*P=nJ g1p ˛bUM[F%[0.0]fL70],*tL4tq QjUYj{(]/~U6;S+,/4X^x ]Z|Tp9w"Qţ(`y0=5Ridk  ~O?(D4%/^>{T > )J*G)eQIE(Q-+GSk1_Ot =n&Tuy3D"UNTj>:Q)5ǕLZc\ϙR{bD}%7ĵgz:C[-~no~U_OpNM*c'j^|ԕc]:_9ctbFtrzc5cUBƠ7Z\NOޟ[TZxۉ[~oÚ&FdDB$r=;v[АK_Sr9T4DC.Kl|D,?1:d'DAWXz_(U:͟xa~UM<ѯʱ_uخg^>Zyn̗tu6a >+;di5!;WV4j߲s+W#ݯ4hl⣱uc1_'^)ҲJ--'O:q6{BgQc\jX[{bgӫ`lHZJKmSHD[?.̭Bɑ*B*ZC M9dRt2)K$L~۳Cuk2dB$n&%Mjྏl~i' FJV7m뾮}>2Vlئ(m`o3]#];vb7m&y}ۂ&Sp.%GMN -ػL s(! @oeqTJ(*n6BR'Mjns! sC(p$¨48pɂJ~,kSq0OQ|*-SxSȧT:ȧn8,{׹RNYW3u8uYDR)F(l9Phk͐)yEL)4RqLk3KRKXc,ʼgW`WՑ͞]t&5oXH_+ggƊ],J g3-(+G^Lj]ۭU̮Z]#M䛥6 `dʹ6 hj/c]N*0;{!>[mcwkY";Z~lugXʊF[Zj.6jYܕ"+G3M4jٌk͸[ie͸r,Cٚr⹳{6QR{l}k->i)+~̗6UY_y^"3:9On/dq,oNmPt.|J `!EӔ)@ueiJC4%*䪄2*)2*)B`Mu5iSj>ҦZVtR[Vi5XsFҚ5gT4!֜Qp6 g^ؔ/,Ȉ:W(ttA V jŌA ԩZq$jNiS:b,R %:*+C$Wi jU<\a5sՀ`5NJ)XXq,SB X+y+6hXKޔDҌa V܊7x})\gfzH ~mo, cӺނ[.)q|/+<e6fǍO9_ױ#CgZ?shiMOF? {0w[41+QH:Vh~Zs;Y5Xni;>uf~wg3'>:c*+cQ Ctgr&j}ˋ6쾜`5;yl%۷V(3?#ޭf~Uoӥu .emXJ|G6Fq? z$ cYϹұnD:⏿5vsu)oj0e~/.\WלQu m6חgCݞʛ>L~~0:bR{n6*=r?P߉y`|Q G`ƶҟgOG}PmnۗuoyJ 3yU!`^YXA!vH_=-پbI8j>]>t5?=Tp'z*a='+bRH!& )Ƥ9bR+cRc՘!&H07{iB*MtDXMlMIee!X6DFD8Rq$G318R8R8;+Mmjd͑VkL*g1,5cRc!&T{<ƺ.s"r"rU_lD2\},j cu ֹE|#f| [}cƷ_k|3Xo5# 0[PIu#&PIuGeL31)RI1#bR׌II1!bRIeL*ɘTĤ2IX̘>ƤBL*fL{>ƌI}Ĥ:=ڋԘT5iteeekv[%p2pսʔ3p/n\eՖ<[ JʭEZJ)֔RK+BK+BK+- V6bh|Xh|Xh|TJ([Zb,tJ]٭OnUl#!(cjvB|Ō|"I)Dc=$MN(,$59XjRJ45 2~!ӸQt6k٤b;[iY}=`ǁ9Rֻ~ĐcENθ4X"Vr,6hle"|c2`Y\68 e-" kY8z`Ah F*(mAWK|5t%AXW~ꁔxm& b3mA&ۘ6?zB z^>,\K/y|qMfq$VŁ"4cX봕74JtSH)z\Fw#g%&+R M N*EvSJ )%&,L9Pg5i? 
iݔBn :vM^ʳvb\(Us$9XT.K0PT@Xfxu (+aHz!{~˞C&T Q Zx`EAWJ(NxXQ9žá(-LD)~` JQ CQ .Rǀ (^E(P5EhsV7RJO8xYnqt MB7XnB7a݄K1p '`n?y<n@7.7x4 n7q&hvX}~DGO8xZvo )_6n&+HvhxF-t1s; yÆT _~:HݯqRj/eoOHc,_RT"yO,Ưnj'Mj XXD6~xϸ狿:ڏ'Ȥ.?}e9v^Z[Jdzp`hnŠ5.F"6?p} 1 )dba%1k{>+1c=u PzCn>PdTx$ F,_0)ʺ-m~ʔՅo&Z81m~a9rr^w8;+};˕ ;/O;=uޘv:aisl3W]Vx=[n?MíkVPNX#_HC bkNJ/=R|ua ƄEɲZ;w7+_i t7ŪA9]|"rX׭%*{j<"][;sWGo`{2x暸H) Gyc4m̓]Jgw+hwYC{lHr}xŹ7lU=#>6?@JBHDΖF_ ,Ag1RH3eK=&cNB XXMkY]VLJMo,]Xe.'dY{GZ߄PW K*sܠ!"KMDΖRC>jiX2%:^[; SjƄX&GhGxyb e?&'UMlҌslнb?~BL]dnP%y_*yb*5`&7Q̵Gv(p&FږfL-ZDmI*Ѷ?ƺmXmdKnkJrJV8ZTh dQYN$wi4F(S7J\+W S s|q6„~3}=X74ay,u0/v/<|}/dYVr|T,:FΙK6YJ,9&3m>yDC ŘqYOYs԰Sm&X;IJK?i!K_#HGmp^ߙp4ၥӎ>ԓ0`Vn65@ ,V%M؛.}4pu$00.pgޗ]djѷ_"'vN*zuk8]ذ kFAPyu9; #F_0X:!a, # ׋oּov6zϿ0ၵ [ٙnw?k^Em;mT }˔DXVd$}K]2y&wv"%a?.Ǿ@:IjO11t."5\ 6D.^A/ l[@u o,Uump_wnT9LxcXQ *-$"1`'"kkY8_G!kYn&ws QɶEe`-C*ء -zl"@[F9Qnl] a [8"="|vP^5m6x^H^BZM ׂ۵5' IZ])U1%ij*`LԲXqVO2XRWEjYQMٿXSS̩d^0OU@Z-9~ߗ\D5-(Y՜|Dƚ)8VVjZ'ʨ1N3Wz3VԲQ'hɁ^h=SʉZ$DV? LoZ}JP|#~~R(i*n""`: Xab) L&*o$oL1Ld~cQ \uGg0(рZ. ɋmx*ohSW$㋓-|c)Xu5%2olOrQ;0_Th7&Jz*$cJ&qjR-0wcuS)Wʿ 7tV74ɺ8 XbjP~I\(L Lҟψ$A%(q2@q7nÄc% mcg P{jr DTuzT]^6%P&KI$IP`r62(8C5Wq&xn] sv- :iàK6ID w%魸1$)acOޓ 4SL))Bx^B;YVm-TJ([ 5abey݅`5Z !gXWR_Z(l-ce۝UZdܷߵ*!U'U0#4V6aUc=^ø5 \}ҼNZ:|b@Nt+*Wtw!nwtBGG'i* U)fڜ :Yzm6 T ʡ,N7R%5JN/843wRvJ}=Ԯ; *}n@Jq;r\VP*pXknrn6hQ]]:uL*BǠ-t>/]~媢O:F*f,t`Җ {{httThXD g Q]~}3v +׷-9yo@XWW ׉9W.>\i|е뻅g"U<^󧍚*W`VŰ!6B1,1lJ!PNsYNxO$'RRR t.<]H!; p.>Q`Ghb`$RUo ;R̓wΨ;ե:Bf̈9f 75u~"k|a)4$\:"k(uj@d o5':Bd !D؝(YddY Ƶ4$<<=񉈬a#KBdM75fuj^5sBp1bf!bf^d5fH)"kOX#K55m0-9aD8 "1#SY=01]tq àql.(-6iE qJYVˍ1!*SJEFӃG]1JmH7:*+hʘ1,XobM2&ʒveǰS9@SYȧ" s!ƥ2b>k0u8/zbC蠪_bxfOƸQdh*tG)$N %$ٔHlJ!ܔnNJp,83 MR#Y ,v;qbsmu>dSJh0'ab)œB<)f (Loom X}J n|l:ݺkP\(&fzm8r@UB[.ǧ8w# V|]۰s[K1c6|pW8571״ $1>:xed隞W..Gx(?y{0w Щ B8*z=ԗ6MG@ЍUTZHM8\˾^H UW~|^^Fnă ނX:tJLM z~2RΧ}m8izGܷ^aǃst ޢ9wK/5xa` 6^mn#]b.m S,"Gy;]Oĩֶ&:0WtE+nn7ϽՅM ?礩<ؽJp]g=c:,:n8}45?`\x j 6yNbC>/ߗuWsA3,nﯓ0zy|lcnˉDPY^< @I^9T|3f%-J-B-VJZHFRY*~Q*R0Z1!IH-nS{P+B)ɜ-ԐڜjM)ŦBL+k_}X}7f _^}ŌeY* @UkkB qVHkRR5TӁ B6O# `³VAjZ̤1d:Ŵ:YLs'b,jիUzT{Z*/2VrraZ}/wnn Msu~sN7RV6:}'JYmPTv(Ta'i%@ F9iumd9:zu3 VVj]/ uWyݥ1- n'qWƯDsUmR_Y꫗DJ]J*c]Wb aoS#7z%)zR=pHgSj@ /PÛBգ:3:7ƥ˷&XË:IgҽͿ1;GeY =7TR*MCC~JecE /^|_>v}# }Sj3-ZQv[>OyzTXhMdu@,Blڗ`{Q3ի8˾vml &#k4?Ŵm믪Ј{**a!42ֱF%,6ׅx#*ai45ޅ>"jçObU G)@B+վ.Ĩ ш`^f բlN]'TtQ\bDk];n10bbkm161YIl{L񼔊:emQRe(UjT|{='Eȹv!cQoVqo-vb(AdA02])4vΐRcYRNs]""X(,6b,D l~#"X|"ӔRpFpA)>)BBB !,4RYD!0T887R B Ԯ7;aa/LD61c,DEB< c,°LsL}LBLs+2~ŕ5RGBHZ[ ^P0->Ba)P^$Ba 1UCa 0.E9Wury"uW\̗+MįtҵEʥG-W,+4W^XW)#2"2T#26VTbdJTߑ)-5"S>kd sRUs"nC'YĜph{iedwWdnղ_!R[ܔj&ޫiTvjyK:3)e`!%i+\뾏?_$5 ^1BZz+Wkk47&үИ0"\I*::W!#\Ie+JF$6{u!IH:ai"Wi"apJ.Z.n?7'!2*meԐQYA{uxw- BbV[Ϋ9Ck ۶j(,7?~noڟɅW\IE ^[֚e)ґ'?(68ݟ֙ؖV̗ WG6LeSİ)1-!'ut%:ݷ~lrrgik!gHuKnDfVLGjd*Z5hRZŒf߰ Lavc]1: $Md(Ѿ~O7g(&mƒk._ٳ,A hR'ޒ#ͯ~SϦTg"a"K%]g%y>Nk}2XZC,92IjLKNXR$s3&+CoVNLW.k᭫R|M!Q(_So򆭑a|u0k)$\<(_yPR[A/0n'wE X)߲7D1,Pb,P[ҺsgİFBI Kwj},FÖ:Qir̦8fp2X pI =6{,5^_Ylarc61  pn=Ylcx/,i/,-A4X\ sI+ V^>4jдz–?iͱDgRhZ``E U 0f0B BiV``2Ll~%G#xZPӚRi:aa+艾Dm8Gl7x%Ĉ* Z?\@t7)%X 7)%HDr 7&L7r@`+Ec P1iW_$b aH%*pK $8 5RW@ 4` u&D;#' KpJzrH_I>T*g.h@HƌISoD2"+l >G-c`='v19m$Tcj*<=aY#d30f`@c!t0>1k4c荗6?Z,0Ui %`m*F:(jbIw7Fa`*KJz#0ɍ ';4`h1*AYaqV)r4!yz8zDJۯ_d:A{u0tR "&L$2 <% d:LGBIŃLG2L2}a`BQt Xp$U]!CXmN^sB@ XJ2$ӑq cLG&;tE kN2$ӁԞd:ڡ$Egue} $+c@KPGL, &G9irIۘ49Z} iJ^tfGg#QAJ<(Qn'&!$H+IJ2GvtErG$)wt@ͺדr>FאBWPAK>(wAA;į0b2jTRQc]s!rImu: +dHH+)w2f,!1)wdI+ϒrG6ة+ AI;(tH%)q, 'brȓB ws(9ݐR"P wCJXRPR!8?1у;|99"RL BLGC)͡ʦBYB\BKl*UAjaNB kXǕ"DR xxCIl#&ʀ7US.:qE!&AL ߌbb4bb>cbD8ֈvq0£"6&2n4ӈSJ11Vc!&N)!t &ACLPLݎ:bbSJ11A!&3&eL #څE弝, hC͐B܌zv;y (@t 5?f""nAh+3E ׅƌA)9."l-#lR+ g@qUu 
0X%d|V "YIpX1Lfk0*]^1D/`"0X!- endstream endobj startxref 639545 %%EOF dimRed/inst/doc/dimensionality-reduction.Rnw0000644000176200001440000017352413371631672020737 0ustar liggesusers\documentclass{article} %\VignetteEngine{knitr::knitr} %\VignetteIndexEntry{Dimensionality Reduction} %\VignetteKeyword{Dimensionality Reduction} \usepackage[utf8]{inputenc} \usepackage[T1]{fontenc} \usepackage{hyperref} \usepackage{amsmath,amssymb} \usepackage{booktabs} \usepackage{tikz} \usetikzlibrary{trees} \usepackage[sectionbib,round]{natbib} \title{\pkg{dimRed} and \pkg{coRanking}---Unifying Dimensionality Reduction in R} \author{Guido Kraemer \and Markus Reichstein \and Miguel D.\ Mahecha} % these are taken from RJournal.sty: \makeatletter \DeclareRobustCommand\code{\bgroup\@noligs\@codex} \def\@codex#1{\texorpdfstring% {{\normalfont\ttfamily\hyphenchar\font=-1 #1}}% {#1}\egroup} \newcommand{\kbd}[1]{{\normalfont\texttt{#1}}} \newcommand{\key}[1]{{\normalfont\texttt{\uppercase{#1}}}} \DeclareRobustCommand\samp{`\bgroup\@noligs\@sampx} \def\@sampx#1{{\normalfont\texttt{#1}}\egroup'} \newcommand{\var}[1]{{\normalfont\textsl{#1}}} \let\env=\code \newcommand{\file}[1]{{`\normalfont\textsf{#1}'}} \let\command=\code \let\option=\samp \newcommand{\dfn}[1]{{\normalfont\textsl{#1}}} % \acronym is effectively disabled since not used consistently \newcommand{\acronym}[1]{#1} \newcommand{\strong}[1]{\texorpdfstring% {{\normalfont\fontseries{b}\selectfont #1}}% {#1}} \let\pkg=\strong \newcommand{\CRANpkg}[1]{\href{https://CRAN.R-project.org/package=#1}{\pkg{#1}}}% \let\cpkg=\CRANpkg \newcommand{\ctv}[1]{\href{https://CRAN.R-project.org/view=#1}{\emph{#1}}} \newcommand{\BIOpkg}[1]{\href{https://www.bioconductor.org/packages/release/bioc/html/#1.html}{\pkg{#1}}} \makeatother \begin{document} \maketitle \abstract{ % This document is based on the manuscript of \citet{kraemer_dimred_2018} which was published in the R-Journal and has been modified and extended to fit the format of a package vignette and to match the extended functionality of the \pkg{dimRed} package. ``Dimensionality reduction'' (DR) is a widely used approach to find low dimensional and interpretable representations of data that are natively embedded in high-dimensional spaces. % DR can be realized by a plethora of methods with different properties, objectives, and, hence, (dis)advantages. The resulting low-dimensional data embeddings are often difficult to compare with objective criteria. % Here, we introduce the \CRANpkg{dimRed} and \CRANpkg{coRanking} packages for the R language. % These open source software packages enable users to easily access multiple classical and advanced DR methods using a common interface. % The packages also provide quality indicators for the embeddings and easy visualization of high dimensional data. % The \pkg{coRanking} package provides the functionality for assessing DR methods in the co-ranking matrix framework. % In tandem, these packages allow for uncovering complex structures high dimensional data. % Currently 15 DR methods are available in the package, some of which were not previously available to R users. % Here, we outline the \pkg{dimRed} and \pkg{coRanking} packages and make the implemented methods understandable to the interested reader. % } \section{Introduction} \label{sec:intro} Dimensionality Reduction (DR) essentially aims to find low dimensional representations of data while preserving their key properties. 
% Many methods exist in literature, optimizing different criteria: % maximizing the variance or the statistical independence of the projected data, % minimizing the reconstruction error under different constraints, % or optimizing for different error metrics, % just to name a few. % Choosing an inadequate method may imply that much of the underlying structure remains undiscovered. % Often the structures of interest in a data set can be well represented by fewer dimensions than exist in the original data. % Data compression of this kind has the additional benefit of making the encoded information better conceivable to our brains for further analysis tasks like classification or regression problems. % For example, the morphology of a plant's leaves, stems, and seeds reflect the environmental conditions the species usually grow in (e.g.,\ plants with large soft leaves will never grow in a desert but might have an advantage in a humid and shadowy environment). % Because the morphology of the entire plant depends on the environment, many morphological combinations will never occur in nature and the morphological space of all plant species is tightly constrained. % \citet{diaz_global_2016} found that out of six observed morphological characteristics only two embedding dimensions were enough to represent three quarters of the totally observed variability. % DR is a widely used approach for the detection of structure in multivariate data, and has applications in a variety of fields. % In climatology, DR is used to find the modes of some phenomenon, e.g.,\ the first Empirical Orthogonal Function of monthly mean sea surface temperature of a given region over the Pacific is often linked to the El Ni\~no Southern Oscillation or ENSO \citep[e.g.,\ ][]{hsieh_nonlinear_2004}. % In ecology the comparison of sites with different species abundances is a classical multivariate problem: each observed species adds an extra dimension, and because species are often bound to certain habitats, there is a lot of redundant information. Using DR is a popular technique to represent the sites in few dimensions, e.g.,\ \citet{aart_distribution_1972} matches wolfspider communities to habitat and \citet{morrall_soil_1974} match soil fungi data to soil types. (In ecology the general name for DR is ordination or indirect gradient analysis.) % Today, hyperspectral satellite imagery collects so many bands that it is very difficult to analyze and interpret the data directly. % Resuming the data into a set of few, yet independent, components is one way to reduce complexity \citep[e.g.,\ see][]{laparra_dimensionality_2015}. % DR can also be used to visualize the interiors of deep neural networks \citep[e.g.,\ see ][]{han_deep_2016}, where the high dimensionality comes from the large number of weights used in a neural network and convergence can be visualized by means of DR\@. % We could find many more example applications here but this is not the main focus of this publication. % The difficulty in applying DR is that each DR method is designed to maintain certain aspects of the original data and therefore may be appropriate for one task and inappropriate for another. % Most methods also have parameters to tune and follow different assumptions. The quality of the outcome may strongly depend on their tuning, which adds additional complexity. 
% DR methods can be modeled after physical models with attracting and repelling forces (Force Directed Methods), projections onto low dimensional planes (PCA, ICA), divergence of statistical distributions (SNE family), or the reconstruction of local spaces or points by their neighbors (LLE). % As an example for how changing internal parameters of a method can have a great impact, the breakthrough for Stochastic Neighborhood Embedding (SNE) methods came when a Student's $t$-distribution was used instead of a normal distribution to model probabilities in low dimensional space to avoid the ``crowding problem'', that is,\ a sphere in high dimensional space has a much larger volume than in low dimensional space and may contain too many points to be represented accurately in few dimensions. % The $t$-distribution, allows medium distances to be accurately represented in few dimensions by larger distances due to its heavier tails. % The result is called in $t$-SNE and is especially good at preserving local structures in very few dimensions, this feature made $t$-SNE useful for a wide array of data visualization tasks and the method became much more popular than standard SNE (around six times more citations of \citet{van_der_maaten_visualizing_2008} compared to \citet{hinton_stochastic_2003} in Scopus \citep{noauthor_scopus_nodate}). % There are a number of software packages for other languages providing collections of methods: In Python there is scikit-learn \citep{scikit-learn}, which contains a module for DR. In Julia we currently find ManifoldLearning.jl for nonlinear and MultivariateStats.jl for linear DR methods. % There are several toolboxes for DR implemented in Matlab \citep{van_der_maaten_dimensionality_2009, arenas-garcia_kernel_2013}. The Shogun toolbox \citep{soeren_sonnenburg_2017_1067840} implements a variety of methods for dimensionality reduction in C++ and offers bindings for a many common high level languages (including R, but the installation is anything but simple, as there is no CRAN package). % However, there is no comprehensive package for R and none of the former mentioned software packages provides means to consistently compare the quality of different methods for DR. % For many applications it can be difficult to objectively find the right method or parameterization for the DR task. % This paper presents the \pkg{dimRed} and \pkg{coRanking} packages for the popular programming language R. Together, they provide a standardized interface to various dimensionality reduction methods and quality metrics for embeddings. They are implemented using the S4 class system of R, making the packages both easy to use and to extend. The design goal for these packages is to enable researchers, who may not necessarily be experts in DR, to apply the methods in their own work and to objectively identify the most suitable methods for their data. % This paper provides an overview of the methods collected in the packages and contains examples as to how to use the packages. % The notation in this paper will be as follows: $X = [x_i]_{1\leq i \leq n}^T \in \mathbb{R}^{n\times p}$, and the observations $x_i \in \mathbb{R}^p$. % These observations may be transformed prior to the dimensionality reduction step (e.g.,\ centering and/or standardization) resulting in $X' = [x'_i]_{1\leq i \leq n}^T \in \mathbb{R}^{n\times p}$. % A DR method then embeds each vector in $X'$ onto a vector in $Y = [y_i]_{1\leq i \leq n}^T \in \mathbb{R}^{n\times q}$ with $y_i \in \mathbb{R}^q$, ideally with $q \ll p$. 
% Some methods provide an explicit mapping $f(x'_i) = y_i$. Some even offer an inverse mapping $f^{-1}(y_{i}) = \hat x'_{i}$, such that one can reconstruct a (usually approximate) sample from the low-dimensional representation. % For some methods, pairwise distances between points are needed, we set $d_{ij} = d(x_{i}, x_{j})$ and $\hat{d}_{ij} = d(y_i, y_j)$, where $d$ is some appropriate distance function. When referring to \code{functions} in the \pkg{dimRed} package or base R simply the function name is mentioned, functions from other packages are referenced with their namespace, as with \code{package::function}. \begin{figure}[htbp] \centering \input{classification_tree.tex} \caption{% Classification of dimensionality reduction methods. Methods in bold face are implemented in \pkg{dimRed}. Modified from \citet{van_der_maaten_dimensionality_2009}. }\label{fig:classification} \end{figure} \section{Dimensionality Reduction Methods} \label{sec:dimredtec} In the following section we do not aim for an exhaustive explanation to every method in \pkg{dimRed} but rather to provide a general idea on how the methods work. % An overview and classification of the most commonly used DR methods can be found in Figure~\ref{fig:classification}. In all methods, parameters have to be optimized or decisions have to be made, even if it is just about the preprocessing steps of data. % The \pkg{dimRed} package tries to make the optimization process for parameters as easy as possible, but, if possible, the parameter space should be narrowed down using prior knowledge. % Often decisions can be made based on theoretical knowledge. For example,\ sometimes an analysis requires data to be kept in their original scales and sometimes this is exactly what has to be avoided as when comparing different physical units. % Sometimes decisions based on the experience of others can be made, e.g.,\ the Gaussian kernel is probably the most universal kernel and therefore should be tested first if there is a choice. % All methods presented here have the embedding dimensionality, $q$, as a parameter (or \code{ndim} as a parameter for \code{embed}). % For methods based on eigenvector decomposition, the result generally does not depend on the number of dimensions, i.e.,\ the first dimension will be the same, no matter if we decide to calculate only two dimensions or more. % If more dimensions are added, more information is maintained, the first dimension is the most important and higher dimensions are successively less important. % This means, that a method based on eigenvalue decomposition only has to be run once if one wishes to compare the embedding in different dimensions. % In optimization based methods this is generally not the case, the number of dimensions has to be chosen a priori, an embedding of 2 and 3 dimensions may vary significantly, and there is no ordered importance of dimensions. % This means that comparing dimensions of optimization-based methods is computationally much more expensive. % We try to give the computational complexity of the methods. Because of the actual implementation, computation times may differ largely. % R is an interpreted language, so all parts of an algorithm that are implemented in R often will tend to be slow compared to methods that call efficient implementations in a compiled language. % Methods where most of the computing time is spent for eigenvalue decomposition do have very efficient implementations as R uses optimized linear algebra libraries. 
Although, eigenvalue decomposition itself does not scale very well in naive implementations ($\mathcal{O}(n^3)$). \subsection{PCA} \label{sec:pca} Principal Component Analysis (PCA) is the most basic technique for reducing dimensions. It dates back to \citet{pearson_lines_1901}. PCA finds a linear projection ($U$) of the high dimensional space into a low dimensional space $Y = XU$, maintaining maximum variance of the data. It is based on solving the following eigenvalue problem: \begin{equation} (C_{XX}-\lambda_k I)u_k=0\label{eq:pca} \end{equation} where $C_{XX} = \frac 1 n X^TX$ is the covariance matrix, $\lambda_k$ and $u_k$ are the $k$-th eigenvalue and eigenvector, and $I$ is the identity matrix. % The equation has several solutions for different values of $\lambda_k$ (leaving aside the trivial solution $u_k = 0$). % PCA can be efficiently applied to large data sets, because it computationally scales as $\mathcal{O}(np^2 + p^3)$, that is, it scales linearly with the number of samples and R uses specialized linear algebra libraries for such kind of computations. PCA is a rotation around the origin and there exist a forward and inverse mapping. % PCA may suffer from a scale problem, i.e.,\ when one variable dominates the variance simply because it is in a higher scale, to remedy this, the data can be scaled to zero mean and unit variance, depending on the use case, if this is necessary or desired. % Base R implements PCA in the functions \code{prcomp} and \code{princomp}; but several other implementations exist i.e., \BIOpkg{pcaMethods} from Bioconductor which implements versions of PCA that can deal with missing data. % The \pkg{dimRed} package wraps \code{prcomp}. \subsection{kPCA} \label{sec:kpca} Kernel Principal Component Analysis (kPCA) extends PCA to deal with nonlinear dependencies among variables. % The idea behind kPCA is to map the data into a high dimensional space using a possibly non-linear function $\phi$ and then to perform a PCA in this high dimensional space. % Some mathematical tricks are used for efficient computation. % If the columns of X are centered around $0$, then the principal components can also be computed from the inner product matrix $K = X^TX$. % Due to this way of calculating a PCA, we do not need to explicitly map all points into the high dimensional space and do the calculations there, it is enough to obtain the inner product matrix or kernel matrix $K \in \mathbb{R}^{n\times n}$ of the mapped points \citep{scholkopf_nonlinear_1998}. % Here is an example calculating the kernel matrix using a Gaussian kernel: \begin{equation}\label{eq:gauss} K = \phi(x_i)^T \phi(x_j) = \kappa(x_i, x_j) = \exp\left( -\frac{\| x_i- x_j\|^2}{2 \sigma^2} \right), \end{equation} where $\sigma$ is a length scale parameter accounting for the width of the kernel. % The other trick used is known as the ``representers theorem.'' The interested reader is referred to \citet{scholkopf_generalized_2001}. The kPCA method is very flexible and there exist many kernels for special purposes. The most common kernel function is the Gaussian kernel (Equation\ \ref{eq:gauss}). % The flexibility comes at the price that the method has to be finely tuned for the data set because some parameter combinations are simply unsuitable for certain data. % The method is not suitable for very large data sets, because memory scales with $\mathcal{O}(n^2)$ and computation time with $\mathcal{O}(n^3)$. 
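%
As a purely illustrative sketch, the Gaussian kernel matrix of Equation\ \ref{eq:gauss} can be computed directly in base R; this is not the \CRANpkg{kernlab} implementation that \pkg{dimRed} wraps, and the choice of the \code{iris} data and of $\sigma = 1$ is an arbitrary placeholder.
%
<<eval=FALSE>>=
## Gaussian kernel matrix: K[i, j] = exp(-||x_i - x_j||^2 / (2 * sigma^2))
gauss_kernel <- function(X, sigma) {
  D2 <- as.matrix(dist(X))^2        # squared Euclidean distances
  exp(-D2 / (2 * sigma^2))
}
K <- gauss_kernel(as.matrix(iris[, 1:4]), sigma = 1)
dim(K)  # n x n: memory grows quadratically with the number of samples
@
%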
% Diffusion Maps, Isomap, Locally Linear Embedding, and some other techniques can be seen as special cases of kPCA. In which case, an out-of-sample extension using the Nyström formula can be applied \citep{bengio_learning_2004}. % This can also yield applications for bigger data, where an embedding is trained with a sub-sample of all data and then the data is embedded using the Nyström formula. Kernel PCA in R is implemented in the \CRANpkg{kernlab} package using the function \code{kernlab::kpca}, and supports a number of kernels and user defined functions. For details see the help page for \code{kernlab::kpca}. The \pkg{dimRed} package wraps \code{kernlab::kpca} but additionally provides forward and inverse methods \citep{bakir_learning_2004} which can be used to fit out-of-sample data or to visualize the transformation of the data space. % \subsection{Classical Scaling} \label{sec:classscale} What today is called Classical Scaling was first introduced by \citet{torgerson_multidimensional_1952}. It uses an eigenvalue decomposition of a transformed distance matrix to find an embedding that maintains the distances of the distance matrix. % The method works because of the same reason that kPCA works, i.e.,\ classical scaling can be seen as a kPCA with kernel $x^Ty$. % A matrix of Euclidean distances can be transformed into an inner product matrix by some simple transformations and therefore yields the same result as a PCA\@. % Classical scaling is conceptually more general than PCA in that arbitrary distance matrices can be used, i.e.,\ the method does not even need the original coordinates, just a distance matrix $D$. % Then it tries to find an embedding $Y$ so that $\hat d_{ij}$ is as similar to $d_{ij}$ as possible. The disadvantage is that it is computationally much more demanding, i.e.,\ an eigenvalue decomposition of an $n\times n$ matrix has to be computed. This step requires $\mathcal{O}(n^2)$ memory and $\mathcal{O}(n^3)$ computation time, while PCA requires only the eigenvalue decomposition of a $d\times d$ matrix and usually $n \gg d$. % R implements classical scaling in the \code{cmdscale} function. % The \pkg{dimRed} package wraps \code{cmdscale} and allows the specification of arbitrary distance functions for calculating the distance matrix. Additionally a forward method is implemented. \subsection{Isomap} \label{sec:isomap} As Classical Scaling can deal with arbitrarily defined distances, \citet{tenenbaum_global_2000} suggested to approximate the structure of the manifold by using geodesic distances. % In practice, a graph is created by either keeping only the connections between every point and its $k$ nearest neighbors to produce a $k$-nearest neighbor graph ($k$-NNG), or simply by keeping all distances smaller than a value $\varepsilon$ producing an $\varepsilon$-neighborhood graph ($\varepsilon$-NNG). % Geodesic distances are obtained by recording the distance on the graph and classical scaling is used to find an embedding in fewer dimensions. This leads to an ``unfolding'' of possibly convoluted structures (see Figure~\ref{fig:knn}). Isomap's computational cost is dominated by the eigenvalue decomposition and therefore scales with $\mathcal{O}(n^3)$. % Other related techniques can use more efficient algorithms because the distance matrix becomes sparse due to a different preprocessing. In R, Isomap is implemented in the \CRANpkg{vegan} package. The \code{vegan::isomap} calculates an Isomap embedding and \code{vegan::isomapdist} calculates a geodesic distance matrix. 
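%
To make these two steps explicit, the following sketch builds a $k$-NNG with the \CRANpkg{igraph} package, obtains geodesic distances as shortest paths on the graph, and embeds them with \code{cmdscale}; it is only meant to illustrate the idea, it assumes that $k$ is large enough for the graph to be connected, and the data set and the value of $k$ are arbitrary placeholders.
%
<<eval=FALSE>>=
library(dimRed)
library(igraph)
X <- as.matrix(loadDataSet("3D S Curve", n = 300)@data)
k <- 10                               # neighborhood size, chosen arbitrarily
D <- as.matrix(dist(X))               # Euclidean distances in the high dimensional space
A <- D
for (i in seq_len(nrow(A)))           # keep only the k nearest neighbors of point i
  A[i, rank(D[i, ]) > k + 1] <- 0
A <- pmax(A, t(A))                    # symmetrize the k-NN graph
g <- graph_from_adjacency_matrix(A, mode = "undirected", weighted = TRUE)
geo <- distances(g)                   # geodesic distance = shortest path on the graph
Y <- cmdscale(geo, k = 2)             # classical scaling of the geodesic distances
@
%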
% The \pkg{dimRed} package uses its own implementation. This implementation is faster mainly due to using a KD-tree for the nearest neighbor search (from the \CRANpkg{RANN} package) and to a faster implementation for the shortest path search in the $k$-NNG (from the \CRANpkg{igraph} package). % The implementation in \pkg{dimRed} also includes a forward method that can be used to train the embedding on a subset of data points and then use these points to approximate an embedding for the remaining points. This technique is generally referred to as landmark Isomap \citep{de_silva_sparse_2004}. % \subsection{Locally Linear Embedding} \label{sec:lle} Points that lie on a manifold in a high dimensional space can be reconstructed through linear combinations of their neighborhoods if the manifold is well sampled and the neighbohoods lie on a locally linear patch. % These reconstruction weights, $W$, are the same in the high dimensional space as the internal coordinates of the manifold. % Locally Linear Embedding \citep[LLE; ][]{roweis_nonlinear_2000} is a technique that constructs a weight matrix $W \in \mathbb{R}^{n\times n}$ with elements $w_{ij}$ so that \begin{equation} \sum_{i=1}^n \bigg\| x_i- \sum_{j=1}^{n} w_{ij}x_j \bigg\|^2\label{eq:lle} \end{equation} is minimized under the constraint that $w_{ij} = 0 $ if $x_j$ does not belong to the neighborhood and the constraint that $\sum_{j=1}^n w_{ij} = 1$. % Finally the embedding is made in such a way that the following cost function is minimized for $Y$, \begin{equation} \sum_{i=1}^n\bigg\| y_i - \sum_{j=1}^n w_{ij}y_j \bigg\|^2.\label{eq:lle2} \end{equation} This can be solved using an eigenvalue decomposition. Conceptually the method is similar to Isomap but it is computationally much nicer because the weight matrix is sparse and there exist efficient solvers. % In R, LLE is implemented by the package \CRANpkg{lle}, the embedding can be calculated with \code{lle::lle}. Unfortunately the implementation does not make use of the sparsity of the weight matrix $W$. % The manifold must be well sampled and the neighborhood size must be chosen appropriately for LLE to give good results. % \subsection{Laplacian Eigenmaps} \label{sec:laplaceigenmaps} Laplacian Eigenmaps were originally developed under the name spectral clustering to separate non-convex clusters. % Later it was also used for graph embedding and DR \citep{belkin_laplacian_2003}. % A number of variants have been proposed. % First, a graph is constructed, usually from a distance matrix, the graph can be made sparse by keeping only the $k$ nearest neighbors, or by specifying an $\varepsilon$ neighborhood. % Then, a similarity matrix $W$ is calculated by using a Gaussian kernel (see Equation \ref{eq:gauss}), if $c = 2 \sigma^2 = \infty$, then all distances are treated equally, the smaller $c$ the more emphasis is given to differences in distance. % The degree of vertex $i$ is $d_i = \sum_{j=1}^n w_{ij}$ and the degree matrix, $D$, is the diagonal matrix with entries $d_i$. % Then we can form the graph Laplacian $L = D - W$ and, then, there are several ways how to proceed, an overview can be found in \citet{luxburg_tutorial_2007}. % The \pkg{dimRed} package implements the algorithm from \citet{belkin_laplacian_2003}. Analogously to LLE, Laplacian eigenmaps avoid computational complexity by creating a sparse matrix and not having to estimate the distances between all pairs of points. 
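%
Ignoring sparsity for the sake of brevity, the construction of $W$, $D$, and $L$ described above can be sketched in dense form as follows (the data matrix and the value of $\sigma$ are arbitrary placeholders); the eigenvector step that completes the embedding is described next.
%
<<eval=FALSE>>=
X <- as.matrix(iris[, 1:4])                      # any data matrix
sigma <- 1                                       # kernel width, c = 2 * sigma^2
W <- exp(-as.matrix(dist(X))^2 / (2 * sigma^2))  # similarity matrix from the Gaussian kernel
D <- diag(rowSums(W))                            # degree matrix with entries d_i
L <- D - W                                       # unnormalized graph Laplacian
@
%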
% Then the eigenvectors corresponding to the lowest eigenvalues larger than $0$ of either the matrix $L$ or the normalized Laplacian $D^{-1/2}LD^{-1/2}$ are computed and form the embedding. \subsection{Diffusion Maps} \label{sec:isodiffmaplle} Diffusion Maps \citep{coifman_diffusion_2006} take a distance matrix as input and calculates the transition probability matrix $P$ of a diffusion process between the points to approximate the manifold. % Then the embedding is done by an eigenvalue decompositon of $P$ to calculate the coordinates of the embedding. % The algorithm for calculating Diffusion Maps shares some elements with the way Laplacian Eigenmaps are calculated. % Both algorithms depart from the same weight matrix, Diffusion Maps calculate the transition probability on the graph after $t$ time steps and do the embedding on this probability matrix. The idea is to simulate a diffusion process between the nodes of the graph, which is more robust to short-circuiting than the $k$-NNG from Isomap (see bottom right Figure \ref{fig:knn}). % Diffusion maps in R are accessible via the \code{diffusionMap::diffuse()} function, which is available in the \CRANpkg{diffusionMap} package. % Additional points can be approximated into an existing embedding using the Nyström formula \citep{bengio_learning_2004}. % The implementation in \pkg{dimRed} is based on the \code{diffusionMap::diffuse} function. % , which does not contain an % approximation for unequally sampled manifolds % \citep{coifman_geometric_2005}. % \subsection{non-Metric Dimensional Scaling} \label{sec:nmds} While Classical Scaling and derived methods (see section \nameref{sec:classscale}) use eigenvector decomposition to embed the data in such a way that the given distances are maintained, non-Metric Dimensional Scaling \citep[nMDS, ][]{kruskal_multidimensional_1964,kruskal_nonmetric_1964} uses optimization methods to reach the same goal. % Therefore a stress function, \begin{equation} \label{eq:stress} S = \sqrt{\frac{\sum_{i>= if(Sys.getenv("BNET_BUILD_VIGNETTE") != "") { library(dimRed); library(ggplot2); #library(dplyr); library(tidyr) ## define which methods to apply embed_methods <- c("Isomap", "PCA") ## load test data set data_set <- loadDataSet("3D S Curve", n = 1000) ## apply dimensionality reduction data_emb <- lapply(embed_methods, function(x) embed(data_set, x)) names(data_emb) <- embed_methods ## plot data set, embeddings, and quality analysis ## plot(data_set, type = "3vars") ## lapply(data_emb, plot, type = "2vars") ## plot_R_NX(data_emb) add_label <- function(label) grid::grid.text(label, 0.2, 1, hjust = 0, vjust = 1, gp = grid::gpar(fontface = "bold", cex = 1.5)) ## pdf('~/phd/text/dimRedPackage/plots/plot_example.pdf', width = 4, height = 4) ## plot the results plot(data_set, type = "3vars", angle = 15, mar = c(3, 3, 0, 0), box = FALSE, grid = FALSE, pch = 16) add_label("a") par(mar = c(4, 4, 0, 0) + 0.1, bty = "n", las = 1) plot(data_emb$Isomap, type = "2vars", pch = 16) add_label("b") plot(data_emb$PCA, type = "2vars", pch = 16) add_label("d") ## calculate quality scores print( plot_R_NX(data_emb) + theme(legend.title = element_blank(), legend.position = c(0.5, 0.1), legend.justification = c(0.5, 0.1)) ) add_label("c") } else { # These cannot all be plot(1:10)!!! It's a mistery to me. 
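  ## A plausible explanation for the mystery mentioned above: knitr drops
  ## consecutive duplicate plots, so four *different* placeholder plots seem to
  ## be needed here to produce figure/pca_isomap_example-1.pdf through -4.pdf,
  ## which the \includegraphics commands below expect even when the full
  ## example is skipped.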
plot(1:10) barplot(1:10) hist(1:10) plot(1:10) } @ \includegraphics[page=1,width=.45\textwidth]{figure/pca_isomap_example-1.pdf} \includegraphics[page=1,width=.45\textwidth]{figure/pca_isomap_example-2.pdf} \includegraphics[page=1,width=.45\textwidth]{figure/pca_isomap_example-3.pdf} \includegraphics[page=1,width=.45\textwidth]{figure/pca_isomap_example-4.pdf} \caption[dimRed example]{% Comparing PCA and Isomap: % (a) An S-shaped manifold, colors represent the internal coordinates of the manifold. % (b) Isomap embedding, the S-shaped manifold is unfolded. % (c) $R_{NX}$ plotted agains neighborhood sizes, Isomap is much better at preserving local distances and PCA is better at preserving global Euclidean distances. % The numbers on the legend are the $\text{AUC}_{1 / K}$. (d) PCA projection of the data, the directions of maximum variance are preserved. % }\label{fig:plotexample} \end{figure} <>= ## define which methods to apply embed_methods <- c("Isomap", "PCA") ## load test data set data_set <- loadDataSet("3D S Curve", n = 1000) ## apply dimensionality reduction data_emb <- lapply(embed_methods, function(x) embed(data_set, x)) names(data_emb) <- embed_methods ## figure \ref{fig:plotexample}a, the data set plot(data_set, type = "3vars") ## figures \ref{fig:plotexample}b (Isomap) and \ref{fig:plotexample}d (PCA) lapply(data_emb, plot, type = "2vars") ## figure \ref{fig:plotexample}c, quality analysis plot_R_NX(data_emb) @ The function \code{plot\_R\_NX} produces a figure that plots the neighborhood size ($k$ at a log-scale) against the quality measure $\text{R}_{NX}(k)$ (see Equation \ref{eq:rnx}). % This gives an overview of the general behavior of methods: if $\text{R}_{NX}$ is high for low values of $K$, then local neighborhoods are maintained well; if $\text{R}_{NX}$ is high for large values of $K$, then global gradients are maintained well. % It also provides a way to directly compare methods by plotting more than one $\text{R}_{NX}$ curve and an overall quality of the embedding by taking the area under the curve as an indicator for the overall quality of the embedding (see fig~\ref{eq:auclnk}) which is shown as a number in the legend. Therefore we can see from Figure~\ref{fig:plotexample}c that $t$-SNE is very good a maintaining close and medium distances for the given data set, whereas PCA is only better at maintaining the very large distances. % The large distances are dominated by the overall bent shape of the S in 3D space, while the close distances are not affected by this bending. % This is reflected in the properties recovered by the different methods, the PCA embedding recovers the S-shape, while $t$-SNE ignores the S-shape and recovers the inner structure of the manifold. % Example 2: Often the quality of an embedding strongly depends on the choice of parameters, the interface of \pkg{dimRed} can be used to facilitate searching the parameter space. Isomap has one parameter $k$ which determines the number of neighbors used to construct the $k$-NNG\@. % If this number is too large, then Isomap will resemble an MDS (Figure~\ref{fig:knn} e), if the number is too small, the resulting embedding contains holes (Figure~\ref{fig:knn} c). 
% The following code finds the optimal value, $k_{\text{max}}$, for $k$ using the $Q_{\text{local}}$ criterion, the results are visualized in Figure~\ref{fig:knn} a: \begin{figure}[htp] \centering <>= if(Sys.getenv("BNET_BUILD_VIGNETTE") != "") { library(dimRed) library(cccd) ## Load data ss <- loadDataSet("3D S Curve", n = 500) ## Parameter space kk <- floor(seq(5, 100, length.out = 40)) ## Embedding over parameter space emb <- lapply(kk, function(x) embed(ss, "Isomap", knn = x)) ## Quality over embeddings qual <- sapply(emb, function(x) quality(x, "Q_local")) ## Find best value for K ind_max <- which.max(qual) k_max <- kk[ind_max] add_label <- function(label){ par(xpd = TRUE) b = par("usr") text(b[1], b[4], label, adj = c(0, 1), cex = 1.5, font = 2) par(xpd = FALSE) } names(qual) <- kk } @ <<"select_k",include=FALSE,fig.width=11,fig.height=5>>= if(Sys.getenv("BNET_BUILD_VIGNETTE") != "") { par(mfrow = c(1, 2), mar = c(5, 4, 0, 0) + 0.1, oma = c(0, 0, 0, 0)) plot(kk, qual, type = "l", xlab = "k", ylab = expression(Q[local]), bty = "n") abline(v = k_max, col = "red") add_label("a") plot(ss, type = "3vars", angle = 15, mar = c(3, 3, 0, 0), box = FALSE, grid = FALSE, pch = 16) add_label("b") } else { plot(1:10) plot(1:10) } @ <<"knngraphs",include=FALSE,fig.width=8,fig.height=3>>= if(Sys.getenv("BNET_BUILD_VIGNETTE") != "") { par(mfrow = c(1, 3), mar = c(5, 4, 0, 0) + 0.1, oma = c(0, 0, 0, 0)) add_knn_graph <- function(ind) { nn1 <- nng(ss@data, k = kk[ind]) el <- get.edgelist(nn1) segments(x0 = emb[[ind]]@data@data[el[, 1], 1], y0 = emb[[ind]]@data@data[el[, 1], 2], x1 = emb[[ind]]@data@data[el[, 2], 1], y1 = emb[[ind]]@data@data[el[, 2], 2], col = "#00000010") } plot(emb[[2]]@data@data, type = "n", bty = "n") add_knn_graph(2) points(emb[[2]]@data@data, col = dimRed:::colorize(ss@meta), pch = 16) add_label("c") plot(emb[[ind_max]]@data@data, type = "n", bty = "n") add_knn_graph(ind_max) points(emb[[ind_max]]@data@data, col = dimRed:::colorize(ss@meta), pch = 16) add_label("d") plot(emb[[length(emb)]]@data@data, type = "n", bty = "n") add_knn_graph(length(emb)) points(emb[[length(emb)]]@data@data, col = dimRed:::colorize(ss@meta), pch = 16) add_label("e") } else { plot(1:10) plot(1:10) plot(1:10) } @ \includegraphics[width=.95\textwidth]{figure/select_k-1.pdf} \includegraphics[width=.95\textwidth]{figure/knngraphs-1.pdf} \caption[estimating $k$ using @Q_\text{local}]{% Using \pkg{dimRed} and the $Q_\text{local}$ indicator to estimate a good value for the parameter $k$ in Isomap. % (a) $Q_\text{local}$ for different values of $k$, the vertical red line indicates the maximum $k_{\text{max}}$. % (b) The original data set, a 2 dimensional manifold bent in an S-shape in 3 dimensional space. % Bottom row: Embeddings and $k$-NNG for different values of $k$. % (c) When $k = 5$, the value for $k$ is too small resulting in holes in the embedding, the manifold itself is still unfolded correctly. % (d) Choose $k = k_\text{max}$, the best representation of the original manifold in two dimensions achievable with Isomap. % (e) $k = 100$, too large, the $k$-NNG does not approximate the manifold any more. 
% }\label{fig:knn} \end{figure} <>= ## Load data ss <- loadDataSet("3D S Curve", n = 500) ## Parameter space kk <- floor(seq(5, 100, length.out = 40)) ## Embedding over parameter space emb <- lapply(kk, function(x) embed(ss, "Isomap", knn = x)) ## Quality over embeddings qual <- sapply(emb, function(x) quality(x, "Q_local")) ## Find best value for K ind_max <- which.max(qual) k_max <- kk[ind_max] @ Figure~\ref{fig:knn}a shows how the $Q_{\text{local}}$ criterion changes when varying the neighborhood size $k$ for Isomap, the gray lines in Figure~\ref{fig:knn} represent the edges of the $k$-NN Graph. % If the value for $k$ is too low, the inner structure of the manifold will still be recovered, but it will be imperfect (Figure~\ref{fig:knn}c, note that the holes appear in places that are not covered by the edges of the $k$-NN Graph), therefore the $Q_{\text{local}}$ score is lower than optimal. % If $k$ is too large, the error of the embedding is much larger due to short circuiting and we observe a very steep drop in the $Q_{\text{local}}$ score. % The short circuiting can be observed in Figure~\ref{fig:knn}e with the edges that cross the gap between the tips and the center of the S-shape. % % Example 3: It is also very easy to compare across methods and quality scores. % The following code produces a matrix of quality scores and methods, where \code{dimRedMethodList} returns a character vector with all methods. A visualization of the matrix can be found in Figure~\ref{fig:qualityexample}. % \begin{figure}[htp] \centering <<"plot_quality",include=FALSE>>= if(Sys.getenv("BNET_BUILD_VIGNETTE") != "") { embed_methods <- dimRedMethodList() quality_methods <- c("Q_local", "Q_global", "AUC_lnK_R_NX", "cophenetic_correlation") iris_data <- loadDataSet("Iris") quality_results <- matrix( NA, length(embed_methods), length(quality_methods), dimnames = list(embed_methods, quality_methods) ) embedded_data <- list() for (e in embed_methods) { try(embedded_data[[e]] <- embed(iris_data, e)) for (q in quality_methods) try(quality_results[e,q] <- quality(embedded_data[[e]], q)) } quality_results <- quality_results[order(rowMeans(quality_results)), ] palette(c("#1b9e77", "#d95f02", "#7570b3", "#e7298a", "#66a61e")) col_hsv <- rgb2hsv(col2rgb(palette())) ## col_hsv["v", ] <- col_hsv["v", ] * 3 / 1 palette(hsv(col_hsv["h",], col_hsv["s",], col_hsv["v",])) par(mar = c(2, 8, 0, 0) + 0.1) barplot(t(quality_results), beside = TRUE, col = 1:4, legend.text = quality_methods, horiz = TRUE, las = 1, cex.names = 0.85, args.legend = list(x = "topleft", bg = "white", cex = 0.8)) } else { plot(1:10) } @ \includegraphics[width=.5\textwidth]{figure/plot_quality-1.pdf} \caption[Quality comparision]{% A visualization of the \code{quality\_results} matrix. % The methods are ordered by mean quality score. % The reconstruction error was omitted, because a higher value means a worse embedding, while in the present methods a higher score means a better embedding. % Parameters were not tuned for the example, therefore it should not be seen as a general quality assessment of the methods. 
% }\label{fig:qualityexample} \end{figure} <<eval=FALSE>>= embed_methods <- dimRedMethodList() quality_methods <- c("Q_local", "Q_global", "AUC_lnK_R_NX", "cophenetic_correlation") scurve <- loadDataSet("3D S Curve", n = 2000) quality_results <- matrix( NA, length(embed_methods), length(quality_methods), dimnames = list(embed_methods, quality_methods) ) embedded_data <- list() for (e in embed_methods) { embedded_data[[e]] <- embed(scurve, e) for (q in quality_methods) try(quality_results[e, q] <- quality(embedded_data[[e]], q)) } @ This example showcases how easily different methods and quality criteria can be combined. % Because the results depend strongly on the method parameters, this kind of analysis should not be applied without tuning the parameters for each method separately. % There is no automated way to tune parameters in \pkg{dimRed}; a minimal manual tuning loop is sketched at the end of this document. % \section{Conclusion} \label{sec:conc} This paper presents the \pkg{dimRed} and \pkg{coRanking} packages and provides a brief overview of the methods implemented therein. % The \pkg{dimRed} package is written in R, one of the most popular languages for data analysis, and is freely available from CRAN. % The package is object oriented and completely open source, which makes it easy to obtain and to extend. % Although most of the DR methods already had implementations in R, \pkg{dimRed} adds some new methods for dimensionality reduction, and \pkg{coRanking} adds methods for independent quality control of DR methods to the R ecosystem. % DR is a widely used technique; however, due to the lack of easily usable tools, choosing the right method for DR is complex and depends on a variety of factors. % The \pkg{dimRed} package aims to facilitate experimentation with different techniques, parameters, and quality measures so that choosing the right method becomes easier. % It enables the user to objectively compare methods that rely on very different algorithmic approaches. % It also makes the programmer's life easier, because all methods are aggregated in one place and can be accessed through a single interface and standardized classes. % \section{Acknowledgments} \label{sec:ack} We thank Dr.\ G.\ Camps-Valls and an anonymous reviewer for many useful comments. % This study was supported by the European Space Agency (ESA) via the Earth System Data Lab project (\url{http://earthsystemdatacube.org}) and the EU via the H2020 project BACI, grant agreement No 640176.
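% The manual tuning loop mentioned above can be sketched as follows. % This sketch is illustrative only and is not part of the package or of the analyses above: the parameter grids, the chosen values, and the object names \code{param\_grids} and \code{best\_embeddings} are arbitrary assumptions. % For each method a small hand-picked grid is evaluated and the embedding with the best $Q_\text{local}$ score is kept before the methods are compared. %
<<eval=FALSE>>=
## Illustrative manual tuning sketch (hypothetical parameter grids, adjust to your data):
## for each method, keep the embedding with the best Q_local score.
ss <- loadDataSet("3D S Curve", n = 500)
param_grids <- list(
  Isomap = lapply(c(10, 25, 50), function(k) list(knn = k)),
  kPCA   = lapply(c(0.1, 1, 10),
                  function(s) list(kernel = "rbfdot", kpar = list(sigma = s)))
)
best_embeddings <- lapply(names(param_grids), function(m) {
  ## embed once per parameter set, then pick the best-scoring embedding
  embs <- lapply(param_grids[[m]], function(p)
    do.call(embed, c(list(.data = ss, .method = m), p)))
  embs[[which.max(sapply(embs, quality, "Q_local"))]]
})
names(best_embeddings) <- names(param_grids)
@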
% \bibliographystyle{abbrvnat} \bibliography{bibliography} \end{document} dimRed/inst/doc/dimensionality-reduction.R0000644000176200001440000001473413464323406020363 0ustar liggesusers## ----"pca_isomap_example",include=FALSE,fig.width=4,fig.height=4--------- if(Sys.getenv("BNET_BUILD_VIGNETTE") != "") { library(dimRed); library(ggplot2); #library(dplyr); library(tidyr) ## define which methods to apply embed_methods <- c("Isomap", "PCA") ## load test data set data_set <- loadDataSet("3D S Curve", n = 1000) ## apply dimensionality reduction data_emb <- lapply(embed_methods, function(x) embed(data_set, x)) names(data_emb) <- embed_methods ## plot data set, embeddings, and quality analysis ## plot(data_set, type = "3vars") ## lapply(data_emb, plot, type = "2vars") ## plot_R_NX(data_emb) add_label <- function(label) grid::grid.text(label, 0.2, 1, hjust = 0, vjust = 1, gp = grid::gpar(fontface = "bold", cex = 1.5)) ## pdf('~/phd/text/dimRedPackage/plots/plot_example.pdf', width = 4, height = 4) ## plot the results plot(data_set, type = "3vars", angle = 15, mar = c(3, 3, 0, 0), box = FALSE, grid = FALSE, pch = 16) add_label("a") par(mar = c(4, 4, 0, 0) + 0.1, bty = "n", las = 1) plot(data_emb$Isomap, type = "2vars", pch = 16) add_label("b") plot(data_emb$PCA, type = "2vars", pch = 16) add_label("d") ## calculate quality scores print( plot_R_NX(data_emb) + theme(legend.title = element_blank(), legend.position = c(0.5, 0.1), legend.justification = c(0.5, 0.1)) ) add_label("c") } else { # These cannot all be plot(1:10)!!! It's a mistery to me. plot(1:10) barplot(1:10) hist(1:10) plot(1:10) } ## ----eval=FALSE---------------------------------------------------------- # ## define which methods to apply # embed_methods <- c("Isomap", "PCA") # ## load test data set # data_set <- loadDataSet("3D S Curve", n = 1000) # ## apply dimensionality reduction # data_emb <- lapply(embed_methods, function(x) embed(data_set, x)) # names(data_emb) <- embed_methods # ## figure \ref{fig:plotexample}a, the data set # plot(data_set, type = "3vars") # ## figures \ref{fig:plotexample}b (Isomap) and \ref{fig:plotexample}d (PCA) # lapply(data_emb, plot, type = "2vars") # ## figure \ref{fig:plotexample}c, quality analysis # plot_R_NX(data_emb) ## ----include=FALSE------------------------------------------------------- if(Sys.getenv("BNET_BUILD_VIGNETTE") != "") { library(dimRed) library(cccd) ## Load data ss <- loadDataSet("3D S Curve", n = 500) ## Parameter space kk <- floor(seq(5, 100, length.out = 40)) ## Embedding over parameter space emb <- lapply(kk, function(x) embed(ss, "Isomap", knn = x)) ## Quality over embeddings qual <- sapply(emb, function(x) quality(x, "Q_local")) ## Find best value for K ind_max <- which.max(qual) k_max <- kk[ind_max] add_label <- function(label){ par(xpd = TRUE) b = par("usr") text(b[1], b[4], label, adj = c(0, 1), cex = 1.5, font = 2) par(xpd = FALSE) } names(qual) <- kk } ## ----"select_k",include=FALSE,fig.width=11,fig.height=5------------------ if(Sys.getenv("BNET_BUILD_VIGNETTE") != "") { par(mfrow = c(1, 2), mar = c(5, 4, 0, 0) + 0.1, oma = c(0, 0, 0, 0)) plot(kk, qual, type = "l", xlab = "k", ylab = expression(Q[local]), bty = "n") abline(v = k_max, col = "red") add_label("a") plot(ss, type = "3vars", angle = 15, mar = c(3, 3, 0, 0), box = FALSE, grid = FALSE, pch = 16) add_label("b") } else { plot(1:10) plot(1:10) } ## ----"knngraphs",include=FALSE,fig.width=8,fig.height=3------------------ if(Sys.getenv("BNET_BUILD_VIGNETTE") != "") { par(mfrow = c(1, 3), mar = c(5, 4, 0, 0) + 0.1, 
oma = c(0, 0, 0, 0)) add_knn_graph <- function(ind) { nn1 <- nng(ss@data, k = kk[ind]) el <- get.edgelist(nn1) segments(x0 = emb[[ind]]@data@data[el[, 1], 1], y0 = emb[[ind]]@data@data[el[, 1], 2], x1 = emb[[ind]]@data@data[el[, 2], 1], y1 = emb[[ind]]@data@data[el[, 2], 2], col = "#00000010") } plot(emb[[2]]@data@data, type = "n", bty = "n") add_knn_graph(2) points(emb[[2]]@data@data, col = dimRed:::colorize(ss@meta), pch = 16) add_label("c") plot(emb[[ind_max]]@data@data, type = "n", bty = "n") add_knn_graph(ind_max) points(emb[[ind_max]]@data@data, col = dimRed:::colorize(ss@meta), pch = 16) add_label("d") plot(emb[[length(emb)]]@data@data, type = "n", bty = "n") add_knn_graph(length(emb)) points(emb[[length(emb)]]@data@data, col = dimRed:::colorize(ss@meta), pch = 16) add_label("e") } else { plot(1:10) plot(1:10) plot(1:10) } ## ----eval=FALSE---------------------------------------------------------- # ## Load data # ss <- loadDataSet("3D S Curve", n = 500) # ## Parameter space # kk <- floor(seq(5, 100, length.out = 40)) # ## Embedding over parameter space # emb <- lapply(kk, function(x) embed(ss, "Isomap", knn = x)) # ## Quality over embeddings # qual <- sapply(emb, function(x) quality(x, "Q_local")) # ## Find best value for K # ind_max <- which.max(qual) # k_max <- kk[ind_max] ## ----"plot_quality",include=FALSE---------------------------------------- if(Sys.getenv("BNET_BUILD_VIGNETTE") != "") { embed_methods <- dimRedMethodList() quality_methods <- c("Q_local", "Q_global", "AUC_lnK_R_NX", "cophenetic_correlation") iris_data <- loadDataSet("Iris") quality_results <- matrix( NA, length(embed_methods), length(quality_methods), dimnames = list(embed_methods, quality_methods) ) embedded_data <- list() for (e in embed_methods) { try(embedded_data[[e]] <- embed(iris_data, e)) for (q in quality_methods) try(quality_results[e,q] <- quality(embedded_data[[e]], q)) } quality_results <- quality_results[order(rowMeans(quality_results)), ] palette(c("#1b9e77", "#d95f02", "#7570b3", "#e7298a", "#66a61e")) col_hsv <- rgb2hsv(col2rgb(palette())) ## col_hsv["v", ] <- col_hsv["v", ] * 3 / 1 palette(hsv(col_hsv["h",], col_hsv["s",], col_hsv["v",])) par(mar = c(2, 8, 0, 0) + 0.1) barplot(t(quality_results), beside = TRUE, col = 1:4, legend.text = quality_methods, horiz = TRUE, las = 1, cex.names = 0.85, args.legend = list(x = "topleft", bg = "white", cex = 0.8)) } else { plot(1:10) } ## ----eval=FALSE---------------------------------------------------------- # embed_methods <- dimRedMethodList() # quality_methods <- c("Q_local", "Q_global", "AUC_lnK_R_NX", # "cophenetic_correlation") # scurve <- loadDataSet("3D S Curve", n = 2000) # quality_results <- matrix( # NA, length(embed_methods), length(quality_methods), # dimnames = list(embed_methods, quality_methods) # ) # # embedded_data <- list() # for (e in embed_methods) { # embedded_data[[e]] <- embed(scurve, e) # for (q in quality_methods) # try(quality_results[e, q] <- quality(embedded_data[[e]], q)) # } dimRed/tests/0000755000176200001440000000000013464323464012631 5ustar liggesusersdimRed/tests/testthat.R0000644000176200001440000000012513463767504014620 0ustar liggesuserslibrary(testthat) library(dimRed) test_check("dimRed", reporter = LocationReporter) dimRed/tests/testthat/0000755000176200001440000000000013464507337014474 5ustar liggesusersdimRed/tests/testthat/test_drr.R0000644000176200001440000000052313371631672016442 0ustar liggesusers context("DRR") test_that("drr forward and backward passes", { spiral <- loadDataSet("Helix", n = 200) 
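## Round trip: embed into 3 dimensions, project the original data with @apply,
## then map it back with @inverse -- the reconstruction should equal the input.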
drr_spiral <- embed(spiral, "DRR", ndim = 3, .mute = c("message", "output")) expect_equal(3, getNDim(drr_spiral)) dsa <- drr_spiral@apply(spiral) dsi <- drr_spiral@inverse(dsa) expect_equal(dsi, spiral) }) dimRed/tests/testthat/test_dimRedData.R0000644000176200001440000000260113371631672017650 0ustar liggesusers context("the dimRedData class") test_that("constructor", { expect_equal(dimRedData(), new("dimRedData", data = matrix(numeric(0), nrow = 0, ncol = 0), meta = data.frame())) expect_error(dimRedData(iris)) expect_s4_class(dimRedData(iris[, 1:4], iris[, 5]), "dimRedData") expect_s4_class(dimRedData(iris[, 1:4]), "dimRedData") expect_error(dimRedData(iris)) }) test_that("conversion functions", { expect_equal(as(iris[, 1:4], "dimRedData"), dimRedData(iris[, 1:4])) expect_error(as(iris, "dimRedData")) expect_equal(as(loadDataSet("Iris"), "data.frame"), as.data.frame(loadDataSet("Iris"))) expect_equivalent(as.dimRedData( Species ~ Sepal.Length + Sepal.Width + Petal.Length + Petal.Width, iris), loadDataSet("Iris") ) }) test_that("misc functions", { Iris <- loadDataSet("Iris") expect_equal(getData(Iris), Iris@data) expect_equal(getMeta(Iris), Iris@meta) ## No idea why this one is broken with --run-donttest --run-dontrun --timings ## Also broken for devtools::test("dimRed") expect_equal(nrow(Iris), 150) expect_equal(Iris[1:4], Iris[1:4, ]) expect_equal(Iris[1:4], Iris[c(rep(TRUE, 4), rep(FALSE, 146))]) expect_equal(Iris[1:4], Iris[c(rep(TRUE, 4), rep(FALSE, 146)), ]) }) dimRed/tests/testthat/test_dimRedResult.R0000644000176200001440000000106613424014250020243 0ustar liggesusers context("dimRedResult-class") test_that("predict/inverse methods", { dat <- loadDataSet("Iris") emb <- embed(dat, "PCA", ndim = 4) pred <- predict(emb, dat) inv <- inverse(emb, pred) expect_equal(getDimRedData(emb), pred) expect_equal(dat, inv) emb2 <- embed(dat, "tSNE") expect_error(predict(emb2, dat)) expect_error(inverse(emb2, dat)) }) test_that("conversion", { iris_data_frame_as <- as(embed(loadDataSet("Iris"), "PCA"), "data.frame") expect_equal(colnames(iris_data_frame_as), c("meta.Species", "PC1", "PC2", colnames(iris)[-5])) }) dimRed/tests/testthat/test_autoencoder.R0000644000176200001440000002210013424035176020152 0ustar liggesusers context("AutoEncoder") skip_if_no_tensorflow <- function() { if (!reticulate::py_module_available("tensorflow") && Sys.getenv("BNET_FORCE_AUTOENCODER_TESTS") != "1") skip("TensorFlow not available for testing") } skip_if_no_keras <- function() { if (!keras::is_keras_available() && Sys.getenv("BNET_FORCE_AUTOENCODER_TESTS") != "1") skip("Keras not available for testing") } test_that("Check if tensorflow is installed correctly.", { skip_if_no_tensorflow() library(tensorflow) # I have not found a way to suppress the warning tf gives on first use. sess <- tf$Session() hello <- "Hello, TensorFlow!" 
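## Round-trip a string through a TensorFlow constant: if the session works,
## running the graph returns the same string (as a `bytes` object under Python 3).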
tf_hello <- tf$constant(hello) tf_hello_res <- sess$run(tf_hello) # in python 3 this returns a `bytes` object $decode() transforms it into a # sting, in python 2 this is a simple string if(!is.character(tf_hello_res)) tf_hello_res <- tf_hello_res$decode() expect(tf_hello_res == hello, paste("tensorflow does not work:\n", "hello =", hello, "\n", "sess$run(tf_hello) =", tf_hello_res)) }) test_that("Check errors when building autoencoder.", { skip_if_no_tensorflow() iris_data <- as(iris[, 1:4], "dimRedData") expect_error(embed(iris_data, "AutoEncoder", activation = "sigmoid"), "declare an activation function for each layer") expect_error(embed(iris_data, "AutoEncoder", n_hidden = c(1, 2, 2, 1)), "the number of layers must be impair") expect_error(embed(iris_data, "AutoEncoder", weight_decay = -1), "weight decay must be > 0") expect_error(embed(iris_data, "AutoEncoder", learning_rate = -1), "learning rate must be > 0") expect_error(embed(iris_data, "AutoEncoder", n_steps = -1), "n_steps must be > 0") expect_error(embed(iris_data, "AutoEncoder", n_hidden = c(4, 2, 4), ndim = 3), "the middle of n_hidden must be equal to ndim") }) test_that("using autoencoder with parameters", { skip_if_no_tensorflow() iris_data <- as(iris[, 1:4], "dimRedData") expect_equal(class(iris_data)[1], "dimRedData") ae <- lapply(1:2, function(x) embed(iris_data, "AutoEncoder", n_hidden = c(10, x, 10), ndim = x, n_steps = 100)) aq <- lapply(ae, function(x) quality(x, "reconstruction_rmse")) lapply(ae, function(x) expect_s4_class(x, "dimRedResult")) ## expect(aq[[1]] > aq[[2]], "the error should decrease with more dimensions") ## expect(aq[[2]] > aq[[3]], "the error should decrease with more dimensions") ## expect(aq[[3]] > aq[[4]], "the error should decrease with more dimensions") lapply(1:length(ae), function(x) expect_equal(x, getNDim(ae[[x]]))) ae <- lapply(1:2, function(x) embed(iris_data, "AutoEncoder", n_hidden = c(10, x, 10), ndim = x, weight_decay = 0.1, n_steps = 100)) aq <- lapply(ae, function(x) quality(x, "reconstruction_rmse")) lapply(ae, function(x) expect_s4_class(x, "dimRedResult")) ## expect(aq[[1]] > aq[[2]], "the error should decrease with more dimensions") ## expect(aq[[2]] > aq[[3]], "the error should decrease with more dimensions") ## expect(aq[[3]] > aq[[4]], "the error should decrease with more dimensions") lapply(1:length(ae), function(x) expect_equal(x, getNDim(ae[[x]]))) ae <- lapply(1:2, function(x) embed(iris_data, "AutoEncoder", n_hidden = c(10, x, 10), ndim = x, learning_rate = 0.1, weight_decay = 0.1, n_steps = 100)) aq <- lapply(ae, function(x) quality(x, "reconstruction_rmse")) lapply(ae, function(x) expect_s4_class(x, "dimRedResult")) ## expect(aq[[1]] > aq[[2]], "the error should decrease with more dimensions") ## expect(aq[[2]] > aq[[3]], "the error should decrease with more dimensions") ## expect(aq[[3]] > aq[[4]], "the error should decrease with more dimensions") lapply(1:length(ae), function(x) expect_equal(x, getNDim(ae[[x]]))) ae <- lapply(1:2, function(x) embed(iris_data, "AutoEncoder", n_hidden = c(10, x, 10), activation = c("sigmoid", "sigmoid", "sigmoid"), ndim = x, learning_rate = 0.1, weight_decay = 0.1, n_steps = 100)) aq <- lapply(ae, function(x) quality(x, "reconstruction_rmse")) lapply(ae, function(x) expect_s4_class(x, "dimRedResult")) aa <- lapply(c("tanh", "sigmoid", "relu", "elu"), function(x) embed(iris_data, "AutoEncoder", n_hidden = c(10, 2, 10), activation = c("sigmoid", "sigmoid", "sigmoid"), ndim = 2, learning_rate = 0.1, weight_decay = 0.1, n_steps = 100)) aaq 
<- lapply(aa, function(x) quality(x, "reconstruction_rmse")) lapply(aa, function(x) expect_s4_class(x, "dimRedResult")) ## expect(aq[[1]] > aq[[2]], "the error should decrease with more dimensions") ## expect(aq[[2]] > aq[[3]], "the error should decrease with more dimensions") ## expect(aq[[3]] > aq[[4]], "the error should decrease with more dimensions") lapply(1:length(ae), function(x) expect_equal(x, getNDim(ae[[x]]))) }) test_that("using autoencoder with autoencoder results", { skip_if_no_tensorflow() tensorflow::tf$set_random_seed(2) iris_data <- as(iris[, 1:4], "dimRedData") expect_equal(class(iris_data)[1], "dimRedData") ae1 <- lapply(1:2, function(x) embed(iris_data, "AutoEncoder", n_hidden = c(10, x, 10), ndim = x, n_steps = 1)) aq1 <- lapply(ae1, function(x) quality(x, "reconstruction_rmse")) ae2 <- lapply(ae1, function(x) embed(iris_data, "AutoEncoder", autoencoder = x, n_steps = 1000)) aq2 <- lapply(ae2, function(x) quality(x, "reconstruction_rmse")) lapply(ae1, function(x) expect_s4_class(x, "dimRedResult")) lapply(ae2, function(x) expect_s4_class(x, "dimRedResult")) expect(aq1[[1]] > aq2[[1]], "the error should decrease with more steps") expect(aq1[[2]] > aq2[[2]], "the error should decrease with more steps") ## expect(aq1[[3]] > aq2[[3]], "the error should decrease with more steps") ## expect(aq1[[4]] > aq2[[4]], "the error should decrease with more steps") lapply(1:length(ae1), function(x) expect_equal(x, getNDim(ae1[[x]]))) lapply(1:length(ae2), function(x) expect_equal(x, getNDim(ae2[[x]]))) }) test_that("using autoencoder with keras", { skip_if_no_tensorflow() skip_if_no_keras() encoder <- function(i) list(keras::layer_dense(units = 10, activation = "tanh"), keras::layer_dense(units = i)) decoder <- function() list(keras::layer_dense(units = 10, activation = "tanh"), keras::layer_dense(units = 4)) iris_data <- as(iris[, 1:4], "dimRedData") ae1 <- lapply(1:2, function(x) embed(iris_data, "AutoEncoder", keras_graph = list(encoder = encoder(x), decoder = decoder()), n_steps = 2)) aq1 <- lapply(ae1, function(x) quality(x, "reconstruction_rmse")) ae2 <- lapply(ae1, function(x) embed(iris_data, "AutoEncoder", autoencoder = x)) aq2 <- lapply(ae2, function(x) quality(x, "reconstruction_rmse")) lapply(ae1, function(x) expect_s4_class(x, "dimRedResult")) lapply(ae2, function(x) expect_s4_class(x, "dimRedResult")) ## expect(aq1[[1]] > aq2[[1]], "the error should decrease with more steps") ## expect(aq1[[2]] > aq2[[2]], "the error should decrease with more steps") ## expect(aq1[[3]] > aq2[[3]], "the error should decrease with more steps") ## expect(aq1[[4]] > aq2[[4]], "the error should decrease with more steps") lapply(1:length(ae1), function(x) expect_equal(x, getNDim(ae1[[x]]))) lapply(1:length(ae2), function(x) expect_equal(x, getNDim(ae2[[x]]))) }) ## test_that("garbage collection", { ## skip_if_no_tensorflow() ## tmp <- tf$get_session_handle(environment(ae[[1]]@apply)$dec) ## tmp <- tf$get_default_session() ## tmp$close ## tmp ## tf$get_session_handle() ## tf$Session() ## }) dimRed/tests/testthat/test_HLLE.R0000644000176200001440000000024713424014250016364 0ustar liggesuserscontext("HLLE") test_that("HLLE", { expect_error(embed(iris[1:4], "HLLE", ndim = 1, .mute = c("message", "output")), "ndim must be 2 or larger.") }) dimRed/tests/testthat/test_NNMF.R0000644000176200001440000001255613424020513016403 0ustar liggesusersskip_if_no_NMF <- function() { if (!requireNamespace("NMF", quietly = TRUE) && Sys.getenv("BNET_FORCE_NNMF_TESTS") != "1") skip("NMF not available for testing") 
} context("NNMF") ## if we don't load the library explicitly, the predict function does not work ## (sometimes...). ## library(NMF) ints_trn <- matrix(seq(0, 98, by = 2), ncol = 5) input_trn <- dimRedData(as.data.frame(ints_trn)) input_tst <- dimRedData(ints_trn[1:3,] + 1) test_that("2D projection", { skip_if_no_NMF() dim_2_defaults <- embed(input_trn, "NNMF", seed = 13, nrun = 1) expect_equal(dim_2_defaults@method, "NNMF") ## Expected results from ## tmp <- NMF::nmf(t(ints_trn), rank = 2, nrun = 1, seed = 13) ## coefs <- basis(tmp) ## rownames(coefs) <- paste0("V", 1:5) ## colnames(coefs) <- paste0("NNMF", 1:2) ## coefs ## dput(coefs) dim_2_coef <- structure( c(18.807241710186, 30.2191667888959, 32.1069052462692, 9.53490906878683, 164.109205703974, 0.00064246562138093, 24.3924277525021, 56.4301459918642, 108.103923297376, 17.566220349863), .Dim = c(5L, 2L), .Dimnames = list(c("V1", "V2", "V3", "V4", "V5"), c("NNMF1", "NNMF2"))) expect_equivalent(dim_2_defaults@other.data$w, dim_2_coef) dim_2_apply <- dim_2_defaults@apply(input_tst)@data dim_2_pred <- predict(dim_2_defaults, input_tst)@data ## Expected results from ## t(solve(crossprod(basis(tmp)), t(input_tst@data %*% basis(tmp)))) ## preds <- getData(input_tst) %*% t(MASS::ginv(basis(tmp))) ## getData(getDimRedData(dim_2_defaults)) ## colnames(preds) <- paste0("NNMF", 1:2) ## dput(preds) dim_2_exp <- structure( c(0.427476458116875, 0.440237021147746, 0.452997584178617, 0.512256378881175, 0.5332094651398, 0.554162551398426), .Dim = c(3L, 2L), .Dimnames = list(NULL, c("NNMF1", "NNMF2")) ) expect_equivalent(dim_2_apply, dim_2_exp, tolerance = 0.01) expect_equivalent(dim_2_pred, dim_2_exp, tolerance = 0.01) }) test_that("other arguments", { skip_if_no_NMF() dim_3_args <- embed(input_trn, "NNMF", seed = 13, nrun = 10, ndim = 3, method = "KL", options = list(.pbackend = NULL)) ## Expected results from ## tmp <- NMF::nmf(t(ints_trn), rank = 3, nrun = 10, seed = 13, ## method = "KL", .pbackend = NULL) ## coefs <- t(NMF::coef(tmp)) ## colnames(coefs) <- paste0("NNMF", 1:ncol(coefs)) ## coefs ## dput(coefs) ## rot <- NMF::basis(tmp) ## rownames(rot) <- paste0("V", 1:nrow(rot)) ## dput(rot) dim_3_rot <- structure( c(11.624951277152, 31.2554213278975, 50.8858913786408, 70.5163614293837, 90.1468314801264, 2.22044604925031e-16, 36.4357899711133, 72.8715799422292, 109.307369913346, 145.743159884462, 22.4019808842378, 42.1081005773292, 61.8142202704197, 81.52033996351, 101.2264596566), .Dim = c(5L, 3L), .Dimnames = list(c("V1", "V2", "V3", "V4", "V5"), NULL) ) dim_3_pred <- structure( c(2.22044604925031e-16, 0.0731742704517501, 0.194863499580201, 0.50224638618713, 0.557517908619563, 0.197219538171418, 0.0860784848917408, 0.159094934700865, 0.10366866301249, 0.216483929440989, 0.54891083782883, 0.481738298195276, 0.40204352636632, 0.274419226004639, 0.211867578024856, 0.256578985276104, 0.236980211423017, 0.16984840699324, 0.135869049278152, 0.0584647425861749, 2.22044604925031e-16, 0.0513058500137363, 0.0774360678481537, 0.00720517673339281, 0.0678012129377125, 0.344046917890136, 0.49099862480747, 0.542386371921862, 0.660426277478513, 0.691161417731563), .Dim = c(10L, 3L), .Dimnames = list(NULL, c("NNMF1", "NNMF2", "NNMF3")) ) expect_equivalent(dim_3_args@other.data$w, dim_3_rot) expect_equivalent(getData(getDimRedData(dim_3_args)), dim_3_pred) dim_3_apply <- dim_3_args@apply(input_tst)@data dim_3_pred <- predict(dim_3_args, input_tst)@data ## Expected results from ## crossprod(basis(tmp)) does not have full rank!!! 
This needs to be considered ## w <- getOtherData(dim_3_args)$w ## preds <- t(solve(crossprod(w), t(input_trn@data %*% w))) ## preds <- t(qr.solve(crossprod(w), t(input_trn@data %*% w))) ## preds <- getData(input_tst) %*% t(MASS::ginv(w)) ## preds ## dput(preds) ## getData(getDimRedData(dim_3_args)) ## preds - getData(getDimRedData(dim_3_args)) ## input_trn@data ## input_tst@data %*% basis(tmp) ## colnames(preds) <- paste0("NNMF", 1:3) dim_3_exp <- structure( c(0.118730450278164, 0.144080695556738, 0.169430940835312, 0.494122495652466, 0.439293850852014, 0.384465206051563, -0.0169733070286198, 0.0591496323928872, 0.135272571814394), .Dim = c(3L, 3L) ) expect_equivalent(dim_3_apply, dim_3_exp, tolerance = 0.01) expect_equivalent(dim_3_pred, dim_3_exp, tolerance = 0.01) }) test_that("Bad args", { skip_if_no_NMF() expect_error(embed(iris, "NNMF")) expect_error(embed(iris[, 1], "NNMF"), "`ndim` should be less than the number of columns") expect_error(embed(iris[1:4], "NNMF", method = c("a", "b")), "only supply one `method`") expect_error(embed(scale(iris[1:4]), "NNMF"), "negative entries") }) test_that("Full_rank", { dim_2_full_rank_example <- embed(input_trn, "NNMF", ndim = ncol(input_trn@data)) dim_2_recon <- inverse(dim_2_full_rank_example, dim_2_full_rank_example@data@data) expect_equivalent(dim_2_recon, input_trn) }) dimRed/tests/testthat/test_umap.R0000644000176200001440000000162013424030326016600 0ustar liggesusers context("UMAP") skip_if_no_umap_learn <- function() { if (!reticulate::py_module_available("umap") && Sys.getenv("BNET_FORCE_UMAP_TESTS") != 1) skip("umap-learn not available, install with `pip install umap-learn`") } test_that("UMAP python", { skip_if_no_umap_learn() res <- embed(iris[1:4], "UMAP", .mute = c("message", "output")) expect_s4_class(res, "dimRedResult") expect_equal(res@method, "UMAP") expect_equal(res@pars$d, "euclidean") expect_equal(res@pars$knn, 15) expect_equal(res@pars$method, "umap-learn") expect_equal(res@pars$ndim, 2) }) test_that("UMAP R", { res <- embed(iris[1:4], "UMAP", method = "naive", .mute = c("message", "output")) expect_s4_class(res, "dimRedResult") expect_equal(res@method, "UMAP") expect_equal(res@pars$d, "euclidean") expect_equal(res@pars$knn, 15) expect_equal(res@pars$method, "naive") expect_equal(res@pars$ndim, 2) }) dimRed/tests/testthat/test_all.R0000644000176200001440000000305313373523703016421 0ustar liggesuserscontext("high level functions") test_that("high level functions working?", { embed_methods <- dimRedMethodList() quality_methods <- dimRedQualityList() scurve <- loadDataSet("3D S Curve", n = 300) for(i in 1:ncol(scurve@data)){ scurve@data[, i] <- scurve@data[, i] - min(scurve@data[, i]) } quality_results <- matrix(NA, length(embed_methods), length(quality_methods), dimnames = list(embed_methods, quality_methods)) embedded_data <- list() for (e in embed_methods) { message("embedding: ", e) if ((e != "AutoEncoder" || reticulate::py_module_available("tensorflow")) && (e != "UMAP" || reticulate::py_module_available("umap-learn")) && (e != "PCA_L1" || ("pcaL1" %in% rownames(installed.packages()))) ) { suppressWarnings( embedded_data[[e]] <- embed( scurve, e, .mute = c("message", "output"))) for (q in quality_methods) { message(" quality: ", q) quality_results[e, q] <- tryCatch( suppressWarnings(quality(embedded_data[[e]], q, .mute = c("message", "output"))), error = function (e) NA ) } } } lapply(embedded_data, function(x) expect_equal(2, getNDim(x))) expect(inherits(quality_results, "matrix"), "should be matrix") 
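## the collected quality scores must be stored as plain numeric ("double") values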
expect(storage.mode(quality_results) == "double", 'storage should be "double"') }) dimRed/tests/testthat/test_misc.R0000644000176200001440000000176013371631672016612 0ustar liggesuserscontext("misc functions") a <- matrix(rnorm(25), 5, 5) b <- matrix(rnorm(25), 5, 5) test_that("squared euclidean distance", { expect_equivalent( t(as.matrix(dist(rbind(a, b)))[6:10, 1:5] ^ 2), pdist2(a, b) ) }) test_that("formula functions", { expect_equal(rhs(a + b ~ c + d), ~ c + d + 0) expect_equal(lhs(a + b ~ c + d), ~ a + b + 0) }) test_that("makeEpsGraph", { check_makeEpsGraph <- function(x, eps){ naive <- as.matrix(dist(x)) naive[naive >= eps] <- 0 epsSp <- as.matrix(makeEpsSparseMatrix(x, eps)) all(naive == epsSp) } expect_true(check_makeEpsGraph(iris[1:4], 1000)) expect_true(check_makeEpsGraph(iris[1:4], 1)) expect_true(check_makeEpsGraph(iris[1:4], 0.5)) }) test_that("getRotationMatrixFail", { irisData <- as(iris[, 1:4], "dimRedData") expect_equal(class(irisData)[1], "dimRedData") irisRes <- embed(irisData, "tSNE") expect_error(getRotationMatrix(irisRes), "Not implemented for") }) dimRed/tests/testthat/test_isomap.R0000644000176200001440000000210413371631672017140 0ustar liggesusers context("isomap") ## no isomap specific tests, because forward method is not really ## exact. test_that("check vs vegan isomap", { eps <- 1e-8 a <- loadDataSet("3D S Curve", n = 200) vegiso <- vegan::isomap(dist(getData(a)), k = 8, ndim = 2) vegy <- vegan::scores(vegiso) drdiso <- embed(a, "Isomap", knn = 8, ndim = 2) drdy <- drdiso@data@data ## Randomly fails: ## expect_equivalent(drdy, vegy) err1 <- max(abs(drdy - vegy)) drdy[, 2] <- -drdy[, 2] err2 <- max(abs(drdy - vegy)) drdy[, 1] <- -drdy[, 1] err3 <- max(abs(drdy - vegy)) drdy[, 2] <- -drdy[, 2] err4 <- max(abs(drdy - vegy)) err <- min(err1, err2, err3, err4) expect_true(err < eps, info = paste0("err = ", err, ", eps = ", eps, ", expected err < eps")) }) test_that("check other.data", { a <- loadDataSet("3D S Curve", n = 200) drdiso <- embed(a, "Isomap", knn = 8, ndim = 2, get_geod = TRUE) expect_true(inherits(getOtherData(drdiso)$geod, "dist")) }) dimRed/tests/testthat/test_dataSets.R0000644000176200001440000000030413024272076017412 0ustar liggesuserscontext("dataSets") test_that("datasets load", { for (d in dataSetList()) { ds <- loadDataSet(d) expect(inherits(ds, "dimRedData"), "must be of class 'dimRedData'") } }) dimRed/tests/testthat/test_PCA.R0000644000176200001440000000504513142050030016235 0ustar liggesusers context("PCA") test_that("general data conversions", { irisData <- as(iris[, 1:4], "dimRedData") expect_equal(class(irisData)[1], "dimRedData") irisParsCS <- list(center = TRUE, scale. = TRUE) irisParsC <- list(center = TRUE, scale. = FALSE) irisParsS <- list(center = FALSE, scale. = TRUE) irisPars <- list(center = FALSE, scale. = FALSE) irisResCS <- do.call(function(...) embed(irisData, "PCA", ...), irisParsCS) irisResS <- do.call(function(...) embed(irisData, "PCA", ...), irisParsS) irisResC <- do.call(function(...) embed(irisData, "PCA", ...), irisParsC) irisRes <- do.call(function(...) 
embed(irisData, "PCA", ...), irisPars) expect_equal(2, getNDim(irisResCS)) expect_equal(2, getNDim(irisResS)) expect_equal(2, getNDim(irisResC)) expect_equal(2, getNDim(irisRes)) expect_equal(class(irisResCS)[1], "dimRedResult") expect_equal(class(irisResS)[1], "dimRedResult") expect_equal(class(irisResC)[1], "dimRedResult") expect_equal(class(irisRes)[1], "dimRedResult") expect_equal(irisResCS@apply(irisData), irisResCS@data) expect_equal(irisResS@apply(irisData), irisResS@data) expect_equal(irisResC@apply(irisData), irisResC@data) expect_equal(irisRes@apply(irisData), irisRes@data) expect(sqrt(mean( (irisResCS@inverse(irisResCS@data)@data - irisData@data) ^ 2 )) < 0.3, "error too large" ) expect(sqrt(mean( (irisResS@inverse(irisResS@data)@data - irisData@data) ^ 2 )) < 0.3, "error too large" ) expect(sqrt(mean( (irisResC@inverse(irisResC@data)@data - irisData@data) ^ 2 )) < 0.3, "error too large" ) expect(sqrt(mean( (irisRes@inverse(irisRes@data)@data - irisData@data) ^ 2 )) < 0.3, "error too large" ) scale2 <- function(x, center, scale.) scale(x, center, scale.) expect_equal( do.call(function(...) scale2(iris[1:4], ...) %*% getRotationMatrix(irisResCS), irisParsCS), getData( getDimRedData(irisResCS) ) ) expect_equal( do.call(function(...) scale2(iris[1:4], ...) %*% getRotationMatrix(irisResS), irisParsS), getData( getDimRedData(irisResS) ) ) expect_equal( do.call(function(...) scale2(iris[1:4], ...) %*% getRotationMatrix(irisResC), irisParsC), getData( getDimRedData(irisResC) ) ) expect_equal( do.call(function(...) scale2(iris[1:4], ...) %*% getRotationMatrix(irisRes), irisPars), getData( getDimRedData(irisRes) ) ) }) dimRed/tests/testthat/test_quality.R0000644000176200001440000000471613463776045017361 0ustar liggesusers context("quality") test_that("quality", { irisData <- loadDataSet("Iris") parsPCA <- list(center = TRUE, scale. = TRUE) resPCA <- do.call(function(...) embed(irisData, "PCA", ...), parsPCA) suppressWarnings( resQual <- list( Q_local(resPCA), Q_global(resPCA), mean_R_NX(resPCA), total_correlation(resPCA), cophenetic_correlation(resPCA), distance_correlation(resPCA), reconstruction_rmse(resPCA) ) ) lapply(resQual, function(x) expect_true(is.numeric(x))) }) test_that("Q_local ndim", { irisData <- loadDataSet("Iris") irisData <- irisData[!duplicated(irisData@data)] parsPCA <- list(center = TRUE, scale. = TRUE, ndim = 4) resPCA <- do.call(function(...) embed(irisData, "PCA", ...), parsPCA) tmp <- sapply(1:4, function(x) quality(resPCA, "Q_local", ndim = x)) expect_equal(rank(tmp), 1:4) }) test_that("rmse_by_ndim", { set.seed(1) ir <- loadDataSet("Iris") ir.drr <- embed(ir, "DRR", .mute = c("message", "output"), ndim = ndims(ir)) ir.pca <- embed(ir, "PCA", ndim = ndims(ir)) rmse_res <- data.frame( drr = reconstruction_error(ir.drr), pca = reconstruction_error(ir.pca) ) for (i in 1:length(rmse_res$pca)) { expect_true(rmse_res$pca[i] - rmse_res$drr[i] + 1e-12 > 0, info = paste0( "ndim = ", i, ", rmse pca = ", rmse_res$pca[i], ", rmse drr = ", rmse_res$drr[i] )) } # expect_true(all((rmse_res$pca - rmse_res$drr) + 1e-12 > 0)) expect_error(reconstruction_error(ir.pca, 5)) expect_error(reconstruction_error(ir.pca, 0)) }) test_that("AUC_lnK_R_NX", { irisData <- loadDataSet("Iris") irisData <- irisData[!duplicated(irisData@data)] parsPCA <- list(center = TRUE, scale. = TRUE, ndim = 4) resPCA <- do.call(function(...) 
embed(irisData, "PCA", ...), parsPCA) expect(length(AUC_lnK_R_NX(resPCA, weight = "inv")) == 1) expect(length(AUC_lnK_R_NX(resPCA, weight = "log")) == 1) expect(length(AUC_lnK_R_NX(resPCA, weight = "ln")) == 1) expect(length(AUC_lnK_R_NX(resPCA, weight = "log10")) == 1) expect_true(AUC_lnK_R_NX(resPCA, weight = "log") == AUC_lnK_R_NX(resPCA, weight = "ln")) expect_error(AUC_lnK_R_NX(resPCA, weight = "asdf")) }) dimRed/tests/testthat/test_dimRedMethod-class.R0000644000176200001440000000071613424014250021311 0ustar liggesuserscontext("dimRedMethod-class") test_that("pars matching", { for (m in dimRedMethodList()) { mo <- getMethodObject(m) expect( all.equal( mo@stdpars, matchPars(mo, list()) ), paste("par matching for", m, "failed") ) } expect_warning( embed(iris[1:4], "PCA", asdf = 1234), "Parameter matching: asdf is not a standard parameter, ignoring." ) }) dimRed/tests/testthat/test_kPCA.R0000644000176200001440000000545513371631672016442 0ustar liggesusers data(iris) context("kPCA") test_that("general data conversions", { irisData <- loadDataSet("Iris") expect_equal(class(irisData)[1], "dimRedData") irisPars <- list() irisPars[[length(irisPars) + 1]] <- list(kernel = "rbfdot", kpar = list(sigma = 0.1)) irisPars[[length(irisPars) + 1]] <- list(kernel = "rbfdot", kpar = list(sigma = 1)) irisPars[[length(irisPars) + 1]] <- list(kernel = "polydot", kpar = list(degree = 3)) irisPars[[length(irisPars) + 1]] <- list(kernel = "vanilladot", kpar = list()) irisPars[[length(irisPars) + 1]] <- list(kernel = "laplacedot", kpar = list(sigma = 1)) irisPars[[length(irisPars) + 1]] <- list(kernel = "laplacedot", kpar = list(sigma = 0.1)) irisPars[[length(irisPars) + 1]] <- list(kernel = "besseldot", kpar = list(sigma = 0.1, order = 1, degree = 1)) irisPars[[length(irisPars) + 1]] <- list(kernel = "besseldot", kpar = list(sigma = 1, order = 2, degree = 3)) irisPars[[length(irisPars) + 1]] <- list(kernel = "splinedot", kpar = list()) irisRes <- lapply(irisPars, function(x) do.call( function(...) 
tryCatch(embed(.data = irisData, .method = "kPCA", ...), error = function(e) as.character(e)), x ) ) for (i in 1:length(irisRes)) { if (inherits(irisRes[[i]], "character")){ expect(grepl("singular", irisRes[[i]]), "singular") } else { expect(inherits(irisRes[[i]], "dimRedResult"), 'should be of class "dimRedResult"') } } ## This test fails with multithreaded blas ## for (i in 1:length(irisRes)){ ## if (inherits(irisRes[[i]], "dimRedResult")){ ## expect_equal(irisRes[[i]]@apply(irisData)@data[, 1:2], ## irisRes[[i]]@data@data) ## expect_equal(2, getNDim(irisRes[[i]])) ## ## the reverse is an approximate: ## expect_less_than( ## max( ## irisRes[[i]]@inverse(irisRes[[i]]@data)@data - irisData@data ## ), 300, ## ## paste0("inverse of kpca is an approximate, ", ## ## "so this may fail due to numerical inaccuracy") ## ) ## } ## } ## This one cannot calculate an inverse: kpca.fit <- embed(loadDataSet("3D S", n = 200), "kPCA", kernel = "splinedot", kpar = list()) expect( is.na(kpca.fit@inverse(1)), "The inverse should return NA" ) }) dimRed/tests/testthat/test_diffmap.R0000644000176200001440000000053713424014250017250 0ustar liggesuserscontext("DiffusionMaps") test_that("DiffusionMaps", { expect_s4_class(embed(iris[1:4], "DiffusionMaps", ndim = 1, .mute = c("message", "output")), "dimRedResult") x <- embed(iris[1:4], "DiffusionMaps", ndim = 1, .mute = c("message", "output")) expect_equal(dim(x@data@data), c(150, 1)) }) dimRed/tests/testthat/test_embed.R0000644000176200001440000000017213371631672016727 0ustar liggesusers context("embed") test_that("standard method is PCA", { res <- embed(iris[1:4]) expect_equal(res@method, "PCA") }) dimRed/tests/testthat/test_PCA_L1.R0000644000176200001440000000645013464304712016612 0ustar liggesusers context("PCA L1") test_that("general data conversions", { skip_if_not_installed("pcaL1") irisData <- as(iris[, 1:4], "dimRedData") expect_equal(class(irisData)[1], "dimRedData") irisParsCS <- list(center = TRUE, .mute = c("message", "output"), ndim = 4, scale. = TRUE, projections = "l1", fun = "l1pca") irisParsC <- list(center = TRUE, .mute = c("message", "output"), ndim = 4, scale. = FALSE, projections = "l1", fun = "l1pca") irisParsS <- list(center = TRUE, .mute = c("message", "output"), ndim = 4, scale. = TRUE, projections = "l1", fun = "l1pcahp") irisPars <- list(center = FALSE, .mute = c("message", "output"), ndim = 4, scale. = FALSE, projections = "l1", fun = "l1pcastar") irisResCS <- do.call(function(...) embed(irisData, "PCA_L1", ...), irisParsCS) irisResS <- do.call(function(...) embed(irisData, "PCA_L1", ...), irisParsS) irisResC <- do.call(function(...) embed(irisData, "PCA_L1", ...), irisParsC) irisRes <- do.call(function(...) 
embed(irisData, "PCA_L1", ...), irisPars) expect_equal(4, getNDim(irisResCS)) expect_equal(4, getNDim(irisResS)) expect_equal(4, getNDim(irisResC)) expect_equal(4, getNDim(irisRes)) expect_equal(class(irisResCS)[1], "dimRedResult") expect_equal(class(irisResS)[1], "dimRedResult") expect_equal(class(irisResC)[1], "dimRedResult") expect_equal(class(irisRes)[1], "dimRedResult") expect_equal(irisResCS@apply(irisData), irisResCS@data) expect_equal(irisResS@apply(irisData), irisResS@data) expect_equal(irisResC@apply(irisData), irisResC@data) expect_equal(irisRes@apply(irisData), irisRes@data) expect(sqrt(mean( (irisResCS@inverse(irisResCS@data)@data - irisData@data) ^ 2 )) < 0.3, "error too large" ) expect(sqrt(mean( (irisResS@inverse(irisResS@data)@data - irisData@data) ^ 2 )) < 0.3, "error too large" ) expect(sqrt(mean( (irisResC@inverse(irisResC@data)@data - irisData@data) ^ 2 )) < 0.3, "error too large" ) expect(sqrt(mean( (irisRes@inverse(irisRes@data)@data - irisData@data) ^ 2 )) < 0.3, "error too large" ) scale2 <- function(x, center, scale.) scale(x, center, scale.) expect_equal( do.call(function(...) scale2(iris[1:4], ...) %*% getRotationMatrix(irisResCS), irisParsCS[c("center", "scale.")]), getData( getDimRedData(irisResCS) ), tolerance = 1e-2 ) expect_equal( do.call(function(...) scale2(iris[1:4], ...) %*% getRotationMatrix(irisResS), irisParsS[c("center", "scale.")]), getData( getDimRedData(irisResS) ), tolerance = 1e-2 ) expect_equal( do.call(function(...) scale2(iris[1:4], ...) %*% getRotationMatrix(irisResC), irisParsC[c("center", "scale.")]), getData( getDimRedData(irisResC) ), tolerance = 1e-2 ) expect_equal( do.call(function(...) scale2(iris[1:4], ...) %*% getRotationMatrix(irisRes), irisPars[c("center", "scale.")]), getData( getDimRedData(irisRes) ), tolerance = 1e-2 ) expect_s4_class({ embed(iris[1:4], "PCA_L1", ndim = 1, .mute = c("message", "output")) }, "dimRedResult") }) dimRed/tests/testthat/test_ICA.R0000644000176200001440000000121113142050040016216 0ustar liggesusers context("FastICA") test_that("general data conversions", { irisData <- as(iris[, 1:4], "dimRedData") expect_equal(class(irisData)[1], "dimRedData") irisRes <- embed(irisData, "FastICA") expect_equal(class(irisRes)[1], "dimRedResult") expect_equal(2, getNDim(irisRes)) expect_equal(irisRes@apply(irisData), irisRes@data) expect(sqrt(mean( (irisRes@inverse(irisRes@data)@data - irisData@data) ^ 2 )) < 0.3, "error too large" ) expect_equal( scale(iris[1:4], TRUE, FALSE) %*% getRotationMatrix(irisRes), unname(as.matrix(getData( getDimRedData(irisRes) )) ) ) }) dimRed/NAMESPACE0000644000176200001440000000457413464323325012714 0ustar liggesusers# Generated by roxygen2: do not edit by hand export(AUC_lnK_R_NX) export(AutoEncoder) export(DRR) export(DiffusionMaps) export(DrL) export(FastICA) export(FruchtermanReingold) export(HLLE) export(Isomap) export(KamadaKawai) export(LCMC) export(LLE) export(LaplacianEigenmaps) export(MDS) export(NNMF) export(PCA) export(PCA_L1) export(Q_NX) export(Q_global) export(Q_local) export(R_NX) export(UMAP) export(dataSetList) export(dimRedData) export(dimRedMethodList) export(dimRedQualityList) export(dimRedResult) export(distance_correlation) export(embed) export(getRotationMatrix) export(installSuggests) export(inverse) export(kPCA) export(loadDataSet) export(mean_R_NX) export(mixColor1Ramps) export(mixColor2Ramps) export(mixColor3Ramps) export(mixColorRamps) export(nMDS) export(plot) export(plot_R_NX) export(predict) export(quality) export(reconstruction_error) 
export(reconstruction_rmse) export(tSNE) export(total_correlation) exportClasses(AutoEncoder) exportClasses(DRR) exportClasses(DiffusionMaps) exportClasses(DrL) exportClasses(FastICA) exportClasses(FruchtermanReingold) exportClasses(HLLE) exportClasses(Isomap) exportClasses(KamadaKawai) exportClasses(LLE) exportClasses(LaplacianEigenmaps) exportClasses(MDS) exportClasses(NNMF) exportClasses(PCA) exportClasses(PCA_L1) exportClasses(UMAP) exportClasses(dimRedData) exportClasses(dimRedMethod) exportClasses(dimRedResult) exportClasses(kPCA) exportClasses(nMDS) exportClasses(tSNE) exportMethods("[") exportMethods(AUC_lnK_R_NX) exportMethods(LCMC) exportMethods(Q_NX) exportMethods(Q_global) exportMethods(Q_local) exportMethods(R_NX) exportMethods(as.data.frame) exportMethods(as.dimRedData) exportMethods(cophenetic_correlation) exportMethods(distance_correlation) exportMethods(embed) exportMethods(getData) exportMethods(getDimRedData) exportMethods(getMeta) exportMethods(getNDim) exportMethods(getOrgData) exportMethods(getOtherData) exportMethods(getPars) exportMethods(inverse) exportMethods(maximize_correlation) exportMethods(mean_R_NX) exportMethods(ndims) exportMethods(nrow) exportMethods(plot) exportMethods(predict) exportMethods(print) exportMethods(quality) exportMethods(reconstruction_error) exportMethods(reconstruction_rmse) exportMethods(total_correlation) import(DRR) import(methods) import(utils) importFrom(grDevices,colorRamp) importFrom(grDevices,rgb) importFrom(graphics,plot) importFrom(magrittr,"%>%") importFrom(stats,predict) dimRed/NEWS.md0000644000176200001440000000143113424014250012546 0ustar liggesusers# dimRed 0.2.1 and 0.2.2 * Bugfix releases to pass CRAN tests # dimRed 0.2.0 * Added the R-Journal [paper](https://journal.r-project.org/archive/2018/RJ-2018-039/index.html "dimRed and coRanking") as Vignette * Added UMAP * Added NMF (thanks @topepo) * Added the possibility to return other data such as distance matrices/eigenvalues * Added Autoencoder * Added l1 PCA * Added `getNDim` * Added an `ndim` parameter to many quality functions. * fixed bug in kPCA if inverse was not computable. * added autoencoder # dimRed 0.1.0 * Fixed kPCA predict function and documentation typos (@topepo #2) * Added predict and inverse functions * Added a function to extract rotation matrices from PCA and FastICA # dimRed 0.0.3 * First version on CRAN dimRed/R/0000755000176200001440000000000013464323464011670 5ustar liggesusersdimRed/R/dataSets.R0000644000176200001440000001432013033377101013550 0ustar liggesusers#' Example Data Sets for dimensionality reduction #' #' A compilation of standard data sets that are often being used to #' showcase dimensionality reduction techniques. #' #' The argument \code{name} should be one of #' \code{dataSetList()}. Partial matching is possible, see #' \code{\link{match.arg}}. Generated data sets contain the internal #' coordinates of the manifold in the \code{meta} slot. Call #' \code{dataSetList()} to see what data sets are available. #' #' #' #' @param name A character vector that specifies the name of the data #' set. #' @param n In generated data sets the number of points to be #' generated, else ignored. #' @param sigma In generated data sets the standard deviation of the #' noise added, else ignored. #' @return \code{loadDataSet} an object of class #' \code{\link{dimRedData}}. 
\code{dataSetList()} return a #' character string with the implemented data sets #' #' @examples #' ## a list of available data sets: #' dataSetList() #' #' ## Load a data set: #' swissRoll <- loadDataSet("Swiss Roll") #' \donttest{plot(swissRoll, type = "3vars")} #' #' ## Load Iris data set, partial matching: #' loadDataSet("I") #' #' @name dataSets NULL #' @include dimRedData-class.R #' @rdname dataSets #' @export loadDataSet <- function (name = dataSetList(), n = 2000, sigma = 0.05) { name <- match.arg(name) switch( name, "Swiss Roll" = swissRoll(n, sigma), "Broken Swiss Roll" = brokenSwissRoll(n, sigma), "Helix" = helix(n, sigma), "Twin Peaks" = twinPeaks(n, sigma), "Sphere" = sphere(n, sigma), "FishBowl" = fishbowl(n, sigma), "Ball" = ball(n, sigma), "3D S Curve" = sCurve(n, sigma), "variable Noise Helix" = noisyHelix(n, sigma), "Cube" = cube(n, sigma), "Iris" = irisdata() ) } #' @rdname dataSets #' @export dataSetList <- function () { return(c( "Swiss Roll", "Broken Swiss Roll", "Helix", "Twin Peaks", "Sphere", "Ball", "FishBowl", "3D S Curve", "variable Noise Helix", "Iris", "Cube" )) } irisdata <- function() { dd <- as.matrix(datasets::iris[, 1:4]) new("dimRedData", data = dd, meta = datasets::iris[, 5, drop = FALSE]) } swissRoll <- function (n = 2000, sigma = 0.05) { x <- stats::runif(n, 1.5 * pi, 4.5 * pi) y <- stats::runif(n, 0, 30) new("dimRedData", data = swissRollMapping(x, y) + stats::rnorm(3 * n, sd = sigma), meta = data.frame(x = x, y = y)) } brokenSwissRoll <- function (n = 2000, sigma = 0.05) { x <- c( stats::runif(floor(n / 2), 1.5 * pi, 2.7 * pi), stats::runif(ceiling(n / 2), 3.3 * pi, 4.5 * pi) ) y <- stats::runif(n, 0, 30) new("dimRedData", data = swissRollMapping(x, y) + stats::rnorm(3 * n, sd = sigma), meta = data.frame(x = x, y = y)) } swissRollMapping <- function (x, y) { cbind(x = x * cos(x), y = y, z = x * sin(x)) } helix <- function (n = 2000, sigma = 0.05) { t <- stats::runif(n, 0, 2 * pi) new("dimRedData", data = helixMapping(t) + stats::rnorm(3 * n, sd = sigma), meta = data.frame(t = t)) } helixMapping <- function (x) { cbind(x = (2 + cos(8 * x)) * cos(x), y = (2 + cos(8 * x)) * sin(x), z = (sin(8 * x))) } twinPeaks <- function (n = 2000, sigma = 0.05) { x <- stats::runif(n, -1, 1) y <- stats::runif(n, -1, 1) new("dimRedData", data = twinPeaksMapping(x, y) + stats::rnorm(3 * n, sd = sigma), meta = data.frame(x = x, y = y)) } twinPeaksMapping <- function (x, y) { cbind(x = x, y = y, z = sin(pi * x) * tanh(3 * y)) } sphere <- function (n = 2000, sigma = 0.05) { phi <- stats::runif(n, 0, 2 * pi) psi <- acos(stats::runif(n, -1, 1)) new("dimRedData", data = sphereMapping(phi, psi) + stats::rnorm(3 * n, sd = sigma), meta = data.frame(phi = phi, psi = psi)) } fishbowl <- function (n = 2000, sigma = 0.05) { phi <- stats::runif(n, 0, 2 * pi) psi <- acos(stats::runif(n, -1, 0.8)) new("dimRedData", data = sphereMapping(phi, psi) + stats::rnorm(3 * n, sd = sigma), meta = data.frame(psi = psi)) } sphereMapping <- function (phi, psi) { cbind(x = cos(phi) * sin(psi), y = sin(phi) * sin(psi), z = cos(psi)) } ball <- function (n = 2000, sigma = 0.05) { phi <- stats::runif(n, 0, 2 * pi) psi <- acos(stats::runif(n, -1, 1)) ## make it uniformly distributed inside the sphere r <- stats::runif(n) ^ (1 / 3) new("dimRedData", data = ballMapping(phi, psi, r) + stats::rnorm(3 * n, sd = sigma), meta = data.frame(phi = phi, psi = psi, r = r)) } ballMapping <- function (phi, psi, r) { cbind(x = r * cos(phi) * sin(psi), y = r * sin(phi) * sin(psi), z = r * cos(psi)) } sCurve <- function 
(n = 2000, sigma = 0.05) { t <- stats::runif(n, -1.5 * pi, 1.5 * pi) y <- stats::runif(n, 0, 2) new("dimRedData", data = sCurveMapping(t, y) + stats::rnorm(3 * n, sd = sigma), meta = data.frame(x = t, y = y)) } sCurveMapping <- function (t, y) { cbind(x = sin(t), y = y, z = sign(t) * (cos(t) - 1)) } noisyHelix <- function (n = 2000, sigma = 0.05) { t <- stats::runif(n, 0, 4 * pi) min_noise <- 0.1 max_noise <- 1.4 new("dimRedData", data = noisyHelixMapping(t, min_noise, max_noise) + stats::rnorm(3 * n, sd = sigma), meta = data.frame(t = t)) } noisyHelixMapping <- function(t, min_noise, max_noise) { make_noise <- function (t){ stats::rnorm(length(t), sd = t * max_noise / max(t) + min_noise) } cbind(x = 3 * cos(t) + make_noise(t), y = 3 * sin(t) + make_noise(t), z = 2 * t + make_noise(t)) } cube <- function(n = 2000, sigma = 0.05){ tmp <- cbind(x = stats::runif(n) + stats::rnorm(n, sd = sigma), y = stats::runif(n) + stats::rnorm(n, sd = sigma), z = stats::runif(n) + stats::rnorm(n, sd = sigma)) new("dimRedData", data = tmp, meta = tmp) } dimRed/R/get_info.R0000644000176200001440000000205713142050314013571 0ustar liggesusers #' getRotationMatrix #' #' Extract the rotation matrix from \code{\link{dimRedResult}} objects derived from PCA and FastICA #' #' The data has to be pre-processed the same way as the method does, e.g. #' centering and/or scaling. #' #' @param x of type \code{\link{dimRedResult}} #' @return a matrix #' #' @examples #' dat <- loadDataSet("Iris") #' #' pca <- embed(dat, "PCA") #' ica <- embed(dat, "FastICA") #' #' rot_pca <- getRotationMatrix(pca) #' rot_ica <- getRotationMatrix(ica) #' #' scale(getData(dat), TRUE, FALSE) %*% rot_pca - getData(getDimRedData(pca)) #' scale(getData(dat), TRUE, FALSE) %*% rot_ica - getData(getDimRedData(ica)) #' #' @family convenience functions #' @export getRotationMatrix <- function(x) { if(!inherits(x, "dimRedResult")) stop("x must be of type 'dimRedResult'") if(x@method == "PCA") return(environment(x@apply)$rot) if(x@method == "PCA_L1") return(environment(x@apply)$rot) if(x@method == "FastICA") return(environment(x@apply)$res$K %*% environment(x@apply)$res$W) stop(paste("Not implemented for", x@method)) } dimRed/R/nnmf.R0000644000176200001440000001244313424016572012750 0ustar liggesusers#' Non-Negative Matrix Factorization #' #' S4 Class implementing NNMF. #' #' NNMF is a method for decomposing a matrix into a smaller #' dimension such that the constraint that the data (and the #' projection) are not negative is taken into account. #' #' @template dimRedMethodSlots #' #' @template dimRedMethodGeneralUsage #' #' @section Parameters: #' The method can take the following parameters: #' \describe{ #' \item{ndim}{The number of output dimensions.} #' \item{method}{character, which algorithm should be used. See #' \code{\link[NMF]{nmf}} for possible values. Defaults to #' "brunet"} #' \item{nrun}{integer, the number of times the computations are #' conducted. See \code{\link[NMF]{nmf}}} #' \item{seed}{integer, a value to control the random numbers used.} #' \item{options}{named list, other options to pass to \code{\link[NMF]{nmf}}} #' } #' #' @section Implementation: #' #' Wraps around \code{\link[NMF]{nmf}}. Note that the estimation uses random #' numbers. To create reproducible results, set the random number seed in the #' function call. Also, in many cases, the computations will be conducted #' in parallel using multiple cores. To disable this, use the option #' \code{.pbackend = NULL}. #' #' @references #' #' Lee, D.D., Seung, H.S., 1999. 
Learning the parts of objects by non-negative #' matrix factorization. Nature 401, 788-791. https://doi.org/10.1038/44565 #' #' @examples #' dat <- loadDataSet("Iris") #' #' set.seed(4646) #' factorization <- embed(dat, "NNMF") #' #' proj_dat <- factorization@apply(dat) #' #' plot(proj_dat@data[, 1], proj_dat@data[, 2]) #' #' # project new values: #' #' nn_proj <- predict(factorization, iris[1:7, 1:4]) #' nn_proj #' #' @include dimRedResult-class.R #' @include dimRedMethod-class.R #' @family dimensionality reduction methods #' @export NNMF #' @exportClass NNMF NNMF <- setClass( "NNMF", contains = "dimRedMethod", prototype = list( stdpars = list(ndim = 2L, method = "brunet", nrun = 1, seed = sample.int(10^5, 1), options = list()), fun = function (data, pars, keep.org.data = TRUE) { chckpkg("NMF") chckpkg("MASS") ## TODO: remove this, depends on https://github.com/renozao/NMF/issues/114 ## require("NMF") meta <- data@meta orgdata <- if (keep.org.data) data@data else NULL data <- data@data if (!is.matrix(data)) data <- as.matrix(data) # NMF expects variables in rows and samples in columns data <- t(data) if (pars$ndim > nrow(data)) stop("`ndim` should be less than the number of columns.", call. = FALSE) if (length(pars$method) != 1) stop("only supply one `method`", call. = FALSE) args <- list(x = quote(data), rank = pars$ndim, method = pars$method, nrun = pars$nrun, seed = pars$seed) if (length(pars$options) > 0) args <- c(args, pars$options) nmf_result <- do.call(NMF::nmf, args) # this should work but doesn't # call <- c(list(quote(NMF::nmf)), args) w <- NMF::basis(nmf_result) h <- t(NMF::coef(nmf_result)) colnames(w) <- paste0("NNMF", 1:ncol(w)) other.data <- list(w = w) colnames(h) <- paste0("NNMF", 1:ncol(h)) # evaluate results here for functions appl <- function (x) { appl.meta <- if (inherits(x, "dimRedData")) x@meta else data.frame() dat <- if (inherits(x, "dimRedData")) x@data else x if (!is.matrix(dat)) dat <- as.matrix(dat) if (ncol(dat) != nrow(w)) stop("x must have the same number of columns ", "as the original data (", nrow(w), ")", call. = FALSE) res <- dat %*% t(MASS::ginv(w)) colnames(res) <- paste0("NNMF", 1:ncol(res)) scores <- new("dimRedData", data = res, meta = appl.meta) return(scores) } inv <- function (x) { appl.meta <- if (inherits(x, "dimRedData")) x@meta else data.frame() proj <- if (inherits(x, "dimRedData")) x@data else x if (ncol(proj) > ncol(w)) stop("x must have less or equal number of dimensions ", "as the original data") res <- tcrossprod(proj, w) colnames(res) <- colnames(data) res <- new("dimRedData", data = res, meta = appl.meta) return(res) } ## inv <- function(x) { ## appl.meta <- if (inherits(x, "dimRedData")) x@meta else data.frame() ## proj <- if (inherits(x, "dimRedData")) x@data else x ## if (ncol(proj) > ncol(data)) ## stop("x must have less or equal number of dimensions ", ## "as the original data") ## reproj <- proj %*% other.data$H ## reproj <- new("dimRedData", data = reproj, meta = appl.meta) ## return(reproj) ## } res <- new( "dimRedResult", data = new("dimRedData", data = h, meta = meta), org.data = orgdata, apply = appl, inverse = inv, has.org.data = keep.org.data, has.apply = TRUE, has.inverse = TRUE, method = "NNMF", pars = pars, other.data = other.data ) return(res) }) ) dimRed/R/pca.R0000644000176200001440000001027613371631672012564 0ustar liggesusers#' Principal Component Analysis #' #' S4 Class implementing PCA. 
#' #' PCA transforms the data into orthogonal components so that the first #' axis accounts for the largest variance in the data, and all the #' following axes account for the highest remaining variance under the #' constraint that they are orthogonal to the preceding axes. PCA is #' sensitive to the scaling of the variables. PCA is by far the #' fastest and simplest method of dimensionality reduction and should #' probably always be applied as a baseline if other methods are tested. #' #' @template dimRedMethodSlots #' #' @template dimRedMethodGeneralUsage #' #' @section Parameters: #' PCA can take the following parameters: #' \describe{ #' \item{ndim}{The number of output dimensions.} #' \item{center}{logical, should the data be centered, defaults to \code{TRUE}.} #' \item{scale.}{logical, should the data be scaled, defaults to \code{FALSE}.} #' } #' #' @section Implementation: #' #' Wraps around \code{\link{prcomp}}. Because PCA can be reduced to a #' simple rotation, forward and backward projection functions are #' supplied. #' #' @references #' #' Pearson, K., 1901. On lines and planes of closest fit to systems of points in #' space. Philosophical Magazine 2, 559-572. #' #' @examples #' dat <- loadDataSet("Iris") #' #' ## using the S4 Class #' pca <- PCA() #' emb <- pca@fun(dat, pca@stdpars) #' #' ## using embed() #' emb2 <- embed(dat, "PCA") #' #' plot(emb, type = "2vars") #' plot(emb@inverse(emb@data), type = "3vars") #' #' @include dimRedResult-class.R #' @include dimRedMethod-class.R #' @family dimensionality reduction methods #' @export PCA #' @exportClass PCA PCA <- setClass( "PCA", contains = "dimRedMethod", prototype = list( stdpars = list(ndim = 2, center = TRUE, scale. = FALSE), fun = function (data, pars, keep.org.data = TRUE) { ndim <- pars$ndim pars$ndim <- NULL meta <- data@meta orgdata <- if (keep.org.data) data@data else NULL data <- data@data res <- do.call( prcomp, c(list(x = data), pars) ) # evaluate results here for functions data <- res$x[, seq_len(ndim), drop = FALSE] ce <- res$center sc <- res$scale rot <- res$rotation[, seq_len(ndim)] rerot <- t(rot) appl <- function(x) { appl.meta <- if (inherits(x, "dimRedData")) x@meta else data.frame() proj <- if (inherits(x, "dimRedData")) x@data else x if (ncol(proj) != ncol(orgdata)) stop("x must have the same number of dimensions ", "as the original data") if (ce[1] != FALSE) proj <- t(apply(proj, 1, function(x) x - ce)) if (sc[1] != FALSE) proj <- t(apply(proj, 1, function(x) x / sc)) proj <- proj %*% rot proj <- new("dimRedData", data = proj, meta = appl.meta) return(proj) } inv <- function(x) { appl.meta <- if (inherits(x, "dimRedData")) x@meta else data.frame() proj <- if (inherits(x, "dimRedData")) x@data else x if (ncol(proj) > ncol(data)) stop("x must have less or equal number of dimensions ", "as the original data") d <- ncol(proj) reproj <- proj %*% rerot[seq_len(d), ] if (sc[1] != FALSE) reproj <- t(apply(reproj, 1, function(x) x * sc)) if (ce[1] != FALSE) reproj <- t(apply(reproj, 1, function(x) x + ce)) reproj <- new("dimRedData", data = reproj, meta = appl.meta) return(reproj) } res <- new( "dimRedResult", data = new("dimRedData", data = data, meta = meta), org.data = orgdata, apply = appl, inverse = inv, has.org.data = keep.org.data, has.apply = TRUE, has.inverse = TRUE, method = "PCA", pars = pars ) return(res) }) ) dimRed/R/dimRedData-class.R0000644000176200001440000001373013424014250015102 0ustar liggesusers#' @include misc.R NULL #' Class "dimRedData" #' #' A class to hold data for dimensionality reduction and methods.
#' #' The class hast two slots, \code{data} and \code{meta}. The #' \code{data} slot contains a \code{numeric matrix} with variables in #' columns and observations in rows. The \code{meta} slot may contain #' a \code{data.frame} with additional information. Both slots need to #' have the same number of rows or the \code{meta} slot needs to #' contain an empty \code{data.frame}. #' #' See examples for easy conversion from and to \code{data.frame}. #' #' For plotting functions see \code{\link{plot.dimRedData}}. #' #' @slot data of class \code{matrix}, holds the data, observations in #' rows, variables in columns #' @slot meta of class \code{data.frame}, holds meta data such as #' classes, internal manifold coordinates, or simply additional #' data of the data set. Must have the same number of rows as the #' \code{data} slot or be an empty data frame. #' #' #' @examples #' ## Load an example data set: #' s3d <- loadDataSet("3D S Curve") #' #' ## Create using a constructor: #' #' ### without meta information: #' dimRedData(iris[, 1:4]) #' #' ### with meta information: #' dimRedData(iris[, 1:4], iris[, 5]) #' #' ### using slot names: #' dimRedData(data = iris[, 1:4], meta = iris[, 5]) #' #' ## Convert to a dimRedData objects: #' Iris <- as(iris[, 1:4], "dimRedData") #' #' ## Convert to data.frame: #' head(as(s3d, "data.frame")) #' head(as.data.frame(s3d)) #' head(as.data.frame(as(iris[, 1:4], "dimRedData"))) #' #' ## Extract slots: #' head(getData(s3d)) #' head(getMeta(s3d)) #' #' ## Get the number of observations: #' nrow(s3d) #' #' ## Subset: #' s3d[1:5, ] #' #' @family dimRedData #' @import methods #' @export dimRedData #' @exportClass dimRedData dimRedData <- setClass( "dimRedData", slots = c(data = "matrix", meta = "data.frame"), prototype = prototype(data = matrix(numeric(0), 0, 0), meta = data.frame()), validity = function (object) { retval <- NULL if (!is.matrix(object@data)) { retval <- c( retval, c("data must be a matrix with ", "observations in rows and dimensions in columns") ) } if (!is.numeric(object@data)) { retval <- c( retval, c("data must be numeric") ) } if ((nrow(object@meta) != 0) && (nrow(object@meta) != nrow(object@data))){ retval <- c( retval, c("data and meta must have the same numbers of rows") ) } return(if (is.null(retval)) TRUE else retval) } ) setMethod("initialize", signature = c("dimRedData"), function (.Object, data = matrix(numeric(0), 0, 0), meta = data.frame()) { data <- as.matrix(data) meta <- as.data.frame(meta) .Object <- callNextMethod() return(.Object) }) setAs(from = "ANY", to = "dimRedData", def = function(from) new("dimRedData", data = as.matrix(from))) setAs(from = "dimRedData", to = "data.frame", def = function(from) as.data.frame(from)) #' @param meta.prefix Prefix for the columns of the meta data names. #' @param data.prefix Prefix for the columns of the variable names. 
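#'   (For illustration only: assuming the default column names of the
#'   example data, \code{as.data.frame(s3d, data.prefix = "var.")} would
#'   simply paste \code{"var."} in front of every coordinate column name.)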
#' #' @family dimRedData #' @describeIn dimRedData convert to data.frame #' @export setMethod(f = "as.data.frame", signature = c("dimRedData"), definition = function(x, meta.prefix = "meta.", data.prefix = "") { tmp <- list() if (nrow(x@meta) > 0){ tmp$meta <- as.data.frame(x@meta, stringsAsFactors = FALSE) names(tmp$meta) <- paste0(meta.prefix, colnames(x@meta)) } tmp$data <- as.data.frame(x@data, stringsAsFactors = FALSE) names(tmp$data) <- paste0(data.prefix, colnames(x@data)) names(tmp) <- NULL data.frame(tmp, stringsAsFactors = FALSE) }) #' @param data Will be coerced into a \code{\link{data.frame}} with #' \code{\link{as.data.frame}} #' #' @examples #' ## create a dimRedData object using a formula #' as.dimRedData(Species ~ Sepal.Length + Sepal.Width + Petal.Length + Petal.Width, #' iris)[1:5] #' #' @include misc.R #' @family dimRedData #' @describeIn as.dimRedData Convert a \code{data.frame} to a dimRedData #' object using a formula #' @export setMethod(f = "as.dimRedData", signature = c("formula"), definition = function(formula, data) { data <- as.data.frame(data) meta <- stats::model.frame(lhs(formula), data) data <- stats::model.matrix(rhs(formula), data) return(new("dimRedData", data = data, meta = meta)) }) #' @param object Of class dimRedData. #' @describeIn dimRedData Get the data slot. #' @export setMethod("getData", "dimRedData", function(object) object@data) #' @describeIn dimRedData Get the meta slot. #' @export setMethod("getMeta", "dimRedData", function(object) object@meta) #' @param x Of class dimRedData #' @describeIn dimRedData Get the number of observations. #' @export setMethod("nrow", "dimRedData", function(x) nrow(x@data)) #' @param i a valid index for subsetting rows. #' @examples #' ## Shuffle data: #' s3 <- s3d[nrow(s3d)] #' #' @describeIn dimRedData Subset rows. #' @export setMethod("[", signature(x = "dimRedData", i = "ANY"), function(x, i) { x@data <- x@data[i, , drop = FALSE] if (nrow(x@meta) != 0) x@meta <- x@meta[i, , drop = FALSE] # validObject returns a string with the description of what is wrong or # TRUE, so the following lines have to be as they are! vv <- validObject(x) if (vv == TRUE) return(x) else stop("cannot subset dimRedData object: \n", paste(vv, collapse = "\n")) }) #' @describeIn dimRedData Extract the number of Variables from the data. #' #' @examples #' ## Get the number of variables: #' ndims(s3d) #' #' @export setMethod("ndims", "dimRedData", function(object) ncol(object@data)) dimRed/R/hlle.R0000644000176200001440000001047513424014250012730 0ustar liggesusers#' Hessian Locally Linear Embedding #' #' An S4 Class implementing Hessian Locally Linear Embedding (HLLE) #' #' HLLE uses local hessians to approximate the curvines and is an #' extension to non-convex subsets in lowdimensional space. #' #' @template dimRedMethodSlots #' #' @template dimRedMethodGeneralUsage #' #' @section Parameters: #' HLLE can take the following parameters: #' \describe{ #' \item{knn}{neighborhood size} #' \item{ndim}{number of output dimensions} #' } #' #' @section Implementation: #' Own implementation, sticks to the algorithm in Donoho and Grimes #' (2003). Makes use of sparsity to speed up final embedding. #' #' @references #' Donoho, D.L., Grimes, C., 2003. Hessian eigenmaps: Locally linear #' embedding techniques for high-dimensional data. PNAS 100, #' 5591-5596. 
doi:10.1073/pnas.1031596100 #' #' @examples #' dat <- loadDataSet("3D S Curve", n = 300) #' #' ## directy use the S4 class: #' hlle <- HLLE() #' emb <- hlle@fun(dat, hlle@stdpars) #' #' ## using embed(): #' emb2 <- embed(dat, "HLLE", knn = 45) #' #' plot(emb, type = "2vars") #' plot(emb2, type = "2vars") #' #' @include dimRedResult-class.R #' @include dimRedMethod-class.R #' @family dimensionality reduction methods #' @export HLLE #' @exportClass HLLE HLLE <- setClass( "HLLE", contains = "dimRedMethod", prototype = list( stdpars = list(knn = 50, ndim = 2), fun = function(data, pars, keep.org.data = TRUE) { chckpkg("RSpectra") chckpkg("Matrix") chckpkg("RANN") if (pars$ndim < 2) stop("ndim must be 2 or larger.") if (is.null(pars$knn)) pars$knn <- 50 if (is.null(pars$ndim)) pars$ndim <- 2 indata <- data@data n <- nrow(indata) hs <- pars$ndim * (pars$ndim + 1) / 2 W <- Matrix::sparseMatrix(i = numeric(0), j = numeric(0), x = numeric(0), dims = c(n, hs * n)) ii <- jj <- ww <- list() ## Identify neighbors: message(Sys.time(), ": Finding nearest neighbors", sep = "") nnidx <- RANN::nn2(data = indata, query = indata, k = pars$knn + 1, treetype = "kd", "standard", eps = 0)$nn.idx#[, -1] message(Sys.time(), ": Calculating Hessian", sep = "") for (i in seq_len(n)) { cat(i, "/", n, "\r", sep = "") ## get neighborhood Nui <- indata[nnidx[i, ], , drop = FALSE] ## Form tangent coordinates: Nui <- sweep(Nui, 2, colMeans(Nui), "-") tc <- svd(Nui, nu = pars$ndim, nv = 0)$u ## Develop Hessian Estimator Xi <- cbind( 1, tc, tc ^ 2, apply(combn(seq_len(pars$ndim), 2), 2, function(x) tc[, x[1]] * tc[, x[2]]) ) tHi <- qr.Q(qr(Xi))[, -(1:(pars$ndim + 1)), drop = FALSE] ## Add quadratic form to hessian ii[[i]] <- rep(nnidx[i, ], hs) jj[[i]] <- rep((i - 1) * hs + (1:hs), each = ncol(nnidx)) ww[[i]] <- as.vector(tHi) } H <- as(Matrix::tcrossprod(Matrix::spMatrix( i = unlist(ii, FALSE, FALSE), j = unlist(jj, FALSE, FALSE), x = unlist(ww, FALSE, FALSE), nrow = n, ncol = n * hs) ), "dgCMatrix") ## Find null space: message(Sys.time(), ": Embedding", sep = "") ## eigs and eigs_sym converges much more reliably and faster ## with sigma = -eps than with which = "L*" outdata <- RSpectra::eigs_sym(H, k = pars$ndim + 1, sigma = -1e-5) message(paste(c("Eigenvalues:", format(outdata$values)), collapse = " ")) outdata <- outdata$vectors[, order(outdata$values)[-1], drop = FALSE] colnames(outdata) <- paste0("HLLE", seq_len(ncol(outdata))) message(Sys.time(), ": DONE", sep = "") return(new( "dimRedResult", data = new("dimRedData", data = outdata, meta = data@meta), org.data = if (keep.org.data) data@data else NULL, has.org.data = keep.org.data, method = "HLLE", pars = pars )) }) ) dimRed/R/dimRed.R0000644000176200001440000000232413371631672013220 0ustar liggesusers#' @title The dimRed package #' #' @description This package simplifies dimensionality reduction in R by #' providing a framework of S4 classes and methods. dimRed collects #' dimensionality reduction methods that are implemented in R and implements #' others. It gives them a common interface and provides plotting #' functions for visualization and functions for quality assessment. #' #' Funding provided by the Department for Biogeochemical Integration, #' Empirical Inference of the Earth System Group, at the Max Plack #' Institute for Biogeochemistry, Jena. #' #' @references #' #' Lee, J.A., Renard, E., Bernard, G., Dupont, P., Verleysen, M., #' 2013. 
Type 1 and 2 mixtures of Kullback-Leibler divergences as cost #' functions in dimensionality reduction based on similarity #' preservation. Neurocomputing. 112, #' 92-107. doi:10.1016/j.neucom.2012.12.036 #' #' Lee, J.A., Lee, J.A., Verleysen, M., 2008. Rank-based quality #' assessment of nonlinear dimensionality reduction. Proceedings of #' ESANN 2008 49-54. #' #' Chen, L., Buja, A., 2006. Local Multidimensional Scaling for #' Nonlinear Dimension Reduction, Graph Layout and Proximity Analysis. #' #' #' @import methods #' @importFrom magrittr %>% #' "_PACKAGE" dimRed/R/loe.R0000644000176200001440000000310713024273620012561 0ustar liggesusers ## this function produces segfaults and is super slow ## #' Local Ordinal Embedding ## #' ## #' Instance of \code{\link{dimRedMethod}} for Local Ordinal Embedding. ## #' ## #' For details see \code{\link[loe]{LOE}} ## #' ## #' @examples ## #' # for whatever reason the loe package has problems if I run this ## #' # with R CMD check, running it in the REPL works just fine ## #' dat <- loadDataSet("Iris")[sample(20)] ## #' loe <- LOE() ## #' emb <- loe@fun(dat, loe@stdpars) ## #' ## #' ## #' plot(emb@data@data) ## #' ## #' @include dimRedResult-class.R ## #' @include dimRedMethod-class.R ## #' @export ## LOE <- setClass( ## "LOE", ## contains = "dimRedMethod", ## prototype = list( ## stdpars = list(d = stats::dist, knn = 50, ndim = 2), ## fun = function (data, pars, ## keep.org.data = TRUE) { ## chckpkg("loe") ## meta <- data@meta ## orgdata <- if (keep.org.data) data@data else NULL ## indata <- data@data ## data.adj <- loe:::make.kNNG(as.matrix(pars$d(indata)), k = pars$knn) ## outdata <- loe::LOE(data.adj, p = pars$ndim, method = "MM")$X ## colnames(outdata) <- paste0("LOE", 1:ncol(outdata)) ## return(new( ## "dimRedResult", ## data = new("dimRedData", ## data = outdata, ## meta = meta), ## org.data = orgdata, ## has.org.data = keep.org.data, ## method = "loe", ## pars = pars ## )) ## }) ## ) dimRed/R/umap.R0000644000176200001440000000726713447172040012762 0ustar liggesusers#' Umap embedding #' #' An S4 Class implementing the UMAP algorithm #' #' Uniform Manifold Approximation is a gradient descend based algorithm that #' gives results similar to t-SNE, but scales better with the number of points. #' #' @template dimRedMethodSlots #' #' @template dimRedMethodGeneralUsage #' #' @section Parameters: #' #' UMAP can take the follwing parameters: #' \describe{ #' \item{ndim}{The number of embedding dimensions.} #' \item{knn}{The number of neighbors to be used.} #' \item{d}{The distance metric to use.} #' \item{method}{\code{"naive"} for an R implementation, \code{"python"} #' for the reference implementation.} #' } #' #' Other method parameters can also be passed, see #' \code{\link[umap]{umap.defaults}} for details. The ones above have been #' standardized for the use with \code{dimRed} and will get automatically #' translated for \code{\link[umap]{umap}}. #' #' @section Implementation: #' #' The dimRed package wraps the \code{\link[umap]{umap}} packages which provides #' an implementation in pure R and also a wrapper around the original python #' package \code{umap-learn} (https://github.com/lmcinnes/umap/) #' #' The \code{"naive"} implementation is a pure R implementation and considered #' experimental at the point of writing this, it is also much slower than the #' python implementation. #' #' The \code{"python"} implementation is the reference implementation used by #' McInees et. al. (2018). 
It requires the \code{\link[reticulate]{reticulate}} #' package for the interaction with python and the python package #' \code{umap-learn} installed (use \code{pip install umap-learn}). #' #' @references #' #' McInnes, Leland, and John Healy. #' "UMAP: Uniform Manifold Approximation and Projection for Dimension Reduction." #' https://arxiv.org/abs/1802.03426 #' #' @examples #' \dontrun{ #' dat <- loadDataSet("3D S Curve", n = 300) #' #' ## use the S4 Class directly: #' umap <- UMAP() #' emb <- umap@fun(dat, umap@stdpars) #' #' ## or simpler, use embed(): #' emb2 <- embed(dat, "UMAP", .mute = NULL, knn = 10) #' plot(emb2, type = "2vars") #' } #' #' @include dimRedResult-class.R #' @include dimRedMethod-class.R #' @family dimensionality reduction methods #' @export UMAP #' @exportClass UMAP UMAP <- setClass( "UMAP", contains = "dimRedMethod", prototype = list( stdpars = list( knn = 15, ndim = 2, d = "euclidean", method = "umap-learn" ), fun = function (data, pars, keep.org.data = TRUE) { chckpkg("umap") if (pars$method == "python") { chckpkg("reticulate") if (!reticulate::py_module_available("umap")) stop("cannot find python umap, install with `pip install umap-learn`") } meta <- data@meta orgdata <- if (keep.org.data) data@data else NULL indata <- data@data ## Create config umap_pars <- umap::umap.defaults umap_pars$n.neighbors <- pars$knn umap_pars$n.components <- pars$ndim umap_pars$metric.function <- pars$d umap_pars$method <- pars$method umap_pars$d <- indata pars_2 <- pars pars_2$knn <- NULL pars_2$ndim <- NULL pars_2$d <- NULL pars_2$method <- NULL for (n in names(pars_2)) umap_pars[[n]] <- pars_2[[n]] ## Do the embedding outdata <- do.call(umap::umap, umap_pars) ## Post processing colnames(outdata$layout) <- paste0("UMAP", 1:ncol(outdata$layout)) return(new( "dimRedResult", data = new("dimRedData", data = outdata$layout, meta = meta), org.data = orgdata, has.org.data = keep.org.data, method = "UMAP", pars = pars )) } ) ) dimRed/R/autoencoder.R0000644000176200001440000003570313371631672014333 0ustar liggesusers#' AutoEncoder #' #' An S4 Class implementing an Autoencoder #' #' Autoencoders are neural networks that try to reproduce their input. Consider #' this method unstable, as the internals may still be changed. 
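#'
#' As an illustrative sketch (parameter names as documented in the
#' Parameters section below; the values themselves are arbitrary), a
#' symmetric topology could be requested with:
#' \preformatted{
#' ## dat is any dimRedData object, e.g. loadDataSet("3D S Curve")
#' emb <- embed(dat, "AutoEncoder", ndim = 2,
#'              n_hidden = c(10, 2, 10),
#'              activation = c("tanh", "lin", "tanh"),
#'              n_steps = 1000)
#' }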
#' #' @template dimRedMethodSlots #' #' @template dimRedMethodGeneralUsage #' #' @section Parameters: #' Autoencoder can take the following parameters: #' \describe{ #' \item{ndim}{The number of dimensions for reduction.} #' \item{n_hidden}{The number of neurons in the hidden #' layers, the length specifies the number of layers, #' the length must be impair, the middle number must #' be the same as ndim.} #' \item{activation}{The activation functions for the layers, #' one of "tanh", "sigmoid", "relu", "elu", everything #' else will silently be ignored and there will be no #' activation function for the layer.} #' \item{weight_decay}{the coefficient for weight decay, #' set to 0 if no weight decay desired.} #' \item{learning_rate}{The learning rate for gradient descend} #' \item{graph}{Optional: A list of bits and pieces that define the #' autoencoder in tensorflow, see details.} #' \item{keras_graph}{Optional: A list of keras layers that define #' the encoder and decoder, specifying this, will ignore all #' other topology related variables, see details.} #' \item{batchsize}{If NA, all data will be used for training, #' else only a random subset of size batchsize will be used} #' \item{n_steps}{the number of training steps.} #' } #' #' @section Details: #' There are several ways to specify an autoencoder, the simplest is to pass the #' number of neurons per layer in \code{n_hidden}, this must be a vector of #' integers of impair length and it must be symmetric and the middle number must #' be equal to \code{ndim}, For every layer an activation function can be #' specified with \code{activation}. #' #' For regularization weight decay can be specified by setting #' \code{weight_decay} > 0. #' #' Currently only a gradient descent optimizer is used, the learning rate can be #' specified by setting \code{learning_rate}. #' The learner can operate on batches if \code{batchsize} is not \code{NA}. #' The number of steps the learner uses is specified using \code{n_steps}. #' #' @section Further training a model: #' If the model did not converge in the first training phase or training with #' different data is desired, the \code{\link{dimRedResult}} object may be #' passed as \code{autoencoder} parameter; In this case all topology related #' parameters will be ignored. #' #' @section Using Keras layers: #' The encoder and decoder part can be specified using a list of \pkg{keras} #' layers. This requires a list with two entries, \code{encoder} should contain #' a LIST of keras layers WITHOUT the \code{\link[keras]{layer_input}} #' that will be concatenated in order to form the encoder part. #' \code{decoder} should be #' defined accordingly, the output of \code{decoder} must have the same number #' of dimensions as the input data. 
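#'
#' A minimal sketch of such a list, assuming the \pkg{keras} package is
#' installed (the layer sizes are arbitrary and only serve as illustration):
#' \preformatted{
#' ## dat is a dimRedData object
#' keras_layers <- list(
#'   encoder = list(keras::layer_dense(units = 10, activation = "tanh"),
#'                  keras::layer_dense(units = 2)),
#'   decoder = list(keras::layer_dense(units = 10, activation = "tanh"),
#'                  keras::layer_dense(units = ncol(getData(dat)))))
#' emb <- embed(dat, "AutoEncoder", keras_graph = keras_layers)
#' }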
#' #' @section Using Tensorflow: #' The model can be entirely defined in \pkg{tensorflow}, it must contain a #' list with the following entries: #' \describe{ #' \item{encoder}{A tensor that defines the encoder.} #' \item{decoder}{A tensor that defines the decoder.} #' \item{network}{A tensor that defines the reconstruction (encoder + decoder).} #' \item{loss}{A tensor that calculates the loss (network + loss function).} #' \item{in_data}{A \code{placeholder} that points to the data input of #' the network AND the encoder.} #' \item{in_decoder}{A \code{placeholder} that points to the input of #' the decoder.} #' \item{session}{A \pkg{tensorflow} \code{Session} object that holds #' the values of the tensors.} #' } #' #' @section Implementation: #' Uses \pkg{tensorflow} as a backend, for details an #' problems relating tensorflow, see \url{https://tensorflow.rstudio.com}. #' #' @examples #' \dontrun{ #' dat <- loadDataSet("3D S Curve") #' #' ## use the S4 Class directly: #' autoenc <- AutoEncoder() #' emb <- autoenc@fun(dat, autoenc@stdpars) #' #' ## simpler, use embed(): #' emb2 <- embed(dat, "AutoEncoder") #' #' plot(emb, type = "2vars") #' #' samp <- sample(floor(nrow(dat) / 10)) #' embsamp <- autoenc@fun(dat[samp], autoenc@stdpars) #' embother <- embsamp@apply(dat[-samp]) #' plot(embsamp, type = "2vars") #' points(embother@data) #' } #' #' @include dimRedResult-class.R #' @include dimRedMethod-class.R #' @family dimensionality reduction methods #' @export AutoEncoder #' @exportClass AutoEncoder AutoEncoder <- setClass( "AutoEncoder", contains = "dimRedMethod", prototype = list( stdpars = list(ndim = 2, n_hidden = c(10, 2, 10), activation = c("tanh", "lin", "tanh"), weight_decay = 0.001, learning_rate = 0.15, graph = NULL, keras_graph = NULL, ## is.na() of an S4 class gives a warning autoencoder = NULL, batchsize = NA, n_steps = 500), fun = function (data, pars, keep.org.data = TRUE) { chckpkg("tensorflow") meta <- data@meta orgdata <- if (keep.org.data) data@data else NULL indata <- data@data graph <- if (!is.null(pars$graph)) { message("using predefined graph, ", "ignoring other parameters that define topology, ", "be sure to set ndim to the correct value ", "else you might run into trouble.") pars$graph } else if (!is.null(pars$autoencoder)) { message("using predefined autoencoder object, ", " ignoring other parameters that define topology.") if (!(inherits(pars$autoencoder, "dimRedResult") && pars$autoencoder@method == "AutoEncoder")) stop("autoencoder must be NULL, ", "or of type dimRedResult by an AutoEncoder object.") ## setting topology related parameters from autoencoder pars$ndim <- pars$autoencoder@pars$ndim pars$n_hidden <- pars$autoencoder@pars$n_hidden pars$activation <- pars$autoencoder@pars$activation pars$autoencoder@pars$graph } else if (!is.null(pars$keras_graph)) { message("using predefined keras graph, ", "ignoring parameters that define topology") tmp <- graph_keras(encoder = pars$keras_graph$encoder, decoder = pars$keras_graph$decoder, n_in = ncol(indata)) pars$ndim <- tmp$encoder$shape$dims[[2]]$value tmp } else { with(pars, { graph_params( d_in = ncol(indata), n_hidden = n_hidden, activation = activation, weight_decay = weight_decay, learning_rate = learning_rate, n_steps = n_steps, ndim = ndim ) }) } if (!"encoder" %in% names(graph)) stop("no encoder in graph") if (!"decoder" %in% names(graph)) stop("no decoder in graph") if (!"network" %in% names(graph)) stop("no network in graph") if (!"loss" %in% names(graph)) stop("no loss in graph") if (!"in_decoder" %in% 
names(graph)) stop("no in_decoder in graph") if (!"in_data" %in% names(graph)) stop("no in_data in graph") if (!"session" %in% names(graph)) stop("no session in graph") ## TODO: I am not sure if there is a way to do this directly on the list ## objects graph_data_input <- graph$in_data graph_decoder_input <- graph$in_dec sess <- graph$session optimizer <- tensorflow::tf$train$GradientDescentOptimizer(pars$learning_rate) train <- optimizer$minimize(graph$loss) ## TODO: do proper batching and hold out for (step in 1:pars$n_steps) { sess$run(train, feed_dict = tensorflow::dict( graph_data_input = if (is.na(pars$batchsize)) { indata } else { indata[ sample(seq_len(nrow(indata)), pars$batchsize), ] } ) ) } outdata <- sess$run(graph$encoder, feed_dict = tensorflow::dict(graph_data_input = indata)) appl <- function(x) { appl.meta <- if (inherits(x, "dimRedData")) x@meta else data.frame() proj <- if (inherits(x, "dimRedData")) x@data else x if (ncol(proj) != ncol(data@data)) stop("x must have the same number of dimensions ", "as the original data") res <- sess$run(graph$encoder, feed_dict = tensorflow::dict(graph_data_input = proj)) colnames(res) <- paste0("AE", seq_len(ncol(res))) new("dimRedData", data = res, meta = appl.meta) } inv <- function(x) { appl.meta <- if (inherits(x, "dimRedData")) x@meta else data.frame() proj <- if (inherits(x, "dimRedData")) x@data else x if (ncol(proj) != pars$ndim) stop("x must have the same number of dimensions ", "as ndim data") res <- sess$run( graph$decoder, feed_dict = tensorflow::dict( graph_decoder_input = proj )) colnames(res) <- colnames(indata) new("dimRedData", data = res, meta = appl.meta) } ## TODO: this is a hack and there should be an "official" way to save ## extra data in a dimRedResult object pars$graph <- graph colnames(outdata) <- paste0("AE", seq_len(ncol(outdata))) return(new( "dimRedResult", data = new("dimRedData", data = outdata, meta = meta), org.data = orgdata, apply = appl, inverse = inv, has.apply = TRUE, has.inverse = TRUE, has.org.data = keep.org.data, method = "AutoEncoder", pars = pars )) }) ) get_activation_function <- function(x) { switch( x, tanh = tensorflow::tf$tanh, sigmoid = tensorflow::tf$sigmoid, relu = tensorflow::tf$nn$relu, elu = tensorflow::tf$elu, I ) } ## no idea why these and variants do not work: ## chain_list <- function(x1, x2) Reduce(`%>%`, x2, init = x1) ## chain_list <- function(x) Reduce(`%>%`, x) chain_list <- function (x1, x2 = NULL) { if(is.null(x2)) { stopifnot(is.list(x1)) result <- x1[[1]] if(length(x1) > 1) for (i in 2:length(x1)) { result <- result %>% (x1[[i]]) } } else { stopifnot(is.list(x2)) result <- x1 for (i in 1:length(x2)) { result <- result %>% (x2[[i]]) } } return(result) } graph_keras <- function(encoder, decoder, n_in) { chckpkg("keras") chckpkg("tensorflow") inenc <- keras::layer_input(shape = n_in) enc <- inenc %>% chain_list(encoder) ndim <- enc$shape$dims[[2]]$value indec <- keras::layer_input(shape = ndim) dec <- indec %>% chain_list(decoder) encdec <- inenc %>% chain_list(encoder) %>% chain_list(decoder) ## TODO: check if this uses weight decay, probably not: loss <- tensorflow::tf$reduce_mean((encdec - inenc) ^ 2) sess <- keras::backend()$get_session() return(list( encoder = enc, decoder = dec, network = encdec, loss = loss, in_data = inenc, in_decoder = indec, session = sess )) } graph_params <- function ( d_in, n_hidden, activation, weight_decay, learning_rate, n_steps, ndim ) { if (length(n_hidden) %% 2 == 0) stop("the number of layers must be impair") if (ndim != 
n_hidden[ceiling(length(n_hidden) / 2)]) stop("the middle of n_hidden must be equal to ndim") if (length(n_hidden) != length(activation)) stop("declare an activation function for each layer:", "\nn_hidden: ", paste(n_hidden, collapse = " "), "\nactivation functions: ", paste(activation, collapse = " ")) if (weight_decay < 0) stop("weight decay must be > 0") if (learning_rate <= 0) stop("learning rate must be > 0") if (n_steps <= 0) stop("n_steps must be > 0") tf <- tensorflow::tf input <- tf$placeholder( "float", shape = tensorflow::shape(NULL, d_in), name = "input" ) indec <- tf$placeholder( "float", shape = tensorflow::shape(NULL, ndim), name = "nlpca" ) w <- lapply(seq_len(length(n_hidden) + 1), function(x) { n1 <- if (x == 1) d_in else n_hidden[x - 1] n2 <- if (x > length(n_hidden)) d_in else n_hidden[x] tf$Variable(tf$random_uniform(tensorflow::shape(n1, n2), 1.0, -1.0), name = paste0("w_", x)) }) b <- lapply(seq_len(length(n_hidden) + 1), function (x) { n <- if (x > length(n_hidden)) d_in else n_hidden[x] tf$Variable(tf$zeros(tensorflow::shape(n)), name = paste0("b_", x)) }) enc <- input for (i in 1:ceiling(length(n_hidden) / 2)) { sigma <- get_activation_function(activation[i]) enc <- sigma(tf$matmul(enc, w[[i]]) + b[[i]]) } dec <- indec for (i in (ceiling(length(n_hidden) / 2) + 1):(length(n_hidden) + 1)) { sigma <- get_activation_function(activation[i]) dec <- sigma(tf$matmul(dec, w[[i]]) + b[[i]]) } encdec <- enc for (i in (ceiling(length(n_hidden) / 2) + 1):(length(n_hidden) + 1)) { sigma <- get_activation_function(activation[i]) encdec <- sigma(tf$matmul(encdec, w[[i]]) + b[[i]]) } loss <- Reduce(`+`, lapply(w, function (x) tf$reduce_sum(tf$pow(x, 2))), 0) loss <- Reduce(`+`, lapply(b, function (x) tf$reduce_sum(tf$pow(x, 2))), loss) loss <- tf$reduce_mean((encdec - input) ^ 2) + weight_decay * loss sess <- tensorflow::tf$Session() ## This closes sess if it is garbage collected. reg.finalizer(sess, function(x) x$close()) sess$run(tensorflow::tf$global_variables_initializer()) return(list( encoder = enc, decoder = dec, network = encdec, loss = loss, in_data = input, in_decoder = indec, session = sess )) } dimRed/R/quality.R0000644000176200001440000004707013372577400013512 0ustar liggesusers#' @include dimRedResult-class.R #' @include dimRedData-class.R #' @export setGeneric("quality", function (.data, ...) standardGeneric("quality"), valueClass = "numeric") #' Quality Criteria for dimensionality reduction. #' #' A collection of functions to compute quality measures on #' \code{\link{dimRedResult}} objects. #' #' @section Implemented methods: #' #' Method must be one of \code{"\link{Q_local}", "\link{Q_global}", #' "\link{mean_R_NX}", "\link{total_correlation}", #' "\link{cophenetic_correlation}", "\link{distance_correlation}", #' "\link{reconstruction_rmse}"} #' #' @section Rank based criteria: #' #' \code{Q_local}, \code{Q_global}, and \code{mean_R_nx} are #' quality criteria based on the Co-ranking matrix. \code{Q_local} #' and \code{Q_global} determine the local/global quality of the #' embedding, while \code{mean_R_nx} determines the quality of the #' overall embedding. They are parameter free and return a single #' number. The object must include the original data. The number #' returns is in the range [0, 1], higher values mean a better #' local/global embedding. 
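#'
#' A small sketch (any embedding that retains the original data works):
#' \preformatted{
#' emb <- embed(loadDataSet("3D S Curve", n = 200), "PCA")
#' Q_local(emb)
#' Q_global(emb)
#' mean_R_NX(emb)
#' }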
#' #' @section Correlation based criteria: #' #' \code{total_correlation} calculates the sum of the mean squared #' correlations of the original axes with the axes in reduced #' dimensions, because some methods do not care about correlations #' with axes, there is an option to rotate data in reduced space to #' maximize this criterium. The number may be greater than one if more #' dimensions are summed up. #' #' \code{cophenetic_correlation} calculate the correlation between the #' lower triangles of distance matrices, the correlation and distance #' methods may be specified. The result is in range [-1, 1]. #' #' \code{distance_correlation} measures the independes of samples by #' calculating the correlation of distances. For details see #' \code{\link[energy]{dcor}}. #' #' @section Reconstruction error: #' #' \code{reconstruction_rmse} calculates the root mean squared error #' of the reconstrucion. \code{object} requires an inverse function. #' #' #' @references #' #' Lueks, W., Mokbel, B., Biehl, M., Hammer, B., 2011. How #' to Evaluate Dimensionality Reduction? - Improving the #' Co-ranking Matrix. arXiv:1110.3917 [cs]. #' #' Szekely, G.J., Rizzo, M.L., Bakirov, N.K., 2007. Measuring and #' testing dependence by correlation of distances. Ann. Statist. 35, #' 2769-2794. doi:10.1214/009053607000000505 #' #' Lee, J.A., Peluffo-Ordonez, D.H., Verleysen, M., 2015. Multi-scale #' similarities in stochastic neighbour embedding: Reducing #' dimensionality while preserving both local and global #' structure. Neurocomputing, 169, #' 246-261. doi:10.1016/j.neucom.2014.12.095 #' #' #' #' @param .data object of class \code{dimRedResult} #' @param .method character vector naming one of the methods #' @param .mute what output from the embedding method should be muted. #' @param ... the pameters, internally passed as a list to the #' quality method as \code{pars = list(...)} #' @return a number #' #' @examples #' \dontrun{ #' embed_methods <- dimRedMethodList() #' quality_methods <- dimRedQualityList() #' scurve <- loadDataSet("Iris") #' #' quality_results <- matrix(NA, length(embed_methods), length(quality_methods), #' dimnames = list(embed_methods, quality_methods)) #' embedded_data <- list() #' #' for (e in embed_methods) { #' message("embedding: ", e) #' embedded_data[[e]] <- embed(scurve, e, .mute = c("message", "output")) #' for (q in quality_methods) { #' message(" quality: ", q) #' quality_results[e, q] <- tryCatch( #' quality(embedded_data[[e]], q), #' error = function (e) NA #' ) #' } #' } #' #' print(quality_results) #' } #' @author Guido Kraemer #' @aliases quality quality.dimRedResult #' @family Quality scores for dimensionality reduction #' @describeIn quality Calculate a quality index from a dimRedResult object. #' @export setMethod( "quality", "dimRedResult", function (.data, .method = dimRedQualityList(), .mute = character(0), # c("output", "message"), ...) 
{ method <- match.arg(.method) methodFunction <- getQualityFunction(method) args <- c(list(object = .data), list(...)) devnull <- if (Sys.info()["sysname"] != "Windows") "/dev/null" else "NUL" if ("message" %in% .mute){ devnull1 <- file(devnull, "wt") sink(devnull1, type = "message") on.exit({ sink(file = NULL, type = "message") close(devnull1) }, add = TRUE) } if ("output" %in% .mute) { devnull2 <- file(devnull, "wt") sink(devnull2, type = "output") on.exit({ sink() close(devnull2) }, add = TRUE) } do.call(methodFunction, args) } ) getQualityFunction <- function (method) { switch( method, Q_local = Q_local, Q_global = Q_global, mean_R_NX = mean_R_NX, AUC_lnK_R_NX = AUC_lnK_R_NX, total_correlation = total_correlation, cophenetic_correlation = cophenetic_correlation, distance_correlation = distance_correlation, reconstruction_rmse = reconstruction_rmse ) } #' @export setGeneric( "Q_local", function(object, ...) standardGeneric("Q_local"), valueClass = "numeric" ) #' Method Q_local #' #' Calculate the Q_local score to assess the quality of a dimensionality reduction. #' #' @param object of class dimRedResult. #' @param ndim use the first ndim columns of the embedded data for calculation. #' @family Quality scores for dimensionality reduction #' @aliases Q_local #' @export setMethod( "Q_local", "dimRedResult", function (object, ndim = getNDim(object)) { if (!object@has.org.data) stop("object requires original data") chckpkg("coRanking") Q <- coRanking::coranking(object@org.data, object@data@data[, seq_len(ndim), drop = FALSE]) nQ <- nrow(Q) N <- nQ + 1 Qnx <- diag(apply(apply(Q, 2, cumsum), 1, cumsum)) / seq_len(nQ) / N lcmc <- Qnx - seq_len(nQ) / nQ Kmax <- which.max(lcmc) Qlocal <- sum(lcmc[1:Kmax]) / Kmax return(as.vector(Qlocal)) } ) #' @export setGeneric( "Q_global", function(object, ...) standardGeneric("Q_global"), valueClass = "numeric" ) #' Method Q_global #' #' Calculate the Q_global score to assess the quality of a dimensionality reduction. #' #' @param object of class dimRedResult #' @family Quality scores for dimensionality reduction #' @aliases Q_global #' @export setMethod( "Q_global", "dimRedResult", function(object){ if (!object@has.org.data) stop("object requires original data") chckpkg("coRanking") Q <- coRanking::coranking(object@org.data, object@data@data) nQ <- nrow(Q) N <- nQ + 1 Qnx <- diag(apply(apply(Q, 2, cumsum), 1, cumsum)) / seq_len(nQ) / N lcmc <- Qnx - seq_len(nQ) / nQ Kmax <- which.max(lcmc) Qglobal <- sum(lcmc[(Kmax + 1):nQ]) / (N - Kmax) return(Qglobal) } ) #' @export setGeneric( "mean_R_NX", function(object, ...) standardGeneric("mean_R_NX"), valueClass = "numeric" ) #' Method mean_R_NX #' #' Calculate the mean_R_NX score to assess the quality of a dimensionality reduction. #' #' @param object of class dimRedResult #' @family Quality scores for dimensionality reduction #' @aliases mean_R_NX #' @export setMethod( "mean_R_NX", "dimRedResult", function(object) mean(R_NX(object)) ) #' @export setGeneric( "AUC_lnK_R_NX", function(object, ...) standardGeneric("AUC_lnK_R_NX"), valueClass = "numeric" ) #' Method AUC_lnK_R_NX #' #' Calculate the Area under the R_NX(ln K), used in Lee et. al. (2015). Note #' that despite the name, this does not weight the mean by the logarithm, but by #' 1/K. If explicit weighting by the logarithm is desired use \code{weight = #' "log"} or \code{weight = "log10"} #' #' The naming confusion originated from equation 17 in Lee et al (2015) and the #' name of this method may change in the future to avoid confusion. 
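#'
#' For \code{weight = "inv"} the returned value is equivalent to
#' \preformatted{
#' K <- seq_along(R_NX(object))
#' sum(R_NX(object) / K) / sum(1 / K)
#' }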
#' #' @references Lee, J.A., Peluffo-Ordonez, D.H., Verleysen, M., 2015. #' Multi-scale similarities in stochastic neighbour embedding: Reducing #' dimensionality while preserving both local and global structure. #' Neurocomputing 169, 246-261. https://doi.org/10.1016/j.neucom.2014.12.095 #' #' @param object of class dimRedResult #' @param weight the weight function used, one of \code{c("inv", "log", "log10")} #' @family Quality scores for dimensionality reduction #' @aliases AUC_lnK_R_NX #' @export setMethod( "AUC_lnK_R_NX", "dimRedResult", function(object, weight = "inv") { rnx <- R_NX(object) weight <- match.arg(weight, c("inv", "ln", "log", "log10")) switch( weight, inv = auc_ln_k_inv(rnx), log = auc_log_k(rnx), ln = auc_log_k(rnx), log10 = auc_log10_k(rnx), stop("wrong parameter for weight") ) } ) auc_ln_k_inv <- function(rnx) { Ks <- seq_along(rnx) return (sum(rnx / Ks) / sum(1 / Ks)) } auc_log_k <- function(rnx) { Ks <- seq_along(rnx) return (sum(rnx * log(Ks)) / sum(log(Ks))) } auc_log10_k <- function(rnx) { Ks <- seq_along(rnx) return (sum(rnx * log10(Ks)) / sum(log10(Ks))) } #' @export setGeneric( "total_correlation", function(object, ...) standardGeneric("total_correlation"), valueClass = "numeric" ) #' Method total_correlation #' #' Calculate the total correlation of the variables with the axes to #' assess the quality of a dimensionality reduction. #' #' @param object of class dimRedResult #' @param naxes the number of axes to use for optimization. #' @param cor_method the correlation method to use. #' @param is.rotated if FALSE the object is rotated. #' #' @family Quality scores for dimensionality reduction #' @aliases total_correlation #' @export setMethod( "total_correlation", "dimRedResult", function(object, naxes = ndims(object), cor_method = "pearson", is.rotated = FALSE){ if (!object@has.org.data) stop("object requires original data") if (length(naxes) != 1 || naxes < 1 || naxes > ncol(object@data@data)) stop("naxes must specify the numbers of axes to optimize for, ", "i.e. a single integer between 1 and ncol(object@data@data)") ## try to partially match cor_method: cor_methods <- c("pearson", "kendall", "spearman") cor_method <- cor_methods[pmatch(cor_method, cor_methods)] if (is.na(cor_method)) stop("cor_method must match one of ", "'pearson', 'kendall', or 'spearman', ", "at least partially.") if (!is.rotated) { rotated_result <- maximize_correlation( object, naxes, cor_method ) } else { rotated_result <- object } res <- 0 for (i in 1:naxes) res <- res + mean(correlate( rotated_result@data@data, rotated_result@org.data, cor_method )[i, ] ^ 2) return(res) } ) setGeneric("cophenetic_correlation", function(object, ...) standardGeneric("cophenetic_correlation"), valueClass = "numeric") #' Method cophenetic_correlation #' #' Calculate the correlation between the distance matrices in high and #' low dimensioal space. #' #' @param object of class dimRedResult #' @param d the distance function to use. #' @param cor_method The correlation method. 
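#'   One of \code{"pearson"}, \code{"kendall"} or \code{"spearman"};
#'   partial matching is applied.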
#' @aliases cophenetic_correlation #' @family Quality scores for dimensionality reduction #' @export setMethod( "cophenetic_correlation", "dimRedResult", function(object, d = stats::dist, cor_method = "pearson"){ ## if (missing(d)) d <- stats::dist ## if (missing(cor_method)) cor_method <- "pearson" if (!object@has.org.data) stop("object requires original data") cor_methods <- c("pearson", "kendall", "spearman") cor_method <- cor_methods[pmatch(cor_method, cor_methods)] if (is.na(cor_method)) stop("cor_method must match one of ", "'pearson', 'kendall', or 'spearman', ", "at least partially.") d.org <- d(object@org.data) d.emb <- d(object@data@data) if (!inherits(d.org, "dist") || !inherits(d.emb, "dist")) stop("d must return a dist object") res <- correlate( d(object@org.data), d(object@data@data), cor_method ) return(res) } ) #' @export setGeneric( "distance_correlation", function(object) standardGeneric("distance_correlation"), valueClass = "numeric" ) #' Method distance_correlation #' #' Calculate the distance correlation between the distance matrices in #' high and low dimensioal space. #' #' @param object of class dimRedResult #' @aliases distance_correlation #' @family Quality scores for dimensionality reduction #' @export setMethod( "distance_correlation", "dimRedResult", function(object){ if (!object@has.org.data) stop("object requires original data") chckpkg("energy") energy::dcor(object@org.data, object@data@data) } ) #' @export setGeneric( "reconstruction_rmse", function(object) standardGeneric("reconstruction_rmse"), valueClass = "numeric" ) #' Method reconstruction_rmse #' #' Calculate the reconstruction root mean squared error a dimensionality reduction, the method must have an inverse mapping. #' #' @param object of class dimRedResult #' @aliases reconstruction_rmse #' @family Quality scores for dimensionality reduction #' @export setMethod( "reconstruction_rmse", "dimRedResult", function(object){ if (!object@has.org.data) stop("object requires original data") if (!object@has.inverse) stop("object requires an inverse function") recon <- object@inverse(object@data) rmse(recon@data, object@org.data) } ) #' @rdname quality #' #' @export dimRedQualityList <- function () { return(c("Q_local", "Q_global", "mean_R_NX", "AUC_lnK_R_NX", "total_correlation", "cophenetic_correlation", "distance_correlation", "reconstruction_rmse")) } #' @export setGeneric( "R_NX", function(object, ...) standardGeneric("R_NX"), valueClass = "numeric" ) #' Method R_NX #' #' Calculate the R_NX score from Lee et. al. (2013) which shows the neighborhood #' preservation for the Kth nearest neighbors, corrected for random point #' distributions and scaled to range [0, 1]. #' @param object of class dimRedResult #' @param ndim the number of dimensions to take from the embedded data. #' @family Quality scores for dimensionality reduction #' @aliases R_NX #' @export setMethod( "R_NX", "dimRedResult", function(object, ndim = getNDim(object)) { chckpkg("coRanking") if (!object@has.org.data) stop("object requires original data") Q <- coRanking::coranking(object@org.data, object@data@data[, seq_len(ndim), drop = FALSE]) nQ <- nrow(Q) N <- nQ + 1 Qnx <- diag(apply(apply(Q, 2, cumsum), 1, cumsum)) / seq_len(nQ) / N Rnx <- ((N - 1) * Qnx - seq_len(nQ)) / (N - 1 - seq_len(nQ)) Rnx[-nQ] } ) #' @export setGeneric( "Q_NX", function(object, ...) standardGeneric("Q_NX"), valueClass = "numeric" ) #' Method Q_NX #' #' Calculate the Q_NX score (Chen & Buja 2006, the notation in the #' publication is M_k). 
Which is the fraction of points that remain inside #' the same K-ary neighborhood in high and low dimensional space. #' #' @param object of class dimRedResult #' @family Quality scores for dimensionality reduction #' @aliases Q_NX #' @export setMethod( "Q_NX", "dimRedResult", function(object) { chckpkg("coRanking") Q <- coRanking::coranking(object@org.data, object@data@data) nQ <- nrow(Q) N <- nQ + 1 Qnx <- diag(apply(apply(Q, 2, cumsum), 1, cumsum)) / seq_len(nQ) / N Qnx } ) #'@export setGeneric( "LCMC", function(object, ...) standardGeneric("LCMC"), valueClass = "numeric" ) #' Method LCMC #' #' Calculates the Local Continuity Meta Criterion, which is #' \code{\link{Q_NX}} adjusted for random overlap inside the K-ary #' neighborhood. #' #' @param object of class dimRedResult #' @family Quality scores for dimensionality reduction #' @aliases LCMC #' @export setMethod( "LCMC", "dimRedResult", function(object) { chckpkg("coRanking") Q <- coRanking::coranking(object@org.data, object@data@data) nQ <- nrow(Q) N <- nQ + 1 lcmc <- diag(apply(apply(Q, 2, cumsum), 1, cumsum)) / seq_len(nQ) / N - seq_len(nQ) / nQ lcmc } ) rnx2qnx <- function(rnx, K = seq_along(rnx), N = length(rnx) + 1) { (rnx * (N - 1 - K) + K) / (N - 1) } qnx2rnx <- function(qnx, K = seq_along(qnx), N = length(qnx) + 1) { ((N - 1) * qnx - K) / (N - 1 - K) } #' @export setGeneric( "reconstruction_error", function(object, ...) standardGeneric("reconstruction_error"), valueClass = "numeric" ) #' Method reconstruction_error #' #' Calculate the error using only the first \code{n} dimensions of the embedded #' data. \code{error_fun} can either be one of \code{c("rmse", "mae")} to #' calculate the root mean square error or the mean absolute error respectively, #' or a function that takes to equally sized vectors as input and returns a #' single number as output. #' #' @param object of class dimRedResult #' @param n a positive integer or vector of integers \code{<= ndims(object)} #' @param error_fun a function or string indicating an error function, if #' indication a function it must take to matrices of the same size and return #' a scalar. 
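#'   For example, a (hypothetical) maximum absolute error could be passed as
#'   \code{error_fun = function(x1, x2) max(abs(x1 - x2))}.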
#' @return a vector of number with the same length as \code{n} with the #' #' @examples #' \dontrun{ #' ir <- loadDataSet("Iris") #' ir.drr <- embed(ir, "DRR", ndim = ndims(ir)) #' ir.pca <- embed(ir, "PCA", ndim = ndims(ir)) #' #' rmse <- data.frame( #' rmse_drr = reconstruction_error(ir.drr), #' rmse_pca = reconstruction_error(ir.pca) #' ) #' #' matplot(rmse, type = "l") #' plot(ir) #' plot(ir.drr) #' plot(ir.pca) #' } #' @author Guido Kraemer #' @family Quality scores for dimensionality reduction #' @aliases reconstruction_error #' @export setMethod( "reconstruction_error", c("dimRedResult"), function (object, n = seq_len(ndims(object)), error_fun = "rmse") { if (any(n > ndims(object))) stop("n > ndims(object)") if (any(n < 1)) stop("n < 1") ef <- if (inherits(error_fun, "character")) { switch( error_fun, rmse = rmse, mae = mae ) } else if (inherits(error_fun, "function")) { error_fun } else { stop("error_fun must be a string or function, see documentation for details") } res <- numeric(length(n)) org <- getData(getOrgData(object)) for (i in seq_along(n)) { rec <- getData(inverse( object, getData(getDimRedData(object))[, seq_len(n[i]), drop = FALSE] )) res[i] <- ef(org, rec) } res } ) rmse <- function (x1, x2) sqrt(mean((x1 - x2) ^ 2)) mae <- function (x1, x2) mean(abs(x1 - x2)) dimRed/R/l1pca.R0000644000176200001440000001416513424014250013004 0ustar liggesusers#' Principal Component Analysis with L1 error. #' #' S4 Class implementing PCA with L1 error. #' #' PCA transforms the data so that the L2 reconstruction error is minimized or #' the variance of the projected data is maximized. This is sensitive to #' outliers, L1 PCA minimizes the L1 reconstruction error or maximizes the sum #' of the L1 norm of the projected observations. #' #' @template dimRedMethodSlots #' #' @template dimRedMethodGeneralUsage #' #' @section Parameters: #' PCA can take the following parameters: #' \describe{ #' \item{ndim}{The number of output dimensions.} #' \item{center}{logical, should the data be centered, defaults to \code{TRUE}.} #' \item{scale.}{logical, should the data be scaled, defaults to \code{FALSE}.} #' \item{fun}{character or function, the method to apply, see the \code{pcaL1} package} #' \item{\ldots}{other parameters for \code{fun}} #' } #' #' @section Implementation: #' #' Wraps around the different methods is the \code{pcaL1} package. Because PCA #' can be reduced to a simple rotation, forward and backward projection #' functions are supplied. #' #' @references #' #' Park, Y.W., Klabjan, D., 2016. Iteratively Reweighted Least Squares #' Algorithms for L1-Norm Principal Component Analysis, in: Data Mining (ICDM), #' 2016 IEEE 16th International Conference On. IEEE, pp. 430-438. #' #' @examples #' if(requireNamespace("pcaL1", quietly = TRUE)) { #' dat <- loadDataSet("Iris") #' #' ## using the S4 Class #' pca_l1 <- PCA_L1() #' emb <- pca_l1@fun(dat, pca_l1@stdpars) #' #' ## using embed() #' emb2 <- embed(dat, "PCA_L1") #' #' plot(emb, type = "2vars") #' plot(emb@inverse(emb@data), type = "3vars") #' } #' #' @include dimRedResult-class.R #' @include dimRedMethod-class.R #' @family dimensionality reduction methods #' @export PCA_L1 #' @exportClass PCA_L1 PCA_L1 <- setClass( "PCA_L1", contains = "dimRedMethod", prototype = list( stdpars = list(ndim = 2, center = TRUE, scale. 
= FALSE, fun = "awl1pca", projections = "l1"), fun = function (data, pars, keep.org.data = TRUE) { chckpkg("pcaL1") ndim <- pars$ndim orgnames <- colnames(data@data) newnames <- paste0("PC", seq_len(ndim)) meta <- data@meta orgdata <- if (keep.org.data) data@data else NULL data <- data@data fun2 <- if(!is.function(pars$fun)) { get(pars$fun, asNamespace("pcaL1")) } else { pars$fun } ce <- if (is.numeric(pars$center)) { if (length(pars$center) != dim(data)[2]) error("center must be logical or have the same length as the data dimensions") pars$center } else if (is.logical(pars$center)) { if (pars$center) colMeans(data) else FALSE } sc <- if (is.numeric(pars$scale.)) { if (length(pars$scale.) != dim(data)[2]) stop("center must be logical or have the same length as the data dimensions") pars$scale. } else if (is.logical(pars$scale.)) { if (pars$scale.) apply(data, 2, sd) else FALSE } if(!(pars$center == FALSE && pars$scale. == FALSE)) data <- scale(data, ce, sc) pars$center <- NULL pars$scale. <- NULL pars$ndim <- NULL pars$fun <- NULL res <- do.call( fun2, c(list(X = data, projDim = ndim, center = FALSE), pars) ) ## evaluate results here for functions data <- res$scores colnames(data) <- paste0("PC", seq_len(ndim)) rot <- res$loadings[, seq_len(ndim), drop = FALSE] dimnames(rot) <- list(orgnames, newnames) rerot <- t(rot) appl <- function(x) { appl.meta <- if (inherits(x, "dimRedData")) x@meta else data.frame() proj <- if (inherits(x, "dimRedData")) x@data else x if (ncol(proj) != ncol(orgdata)) stop("x must have the same number of dimensions ", "as the original data") if (ce[1] != FALSE) proj <- t(apply(proj, 1, function(x) x - ce)) if (sc[1] != FALSE) proj <- t(apply(proj, 1, function(x) x / sc)) proj <- if (pars$projections == "l1") { tmp <- pcaL1::l1projection(proj, rot)$scores colnames(tmp) <- paste0("PC", seq_len(ndim)) tmp } else if (pars$projections == "l2") { proj %*% rot } else { stop("projections must be eiter 'l1' or 'l2'") } proj <- new("dimRedData", data = proj, meta = appl.meta) return(proj) } inv <- function(x) { appl.meta <- if (inherits(x, "dimRedData")) x@meta else data.frame() proj <- if (inherits(x, "dimRedData")) x@data else x if (ncol(proj) > ncol(data)) stop("x must have less or equal number of dimensions ", "as the original data") d <- ncol(proj) reproj <- proj %*% rerot[seq_len(d), ] if (sc[1] != FALSE) reproj <- t(apply(reproj, 1, function(x) x * sc)) if (ce[1] != FALSE) reproj <- t(apply(reproj, 1, function(x) x + ce)) colnames(reproj) <- colnames(orgdata) reproj <- new("dimRedData", data = reproj, meta = appl.meta) return(reproj) } res <- new( "dimRedResult", data = new("dimRedData", data = data, meta = meta), org.data = orgdata, apply = appl, inverse = inv, has.org.data = keep.org.data, has.apply = TRUE, has.inverse = TRUE, method = "PCA_L1", pars = pars ) return(res) }) ) dimRed/R/plot.R0000644000176200001440000001665413371631672013005 0ustar liggesusers#' Plotting of dimRed* objects #' #' Plots a object of class dimRedResult and dimRedData. For the #' documentation of the plotting function in base see here: #' \code{\link{plot.default}}. #' #' Plotting functions for the classes usind in \code{dimRed}. they are #' intended to give a quick overview over the results, so they are #' somewhat inflexible, e.g. it is hard to modify color scales or #' plotting parameters. #' #' If you require more control over plotting, it is better to convert #' the object to a \code{data.frame} first and use the standard #' functions for plotting. 
#' #' @param x dimRedResult/dimRedData class, e.g. output of #' embedded/loadDataSet #' @param y Ignored #' @param type plot type, one of \code{c("pairs", "parpl", "2vars", #' "3vars", "3varsrgl")} #' @param col the columns of the meta slot to use for coloring, can be #' referenced as the column names or number of x@data #' @param vars the axes of the embedding to use for plotting #' @param ... handed over to the underlying plotting function. #' #' @examples #' scurve = loadDataSet("3D S Curve") #' plot(scurve, type = "pairs", main = "pairs plot of S curve") #' plot(scurve, type = "parpl") #' plot(scurve, type = "2vars", vars = c("y", "z")) #' plot(scurve, type = "3vars") #' #' @include mixColorSpaces.R #' @include dimRedData-class.R #' @importFrom graphics plot #' #' @aliases plot.dimRed #' @export setGeneric( "plot", function(x, y, ...) standardGeneric("plot"), useAsDefault = graphics::plot ) #' @describeIn plot Ploting of dimRedData objects #' @aliases plot.dimRedData #' @export setMethod( f = "plot", signature = c("dimRedData"), definition = function(x, type = "pairs", vars = seq_len(ncol(x@data)), col = seq_len(min(3, ncol(x@meta))), ...) { cols <- colorize(x@meta[, col, drop = FALSE]) switch( type, "pairs" = { chckpkg("graphics") graphics::pairs(x@data[, vars], col = cols, ... ) }, "parpl" = { chckpkg("MASS") MASS::parcoord(x@data[, vars], col = cols, ... ) }, "2vars" = { chckpkg("graphics") graphics::plot(x@data[, vars[1:2]], col = cols, ... ) }, "3vars" = { chckpkg("scatterplot3d") scatterplot3d::scatterplot3d(x@data[, vars[1:3]], color = cols, ...) }, "3varsrgl" = { chckpkg("rgl") rgl::plot3d(x@data[, vars[1:3]], col = cols, ... ) }, stop("wrong argument to plot.dimRedData") ) } ) #' @describeIn plot Ploting of dimRedResult objects. #' @aliases plot.dimRedResult #' @export setMethod( f = "plot", signature = c("dimRedResult"), definition = function (x, type = "pairs", vars = seq_len(ncol(x@data@data)), col = seq_len(min(3, ncol(x@data@meta))), ...) { plot(x = x@data, type = type, vars = vars, col = col, ...) } ) #' plot_R_NX #' #' Plot the R_NX curve for different embeddings. Takes a list of #' \code{\link{dimRedResult}} objects as input. #' Also the Area under the curve values are computed for a weighted K #' (see \link{AUC_lnK_R_NX} for details) and appear in the legend. #' #' @param x a list of \code{\link{dimRedResult}} objects. The names of the list #' will appear in the legend with the AUC_lnK value. #' @param ndim the number of dimensions, if \code{NA} the original number of #' embedding dimensions is used, can be a vector giving the embedding #' dimensionality for each single list element of \code{x}. 
#' @param weight the weight function used for K when calculating the AUC, one of #' \code{c("inv", "log", "log10")} #' @family Quality scores for dimensionality reduction #' @return A ggplot object, the design can be changed by appending #' \code{theme(...)} #' #' @examples #' #' ## define which methods to apply #' embed_methods <- c("Isomap", "PCA") #' ## load test data set #' data_set <- loadDataSet("3D S Curve", n = 200) #' ## apply dimensionality reduction #' data_emb <- lapply(embed_methods, function(x) embed(data_set, x)) #' names(data_emb) <- embed_methods #' ## plot the R_NX curves: #' plot_R_NX(data_emb) + #' ggplot2::theme(legend.title = ggplot2::element_blank(), #' legend.position = c(0.5, 0.1), #' legend.justification = c(0.5, 0.1)) #' #' @export plot_R_NX <- function(x, ndim = NA, weight = "inv") { chckpkg("ggplot2") chckpkg("tidyr") chckpkg("scales") lapply( x, function(x) if (!inherits(x, "dimRedResult")) stop("x must be a list and ", "all items must inherit from 'dimRedResult'") ) rnx <- mapply(function(x, ndim) if(is.na(ndim)) R_NX(x) else R_NX(x, ndim), x = x, ndim = ndim) weight <- match.arg(weight, c("inv", "ln", "log", "log10")) w_fun <- switch( weight, inv = auc_ln_k_inv, log = auc_log_k, ln = auc_log_k, log10 = auc_log10_k, stop("wrong parameter for weight") ) auc <- apply(rnx, 2, w_fun) df <- as.data.frame(rnx) df$K <- seq_len(nrow(df)) qnxgrid <- expand.grid(K = df$K, rnx = seq(0.1, 0.9, by = 0.1)) ## TODO: FIND OUT WHY THIS AS IN THE PUBLICATION BUT IS WRONG! qnxgrid$qnx <- rnx2qnx(qnxgrid$rnx, K = qnxgrid$K, N = nrow(df)) # qnxgrid$rnx_group <- factor(qnxgrid$rnx) df <- tidyr::gather_(df, key_col = "embedding", value_col = "R_NX", names(x)) ggplot2::ggplot(df) + ggplot2::geom_line(ggplot2::aes_string(y = "R_NX", x = "K", color = "embedding")) + ## TODO: find out if this is wrong: ## ggplot2::geom_line(data = qnxgrid, ## mapping = ggplot2::aes_string(x = "K", y = "qnx", ## group = "rnx_group"), ## linetype = 2, ## size = 0.1) + ggplot2::geom_line(data = qnxgrid, mapping = ggplot2::aes_string(x = "K", y = "rnx", group = "rnx_group"), linetype = 3, size = 0.1) + ggplot2::scale_x_log10( labels = scales::trans_format("log10", scales::math_format()), expand = c(0, 0) ) + ggplot2::scale_y_continuous(expression(R[NX]), limits = c(0, 1), expand = c(0, 0)) + ggplot2::annotation_logticks(sides = "b") + ggplot2::scale_color_discrete( breaks = names(x), labels = paste(format(auc, digits = 3), names(x))) + ggplot2::labs(title = paste0( "R_NX vs. K", if (length(ndim) == 1 && !is.na(ndim)) paste0(", d = ", ndim) else "" )) + ggplot2::theme_classic() } dimRed/R/embed.R0000644000176200001440000001217513371631672013075 0ustar liggesusers#' dispatches the different methods for dimensionality reduction #' #' wraps around all dimensionality reduction functions. #' #' Method must be one of \code{\link{dimRedMethodList}()}, partial matching #' is performed. All parameters start with a dot, to avoid clashes #' with partial argument matching (see the R manual section 4.3.2), if #' there should ever occur any clashes in the arguments, call the #' function with all arguments named, e.g. \code{embed(.data = dat, #' .method = "mymethod", .d = "some parameter")}. #' #' @param .data object of class \code{\link{dimRedData}}, will be converted to #' be of class \code{\link{dimRedData}} if necessary; see examples for #' details. #' @param .method character vector naming one of the dimensionality reduction #' techniques. 
#' @param .mute a character vector containing the elements you want to mute #' (\code{c("message", "output")}), defaults to \code{character(0)}. #' @param .keep.org.data \code{TRUE}/\code{FALSE} keep the original data. #' @param ... the parameters, internally passed as a list to the dimensionality #' reduction method as \code{pars = list(...)} #' @return an object of class \code{\link{dimRedResult}} #' #' @examples #' ## embed a data.frame using a formula: #' as.data.frame( #' embed(Species ~ Sepal.Length + Sepal.Width + Petal.Length + Petal.Width, #' iris, "PCA") #' ) #' #' ## embed a data.frame and return a data.frame #' as.data.frame(embed(iris[, 1:4], "PCA")) #' #' ## embed a matrix and return a data.frame #' as.data.frame(embed(as.matrix(iris[, 1:4]), "PCA")) #' #' \dontrun{ #' ## embed dimRedData objects #' embed_methods <- dimRedMethodList() #' quality_methods <- dimRedQualityList() #' dataset <- loadDataSet("Iris") #' #' quality_results <- matrix(NA, length(embed_methods), length(quality_methods), #' dimnames = list(embed_methods, quality_methods)) #' embedded_data <- list() #' #' for (e in embed_methods) { #' message("embedding: ", e) #' embedded_data[[e]] <- embed(dataset, e, .mute = c("message", "output")) #' for (q in quality_methods) { #' message(" quality: ", q) #' quality_results[e, q] <- tryCatch( #' quality(embedded_data[[e]], q), #' error = function(e) NA #' ) #' } #' } #' #' print(quality_results) #' } #' @export setGeneric("embed", function(.data, ...) standardGeneric("embed"), valueClass = "dimRedResult") #' @describeIn embed embed a data.frame using a formula. #' @param .formula a formula, see \code{\link{as.dimRedData}}. #' @export setMethod( "embed", "formula", function(.formula, .data, .method = dimRedMethodList(), .mute = character(0), .keep.org.data = TRUE, ...) { if (!is.data.frame(.data)) stop(".data must be a data.frame") .data <- as.dimRedData(.formula, .data) embed(.data, .method, .mute, .keep.org.data, ...) } ) #' @describeIn embed Embed anything as long as it can be coerced to #' \code{\link{dimRedData}}. #' @export setMethod( "embed", "ANY", function(.data, .method = dimRedMethodList(), .mute = character(0), .keep.org.data = TRUE, ...) { embed(as(.data, "dimRedData"), .method, .mute, .keep.org.data, ...) } ) #' @describeIn embed Embed a dimRedData object #' @export setMethod( "embed", "dimRedData", function(.data, .method = dimRedMethodList(), .mute = character(0), #c("message", "output"), .keep.org.data = TRUE, ...) 
{ .method <- if (all(.method == dimRedMethodList())) "PCA" else match.arg(.method) methodObject <- getMethodObject(.method) args <- list( data = as(.data, "dimRedData"), keep.org.data = .keep.org.data ) args$pars <- matchPars(methodObject, list(...)) devnull <- if (Sys.info()["sysname"] != "Windows") "/dev/null" else "NUL" if ("message" %in% .mute){ devnull1 <- file(devnull, "wt") sink(devnull1, type = "message") on.exit({ sink(file = NULL, type = "message") close(devnull1) }, add = TRUE) } if ("output" %in% .mute) { devnull2 <- file(devnull, "wt") sink(devnull2, type = "output") on.exit({ sink() close(devnull2) }, add = TRUE) } do.call(methodObject@fun, args) } ) getMethodObject <- function (method) { ## switch( ## method, ## graph_kk = kamada_kawai, ## graph_drl = drl, ## graph_fr = fruchterman_reingold, ## drr = drr, ## isomap = isomap, ## diffmap = diffmap, ## tsne = tsne, ## nmds = nmds, ## mds = mds, ## ica = fastica, ## pca = pca, ## lle = lle, ## loe = loe, ## soe = soe, ## leim = leim, ## kpca = kpca ## ) method <- match.arg(method, dimRedMethodList()) do.call(method, list()) } dimRed/R/dimRedResult-class.R0000644000176200001440000001544413371631672015531 0ustar liggesusers#' @include misc.R #' @include dimRedData-class.R NULL #' Class "dimRedResult" #' #' A class to hold the results of of a dimensionality reduction. #' #' @slot data Output data of class dimRedData. #' @slot org.data original data, a matrix. #' @slot apply a function to apply the method to out-of-sampledata, #' may not exist. #' @slot inverse a function to calculate the original coordinates from #' reduced space, may not exist. #' @slot has.org.data logical, if the original data is included in the object. #' @slot has.apply logical, if a forward method is exists. #' @slot has.inverse logical if an inverse method exists. #' @slot method saves the method used. #' @slot pars saves the parameters used. #' @slot other.data other data produced by the method, e.g. a distance matrix. #' #' @examples #' ## Create object by embedding data #' iris.pca <- embed(loadDataSet("Iris"), "PCA") #' #' ## Convert the result to a data.frame #' head(as(iris.pca, "data.frame")) #' head(as.data.frame(iris.pca)) #' #' ## There are no nameclashes to avoid here: #' head(as.data.frame(iris.pca, #' org.data.prefix = "", #' meta.prefix = "", #' data.prefix = "")) #' #' ## Print it more or less nicely: #' print(iris.pca) #' #' ## Get the embedded data as a dimRedData object: #' getDimRedData(iris.pca) #' #' ## Get the original data including meta information: #' getOrgData(iris.pca) #' #' @family dimRedResult #' @export dimRedResult #' @exportClass dimRedResult dimRedResult <- setClass( "dimRedResult", slots = c( data = "dimRedData", org.data = "matrix", apply = "function", inverse = "function", has.org.data = "logical", has.apply = "logical", has.inverse = "logical", method = "character", pars = "list", other.data = "list" ), prototype = list( data = new("dimRedData"), org.data = matrix(numeric(0), 0, 0), apply = function(x) NA, inverse = function(x) NA, has.org.data = FALSE, has.apply = FALSE, has.inverse = FALSE, method = "", pars = list(), other.data = list() ) ) setAs( from = "dimRedResult", to = "data.frame", def = function(from){ if (from@has.org.data) { org.data <- from@org.data names(org.data) <- paste("org", names(org.data), sep = ".") cbind(as(from@data, "data.frame"), as.data.frame(org.data)) } else { as(from@data, "data.frame") } } ) #' @importFrom stats predict #' @export setGeneric( "predict", function(object, ...) 
standardGeneric("predict"), useAsDefault = stats::predict ) #' @describeIn dimRedResult apply a trained method to new data, does not work #' with all methods, will give an error if there is no \code{apply}. #' In some cases the apply function may only be an approximation. #' @param xnew new data, of type \code{\link{dimRedData}} #' #' @export setMethod(f = "predict", signature = "dimRedResult", definition = function(object, xnew) { if (object@has.apply) object@apply(xnew) else stop("object does not have an apply function") }) #' @export setGeneric( "inverse", function(object, ...) standardGeneric("inverse") ) #' @describeIn dimRedResult inverse transformation of embedded data, does not #' work with all methods, will give an error if there is no \code{inverse}. #' In some cases the apply function may only be an approximation. #' @param ynew embedded data, of type \code{\link{dimRedData}} #' #' @aliases inverse #' @export setMethod(f = "inverse", signature = c("dimRedResult"), definition = function(object, ynew) { if (object@has.inverse) object@inverse(ynew) else stop("object does not have an inverse function") }) #' @param x Of class \code{dimRedResult} #' @param org.data.prefix Prefix for the columns of the org.data slot. #' @param meta.prefix Prefix for the columns of \code{x@@data@@meta}. #' @param data.prefix Prefix for the columns of \code{x@@data@@data}. #' #' @describeIn dimRedResult convert to \code{data.frame} #' @export setMethod(f = "as.data.frame", signature = c("dimRedResult"), definition = function(x, org.data.prefix = "org.", meta.prefix = "meta.", data.prefix = "") { tmp <- list() if (nrow(x@data@meta) > 0){ tmp$meta <- as.data.frame(x@data@meta) names(tmp$meta) <- paste0(meta.prefix, colnames(x@data@meta)) } tmp$data <- as.data.frame(x@data@data) names(tmp$data) <- paste0(data.prefix, colnames(x@data@data)) if (x@has.org.data){ tmp$org.data <- as.data.frame(x@org.data) names(tmp$org.data) <- paste0(org.data.prefix, colnames(x@org.data)) } names(tmp) <- NULL data.frame(tmp, stringsAsFactors = FALSE) }) #' @param object Of class \code{dimRedResult} #' @describeIn dimRedResult Get the parameters with which the method #' was called. #' @export setMethod( f = "getPars", signature = "dimRedResult", definition = function (object) { object@pars } ) #' @describeIn dimRedResult Get the number of embedding dimensions. #' @export setMethod( f = "getNDim", signature = "dimRedResult", definition = function (object) { result <- getPars(object)$ndim if(is.null(result)) dim(object@data@data)[2] else result } ) #' @describeIn dimRedResult Method for printing. #' @import utils #' @export setMethod( f = "print", signature = "dimRedResult", definition = function(x) { cat("Method:\n") cat(x@method, "\n") cat("Parameters:\n") utils::str(x@pars) } ) #' @describeIn dimRedResult Get the original data and meta.data #' @export setMethod( f = "getOrgData", signature = "dimRedResult", definition = function(object) { return(new("dimRedData", data = object@org.data, meta = object@data@meta)) } ) #' @describeIn dimRedResult Get the embedded data #' @export setMethod( f = "getDimRedData", signature = "dimRedResult", definition = function(object) { return(object@data) } ) #' @describeIn dimRedResult Extract the number of embedding dimensions. 
#' #' @examples #' ## Get the number of variables: #' ndims(iris.pca) #' #' @export setMethod( "ndims", "dimRedResult", function(object) ncol(object@data@data) ) #' @describeIn dimRedResult Get other data produced by the method #' @export setMethod( f = "getOtherData", signature = "dimRedResult", definition = function(object) object@other.data ) dimRed/R/fastica.R0000644000176200001440000000707113371631672013432 0ustar liggesusers#' Independent Component Analysis #' #' An S4 Class implementing the FastICA algorithm for Indepentend #' Component Analysis. #' #' ICA is used for blind signal separation of different sources. It is #' a linear Projection. #' #' @template dimRedMethodSlots #' #' @template dimRedMethodGeneralUsage #' #' @section Parameters: #' FastICA can take the following parameters: #' \describe{ #' \item{ndim}{The number of output dimensions. Defaults to \code{2}} #' } #' #' @section Implementation: #' Wraps around \code{\link[fastICA]{fastICA}}. FastICA uses a very #' fast approximation for negentropy to estimate statistical #' independences between signals. Because it is a simple #' rotation/projection, forward and backward functions can be given. #' #' @references #' #' Hyvarinen, A., 1999. Fast and robust fixed-point algorithms for independent #' component analysis. IEEE Transactions on Neural Networks 10, 626-634. #' https://doi.org/10.1109/72.761722 #' #' @examples #' dat <- loadDataSet("3D S Curve") #' #' ## use the S4 Class directly: #' fastica <- FastICA() #' emb <- fastica@fun(dat, pars = list(ndim = 2)) #' #' ## simpler, use embed(): #' emb2 <- embed(dat, "FastICA", ndim = 2) #' #' #' plot(emb@data@data) #' #' @include dimRedResult-class.R #' @include dimRedMethod-class.R #' @family dimensionality reduction methods #' @export FastICA #' @exportClass FastICA FastICA <- setClass( "FastICA", contains = "dimRedMethod", prototype = list( stdpars = list(ndim = 2), fun = function (data, pars, keep.org.data = TRUE) { chckpkg("fastICA") meta <- data@meta orgdata <- if (keep.org.data) data@data else NULL orgdata.colmeans <- colMeans(orgdata) indata <- data@data res <- fastICA::fastICA(indata, n.comp = pars$ndim, method = "C") outdata <- res$S colnames(outdata) <- paste0("ICA", 1:ncol(outdata)) appl <- function(x){ appl.meta <- if (inherits(x, "dimRedData")) x@meta else matrix(numeric(0), 0, 0) proj <- if (inherits(x, "dimRedData")) x@data else x out <- scale(proj, center = orgdata.colmeans, scale = FALSE) %*% res$K %*% res$W colnames(out) <- paste0("ICA", 1:ncol(out)) return(new("dimRedData", data = out, meta = appl.meta)) } inv <- function(x){ appl.meta <- if (inherits(x, "dimRedData")) x@meta else matrix(numeric(0), 0, 0) proj <- if (inherits(x, "dimRedData")) x@data else x out <- scale(proj %*% res$A[1:ncol(proj), ], center = -orgdata.colmeans, scale = FALSE) reproj <- new("dimRedData", data = out, meta = appl.meta) return(reproj) } return(new( "dimRedResult", data = new("dimRedData", data = outdata, meta = meta), org.data = orgdata, has.org.data = keep.org.data, apply = appl, inverse = inv, has.apply = TRUE, has.inverse = TRUE, method = "FastICA", pars = pars )) }) ) dimRed/R/mds.R0000644000176200001440000001131413371631672012576 0ustar liggesusers#' Metric Dimensional Scaling #' #' An S4 Class implementing classical scaling (MDS). #' #' MDS tries to maintain distances in high- and low-dimensional space, #' it has the advantage over PCA that arbitrary distance functions can #' be used, but it is computationally more demanding. 
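## Sketch of the forward and backward maps returned by the FastICA method
## above: the embedding is a linear rotation/projection, so back-projecting
## the 2-d embedding gives a rank-2 approximation of the 3-d input
## (requires the fastICA package; values are illustrative):
if (FALSE) {
  dat <- loadDataSet("3D S Curve", n = 200)
  emb <- embed(dat, "FastICA", ndim = 2)
  rec <- inverse(emb, getDimRedData(emb))  # back-projection into 3 dimensions
  plot(rec@data[, 1:2])
}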
#' #' @template dimRedMethodSlots #' #' @template dimRedMethodGeneralUsage #' #' @section Parameters: #' MDS can take the following parameters: #' \describe{ #' \item{ndim}{The number of dimensions.} #' \item{d}{The function to calculate the distance matrix from the input coordinates, defaults to euclidean distances.} #' } #' #' @section Implementation: #' #' Wraps around \code{\link[stats]{cmdscale}}. The implementation also #' provides an out-of-sample extension which is not completely #' optimized yet. #' #' @references #' #' Torgerson, W.S., 1952. Multidimensional scaling: I. Theory and method. #' Psychometrika 17, 401-419. https://doi.org/10.1007/BF02288916 #' #' @examples #' \dontrun{ #' dat <- loadDataSet("3D S Curve") #' #' ## Use the S4 Class directly: #' mds <- MDS() #' emb <- mds@fun(dat, mds@stdpars) #' #' ## use embed(): #' emb2 <- embed(dat, "MDS", d = function(x) exp(stats::dist(x))) #' #' #' plot(emb, type = "2vars") #' plot(emb2, type = "2vars") #' } #' @include dimRedResult-class.R #' @include dimRedMethod-class.R #' @family dimensionality reduction methods #' @export MDS #' @exportClass MDS MDS <- setClass( "MDS", contains = "dimRedMethod", prototype = list( stdpars = list(d = stats::dist, ndim = 2), fun = function (data, pars, keep.org.data = TRUE) { ## meta <- data@meta orgdata <- if (keep.org.data) data@data else NULL indata <- data@data ## there are only efficient implementations for euclidean ## distances: extra efficient implementation for euclidean ## distances are possible, D is quared several times, it would be ## much faster to compute the squared distance right away. has.apply <- identical(all.equal(pars$d, dist), TRUE) # == TRUE # necessary, # because # all.equal # returns # TRUE or an # error # string!!!! D <- as.matrix(pars$d(indata)) if (has.apply) mD2 <- mean(D ^ 2) ## cmdscale square the matrix internally res <- stats::cmdscale(D, k = pars$ndim) outdata <- res D <- NULL ## Untested: remove that from environment before creating ## appl function, else it will stay in its environment ## forever appl <- if (!has.apply) function(x) NA else function(x) { appl.meta <- if (inherits(x, "dimRedData")) x@meta else data.frame() proj <- if (inherits(x, "dimRedData")) x@data else x ## double center new data with respect to old: TODO: optimize ## this method, according to the de Silva, Tenenbaum(2004) ## paper. Need an efficient method to calculate the distance ## matrices between different point sets and arbitrary ## distances. 
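## The block below appears to follow the standard landmark-MDS double
## centering: for new points a, b and training (landmark) points x,
##   K_ab = -1/2 * ( d(a,b)^2 - E_x[d(a,x)^2] - E_x[d(b,x)^2] + mean(D^2) ),
## where D are the pairwise distances of the training points; K is then
## eigendecomposed to place the new points in the embedding.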
Kab <- as.matrix(pars$d(proj) ^ 2) Exa <- colMeans(pdist2(indata, proj)) Kab <- sweep(Kab, 1, Exa) #, "-") Kab <- sweep(Kab, 2, Exa) #, "-") Kab <- -0.5 * (Kab + mD2) ## Eigenvalue decomposition tmp <- eigen(Kab, symmetric = TRUE) ev <- tmp$values[seq_len(pars$ndim)] evec <- tmp$vectors[, seq_len(pars$ndim), drop = FALSE] k1 <- sum(ev > 0) if (k1 < pars$ndim) { warning(gettextf("only %d of the first %d eigenvalues are > 0", k1, k), domain = NA) evec <- evec[, ev > 0, drop = FALSE] ev <- ev[ev > 0] } points <- evec * rep(sqrt(ev), each = nrow(proj)) dimnames(points) <- list(NULL, paste0("MDS", seq_len(ncol(points)))) new("dimRedData", data = points, meta = appl.meta) } colnames(outdata) <- paste0("MDS", seq_len(ncol(outdata))) return(new( "dimRedResult", data = new("dimRedData", data = outdata, meta = meta), org.data = orgdata, apply = appl, has.org.data = keep.org.data, has.apply = has.apply, method = "mds", pars = pars )) }) ) dimRed/R/rotate.R0000644000176200001440000001642513371631672013321 0ustar liggesusers ## rotate X in such a way that the values of Y have maximum squared ## correlation with the dimensions specified in axes. We optimize ## axes[1] first, then axes[2] without axes[1], ... ## we maximize the squared correlations of the original variables ## with the axis of the embeding and the final result is the sum_{axes} sum(squared(correlation(variables, axis))) setGeneric( "maximize_correlation", function(object, ...) standardGeneric("maximize_correlation"), valueClass = "dimRedResult" ) #' Maximize Correlation with the Axes #' #' Rotates the data in such a way that the correlation with the first #' \code{naxes} axes is maximized. #' #' Methods that do not use eigenvector decomposition, like t-SNE often #' do not align the data with axes according to the correlation of #' variables with the data. \code{maximize_correlation} uses the #' \code{\link[optimx]{optimx}} package to rotate the data in such a #' way that the original variables have maximum correlation with the #' embedding axes. #' #' @param object A dimRedResult object #' @param naxes the number of axes to optimize for. #' @param cor_method which correlation method to use #' #' @aliases maximize_correlation #' @export setMethod( "maximize_correlation", "dimRedResult", function(object, naxes = ncol(object@data@data), cor_method = "pearson"){ ## if (missing(naxes)) naxes <- ncol(object@data@data) ## if (missing(cor_method)) cor_method <- "pearson" if (!object@has.org.data) stop("object requires original data") if (length(naxes) != 1 || naxes < 1 || naxes > ncol(object@data@data)) stop("naxes must specify the numbers of axes to optimize for, ", "i.e. 
a single integer between 1 and ncol(object@data@data)") ## try to partially match cor_method: cor_method <- cor_method[pmatch(cor_method, c("pearson", "kendall", "spearman"))] if (is.na(cor_method)) stop("cor_method must match one of ", "'pearson', 'kendall', or 'spearman', ", "at least partially.") mcres <- .maximize_correlation(object@data@data, object@org.data, 1:naxes, cor_method) res <- object res@data@data <- mcres$rotated return(res) } ) .maximize_correlation <- function(X, Y, axes = 1:ncol(X), cor_method = "pearson"){ if (nrow(X) != nrow(Y)) stop("'X' and 'Y' must have the same number of rows") if (max(axes) > ncol(X)){ axes <- axes[ axes <= ncol(X) ] warning("'max(axes)' must be <= 'ncol(X)', removing some axes") } chckpkg("optimx") xndim <- ncol(X) without_axes <- integer(0) res <- list() for (axis in axes){ without_axes <- c(without_axes, axis) nplanes <- xndim - length(without_axes) planes <- matrix(NA, 2, nplanes) planes[1, ] <- axis planes[2, ] <- (1:xndim)[-without_axes] if (ncol(planes) == 0) break o <- optimx::optimx( par = rep(0, nplanes), fn = obj, method = "L-BFGS-B", lower = 0, upper = 2 * pi, X = as.matrix(X), Y = as.matrix(Y), axis = axis, without_axes = without_axes, cor_method = cor_method ) ## The result looks like this: ## p1 value fevals gevals niter convcode kkt1 kkt2 xtimes ## L-BFGS-B 0 -0.1613494 1 1 NA 0 FALSE NA 0.016 if (o$convcode > 0) stop("rotation did not converge.") res_idx <- length(res) + 1 res[[res_idx]] <- list() res[[res_idx]]$axis <- axis res[[res_idx]]$without_axes <- without_axes res[[res_idx]]$angs <- unname( unlist(o[1, 1:nplanes]) ) res[[res_idx]]$planes <- planes res[[res_idx]]$X <- rotate(res[[res_idx]]$angs, planes, X) res[[res_idx]]$cor <- -o$value } ## calculate the correlation for axes nres <- length(res) if (nres > 0) { ## the result is the sum of the mean squared correlations of the ## original variables with the axes. 
"res[[i]]$cor" contains the ## mean squared correlation of the variables with axis "i" res$result <- 0 for (i in 1:nres) res$result <- res$result + res[[i]]$cor ^ 2 ## res$result <- res$result / length(res) ## rotate the input to maximize correlations res$rotated <- X for (i in 1:nres) res$rotated <- rotate(res[[i]]$angs, res[[i]]$planes, res$rotated) } else { ## if we only had one dimension, simply return the means squared ## correlation and don't rotate res$result <- sum(correlate(X, Y, cor_method) ^ 2) res$rotated <- X } res } #### helper functions for rotation ## we create a number or rotation matrices around the 2d planes ## spanned by the orthonormal matrices, multiply them for a general ## rotation which is then applied to the data X rotate <- function (angs, planes, X) { ndim <- ncol(X) nplanes <- ncol(planes) if (length(angs) != nplanes) stop("length(angs) not equal to chose(ndim, 2)") ## loop over the planes to construct general rotation matrix rotmat <- diag(ndim) for (p in 1:nplanes) { ## 2d rotation ## possible optimization: create large rotation matrix ## directly and insert values linearly without a for loop rotmat2d <- matrix( c(cos(angs[p]), -sin(angs[p]), sin(angs[p]), cos(angs[p])), 2, 2, byrow = TRUE ) p_rotmat <- diag(ndim) for (i in 1:2) for (j in 1:2) p_rotmat[ planes[i, p], planes[j, p] ] <- rotmat2d[i, j] rotmat <- rotmat %*% p_rotmat } t(rotmat %*% t(X)) } get_planes <- function(ndims, axis, without_axes){ nplanes <- ndims - length(without_axes) planes <- matrix(NA, 2, nplanes) planes[1, ] <- axis planes[2, ] <- (1:ndims)[c(-axis, -without_axes)] planes } obj <- function(alpha, X, Y, axis, without_axes, cor_method = "pearson"){ ## correlation with first axis xndim <- ncol(X) planes <- get_planes(xndim, axis, without_axes) X2 <- rotate(alpha, planes, X) ## cor(x, y) returns a matrix with the correlations between the ## columns of x = X2 (rows) and the columns of y = Y (columns) we ## want the mean of squared correlations of all variables original ## variables with the first axis, i.e. we require the relevant ## (axis) column of the resulting matrix. ## Possible optimization: use only the relevant column of Y -mean(correlate( X2, Y, #use = "pairwise.complete.obs", method = cor_method )[axis, ] ^ 2) } correlate <- function (x, y, method, ...) { if (method != "kendall"){ return(stats::cor(x, y, method = method, ...)) } else { chckpkg("pcaPP") ## make the cor.fk method behave like cor for matrices: if (is.matrix(x) && is.matrix(y)) { res <- matrix( NA, nrow = ncol(x), ncol = ncol(y), dimnames = list(colnames(x), colnames(y)) ) for (i in 1:ncol(x)) { for (j in 1:ncol(y)){ res[i, j] <- pcaPP::cor.fk(x[, i], y[, j]) } } return(res) } else if (is.null(dim(x)) && is.null(dim(y))){ return(pcaPP::cor.fk(x, y)) } else { stop("something is wrong with the input of 'correlate()'") } } } dimRed/R/tsne.R0000644000176200001440000000556413371631672012776 0ustar liggesusers#' t-Distributed Stochastic Neighborhood Embedding #' #' An S4 Class for t-SNE. #' #' t-SNE is a method that uses Kullback-Leibler divergence between the #' distance matrices in high and low-dimensional space to embed the #' data. The method is very well suited to visualize complex #' structures in low dimensions. 
#' #' @template dimRedMethodSlots #' #' @template dimRedMethodGeneralUsage #' #' @section Parameters: #' t-SNE can take the following parameters: #' \describe{ #' \item{d}{A distance function, defaults to euclidean distances} #' \item{perplexity}{The perplexity parameter, roughly equivalent to neighborhood size.} #' \item{theta}{Approximation for the nearest neighbour search, large values are more inaccurate.} #' \item{ndim}{The number of embedding dimensions.} #' } #' #' @section Implementation: #' #' Wraps around \code{\link[Rtsne]{Rtsne}}, which is very well #' documented. Setting \code{theta = 0} does a normal t-SNE, larger #' values for \code{theta < 1} use the Barnes-Hut algorithm which #' scales much nicer with data size. Larger values for perplexity take #' larger neighborhoods into account. #' #' @references #' Maaten, L. van der, 2014. Accelerating t-SNE using Tree-Based #' Algorithms. Journal of Machine Learning Research 15, 3221-3245. #' #' van der Maaten, L., Hinton, G., 2008. Visualizing Data using #' t-SNE. J. Mach. Learn. Res. 9, 2579-2605. #' #' @examples #' \dontrun{ #' dat <- loadDataSet("3D S Curve", n = 300) #' #' ## using the S4 class directly: #' tsne <- tSNE() #' emb <- tsne@fun(dat, tsne@stdpars) #' #' ## using embed() #' emb2 <- embed(dat, "tSNE", perplexity = 80) #' #' plot(emb, type = "2vars") #' plot(emb2, type = "2vars") #' } #' @include dimRedResult-class.R #' @include dimRedMethod-class.R #' @family dimensionality reduction methods #' @export tSNE #' @exportClass tSNE tSNE <- setClass( "tSNE", contains = "dimRedMethod", prototype = list( stdpars = list(d = stats::dist, perplexity = 30, theta = 0.5, ndim = 2), fun = function (data, pars, keep.org.data = TRUE) { chckpkg("Rtsne") meta <- data@meta orgdata <- if (keep.org.data) data@data else NULL indata <- data@data outdata <- Rtsne::Rtsne(pars$d(indata), perplexity = pars$perplexity, theta = pars$theta, dims = pars$ndim)$Y colnames(outdata) <- paste0("tSNE", 1:ncol(outdata)) return(new( "dimRedResult", data = new("dimRedData", data = outdata, meta = meta), org.data = orgdata, has.org.data = keep.org.data, method = "tsne", pars = pars )) }) ) dimRed/R/diffmap.R0000644000176200001440000001110713424014250013403 0ustar liggesusers#' Diffusion Maps #' #' An S4 Class implementing Diffusion Maps #' #' Diffusion Maps uses a diffusion probability matrix to robustly #' approximate a manifold. #' #' #' @template dimRedMethodSlots #' #' @template dimRedMethodGeneralUsage #' #' @section Parameters: #' Diffusion Maps can take the following parameters: #' \describe{ #' \item{d}{a function transforming a matrix row wise into a #' distance matrix or \code{dist} object, #' e.g. \code{\link[stats]{dist}}.} #' \item{ndim}{The number of dimensions} #' \item{eps}{The epsilon parameter that determines the #' diffusion weight matrix from a distance matrix \code{d}, #' \eqn{exp(-d^2/eps)}, if set to \code{"auto"} it will #' be set to the median distance to the 0.01*n nearest #' neighbor.} #' \item{t}{Time-scale parameter. The recommended value, 0, #' uses multiscale geometry.} #' \item{delta}{Sparsity cut-off for the symmetric graph Laplacian, #' a higher value results in more sparsity and faster calculation. #' The predefined value is 10^-5.} #' } #' #' @section Implementation: #' Wraps around \code{\link[diffusionMap]{diffuse}}, see there for #' details. It uses the notation of Richards et al. (2009) which is #' slightly different from the one in the original paper (Coifman and #' Lafon, 2006) and there is no \eqn{\alpha} parameter. 
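## Minimal sketch of the diffusion weight matrix described in the parameters
## above, w_ij = exp(-d_ij^2 / eps); the bandwidth chosen here is a simple
## illustrative heuristic, not the package's "auto" rule:
if (FALSE) {
  x   <- loadDataSet("3D S Curve", n = 100)@data
  d   <- as.matrix(dist(x))
  eps <- median(d)^2
  W   <- exp(-d^2 / eps)   # dense diffusion weights
}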
#' There is also an out-of-sample extension, see examples. #' #' #' @references #' Richards, J.W., Freeman, P.E., Lee, A.B., Schafer, #' C.M., 2009. Exploiting Low-Dimensional Structure in #' Astronomical Spectra. ApJ 691, #' 32. doi:10.1088/0004-637X/691/1/32 #' #' Coifman, R.R., Lafon, S., 2006. Diffusion maps. Applied and #' Computational Harmonic Analysis 21, #' 5-30. doi:10.1016/j.acha.2006.04.006 #' #' @examples #' dat <- loadDataSet("3D S Curve", n = 300) #' #' ## use the S4 Class directly: #' diffmap <- DiffusionMaps() #' emb <- diffmap@fun(dat, diffmap@stdpars) #' #' ## simpler, use embed(): #' emb2 <- embed(dat, "DiffusionMaps") #' #' plot(emb, type = "2vars") #' #' samp <- sample(floor(nrow(dat) / 10)) #' embsamp <- diffmap@fun(dat[samp], diffmap@stdpars) #' embother <- embsamp@apply(dat[-samp]) #' plot(embsamp, type = "2vars") #' points(embother@data) #' #' @include dimRedResult-class.R #' @include dimRedMethod-class.R #' @family dimensionality reduction methods #' @export DiffusionMaps #' @exportClass DiffusionMaps DiffusionMaps <- setClass( "DiffusionMaps", contains = "dimRedMethod", prototype = list( stdpars = list(d = stats::dist, ndim = 2, eps = "auto", t = 0, delta = 1e-5), fun = function (data, pars, keep.org.data = TRUE) { chckpkg("diffusionMap") meta <- data@meta orgdata <- if (keep.org.data) data@data else NULL indata <- data@data distmat <- pars$d(indata) if (pars$eps == "auto") pars$eps <- diffusionMap::epsilonCompute(distmat) diffres <- diffusionMap::diffuse( D = distmat, t = pars$t, eps.val = pars$eps, neigen = pars$ndim, maxdim = pars$ndim, delta = pars$delta ) outdata <- as.matrix(diffres$X) appl <- function(x) { appl.meta <- if (inherits(x, "dimRedData")) x@meta else data.frame() proj <- if (inherits(x, "dimRedData")) x@data else x if (ncol(proj) != ncol(data@data)) stop("x must have the same number of dimensions ", "as the original data") dd <- sqrt(pdist2(proj, indata)) appl.res <- diffusionMap::nystrom(diffres, dd, sigma = diffres$epsilon) dimnames(appl.res) <- list( rownames(x), paste0("diffMap", seq_len(ncol(outdata))) ) new("dimRedData", data = appl.res, meta = appl.meta) } colnames(outdata) <- paste0("diffMap", seq_len(ncol(outdata))) return(new( "dimRedResult", data = new("dimRedData", data = outdata, meta = meta), org.data = orgdata, apply = appl, has.apply = TRUE, has.org.data = keep.org.data, method = "diffmap", pars = pars )) }) ) dimRed/R/drr.R0000644000176200001440000001450413040076314012573 0ustar liggesusers#' Dimensionality Reduction via Regression #' #' An S4 Class implementing Dimensionality Reduction via Regression (DRR). #' #' DRR is a non-linear extension of PCA that uses Kernel Ridge regression. #' #' @template dimRedMethodSlots #' #' @template dimRedMethodGeneralUsage #' #' @section Parameters: #' DRR can take the following parameters: #' \describe{ #' \item{ndim}{The number of dimensions} #' \item{lambda}{The regularization parameter for the ridge #' regression.} #' \item{kernel}{The kernel to use for KRR, defaults to #' \code{"rbfdot"}.} #' \item{kernel.pars}{A list with kernel parameters, elements depend #' on the kernel used, \code{"rbfdot"} uses \code{"sigma"}.} #' \item{pca}{logical, should an initial pca step be performed, #' defaults to \code{TRUE}.} #' \item{pca.center}{logical, should the data be centered before the #' pca step. Defaults to \code{TRUE}.} #' \item{pca.scale}{logical, should the data be scaled before the #' pca ste. 
Defaults to \code{FALSE}.} #' \item{fastcv}{logical, should \code{\link[CVST]{fastCV}} from the #' CVST package be used instead of normal cross-validation.} #' \item{fastcv.test}{If \code{fastcv = TRUE}, separate test data set for fastcv.} #' \item{cv.folds}{if \code{fastcv = FALSE}, specifies the number of #' folds for crossvalidation.} #' \item{fastkrr.nblocks}{integer, higher values sacrifice numerical #' accuracy for speed and less memory, see below for details.} #' \item{verbose}{logical, should the cross-validation results be #' printed out.} #' } #' #' @section Implementation: #' Wraps around \code{\link[DRR]{drr}}, see there for details. DRR is #' a non-linear extension of principal components analysis using Kernel #' Ridge Regression (KRR, details see \code{\link[CVST]{constructKRRLearner}} #' and \code{\link[DRR]{constructFastKRRLearner}}). Non-linear #' regression is used to explain more variance than PCA. DRR provides #' an out-of-sample extension and a backward projection. #' #' The most expensive computations are matrix inversions therefore the #' implementation profits a lot from a multithreaded BLAS library. #' The best parameters for each KRR are determined by cross-validaton #' over all parameter combinations of \code{lambda} and #' \code{kernel.pars}, using less parameter values will speed up #' computation time. Calculation of KRR can be accelerated by #' increasing \code{fastkrr.nblocks}, it should be smaller than #' n^{1/3} up to sacrificing some accuracy, for details see #' \code{\link[DRR]{constructFastKRRLearner}}. Another way to speed up #' is to use \code{pars$fastcv = TRUE} which might provide a more #' efficient way to search the parameter space but may also miss the #' global maximum, I have not ran tests on the accuracy of this method. #' #' #' #' @references #' Laparra, V., Malo, J., Camps-Valls, G., #' 2015. Dimensionality Reduction via Regression in Hyperspectral #' Imagery. IEEE Journal of Selected Topics in Signal Processing #' 9, 1026-1036. 
doi:10.1109/JSTSP.2015.2417833 #' #' @examples #' \dontrun{ #' dat <- loadDataSet("variable Noise Helix", n = 200)[sample(200)] #' #' ## use the S4 Class directly: #' drr <- DRR() #' pars <- drr@stdpars #' pars$ndim <- 3 #' emb <- drr@fun(dat, pars) #' #' ## simpler, use embed(): #' emb2 <- embed(dat, "DRR", ndim = 3) #' #' #' plot(dat, type = "3vars") #' plot(emb, type = "3vars") #' plot(emb@inverse(emb@data@data[, 1, drop = FALSE]), type = "3vars") #' } #' #' #' @include dimRedResult-class.R #' @include dimRedMethod-class.R #' @import DRR #' @family dimensionality reduction methods #' @export DRR #' @exportClass DRR DRR <- setClass( "DRR", contains = "dimRedMethod", prototype = list( stdpars = list(ndim = 2, lambda = c(0, 10 ^ (-3:2)), kernel = "rbfdot", kernel.pars = list(sigma = 10 ^ (-3:4)), pca = TRUE, pca.center = TRUE, pca.scale = FALSE, fastcv = FALSE, cv.folds = 5, fastcv.test = NULL, fastkrr.nblocks = 4, verbose = TRUE), fun = function (data, pars, keep.org.data = TRUE) { chckpkg("DRR") chckpkg("kernlab") meta <- data@meta orgdata <- if (keep.org.data) data@data else NULL indata <- data@data res <- do.call(DRR::drr, c(list(X = indata), pars)) outdata <- res$fitted.data colnames(outdata) <- paste0("DRR", 1:ncol(outdata)) appl <- function(x){ appl.meta <- if (inherits(x, "dimRedData")) x@meta else data.frame() proj <- if (inherits(x, "dimRedData")) x@data else x if (ncol(proj) != ncol(data@data)) stop("x must have the same number of dimensions ", "as the original data") appl.out <- new("dimRedData", data = res$apply(proj), meta = appl.meta) dimnames(appl.out@data) <- list( rownames(x), paste0("DRR", seq_len(ncol(appl.out@data))) ) return(appl.out) } inv <- function(x) { appl.meta <- if (inherits(x, "dimRedData")) x@meta else data.frame() proj <- if (inherits(x, "dimRedData")) x@data else x if (ncol(proj) > ncol(data@data)) stop("x must have less or equal number of dimensions ", "as the original data") inv.out <- new("dimRedData", data = res$inverse(proj), meta = appl.meta) dimnames(inv.out@data) <- list(rownames(proj), colnames(data@data)) return(inv.out) } return( new("dimRedResult", data = new("dimRedData", data = outdata, meta = meta), org.data = orgdata, apply = appl, inverse = inv, has.org.data = keep.org.data, has.apply = TRUE, has.inverse = TRUE, method = "drr", pars = pars ) ) }) ) dimRed/R/kpca.R0000644000176200001440000001071113371631672012731 0ustar liggesusers#' Kernel PCA #' #' An S4 Class implementing Kernel PCA #' #' Kernel PCA is a nonlinear extension of PCA using kernel methods. #' #' #' @template dimRedMethodSlots #' #' @template dimRedMethodGeneralUsage #' #' @section Parameters: #' Kernel PCA can take the following parameters: #' \describe{ #' \item{ndim}{the number of output dimensions, defaults to 2} #' \item{kernel}{The kernel function, either as a function or a #' character vector with the name of the kernel. Defaults to #' \code{"rbfdot"}} #' \item{kpar}{A list with the parameters for the kernel function, #' defaults to \code{list(sigma = 0.1)}} #' } #' #' The most comprehensive collection of kernel functions can be found in #' \code{\link[kernlab]{kpca}}. In case the function does not take any #' parameters \code{kpar} has to be an empty list. #' #' @section Implementation: #' #' Wraps around \code{\link[kernlab]{kpca}}, but provides additionally #' forward and backward projections. #' #' @references #' #' Sch\"olkopf, B., Smola, A., M\"uller, K.-R., 1998. Nonlinear Component Analysis #' as a Kernel Eigenvalue Problem. Neural Computation 10, 1299-1319. 
#' https://doi.org/10.1162/089976698300017467 #' #' @examples #' \dontrun{ #' dat <- loadDataSet("3D S Curve") #' #' ## use the S4 class directly: #' kpca <- kPCA() #' emb <- kpca@fun(dat, kpca@stdpars) #' #' ## simpler, use embed(): #' emb2 <- embed(dat, "kPCA") #' #' plot(emb, type = "2vars") #' } #' @include dimRedResult-class.R #' @include dimRedMethod-class.R #' @family dimensionality reduction methods #' @export kPCA #' @exportClass kPCA kPCA <- setClass( "kPCA", contains = "dimRedMethod", prototype = list( stdpars = list(kernel = "rbfdot", kpar = list(sigma = 0.1), ndim = 2), fun = function (data, pars, keep.org.data = TRUE) { chckpkg("kernlab") if (is.null(pars$ndim)) pars$ndim <- 2 meta <- data@meta orgdata <- if (keep.org.data) data@data else NULL indata <- data@data message(Sys.time(), ": Calculating kernel PCA") res <- do.call(kernlab::kpca, c(list(x = indata), pars)) kernel <- get_kernel_fun(pars$kernel, pars$kpar) message(Sys.time(), ": Trying to calculate reverse") K_rev <- kernlab::kernelMatrix(kernel, res@rotated) diag(K_rev) <- 0.1 + diag(K_rev) dual_coef <- try(solve(K_rev, indata), silent = TRUE) appl <- function (x) { appl.meta <- if (inherits(x, "dimRedData")) x@meta else data.frame() proj <- if (inherits(x, "dimRedData")) x@data else x proj <- kernlab::predict(res, proj)[, 1:pars$ndim, drop = FALSE] colnames(proj) <- paste0("kPCA", 1:ncol(proj)) new("dimRedData", data = proj, meta = appl.meta) } inv <- if (inherits(dual_coef, "try-error")) { message("No inverse function.") function(x) NA } else { function (x) { appl.meta <- if (inherits(x, "dimRedData")) x@meta else data.frame() proj <- if (inherits(x, "dimRedData")) x@data else x resrot <- res@rotated[, 1:ncol(proj)] rot <- kernlab::kernelMatrix(kernel, proj, resrot) proj <- rot %*% dual_coef new("dimRedData", data = proj, meta = appl.meta) } } outdata <- res@rotated[, 1:pars$ndim, drop = FALSE] colnames(outdata) <- paste0("kPCA", 1:ncol(outdata)) message(Sys.time(), ": DONE") return( new( "dimRedResult", data = new("dimRedData", data = outdata, meta = meta), org.data = orgdata, apply = appl, inverse = inv, has.org.data = keep.org.data, has.apply = TRUE, has.inverse = TRUE, method = "kpca", pars = pars ) ) }) ) ## get the kernel function out of the kernlab namespace: get_kernel_fun <- function (kernel, pars) { if (!is(kernel, "kernel")) { if (is(kernel, "function")) { kernel <- deparse(substitute(kernel)) } else { kernel <- get(kernel, asNamespace("kernlab")) } kernel <- do.call(kernel, pars) } return(kernel) } dimRed/R/dimRedMethod-class.R0000644000176200001440000000650213371631672015466 0ustar liggesusers#' Class "dimRedMethod" #' #' A virtual class "dimRedMethod" to serve as a template to implement #' methods for dimensionality reduction. #' #' Implementations of dimensionality reductions should inherit from #' this class. #' #' The \code{fun} slot should be a function that takes three arguments #' \describe{ #' \item{data}{An object of class \code{\link{dimRedData}}.} #' \item{pars}{A list with the standard parameters.} #' \item{keep.org.data}{Logical. If the original data should be kept in the output.} #' } #' and returns an object of class \code{\link{dimRedResult}}. #' #' The \code{stdpars} slot should take a list that contains standard #' parameters for the implemented methods. #' #' This way the method can be called by \code{embed(data, "method-name", #' ...)}, where \code{...} can be used to to change single parameters. #' #' #' @slot fun A function that does the embedding. 
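## Usage sketch for kPCA above: `kernel` and `kpar` are handed through to
## kernlab, so other kernlab kernels can be used, e.g. a polynomial kernel
## (kernel and parameter names follow kernlab; values are illustrative):
if (FALSE) {
  dat <- loadDataSet("3D S Curve", n = 300)
  emb <- embed(dat, "kPCA", kernel = "polydot", kpar = list(degree = 2))
  plot(emb, type = "2vars")
}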
#' @slot stdpars A list with the default parameters for the \code{fun} #' slot. #' #' @family dimensionality reduction methods #' @export setClass("dimRedMethod", contains = "VIRTUAL", slots = c(fun = "function", stdpars = "list")) #' dimRedMethodList #' #' Get the names of all methods for dimensionality reduction. #' #' Returns the name of all classes that inherit from #' \code{\link{dimRedMethod-class}} to use with \code{\link{embed}}. #' #' @return a character vector with the names of classes that inherit #' from \code{dimRedMethod}. #' #' @examples #' dimRedMethodList() #' #' @family dimensionality reduction methods #' @export dimRedMethodList <- function () { ## return(c( ## "graph_kk", ## "graph_drl", ## "graph_fr", ## "drr", ## "isomap", ## "diffmap", ## "tsne", ## "nmds", ## "mds", ## "ica", ## "pca", ## "lle", ## ## those two methods are buggy and can produce segfaults: ## ## "loe", "soe", ## "leim", ## "kpca" ## )) names(completeClassDefinition("dimRedMethod", doExtends = FALSE)@subclasses) } # to put standard values for omitted arguments setGeneric("matchPars", function(object, pars) standardGeneric("matchPars"), valueClass = c("list")) setMethod("matchPars", signature(object = "dimRedMethod", pars = "list"), definition = function(object, pars) { nsp <- names(object@stdpars) ncp <- names(pars) nap <- union(nsp, ncp) res <- list() ## exists can deal with elements being NULL ## to assign list@el <- NULL do: ## list["el"] <- list(NULL) for (np in nap) { miss.std <- !exists(np, where = object@stdpars) miss.par <- !exists(np, where = pars) if (miss.std) { warning("Parameter matching: ", np, " is not a standard parameter, ignoring.") } else if (miss.par) { res[np] <- object@stdpars[np] } else { res[np] <- pars[np] } } ## if the method does not accept parameters we have to return ## null, so in embed there is no args$par created. and passed by ## do.call in the embed() function. if (length(res) != 0) ## return(res) else return(NULL) ## first try without the above, all methods should have a pars ## argument. return(res) }) dimRed/R/nmds.R0000644000176200001440000000376613371631672012770 0ustar liggesusers#' Non-Metric Dimensional Scaling #' #' An S4 Class implementing Non-Metric Dimensional Scaling. #' #' A non-linear extension of MDS using monotonic regression #' #' @template dimRedMethodSlots #' #' @template dimRedMethodGeneralUsage #' #' @section Parameters: #' nMDS can take the following parameters: #' \describe{ #' \item{d}{A distance function.} #' \item{ndim}{The number of embedding dimensions.} #' } #' #' @section Implementation: #' Wraps around the #' \code{\link[vegan]{monoMDS}}. For parameters that are not #' available here, the standard configuration is used. #' #' @references #' #' Kruskal, J.B., 1964. Nonmetric multidimensional scaling: A numerical method. #' Psychometrika 29, 115-129. 
https://doi.org/10.1007/BF02289694 #' #' @examples #' dat <- loadDataSet("3D S Curve", n = 300) #' #' ## using the S4 classes: #' nmds <- nMDS() #' emb <- nmds@fun(dat, nmds@stdpars) #' #' #' ## using embed() #' emb2 <- embed(dat, "nMDS", d = function(x) exp(dist(x))) #' #' #' plot(emb, type = "2vars") #' plot(emb2, type = "2vars") #' #' @include dimRedResult-class.R #' @include dimRedMethod-class.R #' @family dimensionality reduction methods #' @export nMDS #' @exportClass nMDS nMDS <- setClass( "nMDS", contains = "dimRedMethod", prototype = list( stdpars = list(d = stats::dist, ndim = 2), fun = function (data, pars, keep.org.data = TRUE) { chckpkg("vegan") meta <- data@meta orgdata <- if (keep.org.data) data@data else NULL indata <- data@data outdata <- vegan::monoMDS(pars$d(indata), k = pars$ndim)$points colnames(outdata) <- paste0("NMDS", 1:ncol(outdata)) return(new( "dimRedResult", data = new("dimRedData", data = outdata, meta = meta), org.data = orgdata, has.org.data = keep.org.data, method = "nmds", pars = pars )) }) ) dimRed/R/lle.R0000644000176200001440000000436113371631672012573 0ustar liggesusers#' Locally Linear Embedding #' #' An S4 Class implementing Locally Linear Embedding (LLE) #' #' LLE approximates the points in the manifold by linear combination #' of its neighbors. These linear combinations are the same inside the #' manifold and in highdimensional space. #' #' @template dimRedMethodSlots #' #' @template dimRedMethodGeneralUsage #' #' @section Parameters: #' LLE can take the following parameters: #' \describe{ #' \item{knn}{the number of neighbors for the knn graph., defaults to 50.} #' \item{ndim}{the number of embedding dimensions, defaults to 2.} #' } #' #' @section Implementation: #' Wraps around \code{\link[lle]{lle}}, only #' exposes the parameters \code{k} and \code{m}. #' #' @references #' #' Roweis, S.T., Saul, L.K., 2000. Nonlinear Dimensionality Reduction #' by Locally Linear Embedding. Science 290, #' 2323-2326. doi:10.1126/science.290.5500.2323 #' #' @examples #' dat <- loadDataSet("3D S Curve", n = 500) #' #' ## directy use the S4 class: #' lle <- LLE() #' emb <- lle@fun(dat, lle@stdpars) #' #' ## using embed(): #' emb2 <- embed(dat, "LLE", knn = 45) #' #' plot(emb, type = "2vars") #' plot(emb2, type = "2vars") #' #' @include dimRedResult-class.R #' @include dimRedMethod-class.R #' @family dimensionality reduction methods #' @export LLE #' @exportClass LLE LLE <- setClass( "LLE", contains = "dimRedMethod", prototype = list( stdpars = list(knn = 50, ndim = 2), fun = function (data, pars, keep.org.data = TRUE) { chckpkg("lle") meta <- data@meta orgdata <- if (keep.org.data) data@data else NULL indata <- data@data outdata <- lle::lle(indata, k = pars$knn, m = pars$ndim)$Y if (is.null(dim(outdata))) { dim(outdata) <- c(length(outdata), 1) } colnames(outdata) <- paste0("LLE", 1:ncol(outdata)) return(new( "dimRedResult", data = new("dimRedData", data = outdata, meta = meta), org.data = orgdata, has.org.data = keep.org.data, method = "lle", pars = pars )) }) ) dimRed/R/soe.R0000644000176200001440000000264213024273620012573 0ustar liggesusers## #' Soft Ordinal Embedding ## #' ## #' Instance of \code{\link{dimRedMethod}} for Soft Ordinal Embedding. ## #' ## #' For details see \code{\link[loe]{SOE}}. 
## #' ## #' ## #' @examples ## #' dat <- loadDataSet("3D S Curve", n = 50) ## #' soe <- SOE() ## #' emb <- soe@fun(dat, soe@stdpars) ## #' ## #' ## #' plot(emb@data@data) ## #' ## #' ## #' @include dimRedResult-class.R ## #' @include dimRedMethod-class.R ## #' @export ## SOE <- setClass( ## "SOE", ## contains = "dimRedMethod", ## prototype = list( ## stdpars = list(d = stats::dist, knn = 50, ndim = 2), ## fun = function (data, ## pars, ## keep.org.data = TRUE) { ## chckpkg("loe") ## meta <- data@meta ## orgdata <- if (keep.org.data) data@data else NULL ## indata <- data@data ## outdata <- loe::SOE(loe::get.order(as.matrix(pars$d(indata))), ## N = nrow(indata), p = pars$ndim)$X ## colnames(outdata) <- paste0("SOE", 1:ncol(outdata)) ## return(new( ## "dimRedResult", ## data = new("dimRedData", ## data = outdata, ## meta = meta), ## org.data = orgdata, ## has.org.data = keep.org.data, ## method = "soe", ## pars = pars ## )) ## }) ## ) dimRed/R/misc.R0000644000176200001440000002202613371631672012750 0ustar liggesusers## if (!isClassUnion("missingORnumeric")) setClassUnion("missingORnumeric", c("numeric", "missing")) ## if (!isClassUnion("missingORcharacter")) setClassUnion("missingORcharacter", c("character", "missing")) ## if (!isClassUnion("missingORlogical")) setClassUnion("missingORlogical", c("logical", "missing")) ## if (!isClassUnion("missingORfunction")) setClassUnion("missingORfunction", c("function", "missing")) # Squared euclidean distance between points in A and B # taken from http://blog.felixriedel.com/2013/05/pairwise-distances-in-r/ pdist2 <- function (A, B) { an <- rowSums(A ^ 2) # apply(A, 1, function(rvec) crossprod(rvec, rvec)) bn <- rowSums(B ^ 2) # apply(B, 1, function(rvec) crossprod(rvec, rvec)) m <- nrow(A) n <- nrow(B) matrix(rep(an, n), nrow = m) + matrix(rep(bn, m), nrow = m, byrow = TRUE) - 2 * tcrossprod(A, B) } ## a + b ~ c + d ## becomes ## ~ c + d + 0 rhs <- function (formula) { fs <- as.character(formula)[3] stats::as.formula(paste("~", fs, "+ 0")) } ## a + b ~ c + d ## becomes ## ~ a + b + 0 lhs <- function (formula) { fs <- as.character(formula)[2] stats::as.formula(paste("~", fs, "+ 0")) } ## check if a package is installed chckpkg <- function (pkg) { if (!requireNamespace(pkg, quietly = TRUE)) { stop(paste0("require '", pkg, "' package, install it using install.packages('", pkg, "')")) } } ## create generics that appear in several different places #' Converts to data.frame #' #' General conversions of objects created by \code{dimRed} to \code{data.frame}. #' See class documentations for details (\code{\link{dimRedData}}, #' \code{\link{dimRedResult}}). For the documentation of this function in base #' package, see here: \code{\link[base]{as.data.frame.default}}. #' #' @param x The object to be converted #' @param row.names unused in \code{dimRed} #' @param optional unused in \code{dimRed} #' @param ... other arguments. setGeneric( "as.data.frame", function(x, row.names, optional, ...) standardGeneric("as.data.frame"), useAsDefault = base::as.data.frame, valueClass = "data.frame" ) #' Converts to dimRedData #' #' Conversion functions to dimRedData. #' #' @param formula The formula, left hand side is assigned to the meta slot right #' hand side is assigned to the data slot. #' @param ... other arguments. setGeneric( "as.dimRedData", function(formula, ...) standardGeneric("as.dimRedData"), valueClass = "dimRedData" ) #' Method getData #' #' Extracts the data slot. #' #' @param object The object to be converted. 
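## Quick check of pdist2() defined above: it returns *squared* euclidean
## distances between the rows of two matrices, so for a single matrix it
## should match dist() squared:
if (FALSE) {
  A <- matrix(rnorm(20), 5, 4)
  all.equal(as.numeric(as.dist(pdist2(A, A))), as.numeric(dist(A))^2)
}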
setGeneric("getData", function(object) standardGeneric("getData")) #' Method getMeta #' #' Extracts the meta slot. #' #' @param object The object to be converted. #' @param ... other arguments. setGeneric("getMeta", function(object, ...) standardGeneric("getMeta")) #' Method getPars #' #' Extracts the pars slot. #' #' @param object The object to be converted. #' @param ... other arguments. setGeneric("getPars", function (object, ...) standardGeneric("getPars")) #' Method getNDim #' #' Extract the number of embedding dimensions. #' #' @param object The object to get the dimensions from. #' @param ... other arguments. setGeneric("getNDim", function (object, ...) standardGeneric("getNDim")) #' Method getOrgData #' #' Extract the Original data. #' #' @param object The object to extract data from. #' @param ... other arguments. setGeneric("getOrgData", function (object, ...) standardGeneric("getOrgData")) #' Method getDimRedData #' #' Extract dimRedData. #' @param object The object to extract data from. #' @param ... other arguments. setGeneric("getDimRedData", function (object, ...) standardGeneric("getDimRedData")) #' Method getOtherData #' #' Extract other data produced by a dimRedMethod #' #' @param object The object to extract data from. #' @param ... other arguments. setGeneric("getOtherData", function (object, ...) standardGeneric("getOtherData"), valueClass = "list") #' Method print #' #' Imports the print method into the package namespace. #' #' @param x The object to be printed. #' @param ... Other arguments for printing. setGeneric("print", function(x, ...) standardGeneric("print")) #' Method ndims #' #' Extract the number of dimensions. #' #' @param object To extract the number of dimensions from. #' @param ... Arguments for further methods setGeneric("ndims", function (object, ...) standardGeneric("ndims"), valueClass = "integer") #' getSuggests #' #' Install packages wich are suggested by dimRed. #' #' By default dimRed will not install all the dependencies, because #' there are quite a lot and in case some of them are not available #' for your platform you will not be able to install dimRed without #' problems. #' #' To solve this I provide a function which automatically installes #' all the suggested packages. 
#' #' @examples #' \dontrun{ #' installSuggests() #' } #' @export installSuggests <- function () { "%w/o%" <- function(x, y) x[!x %in% y] pkgString <- installed.packages()["dimRed", "Suggests"] deps <- strsplit(pkgString, ", |,\n")[[1]] deps <- gsub("\n", "", deps) # Windows needs this installedPkgs <- rownames(installed.packages()) missingPkgs <- deps %w/o% installedPkgs if (length(missingPkgs) > 0) { message("The following packages are missing: ") cat(missingPkgs, "\n") message("installing ...") install.packages(missingPkgs) pkgString <- installed.packages()["dimRed", "Suggests"] installedPkgs <- rownames(installed.packages()) missingPkgs <- deps %w/o% installedPkgs if (length(missingPkgs) > 0) { message("Could not install the following packages:") cat(missingPkgs, "\n") message("please install manually or some methods will not work.") } else { message("All necessary packages installed") message("If things still don't work try 'update.packages()'") message("If it still does not work file a bugreport!!") } } else { message("All necessary packages installed") message("If things still don't work try 'update.packages()'") message("If it still does not work file a bugreport!!") } } ## input data(matrix or data frame) return knn graph implements ## "smart" choices on RANN::nn2 parameters we ignore radius search ## TODO: find out a good limit to switch from kd to bd trees COMMENT: ## bd trees are buggy, they dont work if there are duplicated data ## points and checking would neutralize the performance gain, so bd ## trees are not really usable. #' makeKNNgraph #' #' Create a K-nearest neighbor graph from data x. Uses #' \code{\link[RANN]{nn2}} as a fast way to find the neares neighbors. #' #' @param x data, a matrix, observations in rows, dimensions in #' columns #' @param k the number of nearest neighbors. #' @param eps number, if \code{eps > 0} the KNN search is approximate, #' see \code{\link[RANN]{nn2}} #' @param diag logical, if \code{TRUE} every edge of the returned #' graph will have an edge with weight \code{0} to itself. #' #' @return an object of type \code{\link[igraph]{igraph}} with edge #' weight being the distances. #' #' #' makeKNNgraph <- function(x, k, eps = 0, diag = FALSE){ ## requireNamespace("RANN") ## requireNamespace("igraph") ## consts INF_VAL <- 1.340781e+15 NA_IDX <- 0 BDKD_LIM <- 1000000 #todo: figure out a good value here ## select parameters M <- nrow(x) treetype <- "kd" # if (M < BDKD_LIM) "kd" else "bd" # see: # https://github.com/jefferis/RANN/issues/19 searchtype <- if (eps == 0) "standard" else "priority" ## RANN::nn2 returns the points in data with respect to query ## e.g. the rows in the output are the points in query and the ## columns the points in data. 
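## Note on the call below: k + 1 neighbours are requested because the query
## set equals the data set, so each point's nearest neighbour is the point
## itself; that self-match is dropped again (or kept when diag = TRUE) when
## the edges are written into the graph.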
nn2res <- RANN::nn2(data = x, query = x, k = k + 1, treetype = treetype, searchtype = searchtype, eps = eps) ## create graph: the first ny nodes will be y, the last nx nodes ## will be x, if x != y g <- igraph::make_empty_graph(M, directed = FALSE) g[from = if (diag) rep(seq_len(M), times = k + 1) else rep(seq_len(M), times = k), to = if (diag) as.vector(nn2res$nn.idx) else as.vector(nn2res$nn.idx[, -1]), attr = "weight"] <- if (diag) as.vector(nn2res$nn.dists) else as.vector(nn2res$nn.dists[, -1]) return(g) } makeEpsSparseMatrix <- function(x, eps) { chckpkg("Matrix") n <- nrow(x) dd <- stats::dist(x) ddind <- dd < eps rows <- unlist(lapply(2:n, function(x) x:n), use.names = FALSE) cols <- rep(seq_len(n - 1), times = (n - 1):1) Matrix::sparseMatrix(i = rows[ddind], j = cols[ddind], x = dd[ddind], dims = c(n, n), symmetric = TRUE) } dimRed/R/leim.R0000644000176200001440000001317413371631672012747 0ustar liggesusers#' Laplacian Eigenmaps #' #' An S4 Class implementing Laplacian Eigenmaps #' #' Laplacian Eigenmaps use a kernel and were originally developed to #' separate non-convex clusters under the name spectral clustering. #' #' @template dimRedMethodSlots #' #' @template dimRedMethodGeneralUsage #' #' @section Parameters: #' \code{LaplacianEigenmaps} can take the following parameters: #' \describe{ #' \item{ndim}{the number of output dimensions.} #' #' \item{sparse}{A character vector specifying hot to make the graph #' sparse, \code{"knn"} means that a K-nearest neighbor graph is #' constructed, \code{"eps"} an epsilon neighborhood graph is #' constructed, else a dense distance matrix is used.} #' #' \item{knn}{The number of nearest neighbors to use for the knn graph.} #' \item{eps}{The distance for the epsilon neighborhood graph.} #' #' \item{t}{Parameter for the transformation of the distance matrix #' by \eqn{w=exp(-d^2/t)}, larger values give less weight to #' differences in distance, \code{t == Inf} treats all distances != 0 equally.} #' \item{norm}{logical, should the normed laplacian be used?} #' } #' #' @section Implementation: #' Wraps around \code{\link[loe]{spec.emb}}. #' #' @references #' #' Belkin, M., Niyogi, P., 2003. Laplacian Eigenmaps for #' Dimensionality Reduction and Data Representation. Neural #' Computation 15, 1373. 
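## Usage sketch for Laplacian Eigenmaps with a k-nearest-neighbour graph and
## a finite heat-kernel bandwidth t instead of the default t = Inf
## (parameter values are illustrative):
if (FALSE) {
  dat <- loadDataSet("3D S Curve", n = 300)
  emb <- embed(dat, "LaplacianEigenmaps", knn = 20, t = 1)
  plot(emb@data@data)
}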
#' #' @examples #' dat <- loadDataSet("3D S Curve") #' leim <- LaplacianEigenmaps() #' emb <- leim@fun(dat, leim@stdpars) #' #' #' plot(emb@data@data) #' #' #' @include dimRedResult-class.R #' @include dimRedMethod-class.R #' @export LaplacianEigenmaps #' @exportClass LaplacianEigenmaps LaplacianEigenmaps <- setClass( "LaplacianEigenmaps", contains = "dimRedMethod", prototype = list( stdpars = list(ndim = 2, sparse = "knn", knn = 50, eps = 0.1, t = Inf, norm = TRUE), fun = function (data, pars, keep.org.data = TRUE) { chckpkg("loe") chckpkg("RSpectra") chckpkg("Matrix") meta <- data@meta orgdata <- if (keep.org.data) data@data else NULL indata <- data@data if (is.null(pars$d)) pars$d <- dist if (is.null(pars$knn)) pars$knn <- 50 if (is.null(pars$ndim)) pars$ndim <- 2 if (is.null(pars$t)) pars$t <- Inf if (is.null(pars$norm)) pars$norm <- TRUE message(Sys.time(), ": Creating weight matrix") W <- if (pars$sparse == "knn") { knng <- makeKNNgraph(indata, k = pars$knn, eps = 0, diag = TRUE) if (is.infinite(pars$t)){ igraph::set_edge_attr(knng, name = "weight", value = 1) } else { igraph::set_edge_attr( knng, name = "weight", value = exp( -( igraph::edge_attr( knng, name = "weight" ) ^ 2 ) / pars$t ) ) } igraph::as_adj(knng, sparse = TRUE, attr = "weight", type = "both") } else if (pars$sparse == "eps") { tmp <- makeEpsSparseMatrix(indata, pars$eps) tmp@x <- if (is.infinite(pars$t)) rep(1, length(tmp@i)) else exp(- (tmp@x ^ 2) / pars$t) ## diag(tmp) <- 1 as(tmp, "dgCMatrix") } else { # dense case tmp <- dist(indata) tmp[] <- if (is.infinite(pars$t)) 1 else exp( -(tmp ^ 2) / pars$t) tmp <- as.matrix(tmp) diag(tmp) <- 1 tmp } ## we don't need to test for symmetry, because we know the ## matrix is symmetric D <- Matrix::Diagonal(x = Matrix::rowSums(W)) L <- D - W ## for the generalized eigenvalue problem, we do not have a solver ## use A u = \lambda B u ## Lgen <- Matrix::Diagonal(x = 1 / Matrix::diag(D) ) %*% L ## but then we get negative eigenvalues and complex eigenvalues Lgen <- L message(Sys.time(), ": Eigenvalue decomposition") outdata <- if (pars$norm) { DS <- Matrix::Diagonal(x = 1 / sqrt(Matrix::diag(D))) RSpectra::eigs_sym(DS %*% Lgen %*% DS, k = pars$ndim + 1, sigma = -1e-5) } else { RSpectra::eigs_sym(Lgen, k = pars$ndim + 1, sigma = -1e-5) } message("Eigenvalues: ", paste(format(outdata$values), collapse = " ")) ## The eigenvalues are in decreasing order and we remove the ## smallest, which should be approx 0: outdata <- outdata$vectors[, order(outdata$values)[-1], drop = FALSE] colnames(outdata) <- paste0("LEIM", 1:ncol(outdata)) message(Sys.time(), ": DONE") return(new( "dimRedResult", data = new("dimRedData", data = outdata, meta = meta), org.data = orgdata, has.org.data = keep.org.data, method = "leim", pars = pars )) }) ) dimRed/R/mixColorSpaces.R0000644000176200001440000000452313354441317014746 0ustar liggesusers#' Mixing color ramps #' #' mix different color ramps #' #' automatically create colors to represent a varying number of #' dimensions. #' #' @param vars a list of variables #' @param ramps a list of color ramps, one for each variable. 
#' #' @examples #' cols <- expand.grid(x = seq(0, 1, length.out = 10), #' y = seq(0, 1, length.out = 10), #' z = seq(0, 1, length.out = 10)) #' mixed <- mixColor3Ramps(cols) #' #' \dontrun{ #' library(rgl) #' plot3d(cols$x, cols$y, cols$z, col = mixed, pch = 15) #' #' cols <- expand.grid(x = seq(0, 1, length.out = 10), #' y = seq(0, 1, length.out = 10)) #' mixed <- mixColor2Ramps(cols) #' } #' #' plot(cols$x, cols$y, col = mixed, pch = 15) #' @importFrom grDevices colorRamp #' @importFrom grDevices rgb #' @export mixColorRamps <- function (vars, ramps) { if (length(vars) > length(ramps)) stop("need more or equal ramps than vars") nvars <- length(vars) rgbs <- list() for (i in 1:nvars){ rgbs[[i]] <- ramps[[i]](scale01(as.numeric(vars[[i]]))) } retrgb <- Reduce(`+`, rgbs) res <- apply(retrgb, 2, function(x) (x - min(x)) / (max(x) - min(x))) res[is.nan(res)] <- 0 return(rgb(res)) } #' @rdname mixColorRamps #' @export mixColor1Ramps <- function (vars, ramps = colorRamp(c("blue", "black", "red"))) { mixColorRamps(vars, list(ramps)) } #' @rdname mixColorRamps #' @export mixColor2Ramps <- function (vars, ramps = list(colorRamp(c("blue", "green")), colorRamp(c("blue", "red")))) { mixColorRamps(vars, ramps) } #' @rdname mixColorRamps #' @export mixColor3Ramps <- function (vars, ramps = list(colorRamp(c("#001A00", "#00E600")), colorRamp(c("#00001A", "#0000E6")), colorRamp(c("#1A0000", "#E60000")))) { mixColorRamps(vars, ramps) } colorize <- function (vars) { l <- length(vars) if (l == 1) return(mixColor1Ramps(vars)) if (l == 2) return(mixColor2Ramps(vars)) if (l == 3) return(mixColor3Ramps(vars)) return("#000000") } scale01 <- function(x, low = min(x, na.rm = TRUE), high = max(x, na.rm = FALSE)) { x <- (x - low) / (high - low) x } dimRed/R/isomap.R0000644000176200001440000002031713371631672013306 0ustar liggesusers#' Isomap embedding #' #' An S4 Class implementing the Isomap Algorithm #' #' The Isomap algorithm approximates a manifold using geodesic #' distances on a k nearest neighbor graph. Then classical scaling is #' performed on the resulting distance matrix. #' #' @template dimRedMethodSlots #' #' @template dimRedMethodGeneralUsage #' #' @section Parameters: #' Isomap can take the following parameters: #' \describe{ #' \item{knn}{The number of nearest neighbors in the graph. Defaults to 50.} #' \item{ndim}{The number of embedding dimensions, defaults to 2.} #' \item{get_geod}{Should the geodesic distance matrix be kept, #' if \code{TRUE}, access it as \code{getOtherData(x)$geod}} #' } #' #' @section Implementation: #' #' The dimRed package uses its own implementation of Isomap which also #' comes with an out of sample extension (known as landmark #' Isomap). The default Isomap algorithm scales computationally not #' very well, the implementation here uses \code{\link[RANN]{nn2}} for #' a faster search of the nearest neighbors. If data are too large it #' may be useful to fit a subsample of the data and use the #' out-of-sample extension for the other points. #' #' @references #' Tenenbaum, J.B., Silva, V. de, Langford, J.C., 2000. A Global Geometric #' Framework for Nonlinear Dimensionality Reduction. Science 290, 2319-2323. 
#' https://doi.org/10.1126/science.290.5500.2319 #' #' @examples #' dat <- loadDataSet("3D S Curve", n = 500) #' #' ## use the S4 Class directly: #' isomap <- Isomap() #' emb <- isomap@fun(dat, isomap@stdpars) #' #' ## or simpler, use embed(): #' samp <- sample(nrow(dat), size = 200) #' emb2 <- embed(dat[samp], "Isomap", .mute = NULL, knn = 10) #' emb3 <- emb2@apply(dat[-samp]) #' #' plot(emb2, type = "2vars") #' plot(emb3, type = "2vars") #' #' @include dimRedResult-class.R #' @include dimRedMethod-class.R #' @family dimensionality reduction methods #' @export Isomap #' @exportClass Isomap Isomap <- setClass( "Isomap", contains = "dimRedMethod", prototype = list( stdpars = list(knn = 50, ndim = 2, get_geod = FALSE), fun = function (data, pars, keep.org.data = TRUE) { chckpkg("RSpectra") chckpkg("igraph") chckpkg("RANN") message(Sys.time(), ": Isomap START") meta <- data@meta orgdata <- if (keep.org.data) data@data else NULL indata <- data@data if (is.null(pars$eps)) pars$eps <- 0 if (is.null(pars$get_geod)) pars$get_geod <- FALSE ## geodesic distances message(Sys.time(), ": constructing knn graph") knng <- makeKNNgraph(x = indata, k = pars$knn, eps = pars$eps) message(Sys.time(), ": calculating geodesic distances") geodist <- igraph::distances(knng, algorithm = "dijkstra") message(Sys.time(), ": Classical Scaling") ## TODO: add regularization k <- geodist ^ 2 k <- .Call(stats:::C_DoubleCentre, k) k <- - k / 2 ## TODO: explicit symmetrizing ## TODO: return eigenvectors? e <- RSpectra::eigs_sym(k, pars$ndim, which = "LA", opts = list(retvec = TRUE)) e_values <- e$values e_vectors <- e$vectors neig <- sum(e_values > 0) if (neig < pars$ndim) { warning("Isomap: eigenvalues < 0, returning less dimensions!") e_values <- e_values[seq_len(neig)] e_vectors <- e_vectors[, seq_len(neig), drop = FALSE] } e_vectors <- e_vectors * rep(sqrt(e_values), each = nrow(e_vectors)) colnames(e_vectors) <- paste("iso", seq_len(neig)) appl <- function (x) { message(Sys.time(), ": L-Isomap embed START") appl.meta <- if (inherits(x, "dimRedData")) x@meta else data.frame() indata <- if (inherits(x, "dimRedData")) x@data else x if (ncol(indata) != ncol(data@data)) stop("x must have the same number of dimensions as the original data") nindata <- nrow(indata) norg <- nrow(orgdata) message(Sys.time(), ": constructing knn graph") lknng <- makeKNNgraph(rbind(indata, orgdata), k = pars$knn, eps = pars$eps) message(Sys.time(), ": calculating geodesic distances") lgeodist <- igraph::distances(lknng, seq_len(nindata), nindata + seq_len(norg)) message(Sys.time(), ": embedding") dammu <- sweep(lgeodist ^ 2, 2, colMeans(geodist ^ 2), "-") Lsharp <- sweep(e_vectors, 2, e_values, "/") out <- -0.5 * (dammu %*% Lsharp) message(Sys.time(), ": DONE") return(new("dimRedData", data = out, meta = appl.meta)) } return(new( "dimRedResult", data = new("dimRedData", data = e_vectors, meta = meta), org.data = orgdata, has.org.data = keep.org.data, apply = appl, has.apply = TRUE, method = "Isomap", pars = pars, other.data = if (pars$get_geod) list(geod = as.dist(geodist)) else list() )) }) ) ## input data(matrix or data frame) return knn graph implements ## "smart" choices on RANN::nn2 parameters we ignore radius search ## TODO: find out a good limit to switch from kd to bd trees COMMENT: ## bd trees are buggy, they dont work if there are duplicated data ## points and checking would neutralize the performance gain, so bd ## trees are not really usable. 
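## A small usage sketch (kept as a comment so nothing is executed at package
## load; assumes the RANN and igraph packages are installed): build a
## 10-nearest-neighbor graph on random 3-dimensional data.  The returned
## object is an undirected igraph graph whose edge weights are the euclidean
## distances between neighboring points.
##
##     x <- matrix(rnorm(300), ncol = 3)
##     g <- makeKNNgraph(x, k = 10)
##     igraph::vcount(g)              # one vertex per row of x
##     summary(igraph::E(g)$weight)   # edge weights are euclidean distances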
makeKNNgraph <- function (x, k, eps = 0, diag = FALSE){ ## requireNamespace("RANN") ## requireNamespace("igraph") ## consts INF_VAL <- 1.340781e+15 NA_IDX <- 0 BDKD_LIM <- 1000000 #todo: figure out a good value here ## select parameters M <- nrow(x) treetype <- "kd" # if (M < BDKD_LIM) "kd" else "bd" # see: # https://github.com/jefferis/RANN/issues/19 searchtype <- if (eps == 0) "standard" else "priority" ## RANN::nn2 returns the points in data with respect to query ## e.g. the rows in the output are the points in query and the ## columns the points in data. nn2res <- RANN::nn2(data = x, query = x, k = k + 1, treetype = treetype, searchtype = searchtype, eps = eps) ## create graph: the first ny nodes will be y, the last nx nodes ## will be x, if x != y ## it is not really pretty to create a ## directed graph first and then make it undirected. g <- igraph::make_empty_graph(M, directed = TRUE) g[from = if (diag) rep(seq_len(M), times = k + 1) else rep(seq_len(M), times = k), to = if (diag) as.vector(nn2res$nn.idx) else as.vector(nn2res$nn.idx[, -1]), attr = "weight"] <- if (diag) as.vector(nn2res$nn.dists) else as.vector(nn2res$nn.dists[, -1]) return(igraph::as.undirected(g, mode = "collapse", edge.attr.comb = "first")) } ## the original isomap method I'll keep it here for completeness: ## isomap <- new("dimRedMethod", ## stdpars = list(knn = 50, ## d = dist, ## ndim = 2) ## fun = function (data, pars, ## keep.org.data = TRUE) { ## chckpkg("vegan") ## meta <- data@meta ## orgdata <- if (keep.org.data) data@data else NULL ## indata <- data@data ## outdata <- vegan::isomap(pars$d(indata), ## ndim = pars$ndim, ## k = pars$knn)$points ## colnames(outdata) <- paste0("Iso", 1:ncol(outdata)) ## return(new( ## "dimRedResult", ## data = new("dimRedData", ## data = outdata, ## meta = meta), ## org.data = orgdata, ## has.org.data = keep.org.data, ## method = "isomap", ## pars = pars ## )) ## }) dimRed/R/graph_embed.R0000644000176200001440000002231713371631672014255 0ustar liggesusers#' Graph Embedding via the Kamada Kawai Algorithm #' #' An S4 Class implementing the Kamada Kawai Algorithm for graph embedding. #' #' Graph embedding algorithms se the data as a graph. Between the #' nodes of the graph exist attracting and repelling forces which can #' be modeled as electrical fields or springs connecting the #' nodes. The graph is then forced into a lower dimensional #' representation that tries to represent the forces betweent he nodes #' accurately by minimizing the total energy of the attracting and #' repelling forces. #' #' @template dimRedMethodSlots #' #' @template dimRedMethodGeneralUsage #' #' @section Parameters: #' KamadaKawai can take the following parameters: #' \describe{ #' \item{ndim}{The number of dimensions, defaults to 2. Can only be 2 or 3} #' \item{knn}{Reduce the graph to keep only the neares neighbors. Defaults to 100.} #' \item{d}{The distance function to determine the weights of the graph edges. Defaults to euclidean distances.} #' } #' #' @section Implementation: #' Wraps around \code{\link[igraph]{layout_with_kk}}. The parameters #' maxiter, epsilon and kkconst are set to the default values and #' cannot be set, this may change in a future release. The DimRed #' Package adds an extra sparsity parameter by constructing a knn #' graph which also may improve visualization quality. #' #' @references #' #' Kamada, T., Kawai, S., 1989. An algorithm for drawing general undirected #' graphs. Information Processing Letters 31, 7-15. 
#' https://doi.org/10.1016/0020-0190(89)90102-6 #' #' @examples #' dat <- loadDataSet("Swiss Roll", n = 200) #' kamada_kawai <- KamadaKawai() #' kk <- kamada_kawai@fun(dat, kamada_kawai@stdpars) #' #' plot(kk@data@data) #' #' #' @include dimRedResult-class.R #' @include dimRedMethod-class.R #' @family dimensionality reduction methods #' @export KamadaKawai #' @exportClass KamadaKawai KamadaKawai <- setClass( "KamadaKawai", contains = "dimRedMethod", prototype = list( stdpars = list(ndim = 2, knn = 100, d = stats::dist), fun = function (data, pars, keep.org.data = TRUE) { chckpkg("igraph") meta <- data@meta orgdata <- if (keep.org.data) data@data else NULL indata <- data@data outdata <- em_graph_layout( indata, graph_em_method = igraph::layout_with_kk, knn = pars$knn, d = pars$d, ndim = pars$ndim, weight.trans = I #pars$weight.trans ) colnames(outdata) <- paste0("KK", 1:ncol(outdata)) return(new( "dimRedResult", data = new("dimRedData", data = outdata, meta = meta), org.data = orgdata, has.org.data = keep.org.data, method = "graph_kk", pars = pars )) }) ) #' Distributed Recursive Graph Layout #' #' An S4 Class implementing Distributed recursive Graph Layout. #' #' DrL uses a complex algorithm to avoid local minima in the graph #' embedding which uses several steps. #' #' @template dimRedMethodSlots #' #' @template dimRedMethodGeneralUsage #' #' @section Parameters: #' DrL can take the following parameters: #' \describe{ #' \item{ndim}{The number of dimensions, defaults to 2. Can only be 2 or 3} #' \item{knn}{Reduce the graph to keep only the neares neighbors. Defaults to 100.} #' \item{d}{The distance function to determine the weights of the graph edges. Defaults to euclidean distances.} #' } #' #' @section Implementation: #' Wraps around \code{\link[igraph]{layout_with_drl}}. The parameters #' maxiter, epsilon and kkconst are set to the default values and #' cannot be set, this may change in a future release. The DimRed #' Package adds an extra sparsity parameter by constructing a knn #' graph which also may improve visualization quality. #' #' @references #' #' Martin, S., Brown, W.M., Wylie, B.N., 2007. Dr.l: Distributed Recursive #' (graph) Layout (No. dRl; 002182MLTPL00). Sandia National Laboratories. #' #' @examples #' \dontrun{ #' dat <- loadDataSet("Swiss Roll", n = 300) #' #' ## use the S4 Class directly: #' drl <- DrL() #' emb <- drl@fun(dat, drl@stdpars) #' #' ## simpler, use embed(): #' emb2 <- embed(dat, "DrL") #' #' #' plot(emb) #' } #' #' @include dimRedResult-class.R #' @include dimRedMethod-class.R #' @family dimensionality reduction methods #' @export DrL #' @exportClass DrL DrL <- setClass( "DrL", contains = "dimRedMethod", prototype = list( stdpars = list(ndim = 2, knn = 100, d = stats::dist), fun = function (data, pars, keep.org.data = TRUE) { chckpkg("igraph") meta <- data@meta orgdata <- if (keep.org.data) data@data else NULL indata <- data@data outdata <- em_graph_layout( indata, graph_em_method = igraph::layout_with_drl, knn = pars$knn, d = pars$d, ndim = pars$ndim, weight.trans = I #pars$weight.trans ) colnames(outdata) <- paste0("DrL", 1:ncol(outdata)) return(new( "dimRedResult", data = new("dimRedData", data = outdata, meta = meta), org.data = orgdata, has.org.data = keep.org.data, method = "graph_drl", pars = pars )) }) ) #' Fruchterman Reingold Graph Layout #' #' An S4 Class implementing the Fruchterman Reingold Graph Layout #' algorithm. 
#' #' @template dimRedMethodSlots #' #' @template dimRedMethodGeneralUsage #' #' @section Parameters: #' \describe{ #' \item{ndim}{The number of dimensions, defaults to 2. Can only be 2 or 3} #' \item{knn}{Reduce the graph to keep only the neares neighbors. Defaults to 100.} #' \item{d}{The distance function to determine the weights of the graph edges. Defaults to euclidean distances.} #' } #' #' @section Implementation: #' Wraps around \code{\link[igraph]{layout_with_fr}}, see there for #' details. The Fruchterman Reingold algorithm puts the data into #' a circle and puts connected points close to each other. #' #' @references #' #' Fruchterman, T.M.J., Reingold, E.M., 1991. Graph drawing by force-directed #' placement. Softw: Pract. Exper. 21, 1129-1164. #' https://doi.org/10.1002/spe.4380211102 #' #' @examples #' dat <- loadDataSet("Swiss Roll", n = 100) #' #' ## use the S4 Class directly: #' fruchterman_reingold <- FruchtermanReingold() #' pars <- fruchterman_reingold@stdpars #' pars$knn <- 5 #' emb <- fruchterman_reingold@fun(dat, pars) #' #' ## simpler, use embed(): #' emb2 <- embed(dat, "FruchtermanReingold", knn = 5) #' #' plot(emb, type = "2vars") #' #' @include dimRedResult-class.R #' @include dimRedMethod-class.R #' @family dimensionality reduction methods #' @export FruchtermanReingold #' @exportClass FruchtermanReingold FruchtermanReingold <- setClass( "FruchtermanReingold", contains = "dimRedMethod", prototype = list( stdpars = list(ndim = 2, knn = 100, d = stats::dist), fun = function (data, pars, keep.org.data = TRUE) { chckpkg("igraph") meta <- data@meta orgdata <- if (keep.org.data) data@data else NULL indata <- data@data outdata <- em_graph_layout( indata, graph_em_method = igraph::layout_with_fr, knn = pars$knn, d = pars$d, ndim = pars$ndim, weight.trans = I #pars$weight.trans ) colnames(outdata) <- paste0("FR", 1:ncol(outdata)) return(new( "dimRedResult", data = new("dimRedData", data = outdata, meta = meta), org.data = orgdata, has.org.data = keep.org.data, method = "graph_fr", pars = pars )) }) ) em_graph_layout <- function(data, graph_em_method, knn = 50, d = stats::dist, ndim = 2, weight.trans = I){ chckpkg("igraph") data.dist <- as.matrix(d(data)) data.graph <- construct_knn_graph(data.dist, knn) embed_graph(data.graph, graph_em_method, ndim = ndim) } embed_graph <- function(graph, f, weight.trans = I, ndim = 2){ f(graph, weights = weight.trans(igraph::E(graph)$weight), dim = ndim) } construct_knn_graph <- function (data.dist, knn) { chckpkg("igraph") chckpkg("coRanking") data.graph <- igraph::graph_from_adjacency_matrix( adjmatrix = data.dist, mode = "undirected", weighted = TRUE ) if (is.infinite(knn) || is.na(knn)) return(data.graph) ## else: remove all unnecessary edges data.rankm <- coRanking::rankmatrix(data.dist, input = "dist") data.rankm.ind <- data.rankm <= knn + 1 inds <- which( !(data.rankm.ind | t(data.rankm.ind)), arr.ind = TRUE ) data.graph[ from = inds[, 1], to = inds[, 2] ] <- FALSE return(data.graph) } dimRed/vignettes/0000755000176200001440000000000013464323464013477 5ustar liggesusersdimRed/vignettes/Makefile0000644000176200001440000000043413464323464015140 0ustar liggesusersall: echo "BNET_BUILD_VIGNETTE: $(BNET_BUILD_VIGNETTE)" $(R_HOME)/bin/Rscript -e "knitr::knit2pdf('dimensionality-reduction.Rnw')" $(R_HOME)/bin/Rscript -e "tools::compactPDF('dimensionality-reduction.pdf', gs_quality = 'ebook')" rm -rf dimensionality-reduction.tex figure/ auto/ dimRed/vignettes/bibliography.bib0000644000176200001440000006220013371631672016630 0ustar 
liggesusers @book{rojo-alvarez_digital_2017, edition = {1st}, title = {Digital {Signal} {Processing} with {Kernel} {Methods}}, isbn = {978-1-118-61179-1}, publisher = {Wiley}, author = {Rojo-Álvarez, J. L. and Martínez-Ramón, M. and Muñoz-Marí, J. and Camps-Valls, G.}, month = dec, year = {2017} } @article{arenas-garcia_kernel_2013, title = {Kernel {Multivariate} {Analysis} {Framework} for {Supervised} {Subspace} {Learning}: {A} {Tutorial} on {Linear} and {Kernel} {Multivariate} {Methods}}, volume = {30}, issn = {1053-5888}, shorttitle = {Kernel {Multivariate} {Analysis} {Framework} for {Supervised} {Subspace} {Learning}}, doi = {10.1109/MSP.2013.2250591}, number = {4}, journal = {IEEE Signal Processing Magazine}, author = {Arenas-Garcia, J. and Petersen, K. B. and Camps-Valls, G. and Hansen, L. K.}, month = jul, year = {2013}, pages = {16--29}, } @inproceedings{scholkopf_generalized_2001, title = {A {Generalized} {Representer} {Theorem}}, url = {https://link.springer.com/chapter/10.1007/3-540-44581-1_27}, doi = {10.1007/3-540-44581-1_27}, language = {en}, urldate = {2017-06-12}, booktitle = {Computational {Learning} {Theory}}, publisher = {Springer, Berlin, Heidelberg}, author = {Schölkopf, Bernhard and Herbrich, Ralf and Smola, Alex J.}, month = jul, year = {2001}, pages = {416--426}, } @incollection{bakir_learning_2004, title = {Learning to {Find} {Pre}-{Images}}, url = {http://papers.nips.cc/paper/2417-learning-to-find-pre-images.pdf}, doi = {10.1007/978-3-540-28649-3_31}, urldate = {2017-06-12}, booktitle = {Advances in {Neural} {Information} {Processing} {Systems} 16}, publisher = {MIT Press}, author = {Bakir, Gökhan H. and Weston, Jason and Schölkopf, Prof. Bernhard}, editor = {Thrun, S. and Saul, L. K. and Schölkopf, P. B.}, year = {2004}, pages = {449--456}, } @inproceedings{babaee_assessment_2013, title = {Assessment of dimensionality reduction based on communication channel model; application to immersive information visualization}, url = {http://elib.dlr.de/88828/}, doi = {10.1109/BigData.2013.6691726}, booktitle = {Big {Data} 2013}, publisher = {IEEE Xplore}, author = {Babaee, Mohammadreza and Datcu, Mihai and Rigoll, Gerald}, year = {2013}, pages = {1--6}, } @article{mahecha_nonlinear_2007, title = {Nonlinear dimensionality reduction: {Alternative} ordination approaches for extracting and visualizing biodiversity patterns in tropical montane forest vegetation data}, volume = {2}, issn = {1574-9541}, shorttitle = {Nonlinear dimensionality reduction}, url = {http://www.sciencedirect.com/science/article/pii/S1574954107000325}, doi = {10.1016/j.ecoinf.2007.05.002}, number = {2}, urldate = {2016-08-26}, journal = {Ecological Informatics}, author = {Mahecha, Miguel D. 
and Martínez, Alfredo and Lischeid, Gunnar and Beck, Erwin}, month = jun, year = {2007}, pages = {138--149}, } @inproceedings{bengio_out--sample_2003, title = {Out-of-{Sample} {Extensions} for {LLE}, {Isomap}, {MDS}, {Eigenmaps}, and {Spectral} {Clustering}}, booktitle = {In {Advances} in {Neural} {Information} {Processing} {Systems}}, publisher = {MIT Press}, author = {Bengio, Yoshua and Paiement, Jean-Francois and Vincent, Pascal}, year = {2004}, pages = {177--184}, } @misc{noauthor_scopus_nodate, author = {Elsevier}, year = {2017}, title = {Scopus - {Advanced} search}, url = {https://www.scopus.com/}, urldate = {2017-03-28} } @article{diaz_global_2016, title = {The global spectrum of plant form and function}, volume = {529}, issn = {0028-0836}, url = {http://www.nature.com/nature/journal/v529/n7585/full/nature16489.html}, doi = {10.1038/nature16489}, language = {en}, number = {7585}, urldate = {2017-03-22}, journal = {Nature}, author = {Díaz, Sandra and Kattge, Jens and Cornelissen, Johannes H. C. and Wright, Ian J. and Lavorel, Sandra and Dray, Stéphane and Reu, Björn and Kleyer, Michael and Wirth, Christian and Colin Prentice, I. and Garnier, Eric and Bönisch, Gerhard and Westoby, Mark and Poorter, Hendrik and Reich, Peter B. and Moles, Angela T. and Dickie, John and Gillison, Andrew N. and Zanne, Amy E. and Chave, Jérôme and Joseph Wright, S. and Sheremet’ev, Serge N. and Jactel, Hervé and Baraloto, Christopher and Cerabolini, Bruno and Pierce, Simon and Shipley, Bill and Kirkup, Donald and Casanoves, Fernando and Joswig, Julia S. and Günther, Angela and Falczuk, Valeria and Rüger, Nadja and Mahecha, Miguel D. and Gorné, Lucas D.}, month = jan, year = {2016}, pages = {167--171}, } first application of pca in ecology? @article{aart_distribution_1972, title = {Distribution {Analysis} of {Wolfspiders} ({Araneae}, {Lycosidae}) in a {Dune} {Area} {By} {Means} of {Principal} {Component} {Analysis}}, volume = {23}, issn = {1568-542X}, url = {http://booksandjournals.brillonline.com/content/journals/10.1163/002829673x00076}, doi = {10.1163/002829673X00076}, number = {3}, urldate = {2016-07-18}, journal = {Netherlands Journal of Zoology}, author = {Aart, P. J. M. Van Der}, month = jan, year = {1972}, pages = {266--329}, } @article{morrall_soil_1974, title = {Soil microfungi associated with aspen in {Saskatchewan}: synecology and quantitative analysis}, volume = {52}, issn = {0008-4026}, shorttitle = {Soil microfungi associated with aspen in {Saskatchewan}}, url = {http://www.nrcresearchpress.com/doi/abs/10.1139/b74-233}, doi = {10.1139/b74-233}, number = {8}, urldate = {2016-07-18}, journal = {Can. J. Bot.}, author = {Morrall, R. A. 
A.}, month = aug, year = {1974}, pages = {1803--1817}, } @article{pearson_lines_1901, title = {On lines and planes of closest fit to systems of points in space}, volume = {2}, number = {6}, journal = {Philosophical Magazine}, doi = {10.1080/14786440109462720}, author = {Pearson, K}, year = {1901}, pages = {559--572}, } @article{kramer_nonlinear_1991, title = {Nonlinear principal component analysis using autoassociative neural networks}, volume = {37}, issn = {1547-5905}, doi = {10.1002/aic.690370209}, language = {en}, number = {2}, urldate = {2016-07-15}, journal = {AIChE J.}, author = {Kramer, Mark A.}, month = feb, year = {1991}, pages = {233--243}, } @article{hsieh_nonlinear_2004, title = {Nonlinear multivariate and time series analysis by neural network methods}, volume = {42}, issn = {1944-9208}, doi = {10.1029/2002RG000112}, language = {en}, number = {1}, urldate = {2016-07-15}, journal = {Rev. Geophys.}, author = {Hsieh, William W.}, month = mar, year = {2004}, pages = {RG1003}, } @article{optimx, author = {John Nash}, title = {On Best Practice Optimization Methods in R}, journal = {Journal of Statistical Software}, volume = 60, number = 1, year = 2014, issn = {1548-7660}, pages = {1--14}, doi = {10.18637/jss.v060.i02}, url = {https://www.jstatsoft.org/index.php/jss/article/view/v060i02} } @manual{energy, title = {energy: E-statistics (energy statistics)}, author = {Maria L. Rizzo and Gabor J. Szekely}, year = {2014}, note = {R package version 1.6.2}, url = {https://CRAN.R-project.org/package=energy}, } @misc{soeren_sonnenburg_2017_1067840, author = {Soeren Sonnenburg and Heiko Strathmann and Sergey Lisitsyn and Viktor Gal and Fernando J. Iglesias García and Wu Lin and Soumyajit De and Chiyuan Zhang and frx and tklein23 and Evgeniy Andreev and JonasBehr and sploving and Parijat Mazumdar and Christian Widmer and Pan Deng / Zora and Giovanni De Toni and Saurabh Mahindre and Abhijeet Kislay and Kevin Hughes and Roman Votyakov and khalednasr and Sanuj Sharma and Alesis Novik and Abinash Panda and Evangelos Anagnostopoulos and Liang Pang and Alex Binder and serialhex and Björn Esser}, title = {shogun-toolbox/shogun: Shogun 6.1.0}, month = nov, year = 2017, doi = {10.5281/zenodo.1067840}, url = {https://doi.org/10.5281/zenodo.1067840} } @article{scikit-learn, title={Scikit-learn: Machine Learning in {P}ython}, author={Pedregosa, F. and Varoquaux, G. and Gramfort, A. and Michel, V. and Thirion, B. and Grisel, O. and Blondel, M. and Prettenhofer, P. and Weiss, R. and Dubourg, V. and Vanderplas, J. and Passos, A. and Cournapeau, D. and Brucher, M. and Perrot, M. and Duchesnay, E.}, journal={Journal of Machine Learning Research}, volume={12}, pages={2825--2830}, year={2011} } @article{torgerson_multidimensional_1952, title = {Multidimensional scaling: {I}. {Theory} and method}, volume = {17}, issn = {0033-3123, 1860-0980}, shorttitle = {Multidimensional scaling}, url = {http://link.springer.com/article/10.1007/BF02288916}, doi = {10.1007/BF02288916}, language = {en}, number = {4}, urldate = {2016-08-16}, journal = {Psychometrika}, author = {Torgerson, Warren S.}, year = {1952}, pages = {401--419}, } @article{tenenbaum_global_2000, title = {A {Global} {Geometric} {Framework} for {Nonlinear} {Dimensionality} {Reduction}}, volume = {290}, issn = {0036-8075, 1095-9203}, url = {http://science.sciencemag.org/content/290/5500/2319}, doi = {10.1126/science.290.5500.2319}, language = {en}, number = {5500}, urldate = {2016-07-13}, journal = {Science}, author = {Tenenbaum, Joshua B. 
and Silva, Vin de and Langford, John C.}, month = dec, year = {2000}, pmid = {11125149}, pages = {2319--2323}, } @article{roweis_nonlinear_2000, title = {Nonlinear {Dimensionality} {Reduction} by {Locally} {Linear} {Embedding}}, volume = {290}, issn = {0036-8075, 1095-9203}, url = {http://science.sciencemag.org/content/290/5500/2323}, doi = {10.1126/science.290.5500.2323}, language = {en}, number = {5500}, urldate = {2016-08-16}, journal = {Science}, author = {Roweis, Sam T. and Saul, Lawrence K.}, month = dec, year = {2000}, pmid = {11125150}, pages = {2323--2326}, } @article{kruskal_multidimensional_1964, title = {Multidimensional scaling by optimizing goodness of fit to a nonmetric hypothesis}, volume = {29}, issn = {0033-3123, 1860-0980}, url = {http://link.springer.com/article/10.1007/BF02289565}, doi = {10.1007/BF02289565}, language = {en}, number = {1}, urldate = {2016-12-22}, journal = {Psychometrika}, author = {Kruskal, J. B.}, month = mar, year = {1964}, pages = {1--27}, } @article{kruskal_nonmetric_1964, title = {Nonmetric multidimensional scaling: {A} numerical method}, volume = {29}, issn = {0033-3123, 1860-0980}, shorttitle = {Nonmetric multidimensional scaling}, url = {http://link.springer.com/article/10.1007/BF02289694}, doi = {10.1007/BF02289694}, language = {en}, number = {2}, urldate = {2016-12-22}, journal = {Psychometrika}, author = {Kruskal, J. B.}, month = jun, year = {1964}, pages = {115--129}, } @article{coifman_geometric_2005, title = {Geometric diffusions as a tool for harmonic analysis and structure definition of data: {Diffusion} maps}, volume = {102}, issn = {0027-8424, 1091-6490}, shorttitle = {Geometric diffusions as a tool for harmonic analysis and structure definition of data}, url = {http://www.pnas.org/content/102/21/7426}, doi = {10.1073/pnas.0500334102}, language = {en}, number = {21}, urldate = {2016-03-30}, journal = {Proceedings of the National Academy of Sciences of the United States of America}, author = {Coifman, R. R. and Lafon, S. and Lee, A. B. and Maggioni, M. and Nadler, B. and Warner, F. and Zucker, S. W.}, month = may, year = {2005}, pmid = {15899970}, pages = {7426--7431}, } @article{coifman_diffusion_2006, title = {Diffusion maps}, volume = {21}, issn = {10635203}, url = {http://linkinghub.elsevier.com/retrieve/pii/S1063520306000546}, doi = {10.1016/j.acha.2006.04.006}, language = {en}, number = {1}, urldate = {2016-08-16}, journal = {Applied and Computational Harmonic Analysis}, author = {Coifman, Ronald R. 
and Lafon, Stéphane}, month = jul, year = {2006}, pages = {5--30}, } @article{scholkopf_nonlinear_1998, title = {Nonlinear {Component} {Analysis} as a {Kernel} {Eigenvalue} {Problem}}, volume = {10}, issn = {08997667}, doi = {10.1162/089976698300017467}, number = {5}, journal = {Neural Computation}, author = {Schölkopf, Bernhard and Smola, Alexander and Müller, Klaus-Robert}, year = {1998}, pages = {1299--1319}, } @article{hyvarinen_fast_1999, title = {Fast and robust fixed-point algorithms for independent component analysis}, volume = {10}, issn = {1045-9227}, doi = {10.1109/72.761722}, number = {3}, journal = {IEEE Transactions on Neural Networks}, author = {Hyvarinen, A.}, month = may, year = {1999}, pages = {626--634}, } @article{comon_independent_1994, title = {Independent component analysis, {A} new concept?}, volume = {36}, issn = {01651684}, url = {http://linkinghub.elsevier.com/retrieve/pii/0165168494900299}, doi = {10.1016/0165-1684(94)90029-9}, language = {en}, number = {3}, urldate = {2016-08-17}, journal = {Signal Processing}, author = {Comon, Pierre}, month = apr, year = {1994}, pages = {287--314}, } @article{kamada_algorithm_1989, title = {An algorithm for drawing general undirected graphs}, volume = {31}, issn = {0020-0190}, url = {http://www.sciencedirect.com/science/article/pii/0020019089901026}, doi = {10.1016/0020-0190(89)90102-6}, number = {1}, urldate = {2016-08-17}, journal = {Information Processing Letters}, author = {Kamada, Tomihisa and Kawai, Satoru}, month = apr, year = {1989}, pages = {7--15}, } @article{fruchterman_graph_1991, title = {Graph drawing by force-directed placement}, volume = {21}, issn = {1097-024X}, doi = {10.1002/spe.4380211102}, language = {en}, number = {11}, urldate = {2016-08-17}, journal = {Softw: Pract. Exper.}, author = {Fruchterman, Thomas M. J. and Reingold, Edward M.}, month = nov, year = {1991}, pages = {1129--1164}, } @techreport{martin_dr.l:_2007, title = {Dr.l: {Distributed} {Recursive} (graph) {Layout}}, shorttitle = {Dr.l}, url = {http://www.osti.gov/scitech/biblio/1231060-dr-distributed-recursive-graph-layout}, number = {dRl; 002182MLTPL00}, urldate = {2016-08-17}, institution = {Sandia National Laboratories}, author = {Martin, Shawn and Brown, W. Michael and Wylie, Brian N.}, month = nov, year = {2007}, } @article{belkin_laplacian_2003, title = {Laplacian {Eigenmaps} for {Dimensionality} {Reduction} and {Data} {Representation}}, volume = 15, issn = 08997667, doi = {10.1162/089976603321780317}, number = 6, urldate = {2016-08-17}, journal = {Neural Computation}, author = {Belkin, Mikhail and Niyogi, Partha}, month = jun, year = 2003, pages = 1373, } @inproceedings{terada_local_2014, title = {Local {Ordinal} {Embedding}}, url = {http://jmlr.org/proceedings/papers/v32/terada14.html}, urldate = {2016-04-21}, author = {Terada, Yoshikazu and Luxburg, Ulrike von}, year = {2014}, pages = {847--855} } @article{van_der_maaten_visualizing_2008, title = {Visualizing {Data} using t-{SNE}}, volume = {9}, issn = {1532-4435}, language = {English}, journal = {J. Mach. Learn. Res.}, author = {van der Maaten, Laurens and Hinton, Geoffrey}, month = nov, year = {2008}, note = {WOS:000262637600007}, pages = {2579--2605}, } @incollection{hinton_stochastic_2003, title = {Stochastic {Neighbor} {Embedding}}, url = {http://papers.nips.cc/paper/2276-stochastic-neighbor-embedding.pdf}, urldate = {2016-08-17}, booktitle = {Advances in {Neural} {Information} {Processing} {Systems} 15}, publisher = {MIT Press}, author = {Hinton, Geoffrey E. 
and Roweis, Sam T.}, editor = {Becker, S. and Thrun, S. and Obermayer, K.}, year = {2003}, pages = {857--864}, } @article{lee_multi-scale_2015, series = {Learning for {Visual} {Semantic} {Understanding} in {Big} {DataESANN} 2014Industrial {Data} {Processing} and {AnalysisSelected} papers from the 22nd {European} {Symposium} on {Artificial} {Neural} {Networks}, {Computational} {Intelligence} and {Machine} {Learning} ({ESANN} 2014){Selected} papers from the 11th {World} {Congress} on {Intelligent} {Control} and {Automation} ({WCICA}2014)}, title = {Multi-scale similarities in stochastic neighbour embedding: {Reducing} dimensionality while preserving both local and global structure}, volume = {169}, issn = {0925-2312}, shorttitle = {Multi-scale similarities in stochastic neighbour embedding}, url = {http://www.sciencedirect.com/science/article/pii/S0925231215003641}, doi = {10.1016/j.neucom.2014.12.095}, urldate = {2016-04-28}, journal = {Neurocomputing}, author = {Lee, John A. and Peluffo-Ordóñez, Diego H. and Verleysen, Michel}, month = dec, year = {2015}, pages = {246--261}, } @article{lee_type_2013, series = {Advances in artificial neural networks, machine learning, and computational {intelligenceSelected} papers from the 20th {European} {Symposium} on {Artificial} {Neural} {Networks} ({ESANN} 2012)}, title = {Type 1 and 2 mixtures of {Kullback}–{Leibler} divergences as cost functions in dimensionality reduction based on similarity preservation}, volume = {112}, issn = {0925-2312}, url = {http://www.sciencedirect.com/science/article/pii/S0925231213001471}, doi = {10.1016/j.neucom.2012.12.036}, urldate = {2016-04-28}, journal = {Neurocomputing}, author = {Lee, John A. and Renard, Emilie and Bernard, Guillaume and Dupont, Pierre and Verleysen, Michel}, month = jul, year = {2013}, pages = {92--108}, } @article{venna_information_2010, title = {Information {Retrieval} {Perspective} to {Nonlinear} {Dimensionality} {Reduction} for {Data} {Visualization}}, volume = {11}, issn = {1532-4435}, language = {English}, journal = {J. Mach. Learn. Res.}, author = {Venna, Jarkko and Peltonen, Jaakko and Nybo, Kristian and Aidos, Helena and Kaski, Samuel}, month = feb, year = {2010}, note = {WOS:000277186500001}, pages = {451--490}, } @article{laparra_dimensionality_2015, title = {Dimensionality {Reduction} via {Regression} in {Hyperspectral} {Imagery}}, volume = {9}, issn = {1932-4553}, doi = {10.1109/JSTSP.2015.2417833}, number = {6}, journal = {IEEE Journal of Selected Topics in Signal Processing}, author = {Laparra, V. and Malo, J. 
and Camps-Valls, G.}, month = sep, year = {2015}, pages = {1026--1036}, } @article{chen_local_2006, author = {Lisha Chen and Andreas Buja}, title = {Local Multidimensional Scaling for Nonlinear Dimension Reduction, Graph Drawing, and Proximity Analysis}, journal = {Journal of the American Statistical Association}, volume = {104}, number = {485}, pages = {209-219}, year = {2009}, publisher = {Taylor & Francis}, doi = {10.1198/jasa.2009.0111}, URL = {https://doi.org/10.1198/jasa.2009.0111}, eprint = {https://doi.org/10.1198/jasa.2009.0111} } @inproceedings{saunders_ridge_1998, author = {Saunders, Craig and Gammerman, Alexander and Vovk, Volodya}, title = {Ridge Regression Learning Algorithm in Dual Variables}, booktitle = {Proceedings of the Fifteenth International Conference on Machine Learning}, series = {ICML '98}, year = {1998}, isbn = {1-55860-556-8}, pages = {515--521}, numpages = {7}, url = {http://dl.acm.org/citation.cfm?id=645527.657464}, acmid = {657464}, publisher = {Morgan Kaufmann Publishers Inc.}, address = {San Francisco, CA, USA}, } @article{lee_quality_2009, series = {Advances in {Machine} {Learning} and {Computational} {Intelligence}16th {European} {Symposium} on {Artificial} {Neural} {Networks} 200816th {European} {Symposium} on {Artificial} {Neural} {Networks} 2008}, title = {Quality assessment of dimensionality reduction: {Rank}-based criteria}, volume = {72}, issn = {0925-2312}, shorttitle = {Quality assessment of dimensionality reduction}, url = {http://www.sciencedirect.com/science/article/pii/S0925231209000101}, doi = {10.1016/j.neucom.2008.12.017}, number = {7–9}, urldate = {2016-04-04}, journal = {Neurocomputing}, author = {Lee, John A. and Verleysen, Michel}, month = mar, year = {2009}, pages = {1431--1443}, } @article{sokal_comparison_1962, title = {The {Comparison} of {Dendrograms} by {Objective} {Methods}}, volume = {11}, issn = {0040-0262}, url = {http://www.jstor.org/stable/1217208}, doi = {10.2307/1217208}, number = {2}, urldate = {2016-08-15}, journal = {Taxon}, author = {Sokal, Robert R. and Rohlf, F. James}, year = {1962}, pages = {33--40}, } @article{szekely_measuring_2007, title = {Measuring and testing dependence by correlation of distances}, volume = {35}, issn = {0090-5364, 2168-8966}, url = {http://projecteuclid.org/euclid.aos/1201012979}, doi = {10.1214/009053607000000505}, language = {EN}, number = {6}, urldate = {2016-06-10}, journal = {The Annals of Statistics}, author = {Székely, Gábor J. and Rizzo, Maria L. and Bakirov, Nail K.}, month = dec, year = {2007}, mrnumber = {MR2382665}, zmnumber = {1129.62059}, pages = {2769--2794}, } @article{kireeva_nonlinear_2014, title = {Nonlinear {Dimensionality} {Reduction} for {Visualizing} {Toxicity} {Data}: {Distance}-{Based} {Versus} {Topology}-{Based} {Approaches}}, volume = {9}, issn = {1860-7187}, shorttitle = {Nonlinear {Dimensionality} {Reduction} for {Visualizing} {Toxicity} {Data}}, doi = {10.1002/cmdc.201400027}, language = {en}, number = {5}, urldate = {2016-08-19}, journal = {ChemMedChem}, author = {Kireeva, Natalia V. and Ovchinnikova, Svetlana I. and Tetko, Igor V. and Asiri, Abdullah M. and Balakin, Konstantin V. 
and Tsivadze, Aslan Yu.}, month = may, year = {2014}, pages = {1047--1059}, } @article{han_deep_2016, author = {Han, Yoonchang and Kim, Jaehun and Lee, Kyogu}, title = {{Deep Convolutional Neural Networks for Predominant Instrument Recognition in Polyphonic Music}}, journal = {{IEEE-ACM TRANSACTIONS ON AUDIO SPEECH AND LANGUAGE PROCESSING}}, year = {{2017}}, volume = {{25}}, number = {{1}}, pages = {{208-221}}, month = {{JAN}}, publisher = {{IEEE-INST ELECTRICAL ELECTRONICS ENGINEERS INC}}, language = {{English}}, doi = {{10.1109/TASLP.2016.2632307}}, issn = {{2329-9290}}, } @article{van_der_maaten_dimensionality_2009, title = {Dimensionality reduction: a comparative review}, volume = {10}, shorttitle = {Dimensionality reduction}, urldate = {2016-06-28}, journal = {J Mach Learn Res}, author = {Van Der Maaten, Laurens and Postma, Eric and Van den Herik, Jaap}, year = {2009}, pages = {66--71}, } @inproceedings{bengio_out--sample_2003, title = {Out-of-{Sample} {Extensions} for {LLE}, {Isomap}, {MDS}, {Eigenmaps}, and {Spectral} {Clustering}}, booktitle = {In {Advances} in {Neural} {Information} {Processing} {Systems}}, publisher = {MIT Press}, author = {Bengio, Yoshua and Paiement, Jean-Francois and Vincent, Pascal}, year = {2003}, pages = {177--184}, } @article{lueks_how_2011, title = {How to {Evaluate} {Dimensionality} {Reduction}? - {Improving} the {Co}-ranking {Matrix}}, shorttitle = {How to {Evaluate} {Dimensionality} {Reduction}?}, url = {http://arxiv.org/abs/1110.3917}, urldate = {2016-03-18}, journal = {arXiv:1110.3917 [cs]}, author = {Lueks, Wouter and Mokbel, Bassam and Biehl, Michael and Hammer, Barbara}, month = oct, year = {2011}, note = {arXiv: 1110.3917}, } @techreport{de_silva_sparse_2004, title = {Sparse multidimensional scaling using landmark points}, author = {De Silva, Vin and Tenenbaum, Joshua B/r}, year = {2004}, } @article{groenen_multidimensional_2016, title = {Multidimensional {Scaling} by {Majorization}: {A} {Review}}, volume = {73}, issn = {1548-7660}, url = {https://www.jstatsoft.org/index.php/jss/article/view/v073i08}, doi = {10.18637/jss.v073.i08}, number = {1}, journal = {Journal of Statistical Software}, author = {Groenen, Patrick and Velden, Michel van de}, year = {2016}, pages = {1--26}, } @article{leeuw_multidimensional_2009, author = {de Leeuw, Jan and Mair, Patrick}, title = {{Multidimensional Scaling Using Majorization: SMACOF in R}}, journal = {{JOURNAL OF STATISTICAL SOFTWARE}}, year = {{2009}}, volume = {{31}}, number = {{3}}, pages = {{1--30}}, month = {{AUG}}, publisher = {{JOURNAL STATISTICAL SOFTWARE}}, ISSN = {{1548-7660}}, } @article{bengio_learning_2004, title = {Learning {Eigenfunctions} {Links} {Spectral} {Embedding} and {Kernel} {PCA}}, volume = {16}, issn = {0899-7667}, url = {http://dx.doi.org/10.1162/0899766041732396}, doi = {10.1162/0899766041732396}, number = {10}, urldate = {2016-10-05}, journal = {Neural Computation}, author = {Bengio, Yoshua and Delalleau, Olivier and Roux, Nicolas Le and Paiement, Jean-François and Vincent, Pascal and Ouimet, Marie}, month = oct, year = {2004}, pages = {2197--2219}, } @misc{_gdkrmr/dimred_????, title = {gdkrmr/{dimRed}}, url = {https://github.com/gdkrmr/dimRed}, urldate = {2016-11-30}, journal = {GitHub}, } @book{luxburg_tutorial_2007, title = {A {Tutorial} on {Spectral} {Clustering}}, author = {Luxburg, Ulrike Von}, year = {2007}, } @article{kraemer_dimred_2018, title = {{dimRed} and {coRanking} - {Unifying} {Dimensionality} {Reduction} in {R}}, url = 
{https://journal.r-project.org/archive/2018/RJ-2018-039/index.html}, journal = {The R Journal}, author = {Kraemer, Guido and Reichstein, Markus and Mahecha, Miguel D.}, year = {2018}, }dimRed/vignettes/classification_tree.tex0000644000176200001440000000722513371631672020241 0ustar liggesusers\newcommand{\imp}[1] {\textbf{#1}} % style for implemented methods \newcommand{\noimp}[1] {#1} % style for not implemented methods \tikz[ % tree layout, grow cyclic, % level 1/.style={level distance=1.2cm, sibling angle=180, text width=1.5cm, font={\small}}, % level 2/.style={level distance=1.9cm, sibling angle=40, font={\scriptsize}},%, text width=1.4cm}, level 3/.style={level distance=2.2cm, sibling angle=30}, level 4/.style={level distance=2.3cm}, % text width=1.2cm, font=\tiny, innernode/.style={align=flush center},%, text width=1.2}, leaf/.style={% % draw, very thin, % fill=red!30, rounded corners, align=flush left, text width=, inner sep=2pt, font={\hangindent=0.2cm\scriptsize\sffamily}}, ]{ \node[innernode, draw, align=flush center, rounded corners, font={\normalsize\bfseries}]{ Dimensionality \\ reduction} child[] { node[innernode] {Convex} % level 1 child[sibling angle=55]{ node[innernode] {Full spectral} % level 2 child { node[innernode] {Euclidean distances} child { node[leaf, text width=1.3cm]{ \imp{PCA} \\ \imp{Classical scaling} } } } child { node[innernode] {Geodesic distances} child { node[leaf]{ \imp{Isomap} } } } child { node[innernode] {Kernel-based} child { node[leaf]{ \imp{Kernel PCA} \\ \noimp{MVU} } } } child { node[innernode] {Diffusion distance} child { node[leaf]{ \imp{Diffusion maps} } } } } child[] { node[innernode] {Removal of shared information by regression} %level 2 child{ node[leaf]{ \imp{DRR} } } } child[sibling angle=55] { node[innernode] {Sparse spectral} % level 2 child[sibling angle=45] { node[innernode] {Reconstruction weights} child {node[leaf]{ \imp{Local Linear Embedding} } } } child[sibling angle=45] { node[innernode] {Neighborhood graph Laplacian} child { node[leaf]{ \imp{Laplacian Eigenmaps} } } } child[sibling angle=45] { node[innernode] {Local tangent space} child { node[leaf, text width=2cm]{ \imp{Hessian LLE} \\ \noimp{Local tangent space alignment} } } } } } child[level distance=1.8cm] { node[innernode] {Non-convex} %level 1 child { node[innernode] {Weighted Euclidean distances} % level 2 child { node[leaf, text width=2cm]{ \imp{Non-linear MDS} \\ \noimp{Sammon's mapping} \\ \noimp{Stochastic Proximity Embedding} } } } child { node[innernode] {Alignment of local linear models} % level 2 child { node[leaf]{ \noimp{LLC} \\ \noimp{Man.\ charting} } } } child { node[innernode] {Neural network} % level 2 child { node[leaf]{ Autoencoder } } } child { node[innernode] {Discrete mapping} % level 2 child { node[leaf,text width=2.5cm]{ \noimp{Self Organizing Maps} \\ \noimp{Generative Topographic Mapping} \\ \noimp{Elastic Net} } } } child { node[innernode] {Stochastic methods} % level 2 child { node[leaf]{ \noimp{SNE} \\ \imp{t-SNE} \\ \noimp{NeRV} \\ \noimp{JNE} } } } child { node[innernode] {Force directed} % level 2 child { node[leaf, text width=2cm]{ \imp{Kamada-Kawai} \\ \imp{Fruchtermann-Reingold} \\ \imp{DrL} } } } }; } %%% Local Variables: %%% mode: LaTeX %%% TeX-command-extra-options: "-shell-escape" %%% TeX-engine: default %%% TeX-master: "dimensionality-reduction" %%% End:dimRed/vignettes/dimensionality-reduction.Rnw0000644000176200001440000017352413371631672021225 0ustar liggesusers\documentclass{article} %\VignetteEngine{knitr::knitr} 
%\VignetteIndexEntry{Dimensionality Reduction} %\VignetteKeyword{Dimensionality Reduction} \usepackage[utf8]{inputenc} \usepackage[T1]{fontenc} \usepackage{hyperref} \usepackage{amsmath,amssymb} \usepackage{booktabs} \usepackage{tikz} \usetikzlibrary{trees} \usepackage[sectionbib,round]{natbib} \title{\pkg{dimRed} and \pkg{coRanking}---Unifying Dimensionality Reduction in R} \author{Guido Kraemer \and Markus Reichstein \and Miguel D.\ Mahecha} % these are taken from RJournal.sty: \makeatletter \DeclareRobustCommand\code{\bgroup\@noligs\@codex} \def\@codex#1{\texorpdfstring% {{\normalfont\ttfamily\hyphenchar\font=-1 #1}}% {#1}\egroup} \newcommand{\kbd}[1]{{\normalfont\texttt{#1}}} \newcommand{\key}[1]{{\normalfont\texttt{\uppercase{#1}}}} \DeclareRobustCommand\samp{`\bgroup\@noligs\@sampx} \def\@sampx#1{{\normalfont\texttt{#1}}\egroup'} \newcommand{\var}[1]{{\normalfont\textsl{#1}}} \let\env=\code \newcommand{\file}[1]{{`\normalfont\textsf{#1}'}} \let\command=\code \let\option=\samp \newcommand{\dfn}[1]{{\normalfont\textsl{#1}}} % \acronym is effectively disabled since not used consistently \newcommand{\acronym}[1]{#1} \newcommand{\strong}[1]{\texorpdfstring% {{\normalfont\fontseries{b}\selectfont #1}}% {#1}} \let\pkg=\strong \newcommand{\CRANpkg}[1]{\href{https://CRAN.R-project.org/package=#1}{\pkg{#1}}}% \let\cpkg=\CRANpkg \newcommand{\ctv}[1]{\href{https://CRAN.R-project.org/view=#1}{\emph{#1}}} \newcommand{\BIOpkg}[1]{\href{https://www.bioconductor.org/packages/release/bioc/html/#1.html}{\pkg{#1}}} \makeatother \begin{document} \maketitle \abstract{ % This document is based on the manuscript of \citet{kraemer_dimred_2018} which was published in the R-Journal and has been modified and extended to fit the format of a package vignette and to match the extended functionality of the \pkg{dimRed} package. ``Dimensionality reduction'' (DR) is a widely used approach to find low dimensional and interpretable representations of data that are natively embedded in high-dimensional spaces. % DR can be realized by a plethora of methods with different properties, objectives, and, hence, (dis)advantages. The resulting low-dimensional data embeddings are often difficult to compare with objective criteria. % Here, we introduce the \CRANpkg{dimRed} and \CRANpkg{coRanking} packages for the R language. % These open source software packages enable users to easily access multiple classical and advanced DR methods using a common interface. % The packages also provide quality indicators for the embeddings and easy visualization of high dimensional data. % The \pkg{coRanking} package provides the functionality for assessing DR methods in the co-ranking matrix framework. % In tandem, these packages allow for uncovering complex structures high dimensional data. % Currently 15 DR methods are available in the package, some of which were not previously available to R users. % Here, we outline the \pkg{dimRed} and \pkg{coRanking} packages and make the implemented methods understandable to the interested reader. % } \section{Introduction} \label{sec:intro} Dimensionality Reduction (DR) essentially aims to find low dimensional representations of data while preserving their key properties. % Many methods exist in literature, optimizing different criteria: % maximizing the variance or the statistical independence of the projected data, % minimizing the reconstruction error under different constraints, % or optimizing for different error metrics, % just to name a few. 
% Choosing an inadequate method may imply that much of the underlying structure remains undiscovered. % Often the structures of interest in a data set can be well represented by fewer dimensions than exist in the original data. % Data compression of this kind has the additional benefit of making the encoded information better conceivable to our brains for further analysis tasks like classification or regression problems. % For example, the morphology of a plant's leaves, stems, and seeds reflect the environmental conditions the species usually grow in (e.g.,\ plants with large soft leaves will never grow in a desert but might have an advantage in a humid and shadowy environment). % Because the morphology of the entire plant depends on the environment, many morphological combinations will never occur in nature and the morphological space of all plant species is tightly constrained. % \citet{diaz_global_2016} found that out of six observed morphological characteristics only two embedding dimensions were enough to represent three quarters of the totally observed variability. % DR is a widely used approach for the detection of structure in multivariate data, and has applications in a variety of fields. % In climatology, DR is used to find the modes of some phenomenon, e.g.,\ the first Empirical Orthogonal Function of monthly mean sea surface temperature of a given region over the Pacific is often linked to the El Ni\~no Southern Oscillation or ENSO \citep[e.g.,\ ][]{hsieh_nonlinear_2004}. % In ecology the comparison of sites with different species abundances is a classical multivariate problem: each observed species adds an extra dimension, and because species are often bound to certain habitats, there is a lot of redundant information. Using DR is a popular technique to represent the sites in few dimensions, e.g.,\ \citet{aart_distribution_1972} matches wolfspider communities to habitat and \citet{morrall_soil_1974} match soil fungi data to soil types. (In ecology the general name for DR is ordination or indirect gradient analysis.) % Today, hyperspectral satellite imagery collects so many bands that it is very difficult to analyze and interpret the data directly. % Resuming the data into a set of few, yet independent, components is one way to reduce complexity \citep[e.g.,\ see][]{laparra_dimensionality_2015}. % DR can also be used to visualize the interiors of deep neural networks \citep[e.g.,\ see ][]{han_deep_2016}, where the high dimensionality comes from the large number of weights used in a neural network and convergence can be visualized by means of DR\@. % We could find many more example applications here but this is not the main focus of this publication. % The difficulty in applying DR is that each DR method is designed to maintain certain aspects of the original data and therefore may be appropriate for one task and inappropriate for another. % Most methods also have parameters to tune and follow different assumptions. The quality of the outcome may strongly depend on their tuning, which adds additional complexity. % DR methods can be modeled after physical models with attracting and repelling forces (Force Directed Methods), projections onto low dimensional planes (PCA, ICA), divergence of statistical distributions (SNE family), or the reconstruction of local spaces or points by their neighbors (LLE). 
%
As an example of how changing internal parameters of a method can have a
great impact, the breakthrough for Stochastic Neighborhood Embedding (SNE)
methods came when a Student's $t$-distribution was used instead of a normal
distribution to model probabilities in low dimensional space to avoid the
``crowding problem'', that is,\ a sphere in high dimensional space has a much
larger volume than in low dimensional space and may contain too many points
to be represented accurately in few dimensions.
%
The $t$-distribution allows medium distances to be accurately represented in
few dimensions by larger distances due to its heavier tails.
%
The result is called $t$-SNE and is especially good at preserving local
structures in very few dimensions; this feature made $t$-SNE useful for a
wide array of data visualization tasks and the method became much more
popular than standard SNE (around six times more citations of
\citet{van_der_maaten_visualizing_2008} compared to
\citet{hinton_stochastic_2003} in Scopus \citep{noauthor_scopus_nodate}).
%
There are a number of software packages for other languages providing
collections of methods: In Python there is scikit-learn \citep{scikit-learn},
which contains a module for DR. In Julia we currently find
ManifoldLearning.jl for nonlinear and MultivariateStats.jl for linear DR
methods.
%
There are several toolboxes for DR implemented in Matlab
\citep{van_der_maaten_dimensionality_2009, arenas-garcia_kernel_2013}. The
Shogun toolbox \citep{soeren_sonnenburg_2017_1067840} implements a variety of
methods for dimensionality reduction in C++ and offers bindings for many
common high level languages (including R, but the installation is anything
but simple, as there is no CRAN package).
%
However, there is no comprehensive package for R and none of the
aforementioned software packages provides a means to consistently compare the
quality of different methods for DR.
%
For many applications it can be difficult to objectively find the right
method or parameterization for the DR task.
%
This paper presents the \pkg{dimRed} and \pkg{coRanking} packages for the
popular programming language R. Together, they provide a standardized
interface to various dimensionality reduction methods and quality metrics
for embeddings. They are implemented using the S4 class system of R, making
the packages both easy to use and to extend. The design goal for these
packages is to enable researchers, who may not necessarily be experts in DR,
to apply the methods in their own work and to objectively identify the most
suitable methods for their data.
%
This paper provides an overview of the methods collected in the packages and
contains examples of how to use the packages.
%
The notation in this paper will be as follows:
$X = [x_i]_{1\leq i \leq n}^T \in \mathbb{R}^{n\times p}$, and the
observations $x_i \in \mathbb{R}^p$.
%
These observations may be transformed prior to the dimensionality reduction
step (e.g.,\ centering and/or standardization) resulting in
$X' = [x'_i]_{1\leq i \leq n}^T \in \mathbb{R}^{n\times p}$.
%
A DR method then embeds each vector in $X'$ onto a vector in
$Y = [y_i]_{1\leq i \leq n}^T \in \mathbb{R}^{n\times q}$ with
$y_i \in \mathbb{R}^q$, ideally with $q \ll p$.
%
Some methods provide an explicit mapping $f(x'_i) = y_i$. Some even offer an
inverse mapping $f^{-1}(y_{i}) = \hat x'_{i}$, such that one can reconstruct
a (usually approximate) sample from the low-dimensional representation.
% For some methods, pairwise distances between points are needed, we set $d_{ij} = d(x_{i}, x_{j})$ and $\hat{d}_{ij} = d(y_i, y_j)$, where $d$ is some appropriate distance function. When referring to \code{functions} in the \pkg{dimRed} package or base R simply the function name is mentioned, functions from other packages are referenced with their namespace, as with \code{package::function}. \begin{figure}[htbp] \centering \input{classification_tree.tex} \caption{% Classification of dimensionality reduction methods. Methods in bold face are implemented in \pkg{dimRed}. Modified from \citet{van_der_maaten_dimensionality_2009}. }\label{fig:classification} \end{figure} \section{Dimensionality Reduction Methods} \label{sec:dimredtec} In the following section we do not aim for an exhaustive explanation to every method in \pkg{dimRed} but rather to provide a general idea on how the methods work. % An overview and classification of the most commonly used DR methods can be found in Figure~\ref{fig:classification}. In all methods, parameters have to be optimized or decisions have to be made, even if it is just about the preprocessing steps of data. % The \pkg{dimRed} package tries to make the optimization process for parameters as easy as possible, but, if possible, the parameter space should be narrowed down using prior knowledge. % Often decisions can be made based on theoretical knowledge. For example,\ sometimes an analysis requires data to be kept in their original scales and sometimes this is exactly what has to be avoided as when comparing different physical units. % Sometimes decisions based on the experience of others can be made, e.g.,\ the Gaussian kernel is probably the most universal kernel and therefore should be tested first if there is a choice. % All methods presented here have the embedding dimensionality, $q$, as a parameter (or \code{ndim} as a parameter for \code{embed}). % For methods based on eigenvector decomposition, the result generally does not depend on the number of dimensions, i.e.,\ the first dimension will be the same, no matter if we decide to calculate only two dimensions or more. % If more dimensions are added, more information is maintained, the first dimension is the most important and higher dimensions are successively less important. % This means, that a method based on eigenvalue decomposition only has to be run once if one wishes to compare the embedding in different dimensions. % In optimization based methods this is generally not the case, the number of dimensions has to be chosen a priori, an embedding of 2 and 3 dimensions may vary significantly, and there is no ordered importance of dimensions. % This means that comparing dimensions of optimization-based methods is computationally much more expensive. % We try to give the computational complexity of the methods. Because of the actual implementation, computation times may differ largely. % R is an interpreted language, so all parts of an algorithm that are implemented in R often will tend to be slow compared to methods that call efficient implementations in a compiled language. % Methods where most of the computing time is spent for eigenvalue decomposition do have very efficient implementations as R uses optimized linear algebra libraries. Although, eigenvalue decomposition itself does not scale very well in naive implementations ($\mathcal{O}(n^3)$). \subsection{PCA} \label{sec:pca} Principal Component Analysis (PCA) is the most basic technique for reducing dimensions. 
It dates back to \citet{pearson_lines_1901}. PCA finds a linear projection
($U$) of the high dimensional space into a low dimensional space $Y = XU$,
maintaining maximum variance of the data. It is based on solving the
following eigenvalue problem:
\begin{equation}
  (C_{XX}-\lambda_k I)u_k=0\label{eq:pca}
\end{equation}
where $C_{XX} = \frac 1 n X^TX$ is the covariance matrix, $\lambda_k$ and
$u_k$ are the $k$-th eigenvalue and eigenvector, and $I$ is the identity
matrix.
%
The equation has several solutions for different values of $\lambda_k$
(leaving aside the trivial solution $u_k = 0$).
%
PCA can be efficiently applied to large data sets, because it scales
computationally as $\mathcal{O}(np^2 + p^3)$, that is, linearly with the
number of samples; additionally, R uses specialized linear algebra libraries
for this kind of computation. PCA is a rotation around the origin and both a
forward and an inverse mapping exist.
%
PCA may suffer from a scale problem, i.e.,\ when one variable dominates the
variance simply because it is measured on a larger scale; if necessary or
desired for the use case, this can be remedied by scaling the data to zero
mean and unit variance.
%
Base R implements PCA in the functions \code{prcomp} and \code{princomp},
but several other implementations exist, e.g.,\ \BIOpkg{pcaMethods} from
Bioconductor, which implements versions of PCA that can deal with missing
data.
%
The \pkg{dimRed} package wraps \code{prcomp}.

\subsection{kPCA}
\label{sec:kpca}

Kernel Principal Component Analysis (kPCA) extends PCA to deal with
nonlinear dependencies among variables.
%
The idea behind kPCA is to map the data into a high dimensional space using
a possibly non-linear function $\phi$ and then to perform a PCA in this high
dimensional space.
%
Some mathematical tricks are used for efficient computation.
%
If the columns of $X$ are centered around $0$, then the principal components
can also be computed from the inner product matrix $K = XX^T$.
%
Due to this way of calculating a PCA, we do not need to explicitly map all
points into the high dimensional space and do the calculations there; it is
enough to obtain the inner product matrix or kernel matrix
$K \in \mathbb{R}^{n\times n}$ of the mapped points
\citep{scholkopf_nonlinear_1998}.
%
Here is an example calculating the kernel matrix using a Gaussian kernel:
\begin{equation}\label{eq:gauss}
  K_{ij} = \phi(x_i)^T \phi(x_j) = \kappa(x_i, x_j) =
  \exp\left( -\frac{\| x_i- x_j\|^2}{2 \sigma^2} \right),
\end{equation}
where $\sigma$ is a length scale parameter accounting for the width of the
kernel.
%
The other trick used is known as the ``representer theorem.'' The interested
reader is referred to \citet{scholkopf_generalized_2001}.
The kPCA method is very flexible and there exist many kernels for special
purposes. The most common kernel function is the Gaussian kernel
(Equation\ \ref{eq:gauss}).
%
The flexibility comes at the price that the method has to be finely tuned
for the data set because some parameter combinations are simply unsuitable
for certain data.
%
The method is not suitable for very large data sets, because memory scales
with $\mathcal{O}(n^2)$ and computation time with $\mathcal{O}(n^3)$.
%
Diffusion Maps, Isomap, Locally Linear Embedding, and some other techniques
can be seen as special cases of kPCA; in this case, an out-of-sample
extension using the Nyström formula can be applied
\citep{bengio_learning_2004}.
\subsection{kPCA}
\label{sec:kpca}
Kernel Principal Component Analysis (kPCA) extends PCA to deal with nonlinear
dependencies among variables.
%
The idea behind kPCA is to map the data into a high dimensional space using a
possibly non-linear function $\phi$ and then to perform a PCA in this high
dimensional space.
%
Some mathematical tricks are used for efficient computation.
%
If the columns of $X$ are centered around $0$, then the principal components
can also be computed from the inner product matrix $K = XX^T$.
%
Due to this way of calculating a PCA, we do not need to explicitly map all
points into the high dimensional space and do the calculations there; it is
enough to obtain the inner product matrix or kernel matrix
$K \in \mathbb{R}^{n\times n}$ of the mapped points
\citep{scholkopf_nonlinear_1998}.
%
Here is an example calculating the kernel matrix using a Gaussian kernel:
\begin{equation}\label{eq:gauss}
  K_{ij} = \phi(x_i)^T \phi(x_j) = \kappa(x_i, x_j) =
  \exp\left( -\frac{\| x_i- x_j\|^2}{2 \sigma^2} \right),
\end{equation}
where $\sigma$ is a length scale parameter accounting for the width of the
kernel.
%
The other trick used is known as the ``representer theorem.'' The interested
reader is referred to \citet{scholkopf_generalized_2001}.
The kPCA method is very flexible and there exist many kernels for special
purposes. The most common kernel function is the Gaussian kernel
(Equation\ \ref{eq:gauss}).
%
The flexibility comes at the price that the method has to be finely tuned for
the data set because some parameter combinations are simply unsuitable for
certain data.
%
The method is not suitable for very large data sets, because memory scales
with $\mathcal{O}(n^2)$ and computation time with $\mathcal{O}(n^3)$.
%
Diffusion Maps, Isomap, Locally Linear Embedding, and some other techniques
can be seen as special cases of kPCA, in which case an out-of-sample extension
using the Nyström formula can be applied \citep{bengio_learning_2004}.
%
This also enables applications to larger data sets, where an embedding is
trained on a sub-sample of the data and the remaining points are then embedded
using the Nyström formula.
Kernel PCA in R is implemented in the \CRANpkg{kernlab} package using the
function \code{kernlab::kpca}, which supports a number of kernels as well as
user-defined kernel functions. For details see the help page for
\code{kernlab::kpca}.
%
The \pkg{dimRed} package wraps \code{kernlab::kpca} but additionally provides
forward and inverse methods \citep{bakir_learning_2004}, which can be used to
fit out-of-sample data or to visualize the transformation of the data space.
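%
A minimal sketch of a kPCA embedding with a Gaussian kernel (not evaluated;
the value of \code{sigma} is arbitrary and has to be tuned for real data):
<<eval=FALSE>>=
dat <- loadDataSet("3D S Curve", n = 500)
## directly via kernlab:
kpc <- kernlab::kpca(getData(dat), kernel = "rbfdot",
                     kpar = list(sigma = 0.1), features = 2)
head(kernlab::rotated(kpc))      # the embedded points
## via dimRed (wraps kernlab::kpca and adds forward/inverse methods):
emb <- embed(dat, "kPCA", ndim = 2)
@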
%
\subsection{Classical Scaling}
\label{sec:classscale}
What today is called Classical Scaling was first introduced by
\citet{torgerson_multidimensional_1952}. It uses an eigenvalue decomposition
of a transformed distance matrix to find an embedding that maintains the
distances of the distance matrix.
%
The method works for the same reason that kPCA works, i.e.,\ classical scaling
can be seen as a kPCA with kernel $x^Ty$.
%
A matrix of Euclidean distances can be transformed into an inner product
matrix by some simple transformations and therefore yields the same result as
a PCA\@.
%
Classical scaling is conceptually more general than PCA in that arbitrary
distance matrices can be used, i.e.,\ the method does not even need the
original coordinates, just a distance matrix $D$.
%
Then it tries to find an embedding $Y$ so that $\hat d_{ij}$ is as similar to
$d_{ij}$ as possible.
The disadvantage is that it is computationally much more demanding, i.e.,\ an
eigenvalue decomposition of an $n\times n$ matrix has to be computed. This
step requires $\mathcal{O}(n^2)$ memory and $\mathcal{O}(n^3)$ computation
time, while PCA requires only the eigenvalue decomposition of a $p\times p$
matrix and usually $n \gg p$.
%
R implements classical scaling in the \code{cmdscale} function.
%
The \pkg{dimRed} package wraps \code{cmdscale} and allows the specification of
arbitrary distance functions for calculating the distance matrix.
Additionally a forward method is implemented.

\subsection{Isomap}
\label{sec:isomap}
As Classical Scaling can deal with arbitrarily defined distances,
\citet{tenenbaum_global_2000} suggested approximating the structure of the
manifold by using geodesic distances.
%
In practice, a graph is created by either keeping only the connections between
every point and its $k$ nearest neighbors to produce a $k$-nearest neighbor
graph ($k$-NNG), or simply by keeping all distances smaller than a value
$\varepsilon$, producing an $\varepsilon$-neighborhood graph ($\varepsilon$-NNG).
%
Geodesic distances are obtained by computing shortest path distances on this
graph, and classical scaling is then used to find an embedding in fewer
dimensions. This leads to an ``unfolding'' of possibly convoluted structures
(see Figure~\ref{fig:knn}). Isomap's computational cost is dominated by the
eigenvalue decomposition and therefore scales with $\mathcal{O}(n^3)$.
%
Other related techniques can use more efficient algorithms because the
distance matrix becomes sparse due to a different preprocessing.
In R, Isomap is implemented in the \CRANpkg{vegan} package. The function
\code{vegan::isomap} calculates an Isomap embedding, and
\code{vegan::isomapdist} calculates a geodesic distance matrix.
%
The \pkg{dimRed} package uses its own implementation.
This implementation is faster mainly due to using a KD-tree for the nearest
neighbor search (from the \CRANpkg{RANN} package) and a faster implementation
of the shortest path search in the $k$-NNG (from the \CRANpkg{igraph} package).
%
The implementation in \pkg{dimRed} also includes a forward method that can be
used to train the embedding on a subset of data points and then use these
points to approximate an embedding for the remaining points. This technique is
generally referred to as landmark Isomap \citep{de_silva_sparse_2004}.
%

\subsection{Locally Linear Embedding}
\label{sec:lle}
Points that lie on a manifold in a high dimensional space can be reconstructed
through linear combinations of their neighbors if the manifold is well sampled
and the neighborhoods lie on a locally linear patch.
%
These reconstruction weights, $W$, are the same in the high dimensional space
as in the internal coordinates of the manifold.
%
Locally Linear Embedding \citep[LLE; ][]{roweis_nonlinear_2000} is a technique
that constructs a weight matrix $W \in \mathbb{R}^{n\times n}$ with elements
$w_{ij}$ so that
\begin{equation}
  \sum_{i=1}^n \bigg\| x_i- \sum_{j=1}^{n} w_{ij}x_j \bigg\|^2\label{eq:lle}
\end{equation}
is minimized under the constraint that $w_{ij} = 0$ if $x_j$ does not belong
to the neighborhood of $x_i$ and the constraint that $\sum_{j=1}^n w_{ij} = 1$.
%
Finally the embedding is made in such a way that the following cost function
is minimized for $Y$,
\begin{equation}
  \sum_{i=1}^n\bigg\| y_i - \sum_{j=1}^n w_{ij}y_j \bigg\|^2.\label{eq:lle2}
\end{equation}
This can be solved using an eigenvalue decomposition.
Conceptually the method is similar to Isomap, but it is computationally more
convenient because the weight matrix is sparse and there exist efficient
solvers.
%
In R, LLE is implemented by the package \CRANpkg{lle}; the embedding can be
calculated with \code{lle::lle}. Unfortunately the implementation does not
make use of the sparsity of the weight matrix $W$.
%
The manifold must be well sampled and the neighborhood size must be chosen
appropriately for LLE to give good results.
%

\subsection{Laplacian Eigenmaps}
\label{sec:laplaceigenmaps}
Laplacian Eigenmaps were originally developed under the name spectral
clustering to separate non-convex clusters.
%
Later, the technique was also used for graph embedding and DR
\citep{belkin_laplacian_2003}.
%
A number of variants have been proposed.
%
First, a graph is constructed, usually from a distance matrix; the graph can
be made sparse by keeping only the $k$ nearest neighbors or by specifying an
$\varepsilon$-neighborhood.
%
Then, a similarity matrix $W$ is calculated using a Gaussian kernel (see
Equation~\ref{eq:gauss}); if $c = 2 \sigma^2 = \infty$, all distances are
treated equally, while the smaller $c$ is, the more emphasis is given to
differences in distance.
%
The degree of vertex $i$ is $d_i = \sum_{j=1}^n w_{ij}$ and the degree matrix,
$D$, is the diagonal matrix with entries $d_i$.
%
From these we can form the graph Laplacian, $L = D - W$; there are several
ways to proceed from here, an overview can be found in
\citet{luxburg_tutorial_2007}.
%
The \pkg{dimRed} package implements the algorithm from
\citet{belkin_laplacian_2003}.
Analogously to LLE, Laplacian Eigenmaps keep the computational cost low by
creating a sparse matrix and not having to estimate the distances between all
pairs of points.
%
Then the eigenvectors corresponding to the lowest eigenvalues larger than $0$
of either the matrix $L$ or the normalized Laplacian $D^{-1/2}LD^{-1/2}$ are
computed and form the embedding.

\subsection{Diffusion Maps}
\label{sec:isodiffmaplle}
Diffusion Maps \citep{coifman_diffusion_2006} take a distance matrix as input
and calculate the transition probability matrix $P$ of a diffusion process
between the points to approximate the manifold.
%
The embedding is then obtained from an eigenvalue decomposition of $P$, which
yields the coordinates of the embedding.
%
The algorithm for calculating Diffusion Maps shares some elements with the way
Laplacian Eigenmaps are calculated.
%
Both algorithms depart from the same weight matrix; Diffusion Maps calculate
the transition probabilities on the graph after $t$ time steps and do the
embedding on this probability matrix. The idea is to simulate a diffusion
process between the nodes of the graph, which is more robust to
short-circuiting than the $k$-NNG from Isomap (see the bottom right of
Figure~\ref{fig:knn}).
%
Diffusion Maps in R are accessible via the \code{diffusionMap::diffuse()}
function, which is available in the \CRANpkg{diffusionMap} package.
%
Additional points can be approximated into an existing embedding using the
Nyström formula \citep{bengio_learning_2004}.
%
The implementation in \pkg{dimRed} is based on the
\code{diffusionMap::diffuse} function.
% , which does not contain an
% approximation for unequally sampled manifolds
% \citep{coifman_geometric_2005}.
%
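The distance and graph based methods above all share the same calling
convention in \pkg{dimRed}; a minimal sketch (not evaluated, the parameter
values are only illustrative, and the available method names can be listed
with \code{dimRedMethodList()}):
<<eval=FALSE>>=
dat <- loadDataSet("3D S Curve", n = 500)
emb_mds <- embed(dat, "MDS")                # classical scaling
emb_iso <- embed(dat, "Isomap", knn = 10)   # geodesic distances on the k-NNG
emb_lle <- embed(dat, "LLE", knn = 10)      # local linear reconstructions
emb_dm  <- embed(dat, "DiffusionMaps")      # diffusion probabilities
## classical scaling in base R for comparison:
cs <- cmdscale(dist(getData(dat)), k = 2)
@
%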
\subsection{non-Metric Dimensional Scaling}
\label{sec:nmds}
While Classical Scaling and derived methods (see section
\nameref{sec:classscale}) use eigenvector decomposition to embed the data in
such a way that the given distances are maintained, non-Metric Dimensional
Scaling \citep[nMDS, ][]{kruskal_multidimensional_1964,kruskal_nonmetric_1964}
uses optimization methods to reach the same goal.
%
Therefore a stress function,
\begin{equation}
  \label{eq:stress}
  S = \sqrt{\frac{\sum_{i<j} \left( d_{ij} - \hat{d}_{ij} \right)^2}{\sum_{i<j} d_{ij}^2}},
\end{equation}
is minimized.
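%
In \pkg{dimRed}, nMDS is available through the same interface; a minimal
sketch (not evaluated, the distance function is only an example):
<<eval=FALSE>>=
dat <- loadDataSet("3D S Curve", n = 500)
## wraps vegan::monoMDS; d can be an arbitrary distance function
emb <- embed(dat, "nMDS", d = dist, ndim = 2)
## or directly:
nm <- vegan::monoMDS(dist(getData(dat)), k = 2)
@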
\begin{figure}[htp]
  \centering
<<pca_isomap_example,include=FALSE>>=
if(Sys.getenv("BNET_BUILD_VIGNETTE") != "") {
  library(dimRed); library(ggplot2); #library(dplyr); library(tidyr)
  ## define which methods to apply
  embed_methods <- c("Isomap", "PCA")
  ## load test data set
  data_set <- loadDataSet("3D S Curve", n = 1000)
  ## apply dimensionality reduction
  data_emb <- lapply(embed_methods, function(x) embed(data_set, x))
  names(data_emb) <- embed_methods
  ## plot data set, embeddings, and quality analysis
  ## plot(data_set, type = "3vars")
  ## lapply(data_emb, plot, type = "2vars")
  ## plot_R_NX(data_emb)
  add_label <- function(label)
    grid::grid.text(label, 0.2, 1, hjust = 0, vjust = 1,
                    gp = grid::gpar(fontface = "bold", cex = 1.5))
  ## pdf('~/phd/text/dimRedPackage/plots/plot_example.pdf', width = 4, height = 4)
  ## plot the results
  plot(data_set, type = "3vars", angle = 15, mar = c(3, 3, 0, 0),
       box = FALSE, grid = FALSE, pch = 16)
  add_label("a")
  par(mar = c(4, 4, 0, 0) + 0.1, bty = "n", las = 1)
  plot(data_emb$Isomap, type = "2vars", pch = 16)
  add_label("b")
  plot(data_emb$PCA, type = "2vars", pch = 16)
  add_label("d")
  ## calculate quality scores
  print(
    plot_R_NX(data_emb) +
      theme(legend.title = element_blank(),
            legend.position = c(0.5, 0.1),
            legend.justification = c(0.5, 0.1))
  )
  add_label("c")
} else {
  # These cannot all be plot(1:10)!!! It's a mystery to me.
  plot(1:10)
  barplot(1:10)
  hist(1:10)
  plot(1:10)
}
@
\includegraphics[page=1,width=.45\textwidth]{figure/pca_isomap_example-1.pdf}
\includegraphics[page=1,width=.45\textwidth]{figure/pca_isomap_example-2.pdf}
\includegraphics[page=1,width=.45\textwidth]{figure/pca_isomap_example-3.pdf}
\includegraphics[page=1,width=.45\textwidth]{figure/pca_isomap_example-4.pdf}
\caption[dimRed example]{%
  Comparing PCA and Isomap:
  %
  (a) An S-shaped manifold; colors represent the internal coordinates of the
  manifold.
  %
  (b) Isomap embedding; the S-shaped manifold is unfolded.
  %
  (c) $R_{NX}$ plotted against neighborhood sizes; Isomap is much better at
  preserving local distances and PCA is better at preserving global Euclidean
  distances.
  %
  The numbers in the legend are the $\text{AUC}_{1 / K}$.
  (d) PCA projection of the data; the directions of maximum variance are
  preserved.
  %
}\label{fig:plotexample}
\end{figure}

<<eval=FALSE>>=
## define which methods to apply
embed_methods <- c("Isomap", "PCA")
## load test data set
data_set <- loadDataSet("3D S Curve", n = 1000)
## apply dimensionality reduction
data_emb <- lapply(embed_methods, function(x) embed(data_set, x))
names(data_emb) <- embed_methods
## figure \ref{fig:plotexample}a, the data set
plot(data_set, type = "3vars")
## figures \ref{fig:plotexample}b (Isomap) and \ref{fig:plotexample}d (PCA)
lapply(data_emb, plot, type = "2vars")
## figure \ref{fig:plotexample}c, quality analysis
plot_R_NX(data_emb)
@

The function \code{plot\_R\_NX} produces a figure that plots the neighborhood
size ($k$ on a log scale) against the quality measure $\text{R}_{NX}(k)$ (see
Equation \ref{eq:rnx}).
%
This gives an overview of the general behavior of methods: if $\text{R}_{NX}$
is high for low values of $K$, then local neighborhoods are maintained well;
if $\text{R}_{NX}$ is high for large values of $K$, then global gradients are
maintained well.
%
It also provides a way to directly compare methods by plotting more than one
$\text{R}_{NX}$ curve; the area under the curve (see
Equation~\ref{eq:auclnk}), which is shown as a number in the legend, serves as
an indicator of the overall quality of the embedding.
Therefore we can see from Figure~\ref{fig:plotexample}c that Isomap is very
good at maintaining close and medium distances for the given data set, whereas
PCA is only better at maintaining the very large distances.
%
The large distances are dominated by the overall bent shape of the S in 3D
space, while the close distances are not affected by this bending.
%
This is reflected in the properties recovered by the different methods: the
PCA embedding recovers the S-shape, while Isomap ignores the S-shape and
recovers the inner structure of the manifold.
%
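The area under the $\text{R}_{NX}$ curve that is printed in the legend can
also be computed directly as a single score for each embedding, e.g.\ (not
evaluated, using the objects from the code above):
<<eval=FALSE>>=
sapply(data_emb, function(x) quality(x, "AUC_lnK_R_NX"))
@
%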
Example 2: Often the quality of an embedding strongly depends on the choice of
parameters; the interface of \pkg{dimRed} can be used to facilitate searching
the parameter space. Isomap has one parameter, $k$, which determines the
number of neighbors used to construct the $k$-NNG\@.
%
If this number is too large, then Isomap will resemble an MDS
(Figure~\ref{fig:knn}e); if the number is too small, the resulting embedding
contains holes (Figure~\ref{fig:knn}c).
%
The following code finds the optimal value, $k_{\text{max}}$, for $k$ using
the $Q_{\text{local}}$ criterion; the results are visualized in
Figure~\ref{fig:knn}a:

\begin{figure}[htp]
  \centering
<<include=FALSE>>=
if(Sys.getenv("BNET_BUILD_VIGNETTE") != "") {
  library(dimRed)
  library(cccd)
  ## Load data
  ss <- loadDataSet("3D S Curve", n = 500)
  ## Parameter space
  kk <- floor(seq(5, 100, length.out = 40))
  ## Embedding over parameter space
  emb <- lapply(kk, function(x) embed(ss, "Isomap", knn = x))
  ## Quality over embeddings
  qual <- sapply(emb, function(x) quality(x, "Q_local"))
  ## Find best value for K
  ind_max <- which.max(qual)
  k_max <- kk[ind_max]
  add_label <- function(label){
    par(xpd = TRUE)
    b = par("usr")
    text(b[1], b[4], label, adj = c(0, 1), cex = 1.5, font = 2)
    par(xpd = FALSE)
  }
  names(qual) <- kk
}
@
<<"select_k",include=FALSE,fig.width=11,fig.height=5>>=
if(Sys.getenv("BNET_BUILD_VIGNETTE") != "") {
  par(mfrow = c(1, 2), mar = c(5, 4, 0, 0) + 0.1, oma = c(0, 0, 0, 0))
  plot(kk, qual, type = "l", xlab = "k", ylab = expression(Q[local]),
       bty = "n")
  abline(v = k_max, col = "red")
  add_label("a")
  plot(ss, type = "3vars", angle = 15, mar = c(3, 3, 0, 0),
       box = FALSE, grid = FALSE, pch = 16)
  add_label("b")
} else {
  plot(1:10)
  plot(1:10)
}
@
<<"knngraphs",include=FALSE,fig.width=8,fig.height=3>>=
if(Sys.getenv("BNET_BUILD_VIGNETTE") != "") {
  par(mfrow = c(1, 3), mar = c(5, 4, 0, 0) + 0.1, oma = c(0, 0, 0, 0))
  add_knn_graph <- function(ind) {
    nn1 <- nng(ss@data, k = kk[ind])
    el <- get.edgelist(nn1)
    segments(x0 = emb[[ind]]@data@data[el[, 1], 1],
             y0 = emb[[ind]]@data@data[el[, 1], 2],
             x1 = emb[[ind]]@data@data[el[, 2], 1],
             y1 = emb[[ind]]@data@data[el[, 2], 2],
             col = "#00000010")
  }
  plot(emb[[2]]@data@data, type = "n", bty = "n")
  add_knn_graph(2)
  points(emb[[2]]@data@data, col = dimRed:::colorize(ss@meta), pch = 16)
  add_label("c")
  plot(emb[[ind_max]]@data@data, type = "n", bty = "n")
  add_knn_graph(ind_max)
  points(emb[[ind_max]]@data@data, col = dimRed:::colorize(ss@meta), pch = 16)
  add_label("d")
  plot(emb[[length(emb)]]@data@data, type = "n", bty = "n")
  add_knn_graph(length(emb))
  points(emb[[length(emb)]]@data@data, col = dimRed:::colorize(ss@meta), pch = 16)
  add_label("e")
} else {
  plot(1:10)
  plot(1:10)
  plot(1:10)
}
@
\includegraphics[width=.95\textwidth]{figure/select_k-1.pdf}
\includegraphics[width=.95\textwidth]{figure/knngraphs-1.pdf}
\caption[Estimating $k$ using $Q_\text{local}$]{%
  Using \pkg{dimRed} and the $Q_\text{local}$ indicator to estimate a good
  value for the parameter $k$ in Isomap.
  %
  (a) $Q_\text{local}$ for different values of $k$; the vertical red line
  indicates the maximum at $k_{\text{max}}$.
  %
  (b) The original data set, a 2-dimensional manifold bent into an S-shape in
  3-dimensional space.
  %
  Bottom row: Embeddings and $k$-NNG for different values of $k$.
  %
  (c) $k = 5$: the value for $k$ is too small, resulting in holes in the
  embedding; the manifold itself is still unfolded correctly.
  %
  (d) $k = k_\text{max}$: the best representation of the original manifold in
  two dimensions achievable with Isomap.
  %
  (e) $k = 100$: the value is too large and the $k$-NNG does not approximate
  the manifold any more.
  %
}\label{fig:knn}
\end{figure}

<<eval=FALSE>>=
## Load data
ss <- loadDataSet("3D S Curve", n = 500)
## Parameter space
kk <- floor(seq(5, 100, length.out = 40))
## Embedding over parameter space
emb <- lapply(kk, function(x) embed(ss, "Isomap", knn = x))
## Quality over embeddings
qual <- sapply(emb, function(x) quality(x, "Q_local"))
## Find best value for K
ind_max <- which.max(qual)
k_max <- kk[ind_max]
@

Figure~\ref{fig:knn}a shows how the $Q_{\text{local}}$ criterion changes when
varying the neighborhood size $k$ for Isomap; the gray lines in
Figure~\ref{fig:knn} represent the edges of the $k$-NNG.
%
If the value for $k$ is too low, the inner structure of the manifold will
still be recovered, but imperfectly (Figure~\ref{fig:knn}c; note that the
holes appear in places that are not covered by the edges of the $k$-NNG);
therefore the $Q_{\text{local}}$ score is lower than optimal.
%
If $k$ is too large, the error of the embedding is much larger due to
short-circuiting, and we observe a very steep drop in the $Q_{\text{local}}$
score.
%
The short-circuiting can be observed in Figure~\ref{fig:knn}e, where edges
cross the gap between the tips and the center of the S-shape.
%
%
Example 3: It is also very easy to compare across methods and quality scores.
%
The following code produces a matrix of quality scores and methods, where
\code{dimRedMethodList} returns a character vector with all methods. A
visualization of the matrix can be found in Figure~\ref{fig:qualityexample}.
%
\begin{figure}[htp]
  \centering
<<"plot_quality",include=FALSE>>=
if(Sys.getenv("BNET_BUILD_VIGNETTE") != "") {
  embed_methods <- dimRedMethodList()
  quality_methods <- c("Q_local", "Q_global", "AUC_lnK_R_NX",
                       "cophenetic_correlation")
  iris_data <- loadDataSet("Iris")
  quality_results <- matrix(
    NA, length(embed_methods), length(quality_methods),
    dimnames = list(embed_methods, quality_methods)
  )
  embedded_data <- list()
  for (e in embed_methods) {
    try(embedded_data[[e]] <- embed(iris_data, e))
    for (q in quality_methods)
      try(quality_results[e,q] <- quality(embedded_data[[e]], q))
  }
  quality_results <- quality_results[order(rowMeans(quality_results)), ]
  palette(c("#1b9e77", "#d95f02", "#7570b3", "#e7298a", "#66a61e"))
  col_hsv <- rgb2hsv(col2rgb(palette()))
  ## col_hsv["v", ] <- col_hsv["v", ] * 3 / 1
  palette(hsv(col_hsv["h",], col_hsv["s",], col_hsv["v",]))
  par(mar = c(2, 8, 0, 0) + 0.1)
  barplot(t(quality_results), beside = TRUE, col = 1:4,
          legend.text = quality_methods, horiz = TRUE, las = 1,
          cex.names = 0.85,
          args.legend = list(x = "topleft", bg = "white", cex = 0.8))
} else {
  plot(1:10)
}
@
\includegraphics[width=.5\textwidth]{figure/plot_quality-1.pdf}
\caption[Quality comparison]{%
  A visualization of the \code{quality\_results} matrix.
  %
  The methods are ordered by mean quality score.
  %
  The reconstruction error was omitted, because there a higher value means a
  worse embedding, while for the scores shown a higher value means a better
  embedding.
  %
  Parameters were not tuned for the example; therefore it should not be seen
  as a general quality assessment of the methods.
  %
}\label{fig:qualityexample}
\end{figure}

<<eval=FALSE>>=
embed_methods <- dimRedMethodList()
quality_methods <- c("Q_local", "Q_global", "AUC_lnK_R_NX",
                     "cophenetic_correlation")
scurve <- loadDataSet("3D S Curve", n = 2000)
quality_results <- matrix(
  NA, length(embed_methods), length(quality_methods),
  dimnames = list(embed_methods, quality_methods)
)
embedded_data <- list()
for (e in embed_methods) {
  embedded_data[[e]] <- embed(scurve, e)
  for (q in quality_methods)
    try(quality_results[e, q] <- quality(embedded_data[[e]], q))
}
@

This example showcases the simplicity with which different methods and quality
criteria can be combined.
%
Because of the strong dependencies on parameters it is not advised to apply
this kind of analysis without tuning the parameters for each method
separately.
%
There is no automated way to tune parameters in \pkg{dimRed}.
%

\section{Conclusion}
\label{sec:conc}
This paper presents the \pkg{dimRed} and \pkg{coRanking} packages and provides
a brief overview of the methods implemented therein.
%
The \pkg{dimRed} package is written in the R language, one of the most popular
languages for data analysis. The package is freely available from CRAN.
%
The package is object-oriented and completely open source and therefore easily
accessible and extensible.
%
Although most of the DR methods already had implementations in R, \pkg{dimRed}
adds some new methods for dimensionality reduction, and \pkg{coRanking} adds
methods for an independent quality control of DR methods to the R ecosystem.
%
DR is a widely used technique. However, due to the lack of easily usable
tools, choosing the right method for DR is complex and depends upon a variety
of factors.
%
The \pkg{dimRed} package aims to facilitate experimentation with different
techniques, parameters, and quality measures so that choosing the right method
becomes easier.
%
The \pkg{dimRed} package enables the user to objectively compare methods that
rely on very different algorithmic approaches.
%
It also makes the programmer's life easier, because all methods are aggregated
in one place and a single interface and standardized classes provide access to
the functionality.
%

\section{Acknowledgments}
\label{sec:ack}
We thank Dr.\ G.\ Camps-Valls and an anonymous reviewer for many useful
comments.
%
This study was supported by the European Space Agency (ESA) via the Earth
System Data Lab project (\url{http://earthsystemdatacube.org}) and the EU via
the H2020 project BACI, grant agreement No 640176.
% \bibliographystyle{abbrvnat} \bibliography{bibliography} \end{document} dimRed/MD50000644000176200001440000001513713464507340012003 0ustar liggesusersf9c4a7fa944ebcbcf12fdbbc5904b7e2 *DESCRIPTION ae5a59342168733d9988af23c3ca4c2a *LICENSE 2dd5c5db35ffa4df3b59cc134a5382ad *NAMESPACE 796f16f5f6b46687e1983c5506c7f0e5 *NEWS.md 0da527caab59a801dc7c8f668dda7de2 *R/autoencoder.R d4e3e654a96439e20fb41d3e10985af0 *R/dataSets.R 6e445262af4660e0fd28d479538c63a1 *R/diffmap.R 9e17b49bd7954ac4f67b9cec33eac1b9 *R/dimRed.R 0b64904774785f20eea821044c0d004f *R/dimRedData-class.R 9012058299b382d56cb9a4cbbef96be2 *R/dimRedMethod-class.R 3fb979a5be13d3e18b4dde7ddf0506b0 *R/dimRedResult-class.R daac94506b94b9c2b608750722d6cd33 *R/drr.R 78dd0eb23aca3ff89346da8e47e4aa1e *R/embed.R 99bfdc28048dc91980c3c3405dceb451 *R/fastica.R fa3db6a7ba1ecae25d5dce90686bac7e *R/get_info.R 58a1f188d2bdbb663a6bda0743dd6719 *R/graph_embed.R 74a207f86775bed0e027c606a0d06975 *R/hlle.R 3cb6be22aeb179f0e6aaaa4655014340 *R/isomap.R b793dd76cd7ee0bb6b6228d719cc24c9 *R/kpca.R f65dfcf4df65f8ba09854e890d27ba95 *R/l1pca.R 935514a87886c81af2cc6664a60c2208 *R/leim.R 4b3c3d8b7e92880fc11f40f58e78fcc7 *R/lle.R d188fc370cdc5ea1094613ec0b3972da *R/loe.R 0a54fba9f0f683c4dd521cff6c14509e *R/mds.R eb62d31d8645997200186731444ff667 *R/misc.R c115a187e0c2cbbb12219ac0db73e66d *R/mixColorSpaces.R bd08d85dde00adc5fa2f5321eb6b1b6b *R/nmds.R 82a9222df124f6aad86dbada7455b2ea *R/nnmf.R 0c72ee706323134c161e5bbe03cbc90d *R/pca.R f99544db6dff93b3f27f6cba2043695a *R/plot.R b1d4d666a4880c4672609ff700ff0bbf *R/quality.R e9a61868c99870bbcc03e8f6e4db0fc3 *R/rotate.R 546e6d5cf4d954c002b0e5d2031eb69f *R/soe.R 335f874d226c9545d2f32973cde26849 *R/tsne.R 0fc79b6c5c8c1e9889d850936238477e *R/umap.R baa49a7a9d8605ec938bcc893ab74d1a *build/vignette.rds 941a4ba018d10bb388f78d2a47a93147 *inst/CITATION 40f8091d5ac274a166794cad394622b3 *inst/doc/dimensionality-reduction.R 08f8cffd3a53d83eefb2a7a18e3a8cae *inst/doc/dimensionality-reduction.Rnw 51934c169135c5f4bc21634bf5c05dc9 *inst/doc/dimensionality-reduction.pdf 497e59c1f464f52f9c48c9925f8c54fa *man/AUC_lnK_R_NX-dimRedResult-method.Rd 219213beccddce50789168cd1462a4a4 *man/AutoEncoder-class.Rd 32782509d2bc7deadf99a7c2ffac09f4 *man/DRR-class.Rd 76b564ad600f0d0539f6f77e53a609ab *man/DiffusionMaps-class.Rd 9ce4db263b3dbaaba619b940d75f8a3f *man/DrL-class.Rd 6d1258ee5dc46118271a8eea6e9c4088 *man/FastICA-class.Rd 4d916734fd3ebc7210a3ebbc2179d597 *man/FruchtermanReingold-class.Rd 6d10497ea0bf6ece3211bbf55e360893 *man/HLLE-class.Rd 8180902d8ef8cd20e3562b2949dec3db *man/Isomap-class.Rd 22b6a6fd009bf9b74132fb792a3cd97b *man/KamadaKawai-class.Rd fff0c0517c3bcf95b28191896de064d8 *man/LCMC-dimRedResult-method.Rd 8913fb3dec79dbe29467941ae158d966 *man/LLE-class.Rd e1f54c4f9b999f4b005b93b0f59739d9 *man/LaplacianEigenmaps-class.Rd 60f938b6ff067fe299f3f119037a2907 *man/MDS-class.Rd 7d45ae38f7808baba194e3ab35bacfc8 *man/NNMF-class.Rd 574bbb315a8f7c8961ce2c579ea8ec4e *man/PCA-class.Rd e5596bcb1a8cc8333cc848606164c249 *man/PCA_L1-class.Rd 7080c9a2daf9b4159f1b991775a48a0c *man/Q_NX-dimRedResult-method.Rd eb4cdac50dc14d5e2e867171c3447d2c *man/Q_global-dimRedResult-method.Rd fca4e3387e15601dd308c5c53c70818a *man/Q_local-dimRedResult-method.Rd 9c81b4c8d0e287038de73ee20af86248 *man/R_NX-dimRedResult-method.Rd 9d2fee3db9dc2d270e548ed77192ac91 *man/UMAP-class.Rd 718f4e21d3332957a02215ba01e9125f *man/as.data.frame.Rd bc5db8684d65fe654feaeb82b7f09da4 *man/as.dimRedData.Rd 7f3f6a3537c5718c51204672357cbd3b *man/cophenetic_correlation-dimRedResult-method.Rd 
419144b1768266c642462c3c959788ff *man/dataSets.Rd 685e9ac76e6c37e56393adb612749f12 *man/dimRed-package.Rd 050de33fcd3f599e4042da938a139501 *man/dimRedData-class.Rd 70a330df1f1623949a1427a6350213d9 *man/dimRedMethod-class.Rd 8e92695ec000e5565c0b55db5ad67f0f *man/dimRedMethodList.Rd 6c2cbf7706f4712ab801988bff5b2251 *man/dimRedResult-class.Rd ad8fd2e14a3179d42692c29ea5e25e25 *man/distance_correlation-dimRedResult-method.Rd 12064571b58bceb2b0fe11b083b014b4 *man/embed.Rd af8863f32422517f0758352f5773d916 *man/getData.Rd beb9c9c0b2b9069d2cfde3a7ef78f810 *man/getDimRedData.Rd 3d700516a933d7d8fe95cebd9e3e0365 *man/getMeta.Rd 966f27a61120f340e6967930d5a4dae4 *man/getNDim.Rd 54af2a8843ad16e4c8daf4b75f80fd51 *man/getOrgData.Rd 47659991b1edf53bbf1a544d8cbfc231 *man/getOtherData.Rd b6a82a60362da823b309497c27b54d61 *man/getPars.Rd 2490bb8ba610ace97954566d69493448 *man/getRotationMatrix.Rd 6df2e37941539cd6055499ba4a0eca83 *man/installSuggests.Rd de0f1cbbc1c9bf7ddde5acea634f3362 *man/kPCA-class.Rd e6fda0f5f8483f08ff20d467eab06cac *man/makeKNNgraph.Rd 0337738c6a82a8f3a041a337d8af7f6f *man/maximize_correlation-dimRedResult-method.Rd 90716e2aa20decda10dfc3bc129452df *man/mean_R_NX-dimRedResult-method.Rd ca6d375b0f0df0043fa5ba19785024f6 *man/mixColorRamps.Rd ed8bd517c5bd2478e17da9e1206e3618 *man/nMDS-class.Rd d129fc72b1b204e139d23885ccd064bc *man/ndims.Rd 85b76c61cc7bd191f2f59e21c56b4cb8 *man/plot.Rd eed9d13b23942d8139829c4a4bf3d1ea *man/plot_R_NX.Rd ecdac99d07501417ba48b07a28ec5f5d *man/print.Rd c73de6bdd2ee2304e6eb135cf7e020a9 *man/quality.Rd be6ab54944073c3772a7ac60c5fcf743 *man/reconstruction_error-dimRedResult-method.Rd 4519b8602e906c93bcbe5b6a2f90a801 *man/reconstruction_rmse-dimRedResult-method.Rd 4addb77b115dcf9d029b6e5e46066846 *man/tSNE-class.Rd f0e26c966c5d16edbaffa400f97fef80 *man/total_correlation-dimRedResult-method.Rd 8ace2d0542826960bc7b59ac72f45236 *tests/testthat.R 5d2a1830e69ab3855dee09cbc64d1788 *tests/testthat/test_HLLE.R 38f08498979faa5581533adbd4bffbcd *tests/testthat/test_ICA.R 3268fc53ab376b48bb9f6e982edf8655 *tests/testthat/test_NNMF.R ac423f8fe5975d8fe26d6e7a1b988da9 *tests/testthat/test_PCA.R 81ac0e38a036949d78ce64a071934063 *tests/testthat/test_PCA_L1.R 977a45bcedfe70de65d7bb0627c6dbbd *tests/testthat/test_all.R 74ce1af6ee874d61dd8779a66532e855 *tests/testthat/test_autoencoder.R eb2b0ed077c46c957b4069250febb644 *tests/testthat/test_dataSets.R 8edfeba82c3cf5d4f8d4155988d32ba4 *tests/testthat/test_diffmap.R f9ecd97a5a0d177661d7ee22a051bf19 *tests/testthat/test_dimRedData.R c9add493b83df66c3509203545016ea2 *tests/testthat/test_dimRedMethod-class.R ff7b55fe97717cdc7f3dc140c110dc5e *tests/testthat/test_dimRedResult.R 5f32a62cc46be17283bb8acb2540d627 *tests/testthat/test_drr.R 84413e591356fb49d6b15763c66fd0e9 *tests/testthat/test_embed.R e9b780f2fe15468d89d547e41dba6ca1 *tests/testthat/test_isomap.R d6b6bf63d34d8ff812d45ba834032333 *tests/testthat/test_kPCA.R a82999dfbdecc5dade5a2804181fc1a4 *tests/testthat/test_misc.R 197881218d406d3ffee67b117c4417d8 *tests/testthat/test_quality.R 85386394bfca56ba1ff9695a9eaa967b *tests/testthat/test_umap.R 085333b04fa2e0963ce84308776bd7c1 *vignettes/Makefile 6b21c813361c55cf8b7361b7a0b3bacf *vignettes/bibliography.bib b3b0dc9c51a39436ebbc3895a9b2f9f6 *vignettes/classification_tree.tex 08f8cffd3a53d83eefb2a7a18e3a8cae *vignettes/dimensionality-reduction.Rnw dimRed/build/0000755000176200001440000000000013464323463012565 5ustar liggesusersdimRed/build/vignette.rds0000644000176200001440000000033213464323463015122 0ustar 
liggesusersb```b`f@&0rHˤdg%dT&zAyhj%\P*mfAJZ)- V ,LH:YsSьcwI-HK CO/@3GSZY_3E T [fN*]!%psF QY_#/(* @bJI,IK+] dimRed/DESCRIPTION0000644000176200001440000000271513464507337013205 0ustar liggesusersPackage: dimRed Title: A Framework for Dimensionality Reduction Version: 0.2.3 Authors@R: c( person("Guido", "Kraemer", email = "gkraemer@bgc-jena.mpg.de", role = c("aut", "cre")) ) Description: A collection of dimensionality reduction techniques from R packages and a common interface for calling the methods. Depends: R (>= 3.0.0), DRR Imports: magrittr, methods Suggests: NMF, MASS, Matrix, RANN, RSpectra, Rtsne, cccd, coRanking, diffusionMap, energy, fastICA, ggplot2, graphics, igraph, keras, kernlab, knitr, lle, loe, optimx, pcaL1, pcaPP, reticulate, rgl, scales, scatterplot3d, stats, tensorflow, testthat, tidyr, tinytex, umap, vegan VignetteBuilder: knitr License: GPL-3 | file LICENSE URL: https://github.com/gdkrmr/dimRed LazyData: true Encoding: UTF-8 Collate: 'dimRedMethod-class.R' 'misc.R' 'dimRedData-class.R' 'dimRedResult-class.R' 'autoencoder.R' 'dataSets.R' 'diffmap.R' 'dimRed.R' 'drr.R' 'embed.R' 'fastica.R' 'get_info.R' 'graph_embed.R' 'hlle.R' 'isomap.R' 'kpca.R' 'l1pca.R' 'leim.R' 'lle.R' 'loe.R' 'mds.R' 'mixColorSpaces.R' 'nmds.R' 'nnmf.R' 'pca.R' 'plot.R' 'quality.R' 'rotate.R' 'soe.R' 'tsne.R' 'umap.R' RoxygenNote: 6.1.1 NeedsCompilation: yes Packaged: 2019-05-07 15:41:40 UTC; gkraemer Author: Guido Kraemer [aut, cre] Maintainer: Guido Kraemer Repository: CRAN Date/Publication: 2019-05-08 08:10:07 UTC dimRed/man/0000755000176200001440000000000013373523703012237 5ustar liggesusersdimRed/man/getDimRedData.Rd0000644000176200001440000000050213065033470015154 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/misc.R \name{getDimRedData} \alias{getDimRedData} \title{Method getDimRedData} \usage{ getDimRedData(object, ...) } \arguments{ \item{object}{The object to extract data from.} \item{...}{other arguments.} } \description{ Extract dimRedData. } dimRed/man/embed.Rd0000644000176200001440000000603513371631672013611 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/embed.R \docType{methods} \name{embed} \alias{embed} \alias{embed,formula-method} \alias{embed,ANY-method} \alias{embed,dimRedData-method} \title{dispatches the different methods for dimensionality reduction} \usage{ embed(.data, ...) \S4method{embed}{formula}(.formula, .data, .method = dimRedMethodList(), .mute = character(0), .keep.org.data = TRUE, ...) \S4method{embed}{ANY}(.data, .method = dimRedMethodList(), .mute = character(0), .keep.org.data = TRUE, ...) \S4method{embed}{dimRedData}(.data, .method = dimRedMethodList(), .mute = character(0), .keep.org.data = TRUE, ...) } \arguments{ \item{.data}{object of class \code{\link{dimRedData}}, will be converted to be of class \code{\link{dimRedData}} if necessary; see examples for details.} \item{...}{the parameters, internally passed as a list to the dimensionality reduction method as \code{pars = list(...)}} \item{.formula}{a formula, see \code{\link{as.dimRedData}}.} \item{.method}{character vector naming one of the dimensionality reduction techniques.} \item{.mute}{a character vector containing the elements you want to mute (\code{c("message", "output")}), defaults to \code{character(0)}.} \item{.keep.org.data}{\code{TRUE}/\code{FALSE} keep the original data.} } \value{ an object of class \code{\link{dimRedResult}} } \description{ wraps around all dimensionality reduction functions. 
} \details{ Method must be one of \code{\link{dimRedMethodList}()}, partial matching is performed. All parameters start with a dot, to avoid clashes with partial argument matching (see the R manual section 4.3.2), if there should ever occur any clashes in the arguments, call the function with all arguments named, e.g. \code{embed(.data = dat, .method = "mymethod", .d = "some parameter")}. } \section{Methods (by class)}{ \itemize{ \item \code{formula}: embed a data.frame using a formula. \item \code{ANY}: Embed anything as long as it can be coerced to \code{\link{dimRedData}}. \item \code{dimRedData}: Embed a dimRedData object }} \examples{ ## embed a data.frame using a formula: as.data.frame( embed(Species ~ Sepal.Length + Sepal.Width + Petal.Length + Petal.Width, iris, "PCA") ) ## embed a data.frame and return a data.frame as.data.frame(embed(iris[, 1:4], "PCA")) ## embed a matrix and return a data.frame as.data.frame(embed(as.matrix(iris[, 1:4]), "PCA")) \dontrun{ ## embed dimRedData objects embed_methods <- dimRedMethodList() quality_methods <- dimRedQualityList() dataset <- loadDataSet("Iris") quality_results <- matrix(NA, length(embed_methods), length(quality_methods), dimnames = list(embed_methods, quality_methods)) embedded_data <- list() for (e in embed_methods) { message("embedding: ", e) embedded_data[[e]] <- embed(dataset, e, .mute = c("message", "output")) for (q in quality_methods) { message(" quality: ", q) quality_results[e, q] <- tryCatch( quality(embedded_data[[e]], q), error = function(e) NA ) } } print(quality_results) } } dimRed/man/NNMF-class.Rd0000644000176200001440000000563113371631672014377 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/nnmf.R \docType{class} \name{NNMF-class} \alias{NNMF-class} \alias{NNMF} \title{Non-Negative Matrix Factorization} \description{ S4 Class implementing NNMF. } \details{ NNMF is a method for decomposing a matrix into a smaller dimension such that the constraint that the data (and the projection) are not negative is taken into account. } \section{Slots}{ \describe{ \item{\code{fun}}{A function that does the embedding and returns a dimRedResult object.} \item{\code{stdpars}}{The standard parameters for the function.} }} \section{General usage}{ Dimensionality reduction methods are S4 Classes that either be used directly, in which case they have to be initialized and a full list with parameters has to be handed to the \code{@fun()} slot, or the method name be passed to the embed function and parameters can be given to the \code{...}, in which case missing parameters will be replaced by the ones in the \code{@stdpars}. } \section{Parameters}{ The method can take the following parameters: \describe{ \item{ndim}{The number of output dimensions.} \item{method}{character, which algorithm should be used. See \code{\link[NMF]{nmf}} for possible values. Defaults to "brunet"} \item{nrun}{integer, the number of times the computations are conducted. See \code{\link[NMF]{nmf}}} \item{seed}{integer, a value to control the random numbers used.} \item{options}{named list, other options to pass to \code{\link[NMF]{nmf}}} } } \section{Implementation}{ Wraps around \code{\link[NMF]{nmf}}. Note that the estimation uses random numbers. To create reproducible results, set the random number seed in the function call. Also, in many cases, the computations will be conducted in parallel using multiple cores. To disable this, use the option \code{.pbackend = NULL}. 
} \examples{ dat <- loadDataSet("Iris") set.seed(4646) factorization <- embed(dat, "NNMF") proj_dat <- factorization@apply(dat) plot(proj_dat@data[, 1], proj_dat@data[, 2]) # project new values: nn_proj <- predict(factorization, iris[1:7, 1:4]) nn_proj } \references{ Lee, D.D., Seung, H.S., 1999. Learning the parts of objects by non-negative matrix factorization. Nature 401, 788-791. https://doi.org/10.1038/44565 } \seealso{ Other dimensionality reduction methods: \code{\link{AutoEncoder-class}}, \code{\link{DRR-class}}, \code{\link{DiffusionMaps-class}}, \code{\link{DrL-class}}, \code{\link{FastICA-class}}, \code{\link{FruchtermanReingold-class}}, \code{\link{HLLE-class}}, \code{\link{Isomap-class}}, \code{\link{KamadaKawai-class}}, \code{\link{LLE-class}}, \code{\link{MDS-class}}, \code{\link{PCA-class}}, \code{\link{PCA_L1-class}}, \code{\link{UMAP-class}}, \code{\link{dimRedMethod-class}}, \code{\link{dimRedMethodList}}, \code{\link{kPCA-class}}, \code{\link{nMDS-class}}, \code{\link{tSNE-class}} } \concept{dimensionality reduction methods} dimRed/man/getRotationMatrix.Rd0000644000176200001440000000152113371631672016214 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/get_info.R \name{getRotationMatrix} \alias{getRotationMatrix} \title{getRotationMatrix} \usage{ getRotationMatrix(x) } \arguments{ \item{x}{of type \code{\link{dimRedResult}}} } \value{ a matrix } \description{ Extract the rotation matrix from \code{\link{dimRedResult}} objects derived from PCA and FastICA } \details{ The data has to be pre-processed the same way as the method does, e.g. centering and/or scaling. } \examples{ dat <- loadDataSet("Iris") pca <- embed(dat, "PCA") ica <- embed(dat, "FastICA") rot_pca <- getRotationMatrix(pca) rot_ica <- getRotationMatrix(ica) scale(getData(dat), TRUE, FALSE) \%*\% rot_pca - getData(getDimRedData(pca)) scale(getData(dat), TRUE, FALSE) \%*\% rot_ica - getData(getDimRedData(ica)) } \concept{convenience functions} dimRed/man/LLE-class.Rd0000644000176200001440000000461513371631672014256 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/lle.R \docType{class} \name{LLE-class} \alias{LLE-class} \alias{LLE} \title{Locally Linear Embedding} \description{ An S4 Class implementing Locally Linear Embedding (LLE) } \details{ LLE approximates the points in the manifold by linear combination of its neighbors. These linear combinations are the same inside the manifold and in highdimensional space. } \section{Slots}{ \describe{ \item{\code{fun}}{A function that does the embedding and returns a dimRedResult object.} \item{\code{stdpars}}{The standard parameters for the function.} }} \section{General usage}{ Dimensionality reduction methods are S4 Classes that either be used directly, in which case they have to be initialized and a full list with parameters has to be handed to the \code{@fun()} slot, or the method name be passed to the embed function and parameters can be given to the \code{...}, in which case missing parameters will be replaced by the ones in the \code{@stdpars}. } \section{Parameters}{ LLE can take the following parameters: \describe{ \item{knn}{the number of neighbors for the knn graph., defaults to 50.} \item{ndim}{the number of embedding dimensions, defaults to 2.} } } \section{Implementation}{ Wraps around \code{\link[lle]{lle}}, only exposes the parameters \code{k} and \code{m}. 
} \examples{ dat <- loadDataSet("3D S Curve", n = 500) ## directy use the S4 class: lle <- LLE() emb <- lle@fun(dat, lle@stdpars) ## using embed(): emb2 <- embed(dat, "LLE", knn = 45) plot(emb, type = "2vars") plot(emb2, type = "2vars") } \references{ Roweis, S.T., Saul, L.K., 2000. Nonlinear Dimensionality Reduction by Locally Linear Embedding. Science 290, 2323-2326. doi:10.1126/science.290.5500.2323 } \seealso{ Other dimensionality reduction methods: \code{\link{AutoEncoder-class}}, \code{\link{DRR-class}}, \code{\link{DiffusionMaps-class}}, \code{\link{DrL-class}}, \code{\link{FastICA-class}}, \code{\link{FruchtermanReingold-class}}, \code{\link{HLLE-class}}, \code{\link{Isomap-class}}, \code{\link{KamadaKawai-class}}, \code{\link{MDS-class}}, \code{\link{NNMF-class}}, \code{\link{PCA-class}}, \code{\link{PCA_L1-class}}, \code{\link{UMAP-class}}, \code{\link{dimRedMethod-class}}, \code{\link{dimRedMethodList}}, \code{\link{kPCA-class}}, \code{\link{nMDS-class}}, \code{\link{tSNE-class}} } \concept{dimensionality reduction methods} dimRed/man/cophenetic_correlation-dimRedResult-method.Rd0000644000176200001440000000251313371631672023133 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/quality.R \docType{methods} \name{cophenetic_correlation,dimRedResult-method} \alias{cophenetic_correlation,dimRedResult-method} \alias{cophenetic_correlation} \title{Method cophenetic_correlation} \usage{ \S4method{cophenetic_correlation}{dimRedResult}(object, d = stats::dist, cor_method = "pearson") } \arguments{ \item{object}{of class dimRedResult} \item{d}{the distance function to use.} \item{cor_method}{The correlation method.} } \description{ Calculate the correlation between the distance matrices in high and low dimensioal space. } \seealso{ Other Quality scores for dimensionality reduction: \code{\link{AUC_lnK_R_NX,dimRedResult-method}}, \code{\link{LCMC,dimRedResult-method}}, \code{\link{Q_NX,dimRedResult-method}}, \code{\link{Q_global,dimRedResult-method}}, \code{\link{Q_local,dimRedResult-method}}, \code{\link{R_NX,dimRedResult-method}}, \code{\link{distance_correlation,dimRedResult-method}}, \code{\link{mean_R_NX,dimRedResult-method}}, \code{\link{plot_R_NX}}, \code{\link{quality,dimRedResult-method}}, \code{\link{reconstruction_error,dimRedResult-method}}, \code{\link{reconstruction_rmse,dimRedResult-method}}, \code{\link{total_correlation,dimRedResult-method}} } \concept{Quality scores for dimensionality reduction} dimRed/man/Q_NX-dimRedResult-method.Rd0000644000176200001440000000236013371631672017216 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/quality.R \docType{methods} \name{Q_NX,dimRedResult-method} \alias{Q_NX,dimRedResult-method} \alias{Q_NX} \title{Method Q_NX} \usage{ \S4method{Q_NX}{dimRedResult}(object) } \arguments{ \item{object}{of class dimRedResult} } \description{ Calculate the Q_NX score (Chen & Buja 2006, the notation in the publication is M_k). Which is the fraction of points that remain inside the same K-ary neighborhood in high and low dimensional space. 
} \seealso{ Other Quality scores for dimensionality reduction: \code{\link{AUC_lnK_R_NX,dimRedResult-method}}, \code{\link{LCMC,dimRedResult-method}}, \code{\link{Q_global,dimRedResult-method}}, \code{\link{Q_local,dimRedResult-method}}, \code{\link{R_NX,dimRedResult-method}}, \code{\link{cophenetic_correlation,dimRedResult-method}}, \code{\link{distance_correlation,dimRedResult-method}}, \code{\link{mean_R_NX,dimRedResult-method}}, \code{\link{plot_R_NX}}, \code{\link{quality,dimRedResult-method}}, \code{\link{reconstruction_error,dimRedResult-method}}, \code{\link{reconstruction_rmse,dimRedResult-method}}, \code{\link{total_correlation,dimRedResult-method}} } \concept{Quality scores for dimensionality reduction} dimRed/man/nMDS-class.Rd0000644000176200001440000000443413371631672014442 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/nmds.R \docType{class} \name{nMDS-class} \alias{nMDS-class} \alias{nMDS} \title{Non-Metric Dimensional Scaling} \description{ An S4 Class implementing Non-Metric Dimensional Scaling. } \details{ A non-linear extension of MDS using monotonic regression } \section{Slots}{ \describe{ \item{\code{fun}}{A function that does the embedding and returns a dimRedResult object.} \item{\code{stdpars}}{The standard parameters for the function.} }} \section{General usage}{ Dimensionality reduction methods are S4 Classes that either be used directly, in which case they have to be initialized and a full list with parameters has to be handed to the \code{@fun()} slot, or the method name be passed to the embed function and parameters can be given to the \code{...}, in which case missing parameters will be replaced by the ones in the \code{@stdpars}. } \section{Parameters}{ nMDS can take the following parameters: \describe{ \item{d}{A distance function.} \item{ndim}{The number of embedding dimensions.} } } \section{Implementation}{ Wraps around the \code{\link[vegan]{monoMDS}}. For parameters that are not available here, the standard configuration is used. } \examples{ dat <- loadDataSet("3D S Curve", n = 300) ## using the S4 classes: nmds <- nMDS() emb <- nmds@fun(dat, nmds@stdpars) ## using embed() emb2 <- embed(dat, "nMDS", d = function(x) exp(dist(x))) plot(emb, type = "2vars") plot(emb2, type = "2vars") } \references{ Kruskal, J.B., 1964. Nonmetric multidimensional scaling: A numerical method. Psychometrika 29, 115-129. https://doi.org/10.1007/BF02289694 } \seealso{ Other dimensionality reduction methods: \code{\link{AutoEncoder-class}}, \code{\link{DRR-class}}, \code{\link{DiffusionMaps-class}}, \code{\link{DrL-class}}, \code{\link{FastICA-class}}, \code{\link{FruchtermanReingold-class}}, \code{\link{HLLE-class}}, \code{\link{Isomap-class}}, \code{\link{KamadaKawai-class}}, \code{\link{LLE-class}}, \code{\link{MDS-class}}, \code{\link{NNMF-class}}, \code{\link{PCA-class}}, \code{\link{PCA_L1-class}}, \code{\link{UMAP-class}}, \code{\link{dimRedMethod-class}}, \code{\link{dimRedMethodList}}, \code{\link{kPCA-class}}, \code{\link{tSNE-class}} } \concept{dimensionality reduction methods} dimRed/man/getOtherData.Rd0000644000176200001440000000053013371631672015102 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/misc.R \name{getOtherData} \alias{getOtherData} \title{Method getOtherData} \usage{ getOtherData(object, ...) 
} \arguments{ \item{object}{The object to extract data from.} \item{...}{other arguments.} } \description{ Extract other data produced by a dimRedMethod } dimRed/man/maximize_correlation-dimRedResult-method.Rd0000644000176200001440000000177313065033470022634 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/rotate.R \docType{methods} \name{maximize_correlation,dimRedResult-method} \alias{maximize_correlation,dimRedResult-method} \alias{maximize_correlation} \title{Maximize Correlation with the Axes} \usage{ \S4method{maximize_correlation}{dimRedResult}(object, naxes = ncol(object@data@data), cor_method = "pearson") } \arguments{ \item{object}{A dimRedResult object} \item{naxes}{the number of axes to optimize for.} \item{cor_method}{which correlation method to use} } \description{ Rotates the data in such a way that the correlation with the first \code{naxes} axes is maximized. } \details{ Methods that do not use eigenvector decomposition, like t-SNE often do not align the data with axes according to the correlation of variables with the data. \code{maximize_correlation} uses the \code{\link[optimx]{optimx}} package to rotate the data in such a way that the original variables have maximum correlation with the embedding axes. } dimRed/man/as.data.frame.Rd0000644000176200001440000000124213065033470015124 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/misc.R \name{as.data.frame} \alias{as.data.frame} \title{Converts to data.frame} \usage{ as.data.frame(x, row.names, optional, ...) } \arguments{ \item{x}{The object to be converted} \item{row.names}{unused in \code{dimRed}} \item{optional}{unused in \code{dimRed}} \item{...}{other arguments.} } \description{ General conversions of objects created by \code{dimRed} to \code{data.frame}. See class documentations for details (\code{\link{dimRedData}}, \code{\link{dimRedResult}}). For the documentation of this function in base package, see here: \code{\link[base]{as.data.frame.default}}. } dimRed/man/total_correlation-dimRedResult-method.Rd0000644000176200001440000000267013371631672022141 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/quality.R \docType{methods} \name{total_correlation,dimRedResult-method} \alias{total_correlation,dimRedResult-method} \alias{total_correlation} \title{Method total_correlation} \usage{ \S4method{total_correlation}{dimRedResult}(object, naxes = ndims(object), cor_method = "pearson", is.rotated = FALSE) } \arguments{ \item{object}{of class dimRedResult} \item{naxes}{the number of axes to use for optimization.} \item{cor_method}{the correlation method to use.} \item{is.rotated}{if FALSE the object is rotated.} } \description{ Calculate the total correlation of the variables with the axes to assess the quality of a dimensionality reduction. 
} \seealso{ Other Quality scores for dimensionality reduction: \code{\link{AUC_lnK_R_NX,dimRedResult-method}}, \code{\link{LCMC,dimRedResult-method}}, \code{\link{Q_NX,dimRedResult-method}}, \code{\link{Q_global,dimRedResult-method}}, \code{\link{Q_local,dimRedResult-method}}, \code{\link{R_NX,dimRedResult-method}}, \code{\link{cophenetic_correlation,dimRedResult-method}}, \code{\link{distance_correlation,dimRedResult-method}}, \code{\link{mean_R_NX,dimRedResult-method}}, \code{\link{plot_R_NX}}, \code{\link{quality,dimRedResult-method}}, \code{\link{reconstruction_error,dimRedResult-method}}, \code{\link{reconstruction_rmse,dimRedResult-method}} } \concept{Quality scores for dimensionality reduction} dimRed/man/PCA-class.Rd0000644000176200001440000000531513371631672014243 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/pca.R \docType{class} \name{PCA-class} \alias{PCA-class} \alias{PCA} \title{Principal Component Analysis} \description{ S4 Class implementing PCA. } \details{ PCA transforms the data in orthogonal components so that the first axis accounts for the larges variance in the data, all the following axes account for the highest variance under the constraint that they are orthogonal to the preceding axes. PCA is sensitive to the scaling of the variables. PCA is by far the fastest and simples method of dimensionality reduction and should probably always be applied as a baseline if other methods are tested. } \section{Slots}{ \describe{ \item{\code{fun}}{A function that does the embedding and returns a dimRedResult object.} \item{\code{stdpars}}{The standard parameters for the function.} }} \section{General usage}{ Dimensionality reduction methods are S4 Classes that either be used directly, in which case they have to be initialized and a full list with parameters has to be handed to the \code{@fun()} slot, or the method name be passed to the embed function and parameters can be given to the \code{...}, in which case missing parameters will be replaced by the ones in the \code{@stdpars}. } \section{Parameters}{ PCA can take the following parameters: \describe{ \item{ndim}{The number of output dimensions.} \item{center}{logical, should the data be centered, defaults to \code{TRUE}.} \item{scale.}{logical, should the data be scaled, defaults to \code{FALSE}.} } } \section{Implementation}{ Wraps around \code{\link{prcomp}}. Because PCA can be reduced to a simple rotation, forward and backward projection functions are supplied. } \examples{ dat <- loadDataSet("Iris") ## using the S4 Class pca <- PCA() emb <- pca@fun(dat, pca@stdpars) ## using embed() emb2 <- embed(dat, "PCA") plot(emb, type = "2vars") plot(emb@inverse(emb@data), type = "3vars") } \references{ Pearson, K., 1901. On lines and planes of closest fit to systems of points in space. Philosophical Magazine 2, 559-572. 
} \seealso{ Other dimensionality reduction methods: \code{\link{AutoEncoder-class}}, \code{\link{DRR-class}}, \code{\link{DiffusionMaps-class}}, \code{\link{DrL-class}}, \code{\link{FastICA-class}}, \code{\link{FruchtermanReingold-class}}, \code{\link{HLLE-class}}, \code{\link{Isomap-class}}, \code{\link{KamadaKawai-class}}, \code{\link{LLE-class}}, \code{\link{MDS-class}}, \code{\link{NNMF-class}}, \code{\link{PCA_L1-class}}, \code{\link{UMAP-class}}, \code{\link{dimRedMethod-class}}, \code{\link{dimRedMethodList}}, \code{\link{kPCA-class}}, \code{\link{nMDS-class}}, \code{\link{tSNE-class}} } \concept{dimensionality reduction methods} dimRed/man/HLLE-class.Rd0000644000176200001440000000454013371631672014363 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/hlle.R \docType{class} \name{HLLE-class} \alias{HLLE-class} \alias{HLLE} \title{Hessian Locally Linear Embedding} \description{ An S4 Class implementing Hessian Locally Linear Embedding (HLLE) } \details{ HLLE uses local hessians to approximate the curvines and is an extension to non-convex subsets in lowdimensional space. } \section{Slots}{ \describe{ \item{\code{fun}}{A function that does the embedding and returns a dimRedResult object.} \item{\code{stdpars}}{The standard parameters for the function.} }} \section{General usage}{ Dimensionality reduction methods are S4 Classes that either be used directly, in which case they have to be initialized and a full list with parameters has to be handed to the \code{@fun()} slot, or the method name be passed to the embed function and parameters can be given to the \code{...}, in which case missing parameters will be replaced by the ones in the \code{@stdpars}. } \section{Parameters}{ HLLE can take the following parameters: \describe{ \item{knn}{neighborhood size} \item{ndim}{number of output dimensions} } } \section{Implementation}{ Own implementation, sticks to the algorithm in Donoho and Grimes (2003). Makes use of sparsity to speed up final embedding. } \examples{ dat <- loadDataSet("3D S Curve", n = 300) ## directy use the S4 class: hlle <- HLLE() emb <- hlle@fun(dat, hlle@stdpars) ## using embed(): emb2 <- embed(dat, "HLLE", knn = 45) plot(emb, type = "2vars") plot(emb2, type = "2vars") } \references{ Donoho, D.L., Grimes, C., 2003. Hessian eigenmaps: Locally linear embedding techniques for high-dimensional data. PNAS 100, 5591-5596. doi:10.1073/pnas.1031596100 } \seealso{ Other dimensionality reduction methods: \code{\link{AutoEncoder-class}}, \code{\link{DRR-class}}, \code{\link{DiffusionMaps-class}}, \code{\link{DrL-class}}, \code{\link{FastICA-class}}, \code{\link{FruchtermanReingold-class}}, \code{\link{Isomap-class}}, \code{\link{KamadaKawai-class}}, \code{\link{LLE-class}}, \code{\link{MDS-class}}, \code{\link{NNMF-class}}, \code{\link{PCA-class}}, \code{\link{PCA_L1-class}}, \code{\link{UMAP-class}}, \code{\link{dimRedMethod-class}}, \code{\link{dimRedMethodList}}, \code{\link{kPCA-class}}, \code{\link{nMDS-class}}, \code{\link{tSNE-class}} } \concept{dimensionality reduction methods} dimRed/man/DiffusionMaps-class.Rd0000644000176200001440000000663713371631672016417 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/diffmap.R \docType{class} \name{DiffusionMaps-class} \alias{DiffusionMaps-class} \alias{DiffusionMaps} \title{Diffusion Maps} \description{ An S4 Class implementing Diffusion Maps } \details{ Diffusion Maps uses a diffusion probability matrix to robustly approximate a manifold. 
} \section{Slots}{ \describe{ \item{\code{fun}}{A function that does the embedding and returns a dimRedResult object.} \item{\code{stdpars}}{The standard parameters for the function.} }} \section{General usage}{ Dimensionality reduction methods are S4 Classes that either be used directly, in which case they have to be initialized and a full list with parameters has to be handed to the \code{@fun()} slot, or the method name be passed to the embed function and parameters can be given to the \code{...}, in which case missing parameters will be replaced by the ones in the \code{@stdpars}. } \section{Parameters}{ Diffusion Maps can take the following parameters: \describe{ \item{d}{a function transforming a matrix row wise into a distance matrix or \code{dist} object, e.g. \code{\link[stats]{dist}}.} \item{ndim}{The number of dimensions} \item{eps}{The epsilon parameter that determines the diffusion weight matrix from a distance matrix \code{d}, \eqn{exp(-d^2/eps)}, if set to \code{"auto"} it will be set to the median distance to the 0.01*n nearest neighbor.} \item{t}{Time-scale parameter. The recommended value, 0, uses multiscale geometry.} \item{delta}{Sparsity cut-off for the symmetric graph Laplacian, a higher value results in more sparsity and faster calculation. The predefined value is 10^-5.} } } \section{Implementation}{ Wraps around \code{\link[diffusionMap]{diffuse}}, see there for details. It uses the notation of Richards et al. (2009) which is slightly different from the one in the original paper (Coifman and Lafon, 2006) and there is no \eqn{\alpha} parameter. There is also an out-of-sample extension, see examples. } \examples{ dat <- loadDataSet("3D S Curve", n = 300) ## use the S4 Class directly: diffmap <- DiffusionMaps() emb <- diffmap@fun(dat, diffmap@stdpars) ## simpler, use embed(): emb2 <- embed(dat, "DiffusionMaps") plot(emb, type = "2vars") samp <- sample(floor(nrow(dat) / 10)) embsamp <- diffmap@fun(dat[samp], diffmap@stdpars) embother <- embsamp@apply(dat[-samp]) plot(embsamp, type = "2vars") points(embother@data) } \references{ Richards, J.W., Freeman, P.E., Lee, A.B., Schafer, C.M., 2009. Exploiting Low-Dimensional Structure in Astronomical Spectra. ApJ 691, 32. doi:10.1088/0004-637X/691/1/32 Coifman, R.R., Lafon, S., 2006. Diffusion maps. Applied and Computational Harmonic Analysis 21, 5-30. 
doi:10.1016/j.acha.2006.04.006 } \seealso{ Other dimensionality reduction methods: \code{\link{AutoEncoder-class}}, \code{\link{DRR-class}}, \code{\link{DrL-class}}, \code{\link{FastICA-class}}, \code{\link{FruchtermanReingold-class}}, \code{\link{HLLE-class}}, \code{\link{Isomap-class}}, \code{\link{KamadaKawai-class}}, \code{\link{LLE-class}}, \code{\link{MDS-class}}, \code{\link{NNMF-class}}, \code{\link{PCA-class}}, \code{\link{PCA_L1-class}}, \code{\link{UMAP-class}}, \code{\link{dimRedMethod-class}}, \code{\link{dimRedMethodList}}, \code{\link{kPCA-class}}, \code{\link{nMDS-class}}, \code{\link{tSNE-class}} } \concept{dimensionality reduction methods} dimRed/man/makeKNNgraph.Rd0000644000176200001440000000141513065033470015030 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/misc.R \name{makeKNNgraph} \alias{makeKNNgraph} \title{makeKNNgraph} \usage{ makeKNNgraph(x, k, eps = 0, diag = FALSE) } \arguments{ \item{x}{data, a matrix, observations in rows, dimensions in columns} \item{k}{the number of nearest neighbors.} \item{eps}{number, if \code{eps > 0} the KNN search is approximate, see \code{\link[RANN]{nn2}}} \item{diag}{logical, if \code{TRUE} every edge of the returned graph will have an edge with weight \code{0} to itself.} } \value{ an object of type \code{\link[igraph]{igraph}} with edge weight being the distances. } \description{ Create a K-nearest neighbor graph from data x. Uses \code{\link[RANN]{nn2}} as a fast way to find the neares neighbors. } dimRed/man/AUC_lnK_R_NX-dimRedResult-method.Rd0000644000176200001440000000354413371631672020520 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/quality.R \docType{methods} \name{AUC_lnK_R_NX,dimRedResult-method} \alias{AUC_lnK_R_NX,dimRedResult-method} \alias{AUC_lnK_R_NX} \title{Method AUC_lnK_R_NX} \usage{ \S4method{AUC_lnK_R_NX}{dimRedResult}(object, weight = "inv") } \arguments{ \item{object}{of class dimRedResult} \item{weight}{the weight function used, one of \code{c("inv", "log", "log10")}} } \description{ Calculate the Area under the R_NX(ln K), used in Lee et. al. (2015). Note that despite the name, this does not weight the mean by the logarithm, but by 1/K. If explicit weighting by the logarithm is desired use \code{weight = "log"} or \code{weight = "log10"} } \details{ The naming confusion originated from equation 17 in Lee et al (2015) and the name of this method may change in the future to avoid confusion. } \references{ Lee, J.A., Peluffo-Ordonez, D.H., Verleysen, M., 2015. Multi-scale similarities in stochastic neighbour embedding: Reducing dimensionality while preserving both local and global structure. Neurocomputing 169, 246-261. 
https://doi.org/10.1016/j.neucom.2014.12.095 } \seealso{ Other Quality scores for dimensionality reduction: \code{\link{LCMC,dimRedResult-method}}, \code{\link{Q_NX,dimRedResult-method}}, \code{\link{Q_global,dimRedResult-method}}, \code{\link{Q_local,dimRedResult-method}}, \code{\link{R_NX,dimRedResult-method}}, \code{\link{cophenetic_correlation,dimRedResult-method}}, \code{\link{distance_correlation,dimRedResult-method}}, \code{\link{mean_R_NX,dimRedResult-method}}, \code{\link{plot_R_NX}}, \code{\link{quality,dimRedResult-method}}, \code{\link{reconstruction_error,dimRedResult-method}}, \code{\link{reconstruction_rmse,dimRedResult-method}}, \code{\link{total_correlation,dimRedResult-method}} } \concept{Quality scores for dimensionality reduction} dimRed/man/installSuggests.Rd0000644000176200001440000000112513065033470015713 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/misc.R \name{installSuggests} \alias{installSuggests} \title{getSuggests} \usage{ installSuggests() } \description{ Install packages which are suggested by dimRed. } \details{ By default dimRed will not install all the dependencies, because there are quite a lot and in case some of them are not available for your platform, you will not be able to install dimRed without problems. To solve this I provide a function which automatically installs all the suggested packages. } \examples{ \dontrun{ installSuggests() } } dimRed/man/UMAP-class.Rd0000644000176200001440000000641313371631672014402 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/umap.R \docType{class} \name{UMAP-class} \alias{UMAP-class} \alias{UMAP} \title{UMAP embedding} \description{ An S4 Class implementing the UMAP algorithm } \details{ Uniform Manifold Approximation and Projection (UMAP) is a gradient descent based algorithm that gives results similar to t-SNE, but scales better with the number of points. } \section{Slots}{ \describe{ \item{\code{fun}}{A function that does the embedding and returns a dimRedResult object.} \item{\code{stdpars}}{The standard parameters for the function.} }} \section{General usage}{ Dimensionality reduction methods are S4 Classes that can either be used directly, in which case they have to be initialized and a full list with parameters has to be handed to the \code{@fun()} slot, or the method name can be passed to the embed function and parameters can be given to the \code{...}, in which case missing parameters will be replaced by the ones in the \code{@stdpars}. } \section{Parameters}{ UMAP can take the following parameters: \describe{ \item{ndim}{The number of embedding dimensions.} \item{knn}{The number of neighbors to be used.} \item{d}{The distance metric to use.} \item{method}{\code{"naive"} for an R implementation, \code{"python"} for the reference implementation.} } Other method parameters can also be passed, see \code{\link[umap]{umap.defaults}} for details. The ones above have been standardized for the use with \code{dimRed} and will get automatically translated for \code{\link[umap]{umap}}. } \section{Implementation}{ The dimRed package wraps the \code{\link[umap]{umap}} package, which provides an implementation in pure R and also a wrapper around the original python package \code{umap-learn} (https://github.com/lmcinnes/umap/). The \code{"naive"} implementation is a pure R implementation and is considered experimental at the point of writing this; it is also much slower than the python implementation.
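For example (a minimal sketch; the \code{knn} value is arbitrary and each call assumes that the respective backend is installed), the backend is selected through the \code{method} parameter described above:
\preformatted{
## sketch: choose the UMAP backend explicitly when embedding
dat    <- loadDataSet("3D S Curve", n = 300)
emb_r  <- embed(dat, "UMAP", method = "naive", knn = 15)    # pure R backend
## emb_py <- embed(dat, "UMAP", method = "python", knn = 15) # needs umap-learn
}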
The \code{"python"} implementation is the reference implementation used by McInnes et al. (2018). It requires the \code{\link[reticulate]{reticulate}} package for the interaction with python and the python package \code{umap-learn} installed (use \code{pip install umap-learn}). } \examples{ \dontrun{ dat <- loadDataSet("3D S Curve", n = 300) ## use the S4 Class directly: umap <- UMAP() emb <- umap@fun(dat, umap@stdpars) ## or simpler, use embed(): emb2 <- embed(dat, "UMAP", .mute = NULL, knn = 10) plot(emb2, type = "2vars") } } \references{ McInnes, Leland, and John Healy. "UMAP: Uniform Manifold Approximation and Projection for Dimension Reduction." https://arxiv.org/abs/1802.03426 } \seealso{ Other dimensionality reduction methods: \code{\link{AutoEncoder-class}}, \code{\link{DRR-class}}, \code{\link{DiffusionMaps-class}}, \code{\link{DrL-class}}, \code{\link{FastICA-class}}, \code{\link{FruchtermanReingold-class}}, \code{\link{HLLE-class}}, \code{\link{Isomap-class}}, \code{\link{KamadaKawai-class}}, \code{\link{LLE-class}}, \code{\link{MDS-class}}, \code{\link{NNMF-class}}, \code{\link{PCA-class}}, \code{\link{PCA_L1-class}}, \code{\link{dimRedMethod-class}}, \code{\link{dimRedMethodList}}, \code{\link{kPCA-class}}, \code{\link{nMDS-class}}, \code{\link{tSNE-class}} } \concept{dimensionality reduction methods} dimRed/man/getNDim.Rd0000644000176200001440000000051013141064362014053 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/misc.R \name{getNDim} \alias{getNDim} \title{Method getNDim} \usage{ getNDim(object, ...) } \arguments{ \item{object}{The object to get the dimensions from.} \item{...}{other arguments.} } \description{ Extract the number of embedding dimensions. } dimRed/man/getPars.Rd0000644000176200001440000000045113065033470014126 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/misc.R \name{getPars} \alias{getPars} \title{Method getPars} \usage{ getPars(object, ...) } \arguments{ \item{object}{The object to be converted.} \item{...}{other arguments.} } \description{ Extracts the pars slot. } dimRed/man/distance_correlation-dimRedResult-method.Rd0000644000176200001440000000231413371631672022603 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/quality.R \docType{methods} \name{distance_correlation,dimRedResult-method} \alias{distance_correlation,dimRedResult-method} \alias{distance_correlation} \title{Method distance_correlation} \usage{ \S4method{distance_correlation}{dimRedResult}(object) } \arguments{ \item{object}{of class dimRedResult} } \description{ Calculate the distance correlation between the distance matrices in high and low dimensional space.
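For example (a short sketch; it requires the \pkg{energy} package to be installed and an embedding that retains the original data, which \code{embed()} does by default):
\preformatted{
## sketch: distance correlation between original and embedded coordinates
emb <- embed(loadDataSet("Iris"), "PCA")
distance_correlation(emb)
}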
} \seealso{ Other Quality scores for dimensionality reduction: \code{\link{AUC_lnK_R_NX,dimRedResult-method}}, \code{\link{LCMC,dimRedResult-method}}, \code{\link{Q_NX,dimRedResult-method}}, \code{\link{Q_global,dimRedResult-method}}, \code{\link{Q_local,dimRedResult-method}}, \code{\link{R_NX,dimRedResult-method}}, \code{\link{cophenetic_correlation,dimRedResult-method}}, \code{\link{mean_R_NX,dimRedResult-method}}, \code{\link{plot_R_NX}}, \code{\link{quality,dimRedResult-method}}, \code{\link{reconstruction_error,dimRedResult-method}}, \code{\link{reconstruction_rmse,dimRedResult-method}}, \code{\link{total_correlation,dimRedResult-method}} } \concept{Quality scores for dimensionality reduction} dimRed/man/DrL-class.Rd0000644000176200001440000000522013371631672014314 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/graph_embed.R \docType{class} \name{DrL-class} \alias{DrL-class} \alias{DrL} \title{Distributed Recursive Graph Layout} \description{ An S4 Class implementing Distributed recursive Graph Layout. } \details{ DrL uses a complex algorithm to avoid local minima in the graph embedding which uses several steps. } \section{Slots}{ \describe{ \item{\code{fun}}{A function that does the embedding and returns a dimRedResult object.} \item{\code{stdpars}}{The standard parameters for the function.} }} \section{General usage}{ Dimensionality reduction methods are S4 Classes that either be used directly, in which case they have to be initialized and a full list with parameters has to be handed to the \code{@fun()} slot, or the method name be passed to the embed function and parameters can be given to the \code{...}, in which case missing parameters will be replaced by the ones in the \code{@stdpars}. } \section{Parameters}{ DrL can take the following parameters: \describe{ \item{ndim}{The number of dimensions, defaults to 2. Can only be 2 or 3} \item{knn}{Reduce the graph to keep only the neares neighbors. Defaults to 100.} \item{d}{The distance function to determine the weights of the graph edges. Defaults to euclidean distances.} } } \section{Implementation}{ Wraps around \code{\link[igraph]{layout_with_drl}}. The parameters maxiter, epsilon and kkconst are set to the default values and cannot be set, this may change in a future release. The DimRed Package adds an extra sparsity parameter by constructing a knn graph which also may improve visualization quality. } \examples{ \dontrun{ dat <- loadDataSet("Swiss Roll", n = 300) ## use the S4 Class directly: drl <- DrL() emb <- drl@fun(dat, drl@stdpars) ## simpler, use embed(): emb2 <- embed(dat, "DrL") plot(emb) } } \references{ Martin, S., Brown, W.M., Wylie, B.N., 2007. Dr.l: Distributed Recursive (graph) Layout (No. dRl; 002182MLTPL00). Sandia National Laboratories. 
} \seealso{ Other dimensionality reduction methods: \code{\link{AutoEncoder-class}}, \code{\link{DRR-class}}, \code{\link{DiffusionMaps-class}}, \code{\link{FastICA-class}}, \code{\link{FruchtermanReingold-class}}, \code{\link{HLLE-class}}, \code{\link{Isomap-class}}, \code{\link{KamadaKawai-class}}, \code{\link{LLE-class}}, \code{\link{MDS-class}}, \code{\link{NNMF-class}}, \code{\link{PCA-class}}, \code{\link{PCA_L1-class}}, \code{\link{UMAP-class}}, \code{\link{dimRedMethod-class}}, \code{\link{dimRedMethodList}}, \code{\link{kPCA-class}}, \code{\link{nMDS-class}}, \code{\link{tSNE-class}} } \concept{dimensionality reduction methods} dimRed/man/reconstruction_rmse-dimRedResult-method.Rd0000644000176200001440000000233713371631672022524 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/quality.R \docType{methods} \name{reconstruction_rmse,dimRedResult-method} \alias{reconstruction_rmse,dimRedResult-method} \alias{reconstruction_rmse} \title{Method reconstruction_rmse} \usage{ \S4method{reconstruction_rmse}{dimRedResult}(object) } \arguments{ \item{object}{of class dimRedResult} } \description{ Calculate the reconstruction root mean squared error a dimensionality reduction, the method must have an inverse mapping. } \seealso{ Other Quality scores for dimensionality reduction: \code{\link{AUC_lnK_R_NX,dimRedResult-method}}, \code{\link{LCMC,dimRedResult-method}}, \code{\link{Q_NX,dimRedResult-method}}, \code{\link{Q_global,dimRedResult-method}}, \code{\link{Q_local,dimRedResult-method}}, \code{\link{R_NX,dimRedResult-method}}, \code{\link{cophenetic_correlation,dimRedResult-method}}, \code{\link{distance_correlation,dimRedResult-method}}, \code{\link{mean_R_NX,dimRedResult-method}}, \code{\link{plot_R_NX}}, \code{\link{quality,dimRedResult-method}}, \code{\link{reconstruction_error,dimRedResult-method}}, \code{\link{total_correlation,dimRedResult-method}} } \concept{Quality scores for dimensionality reduction} dimRed/man/dimRed-package.Rd0000644000176200001440000000263013255015610015314 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/dimRed.R \docType{package} \name{dimRed-package} \alias{dimRed} \alias{dimRed-package} \title{The dimRed package} \description{ This package simplifies dimensionality reduction in R by providing a framework of S4 classes and methods. dimRed collects dimensionality reduction methods that are implemented in R and implements others. It gives them a common interface and provides plotting functions for visualization and functions for quality assessment. Funding provided by the Department for Biogeochemical Integration, Empirical Inference of the Earth System Group, at the Max Plack Institute for Biogeochemistry, Jena. } \references{ Lee, J.A., Renard, E., Bernard, G., Dupont, P., Verleysen, M., 2013. Type 1 and 2 mixtures of Kullback-Leibler divergences as cost functions in dimensionality reduction based on similarity preservation. Neurocomputing. 112, 92-107. doi:10.1016/j.neucom.2012.12.036 Lee, J.A., Lee, J.A., Verleysen, M., 2008. Rank-based quality assessment of nonlinear dimensionality reduction. Proceedings of ESANN 2008 49-54. Chen, L., Buja, A., 2006. Local Multidimensional Scaling for Nonlinear Dimension Reduction, Graph Layout and Proximity Analysis. 
} \seealso{ Useful links: \itemize{ \item \url{https://github.com/gdkrmr/dimRed} } } \author{ \strong{Maintainer}: Guido Kraemer \email{gkraemer@bgc-jena.mpg.de} } dimRed/man/getOrgData.Rd0000644000176200001440000000047513065033470014550 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/misc.R \name{getOrgData} \alias{getOrgData} \title{Method getOrgData} \usage{ getOrgData(object, ...) } \arguments{ \item{object}{The object to extract data from.} \item{...}{other arguments.} } \description{ Extract the Original data. } dimRed/man/dimRedResult-class.Rd0000644000176200001440000000757013371631672016250 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/dimRedResult-class.R \docType{class} \name{dimRedResult-class} \alias{dimRedResult-class} \alias{dimRedResult} \alias{predict,dimRedResult-method} \alias{inverse,dimRedResult-method} \alias{inverse} \alias{as.data.frame,dimRedResult-method} \alias{getPars,dimRedResult-method} \alias{getNDim,dimRedResult-method} \alias{print,dimRedResult-method} \alias{getOrgData,dimRedResult-method} \alias{getDimRedData,dimRedResult-method} \alias{ndims,dimRedResult-method} \alias{getOtherData,dimRedResult-method} \title{Class "dimRedResult"} \usage{ \S4method{predict}{dimRedResult}(object, xnew) \S4method{inverse}{dimRedResult}(object, ynew) \S4method{as.data.frame}{dimRedResult}(x, org.data.prefix = "org.", meta.prefix = "meta.", data.prefix = "") \S4method{getPars}{dimRedResult}(object) \S4method{getNDim}{dimRedResult}(object) \S4method{print}{dimRedResult}(x) \S4method{getOrgData}{dimRedResult}(object) \S4method{getDimRedData}{dimRedResult}(object) \S4method{ndims}{dimRedResult}(object) \S4method{getOtherData}{dimRedResult}(object) } \arguments{ \item{object}{Of class \code{dimRedResult}} \item{xnew}{new data, of type \code{\link{dimRedData}}} \item{ynew}{embedded data, of type \code{\link{dimRedData}}} \item{x}{Of class \code{dimRedResult}} \item{org.data.prefix}{Prefix for the columns of the org.data slot.} \item{meta.prefix}{Prefix for the columns of \code{x@data@meta}.} \item{data.prefix}{Prefix for the columns of \code{x@data@data}.} } \description{ A class to hold the results of of a dimensionality reduction. } \section{Methods (by generic)}{ \itemize{ \item \code{predict}: apply a trained method to new data, does not work with all methods, will give an error if there is no \code{apply}. In some cases the apply function may only be an approximation. \item \code{inverse}: inverse transformation of embedded data, does not work with all methods, will give an error if there is no \code{inverse}. In some cases the apply function may only be an approximation. \item \code{as.data.frame}: convert to \code{data.frame} \item \code{getPars}: Get the parameters with which the method was called. \item \code{getNDim}: Get the number of embedding dimensions. \item \code{print}: Method for printing. \item \code{getOrgData}: Get the original data and meta.data \item \code{getDimRedData}: Get the embedded data \item \code{ndims}: Extract the number of embedding dimensions. 
\item \code{getOtherData}: Get other data produced by the method }} \section{Slots}{ \describe{ \item{\code{data}}{Output data of class dimRedData.} \item{\code{org.data}}{original data, a matrix.} \item{\code{apply}}{a function to apply the method to out-of-sample data, may not exist.} \item{\code{inverse}}{a function to calculate the original coordinates from reduced space, may not exist.} \item{\code{has.org.data}}{logical, if the original data is included in the object.} \item{\code{has.apply}}{logical, if a forward method exists.} \item{\code{has.inverse}}{logical, if an inverse method exists.} \item{\code{method}}{saves the method used.} \item{\code{pars}}{saves the parameters used.} \item{\code{other.data}}{other data produced by the method, e.g. a distance matrix.} }} \examples{ ## Create object by embedding data iris.pca <- embed(loadDataSet("Iris"), "PCA") ## Convert the result to a data.frame head(as(iris.pca, "data.frame")) head(as.data.frame(iris.pca)) ## There are no name clashes to avoid here: head(as.data.frame(iris.pca, org.data.prefix = "", meta.prefix = "", data.prefix = "")) ## Print it more or less nicely: print(iris.pca) ## Get the embedded data as a dimRedData object: getDimRedData(iris.pca) ## Get the original data including meta information: getOrgData(iris.pca) ## Get the number of variables: ndims(iris.pca) } \concept{dimRedResult} dimRed/man/Q_local-dimRedResult-method.Rd0000644000176200001440000000235613371631672017770 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/quality.R \docType{methods} \name{Q_local,dimRedResult-method} \alias{Q_local,dimRedResult-method} \alias{Q_local} \title{Method Q_local} \usage{ \S4method{Q_local}{dimRedResult}(object, ndim = getNDim(object)) } \arguments{ \item{object}{of class dimRedResult.} \item{ndim}{use the first ndim columns of the embedded data for calculation.} } \description{ Calculate the Q_local score to assess the quality of a dimensionality reduction. } \seealso{ Other Quality scores for dimensionality reduction: \code{\link{AUC_lnK_R_NX,dimRedResult-method}}, \code{\link{LCMC,dimRedResult-method}}, \code{\link{Q_NX,dimRedResult-method}}, \code{\link{Q_global,dimRedResult-method}}, \code{\link{R_NX,dimRedResult-method}}, \code{\link{cophenetic_correlation,dimRedResult-method}}, \code{\link{distance_correlation,dimRedResult-method}}, \code{\link{mean_R_NX,dimRedResult-method}}, \code{\link{plot_R_NX}}, \code{\link{quality,dimRedResult-method}}, \code{\link{reconstruction_error,dimRedResult-method}}, \code{\link{reconstruction_rmse,dimRedResult-method}}, \code{\link{total_correlation,dimRedResult-method}} } \concept{Quality scores for dimensionality reduction} dimRed/man/MDS-class.Rd0000644000176200001440000000475313371631672014270 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/mds.R \docType{class} \name{MDS-class} \alias{MDS-class} \alias{MDS} \title{Metric Dimensional Scaling} \description{ An S4 Class implementing classical scaling (MDS). } \details{ MDS tries to maintain distances in high- and low-dimensional space; it has the advantage over PCA that arbitrary distance functions can be used, but it is computationally more demanding.
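For instance (a minimal sketch; the choice of data set and metric is arbitrary), a non-Euclidean distance can be plugged in directly via the \code{d} parameter:
\preformatted{
## sketch: classical scaling on Manhattan instead of euclidean distances
dat    <- loadDataSet("Iris")
emb_l1 <- embed(dat, "MDS", d = function(x) dist(x, method = "manhattan"))
plot(emb_l1, type = "2vars")
}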
} \section{Slots}{ \describe{ \item{\code{fun}}{A function that does the embedding and returns a dimRedResult object.} \item{\code{stdpars}}{The standard parameters for the function.} }} \section{General usage}{ Dimensionality reduction methods are S4 Classes that can either be used directly, in which case they have to be initialized and a full list with parameters has to be handed to the \code{@fun()} slot, or the method name can be passed to the embed function and parameters can be given to the \code{...}, in which case missing parameters will be replaced by the ones in the \code{@stdpars}. } \section{Parameters}{ MDS can take the following parameters: \describe{ \item{ndim}{The number of dimensions.} \item{d}{The function to calculate the distance matrix from the input coordinates, defaults to euclidean distances.} } } \section{Implementation}{ Wraps around \code{\link[stats]{cmdscale}}. The implementation also provides an out-of-sample extension which is not completely optimized yet. } \examples{ \dontrun{ dat <- loadDataSet("3D S Curve") ## Use the S4 Class directly: mds <- MDS() emb <- mds@fun(dat, mds@stdpars) ## use embed(): emb2 <- embed(dat, "MDS", d = function(x) exp(stats::dist(x))) plot(emb, type = "2vars") plot(emb2, type = "2vars") } } \references{ Torgerson, W.S., 1952. Multidimensional scaling: I. Theory and method. Psychometrika 17, 401-419. https://doi.org/10.1007/BF02288916 } \seealso{ Other dimensionality reduction methods: \code{\link{AutoEncoder-class}}, \code{\link{DRR-class}}, \code{\link{DiffusionMaps-class}}, \code{\link{DrL-class}}, \code{\link{FastICA-class}}, \code{\link{FruchtermanReingold-class}}, \code{\link{HLLE-class}}, \code{\link{Isomap-class}}, \code{\link{KamadaKawai-class}}, \code{\link{LLE-class}}, \code{\link{NNMF-class}}, \code{\link{PCA-class}}, \code{\link{PCA_L1-class}}, \code{\link{UMAP-class}}, \code{\link{dimRedMethod-class}}, \code{\link{dimRedMethodList}}, \code{\link{kPCA-class}}, \code{\link{nMDS-class}}, \code{\link{tSNE-class}} } \concept{dimensionality reduction methods} dimRed/man/KamadaKawai-class.Rd0000644000176200001440000000571013371631672015772 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/graph_embed.R \docType{class} \name{KamadaKawai-class} \alias{KamadaKawai-class} \alias{KamadaKawai} \title{Graph Embedding via the Kamada Kawai Algorithm} \description{ An S4 Class implementing the Kamada Kawai Algorithm for graph embedding. } \details{ Graph embedding algorithms see the data as a graph. Between the nodes of the graph exist attracting and repelling forces which can be modeled as electrical fields or springs connecting the nodes. The graph is then forced into a lower dimensional representation that tries to represent the forces between the nodes accurately by minimizing the total energy of the attracting and repelling forces. } \section{Slots}{ \describe{ \item{\code{fun}}{A function that does the embedding and returns a dimRedResult object.} \item{\code{stdpars}}{The standard parameters for the function.} }} \section{General usage}{ Dimensionality reduction methods are S4 Classes that can either be used directly, in which case they have to be initialized and a full list with parameters has to be handed to the \code{@fun()} slot, or the method name can be passed to the embed function and parameters can be given to the \code{...}, in which case missing parameters will be replaced by the ones in the \code{@stdpars}.
} \section{Parameters}{ KamadaKawai can take the following parameters: \describe{ \item{ndim}{The number of dimensions, defaults to 2. Can only be 2 or 3} \item{knn}{Reduce the graph to keep only the neares neighbors. Defaults to 100.} \item{d}{The distance function to determine the weights of the graph edges. Defaults to euclidean distances.} } } \section{Implementation}{ Wraps around \code{\link[igraph]{layout_with_kk}}. The parameters maxiter, epsilon and kkconst are set to the default values and cannot be set, this may change in a future release. The DimRed Package adds an extra sparsity parameter by constructing a knn graph which also may improve visualization quality. } \examples{ dat <- loadDataSet("Swiss Roll", n = 200) kamada_kawai <- KamadaKawai() kk <- kamada_kawai@fun(dat, kamada_kawai@stdpars) plot(kk@data@data) } \references{ Kamada, T., Kawai, S., 1989. An algorithm for drawing general undirected graphs. Information Processing Letters 31, 7-15. https://doi.org/10.1016/0020-0190(89)90102-6 } \seealso{ Other dimensionality reduction methods: \code{\link{AutoEncoder-class}}, \code{\link{DRR-class}}, \code{\link{DiffusionMaps-class}}, \code{\link{DrL-class}}, \code{\link{FastICA-class}}, \code{\link{FruchtermanReingold-class}}, \code{\link{HLLE-class}}, \code{\link{Isomap-class}}, \code{\link{LLE-class}}, \code{\link{MDS-class}}, \code{\link{NNMF-class}}, \code{\link{PCA-class}}, \code{\link{PCA_L1-class}}, \code{\link{UMAP-class}}, \code{\link{dimRedMethod-class}}, \code{\link{dimRedMethodList}}, \code{\link{kPCA-class}}, \code{\link{nMDS-class}}, \code{\link{tSNE-class}} } \concept{dimensionality reduction methods} dimRed/man/mean_R_NX-dimRedResult-method.Rd0000644000176200001440000000222013371631672020212 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/quality.R \docType{methods} \name{mean_R_NX,dimRedResult-method} \alias{mean_R_NX,dimRedResult-method} \alias{mean_R_NX} \title{Method mean_R_NX} \usage{ \S4method{mean_R_NX}{dimRedResult}(object) } \arguments{ \item{object}{of class dimRedResult} } \description{ Calculate the mean_R_NX score to assess the quality of a dimensionality reduction. } \seealso{ Other Quality scores for dimensionality reduction: \code{\link{AUC_lnK_R_NX,dimRedResult-method}}, \code{\link{LCMC,dimRedResult-method}}, \code{\link{Q_NX,dimRedResult-method}}, \code{\link{Q_global,dimRedResult-method}}, \code{\link{Q_local,dimRedResult-method}}, \code{\link{R_NX,dimRedResult-method}}, \code{\link{cophenetic_correlation,dimRedResult-method}}, \code{\link{distance_correlation,dimRedResult-method}}, \code{\link{plot_R_NX}}, \code{\link{quality,dimRedResult-method}}, \code{\link{reconstruction_error,dimRedResult-method}}, \code{\link{reconstruction_rmse,dimRedResult-method}}, \code{\link{total_correlation,dimRedResult-method}} } \concept{Quality scores for dimensionality reduction} dimRed/man/LCMC-dimRedResult-method.Rd0000644000176200001440000000226013371631672017126 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/quality.R \docType{methods} \name{LCMC,dimRedResult-method} \alias{LCMC,dimRedResult-method} \alias{LCMC} \title{Method LCMC} \usage{ \S4method{LCMC}{dimRedResult}(object) } \arguments{ \item{object}{of class dimRedResult} } \description{ Calculates the Local Continuity Meta Criterion, which is \code{\link{Q_NX}} adjusted for random overlap inside the K-ary neighborhood. 
} \seealso{ Other Quality scores for dimensionality reduction: \code{\link{AUC_lnK_R_NX,dimRedResult-method}}, \code{\link{Q_NX,dimRedResult-method}}, \code{\link{Q_global,dimRedResult-method}}, \code{\link{Q_local,dimRedResult-method}}, \code{\link{R_NX,dimRedResult-method}}, \code{\link{cophenetic_correlation,dimRedResult-method}}, \code{\link{distance_correlation,dimRedResult-method}}, \code{\link{mean_R_NX,dimRedResult-method}}, \code{\link{plot_R_NX}}, \code{\link{quality,dimRedResult-method}}, \code{\link{reconstruction_error,dimRedResult-method}}, \code{\link{reconstruction_rmse,dimRedResult-method}}, \code{\link{total_correlation,dimRedResult-method}} } \concept{Quality scores for dimensionality reduction} dimRed/man/FruchtermanReingold-class.Rd0000644000176200001440000000506313371631672017602 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/graph_embed.R \docType{class} \name{FruchtermanReingold-class} \alias{FruchtermanReingold-class} \alias{FruchtermanReingold} \title{Fruchterman Reingold Graph Layout} \description{ An S4 Class implementing the Fruchterman Reingold Graph Layout algorithm. } \section{Slots}{ \describe{ \item{\code{fun}}{A function that does the embedding and returns a dimRedResult object.} \item{\code{stdpars}}{The standard parameters for the function.} }} \section{General usage}{ Dimensionality reduction methods are S4 Classes that either be used directly, in which case they have to be initialized and a full list with parameters has to be handed to the \code{@fun()} slot, or the method name be passed to the embed function and parameters can be given to the \code{...}, in which case missing parameters will be replaced by the ones in the \code{@stdpars}. } \section{Parameters}{ \describe{ \item{ndim}{The number of dimensions, defaults to 2. Can only be 2 or 3} \item{knn}{Reduce the graph to keep only the neares neighbors. Defaults to 100.} \item{d}{The distance function to determine the weights of the graph edges. Defaults to euclidean distances.} } } \section{Implementation}{ Wraps around \code{\link[igraph]{layout_with_fr}}, see there for details. The Fruchterman Reingold algorithm puts the data into a circle and puts connected points close to each other. } \examples{ dat <- loadDataSet("Swiss Roll", n = 100) ## use the S4 Class directly: fruchterman_reingold <- FruchtermanReingold() pars <- fruchterman_reingold@stdpars pars$knn <- 5 emb <- fruchterman_reingold@fun(dat, pars) ## simpler, use embed(): emb2 <- embed(dat, "FruchtermanReingold", knn = 5) plot(emb, type = "2vars") } \references{ Fruchterman, T.M.J., Reingold, E.M., 1991. Graph drawing by force-directed placement. Softw: Pract. Exper. 21, 1129-1164. 
https://doi.org/10.1002/spe.4380211102 } \seealso{ Other dimensionality reduction methods: \code{\link{AutoEncoder-class}}, \code{\link{DRR-class}}, \code{\link{DiffusionMaps-class}}, \code{\link{DrL-class}}, \code{\link{FastICA-class}}, \code{\link{HLLE-class}}, \code{\link{Isomap-class}}, \code{\link{KamadaKawai-class}}, \code{\link{LLE-class}}, \code{\link{MDS-class}}, \code{\link{NNMF-class}}, \code{\link{PCA-class}}, \code{\link{PCA_L1-class}}, \code{\link{UMAP-class}}, \code{\link{dimRedMethod-class}}, \code{\link{dimRedMethodList}}, \code{\link{kPCA-class}}, \code{\link{nMDS-class}}, \code{\link{tSNE-class}} } \concept{dimensionality reduction methods} dimRed/man/tSNE-class.Rd0000644000176200001440000000561513371631672014454 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/tsne.R \docType{class} \name{tSNE-class} \alias{tSNE-class} \alias{tSNE} \title{t-Distributed Stochastic Neighborhood Embedding} \description{ An S4 Class for t-SNE. } \details{ t-SNE is a method that uses Kullback-Leibler divergence between the distance matrices in high and low-dimensional space to embed the data. The method is very well suited to visualize complex structures in low dimensions. } \section{Slots}{ \describe{ \item{\code{fun}}{A function that does the embedding and returns a dimRedResult object.} \item{\code{stdpars}}{The standard parameters for the function.} }} \section{General usage}{ Dimensionality reduction methods are S4 Classes that either be used directly, in which case they have to be initialized and a full list with parameters has to be handed to the \code{@fun()} slot, or the method name be passed to the embed function and parameters can be given to the \code{...}, in which case missing parameters will be replaced by the ones in the \code{@stdpars}. } \section{Parameters}{ t-SNE can take the following parameters: \describe{ \item{d}{A distance function, defaults to euclidean distances} \item{perplexity}{The perplexity parameter, roughly equivalent to neighborhood size.} \item{theta}{Approximation for the nearest neighbour search, large values are more inaccurate.} \item{ndim}{The number of embedding dimensions.} } } \section{Implementation}{ Wraps around \code{\link[Rtsne]{Rtsne}}, which is very well documented. Setting \code{theta = 0} does a normal t-SNE, larger values for \code{theta < 1} use the Barnes-Hut algorithm which scales much nicer with data size. Larger values for perplexity take larger neighborhoods into account. } \examples{ \dontrun{ dat <- loadDataSet("3D S Curve", n = 300) ## using the S4 class directly: tsne <- tSNE() emb <- tsne@fun(dat, tsne@stdpars) ## using embed() emb2 <- embed(dat, "tSNE", perplexity = 80) plot(emb, type = "2vars") plot(emb2, type = "2vars") } } \references{ Maaten, L. van der, 2014. Accelerating t-SNE using Tree-Based Algorithms. Journal of Machine Learning Research 15, 3221-3245. van der Maaten, L., Hinton, G., 2008. Visualizing Data using t-SNE. J. Mach. Learn. Res. 9, 2579-2605. 
} \seealso{ Other dimensionality reduction methods: \code{\link{AutoEncoder-class}}, \code{\link{DRR-class}}, \code{\link{DiffusionMaps-class}}, \code{\link{DrL-class}}, \code{\link{FastICA-class}}, \code{\link{FruchtermanReingold-class}}, \code{\link{HLLE-class}}, \code{\link{Isomap-class}}, \code{\link{KamadaKawai-class}}, \code{\link{LLE-class}}, \code{\link{MDS-class}}, \code{\link{NNMF-class}}, \code{\link{PCA-class}}, \code{\link{PCA_L1-class}}, \code{\link{UMAP-class}}, \code{\link{dimRedMethod-class}}, \code{\link{dimRedMethodList}}, \code{\link{kPCA-class}}, \code{\link{nMDS-class}} } \concept{dimensionality reduction methods} dimRed/man/PCA_L1-class.Rd0000644000176200001440000000557413373523703014603 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/l1pca.R \docType{class} \name{PCA_L1-class} \alias{PCA_L1-class} \alias{PCA_L1} \title{Principal Component Analysis with L1 error.} \description{ S4 Class implementing PCA with L1 error. } \details{ PCA transforms the data so that the L2 reconstruction error is minimized or the variance of the projected data is maximized. This is sensitive to outliers, L1 PCA minimizes the L1 reconstruction error or maximizes the sum of the L1 norm of the projected observations. } \section{Slots}{ \describe{ \item{\code{fun}}{A function that does the embedding and returns a dimRedResult object.} \item{\code{stdpars}}{The standard parameters for the function.} }} \section{General usage}{ Dimensionality reduction methods are S4 Classes that either be used directly, in which case they have to be initialized and a full list with parameters has to be handed to the \code{@fun()} slot, or the method name be passed to the embed function and parameters can be given to the \code{...}, in which case missing parameters will be replaced by the ones in the \code{@stdpars}. } \section{Parameters}{ PCA can take the following parameters: \describe{ \item{ndim}{The number of output dimensions.} \item{center}{logical, should the data be centered, defaults to \code{TRUE}.} \item{scale.}{logical, should the data be scaled, defaults to \code{FALSE}.} \item{fun}{character or function, the method to apply, see the \code{pcaL1} package} \item{\ldots}{other parameters for \code{fun}} } } \section{Implementation}{ Wraps around the different methods is the \code{pcaL1} package. Because PCA can be reduced to a simple rotation, forward and backward projection functions are supplied. } \examples{ if(requireNamespace("pcaL1", quietly = TRUE)) { dat <- loadDataSet("Iris") ## using the S4 Class pca_l1 <- PCA_L1() emb <- pca_l1@fun(dat, pca_l1@stdpars) ## using embed() emb2 <- embed(dat, "PCA_L1") plot(emb, type = "2vars") plot(emb@inverse(emb@data), type = "3vars") } } \references{ Park, Y.W., Klabjan, D., 2016. Iteratively Reweighted Least Squares Algorithms for L1-Norm Principal Component Analysis, in: Data Mining (ICDM), 2016 IEEE 16th International Conference On. IEEE, pp. 430-438. 
} \seealso{ Other dimensionality reduction methods: \code{\link{AutoEncoder-class}}, \code{\link{DRR-class}}, \code{\link{DiffusionMaps-class}}, \code{\link{DrL-class}}, \code{\link{FastICA-class}}, \code{\link{FruchtermanReingold-class}}, \code{\link{HLLE-class}}, \code{\link{Isomap-class}}, \code{\link{KamadaKawai-class}}, \code{\link{LLE-class}}, \code{\link{MDS-class}}, \code{\link{NNMF-class}}, \code{\link{PCA-class}}, \code{\link{UMAP-class}}, \code{\link{dimRedMethod-class}}, \code{\link{dimRedMethodList}}, \code{\link{kPCA-class}}, \code{\link{nMDS-class}}, \code{\link{tSNE-class}} } \concept{dimensionality reduction methods} dimRed/man/Isomap-class.Rd0000644000176200001440000000600413371631672015064 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/isomap.R \docType{class} \name{Isomap-class} \alias{Isomap-class} \alias{Isomap} \title{Isomap embedding} \description{ An S4 Class implementing the Isomap Algorithm } \details{ The Isomap algorithm approximates a manifold using geodesic distances on a k nearest neighbor graph. Then classical scaling is performed on the resulting distance matrix. } \section{Slots}{ \describe{ \item{\code{fun}}{A function that does the embedding and returns a dimRedResult object.} \item{\code{stdpars}}{The standard parameters for the function.} }} \section{General usage}{ Dimensionality reduction methods are S4 Classes that either be used directly, in which case they have to be initialized and a full list with parameters has to be handed to the \code{@fun()} slot, or the method name be passed to the embed function and parameters can be given to the \code{...}, in which case missing parameters will be replaced by the ones in the \code{@stdpars}. } \section{Parameters}{ Isomap can take the following parameters: \describe{ \item{knn}{The number of nearest neighbors in the graph. Defaults to 50.} \item{ndim}{The number of embedding dimensions, defaults to 2.} \item{get_geod}{Should the geodesic distance matrix be kept, if \code{TRUE}, access it as \code{getOtherData(x)$geod}} } } \section{Implementation}{ The dimRed package uses its own implementation of Isomap which also comes with an out of sample extension (known as landmark Isomap). The default Isomap algorithm scales computationally not very well, the implementation here uses \code{\link[RANN]{nn2}} for a faster search of the nearest neighbors. If data are too large it may be useful to fit a subsample of the data and use the out-of-sample extension for the other points. } \examples{ dat <- loadDataSet("3D S Curve", n = 500) ## use the S4 Class directly: isomap <- Isomap() emb <- isomap@fun(dat, isomap@stdpars) ## or simpler, use embed(): samp <- sample(nrow(dat), size = 200) emb2 <- embed(dat[samp], "Isomap", .mute = NULL, knn = 10) emb3 <- emb2@apply(dat[-samp]) plot(emb2, type = "2vars") plot(emb3, type = "2vars") } \references{ Tenenbaum, J.B., Silva, V. de, Langford, J.C., 2000. A Global Geometric Framework for Nonlinear Dimensionality Reduction. Science 290, 2319-2323. 
https://doi.org/10.1126/science.290.5500.2319 } \seealso{ Other dimensionality reduction methods: \code{\link{AutoEncoder-class}}, \code{\link{DRR-class}}, \code{\link{DiffusionMaps-class}}, \code{\link{DrL-class}}, \code{\link{FastICA-class}}, \code{\link{FruchtermanReingold-class}}, \code{\link{HLLE-class}}, \code{\link{KamadaKawai-class}}, \code{\link{LLE-class}}, \code{\link{MDS-class}}, \code{\link{NNMF-class}}, \code{\link{PCA-class}}, \code{\link{PCA_L1-class}}, \code{\link{UMAP-class}}, \code{\link{dimRedMethod-class}}, \code{\link{dimRedMethodList}}, \code{\link{kPCA-class}}, \code{\link{nMDS-class}}, \code{\link{tSNE-class}} } \concept{dimensionality reduction methods} dimRed/man/mixColorRamps.Rd0000644000176200001440000000237513065033470015327 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/mixColorSpaces.R \name{mixColorRamps} \alias{mixColorRamps} \alias{mixColor1Ramps} \alias{mixColor2Ramps} \alias{mixColor3Ramps} \title{Mixing color ramps} \usage{ mixColorRamps(vars, ramps) mixColor1Ramps(vars, ramps = colorRamp(c("blue", "black", "red"))) mixColor2Ramps(vars, ramps = list(colorRamp(c("blue", "green")), colorRamp(c("blue", "red")))) mixColor3Ramps(vars, ramps = list(colorRamp(c("#001A00", "#00E600")), colorRamp(c("#00001A", "#0000E6")), colorRamp(c("#1A0000", "#E60000")))) } \arguments{ \item{vars}{a list of variables} \item{ramps}{a list of color ramps, one for each variable.} } \description{ mix different color ramps } \details{ automatically create colors to represent a varying number of dimensions. } \examples{ cols <- expand.grid(x = seq(0, 1, length.out = 10), y = seq(0, 1, length.out = 10), z = seq(0, 1, length.out = 10)) mixed <- mixColor3Ramps(cols) \dontrun{ library(rgl) plot3d(cols$x, cols$y, cols$z, col = mixed, pch = 15) cols <- expand.grid(x = seq(0, 1, length.out = 10), y = seq(0, 1, length.out = 10)) mixed <- mixColor2Ramps(cols) } plot(cols$x, cols$y, col = mixed, pch = 15) } dimRed/man/reconstruction_error-dimRedResult-method.Rd0000644000176200001440000000414013371631672022701 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/quality.R \docType{methods} \name{reconstruction_error,dimRedResult-method} \alias{reconstruction_error,dimRedResult-method} \alias{reconstruction_error} \title{Method reconstruction_error} \usage{ \S4method{reconstruction_error}{dimRedResult}(object, n = seq_len(ndims(object)), error_fun = "rmse") } \arguments{ \item{object}{of class dimRedResult} \item{n}{a positive integer or vector of integers \code{<= ndims(object)}} \item{error_fun}{a function or string indicating an error function, if indicating a function, it must take two matrices of the same size and return a scalar.} } \value{ a vector of numbers with the same length as \code{n} containing the error values } \description{ Calculate the error using only the first \code{n} dimensions of the embedded data. \code{error_fun} can either be one of \code{c("rmse", "mae")} to calculate the root mean square error or the mean absolute error respectively, or a function that takes two equally sized vectors as input and returns a single number as output.
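For example (a sketch; \code{med_abs_err} is a hypothetical helper, any function with this signature works, and the method used must provide an inverse mapping):
\preformatted{
## sketch: median absolute error as a custom error_fun
med_abs_err <- function(x, y) median(abs(x - y))
emb <- embed(loadDataSet("Iris"), "PCA", ndim = 4)
reconstruction_error(emb, n = 1:4, error_fun = med_abs_err)
}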
} \examples{ \dontrun{ ir <- loadDataSet("Iris") ir.drr <- embed(ir, "DRR", ndim = ndims(ir)) ir.pca <- embed(ir, "PCA", ndim = ndims(ir)) rmse <- data.frame( rmse_drr = reconstruction_error(ir.drr), rmse_pca = reconstruction_error(ir.pca) ) matplot(rmse, type = "l") plot(ir) plot(ir.drr) plot(ir.pca) } } \seealso{ Other Quality scores for dimensionality reduction: \code{\link{AUC_lnK_R_NX,dimRedResult-method}}, \code{\link{LCMC,dimRedResult-method}}, \code{\link{Q_NX,dimRedResult-method}}, \code{\link{Q_global,dimRedResult-method}}, \code{\link{Q_local,dimRedResult-method}}, \code{\link{R_NX,dimRedResult-method}}, \code{\link{cophenetic_correlation,dimRedResult-method}}, \code{\link{distance_correlation,dimRedResult-method}}, \code{\link{mean_R_NX,dimRedResult-method}}, \code{\link{plot_R_NX}}, \code{\link{quality,dimRedResult-method}}, \code{\link{reconstruction_rmse,dimRedResult-method}}, \code{\link{total_correlation,dimRedResult-method}} } \author{ Guido Kraemer } \concept{Quality scores for dimensionality reduction} dimRed/man/dimRedMethodList.Rd0000644000176200001440000000225413371631672015735 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/dimRedMethod-class.R \name{dimRedMethodList} \alias{dimRedMethodList} \title{dimRedMethodList} \usage{ dimRedMethodList() } \value{ a character vector with the names of classes that inherit from \code{dimRedMethod}. } \description{ Get the names of all methods for dimensionality reduction. } \details{ Returns the name of all classes that inherit from \code{\link{dimRedMethod-class}} to use with \code{\link{embed}}. } \examples{ dimRedMethodList() } \seealso{ Other dimensionality reduction methods: \code{\link{AutoEncoder-class}}, \code{\link{DRR-class}}, \code{\link{DiffusionMaps-class}}, \code{\link{DrL-class}}, \code{\link{FastICA-class}}, \code{\link{FruchtermanReingold-class}}, \code{\link{HLLE-class}}, \code{\link{Isomap-class}}, \code{\link{KamadaKawai-class}}, \code{\link{LLE-class}}, \code{\link{MDS-class}}, \code{\link{NNMF-class}}, \code{\link{PCA-class}}, \code{\link{PCA_L1-class}}, \code{\link{UMAP-class}}, \code{\link{dimRedMethod-class}}, \code{\link{kPCA-class}}, \code{\link{nMDS-class}}, \code{\link{tSNE-class}} } \concept{dimensionality reduction methods} dimRed/man/AutoEncoder-class.Rd0000644000176200001440000001271013371631672016045 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/autoencoder.R \docType{class} \name{AutoEncoder-class} \alias{AutoEncoder-class} \alias{AutoEncoder} \title{AutoEncoder} \description{ An S4 Class implementing an Autoencoder } \details{ Autoencoders are neural networks that try to reproduce their input. Consider this method unstable, as the internals may still be changed. } \section{Slots}{ \describe{ \item{\code{fun}}{A function that does the embedding and returns a dimRedResult object.} \item{\code{stdpars}}{The standard parameters for the function.} }} \section{General usage}{ Dimensionality reduction methods are S4 Classes that either be used directly, in which case they have to be initialized and a full list with parameters has to be handed to the \code{@fun()} slot, or the method name be passed to the embed function and parameters can be given to the \code{...}, in which case missing parameters will be replaced by the ones in the \code{@stdpars}. 
} \section{Parameters}{ Autoencoder can take the following parameters: \describe{ \item{ndim}{The number of dimensions for reduction.} \item{n_hidden}{The number of neurons in the hidden layers, the length specifies the number of layers, the length must be odd, the middle number must be the same as ndim.} \item{activation}{The activation functions for the layers, one of "tanh", "sigmoid", "relu", "elu", everything else will silently be ignored and there will be no activation function for the layer.} \item{weight_decay}{the coefficient for weight decay, set to 0 if no weight decay is desired.} \item{learning_rate}{The learning rate for gradient descent} \item{graph}{Optional: A list of bits and pieces that define the autoencoder in tensorflow, see details.} \item{keras_graph}{Optional: A list of keras layers that define the encoder and decoder, specifying this, will ignore all other topology related variables, see details.} \item{batchsize}{If NA, all data will be used for training, else only a random subset of size batchsize will be used} \item{n_steps}{the number of training steps.} } } \section{Details}{ There are several ways to specify an autoencoder, the simplest is to pass the number of neurons per layer in \code{n_hidden}, this must be a vector of integers of odd length and it must be symmetric and the middle number must be equal to \code{ndim}. For every layer an activation function can be specified with \code{activation}. For regularization weight decay can be specified by setting \code{weight_decay} > 0. Currently only a gradient descent optimizer is used, the learning rate can be specified by setting \code{learning_rate}. The learner can operate on batches if \code{batchsize} is not \code{NA}. The number of steps the learner uses is specified using \code{n_steps}. } \section{Further training a model}{ If the model did not converge in the first training phase or training with different data is desired, the \code{\link{dimRedResult}} object may be passed as \code{autoencoder} parameter; In this case all topology related parameters will be ignored. } \section{Using Keras layers}{ The encoder and decoder part can be specified using a list of \pkg{keras} layers. This requires a list with two entries, \code{encoder} should contain a LIST of keras layers WITHOUT the \code{\link[keras]{layer_input}} that will be concatenated in order to form the encoder part. \code{decoder} should be defined accordingly, the output of \code{decoder} must have the same number of dimensions as the input data. } \section{Using Tensorflow}{ The model can be entirely defined in \pkg{tensorflow}, it must contain a list with the following entries: \describe{ \item{encoder}{A tensor that defines the encoder.} \item{decoder}{A tensor that defines the decoder.} \item{network}{A tensor that defines the reconstruction (encoder + decoder).} \item{loss}{A tensor that calculates the loss (network + loss function).} \item{in_data}{A \code{placeholder} that points to the data input of the network AND the encoder.} \item{in_decoder}{A \code{placeholder} that points to the input of the decoder.} \item{session}{A \pkg{tensorflow} \code{Session} object that holds the values of the tensors.} } } \section{Implementation}{ Uses \pkg{tensorflow} as a backend, for details and problems relating to tensorflow, see \url{https://tensorflow.rstudio.com}.
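As an illustration (a sketch only; it assumes a working tensorflow installation, and the topology and number of steps below are arbitrary), the topology-related parameters map onto \code{embed()} like this:
\preformatted{
## sketch: a 10-2-10 autoencoder with tanh activations, trained briefly
dat <- loadDataSet("3D S Curve", n = 300)
emb <- embed(dat, "AutoEncoder",
             ndim       = 2,
             n_hidden   = c(10, 2, 10),            # odd length, middle == ndim
             activation = c("tanh", "lin", "tanh"), # "lin" -> no activation
             n_steps    = 500)
plot(emb, type = "2vars")
}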
} \examples{ \dontrun{ dat <- loadDataSet("3D S Curve") ## use the S4 Class directly: autoenc <- AutoEncoder() emb <- autoenc@fun(dat, autoenc@stdpars) ## simpler, use embed(): emb2 <- embed(dat, "AutoEncoder") plot(emb, type = "2vars") samp <- sample(floor(nrow(dat) / 10)) embsamp <- autoenc@fun(dat[samp], autoenc@stdpars) embother <- embsamp@apply(dat[-samp]) plot(embsamp, type = "2vars") points(embother@data) } } \seealso{ Other dimensionality reduction methods: \code{\link{DRR-class}}, \code{\link{DiffusionMaps-class}}, \code{\link{DrL-class}}, \code{\link{FastICA-class}}, \code{\link{FruchtermanReingold-class}}, \code{\link{HLLE-class}}, \code{\link{Isomap-class}}, \code{\link{KamadaKawai-class}}, \code{\link{LLE-class}}, \code{\link{MDS-class}}, \code{\link{NNMF-class}}, \code{\link{PCA-class}}, \code{\link{PCA_L1-class}}, \code{\link{UMAP-class}}, \code{\link{dimRedMethod-class}}, \code{\link{dimRedMethodList}}, \code{\link{kPCA-class}}, \code{\link{nMDS-class}}, \code{\link{tSNE-class}} } \concept{dimensionality reduction methods} dimRed/man/R_NX-dimRedResult-method.Rd0000644000176200001440000000250713371631672017222 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/quality.R \docType{methods} \name{R_NX,dimRedResult-method} \alias{R_NX,dimRedResult-method} \alias{R_NX} \title{Method R_NX} \usage{ \S4method{R_NX}{dimRedResult}(object, ndim = getNDim(object)) } \arguments{ \item{object}{of class dimRedResult} \item{ndim}{the number of dimensions to take from the embedded data.} } \description{ Calculate the R_NX score from Lee et. al. (2013) which shows the neighborhood preservation for the Kth nearest neighbors, corrected for random point distributions and scaled to range [0, 1]. } \seealso{ Other Quality scores for dimensionality reduction: \code{\link{AUC_lnK_R_NX,dimRedResult-method}}, \code{\link{LCMC,dimRedResult-method}}, \code{\link{Q_NX,dimRedResult-method}}, \code{\link{Q_global,dimRedResult-method}}, \code{\link{Q_local,dimRedResult-method}}, \code{\link{cophenetic_correlation,dimRedResult-method}}, \code{\link{distance_correlation,dimRedResult-method}}, \code{\link{mean_R_NX,dimRedResult-method}}, \code{\link{plot_R_NX}}, \code{\link{quality,dimRedResult-method}}, \code{\link{reconstruction_error,dimRedResult-method}}, \code{\link{reconstruction_rmse,dimRedResult-method}}, \code{\link{total_correlation,dimRedResult-method}} } \concept{Quality scores for dimensionality reduction} dimRed/man/DRR-class.Rd0000644000176200001440000001104113371631672014260 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/drr.R \docType{class} \name{DRR-class} \alias{DRR-class} \alias{DRR} \title{Dimensionality Reduction via Regression} \description{ An S4 Class implementing Dimensionality Reduction via Regression (DRR). } \details{ DRR is a non-linear extension of PCA that uses Kernel Ridge regression. } \section{Slots}{ \describe{ \item{\code{fun}}{A function that does the embedding and returns a dimRedResult object.} \item{\code{stdpars}}{The standard parameters for the function.} }} \section{General usage}{ Dimensionality reduction methods are S4 Classes that either be used directly, in which case they have to be initialized and a full list with parameters has to be handed to the \code{@fun()} slot, or the method name be passed to the embed function and parameters can be given to the \code{...}, in which case missing parameters will be replaced by the ones in the \code{@stdpars}. 
} \section{Parameters}{ DRR can take the following parameters: \describe{ \item{ndim}{The number of dimensions} \item{lambda}{The regularization parameter for the ridge regression.} \item{kernel}{The kernel to use for KRR, defaults to \code{"rbfdot"}.} \item{kernel.pars}{A list with kernel parameters, elements depend on the kernel used, \code{"rbfdot"} uses \code{"sigma"}.} \item{pca}{logical, should an initial pca step be performed, defaults to \code{TRUE}.} \item{pca.center}{logical, should the data be centered before the pca step. Defaults to \code{TRUE}.} \item{pca.scale}{logical, should the data be scaled before the pca step. Defaults to \code{FALSE}.} \item{fastcv}{logical, should \code{\link[CVST]{fastCV}} from the CVST package be used instead of normal cross-validation.} \item{fastcv.test}{If \code{fastcv = TRUE}, separate test data set for fastcv.} \item{cv.folds}{if \code{fastcv = FALSE}, specifies the number of folds for cross-validation.} \item{fastkrr.nblocks}{integer, higher values sacrifice numerical accuracy for speed and less memory, see below for details.} \item{verbose}{logical, should the cross-validation results be printed out.} } } \section{Implementation}{ Wraps around \code{\link[DRR]{drr}}, see there for details. DRR is a non-linear extension of principal components analysis using Kernel Ridge Regression (KRR, for details see \code{\link[CVST]{constructKRRLearner}} and \code{\link[DRR]{constructFastKRRLearner}}). Non-linear regression is used to explain more variance than PCA. DRR provides an out-of-sample extension and a backward projection. The most expensive computations are matrix inversions, therefore the implementation profits a lot from a multithreaded BLAS library. The best parameters for each KRR are determined by cross-validation over all parameter combinations of \code{lambda} and \code{kernel.pars}, using fewer parameter values will speed up computation time. Calculation of KRR can be accelerated by increasing \code{fastkrr.nblocks}, it should be smaller than n^{1/3} up to sacrificing some accuracy, for details see \code{\link[DRR]{constructFastKRRLearner}}. Another way to speed up is to use \code{pars$fastcv = TRUE} which might provide a more efficient way to search the parameter space but may also miss the global maximum, I have not run tests on the accuracy of this method. } \examples{ \dontrun{ dat <- loadDataSet("variable Noise Helix", n = 200)[sample(200)] ## use the S4 Class directly: drr <- DRR() pars <- drr@stdpars pars$ndim <- 3 emb <- drr@fun(dat, pars) ## simpler, use embed(): emb2 <- embed(dat, "DRR", ndim = 3) plot(dat, type = "3vars") plot(emb, type = "3vars") plot(emb@inverse(emb@data@data[, 1, drop = FALSE]), type = "3vars") } } \references{ Laparra, V., Malo, J., Camps-Valls, G., 2015. Dimensionality Reduction via Regression in Hyperspectral Imagery. IEEE Journal of Selected Topics in Signal Processing 9, 1026-1036.
doi:10.1109/JSTSP.2015.2417833 } \seealso{ Other dimensionality reduction methods: \code{\link{AutoEncoder-class}}, \code{\link{DiffusionMaps-class}}, \code{\link{DrL-class}}, \code{\link{FastICA-class}}, \code{\link{FruchtermanReingold-class}}, \code{\link{HLLE-class}}, \code{\link{Isomap-class}}, \code{\link{KamadaKawai-class}}, \code{\link{LLE-class}}, \code{\link{MDS-class}}, \code{\link{NNMF-class}}, \code{\link{PCA-class}}, \code{\link{PCA_L1-class}}, \code{\link{UMAP-class}}, \code{\link{dimRedMethod-class}}, \code{\link{dimRedMethodList}}, \code{\link{kPCA-class}}, \code{\link{nMDS-class}}, \code{\link{tSNE-class}} } \concept{dimensionality reduction methods} dimRed/man/print.Rd0000644000176200001440000000047713065033470013665 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/misc.R \name{print} \alias{print} \title{Method print} \usage{ print(x, ...) } \arguments{ \item{x}{The object to be printed.} \item{...}{Other arguments for printing.} } \description{ Imports the print method into the package namespace. } dimRed/man/quality.Rd0000644000176200001440000001107013371631672014220 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/quality.R \docType{methods} \name{quality,dimRedResult-method} \alias{quality,dimRedResult-method} \alias{quality} \alias{quality.dimRedResult} \alias{dimRedQualityList} \title{Quality Criteria for dimensionality reduction.} \usage{ \S4method{quality}{dimRedResult}(.data, .method = dimRedQualityList(), .mute = character(0), ...) dimRedQualityList() } \arguments{ \item{.data}{object of class \code{dimRedResult}} \item{.method}{character vector naming one of the methods} \item{.mute}{what output from the embedding method should be muted.} \item{...}{the parameters, internally passed as a list to the quality method as \code{pars = list(...)}} } \value{ a number } \description{ A collection of functions to compute quality measures on \code{\link{dimRedResult}} objects. } \section{Methods (by class)}{ \itemize{ \item \code{dimRedResult}: Calculate a quality index from a dimRedResult object. }} \section{Implemented methods}{ Method must be one of \code{"\link{Q_local}", "\link{Q_global}", "\link{mean_R_NX}", "\link{total_correlation}", "\link{cophenetic_correlation}", "\link{distance_correlation}", "\link{reconstruction_rmse}"} } \section{Rank based criteria}{ \code{Q_local}, \code{Q_global}, and \code{mean_R_nx} are quality criteria based on the Co-ranking matrix. \code{Q_local} and \code{Q_global} determine the local/global quality of the embedding, while \code{mean_R_nx} determines the quality of the overall embedding. They are parameter free and return a single number. The object must include the original data. The number returned is in the range [0, 1], higher values mean a better local/global embedding. } \section{Correlation based criteria}{ \code{total_correlation} calculates the sum of the mean squared correlations of the original axes with the axes in reduced dimensions, because some methods do not care about correlations with axes, there is an option to rotate data in reduced space to maximize this criterion. The number may be greater than one if more dimensions are summed up. \code{cophenetic_correlation} calculates the correlation between the lower triangles of distance matrices, the correlation and distance methods may be specified. The result is in range [-1, 1].
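For instance (a minimal sketch; any of the implemented method names can be substituted), individual criteria can be computed either through \code{quality()} or by calling the criterion directly:
\preformatted{
## sketch: two ways to compute the same quality scores
emb <- embed(loadDataSet("Iris"), "PCA")
quality(emb, "Q_local")
Q_local(emb)
quality(emb, "cophenetic_correlation")
}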
\section{Correlation based criteria}{ \code{total_correlation} calculates the sum of the mean squared correlations of the original axes with the axes in reduced dimensions. Because some methods do not care about correlations with axes, there is an option to rotate the data in reduced space to maximize this criterion. The number may be greater than one if more dimensions are summed up. \code{cophenetic_correlation} calculates the correlation between the lower triangles of the distance matrices; the correlation and distance methods may be specified. The result is in the range [-1, 1]. \code{distance_correlation} measures the independence of samples by calculating the correlation of distances. For details see \code{\link[energy]{dcor}}. } \section{Reconstruction error}{ \code{reconstruction_rmse} calculates the root mean squared error of the reconstruction. \code{object} requires an inverse function. } \examples{ \dontrun{ embed_methods <- dimRedMethodList() quality_methods <- dimRedQualityList() scurve <- loadDataSet("Iris") quality_results <- matrix(NA, length(embed_methods), length(quality_methods), dimnames = list(embed_methods, quality_methods)) embedded_data <- list() for (e in embed_methods) { message("embedding: ", e) embedded_data[[e]] <- embed(scurve, e, .mute = c("message", "output")) for (q in quality_methods) { message(" quality: ", q) quality_results[e, q] <- tryCatch( quality(embedded_data[[e]], q), error = function (e) NA ) } } print(quality_results) } } \references{ Lueks, W., Mokbel, B., Biehl, M., Hammer, B., 2011. How to Evaluate Dimensionality Reduction? - Improving the Co-ranking Matrix. arXiv:1110.3917 [cs]. Szekely, G.J., Rizzo, M.L., Bakirov, N.K., 2007. Measuring and testing dependence by correlation of distances. Ann. Statist. 35, 2769-2794. doi:10.1214/009053607000000505 Lee, J.A., Peluffo-Ordonez, D.H., Verleysen, M., 2015. Multi-scale similarities in stochastic neighbour embedding: Reducing dimensionality while preserving both local and global structure. Neurocomputing, 169, 246-261. doi:10.1016/j.neucom.2014.12.095 } \seealso{ Other Quality scores for dimensionality reduction: \code{\link{AUC_lnK_R_NX,dimRedResult-method}}, \code{\link{LCMC,dimRedResult-method}}, \code{\link{Q_NX,dimRedResult-method}}, \code{\link{Q_global,dimRedResult-method}}, \code{\link{Q_local,dimRedResult-method}}, \code{\link{R_NX,dimRedResult-method}}, \code{\link{cophenetic_correlation,dimRedResult-method}}, \code{\link{distance_correlation,dimRedResult-method}}, \code{\link{mean_R_NX,dimRedResult-method}}, \code{\link{plot_R_NX}}, \code{\link{reconstruction_error,dimRedResult-method}}, \code{\link{reconstruction_rmse,dimRedResult-method}}, \code{\link{total_correlation,dimRedResult-method}} } \author{ Guido Kraemer } \concept{Quality scores for dimensionality reduction} dimRed/man/as.dimRedData.Rd0000644000176200001440000000176713371631672015140 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/misc.R, R/dimRedData-class.R \docType{methods} \name{as.dimRedData} \alias{as.dimRedData} \alias{as.dimRedData,formula-method} \title{Converts to dimRedData} \usage{ as.dimRedData(formula, ...) \S4method{as.dimRedData}{formula}(formula, data) } \arguments{ \item{formula}{The formula; the left hand side is assigned to the meta slot, the right hand side is assigned to the data slot.} \item{...}{other arguments.} \item{data}{Will be coerced into a \code{\link{data.frame}} with \code{\link{as.data.frame}}.} } \description{ Conversion functions to dimRedData.
} \section{Methods (by class)}{ \itemize{ \item \code{formula}: Convert a \code{data.frame} to a dimRedData object using a formula }} \examples{ ## create a dimRedData object using a formula as.dimRedData(Species ~ Sepal.Length + Sepal.Width + Petal.Length + Petal.Width, iris)[1:5] } \seealso{ Other dimRedData: \code{\link{dimRedData-class}} } \concept{dimRedData} dimRed/man/Q_global-dimRedResult-method.Rd0000644000176200001440000000221313371631672020126 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/quality.R \docType{methods} \name{Q_global,dimRedResult-method} \alias{Q_global,dimRedResult-method} \alias{Q_global} \title{Method Q_global} \usage{ \S4method{Q_global}{dimRedResult}(object) } \arguments{ \item{object}{of class dimRedResult} } \description{ Calculate the Q_global score to assess the quality of a dimensionality reduction. } \seealso{ Other Quality scores for dimensionality reduction: \code{\link{AUC_lnK_R_NX,dimRedResult-method}}, \code{\link{LCMC,dimRedResult-method}}, \code{\link{Q_NX,dimRedResult-method}}, \code{\link{Q_local,dimRedResult-method}}, \code{\link{R_NX,dimRedResult-method}}, \code{\link{cophenetic_correlation,dimRedResult-method}}, \code{\link{distance_correlation,dimRedResult-method}}, \code{\link{mean_R_NX,dimRedResult-method}}, \code{\link{plot_R_NX}}, \code{\link{quality,dimRedResult-method}}, \code{\link{reconstruction_error,dimRedResult-method}}, \code{\link{reconstruction_rmse,dimRedResult-method}}, \code{\link{total_correlation,dimRedResult-method}} } \concept{Quality scores for dimensionality reduction} dimRed/man/dimRedData-class.Rd0000644000176200001440000000602113371631672015631 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/dimRedData-class.R \docType{class} \name{dimRedData-class} \alias{dimRedData-class} \alias{dimRedData} \alias{as.data.frame,dimRedData-method} \alias{getData,dimRedData-method} \alias{getMeta,dimRedData-method} \alias{nrow,dimRedData-method} \alias{[,dimRedData,ANY,ANY,ANY-method} \alias{ndims,dimRedData-method} \title{Class "dimRedData"} \usage{ \S4method{as.data.frame}{dimRedData}(x, meta.prefix = "meta.", data.prefix = "") \S4method{getData}{dimRedData}(object) \S4method{getMeta}{dimRedData}(object) \S4method{nrow}{dimRedData}(x) \S4method{[}{dimRedData,ANY,ANY,ANY}(x, i) \S4method{ndims}{dimRedData}(object) } \arguments{ \item{x}{Of class dimRedData} \item{meta.prefix}{Prefix for the columns of the meta data names.} \item{data.prefix}{Prefix for the columns of the variable names.} \item{object}{Of class dimRedData.} \item{i}{a valid index for subsetting rows.} } \description{ A class to hold data for dimensionality reduction and methods. } \details{ The class has two slots, \code{data} and \code{meta}. The \code{data} slot contains a \code{numeric matrix} with variables in columns and observations in rows. The \code{meta} slot may contain a \code{data.frame} with additional information. Both slots need to have the same number of rows or the \code{meta} slot needs to contain an empty \code{data.frame}. See examples for easy conversion from and to \code{data.frame}. For plotting functions see \code{\link{plot.dimRedData}}. } \section{Methods (by generic)}{ \itemize{ \item \code{as.data.frame}: convert to data.frame \item \code{getData}: Get the data slot. \item \code{getMeta}: Get the meta slot. \item \code{nrow}: Get the number of observations. \item \code{[}: Subset rows. \item \code{ndims}: Extract the number of variables from the data. }}
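A short sketch of the \code{meta.prefix} and \code{data.prefix} arguments listed above; the chosen prefixes and the use of \code{iris} are arbitrary illustrations.

## Hypothetical example: control the column names produced by as.data.frame()
d <- dimRedData(data = iris[, 1:4], meta = iris[, 5])
head(as.data.frame(d, meta.prefix = "m.", data.prefix = "var."))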
\section{Slots}{ \describe{ \item{\code{data}}{of class \code{matrix}, holds the data, observations in rows, variables in columns} \item{\code{meta}}{of class \code{data.frame}, holds meta data such as classes, internal manifold coordinates, or simply additional data of the data set. Must have the same number of rows as the \code{data} slot or be an empty data frame.} }} \examples{ ## Load an example data set: s3d <- loadDataSet("3D S Curve") ## Create using a constructor: ### without meta information: dimRedData(iris[, 1:4]) ### with meta information: dimRedData(iris[, 1:4], iris[, 5]) ### using slot names: dimRedData(data = iris[, 1:4], meta = iris[, 5]) ## Convert to a dimRedData object: Iris <- as(iris[, 1:4], "dimRedData") ## Convert to data.frame: head(as(s3d, "data.frame")) head(as.data.frame(s3d)) head(as.data.frame(as(iris[, 1:4], "dimRedData"))) ## Extract slots: head(getData(s3d)) head(getMeta(s3d)) ## Get the number of observations: nrow(s3d) ## Subset: s3d[1:5, ] ## Shuffle data: s3 <- s3d[sample(nrow(s3d))] ## Get the number of variables: ndims(s3d) } \seealso{ Other dimRedData: \code{\link{as.dimRedData}} } \concept{dimRedData} dimRed/man/FastICA-class.Rd0000644000176200001440000000472313371631672015054 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/fastica.R \docType{class} \name{FastICA-class} \alias{FastICA-class} \alias{FastICA} \title{Independent Component Analysis} \description{ An S4 Class implementing the FastICA algorithm for Independent Component Analysis. } \details{ ICA is used for blind signal separation of different sources. It is a linear projection. } \section{Slots}{ \describe{ \item{\code{fun}}{A function that does the embedding and returns a dimRedResult object.} \item{\code{stdpars}}{The standard parameters for the function.} }} \section{General usage}{ Dimensionality reduction methods are S4 Classes that can either be used directly, in which case they have to be initialized and a full list with parameters has to be handed to the \code{@fun()} slot, or the method name can be passed to the embed function and parameters can be given to the \code{...}, in which case missing parameters will be replaced by the ones in the \code{@stdpars}. } \section{Parameters}{ FastICA can take the following parameters: \describe{ \item{ndim}{The number of output dimensions. Defaults to \code{2}.} } } \section{Implementation}{ Wraps around \code{\link[fastICA]{fastICA}}. FastICA uses a very fast approximation for negentropy to estimate statistical independence between signals. Because it is a simple rotation/projection, forward and backward functions can be given. } \examples{ dat <- loadDataSet("3D S Curve") ## use the S4 Class directly: fastica <- FastICA() emb <- fastica@fun(dat, pars = list(ndim = 2)) ## simpler, use embed(): emb2 <- embed(dat, "FastICA", ndim = 2) plot(emb@data@data) }
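Because forward and backward functions can be given (see the Implementation section above), a FastICA result can in principle project new points and map them back. This is only a sketch; it assumes the returned dimRedResult exposes working \code{@apply} and \code{@inverse} functions and that \code{@apply} accepts a dimRedData object.

## Hypothetical use of the forward/backward projections of a FastICA result:
dat <- loadDataSet("3D S Curve", n = 500)
emb <- embed(dat, "FastICA", ndim = 2)
fwd <- emb@apply(dat[1:5, ])      # forward-project a few points (assumed interface)
bwd <- emb@inverse(getData(fwd))  # back-project the coordinates, cf. the DRR example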
\references{ Hyvarinen, A., 1999. Fast and robust fixed-point algorithms for independent component analysis. IEEE Transactions on Neural Networks 10, 626-634. https://doi.org/10.1109/72.761722 } \seealso{ Other dimensionality reduction methods: \code{\link{AutoEncoder-class}}, \code{\link{DRR-class}}, \code{\link{DiffusionMaps-class}}, \code{\link{DrL-class}}, \code{\link{FruchtermanReingold-class}}, \code{\link{HLLE-class}}, \code{\link{Isomap-class}}, \code{\link{KamadaKawai-class}}, \code{\link{LLE-class}}, \code{\link{MDS-class}}, \code{\link{NNMF-class}}, \code{\link{PCA-class}}, \code{\link{PCA_L1-class}}, \code{\link{UMAP-class}}, \code{\link{dimRedMethod-class}}, \code{\link{dimRedMethodList}}, \code{\link{kPCA-class}}, \code{\link{nMDS-class}}, \code{\link{tSNE-class}} } \concept{dimensionality reduction methods} dimRed/man/plot.Rd0000644000176200001440000000370213371631672013511 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/plot.R \docType{methods} \name{plot} \alias{plot} \alias{plot.dimRed} \alias{plot,dimRedData,ANY-method} \alias{plot.dimRedData} \alias{plot,dimRedResult,ANY-method} \alias{plot.dimRedResult} \title{Plotting of dimRed* objects} \usage{ plot(x, y, ...) \S4method{plot}{dimRedData,ANY}(x, type = "pairs", vars = seq_len(ncol(x@data)), col = seq_len(min(3, ncol(x@meta))), ...) \S4method{plot}{dimRedResult,ANY}(x, type = "pairs", vars = seq_len(ncol(x@data@data)), col = seq_len(min(3, ncol(x@data@meta))), ...) } \arguments{ \item{x}{dimRedResult/dimRedData class, e.g. output of \code{embed}/\code{loadDataSet}} \item{y}{Ignored} \item{...}{handed over to the underlying plotting function.} \item{type}{plot type, one of \code{c("pairs", "parpl", "2vars", "3vars", "3varsrgl")}} \item{vars}{the axes of the embedding to use for plotting} \item{col}{the columns of the meta slot to use for coloring, can be referenced as the column names or number of x@data} } \description{ Plots an object of class dimRedResult or dimRedData. For the documentation of the plotting function in base see here: \code{\link{plot.default}}. } \details{ Plotting functions for the classes used in \code{dimRed}. They are intended to give a quick overview of the results, so they are somewhat inflexible, e.g. it is hard to modify color scales or plotting parameters. If you require more control over plotting, it is better to convert the object to a \code{data.frame} first and use the standard functions for plotting. } \section{Methods (by class)}{ \itemize{ \item \code{x = dimRedData,y = ANY}: Plotting of dimRedData objects \item \code{x = dimRedResult,y = ANY}: Plotting of dimRedResult objects. }} \examples{ scurve <- loadDataSet("3D S Curve") plot(scurve, type = "pairs", main = "pairs plot of S curve") plot(scurve, type = "parpl") plot(scurve, type = "2vars", vars = c("y", "z")) plot(scurve, type = "3vars") } dimRed/man/ndims.Rd0000644000176200001440000000050613065033470013634 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/misc.R \name{ndims} \alias{ndims} \title{Method ndims} \usage{ ndims(object, ...) } \arguments{ \item{object}{To extract the number of dimensions from.} \item{...}{Arguments for further methods} } \description{ Extract the number of dimensions. } dimRed/man/getMeta.Rd0000644000176200001440000000045113065033470014107 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/misc.R \name{getMeta} \alias{getMeta} \title{Method getMeta} \usage{ getMeta(object, ...) } \arguments{ \item{object}{The object to be converted.} \item{...}{other arguments.} } \description{ Extracts the meta slot.
} dimRed/man/plot_R_NX.Rd0000644000176200001440000000430113371631672014373 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/plot.R \name{plot_R_NX} \alias{plot_R_NX} \title{plot_R_NX} \usage{ plot_R_NX(x, ndim = NA, weight = "inv") } \arguments{ \item{x}{a list of \code{\link{dimRedResult}} objects. The names of the list will appear in the legend with the AUC_lnK value.} \item{ndim}{the number of dimensions, if \code{NA} the original number of embedding dimensions is used, can be a vector giving the embedding dimensionality for each single list element of \code{x}.} \item{weight}{the weight function used for K when calculating the AUC, one of \code{c("inv", "log", "log10")}} } \value{ A ggplot object; the design can be changed by appending \code{theme(...)}. } \description{ Plot the R_NX curve for different embeddings. Takes a list of \code{\link{dimRedResult}} objects as input. The area under the curve values are also computed for a weighted K (see \link{AUC_lnK_R_NX} for details) and appear in the legend. } \examples{ ## define which methods to apply embed_methods <- c("Isomap", "PCA") ## load test data set data_set <- loadDataSet("3D S Curve", n = 200) ## apply dimensionality reduction data_emb <- lapply(embed_methods, function(x) embed(data_set, x)) names(data_emb) <- embed_methods ## plot the R_NX curves: plot_R_NX(data_emb) + ggplot2::theme(legend.title = ggplot2::element_blank(), legend.position = c(0.5, 0.1), legend.justification = c(0.5, 0.1)) } \seealso{ Other Quality scores for dimensionality reduction: \code{\link{AUC_lnK_R_NX,dimRedResult-method}}, \code{\link{LCMC,dimRedResult-method}}, \code{\link{Q_NX,dimRedResult-method}}, \code{\link{Q_global,dimRedResult-method}}, \code{\link{Q_local,dimRedResult-method}}, \code{\link{R_NX,dimRedResult-method}}, \code{\link{cophenetic_correlation,dimRedResult-method}}, \code{\link{distance_correlation,dimRedResult-method}}, \code{\link{mean_R_NX,dimRedResult-method}}, \code{\link{quality,dimRedResult-method}}, \code{\link{reconstruction_error,dimRedResult-method}}, \code{\link{reconstruction_rmse,dimRedResult-method}}, \code{\link{total_correlation,dimRedResult-method}} } \concept{Quality scores for dimensionality reduction} dimRed/man/dataSets.Rd0000644000176200001440000000253413065033470014275 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/dataSets.R \name{dataSets} \alias{dataSets} \alias{loadDataSet} \alias{dataSetList} \title{Example Data Sets for dimensionality reduction} \usage{ loadDataSet(name = dataSetList(), n = 2000, sigma = 0.05) dataSetList() } \arguments{ \item{name}{A character vector that specifies the name of the data set.} \item{n}{In generated data sets the number of points to be generated, else ignored.} \item{sigma}{In generated data sets the standard deviation of the noise added, else ignored.} } \value{ \code{loadDataSet} returns an object of class \code{\link{dimRedData}}. \code{dataSetList()} returns a character vector with the names of the implemented data sets. } \description{ A compilation of standard data sets that are often used to showcase dimensionality reduction techniques. } \details{ The argument \code{name} should be one of \code{dataSetList()}. Partial matching is possible, see \code{\link{match.arg}}. Generated data sets contain the internal coordinates of the manifold in the \code{meta} slot. Call \code{dataSetList()} to see what data sets are available. }
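A minimal sketch of the \code{n} and \code{sigma} arguments and of the \code{meta} slot of a generated data set; the chosen data set and values are arbitrary.

## n and sigma only affect generated data sets such as the Swiss Roll:
sr <- loadDataSet("Swiss Roll", n = 500, sigma = 0.05)
head(getMeta(sr))  # internal manifold coordinates of the generated points
head(getData(sr))  # the corresponding three-dimensional coordinates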
\examples{ ## a list of available data sets: dataSetList() ## Load a data set: swissRoll <- loadDataSet("Swiss Roll") \donttest{plot(swissRoll, type = "3vars")} ## Load Iris data set, partial matching: loadDataSet("I") } dimRed/man/getData.Rd0000644000176200001440000000040613065033470014072 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/misc.R \name{getData} \alias{getData} \title{Method getData} \usage{ getData(object) } \arguments{ \item{object}{The object to be converted.} } \description{ Extracts the data slot. } dimRed/man/LaplacianEigenmaps-class.Rd0000644000176200001440000000420313065033470017340 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/leim.R \docType{class} \name{LaplacianEigenmaps-class} \alias{LaplacianEigenmaps-class} \alias{LaplacianEigenmaps} \title{Laplacian Eigenmaps} \description{ An S4 Class implementing Laplacian Eigenmaps. } \details{ Laplacian Eigenmaps use a kernel and were originally developed to separate non-convex clusters under the name spectral clustering. } \section{Slots}{ \describe{ \item{\code{fun}}{A function that does the embedding and returns a dimRedResult object.} \item{\code{stdpars}}{The standard parameters for the function.} }} \section{General usage}{ Dimensionality reduction methods are S4 Classes that can either be used directly, in which case they have to be initialized and a full list with parameters has to be handed to the \code{@fun()} slot, or the method name can be passed to the embed function and parameters can be given to the \code{...}, in which case missing parameters will be replaced by the ones in the \code{@stdpars}. } \section{Parameters}{ \code{LaplacianEigenmaps} can take the following parameters: \describe{ \item{ndim}{the number of output dimensions.} \item{sparse}{A character vector specifying how to make the graph sparse, \code{"knn"} means that a K-nearest neighbor graph is constructed, \code{"eps"} means that an epsilon neighborhood graph is constructed, else a dense distance matrix is used.} \item{knn}{The number of nearest neighbors to use for the knn graph.} \item{eps}{The distance for the epsilon neighborhood graph.} \item{t}{Parameter for the transformation of the distance matrix by \eqn{w=exp(-d^2/t)}, larger values give less weight to differences in distance, \code{t == Inf} treats all distances != 0 equally.} \item{norm}{logical, should the normed Laplacian be used?} } } \section{Implementation}{ Wraps around \code{\link[loe]{spec.emb}}. } \examples{ dat <- loadDataSet("3D S Curve") leim <- LaplacianEigenmaps() emb <- leim@fun(dat, leim@stdpars) plot(emb@data@data) } \references{ Belkin, M., Niyogi, P., 2003. Laplacian Eigenmaps for Dimensionality Reduction and Data Representation. Neural Computation 15, 1373. } dimRed/man/dimRedMethod-class.Rd0000644000176200001440000000343213371631672016203 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/dimRedMethod-class.R \docType{class} \name{dimRedMethod-class} \alias{dimRedMethod-class} \title{Class "dimRedMethod"} \description{ A virtual class "dimRedMethod" to serve as a template to implement methods for dimensionality reduction. } \details{ Implementations of dimensionality reductions should inherit from this class. The \code{fun} slot should be a function that takes three arguments \describe{ \item{data}{An object of class \code{\link{dimRedData}}.} \item{pars}{A list with the standard parameters.} \item{keep.org.data}{Logical.
If the original data should be kept in the output.} } and returns an object of class \code{\link{dimRedResult}}. The \code{stdpars} slot should take a list that contains standard parameters for the implemented methods. This way the method can be called by \code{embed(data, "method-name", ...)}, where \code{...} can be used to change single parameters. } \section{Slots}{ \describe{ \item{\code{fun}}{A function that does the embedding.} \item{\code{stdpars}}{A list with the default parameters for the \code{fun} slot.} }} \seealso{ Other dimensionality reduction methods: \code{\link{AutoEncoder-class}}, \code{\link{DRR-class}}, \code{\link{DiffusionMaps-class}}, \code{\link{DrL-class}}, \code{\link{FastICA-class}}, \code{\link{FruchtermanReingold-class}}, \code{\link{HLLE-class}}, \code{\link{Isomap-class}}, \code{\link{KamadaKawai-class}}, \code{\link{LLE-class}}, \code{\link{MDS-class}}, \code{\link{NNMF-class}}, \code{\link{PCA-class}}, \code{\link{PCA_L1-class}}, \code{\link{UMAP-class}}, \code{\link{dimRedMethodList}}, \code{\link{kPCA-class}}, \code{\link{nMDS-class}}, \code{\link{tSNE-class}} } \concept{dimensionality reduction methods} dimRed/man/kPCA-class.Rd0000644000176200001440000000521613371631672014416 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/kpca.R \docType{class} \name{kPCA-class} \alias{kPCA-class} \alias{kPCA} \title{Kernel PCA} \description{ An S4 Class implementing Kernel PCA. } \details{ Kernel PCA is a nonlinear extension of PCA using kernel methods. } \section{Slots}{ \describe{ \item{\code{fun}}{A function that does the embedding and returns a dimRedResult object.} \item{\code{stdpars}}{The standard parameters for the function.} }} \section{General usage}{ Dimensionality reduction methods are S4 Classes that can either be used directly, in which case they have to be initialized and a full list with parameters has to be handed to the \code{@fun()} slot, or the method name can be passed to the embed function and parameters can be given to the \code{...}, in which case missing parameters will be replaced by the ones in the \code{@stdpars}. } \section{Parameters}{ Kernel PCA can take the following parameters: \describe{ \item{ndim}{the number of output dimensions, defaults to 2} \item{kernel}{The kernel function, either as a function or a character vector with the name of the kernel. Defaults to \code{"rbfdot"}} \item{kpar}{A list with the parameters for the kernel function, defaults to \code{list(sigma = 0.1)}} } The most comprehensive collection of kernel functions can be found in \code{\link[kernlab]{kpca}}. In case the kernel function does not take any parameters, \code{kpar} has to be an empty list. } \section{Implementation}{ Wraps around \code{\link[kernlab]{kpca}}, but additionally provides forward and backward projections. } \examples{ \dontrun{ dat <- loadDataSet("3D S Curve") ## use the S4 class directly: kpca <- kPCA() emb <- kpca@fun(dat, kpca@stdpars) ## simpler, use embed(): emb2 <- embed(dat, "kPCA") plot(emb, type = "2vars") } } \references{ Sch\"olkopf, B., Smola, A., M\"uller, K.-R., 1998. Nonlinear Component Analysis as a Kernel Eigenvalue Problem. Neural Computation 10, 1299-1319.
https://doi.org/10.1162/089976698300017467 } \seealso{ Other dimensionality reduction methods: \code{\link{AutoEncoder-class}}, \code{\link{DRR-class}}, \code{\link{DiffusionMaps-class}}, \code{\link{DrL-class}}, \code{\link{FastICA-class}}, \code{\link{FruchtermanReingold-class}}, \code{\link{HLLE-class}}, \code{\link{Isomap-class}}, \code{\link{KamadaKawai-class}}, \code{\link{LLE-class}}, \code{\link{MDS-class}}, \code{\link{NNMF-class}}, \code{\link{PCA-class}}, \code{\link{PCA_L1-class}}, \code{\link{UMAP-class}}, \code{\link{dimRedMethod-class}}, \code{\link{dimRedMethodList}}, \code{\link{nMDS-class}}, \code{\link{tSNE-class}} } \concept{dimensionality reduction methods} dimRed/LICENSE0000644000176200001440000007674512772463050012514 0ustar liggesusersGNU GENERAL PUBLIC LICENSE Version 3, 29 June 2007 Copyright © 2007 Free Software Foundation, Inc. Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The GNU General Public License is a free, copyleft license for software and other kinds of works. The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use the GNU General Public License for most of our software; it applies also to any other work released this way by its authors. You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights. Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it: responsibilities to respect the freedom of others. For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. Developers that use the GNU GPL protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License giving you legal permission to copy, distribute and/or modify it. For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software. For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems will not be attributed erroneously to authors of previous versions. Some devices are designed to deny users access to install or run modified versions of the software inside them, although the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely where it is most unacceptable. 
Therefore, we have designed this version of the GPL to prohibit the practice for those products. If such problems arise substantially in other domains, we stand ready to extend this provision to those domains in future versions of the GPL, as needed to protect the freedom of users. Finally, every program is threatened constantly by software patents. States should not allow patents to restrict development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that patents cannot be used to render the program non-free. The precise terms and conditions for copying, distribution and modification follow. TERMS AND CONDITIONS 0. Definitions. “This License” refers to version 3 of the GNU General Public License. “Copyright” also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. “The Program” refers to any copyrightable work licensed under this License. Each licensee is addressed as “you”. “Licensees” and “recipients” may be individuals or organizations. To “modify” a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a “modified version” of the earlier work or a work “based on” the earlier work. A “covered work” means either the unmodified Program or a work based on the Program. To “propagate” a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. To “convey” a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. An interactive user interface displays “Appropriate Legal Notices” to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. 1. Source Code. The “source code” for a work means the preferred form of the work for making modifications to it. “Object code” means any non-source form of a work. A “Standard Interface” means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. The “System Libraries” of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. 
A “Major Component”, in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. The “Corresponding Source” for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. The Corresponding Source for a work in source code form is that same work. 2. Basic Permissions. All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. 3. Protecting Users' Legal Rights From Anti-Circumvention Law. No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. 4. Conveying Verbatim Copies. 
You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. 5. Conveying Modified Source Versions. You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: a) The work must carry prominent notices stating that you modified it, and giving a relevant date. b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to “keep intact all notices”. c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an “aggregate” if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. 6. Conveying Non-Source Forms. You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. 
c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. A “User Product” is either (1) a “consumer product”, which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, “normally used” refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. “Installation Information” for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). 
The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. 7. Additional Terms. “Additional permissions” are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or d) Limiting the use for publicity purposes of names of licensors or authors of the material; or e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. All other non-permissive additional terms are considered “further restrictions” within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. 
If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. 8. Termination. You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. 9. Acceptance Not Required for Having Copies. You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. 10. Automatic Licensing of Downstream Recipients. Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. An “entity transaction” is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. 
For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. 11. Patents. A “contributor” is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. The work thus licensed is called the contributor's “contributor version”. A contributor's “essential patent claims” are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, “control” includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. In the following three paragraphs, a “patent license” is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To “grant” such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. “Knowingly relying” means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. A patent license is “discriminatory” if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. 
You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. 12. No Surrender of Others' Freedom. If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. 13. Use with the GNU Affero General Public License. Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work. The terms of this License will continue to apply to the part which is the covered work, but the special requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply to the combination as such. 14. Revised Versions of this License. The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU General Public License “or any later version” applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU General Public License, you may choose any version ever published by the Free Software Foundation. If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. 15. Disclaimer of Warranty. THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. 
EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM “AS IS” WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16.Limitation of Liability. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 17. Interpretation of Sections 15 and 16. If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee.