kernlab/NAMESPACE:

useDynLib("kernlab", .registration = TRUE)

import("methods")
importFrom("stats", "coef", "delete.response", "fitted", "kmeans",
           "median", "model.extract", "model.matrix", "na.action",
           "na.omit", "predict", "quantile", "rnorm", "runif", "sd",
           "terms", "var")
importFrom("graphics", "axis", "filled.contour", "plot", "points", "title")
importFrom("grDevices", "hcl")

export(
  ## kernel functions
  "rbfdot", "laplacedot", "besseldot", "polydot", "tanhdot",
  "vanilladot", "anovadot", "splinedot", "stringdot",
  "kernelMatrix", "kernelMult", "kernelPol", "kernelFast",
  "as.kernelMatrix",
  ## High level functions
  "kmmd", "kpca", "kcca", "kha", "specc", "kkmeans", "ksvm",
  "rvm", "gausspr", "ranking", "csi", "lssvm", "kqr",
  ## Utility functions
  "ipop", "inchol", "couple", "sigest",
  ## Accessor functions
  ## VM
  "type", "prior", "alpha", "alphaindex", "kernelf", "kpar",
  "param", "scaling", "xmatrix", "ymatrix", "lev", "kcall",
  "error", "cross", "SVindex", "nSV", "RVindex", "prob.model",
  "b", "obj",
  ## kpca
  "rotated", "eig", "pcv",
  ## ipop
  "primal", "dual", "how",
  ## kcca
  "kcor", "xcoef", "ycoef",
  ## "xvar",
  ## "yvar",
  ## specc
  "size", "centers", "withinss",
  ## rvm
  "mlike", "nvar",
  ## ranking
  "convergence", "edgegraph",
  ## onlearn
  "onlearn", "inlearn", "buffer", "rho",
  ## kfa
  "kfa",
  ## inc.chol
  "pivots", "diagresidues", "maxresiduals",
  ## csi
  "R", "Q", "truegain", "predgain",
  ## kmmd
  "H0", "AsympH0", "Radbound", "Asymbound", "mmdstats"
)

exportMethods("coef", "fitted", "plot", "predict", "show")

exportClasses("ksvm", "kmmd", "rvm", "ipop", "gausspr", "lssvm",
              "kpca", "kha", "kcca", "kernel", "rbfkernel",
              "laplacekernel", "besselkernel", "tanhkernel",
              "polykernel", "fourierkernel", "vanillakernel",
              "anovakernel", "splinekernel", "stringkernel",
              "specc", "ranking", "inchol", "onlearn", "kfa",
              "csi", "kqr", "kernelMatrix", "kfunction")
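The exported kernel generators above (rbfdot, polydot, ...) produce kernel
function objects that the kernel-algebra utilities consume. A minimal sketch
of that interplay -- the sigma value and the toy data are illustrative
choices, not anything shipped in the package:

library(kernlab)

## a kernel generator returns a callable kernel object (class "rbfkernel")
rbf <- rbfdot(sigma = 0.1)

## evaluate the kernel on a pair of vectors
x <- rnorm(5)
y <- rnorm(5)
rbf(x, y)

## Gram matrix over the rows of a data matrix
X <- matrix(rnorm(20), ncol = 4)
K <- kernelMatrix(rbf, X)

## mark a precomputed matrix as a kernel matrix for use with kernel methods
as.kernelMatrix(K)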
kernlab/data/
  income.rda        [xz-compressed binary R data set; contents not reproducible as text]
  musk.rda          [xz-compressed binary R data set; contents not reproducible as text]
  promotergene.rda  [bzip2-compressed binary R data set; contents not reproducible as text]
  spam.rda          [bzip2-compressed binary R data set; contents not reproducible as text]
#ÿP+('`TE/anQ = l1]Se v ,6]:,\0$!Rm$}?rK*v>&< tTqU: )fc{blhq5=@&& gg;%>J+OI\b ~W=[3̬їcfD"՚“!ۍMa{X.̉xEC".;+hW)޻[ $8lJ@襚fڂT>7ʜ ڻqoyuvkd2]ZS4vޗ=2'C+~jf;hlW̙1S&Vω|)Rg uHhy;evREwpZ+7hCDqۥA`?J_UOYQ(Vz%j8m{ҙ–Mp+݌ 4q3͢l힖S:=}F~:=dpŜ4 d p+MH1E#h׵Ϳ;&tm-E]yqobgaM]H#m9u,xLauܽCO#vLNjW.|" 1 `SLR2g$tdye$aغ;,g|eV.Ko{c5ݺP; rNԪ.%I#{JJh$҈oM/iFɏ%N]Z}^qΚ[޴*cn[=^zJcxf3'q]n-\(s${\G7/#P['&ۑe1ɀӘq$7[\dJo2ቝG":I.2MԂs^,ruG[z#4gSww ΆZ쟷k2 w~ )l-5ujm6)P\":9|F/Gԍ_%]e@+Xdv=uXܙ17gH-WndҲb!J Kh+0&`AheF)#Ȟ⮽?v"ju$-#H=:*fKwk^s;PDIP%5%yјёj5ywN]l  jT7T&o:7 a&;&>TSe nG{ٙ'DJBa}K(Wռl{tfL`D8E Rb.DΚ[P7~!5פmcI'k>'1M_ gHq&'_t&g[l?$dR}y2^V3R%]vIaO'pyTnxoLK3(S㿤?MfRfVKs2`3Ryfn3Jc`&Q'U微^uGft6ơY;YmX6ybn<ɧ}Q+MZljpܞe&Z4;Ǭ,#f̅i:Oq4 oA=A`5i9|Y*`Nd_YL`m[G/^;|ӕW &hq ,1ӵDP@ӃYEӈP`|$jbu̻`8N)|| e* x_OT91zfſoʚq80! _}:txAs3snfm}0Lnqw(c7 *M&Oy~F_TtA0c>,oi{" Fnw91y/b?#ⵟkt\o#fr3U}dI -* T=$Re{;8CVl;Cf|,n vVGsQsf˼ѡ\:E:زvmXTӜ`]|:})ӴFO!WfLMMLYyOLVmI%%'3T: `s <%-8GS>ϥ <77 Vsd7aM!-GMʨek%\ .c 3;ܝ2ʪizM9ӵ7}-fZQ8^{džSG"4'u暉tfPcL5hiԄ5KYT=t7^l{ƞw"$[[ة)(CbJ 2ܚR$4l-|Kz_ȒfߣZgZZl{VQ:h P/#ۏ#ĕzjE1瑡\m}k(HliЉU]9jN9=Ʉ*n&OьYL[j$K. Jh6d]5>a\[C/Z1(>%H53LL*x$ە&pJUo{(hݫ0 `;3Ŧx"f]_}t9Ak ͨ)3[any\{xLc/2TV. _q&fe$GKQ/A*3Msw= M쿂rsbw\!BK6TGwQ&b,#B `S:}&; Ok\N2ݍV46AyZE3?ryCsaUGbNPagfϝ##:5;uQF̕k,Kv9vi|b0b,sc9WmZ|0*'SZ[Ye XᵗwYх@A$ "3EzI}h*0jD (עM1;|suC9fyfǺi)ȝ'J|=w;2R./5zJFX+xus07һϥ2'$@vS;G0a3HaL'IDdPyQ`(ii!kޞz&@Jw R1`Z1X޼SϝVv< Id]UFb+'BnPյ%N?6g~(B7PF1fn 6BQ*xEbnzVcxxjcAr,>͂DZ1 \]dXC ܛAR@Tc'E̐xʩkR1^ P%euvUZpϊ3GH] ,:2O32{Wڅo7%AYBfED \*zE,~E`ҡ{!=i`pdO%YOFY=n&p;̿qj)&|y" ՝ &an=PI٤M5yT7pukNJ6\43xsj';$+RQ[,JSl]h]粳2⤴Zβ]@!ZH ?sr2% "rJ4DIRQcE0 {ktu}Pl0LW}1q#~Z[+C/VxUP YĉežU{ǭms VG_l <i0Y6Gx36>}IP'L)[;5wс;j_O*T3ɺRN$/# 9mS< chVWVG}E\GF/Ddqb*p3P9{6Kyt=r^@mz|m2VfuIZxl4:j]]XU3@AcM@&l9%`5"-z6.k\#Y6! SP$BK9n?1]gI:=yhoF}ԄLC&P>[,HoJvigs5h '=gQ\€;kl ИvM;Ǥ8@MvMpumoaRԮF_0 q2#L*Afgg#b} x'k8kxfbڡ DZ'X;ۑY`1@EH},mLQ )L/ѽiYviSb޾n>8T}\bA5nSLޯeyLE5T^=J|UZڕ8""4iztsc]ljx1<9-_[}H,^υ|"U`1_*2ŀV q a\ ,k\@j܂FҙR$$$2FnY]+{~PQ~Է"{~]=dp,3oTcO/km.3n=${m\H>)N "t}o>~m{Y"AlcTLྉ7Ғci")vdftsL*9*n=^b[[c+ѹ3dS`?bn3j=|$Zv!I`4Y-A"#5tAMKek3͋|Tho0 omۑXZ(dэt(j4+ +ٛ B {%j.-_sֶ2PF-)ۊ+EZLf,F*H0oCM.kgZԶߒenl@ ѩh$REAF %BȜ=?݆vyNihwK*zon&F`YE@#ee/X RtOI2Nuk߳{#2H$H\%v$ח=][Ovʸk6RDI$DhA44#s_[Tщ}5 B8ԶDe!0iEI;k/gdy $B LhHe%=^!WkڹшQCH0H&VnwMʆ_)p7n UH3c7|6TP8CoYz]]($K}sŻ ̆HI! E(BϪ|m6#bd&K<0FsITQArou8IriP#$@h4y:]A "e%'n˷b(vxܷwmV D,i,%S5>)Hb)(Ƞl%2}o/z\j2#7"$_^ f! )6 ^{OMB F% ̔f"2IhQBiA1&KD׫K3`,Aid i>䖌(0I$F#0b0I$f2A3 Ȓ~)1 ݸ*AI H0LaDQFUvIE1(0!o|wӹRM{O[ֆz;  溡mx@K=yLH( oǞ M(A& 3DdS$` w61Ja J^2ܩ S Q1 DbLY%/R !)#rl2'l!L w0L}\̒bLO_PQLXd(DE@(Қ D|SǮɦHM4XڒJ2,0"42`ȍ Ɉ +J!f(#HDb! &&s"6ba "fS#CK#AK%)#\0̙""Hsa&FiлQ*,2AQ$ʌۦъ ؂FR,&(i=uƍnwsc$T$eF2 \Lh㖮 e+wPֺ(Le#4dDFrTM"FȧtC"lfԮ1)Q!I"3#E&FD0"k|{2h(@l;34& ,cPZfILzƢKJQ &hF*PP(&Xc2Ѭ*!C໰cQM](H&2DFI `0lQ` h$DQLcWqxdQRZ`4A=5$\wX-vN#$AtF1CU>CvsE}t]PGh0cGpb(=,&(NPR< R*tF `1S.[nz^,+RRIXM (޳)z@׌&.xO/q={8\g{_稥;KGQNb=1`)QD\p]p>us[\+3QM4Q5@,k5d8@N5q_u9R]:: H  $## H2 H*1<ܔP>?9Xp./O\;6͹eN#{GQ? aPȄ"EZ bh6-2ԖF,mbKX53hԔ&1F4HԔikʳU?_1.yn21H u&/ZSg{Y`AZ5J*HȣHIO(>*NUUO9.G})t$FZ?ƾ?/>56X#4i^B{DV =Kkݨ +{H  6ն+kb[mUQkmsZ-ʮbEI)'nD-UKthtk#/4ea`HtE?zk%EaФ%bB0Ixdp[Dm}󡒦Bh((i՝M̢bWs ;Ro97%d"4GpIh)U`&E]peHz4m UAaEwq6y8ֱeI/r=$87Waܥ§ZM УaǗ};X}\F,V֚85L:EG{>Sګfif*۬1I4 *FkBT8Lai *ᇼ 4CNaY qv>włE'\t HIoaDHfl X8d…{l CȰ't CS,osޫlJ4E^!H-rjOT E.DX RBJ^Ðs7URwX:l錽F]aր#+TmՊ[bRڋk6j4VjH"$$ak]CYe\K% (z?op+ش0/bVc^7)㱼2G" 9Jwzg(!]^?ytwR3~~rmaY^bFOq:2[[dUq$ll,QDٚLkجkE-5`.ջSI$$dIgOdSۛIǬ{?Wu篜X#R$Q? 
A_?gԐ88͏%ෛqh"P $Y H$[z̃ I+.#N(|P>A=ܴ{ ӻ8XaitRyYrB{]$oi^@,@V{{lEi KFDܨAx4UPޠF01}'ЎmE)TAw߭A Q>b>FM4xJٿ'eԟ T|=;Aν7+(eTT7j_p@ $5\]8)i)DNmߪwyr~x_~xE |(K!ؗű S4W5~¨4m]Kiq^PZVJ9-iF<(Ǝ F+ҽsd~ | <P _X꜡t;^Żt_k4Q‡ kӽѪGx䳢d@0uxU`2ra#DYH8E;^ŭf" [3^PyWNpY -RCo,o\}lnnUD+DT9ԅ HyjWq> D(KuT\n[`d mF*Y 3JP@LS'ENtOA3+}ڝfLe4AMq" I h@RЈ`Y20Q1PBhff ,(IL bL&+2!LTc!͍23(bC3dHbh$J0PV+0IIF׺-(QjE,iHq,„l,?QU;!\T 0|?HRS\ƍZAFn ,@Y`DDE$t`@?fӮw☃hMAe6O󌂀>" ۡPpFohVҠdрH" lQp{:ʸ `1 Vɣ0(l87Lm3 xY`"cj?ZګƬU*cj55PV$"N('և͛ԗPTZ󐖘ZhrZ<%O*UM0#T2E"MrkN?_ V9X \6G9⎽@<FLDq"LXX@"hBD4P?D'ƮSmKDf3B Q4Х`hO:XF;"ޘ:c|!sZ#X+r|\2g._0!i;jZ{r-­nIԈQg ^kWW B,& IŬQ6*dp8'w;^,r]R :CA EduNJb BC!B9h{{of?sh9 4+2U[H @,QAϕy$k 'ה5[-((2(-/SaH&mZHG80JR}3UuShTh{5Dh猰Ft| ^%h52ϰ~;W|:T `NK&D BHP JK17=ߛ Qep-@a\ LU:e1{Y f#!G}{4-f ^yLo}Z5_„!̯r>\7wS|E7 D5~Sc]z/|)Sûs}qX_ ̔iA9֒DNYw |Fݤ%쮵=缶;Awm #I€RIQ" @Ύvs]N#k䵽%JHi%"+,S 7tG6\м B鈒I"PU HЏXy.T}Ӻ gX?Ɗ]jILgZ>Rsogf<"O"o@|e<kň6h'Ucx򷍤&Zx*} @GV97%+zZ!uavmָY38f*_h*Zy{IBQy]3I݌F-/:γx_  2ft!iQm-yN9ߢ}\ZF늮.Qh~Ъ N HQ EJ"F #㫬Y__ZG%K"I()Q:3g1oU{p?ney\ |!ο}$x|Vf]z=[7ȁ쿁wo8|DW * "8D(~EsH©O`C _9;(q!FGc 8WCb8{h)| ы#G/eL[.9^z=:g'wO06@eG 昢eDT20(;UtPi7U;?vpW[/D@AKf(Iڨ֘HLDLvYxElS$"U +1 ɹq0EB(d$eV[i cDB56 շXLY/*k,e*@/vH "g"cuu{KV >*b bWP $qY꺹d#D"!fnlA ;|wUNQvݙ*Z43Gڧ=v84&+9Vm-~갆&\ R y+խBiu]*Nl{3¼{{$F{Ӈd#T jH"(\hB-vٱ1D!7)Ziu{޹+a>GAqFɭ)13,d "0͹ 4@JJ. @PAeA\p݋nDUPJ` =Gwzr&8 X0Drkm=9'STh =m.K%ܦOOf]k1jکhdI"=t6bqiAdd xhmo;esM1 ׀N0N{śʡ̀>5OB{MtU -E&$$?J 4Y&hʩNRH}CHM;앝CJDlm't` KmWZǺ[oH,$Bf؆Bk".ުQ@K7V&yQE-q9y4UA`&] @ P`5su`~wc;/'Ya9@/C@77 ky )(VBGVڑCdξMJ wdD0XH!@b !ɢ Pժ-[W镵smNoq68/! ] Wɏ]6}|j͠填&rE^Ú۠{՟}Ϗ#48b VPEFWk\em$۴}'+Q͞jvَΈh{!1d84Z'G)u/K!;У"JۙB2ÂY|y{dž?*wofLJj-StԫX8UNUÏgh3WӠFvI[8GrƟpy3Bl#a%X Zd޻7/4HHVG].XPp7Ux@Y1tg}kG6x'=1Ĕ3x,皪͊W`>J["x{QϫKg0޴5gU/)ͯ9=h2 Ӥlxop5|pLusyڬzTXdUBHԽ'b5ZE+ 41PGj~%2yC~NtGG^̳sw#p@BW,(lE tM ]AQ̪#jZzCK\% XL{C'3ÅfnUW'65xvճf=u8wUޝmp)Ľ;P٨4tyjν܎ީvku޳:Ido+uDoĽ]gLgRUzȕ߶C Uwc=ݰBvLQQEJ^V7•\WgzI〚$y 6%Z]5޸=40yFoiH+Ʒ“/sV@#Eh 75iU)ÖpPJoztD P0D9lYNvj13%tCvI^ D8'%Q-)}H!" `8X#? b1B$vA(Dh8s f I.]H{gō9E4zޚB$i1؊( G90q3{o+j!n jnp1Xoڕ.TkM{/˅gyB?kB=#@e@Gע2աwx  C$c N+^ns}c]\ZV!GmKi X4P\5¨H ?O" 6[]&Y(_^0v'p⇅]qvصokM M4$+,Ь*э&"KuR"gntWwxr:kN8Ě֖aƛ!T TDr^Nryժh"5qSZL)!"( ?y~^o˪ zy sN텢68dC w-_ 79V$`oM_+WfR;Mru{͋vmK\?ۢRO[rζ=$N 񦼐m<˟e {|kX1pGi,?MxI dt~/尖RFg?UQnc)}ޟR웿n|nN^T436ѷN;TsWпǙmȡȝNUq:Ln;xAkV2Pt݅apọ㺖. 
n v:?w[;iSZ{)Ї/.oOv:KףRg^\\;c6xu:0Exu:,nln=cp]˥0n?^ wq@;ϕܯ38˖I]SL&:72_ m ?ʗM``u TfE@傟vSibF;YjL'U)լX^25: f] y,K&UyP)r)d/JՀ\ū{zn._˶hWJwG Pڵ:0լ{ ynD`jrЧ(BJCOYʃI5W+LhN@`nƆQ.EYZe۹cZa~M Я9Ἢ׮ f.W"56%Ħbb)8igˑPQ+d^Ѡe㵊fI%"Ǧ1YnrROw(:fO\n ص|I%fQY=U>:PJ=&?47]ͅ2lyr٠WrnV{/{E8Yeo=e`K_aTU.xD!@KB3UQiLwy 1:- ӟz~Xگŧg;$M[ E(0c*$!!'Ȫ; \*] Ii^SN3m;?GYYh0SP**w]`"A( A?P!@*zwZW]'3MRå(!Ѭ1I$,zYJ/mpLN(og*+T9qfk:HWhh(M|dsRgT|_A UVwl ixw` /K(]>P-hvu8Q&LKfL  XH|Qk9fǗev}ۢi $ 6sw*<["_i6 <719hm{dmPpSҍ-b!&Nqd( U\Z".Q@Y'Nw:޺ :;yȳK&{-X)9Ar O EBιjN:NAdNM( d`th _iB';4s8UKIV;c9ϬVY'.z Zc'yf^ ,{g3#%3 Ȯ$"RAUSNyB^ܱ {{vF6c.t3FSXO2PF#FRXu]r 6P䰀H&[@a\2+r?*+UtB z%Yy95pP+~Lȏ PMMip]u u" )QMQdՍj6*Ū‚ Hb$M %%Oy}֗ڬʗLuBaa(@MaeWdvJ$ &$IA1(+g>beVVq.$kX.)"L '[V|~gqFho`zBA?:yϓXBZrnj9 ܆Y5 IddEWJ"$gjQ?-p9\;Q`DDWJ&f!S&oX9Ρ(h+<C``ȇ?o;znTW66d\m8HhqqVU@ QD( $Ёp" Âeq1OHc¼){]T_~,|OK:|R,SsA!AEFDF!uߜ}4Po9nKLlrւEI C'OÂQ`H@&2C+(ѫ7~Nq< zo.pV]b\ZVVGJ#1C{x lDۢL˙\ KU3Ix`P3\>.aD44<z!ƹơ&96>owCO'[oI^>vIS^~yu$4D*1c9zғ~o5>D $Xb@($mYE O>+,5vKĈ q(z3;n(qj*uHc](1򔐩.ErKޘpB[,p &b4Y֋o%yɑbԄ`1,ڶ1&Q^.B:ˉ운׻ng: fp\B]p,$m=5{kW%t[|lsà7[²f7/.,@N8wIU Dq ) z}̪Gurkeuu}Wܷb>̿1o/+=3l\=ZA$$U !ӑ x+ t?Ort]Wgoo_б*%THYb%kWUf&2PxH-I6O4UzD5ѮplzGW5Z5'F?-^k֏:GrTXiݷP0D1 y!4*$;ΞZd/gu d}um7^s՞$T!)a*F# "HȠy訄"'A @!1K?PqF uq 4Qd"<'q6.{;2k4SPP%aP8UzzQ^_W4Jb"2)HGf%*-Y` mB&fm8:Vetsӽkyu IՐ@kDKzzֵ"nu#Eһ-,^U ]{|MMa nk 4g2qqvd!/*tE9U]۾Mo(\O POpBlZ*C? S2`I5pG xe4I )*;B!6aADBȒH 7C EʍGrŀfqZX+y oyAGDs|zLl|33Y=Nv.?_rc^3r8U0GeGT8= m7a]LjxICY;(t(BE@W!\Cy|…(Ј[Ek?Lau=kG86ҶqfumqkG*ж6Jg~Lʻ O"]k\+Hr*QbIlK,Rd5TRC0LrFiѳxLpް*ljχ7/gky> YG@t#}u^w޼=? 5NlteHn=q-o>.z{l!QyX\k>&UY;.hDVd +=}z` lHJI dFZnBU(gHEbp=*LK?xN2EjIF\-+vu! 4c}Uڤl[Y'4*KoŰ (\\14KґER>8R2z3B*u]u]]u#Vŵljڍl[[EU| ֶ7ݚUk[EmETkm-Xb(ƪJبڍ1R7k~zjҾ1 DG)DH"@1)&LW-)VÅǙp6^\igj1vx{ކB4E!I a)!&$S4bZ bLྗ쩺߈jp&ߣAZ/UFc/VNmMAA4c #P5 ~{~۫s W˪I w  bV ,48uLT lLJv(Tc)] MY_W~ qϬo\`fTSr|_=շe_j]]_o(c"d0Wuw~5itD 4Y?3_r;׺X pnG@7Ó GfdFriM䃐0.P @]pv}H|$Aʩzl_< N9<.`x'i\q q1ʓ4dZ2*"ZiWC |p?۝Oߨ눓  ]lVvE#슚!Z?EC70b\5^3T/峚VUK r+o4wNt]nY\Q9o{*#][콝bvw, m\N]T-t5L<2)`dߵ~^wMnMW=ݪ]< .O5oջ{>|vClj;(=g${%J;H Cʻԣ@wݹY"϶<;6=Ƶ޷Xky[ HTQJhZ@Ki)ڗ\ hh$(d^!?2iMToN"1(2A$-ie˔bP8ATdvd퉬MyY#ABI!2qL0RCN+ 4H]qo G r}M&_(UI(9I1؝U-( jFjgVm׉qk7M-Wf%hzi{v=C<շ@YzOI,& r*I$}O62?*'#o@% iL33IjmB5)%8D;C4ǩ("NӠ6RٞPL#T`L1& c|]( n(RSIuI7xW׺. UR)B/SkmٚPm,YcRLll(Xkhce-QY-lmQb,UFhELEcjՒlU꭭b=z{ PYFM 2ݜCh(ShEad`He! El*S(blJ3>t>J`d ^-{=hDIuzc[?&E1-JbQĵ4w HH(5WEIp*2_kX]i)EI>{˝{Y~uiwVmY֩}W0v<=LF1סPAN Tu2v]RU%}U.ԃ4c5a/ty{\&lRiZ"rAKM.oU 7=[I1z_趰VZ6J0(\ؠ=`n*T1P҉>B$'؟F~Z~/il0GFLImExIdH"Axc(* B*N4m{YA\D'%2AKY_vE H}S_clQ,"T _P'!" 
Y=5 d Ur"e=uThv !O-qM50)An'DOJ&]c+7v3/6\5o%;@(aS W*z7y99_O[ p5áJ%߆:Ӥ,ZZٱArMů4Ț{{<fw6(dr_[?yq-YHr)9à*5!WɈ&a31k5(`,tB5Kԇ(d,5:@M}c;>ͪ"YU:A0KJ+(hѬ3A\ۺqV Ҙ7wr6^fPxɇ^~uY VHa{.DG+A䆬q j4:FS[$<k * ɥ/1nҴDbQ* DT+ ʥ+:I[[aK;.aMNvKyV3%}bOΒ?Rm ?Y@I7,f`#_{Ҏ~O˶ v|Maߕ %GAK,"d:&y tPTJTϟ'we$?!,6LN *eѲ~eNKajrgZ}*3zB@ բUs.z "% _ކ0|W*;(cNB9A03 }mzQFjaj &#IqpR6`KR"H C$J+z ƩKgUkC!t 'a]ÁaH>cYFt@yȲhGDMh/w]ХMݥS6Gs~}*W=OT"f@PTQbjx?jR0ϯ"[hgDB0Ί'S<}iw4=A(h)ph.AԹEZ2)1(v8rdã`Ttұ[7[| kծ)WJrC~Ǿxߦn֏z/ǻPRKDduAwÖ.͖Kb9Z/0Cp4ʈIc\G/ou7j5\s\Y߲[ў*$K0W^:/K#vwD؏M8jY .:ԾuoT5}WjsEY,!ufTZ:(}ac} hZCyLcVxl/߭U=u99{p{O8TnX=r9ȪwX D1CmG2wgufnTm k}%(sLVT̐:+v7Y= @GTd,yjjwֵ{[zkTXJD2 T@˥G204ŝLm>`cŕAzgæLRwqDC4lN+a:eXxTe(G=d(zIw/IC3O"fֳ$D4m -Hbm`aA) `s$E 5t%iͱvIOXtB}Wdr 8Pv+%E7cDcMZTHm*vmq[mtMG  6vvq;ulq}xj/dVt, `X(b'U&X_u"Qz1܉ǻ|Ŝ_0OyVhF;Vȏz(Aejlz\hbUS/*Qe)Q5{I>C`\KKİ+8ިRTyZ(3nv>MCiG-N=lYq<Iy gwf(ʢ+E$WTJ*Q2q" ~4D@BE(^}EuTh댿o}]۲3!i"gQ yx% G]wTzz.ҁjj6D%ITY4T_1SxS ]@$)DIi~ ?m*F/7M_>>y%Ɇid@Q>jN:WRQ mmh ]IuNv}dFvj$,  J@ -0X4ԥX2U' .H|r@Ȱ'+h'mQt@E0?+8?/?~wR!9 tLSm(oG[:a4QFpd@h#8@-2u?۽+v5mlBE5ۜ#+QʪNA+宻DboFn*)2Cm劓!ZB/枢e\hp7o8:`ixдr\no֫ly+Ғ%e^NG=íc/:Xfʳ~^KOXr(էѨa*hلؙ>cb&eHQ=hHCۢ")Y#ӻRQŸq7*92]QݰENI_{S맲 6½]as{3??P> ]j$K8khQΨw ߑRALnV{ɲk Iap&,t hֈ YˣckVltG]|b:]QR> "$uEsڨQ+ۦ؄!hqGLd8(ERΩcP:5kڮLB 1|Zb?ꪷѬFئjslqXG^`QH 8ׂU#ŒtBxe#G 8{ #2gcεh865CWN1Ku ;Ӫc$]NዙCG4B Qbm]+%'Fz)0 $xߵ=oŝ=G&KuxTbs! !ΆRFf l BOE=0źRh4G1K6KƄ̈́fP\8@\*t$kXBVPQ2sZsy\58 !5ϯ7Qcf)Hd(3XB0G/AT9t}3cօ qCOAmBH4ih\dKQT Mvg&gIg+'XlQ+F +VE9ɛipEw.`cTx J$}l 9¯/h7揃ۉ{9>瘰I+懵w斬-@#gAp8Tռ@Bdթ( $ . -ikISidg(LuEKy\򂠍 9BYSEj%Uܝq#%M@PF*MNRe4ˁrE^1~*v0vr!sfTGJo.Z CuP'h|-Z_r@>x~{_ ;Sd%@S(5YV8MtfgzLOF6]N3vAUmjbzd#քvY7/YKK :v/)#ϕSws.wT:s g|>G {9$_H 'E7 .q9DsiRtW}Z~uSr(WpUdp@3OBj4dy*An 2Du$YM8MX0FEyEU%/g̲oɗRD$$DY-uiWKQعk5j[V*j "+ yث}jI'B'ѧ>Le1@Ser2],<>zI]ξ|.H"t)6*Rzcx]O(7w퇾3wag`s33 ^~?%t:m`#Dk$$E&A AA y٧,}7q%kh "X2-(A*2AENVE &R(qcxky򯺉Z3UX/ܷ=W;'5W@$^> |?@(Q*}|GAYHDL"E6FKYY髥S];̀K! H2*a!E?0 ã*DF_ -Vlp,*0CU:5O)# hnI|1 F>r5$Kbʶo+UNt9Ɗ AExhH$wE*Aq":;ֱ*Sҍwi6J ,mCqT$4UFAU` "yjTT3Ac˽`SKE ks»Y%(GȐm(e'm͕@~Jlp ej B+IIt8` c$!yǚq: $qL ckCNJ1dH60IƈD YO["cH J8^1Gn\]=*[DxX"jEI VWϗ۵꺊y]2Y0g 9<(' {]pmmT(A|ޚ}[n^!}ԇXͱ :¯E,/$GG V}0;Omax-܃`/D"Iv 2hڌO#qmPN Ӗ59gl6@Q"TB *m/ֻ݅^S ]~*(i%~~ѳ բM͊B69C6(Bȣbp!"mP[E%I}몬#w0y`|i)ê H;bFP[ݽ #zL6<;c4(mJFB?v2Ue5wi0櫒$a*pY Xc`P]S]&S &/GO GGv+̦r>U εP Ujw]]մzLZX<5> X.$p#$dk9Y,F:[havDYdt]D^Rf22ja+܄@s^ʊ*B1lW$%Ak`?$3!Y!ahNRrST ).xDCY]( QU$@Q aU@DsrVD!{ICgWiYaG}npyv;-Ǽmuh!2I5' `ڢETedX2mk~޹!%O5*2`\FR,?abH (!2HN.8O(2`V2*e< i ze~[gÈK!qIqd3WOI.!~Be)mL$碟`Y``BU~/${D 5AE;~nF4*-RDJ NQ-uri7xH/7GI=P \#@hohpf@<.6K*, UTm] XjVB 2fV\/9 $ )Eܒ65eR.q[\\ 4ʬv2c$ @`&Jʢ ",[,"VT屒*3]eY|U&4C5a+%(*"RҪljCng&H1j᭙H`Y $ .ʶa"wEg:vxlmʔ"YЉE h#̔thVhRH21eq\Ȃ*)"t$!l"\ Ӷ[QR*5xp]Ro  mEprŲII#%Z 2jMa'eM5,٪D+ k?yux8*!6B+` 4@cGy$gDfaZg 7Mߺ@Hw샣 ]d>i^&A]j^)zeh?>韥\v‰~ϛ9F,KЖY芢:܄`-d՜T[4tfd2Q 84H=8&KU(z@swԩU[+yWo/}G}Sv&jlr`юzHvBT \J-]g9R eJoWČeA 뮺뮸6KPH #TGwC`ԂbA$ (d*chʱmEIQjb*mڒDM((;󳞮P,M`<o413LJ=a؏|ۃl+gfh\@H@jH*>?ۅD1@EG!!"m6\-U$5m;kN7< ӳAQ/O_ooN+_ |*}`w?A/Q?5` `"(R(A;q1C-WqDUS@B@#R$kFhkE}6ޏ6Z~@SJ%@K* A)#HH"-D*3~g׉fRZ%NZZybGMnw gۗ@wH("t.w"HHe4M(K&Sb )HLu{6k^k+R*6%?`MꚟmG j]ZPF2/nŰEcXZZɲsV]Ccj Z?KwhN 9Hk00M;2˱c>/yIj¶ؾJ2A95H(f*1Vh*,'`.&RNY̜Pn @E^;(Xt)̃ES4 nA1:mXY5merI:}@MP8tR:X*(һm*w+M4- Y4DN.@PxhdZ5W賥۪Q ]kDXd .!uX ,Jݢ R#=L*fQSP.ݎ5cvxt2pE6IA񉧔;d AG E6EK mqLeTl@k:l3kA1{jq$ ѝvMbVB_=-~S\!l)IBHA/UA a#AR% Qu`41j#'ǐ޻F w)W A%`!ZA*QF/qpc"}3!.ŝ&V^? 
F6JEX5Ehū6kEEmضضkh֍VUjkXjū~[\cZrbVm*DBADFEA >.`"O_dn.GUB#C@$+X|B$D $HHc +[E[:QT5h+_Я96}>+ͶԦ2M-TEm"pbB @=nW-jTt] ~c1;~~:B*A`(@OgB(U=B 3x-.RB6K)HȄf'c@-H$BYeI4*"@=P}J|Z~8/<}'#/z=,'*N$:8_樋?_eFUֿ~%CDXE!2 A ()iQ>.p Oˬkernlab/data/ticdata.rda0000644000175100001440000056774414366221262014753 0ustar hornikusers7zXZi"6!X])TW"nRʟ$a<n~u!,I~ߺXJnȤ"O+l-o 6ewІ B<L-!litE| Wӿ!BMtZ\YF4l[hTJLcЙ _j#PS~A馎xFe`"jYxK}px으M+H٠c\5T41s]WUF3RIvwlw~u5ߐs 5&|?6N⥤ו]LZCCk"Ԇ}Na /}WUc:VkFX*pʦ|K 0^EY ჴޛ*K:؎aj2}$韦 &U eAo YH4yGFAJ_so= Q(X}y}G6w3R bCQ}܉cɟx;t 5~~- ߩ>s OBBx$nj} ܂'X-sQT^En‡tQL=gcJdU:r!Iz{BԫWb\/|jUzo$Mb+F 6%!UG&>2砱p8#8;;7VaxB/zf_k:0ϑ ϻ9YXDaޭhi 0|As}ƟrAc?h`TkTi7W㽩:U/X% k")Xd0熆"R[WZj`Sy85DmLcNeNWL1 >E1{,⹗AO4z'3]qAB!bkҚV}ФV:Kqx3Hkv.~&xwץA\yTnj%Kj4cpI$8J;T!} >}MP̡i"Р/Н M,+mL%n!θ8sׅ@(vFKL%@k^UH4^@@tA"{h(0_WLt&{,aBH2|^uQFx#ua)84[Ӕ[k;([Z=Dl" oݯ1f[5s;c9jVE9߆>8IPxs}Ѧ3;K6mHzI_6RV,sɖP]3;i";&GLDdذƠ6;nf}>.ì]gx`hM2eU w2s1\̣y6'ܩG 6~5HD;=^ey {nN8'L3Yث:^ b^)~ax^F?snBTiM"=E{V%^NB^,;$WǺ8K:CqLzX;+]f:D*dSe"fU@/Gq߻+" 7b'Բ= roHlC_Z$ Lodx9чbQbIVU_H28XQ) 9j)tU( [&ݨg&.ۙzu>h"utB=s/&_PGZ\9l2 \dN H2Wr,i#AT= La2y8ߟǯw^-{Di8߮=t0$=&6q]c.k[zUZXY㉽o5 Ԅюt/ {*379Fd[`+/x>1PI%@82(4aHE8tOʆW*aA=XTd3QlcXM<]݆#yL(%a;-kIn]2M*6Fesx=lv]FsJO[:Yơw݉/x/? TWItW#.;%lp7*v~38ܽj)͜vg8,$~=eܼk,PRm>}4Oܳa-V&bs G6/1`m Kbt . 9[*n燣5.MV]N9?^v!w.Ui0}1xajE05X8];w <B fy(X9 ZCۺܝq i`Le3s I|Qo5|`82X #n%";s-ūX\)aQG^d ^5+֋ҿDx N(eWKP E-`Pt5z\ٯdb닣#!\}bta[?6S1w)(q=$R~g`⼧Zxi7=/B'%QzB.b/}oՊ_"y rL^x.7iN.Q 6D09NU}Z% k WxM;kɦCamKDkEy7bBi$k[^aT仼AF^^9+c DCw Pi4b4a-}@_GY۫$Oz%8c:[5 E,3 P'x4s?#aÓxN-ƃtӇ­&X-Sc*9gVLvv ވbrUU-LR+E@R cuiISl)WVUY7F4B!߃~Z+p\2lS:~G|RGPR[rn2Bm ufֆ6HnBg)uYU[1m,Pd?ģCCe|i )}$4Lf#7v7/ʹ*M<6tIG$L\%nsIcYڝ>o$Ó%iIJ jLJ/'y`DPn Awl\sSf^vltOyb6ҵ~r0؎D*~NH!0*8=L'7C qb G1'UpG_ hΠaK;`V u$V/yRɾyTړ?Js;& e:;q3_Fq}ĵ͵[C !\}X R$ݞ膉΋ b~fmkiB㓰[Զi*C^ :E}_^Xmw2Q-Pat=IIT0K qR^%Fi~K$u߇BQ*64/Y>ƩDP:_/γZxp(jɱa-M7 DN#4wr97yc' p)#kHuC>.h"r.4j ɚK៶NBDR*kjp⿀$=lia2)~[9c0:6n[K5=:9מ<*25=;&vuh]_h:_QF8J :*Ϋt]wp]oorf^Xn6 YqXW #\M*G'],wb(nA_/tz9p1W@)%6H'YIXc}@v_fEH)p@@IX:h^iIleޕd5^sII;/| -`!CnhvHd e냄C$Tip`j鳰hyuS3nk$'2IhAfGs5Pm+Ea "u{4PJ󵩰o:_gj1 xPe}&(1QtdM%>>GtUf%M/PuNv79sJtZs6'nYr l/ք J_<L" Ȼ׳y*sMƃVt&L(hU!c6φ5$ɭ?xԗF0d[ji7`+ tSfɉAf}B)Hy=~)vsǃz`o$B34Ax3ebgnj]Jțܸchq.f?gp:Ƹuw0;NuiQ)}'RVse/mA9+/cmť]`'w*YGĩk`)Jkj0=x߼'q0Ԧd23`[Teb64=@3$ DQ~_#{ہ'K&A Cj90t5yIܪRKɲW3^}ofBe56jw^[eb0l>MxƕNNOfYPZ4 ,Ɖrɛ(Rperґu)p>CݝQz z &Է/0?JYfyHLnjRU6xk+g\!ԽOAphB4R9D\ Qk;+71O&ItB$tB-' 17:V{Ț 9[= ~ec'fQYS&(zH55'x0F+BWp*ۻ׫8xCb DtSCUˑɼYT f"M=ʰHdQ02<^B6*4ζn04rWhi-Xd%ޮY0b ~EH#HuD!/bwٴzp?^z4j23K_Q>iEN̎R؃˘lg+r a!e=Cxǎ%c븕Bބʌ ]0j^.GĶ*z.;rG;0W>S1$?;<$hO SF{v֑UHfbDoҸL$vn"߰Aˁ.@Ђx(Lv>F39tՏ <h>VGy1ѦgACd a-~Չ ״Q\Ъ @ڤ<fMxLpPDPaJ4 t':hg6gv@r4ۇ+&%&QބbJ[ zhGcZ }/^^2q$hg3uN+0WF&,A/rZ\&I`|BIO:z!8"JN=݈-lɽjpEImLBxS 7 r}v:kcUwS~fd#G$vE`/r6Y«>"օ><֓)[xӻq㫊yVXKLV\{Z=o\U ܋z3Tb<_l|Cy>l@'Ɣ'md>i>#ÉOڭz[>m9VQ̈{ |r^oԓ# c!QJ"/k8m\*k|")Q9{^ڢux}.oEKGDEHɕzXH֐x32 bjA⤊+z~4!MB˗R mTwI$fCV vq]=QJt ~j!#AҿWCRa 5 EoK2fG0cHkjbsE(ǏFn(՝= ֆP | mvXU Y(^ 4yCM3y9qN=tAR!r\(7ulaE%WOo.J/oUł fgk!&m}5hɞ_&4—.8{{LtҍiR}e^5JwS(6 9ɴAe7B0C!~rY*tW(U ag(&Pӻ E 8WFRۥ Q{zL)':$0&ޏn.Rd֮[<'yK겵fۣS;@BsF L_ w~ͿP5$y.8]W smZM(pFewSAɹ:+M0k_Lk5l7[F䓄{֯eߌe9aЃw"V\ݪ ko%fp~g='ĥz[7V4~1/dJCNBC-tXפ+Mj hgJ%_-miE|wkӾip>{?m`CߒX[K4Z|#2%1%S8„x<ґ?'6ĊJ?^ J;/sHӉ+߃a%#^6&޼bعw=-s`1~l~ۖKM D]D GGZ\m+~jn}(~ M+&uQ@SSH҃y5 X&fC}>VEL5Zއ}`xKfWR'Y>]$0,mFؼ3uaS̵P(;oQ8U~R oW8|ʴ-aR0sTR5*p@)ANw~CFB3WL2h{yWlϘ:Nr JToG-}N.p6m7,UrkU;ej SL1i +(mB8\҅Ǵvx<Ȫ:ef9gP9anDĘ:z$ho~0)Ӛs &?)4Ey,o2mdßD+4jDxfv 9qso1RsG /}/-n"|2~&ʼn"$ML &.IrEegW7oO݁c-QՑ/.;þYa8Xȥqm>`9[i"K%U +GUQx}dFhхhK`V𿄫J#JCVM_,.*F}7@!,᜽ !烾 xp7\ޙx,.fs^|6ԑ7o I`E*$ۇ !TH2dG{MP X&ش8C b;6jQۯI$n:sGnBVYr{=_"Gn-N*3g 
9DCF*O0&V}hۣv2\o 2U3|Kd/ֳVGEM@Q{}h)J٪=ҽI:g3{maM}kVnhwمn#0i_d Aq˲$|zqϙӪ,J8yI9&ySA0ȈDi8ons(5nZYxTKXDK;)rg^aQ>S}EEH}X >(l~(ۦ!B17{eQc#d'IN˘ˌS߈\_98[9`\\6>_9޶⃇{,$hZ\Z'%o7:s|Hkv?SB:c,"z[u4op!")^gA̐CDG:Ub@Kzo&`x)ӂvD51d~z㙬^+p׳{ʀao[Ui[V}Z4zuX@Np뮑jv-^i``tMJO 9m55=eԪXm'+^Y)cNM T fP궬pK3owĸ|g 0Ӳ3GQ)B}yVױljNY;ڷ3$;ཨr3qWw'w!nNܾe~{c:OfCڝ;muiIVUÆ-x@AJ鑦#M) 0^֗ BqV1=ۀZ\#1.xW447Qʕoj@(Efn^?#^I5L=Lc ]8I9l#OE4ȋr|Mtsj*dm9'ǩC(3Hn$G(#t:S)5Ko=Ot*zf;Dm^풷1 Iq5at. C $z80Z"p ww )IW/aar1e0H _FT:,`4VK75$CF])b%gg˷b7}"<y- \tM;d][l?!kuKGQq6 ]kɲh7lJ" 1NSZЉ]WJevIӛDBɑ~KQEOkWa |p_ zߘRp}gu G7¹h&$D"OQ DռOe4Nl'OjnC,b}!&yd2d1D>obr D][וd!2tFP8#PnXrt>tZR8R=cOU95iZ&N K5t"WjՆ7,bF{w@ 꽛jZSmH]N?7R)᝖!Axl#g_W]i#<&Z/Ȕ7zFXdq=oNE[- 7 c8 ^Q۟!#łH]KҪ}* \ `+,u>,S"F5\vQ׿*NuUXMnsƏ ޽ԑN1ғG 6?ܔ^tf$Gyx2S=h J?SW q=%yޒ0NÀ^׮աEc#8ًQ ,$v fƌ+wib^MsY^4"-B_Qf.7Eb0w,sѠ׬9Xy9טL=-/omL!F3Q@8gjP,zg.'>אe[Pȓ LaѡCAkSuύ)\35] 7 ptN}A wȞ J DZW8v:}'9/AYku@8O%$J d&\0zV&󰂹k9z) t̵è{Z4ObRkl 4~[ AfLdzdk. ǀ8EtBFi!KK|ϔ7WV9#kV,rf|FP} +d4EsnqETorX-ڑAТ8?Rzhm{O¿c:VYO0FҎWFx`0jߒ[i>jmk F_`[ ?R &|Z tT' n޲,< 2Rz,pj$BŞKS?P'WWH/)Nrz٪)DH9?q>AAonv1Fۿߊ@`,̼в~6/%3fOm:x7N (X` &E\YTd}6F8cpVG}tMXXXO[A|"qyq|Fy'og<2O8ONdYcZuBz.E}8(41"35LI9ήq{c?oY@eLFHǒu$i<0ύWwIUE6:lp=`vHr[X!\ܼ!'? ϘHU*@fhpGߢkAuCtLNzVˇ7ڃ) +1(|z{{#`T\E\j-6RlX,PaX=Ta$-a6{܆a#)qöb mV4pp1|x$ТN_h1Pr8֒,PbG,j %6eևCY4P a6fD'[ut m=י_ƮVyRQvs d_Cdvل ǚDDDoCJ*P9A$З]Ll=0sW얢|dP;r%LRx5*|LXY߾~Pk;;a56 P!0gCKo5 hȭE|TnCZPGAKwQ4֗G*ŵȣ`!tNAO})ķU"RA%yZt o#!Pպn h 3+?:#1k3LqbSuda(58K@KohsƫH\Y&;ߡx:P)y4l kv>6N<&wR4E ~eeP,W}֦L2l',FhfCN!UpVo*l=WJ6D(/y=)IMy2^]rZ8ZԿ$F@j^\`j7ݱ%:S?MH]*:%0Elge ?_?NPRX kUPѝ\ \:{ǃO=  +(m{Jb#ȴmE- '&bbI:e%| {·Ofks=5]FY 8̐$yoo3G}sa,jC*jۼ!w̢q>yO2'gaiKH^ X9Tf#R JZ`_ՙ8uJ@^Bݬ[' N]Ind9Y#tﯚQ{teML2VשouΆz^ Lj2[3{V5GUl oG׬up&S!wEvY#HȝvP2ABқc p;+uʤ_R4&UIļ|Zi)#n\̈́3ɭ&L6vOsכ2[|p GJ.FT{wۜz.22w6:Z2 %3LhS7Yv4>EW "IC#0/q]*YB!{ *N6~: DfODPic PFDž"EJE頵j{%,V28hK@Z[U.I/Cs:+g~X:zԧ@ᾍL.x> 9^7Z5m9Y>020<T1E: x>*^>tO&xquF^J' iQ}nR0wufya9ab3b=Pߗ2:qK+ SX"QT!JnVFT{+ČN. d%T}QQvKm}wcF 2v kYO s]U-zݘv깁{"h̓tW/ %RwwȑC Gڤ~ n'N˝ۈ' 'FbP!FNP"J=Ci~CUIn}Ohm|xgtJ&=7bb|\W'*<_n35 ulcxt-N;VDa:MA&(2%rEIDe@`|n. ^m~pw Jwm@%wm Q\[Z * BNݰ5Tq KIXN1$|_'xL(ػR W (U3M5uI"k3`1JUTJzG k:(hOEἛi<o s[vΕ/4mȆftm(Q% ˖W U/NЗ7M%mKkz8f-ݮ ?H͍diCSXO]c^-ZrG1#_XMILg IUeDw`цEItUQ(Ak56[!!A,䀐 ^8e/&y.JˏI͍Fb W_7A?NHV)}*gYc-k`{tyϣz79o yWe1Cq~ߞYK׸(ޭ`97pvTN(npI:"a(5_@V?1{a˘x[1wrzPR܌5cbl1DVǥ|SKeL:_G;~6#VN V;h bTRw+9N?6Ϗ煒g"G@&|w5뜋윣3_qIr<ߟjٰcÃ|&@;!&/zfW^Q Y XL.7+Y*yx27En#Ѷ4Z6Q8Ta5l,m~b7F^Ii{îX,>5=)ٍ_FHt` rj/r ,l8E|ֽVgCs7D%8^<7nܻ&3C|c-t{>XPvޱ#*B8L砬4i,zZwNAk:{zPLO&R1i5 4t B/ 6@+?o WղpoD. 
, gw Ǩ5%4jzCEߟ8nU, /YMCl[(*{L4^xBu,Lz@:+ۂw&EJ .J&!T_Y-މm>t-tFK&k’!L ma4Yr=t's ήyjXp.5"=l o?l,3 襞5|6^z9}KвF 8?̿h#̿ 9cY7㲁7yAyHzH!;\o60yg'$ܠ Q BV9]XA"zxcOU~S?)O˵WeZ.01,e nbEmP'yS_?+` (Nfpi t&5{b/ KY^קdk.]:"j@Bo_xN '97^gwzkxia{*uHś M*r-]Bm"-io*ǬN&t^^9η7:ܜHzѤ9T0-)Ct|ʏ]_OT~鶉X8 (ޤT,iiWJ Z2 k'-b&BwzAF<1Rx'-]Xu ={KlŬj֯8cy幛& f0s*tBK3B0+R^*tDFӧ u0fTG.SI_=̰o\<"#l\Wyu *$A**}#l_u^F_*^ӳ”yZowm 9\lkz%qM\dcܽqsȻxCHՍd2pDIjj{~=9͗hRxpKWʌԦ-.q4ZrvKK."`mRiBK4Ltevp,Mj#J8dKi8,eۙn d:#9 {*19yb8{%J:io2Drjuj~S(\f YL:D%j{Z_ 7._Q5\\d?VZӠ>""Ì'* 9HUb`.tU|/ռf4r|#77߶L 7URE !B0m/ܬ \]FrǪ| BԘKfr(^Sx 噥:(|٣b}^Rh7!x +\T}oZc ;=z*M*&[bRn[r p/X:NM .JAkdP0ݭ('<_saY(dMCiLD-9lt.* mQŏB[q'U{r>IN}w+uuS4rڰT>C{XqUޑU=7!Q#qhXtE3mFlx#Z^R zpDHg '3.i7Te|E}h=5 '$ `.#_9O1ҹ)'G|3hJg{W+):.8&'7+%XdcEҢUJ{і-4o/jl%#y78%1rV }IW`kU*RoaTU 5 0M;k'ŬgBln:]=K L+Ւu1$9XA!aGC.kIAY9/& ie|D_}PRH&9z2[lb3O._Y S8Sle@hv Q57^S.)/(KuC{KA=Ęvg򝀫(* mZ#_c a3'훫t9ɛјx%: `1k1լ H3 3&L6'47 D1paP֚LLP J]Jk!;Ȼĵh߆pa'xڒf:/J,[ )_r7MWhC߼Vz[99NdӎN8 e&>%GJ8S]@ĜPQp b_}/)cx\HU>/_%dJ %$A4`Λ]FQ2A(% 뼸4%]k<z`faxXa,e ),0D6К0l,ֈ7~nKX枂reo4 ةCkh[ >&ڌ ~ IKN-*_z,k4On;A*U5D}+D/Gjx8Rt~QewqoVy``H"Z)F g ;"y S)VCY;Ns"1DC ')b?R406>#I&7 Vr6 {m΍:+śd3XVhh-*`@kJD2)Ke2WS-Zisy\ p A r~{{MtΝhǹKT@`LqyI Z'Z> NƕSQIf HGu**U R CIJ_i:}v3ٷPݔr A6:JBpO`vIdF_e24¡[ݶSړRx'reL+h"_]@&rF; q2xLFӣlOq7x3u"c Ȇ>mV_WW FX"M ? ]Vjה!yz-_zRh͆@INgUyU/nYVfO t.s18xJ%&Ln%Yg anZT!5|^' dl$=b Bb/Ͱ\ȝ(Z/5;冲G*c8eq0lsgT%0?j2Qgxqf`Դmhp7=qIs#O6%fE[T]h eю:S8VE/5Q&{M:H2JlGG TaG94~EP9#?"USf.JLq8T/BP^=ҳz<8bk2) aA@LtY}.W2@CXݜPd1v憜kE[dSU۔{0 'oe/zxpi E"ae~ ȃ[qyOyܫЁ_\PDX~'^NŸ[!1x,A,54g]N sx<`KÃrʠB^ @@\|h/hID.e៣B= [D,5xe*ίGl Pp+pTO΃ǚԓA>AFAz=kMJZ4,>U;ux~?Y[HK˩!1C]I+PX'ܙx. : ?K)$?kr@6nYOhmJ~I 'v!z,,}ա/w d\\C.I/My@G8;QvF&N/x U#l- Z\J;q RF\_ IOrM'~j" ǀyfp)pքQ[ݗP=E]᭤AN0mUi#pYlÐl"[m-2 DX%gLB{vpx~a4w :ǽրl76\Str`>,j+BGg 鶫OVjf& s+̠ UZOBT݁Ij O c$5j?Kg܀Nao'w[*3j-MeV|H2FRK1!PC9=Jt)`(bWu `J9W2$7؁ǣ]d(2w럁kXCj`Ey3m!Tbƙ\X3ݺpJyy$*;9#zbi{F\RSM/op^PGX{GimM˽3,it~ˆG!wߐ1%%)QA!k9!3mxedHmߞ8ykUiܖ}1k(MIvmSjo) t 8uјyhX.{Dۼ^gPLk0^$+#Ƞi t=X(VkHfL2w:* ֟ήCDYK9Le[GЏ¿xز2CFi2a's(#"!鹮~S%;gʲы[$=xA3,t |st.WcD/vW% n4<µK^K{o}(¤U8.tC=(^'DZ0ds8/ RhkǍ=b8LL$\ O E<.fBxIVZ {}CMOe8ĝw=QߍH}p^0moe\o(BڻvFsBF)*1:_NnaD8z[ҳt{Nl bm _ӽĊW|@AS,z7H[΍,kucU.@cg>VfP%ٿX}*3\zlsyjI*m?XtLVVD?Cyh#]|Af}~79}ǡV@CAסr(? :}{WޏGxR* Iׄc/ ?z {jy"c^տԧSU_7c-Ql}N}\f^H8é40Xr,8@]87QeoqѹPr:PSe*ɩ}ð~)x|Y+k.@>0d?}U'ܣ D8z hZ欅lL 1Gr-bBq.3i$Bka[U@ V@g<(6}Caj"@~ 1UBE]AE 3 buӽ qqzJqReq T`Xj\3Pu\Г3 Y0<̐Fx(ؓ$;: Y,, f_}2JQT)km=w/ X}s"bTۄ.sXq (5q%$;jj9АEf]s4JvCJ/L1In?F$xz pqx:q<e?rx7Ias,"+=l_%FK_HB,x= }bPH}A> x4+I=@%GJV> m;LNu15xsTl0km {E--Qe($+4}*ԋjj3=bWv~@w+ bd^F~$6ڕ\[YCLDՈ c vy B?3_0bYtk E ~" |m(bT'}I5|:㌓я vl!&uE;.ʇ$9}7֪{+e 'LPК&(zH/3aNPr7N2y~U08B]jg˔n$!łGON\X_DY1x<.gaI51iIBlcc@@,"4^tl)Ú/3MO$UFuъXm-4LF;SJj!)70^WxQiyT!1Wؼ㚹Na* "o&{m'sIX紏|wPUT'_?Hw'pO:6=wQnGo(f˙j((4|2Tw0g1&m˷+4 gw ;Bhs7ԒzP>+eb~(@tJVRa~_bO)w7y kVO:o3k`M&/kf1~Pj%7-w%Ce$ Dq7k & !֙.+VAt*O.gŠ~ ťsb{)9lEߓOƳ0UW=xghH"j:_IQe;L3'M)%Lp}W! _ Zs E],v7(/l`o'x IrhyyX<#)oY* 񬄭RzvSQ2,(Z.~ǂ;0o:"||N J7=a0 rX;Oof҂{Ջ a3MaK&kDd\-1쌢eDo X<o3yC޶Ι[] r4;Up`pL\Ailn,u Fe'/i"~#YNnEFx=Oz#c ηpS-+&Blt>p[%]oCkJg/(us>ե\u_ ^3 aoN`H.D-#S[ٻ汜e;%WtХeY86NdP^#6N 4H7ؐH($w6~<% .E#'bI`Hij.'[Qm380`Uk\| |Ú۪Av%z_b=巇D=@^,l{3S-٭F?GD߫27Ȱ75>;pYT'>#z.; 4Y+!}A:ۦ69RM/BѮQ_ˮV3*=B<#n7R)7MiJF/fn̊mϚI|H^WK4<_\TotΔXsm Bq7$.*Snl =ix~\,4naS*rEUVif^oo\$ D*Fi$i?dΘd܅z))[)*=iU] ^hFUad+:Gig[>]M`.%D,U{Lc[Ĝ*UL -m@_!9/C) ,[]-|ݰZ`ᦷL #6di"fbǢ뤚poR9~د4mjMʄ~_(+ۨ646H@FMb'wIB2xmD(aD7`R] 癓;-i?|sbqlᙚKIgWc3D8OgH9u nVQ =bfD.\OT|Ffe'Ȉ0PƉa54jw,%:\C^f6vIQ@qo_4IC]4pd K)T2K7""4g?*DŽh7I,!4J0qRt+m#QOVJȴ@5e`I.H@nR1n.&F-]"J,g$%ľyR ? 
)|CW٥O%7R]R9o#5\,8ur8xHm\U˭&))v7wiz!+Y,!r2ǒZ],O7MT%CYp%X) 4,HW nϗF S[ft]XQOc 㛸Dcj,Dn ~"Ď;r2o s%7)pzfyɿnG;՝Df&x|x畡^+a~0yB/F0$&C:DBF%,EU7-Dk\lBGgĝp >30DFJ'Is $E MLMog*ot-tr Btczvv+S'~@tNy l 3R4 6߂i=87Lq%jHߙQ-LdCQ f #įcàh(wn.lom^%Z2z ϶ĻL}CVY9ZJ5ɍѨ7a u>lDdIIrUͅ< !׿glcJuT4H9NRגur`= MvyG|KZ⛭;{UԠ4-SZROٚX*i=|VxHbg^B#E c&Og2u챿{Og١KIŽKҳ1.;_JdfեC{PS*Q?˸< Lvo. r |~"%O )hLZ 7DO>>,1Q֢S)Tysh`NݥcWU%32'ŨάxZv_ GXǭ8Nb:3ŵ׻ɞ/l2`i]0E @k}2.4*:lisH@ďjSl(yXʪkpt|YԠV+Cmf{mH|mExe a )MN~퓊"p/ܳ2PPƓC[K 35ѥg@c6pN(g9~ j@^*|hs{:Ub' X8X =/6f0Ό6(lzO ;vג ;a?΅εI?ya\M] \nڬ0ݨeNV"]%^A[dH `h !A1VyslurE Jg#`jkoIdp) XN@Gq(/ _g\yo,SSNʶVfU2ytkk4$ -gHΑ-|!c:iꞴMMR= ˻~ewUv#.Cn~PȌ| TɴTncT[`8qЗ&Ch3~].ˈtaVJ@bzm\$?HohFX1j~6*L։t(N/dǐ464SBb)^i(k8!dM闍;4hcʽͱ;1hIVu~٣?cj͹|U%l~783P'T@C19%fU>e4N5)U#=sYKq`AwlʎcJYL(|<;6yr1ɯyp09 ߅r[1~n"Z .hc1 236ktDnPlBtd,weTi 㬭. 0FNK|\zW#硄UU{ދ v6+Z;iBYgkdh%BF;9.̯v^? @HeXU$˄!YJc>fј[2Xf,rfy'ߜMZoB+tu]L*xoGG3 ^QUƦh?|.(X'-[t!A .$`h̰R' 1ح*S1{$fzL:ՋbJL!8=Oy:N8PXb=L qEw\}z wY„vmJO)#FQ^U.cl>*4ZVbV\43I!-Uꡮ1U .>Dy:0{34:2>oFÇ(|Gt\)Im$2/AI`Eĸ1E5XN g>mݤk{݉.[ڮ9*k{JY U`~mVCDz`6 ,ew'l*$"uoM!/zWΝōY > mMU?/Y"\xK!E#&`‡#k ۨ|+ Tf֤Z.DXEt^ҵThxVs[GgޤfE-VK;"3XZle8d<?I @lޒqkIԪ?%;9㭑I&KM+]|MEH\9d^X,sE_s %m\lkͫYI4jj/ "(]<^%vCWNf\pwmڢALsSiemnr”bwҀq_ ` UXU?g Z H6r RV4gVO9DDJ. ƺ_lSKG4Nt&V*f'qG{r{:4ZC{eꌿyt~nmWաh6yAt38pUu8y&<IRz*^]aPn.jm*^oK;تԀYb{я,|vMgebP].FTPHxY̦aeid@ڼw5O#|̛\3qͪbթDzt lDJ`!S1 ?Ɔ{#!+c0^\yW^xqkwL.=fGSmtM#Lo.xL&ќcąA7Im0aG\ LOEĔ5:JhȖ`j8_'qU\eP| xKN" ^*`n u%Hb}{ Jc06̚hRu]4;Sd4~.N+V* y hF 7pEM᭳rdC [: [bѧ^a H&x:&q^g֘fZƗiO9Ư\+'QsBjw;,1lM*_mw*%7: LQ7+%*$LT4ڮf U# vI[pY|se eCU +BL| {oa>Z)xqlr ^J0'0xWA`3CR94;&/:ȰQDqq[X'cSCibʻVHr4v^P66q,gonE~5m9AR1$5ت2K9o!Kz{A5,C6 鋖-!Tاfdg]&FQ]򆡶MAHդnb\@l~ٳ#ζ!R?J/ Rxly}(EMfnQsƦv䦧$6)@]AaxifMkKmY_m$ REP{}8Fi맰Zf>mo,ڲU)jZKv;J 涫'ɄVjF>ҭaV (ȇŜjC|u}rg^o@v&gT57cx0Un[Ui; a Xqu46AykUiV ~1A.Aә,HҔu9UGA;Sb'u3w>oЛʋL/WB`2 &iTD.;w EB'!hdY_ |0qMOg7ސB2sY[eSFF_–`Edn}ę& x~PE7̸ˑPldRm,,^Y4 ў\>{:13:pyȏڢëi&ed7c3Bw&nFcԈR#%uCţʣ' IUNQ0kzsks0s\$b*Pr}mFw /~11wis"1Ş݅T_X&>FSw?j >Ylڜ8|j{πңHh»G.JzՃ7kTUVOrg-}`CsƋA/)@}]l!!i˔?R]{Bs:2i]!e1{-ը,"U $B12n;߹`jYUn<>k$_:3\8O+:ƿ;R7B(i6LHvJ6"f-OJL 4"/i&s їOo75@K:0!(X~,Q.֓Eu\Vggrԥ;\',3JBߥŇ[?@t:Tl?Dmt_w]'_\6b>_ 1r+_1eQ4i0 #[k>ʹUm 'EP@3o*F3}3GWPIbZ8-iSmv\ϔZQGkT^T//B\x ]8Ex\r蘤Txo.p<ƦJE(_&%L㾊h~=,!]kӓhҦxkژ/ 1DB`߶raȯ5_O-р v+K^6IٜYf&N Ә~J>iPǎ ЪZ ar4s$ u~sNʰv,]bVl 5S.#O|Z$#-e\*□H\:_ոS.7*Sj3tz o͓+*_b,/+M.mS!|`s| L01 03g ऎҐu9*n+Z`{C Ga_L1N#DLko31tkȄ2/"C?##+7=ҳ H~mқ +;.%Q_#QRΑ1p(7_D8GƴD=\n;a i }{XƼ+qP!KG!T4%QrU΃'|XRWݥi!MfHCҶ;q+};p~X=|0 N2$.{bNE$-YƯ>4Aawt]D# A\!|=5swz8āS6I VV'1'ZآdUzKXH>HCM.z*Iy4t>UX-\2C'p,;+>(Ql+dyŃR8m,.yEQ}RSFw Pn]߭H#ؠ6\Vl+Y;uq*DJTa1{MFӹrD;W"])vT/?N1*=vR`:2tULD)]ǠU&Z;tsmA&=Ȱ+bhc9Od pAd!a~VBS8',0x9C rs+#nhHmϋ+Ѿb'7eb)S-]N"[p39b ދ7RzvM ?@[GU T =}edpG֋*oǬGL 㵱 w`U BА%yn{rzX}ٯ$`qCMy^(m3k 8}_!̎n^Rc.ˇlLGrk>i@,~AUU2~1dipR%SV8F{]n 6.`)_ T],* $>=G۶.~ÐL67 o#Zs{#t-+$x1" Rtam71)K--UZ_5P]ҫšwFK덅·r/q(fu2g!RZN=IqI/TVzZu(yRzC^;*B?Lju7s:I(X&am:9{x7qBxEFmT3sk%Sw,A=^Q&GSv4*Nn?C| v2= Utى&M!'Ӟ[𝚗%+jRx$Fmgf \^ bGz4 dA[áN KUPK!̮g1W!>N_NAUژp@L.uL=q;V6vj>,X1SBJ&P,_Fl``4jzv#7;/^Ik@uC.UBvg6S-UkJ0C B98a-C: 9,F 6ӿic-mط N2愵)Vn3p(21C:!Ƣ2D' OEw|]VnlQ6щ-:`Xq%%GQ/ԵXM(ݚ`~{We} :oiLX5j2ce- lr&I_Z~F'%4;55l}xhX5gw-.O<.όsiYYZ:J̢;mdvD=i8RrIY輞9r*Va!rC4ΆJ˶ yTV㖙V4 aoy, ZyJjb?bG]; :;SXf-/UP w+Ilú#5±iV7WWŴNJ2h^TP+53iah=!yi1}}xM(f%<朢[Mި:}֠& _b\+EY8[s!]޽ŭ$2YW2* zPj:)a:o"`4^m%-Ϥ"CK_B9aG\c;|aXJ.ۮ*CČGbȌ"Ex'^݆¦D6N.tfҧjbqd^8) ֌ϔlD` }f|޶OtE<*톤kZݵXBU9M2oT @,6ZE5/9ݒNjuVnw3sեx W\ lcs(\g @J?wau=΢X",_)bz"w#GlY*Ȧ&-)oEpWv rgXRY@S^tf嵭~_?9i?jo^㽻 WE s^Jc'f0 x( pJC v1!,fڷ c2xg'izB)H=~#YmU8&Hb 0geK[:+aYs8῝*y{fHɓ۳`>zBkމnY|3*0#iN{YrfğbN4ɟO郰_ +8t'Dž]set0dCYb`]yl$o 
ahj燸^5VȘ}I`c$>~Si Mh惰.m#g΄ 6;NM*/R9+]F4}曇ѯH=~jGYTHtP H,"CexrB>qvn_^Q,O8]7b蜛,fo7DNNn޺pg qqPIx!*HԼ[}^Nr)ANiWi6uuq%'F'ߏsC-/`-0]% f$MK$Р ;Wp-bϕ>#84>X4΃Zg$uyoljC@ ^zbWu*\^Ci7LW/Ճr{hM#ӠFҝ{_e֢vgNDͿdO˦SV#s 76,zÜT7|ͱ$xԭrۡifVa |@`Iф(:RXtyC MUI/" [K_ckOoBۖ45fl>>7o[)d!pĸQ\L"5V'A$1g u㉟Pfq#VW#tsV.2ė%j8A1a3m!M'q!N l?@g`<d"U2.x9x=Ju% 1`aɍf S@= muC;#7jlp> sos9^n2Y W_=yԧWV~+:{-ZZ|@tϹ U+)N^[(iA95U@ oJAqց{nMc5 rxgb |AxT8< |I_9釳e (dP,wM V ,#JVKdvD{ncR<AF;d=(11+-Mj,/K!Z߯ˑ GDA)ءv5fZv66% C Q\=g[N}\3W <_ 7PN1))'Am`fy"s]=Т\lX0x/lh6vt]hSO f\#nORSd7"+ɲyLq3`>K2Ъ:]SL4 q7 ml(qOlƀ l.lď6{o'8y^vMr޷mPܳXQR삘h/aͻ ,u72 {C"֠B~+̳AhhZ9$sTU&\}8?S)QZ-؟Gb|:3Syt[䠉tz|bw@G(. kT>)S>\ڡpOl0D۝ n'!)45䒹 SN˳(+&{Q2ܛqe:1+g!wuP(&/솺<1t4>J_yТSWCѠ_="bt8h&CY8pλÎ\3u;,I^dy@@HEJkJP`H(:s#[PU'gை &W~$WפAVRykL(Vn̻Ȁe=^`˹Sl_^hMw-)!0BuGK/SNpH8|ym!q K7 !E}.U kO=Uj'sX`:<ͦg N5z,$_p>cZ7IAOK-K o'gDJ q5T14R\l/gW~](:Pp.R''<ɁM첗ȠsXF4:Y!SQR 2hZMɰ\'v(^Xz$P,Xơ`|+ XX[4氵wg(|sX*5 f) %xn2B:bv038V)x 8-#K״ mB0Eӑ1|\ rְŒ=it {KhwAAN|]S>Q#uW$rC@Z'>9o9Yg?3)%1UqgkLbZ=6j$`(9j]ZWʌJf8ݓ;v[Z_ͮj9ʃEbE en.,(@ w"\K@Q P" @? &ЄA|m&D'p07dR*T>̀R]R<ΐH GGY&QZ O]ⰷZ' isBt˫ǐ{#8Pwf^~x!(.NM.=]:F7(/8)dm Mk'55=0ƭ= -| Kx.6L^f°K2l٬ZL'b׮2~*FrJ\ꇇ/TZ?|YPnPT}QzinvEn/->7;4{QKPH(xQ_8%DIv}gF̞]lr ݩ) _v,(mS%ʾszyk]E!/WM|n c\IO (QSjW:5n`Wv7ƫ6kM1gScDrq$yf@Nt|μ]ߍ{ A>pd~_>x XEx(zvmbNBcļgboUvЭ`~:H(MI4sZhlpu kvew6$iJZ3fFUeE[5Q#V [)7QgÑ$J՜扻Klp5EYŢxd,!R6@e 쿫qu%Bfcټv.J~};(8kw0vs,d* &`ro1,CwD/1ɕ6fm[ܷbKѴu1ԯ$~6dAJ$Vubf-[`.F(@ڵE{*vԧ|a.hc0šVMIqi*Lrc;|[4$Xi7>ݓNR8P"hP{)c*}IRTU_d?Mxk=g-UnNNvNI>[=vKYUL.X{}L\8kqI59wGq=uI=ܨ$8֩*oU"]y~WQr<.(NMhq 2W|7jvfXD!(bfJ&OMB~"}Y5읭H բes.6hgs捊z_iNIiW>yÒ>>I  [ \IUI$7, h7F.Iew z$6˄4+עzb5Hb.UC:5IQ .Q.%I$D.q /ki7e :s #=gBɝ880 o1b䴥ltZ'їp*GٝT/JFZUq B>9:<ҔF֒GדH?,mUqJ|_Rm)Ck/39dBL" Y- rl"Xnmd{8 )Kb3|;J&.Zr$EaA]bV]|n$'҅d"%6 d]F1=a~Cɤc?O1#Pf $gRh*"Rc7M6!6^@L9 g!ƃ֙ÏѰ[k|3BVCje˛H?}pMz f' G =ΰL ֖q@_)c h{NDgz {4_j>sxV_Nݎ4gY;gɀ %l+VA#Rz?ztjmi 5plX 4hXM"d|>HdHYpmI”Wl}AM*;I"6TKZR+x&p#Ɔ9v vQݖD[zbLE!"i$% {Xi d9fVC1 m7C04w9o8Y,2of(ْBmf+dBޏY{܂z ߫oeO&ZHftIhOdFVjWs@ùg+q%9140zgy2= UM{6fwÉ[ :]QnQu&Ue;׷K Q̪TzTޝGO`Cņ`DْU,-WУ`cjh45j6|\y^au4}Bcd r xGPZ ݽnQAрCZ HwlֱwűT!U,U=K2Dcqb~w 0P%*=|*; ^fczpGpu{Nˮ_)W*DoPdNDR;[buӑdyG8rܩOYOȧMn55\Gb99βNQpPcDGXZSǰ:0Q>zFo8B+n5L5cF*у%Ok}~ĊFf–j7j|#$f7AWS7kQ&DS8ta(]%J'U3s%S& Y|\/\ǵ\@" %:  K?ɮ+ZmR]f_gͳc)P[wk%[KbE@3KQ _Lyxf}c]b+ڍ"M2M-E*'Z, =GGM4!,)c*Bah8%zA+XH\QڵY"yYs~f X 5 .bbG-M5B`m#?bH+A.WCx/ |NMj2Ku7K:851Yx#/u1B&Oxv؅8hW vZJÃB%ή< ҅bfSq|G TO8+ςVrO 9I 8\Z5IuBojsktuLkHg%&I`;@ǀ5_.t'[ @*-m9UJ8e? &l]2z(bTxRUеH,=Ɯ%nDLSuO՞\b1,JA: 9Wmkp 2sϨķ9ߺ 6~QM<eAICQ@)#`(흪u|7f{8[R |vRH6&|D%Y>s, nAXyN/2V}fsTpGfSh5í.]~2Xd㳜`6gT+<>HQ,G`+PebA(A3eD"0>ڪ{aej7o=| \J՝6-9w,DN&.PXgY 9he~C+N >DU66t^sChOCFfxG4*9ul+U {n<,~uV׆t:7Ʉe-\>Mުu΋Ϫ =SEz(,e)}57*Xu=K]SGp@[b3"GN9`W1HtqFS^ Q3R~Đΰ }}%igS8 5k>n'\mG|צ)M ,3Ӟ{d' L0FJf8*(=Ӳev)M硟pxDa}U-ƎR`E~sJ".MX 5d^F#H^Uսs~-әv66 WztAy{QOB퍳 [RgIHbl7(@,#hN\‡IÇEa G >56K4iE0rr¬uPCRvmZ (#cVأG%P aFO#Mf+5r=Y$NaXpi]r;6/tܔR P)\G< vbz #2$vu=l+/Q >*%oo@4 ݛ+lp@cmB3:i $9y6k~BZ,VL%f6%gyOv:|Φ7^xKkppL[! yv)/|g!Wp'OU.9y/->-,+M2o〺Je p#CMԤmN ahp\&VvVg3#ld: @Pq*FNM/ݏTѪ_~Njj)qT^ rlJ%Zd"(yj_@|<_cOM ۥO5+X0cQ4TVf<)Ԫ8GtqsИCMx辺K=cfi3k{@jΫ1t `q\uK'^n l> Z}n'e 5bnse ]$cg >#URc7֘q+:v3ZșUpf*b_a ?V:D%l,63{>Ik؏ʃ49T9P'Cw-z]i-sPS E*<,1kށqx8(!evhZ4Q[$֨rz)yw=[BwQ-c-k~( @TM.)α:PjH2v[/ fM`L5=j!^E4:8O`7}}Ti]t'7+Z)- n#4uYzrG GsҕԴT/T$5}Wt$fBF?}^vr,N)x96#QVߩu7gOR 3 ,OcuHϽ D$HZs]wqb>:__ ۃz.0~Vd3_*عvp$.~?Y$j}5(LN"wt uN^'_rZԈQs0L?:\|ǺuYoKgݿ,Y-i4ۮ'Cv&` ʠ ƻz{7> Ip噴k~uMorPkVIN YsӢl&c%⥽Ğd~bY`m~W1a:[U2TSHJU/]0t>.3pƔH8ٽvh_Z-L^Upw7'L{xe(-x.x\}Gu$Mfh2'a:ʴzX_b@^kb%xV ] n-;ts|bC?ʫ ^!>y 2LBf-qP^N~.^*ÄBQ"o}pHԄNj1{o$Jl59;).XK;? 
1)Ϛ˙ARC srQMw_б-Kt\g\W~3N,_7I\h2x~4+rTJNq(ԛd$7q]ø%]7'yV .!RR.HTѦlk L! b! 9`H*׏ ;|CE >ݖ;t ˒L,x3E{D{ݏ5/jw:Sd1m!&^ZC6f|mX;qW^Zzi=W]J l55􀵞`_a[@v\~Xl\GSuXCxk&g`Kmd;0ԅ T󿔘NX|Ph# K-$Msinnx݄kb8vf!`?m+_b2~1lm`1D"m?oGbYYA'ccY{,4,hv9'|4%h DHN u"z*>)[;`" (d: OE>-`c \bg]3Q~r8ib(J?R8(wyq#z{^&~mJbBv(IBMi0KxuovX3CzEW,p>;;U7%2'PqwY8&Jr2j(Q+luX|gs3-p-rD f?@~wېHk|@T:@+5pBc;q02u HDžN`jpc%PD6aMy)|JXp1peh ٵ~,4)CJ&έ/yĕN1 rQR{]o+c\JBE~/ n6>f(Nlz!ռ P{ ]P{>& 8DK  g=@y>jAq f6?|1% ˫6b0pԣX\fcKqރRK ;7c̳1*֭b?u}$gj\SO.;BHh^seċ%r!X> y&y̙5^NjcLJFl>.ܶY,ل4n^)y̎K hgnwjMeKm9xBiNЍ>|Jvi %WT.){-RZ2&Ԇ96hPFh!u G;.?OK>*4D̊;*4E Ә,Ӄۡh 4e]n!l`9]IpG..+]0%gW KoP^ZcfJJhgp&" #Z%*WƓ^shUY2{c1쐜f̫W 7ɼKѷрv\2 ف", v]Ä=DR}x c\ U'bGYNȩu- NnH |&7 I⾌{~Y@hB;vˠFA'EJT<ז⮋}Bp.%-pcS~,iba1us@B\[퍥drcCnay]~Q'74SԈC_=:GxْhQ5CqVH0"n'+|&&*(xkmD|7Ȁ?rθ.2$DTk]*䦾hH^ٌ> Lݡ yWzJ5@%7Nԏ:bWa냟wN-uBEҖ^QOJ6܊D/*"Ax_y]rBI Lԑ/fpZ4lw8y B迢x;oDy<k =+6ӭovfԿo닜20ZQ^ 7 ͰIAjeLGA wТMk`A2/JnlHq :{sApz,ݳr J:3z4Y <c,E4įo9=a$]dd^H X&7ϴ # Ғ8 e T 8 X#hvZhMTrb쨇5xFÎT i&# *C=/ н*#Д5f#YБv(u$\ģMRJT0a@[KH7mY2pś~du@r2g\ɂw6[һh7|݇g\A( P >rYA%&:( ^yT _+"DT-e3z-g+%է܉)K tA}N5 khrjjg>"V~z+ |ՔVĩbPyW>66Gx]ymwv^qIiij9` ޴zzU?}3[#wĨ{%-:B&!2E"N9^Ԗ![Ɗjq2?BˆZ1sr tV ڟYCt_őev3vo:vSsD2qGw3 #K~p~BZVF⾞ch kLkX% HӜ@ISw?Ѫ)DnQC>D@",}ܣH.Zq7;8ܔYy> .~MŌP[Kx*U@̱TI!LŜ Rľo4g-q^uwǍnz5G9v3"$RdS[!wN99s11nAPd o[9B_o30T.`ͩ^N@ddL =˃QbpO'n%ȦT|phl Q4ԿiRڗTkrbPV ~Y{bi.XV͋ +Rm+zUzŰWXY@\ES*}\M:#ۮP$Aa3ۡ@B࠴s5Fړs8ha|01e=JSJY>:58aI/4T@*z Q%yY>>-t^5n4,E d+cdvWzNlY K`v`&,dPPY67mNPs!PP8LyoL({I|[6N% D 3gY6E ZQF3Jsj%Mϥ- zrO=}Kб9Nد,mw#!x>}C`=Yi7\fPY֎c:խ}NPU݈))ud}"}U2 Υڻ趀AU? Y"᠗ jpujƉͷzS<R#g GyQk"DŽSU'ɪׯ!j;im)/D yW\Uxkxjh)"Ŏ)G'Ш 5$gANuQ59;".K-Յ?*C^ ׼נ|Ae@$r6] .tqD%!vO24Ɋ*&z6'=zx = #&|݈e|;S^,483#S5xyZx$>sJJNلaN|y% m/2a7U!|dςe:;yg}4 s/aSF$]&8VU[ND8]G7ts{ YC@S }Q`Q={v0s)GE86 Ն|~YW P늘 $V;3U o8WxJo8r4b ~a_H% c2*n^]xFF9Zq28P^n=6ւ6_7 *"Ëw8|8%ROXs╾bnƫ9v=4~47[pm~oۆ{HSp$y_#kG)'@(* ;#o3Q~ɐIsA4\@7z4U^5.4ei"{wB};FLFtla6<B{P¹Ue"Kc֐ų&q|Usn2N/@=Rnj~ʼnd8i%_@t2u|%_v7:͓Q1<~vCGƋ)2^KIZ*DOdֻUuK۫v~!*Z{"v+2uOYƿ?W);6Ɗf( ]̈́XӉcgZ>$ ibzrnc].3 ֣0°}eE T@(H`'։Y\`GƷB3Yw0ٜ3y0MPp5ڼJ,$VcdD*r+%1 P+㼋_=ؗ P~oe/ 2wm՛OHΜq LPHi^7S `Xoa,I[1ߌcxX߯D]}UgO6vIA{3nR3qTϐq:Rt5QP.Z%B?r9mwˮ,s-r,)obˍa[+ߋOþpVodߠ6x_ b}aEP7-N1ĩGAR:9$ICޝ3$;P+Y3g x%+9m MJc.Lr:3wZ琙L`>i^p_^l 1Ȼ\P|jTB4`<>_mQT%4ASI>֊zY.< Ak13B5x?niAUQ_$7~G^'db樣KFۼb_ٞe7Tj(c:=p+ y A!=5LS1!ceN*HHnah #mfN+;̨^Z#S!9m1!HHxj9oi]=7TO׸GmOj/\4Y</T8ϵGs8 's+߮l( '3|ƞUdЃK= :̦ FdpH+3QeO.N{6q6㼰҆:=y/sN!M|p հ߈M7y}#Âms 2~$G+o0Dfo7 QW_, ۄQ8 E-VEqrCx,DLkv4K44`!X~%hKPVz9.!LDTdEi8x"%9۰Z~;5V4׾BʣP l>_jLPBB֓N7}resKEM7bi9`nMP1R-}ܞ}nCegx)!@i_񮋔ỂItdCJ-8 #- #":]&?!8mD }z~$ˬUZ@YBji :'X8|d7h$6jmNDU6ĠR(uۨ_ 2lYUWyWqX5mY\qtZx焹cbw(Vp]`6h*nH3W128 Ѥ*P"h{HXBȯiFQ!%sII\r;ӕfE7AHį*(WF̋_uʸ㍹@\sO` nȭA܍e:6Jk-|~? -D,1a^ ѡӽc #swB94EoNaP*jZ*J0(r>iRqF"_cfṬå=T;W|]Ң_Zpi$*Wu[rm$`"(I 0JHv?#$D$-SZezE||,,qF?-h|4R;/b#rR" m5|*U Aǽ==u (-*Se˦iv]T.+_b~z` >f{TԍJ o#| us87PRǁ.Hz)Ogpy^j0 2~\ ~4-Mn$Ťg ƭk0|ma|yGaL=H-t Ԭ>Z;}P :j$81\~bUNRS:H\ *Sc.ak'P Ex$4yV1"[@s"u1C*H^Vex&h: bә̏OKG ǹ.f5_9+֩x0/_rdQHڕ#{/TTl\y`w74}[ >ʢ s^ xzƻI§(:)yu8w+?S׾"4{!芎jh呝ڴ0Deb d_d`4C$̾bLZs3aѡC^`CURzt37\ EKv!bWa*F0vDr%1'/Y? 
S TIjV"Dު\O P^Yzܑ՝Y<G=pl]G誴,D8OS?TW}`frxj撃O0lO}Ԡ=+iPՀE`?t3$[dxz=Lѩ`C:V(d\7IwiSTKBT)hݘ<|W2ax_Q|K7IW^ u)^Χ_ԟRG]Ø'5(P,j:CtUbXjg5bJ#1D o+ N79 I\,9Gܭj@O%^<&bMxF'ʎ$S`PH jpRf-xVeQ(ki襧%l\ho=w6K>!X{<-[[iqZ4(~ ۹:㱝0/O~F*V&H} w욫 (`[&8xc.%,GȐM=8X1IUVqm(Z~ \͵`SB3!ZQSxYJ[s=_L:sJ1)H6%y3Wm&-pv%> $Vп JzST p`X}proιBVN^s,7Z0ǩ7j-LE6@LC.h5ϖbpn62bfb*[Ѝp_]v& m2-.gCA(ۘВK@Xy'Vg/>sFSV"iY⺊D̿c̩״%И uญo pHZ]+_H*~(b\6Vn?<# smOa2NR͇W8+3 &O~w/:DKH)Ix?2z=eR6@[ef/#gcEB)w`T=$bKM&+}z=6)zs7H <(R GX)~0lY'/1uTA@XL[z#m &"c-ªjrunn%k]Q\?p/nK#qÇpj||IٕMb(맓K1 PO[҅ :+Y=h81zl7'jPfbI<|Q͟ŷ>a!`?>j](b(R˼agEkϖh2Z&$HGa7^p::L)r a ,^y4lc vK察jqENB( RY- A_ B% "Ǔ%: ~(U%IqDɶ4aA^ CyQ/NU1uLyϰ:du8EWKMˏ kGh*8Zh}3g\7 L 'O7S^Wwrapk]07nhÛg[h{ǢӒLiV !}jUħNqLN \ʆ3Fl/$(:.PoWW*rw 3ow0kՈh\4dxz rK UIu 1OfMYW%LxLP| LHT໴x5.7%|_(?Φ*^g g)`-S f.,?,2]8YbU'h>>J7qó&xS[olj HľxI؇ς<=8K8Tyo 6jxevq- w __8GynLٍIux JG 8y~%AQ]><m}QJHr ߺMJ+ls3λ(?>9P@G))]`cIKaFjnlT!8CL{u@=-y4MTZ敃<;"$JJ8^Fi-̌,LŗfgtYXwS, H;r 7B`U@ÚLޫs@Ur*{iH:ֽ;%\^ϓ q,kJbX1# # GM&mrf"n<"f sD~,w}h>%2(ڝ<1%SO"#b8x~B[x5Xiv1={)Hwױ[3ĀYzt哭9Gxʡ{mU'O7ǣUB<KDp9sL}3x4Ai` ~ˆIx/Qt/;ߞesᣱZd&Q:u;G6H8Z|=k3C(3ը y%{Fb(J|S¨'MU> Z5B>XufrtatO ږ51 :ѳ0GdaF#):22%t9盧÷;-8 `j^EU"#4ҍn)RBeK#?瑵ZRp#7& \_S3_";~ : &/@yBt PJ!)~?CݺkWcų=4"l^A^_'I6园E!KƭuI#nm$jXDo4M bĥ`+5d{(C8L8چ,*dmݤ0=c$SrR*w1/QQA& 6Gh^U5GHQ9)U>\M(#qW8GV*L.;\q<c_V  Zd6TP!Hs"*cY7RrZe:2;d(@8F]vji= SNoIRլ -Baf0kn봰!']gsf`+H8—Grǧ$x8*vrՋy@I]0O+B4O8K/(lf`f‚p:XP..AmozBZ(g-r죕Ae?~Fģ/jJc>cXzbj{| @ MB;P`"|6VcO$58SJD-7mubkmh813EVh/\C  ΃'!T1omEtIhqQ ؍e|,J%R'bѣ9p AaC{ي yR=i>!d8nj v863Coᕷ4 q+ `6۰h"L.*SD PJDjV}5Wl~WWR,.H(vF5O&Xh&cE,J\<+2 Qeh:dBha3|:s a;DJ8iix!Jc:rэ~QBVl$S"&BH[RyMO8\ԿG>VD+EpeCyogJPӱ 4 sM"2]]z-FZA`1LX"`h2~R}q0`Y_xD_}Sܲr|/S.ZuyJUEAˮ ^^ fBB,HdmʫE,6/N tӊzO5Yܱ+9]t8$IH_i}wSD z킔wM۲t)m2LCsq<.c\u݊#Zb(Bmzgq)= IKp\:Pp,=zSqz@.:C ڌ9A_竷ML˻leҍ A䝖Za fKlCG۔ÍUiw4' X\W539?`&_e,~ȹv\Ai +݇Qdp|og_e`E>%x;C6*HI*. Dd&[` 6o W^_}+:,R[on%ߓ{zK,z>J^Ж ):uDe?Zd528Mt͙#| P&M?D8g ^ZOx7vW .[BWSY Gӭlh#˞+k~L hYBԑDۆ w>(ZU.= '4ӏXIL2bU41K2ख़f}F'&,q{p!W7YWsgM,޴%Qě.Z,Nsș|JOCl`Ȓ " r^)E(wWO_\[[ƌBؕ ?L.M{.~1ƅeO!̃rZhgdmI՜װ u#,7q t7q`S^?'7EP7{lYZ!z,&PiWZ-ڔPN[LجcWp $ /zob$+wU%mr|icmN?: aQ6玲aMA=L0C*t;`?8"shQ 6P9TAg#/deF7jJ} )O)}l.FkCL$P6A-f`Gm!ى v Mz^׸L?$% LAkJ׏Wp$c.jDI#P6eVvu/ڈwfj#  ŀ5Zs/_H/LP,`xŌLxC-Xod=~2z'?TQ b { 8ksT β pXq^!c"),&v^-1Vu' :IǵS $ȨJ%¦ p r+} *OVa.kN )y. 鷵sI !ܗ[ g ArgMb8 bVi0~Ig`Wp㯂>fAP9ćoiX*M)C#p\. v-P0a֝vo)ʮAAw&#zml:k-6!x͍Rz: 7? N9ui p5'AKLbyݏI?g~Bus~CrH\qpAQteFfZ\ 0d'SU3:L;'OB>=Pwm(J3cް/N;ϒ7nfjI!CW< m:* -X#Z =vF[@K+fh !AC]o3#b12Ze` n(xsP7=# yU3s)x^yꓛDndnh;E~[j 5S&1:5)^GKusnO)5:A7m͵@HžV20G~$"n,[ O!#MŖa_SOg{{ISg>V6_8t4U<$2ή=VW#nĒl Q Z<Ǔ.2U cHpD~(N d;"_i\fL\J AKYm~CSLܗ2.z\c~O!L["Z2܉Txl/>G`pBb[J `oPWF԰hCcs?IDr4#p6A-e 7`]O%ow(/=7I,juAs&p=Ml5EZ#2.TtI IGC|SzK P &:|o2R{c)T:˚*oؑ7ʽfE $+g``)NБi mΗD/{5r47Skk'ULa s н;z%Y"r[~Fg:eƱM(zJkKgcΓy=İ q )@o.zLىg#qލCr 4C ^I4>Cʖzlx 2)3rr#_E @8%c{}_;R< ^~5J JMv|krs 9U5_ٯld^d;jy}W:"'vMui9;.T(-]~7Y+E|hW)ˤ}L)pCP~Gr7Y?׀YյrtT3.0dఱnWS|P"+Vr\5^S"1x6ٹ_[⑎+iڽ&ŷh[NA@) ~DX#:)uYEm-%5lI=kzf3o.8,7'yǢ77 +ҩC$uE E jn:\nCOaqYZl'W[56'! [ E!p煌U%z|Pt ޠX8ϣltbQV( H"|d`5DGiݛǠ@͑*YԦJ^Ƭ_T`fم(*ZYCEl 5/rӘr)Å}v-+' r(M]>?455葬@-ɷ(]SLϡ/v,h%2GFeKU6u^[7|4R [L:b*~W_"- rչ= Qa-XZ;}{33Ó/I=֯IJcJ󢔰oC!_OB GicI?|yeu푾'W q͌n%O6<!p2,Z=߻LvpϮ(ٹ>n wPh~ %{x ܟsQh72g%Uw6<,`Gѡ ;95Vt $RE)E<;*8[z49Լ.qo8g2oK"b{u@ձfXc?t2/t0 zERD "WFB*_^@4.hK~LУ]oǹ>-7YF˸5sRRڟ[e@B4דUa2BFJ*ra[)`1;;JmUv Ái-2#°\W,!hm|T8\/icpM޲+cY~M{f5v8: 9;ȶ|CtOocM|M$!]2&r9XHl,U5l}چP$' RL12ؽ$=ͽK@fWzހ:L.' 
5xJ۲nTӭsr0w#ԿN!띌oCaJKjXA6Z􍟿CwNdVHs( ZA"o@QOB7.0Fmo#)!BӛɭԒ3|]B y֟k/8AGiaw?Rӎd]!kvtk$87v^N=CcyEPHr[{@ɚa;F"LL$C (iҵJ=qfQFAO8}-2%Ic>-@í4/8*УjzSIsV  Rޥ%ϩ|DքR^s= eі̚ 'Kj'f JU\ 7@3Ңl5X hs]߃/vF }B}~cUc;rT6q)D>ghnBDFV3#f.9';ZԃQCJdGr#&,JRglSdޖQtwMܸ :U &O@[6 8ɠS/&ܙ3E!Z r<$YxL d*!&6ʍr he\ $)~n+6Єu+b!XD@ `8ej&Jp1?ǎ^َw;76`6ˢ$9=Oq$Pa“1nnk%Uܟ5mcZxoA" 2JZ(C2W8V>'̀0KUv 99&Sx8/ 3lŽ: stWo:$0  #8+? q;$u*j? m7gcO*˲u3·,.odZ yw`,4p}Y{5V g!QmU~)Hv+x%Ѻ5EԴ #fN췆Ymzm0A&}KވulՎ^+m2f\q q׶)ӸD^sEcԁ]/gS%}2rEUb_bQ2 ,c!J/eW3ja,t:,rWªt^$Yl0ck?ޠJ?H=@jo=dr"= %EͰ*M4\=i>.ApNJ9]^BN mtXѓ¢S Ҡod/[.C\ySŃAeDrJuobQݢ ^eCR~\*`004vv7^puedm.F6꒲2}%KRPW}|}U>oXOPCIk$K=#9$!*קtÄ =iύ2H6.B\V%# }_ ;t/F}\*1 |*ӽrVҦ&j -DKtS佂T!I)êBz1Fk.1 N}- X9iܢe"G5c$ 3sTxUoA~ƒهY'5Υ uW(kqZ&t灮 ռH Ms&p%Ê`pٴ5lb@!ˌ9G`5!PAw*ՕlK 8r5wr^5Du8#_ҭ˝'lU"s8h~]C SۢTT.?[w!A#{{yZyt]S` İ[p͡:L;Y?Asved  m^qRmH0#cSGQjS kndwt&gp|1)Pk~Jla@gܹ؆Չ.}د8R˂yԮ3֗6s.ѧ¾ZC e,M%'qfpَ2_*gFAhek\W s ;$D Q"N9k=&nmd%ęWN z٩pS|2owbl#t*`G2ʶx?[@%}!efӯdH[rWԜ+y6>ׇZ?G/'!|L9̓UOQ?<+ӄBD!XͻN]]D2_dn[CQL*Ȼ1F{'OgT`2Coh1F.QƤE'ٮ/ id]`C*G!^oAgo$fXd}". lz'"7[Oo\ZaQ WݬKԈBWR&A;CѨJ9-nݴ^S.Hu7=!hJ%i#k@ݹ 늆X\Q wlbX v[ o;> ^uMpmТ06gL2YU&ԐfCKF]adˠ,gk3Ġ^f:˥ R,aa+zyu0ijri~(Ej#(YqOn\ 7C:|~IGWrHewoA8+XЁ"XQ-RnR3c-v<@aǼ#9|ren:OAUj{AAyfOͬD86wF xLDTM3D%'Euߴu~J},|ϸv@}z=4Tsm0_tv8B&7W*VC?aO@Ɠ]JpbjEg9~AJz{³|ϰn𜫚قdu:): ?AcR!"[;hXd?jfj=u2Ƈh!p#ZʹN߳X: .,[&XTsaٗuHoJr?¨:2 g3 fl^ګJcAD5yȼ|’CS#6+L9aHPt wUOK pM 8<O\9y[[koWcՆu*9ʜnR]n 4oiUHO\v#g?B".řlP5H|!w.|'}n@F&n O~OUA iKZT8IIi>Z^eezo|:o Qۜ+ ,63:R v8f9;d!NޯuJb-:a6<%}e#:z-XV1 #+W<P6DnAՂT<Ϳ <~dxﴟ1MX52yI"G[*l(lewdFi`PNQwuOkƑv;~In?-dy (^tWlgFssħn+>$ \𙨣GRfѻ%~ D!cBXC! !`4h0n~?`l)Ȭ6>O>|!ǽLҜ-r-|-Ǣ=lNc%_)q?!rg]1U)DIZ ܰ |T{jbM_r v|I>ZP+KՑ׹7`B:y`a7WW_>b Et؈Q!?PS3p8X䁪^rvU /aec5q@p #ɵV"MT_!p .+ZFh*| |K3g)0]C1$z;,5c?ܙK]>=lT97g3S>[H?]v`WT2c0.#&ۨ t3xɬ_u8qmyHI 59B\*x ?S3!U2*ҧcIŅd=dǔS!c|lG䙒07Hh{E5xyc,qމeNsge/a@a%ے!y]uرqC!vTКx[BݬVt"\W)(ajCe{ $ItJe0Mlآhʱ!ۙiν1f3tI/F ʪy0nA?ѧ|$X͜wY6G*8^]0\TPF/[9V 2V t)O3ۇ6Jv:D'A?{1\9s6;զRW"E/)h g_t2Ǿ^'lCIc_9낼&'tI wsP\4!=]:n+S8Ts15io <<ToTZsUN,7"paZU8*%G=^GSLkpjx o73s.purJ>O_y=?P2uT C:!?YfX} SK{qDlk4>(^s';߷a# b2;9IPjnHWL1F̿$퍈BJ++M3{sRk۸g;zJOǡ;Y*dR'5ā$> 4Ԭ)\|uhnP/c:TgpNy ֥dc#t8G[WVxzdb;idŁ@BF1WT~Yu8 BB4 殺;zKLPfTlUƴ)ܖ^!"ԫZ{dQgsnBٜ70cp2}7?hJ:"%j߿ij ^8QVFKJR%06^ra1hT AWo,9&FHyV<ɴ U5WOWp|[tMQ/!{RxYs,F1tsP;?y>άvEHV]䓼UU QKOF=Boֽ XXnzKߪ r"lk̜xe)eP ;GQ荝ZM 8 >0SSv\< jX`cU(3dGzMɣUsGGyl`JO܅I(VrAAf׫,Zw>.wQtdiB~~"lp'<5+_ONoWT񬙓hB9,4UE10<z`VpBY}U 7 j~$-K ޷l2䧭IؠIق fGQH!M8i sϲ\|[ qlXВ@73IKlGJm~"qL|ϙd^W }/2x%ϱdP=/^t6 2\KD=d/e9~)4ĵd;pAX6PT+83iҁJXLɻAPeu2\X}6_$:V,cq}$+g|h/r )`h2L, 8SrV4zD/O]$n"zD`f#\r/g3C z誏5RqL'6+EV6q$ ht.'J~`HhvOx\BQ&79#x t9IY >ƣ[Ѿt 94-ǶItmOЁhKS5=HĆFOӁ$YEU,H!@j#My̧Ffv=uٳevQ /{e{hHoRvGXJ(Ii71"&)!`wuE6. Tshr]%9aۥ4pxWd-Gp˪)_6[ VVԆ^f\w-vtˑh70b#nK R|7gBОo-ح203倮sYR0X s+EE(tT^vG! 5o<(_Y@&'=8X5kk"D)BE7 B';[U 5 i55i 0~k_@s+ތSGe?>YUh Ey%l2p{RSVJwXNwlJo3dP|LIOV={-Q 1yʥFʲ<&-8M >qkI""9;7N7q6yQ8/E?Ee,_C]j࠙ NH *p0IvF{7NqtZ:㵘9_VrK'^DymQJ(ޜaB{6$5!/Rf4g {HV~yl2/4OopZp$E|3N!'պ@(Q&}-؂ ׃ RU8~Uv'5pì]`x}i0w۩$Rx8C\$QA1(jxɼ;Z׌+Eh< ʠB>U0 NeFM26$+la$>` ^#b{b5XE;MQ}WbhAPCjyw$˭`d~ L-cn]?<28zݙtY[){Y gR=Y*t1NO$?<\EK/(,WAzwr'7?8G*`A` Pİ>J‘I{ ORK* *~C prjZDɣFbشW0HA:!ݷTEx}Q'{F52+zImv]Oh `}O h.x{Z2|-!%>ЁAuNafANDNTܪ4q^Mi׵pvzYs:N+BdqHseTiP?grDmHYBkqYc|3ړ#j[[-|5uI̚XZAU>#kI$wS;8,cI9~gw:Ѷ'zċ&?#z9%D#1*w챏#! ~χC_.s yPA2!И{ wOiu{KYz_ nP6X/Vw `,X(hۄf,!k`rt/"2Ѱ\ T=.n_e1H|,0[)& ]y[51AZM_Q|,1h/ZL-=~J|مP" ۳D:+XG7ٻj/&K6E6\prd(U/7tf|bczEݦۭ-Ir]+B6J"[TNJ6a-zr.?Ѿi6j=%Ӯew+[[R"˜[6HU?^J|  o m߅7TV{'93d% `b}[!`Sԩ'S X^GTB\L-2ثYa֜ x!AAC5s~7pp.B&ԩVFC3E,,o4:*Yb+M|DW݋uY~z7{c/t&- }9438hntx}304ȯKɳZf F5K)EsxU.o  7 eam=vĝ 1p\~&M=VEVЖ+n! 
0oA4.:X94Q>-!D;t z @4i`D 聄W)z{1"`v}LUBz@I%B 1*,Wo9Tq27:T!\6*d@_U+a)swk B,mxRVs. ̛9ukDe. n߶i\EWUkڼIf(l\7J[sdu-ܿ%LX6x;@\vVYZm{N{IS9#?ʠԔjnh"p,_J0LW$z1 c(!Sޚ}pXklOs$acO̍6*'5΋Y@ûbfԚ9g|7? ,b}7b]@4]_XWϳA sHӦnobņ C*殏Kz}O;(pFӾ蜝FȚ9=%o=>-…xL(U6%rD.P9r_k|b (C[E?Y=B*s]C&PY.2. gXD jfil^|^mwhn\f*C)gSpKX!8 4!Qglpi[f}VĽ=B1Xz>ލ">lm$3'_nfEȃ9:`GxՃSb|m5~"KaaF-D57)`<|#głƤt|px,&1X6km:Jenȼ =.$@sehTqD|}"\fc*{edܘlAƤ&E⠜Jk0̎A-zbin ذnte\b,b9Žv*9 2G8caLF-'28q(ZƊog6iς)`@j P/g\:dx`‚g::w˷HG2f^*ٰr"L񐹘/탰< CnZOđK|%@;"G̓|EphM[2@YɌ׶{bùAB!˕įp[?Fgi"a|'AF]% lfɵp5S ZZ2K\ZUmmG{Ь9q ō o`C?eC_{2/]W>P|WCSScwF]vpB*-]}W=SYa:nGCGj=Q񖎁SPT+_ ȷA[w/=FmA/bra@Xv ɉo+F{_Q^1Z7 ukj@AfƬw'iYcyJr 'hi \<h "8#v6T Ҭ:0U^ PK%JVѠttK tq]%z4o+mX +*H> {˻2_/_Oi{ aDCFpfؗC<mh-cZ?H3ҍ.Ƴ,o{HpA{\ߵQeٲ4q} CEQv,~ 9iNO^~%a); UBӽ8a뢔ۗ{]MBku @7a 'ߒP42Ea&e/qF-ޡp_^(z]9-LuWc9C ./L0&1+ p JiiqV*ŕ_6sM5XuvXrm,i(IxTy`4tU,M&Lh_[EvKS {e(wIԪs(Д[M}BAAUÇ6JLEt22lZh-9|e` +js9$wD:pr3FknaQ.ZĞ$b{XJWo|:"")2ע͜i7vHIL+;;i#p<,fyQo>^Ty|v1 ._-ͼ6D`5Ue챈?P%;WS@fi@y4.=5>y˜:  $)KI+kd4\6bx&n P/O^4w M"6 :r{V it'ǃ3wT*5}'4D>G8wXt(%*~ai̍/\ݱu' W̙w{xpU~Kyf`-.uLV9qC2w41n09A9.p`K tV κp .Ϻ`B N*sysV@otqbTHO>؊F{S?{6YM ?:]:n^LLkؿF= VoKOp0uЛ=og:zzN/h1y&h4n`9*[uUw?ЃWu`0s7VAaj-)]JnH o?;b-h]Ba{3&=mYI+|XSHpm+Dg rzw".v7J50l 6`1C1;ݾӄݼ;Wat;Uy#7U.Q,™.>sb>qw9η@WK˛D06%`k1訟,5@򹕈0dd>5oNS5/E_HTM+6Y>1+fYw*Ydwk^< *;mVREG 6DcOEƮm`sB.l-6c앫hA7aXo/K4ڐ@|cZH02#`E*mmqz^`A!ꨄI/)Ti۠/R@Tϕwcg8#"6_/ob_۪BM_/AqZ*~5x}97-}Jz~G5Qj\/KvUA~-zg j,W#}`?sK' ,tIw2RPo±}I5XL+(5 `>;'ݹoi'ΐp3R[+}$S[px N!;]+A,*^/Fp+K0|2d1v.fO|>͙cI傰8/ vX0r2&հc2q1g7%G7+a;/WY$fUGL~+C }A9S?J,(q}&G==.k]_ZJ1Mu,Wic ܷ*b=SiW&hU0%Qu<]q<6ZLޑ Ďvl@&I18:x,,|׻TbC=֝eۻY'+(8HUN Q뀋/2|F UQ4:&̝$DI9Ȫ53κ[2G!@}CG*Ep1>os0,G,;ٻ mp߄y m_K,K qTpϡ/D7(^ @K.v<qsIt s|sP]=8IUe?CлLfaEN&yNVw]<Xz3I9MҺ 1a=<1('cJZb, *]5./M$}~Qҏ|[Gk(,IqzgBMҚ- )nZ.t+pl+cUZ97kOP( <kz!q0&u⏀X/ٕ8#:}&@J~{5q5ՖQ0C̉gv㢾f;4쳯8?OGȄ! e1AꎶMOqنK%I¥8xx"T5RGʂn U)Yn۬@QsC2T09e>_\2g-p(̡sͼĩ)y!MQ|ġ")|Ey{ݥ>o kc[DvnxGA%+h$OcڐEm#^zYPѝrB`yhǸc 8:﹒Vq-^{gnۍyMNK]+PJAkjnChg jpq9i"Tۯ-{ptOvc'Qa!y$x@fq)5UP (Z:S%;qJ >ֻߣ,gU1ihd=;f n!Mǩ1yުRn[6/ yqX7)Pl|&E v`U0oFt5Fmdei>5%`xAP0R۰fSɭPӥҠ2GD7E1(^R즿3"CAuSսua 3`X'r۶g+"ț;5~"04zUL;&4kX 9_r$v9~ ]Oړv:5?XzջhO GmY)?E;#I?7?bBȍ6({F^L艈=3s \NMcƑmEdoTɽ)'<;7JژK;e$tvf>:5MJT#YFWK8v ,M@RElo>A!h1Ub[RiVõ@9$ W-k&8Y77I(GdCp&A )(`-f@pSr`-^J:py<r<tj tsh.W4 uuFxYgh {:ɋDجp 1稂i'L!xeq\,-#tƿݒIIU`sI+ł3l(1 *xO,$BH dGN8|5wB UT}{VPc6-Df) "ws^:ǹt˥Z&n8Ыjn g^zEv?5=Qf" ׹UZf9sAQ1@F Bx;^#I&χ'Z}˲-ڏ `}Z6=5c~˵u4s񹴵-Fޒ8^:߆{9^R!gA" 2M4kly7_eDH'|.G$p8e?g{݌T KmGA .Ţh:fZפF5ִSķ_kmdrKx4CQS{qaVo(=H`kxmHc~u;ncFExv$Uffyzơ%z+[yK^EQ1ZMr ~Q,hk _w^jH9p6TF>b2+?IrTM(G`joxdzJ` Hp:-5k tZ7ql+* 5G흞[5#oC5 ONS_Is8V.5Ip ȳܭEae =2#-RNה.0LuA{nm [0xn楰RMZW#ch&{)`=V큸> 6)`=w o@R-^s[˧Q%].xD+$OM}NR(?w8l*r,%xt P瀐p@7^Ѫ>9WNvT =::gZ7bT8-T=<[ƨs`dRӗi\VpqqJG_ 'R e7Z~"(PNU +6p4}'zMRWOzE-~Q~T̪Xg 8FN(u,:[KQ|VJ#Nf:ϙXQf,\;j:5F?^BJW$B!0Zț3hƊGc[P?SIyMQgPɞꍥϞNQìx v'-!r0Hzӛ, ڹai?|&J^Qɨ \\WV}$d5hWKhtsˮJM9i)H(i5T/+^$f+ Id[T}n&});+>GNtOQa:Ⱥ*   WrЫ2h+U&<KĴxd*K6A` Qb&]UX%o}4G+ksV^0pQN vPm.Ok ~$dsd>zNihQA[GMEYNI6-vm@I|dQ| \~le9bqBXnHVc? 
\)|x%l Lnk{\OjC=$N Pl < ,bNk`ᏲR_'Yk}!XS>5P I*ӿ ; Iޘt1J`Z P.E^`Yy㑯FuZK>׾".7:at<ƒpk̗sMP)Ug ͟^9qM 3!eNI}3ׁGCQI``_E?|Y1Y-wo[K& )u>mGgG8ޅ/ͣa\w96^םbE3Fw Ih%ɾItݯ"†1L([|g@ WTo`Bڡ6#u~"ȣbQ *sgS0K+FXTmfr dyW'xfL#RV֐}&l<[qV΢jw،5/umP~[YBs%O׻W*V"FakG m_HGe=E2>y>p'ic/ SFV)iYUu6n1;Yx(abe5z,的`g+אl|["@+є3nOs!\(ˑ0}#w#GH#H]2 "r $Ax!Y6%/ %=wޱ:Uާ`3j4cq>=5,o4mŢclɤLGv"pDz&+&RsO5,"Ȃ]~;]ú"t L@^?AEm |{' i#8asM`=Czi4 fjxBmU͡ud.3S 60ÉTM>DTw-Y};8DZJ9;VlAʴƼp FQ:;Á>o,NNl+=&bp1'rc{ҹq]cҢВ^T#dƓ(ywё`;'\A-gjsPo`#V!i?7\TgXHxt$lyʙ^HXx(Lis34n [||h+w(S^b{ +H*H#8Ե pcK3O)5xd1|ȓ=R З<0ie#s4k[P'A&a'z$|}[mw[5r庽 gëGTY րخO$4&*"6+O*S'4{xN`SS'ae&QU>*۬~ B^U3B>xpi ADwXN](.+guj73w(vv\/Ɋ-DoF; M_]awu ΦMT0ߤjb*U>?uNO ,y@)zZL`HBO)q_0eҏ\9-i" @4ϨHksWрXLޫuf{K,IÒVh8a?C0#p\#-f הG:Gc!`Jb~sX_zt#>P>WbRu$QZ}pWq$N~ MljDձ6ZۑY_`n.vry<ї(CƆh^N0ױ OjW=o_6rq@[Ya>qxNІ? "T%P[+E2P'KSy*Q;k 9uRe-숌q:)z@X͵ D8 vi#ef×IL*o2&W.OcE)Z[y<>(4 DR'Yb~3I ,ʁH^pT] t(^dQ5GE[ TVAIVQsֿYVw5PS>~qb9%Z +~*h7Џ#LZ~oC2 .]fܴl.C>aƂ"\a"04-ҍŪgiС[]E@S%lț.|  j]/L#'ŗdσj'd;6x2d~Z`\F ЖIr3wN3ƲkùWwTziBze=Ii>)[2v0QT: :+nEuT*nV;pnLAh s_\w㻐$_EE6M*0 .XLJmdQ)/!KGz6j8ۇIY]ibA| ઍ+iLѰG1A!@c$}KMa̹8ej贈R{ qDSNM $:` `0UIH$C(GG -ԤѤb ˵Rt4]'ǏJsL[A3C2F<7KBVP_{gmW%ƓBf+%2_JHp#,RDF: X3359c1OBy{heTE)Hjb k"Ϥ܎ Km:}ɺ^IW ?ÿ0EI3r% suH9$ xR^ha*\9‚K&=ѝc!~!>`Qs3;*P:mCQ:n\SȮr=׺ʼV\I4NBh, >ĩ~4&b͛cuܢf,Ԡ|H,wNFD ±՘a췲@Ȁ3mBe)pj 8*u2jЂTKX[+CjřkAuwڗZݓ Y|+R|'XqRQ4“< "4B|X۠~XL'. %1R")cV-2]괸 ? L땃dKF]1g|rJORĸ0%-<'Ç6y. $oP _,GDwP0ͽ] !\Lu'y6:ř:Aw/CP MW)AiO*/D$IRiLFiϯW{mӬ ûqE15OpԫZ2kU쳪߆\D!K"㤱6dH:mboxPMdEأY!ݿ{kPM!r_#j;W*";G?Ip@YrQ'^|t)8n%qYB@F>H-?OCCr b`_38^H)naq P"nX̎WHZqE᪾FwY)Y!=FLr!p>bc5g,ΖT&pba/( 粄jlܽMSrNQu5Ӎg7V3.:34{$hmiAT2 {5d7 ]D.,P=x61ɽZE'$#_-i`6"!Lj/tZUA.ZoSnhh$l*K%@I} 螂ۯ8g'~ |1#.L-o`EwF!By^]HSA;= r!||6sMb)ѻ?UCZN3S6/͢Gb0 -JcO0h6Q}4?&c=e _@ ";o{ #Ǎ 2LίJ RP  WfZ1oR/ zoU]%*q0lB Pv$ZY%P߇yҖǛW[aM^>$tn@.lVp.886L# VH+l'.|,9ӟs|\Uc3+Q1/%+)s6 KvkLs-b2-(b12#w(H%ŕ6xeULz.f|ʮ7U0.P!x)!VBAŞ]{fBM?}//6h=%P_k, 閤tH4>(+8Gq3FJ1SL ޥpaœB'uo??4`suZ3_a-D|k4R oO7:KI`8wC Qӹ!Ցl]BƻX.5^q}`i*F,uϪ TCYy#܅_#EGvrB3Xw͑]y戝81ii4HJ25λ!TKay_X4H0VE6HUfF_c ׯ5%h!%a>qz(cT,ЎDmxUZ˳R uyVXJBpe_E'di0QTTB]["^o4`lқkU<Qb~|^@ϡ сp6!/碰p6i|VSJu['rAZzp@vy9o̬r4G5xhctk ÞW &@{~Kw$63U!AU= pX>bv>%(XDO  2}}e6+6:EL[h&[>Ab2|lVN^)>y. 3T w i]N LJ]n6 qGü-XE)>{K m*[}s?\3qG) Kgf0 /&""h ⮦@`7tjF>Kc֖Z1w'9?m{nG`5PV#aI^ EAE G$36v>h=t$ UGE|we{7{0P-]#sWO/Nd.P ND ZbRŸ3ɀ?ld\VMT$*#M 7bh`^nVVY,lmw2 ijnbJ2ʲ>[$yB~q+CJ&-rH ]s&DoElnb7ՅrZPDZ'V`8*߳B< n|TYQ]cxǺNl,џƴXߺ@nD~aD+N9ffn&:ϲ*vXByemgI iv~E$CQ;ƙ[M6jN7OA9CXVD})m<+g#IľN,BpHӢVkO53S]v<|sU\]m9#i9A8%wgq#@h☝^H8ա|RO>3xŁ27L}܋[Rrhr4#dJtajl~֡,jј/;rPs咩qVyrt4DۓbD)Ath [o6H#F|P ?-:K]ri7LQ!0}. 
y808hY{#Fr !ģSP4x _cȂ/',3* 86pN؋{)G@ݕqAXrڸ NxN3qBpn8[6 `.@kueΣy ˗ fr]!7C:^}ۯ: @  GC!YX#[31G&pw0369iY[#BE_3A4_=Ft/r#œ"3A1G'7D-fZ:b*ZTϪf}Q=$:%Y%5~D ,J,,ZM x3%.BHr.nDRS0&E.2+] S6>AT+|5ˬOl T,Dч;8yeYr޾xיV)usP?DzMy `EUpq\PM(É-cn8d7-AwQ$f_dyP-/pZWoEf0un\)yUdAwrdw1 Cl2HPʛ$C6Q!^ߤ4mS}*)ɢN˦Н|_ :U*VkHL Yt?Q|I߄Ф P#hrxa%J?9Zߒ)BH9K@oFĘ| bΕ(@v[%:51^fj^Js27M'S^G5hBɞIi8yL=}Y_ psNo6*S"/q+jrHrP@-L8 5fh`#%@۠{cGf7-҈1 㡰 YMe.̝Ձ`X󲃷Ɋ_n(ɷpr.S_#U=<^x8mK φi $[ީӝr㩂PQ>Ũ@Px1Tvw\E>HXMO;#i6RA'JP-𲼳*+["F4d|2%a}r`3~F t[\2W\=l U]ˑ> 6)9M׳y{&f6{do6a* t Z/sEبc0 l{ Ib)wxugqm!/OLib@Mqq ۨLkL;[EOLqP 8E(`MtnV:iSptK RK`u8˸삦WrG e)J@-%R:FWHY4w %~ Happb>i:wB/ &HOދ6tGm`z\ W 7Rs!#Y$ez4`7~Gb-~ŪF0Ha" '7ЕavW+Tޠ;caFP2<\6)r0I"T62L;Q2]"aY|3I*Iʜv89C+}ܮ\so\kiְ|DV 9>c̿9EG [dz=I=;dvK+ }Y"XvoE6~'C3hrc\wIkDr툃nuI:QOrBa=wxEϥ a˩o`% %`Cs5 G=h=.O!C_YZE:sOrIҗS,TE9ꡨńL142_쎺wp͛4fd@q32_K%d~-,XCP/ ]86M yChQ'@IiLk=ҷ}v*N~kμD@/|qHʨ` a?tU/E&gߓ[eZVEdU /7VVHb6yׅBg]GtԻ#+gwpD7x х Ҋ+j’oii]k09( F=`SsH}?,/*aF!#翂~E,NkƉ֝@f5 wH#V|l*APr`~o0љAq?'2>h2cajr$A\j !rC @ ҘYb&+;s m!+473_8Mr9]4H)/|V7 [Ngf UK^Mo'vnPS~r0mz/rH ϤiN Jz8i ,@ ?e5;Z*hCuiKmwC#Za0"b^_Ѯy6p'2 DP~]~VtӒ j5c6Dz+ێZPZq I)'& {(Ps|+9Gg 鱨icLa;|vu{z tUq3$̉F5Ǿ!> RVSx}p+ҹC|C;IlQ=U||tF%c~_7m"i՞ӎ@7ϲ4eQt<GFPz=J\`ulr> XGv1a."A 謪IVW?|.*-=iGV^ӆ $C Z"⭏!\D'[Xȵq }C c+ aKhZ'@l7,](c<Ӓ5qYfU.l4|vM+wQG /{OzA?BpM>#|O_ܫ!qN $u^f%ak='a^/u@T1J ^k'q>BG']$KE$5#Z>zMjXQ+'&lO /ߕ$eGIڇD޴m,"Hԟ G2VG|3F" _PTE]R)%ā{OC|)JU؊* JY,j%i_]O,"Tə˰t='J <6IJ9nmG45]-s؀QF?q'bF#Q~}Iyv)ɰ u$Lh"أO Fؖ݊1Q;i` etkmW诨4 ^O6o>Gb ٺVS+5*j4U@y{o $j|C]R=r6 HI>~p$i4U?"v v]4!j +t=̽_Oe8)OiއĬz3 9pDmV? ܳXpFP*5imI)L ]@$6T$9'iM .߈g;iWJT!2K~} wgj}Ej:fb#cjN~fL1vu8@IŐ@fD?%>R(Aۄz cn*vVr3IQ|&A%QɇIw ؠ#KX XUhe=U4QR5Ny||KmIreM"S^ j9; ]La#x Gqhs 5\9 6JZ$o <UN¨bw6\fv11 {ܥ1;g>3zIJD|*+Ķ8Zv66L\ݚuC!B&3Yk>a~ݶK-ǹW>+hx&!_LZ )0F!,E9AOef\*C!OHXX`#Ic]헋{&c9cy/jH<&8 lVWL\R[ YLKX~ i EZ㍾(p!Q<=ȰGHvMWndl>HzYX ~`~DEW%p6yg+J_7hHML^ M-;'j-`MuznmW淈 دDHgN\ԆZD)["+Feol^.2@z)eum}nq< C -<vc+rN@5q嗧 @.)0Ě+oͺI TGyvBy͵݉n2m(M;My#3eZJcA >Sj43bR1Ԁ~ ;ʦHJ qAH-,{!*T^uWtʻUP+)Y LGcW.7Ӷ3յ]G 7P&8;RFO!؁YvӬճ2kؖY* ,evDӕ2JA&CŞ l‡^D !J#Kh?"ྐgn!ֺ@2QAt`vJ 02`҉WWOAc"\t\t94 6m͵ |p@ Ics.̭ZC[WLV2 $Qw-rz´4B%ca6Y}k<;ʌiY41q=ZP_ weIs6C5 IvESoܿgT}Z`L"n%EwyW$aUG)=^qsp3 ۵>~4,t7N;8$1YoJndھZs*.;k$Fs}R4Lj،ihh#5^㓪2e*|SuPB[Mm]vd1 Ș4v,I7*Lr<8etЩ{ٰX j>2)l4eϗQZ,3%Ji q5O!;%Nl5= ;/CyxCZcNߒR}BQC,}duK4㛧U j /hи'1_[XY|n1Dµ&UAɶ=͖O1 OgI ȣwSst狭&0C0;\'N#[3G`[tnH]r}޷5̔+c=Tx'};D.ou,$'/g1Uj(7'< ٤a Q7#94MyKuwS~s}﹆5v}̭lTI #*x Zu1B 6 dʢh *bnU +.+x z%Ӵu3{;0"LVY& FFOB_º/Ecc 've`_e +Ё ayӧHa-$,^2 ;F-јʝ+M ]Nu&`MYii %ce⥑i$~~Eʇ5"w"sZR@.+$AKl<  l hlj!+/xt yo oHps|mp>*fy@pv|\$㛒[Tj>pv^wiǮinig)Nm'mIIuuVY.3!`Z, c3Vy7%V̢ dWX&6ۭnSI9q-F|銓:Y(5>U?zYȃ^gt,J;Rjˌ5J+^ȖVx^s߀XIs.C4QhEd-ĺbĦ9, JtE4R563V;߷q;ɕ -ssO;}eO(N($ć:ު@wKA72vILG%F8l-y3y8%Qݮ':q\3Nge^k-Y Ѻ U(-C P8]kx_h &,zf`;D O}0K{}7,-WݐRO T>V03=Wm՗~}ښWzTQRTYJKm%/1]~p&8y%wO9Hv?<MkENs4zQ#Vm>߇jCp4s<@ Wdt_tǏ?}ߴS=`vʥSeq.3E!~K=FVmKXao>IxCx|,}h} 1$?9x 3E_<ܬ3Y:Cz5^"H s6oZ!ɩti4p\KlErPNe"Lr#9w s$M8 _΋u?*\PQS[ݍBFumGPIbRRƻϟ\E {Җ3בaGbQ+~yoi;L#a<րxT)x.cMm{jyhj~3,YJh6ik/Bie9Wcq/ɳ)k{mHM+Xazކ cd0ؐ}=xDl+kpb궵H{8Y <̿j>6h26K:,ώi]g t F,n?|̙c4NxȒ %׼e WӸ#71 tQ1QMnBOy}c'޽XR*.k BnxJ35 +Ti5vCZ) |cBɕ?E+,.>V(,+g6_Hi2C؞0.)+x-&t0aT/QSTAV ^k6”ʮo"Jhqj181+WO,ҿ=w M$ѫNrKx"Dsrۗ˗!Tvq}2l,3dU4-Ȑ"SҭyBxzU)6eo+<J6 w\dzj$/;zM5:NACֈnE7 {buYlwY甡`r[X1 &2k9>BrU'2  `֏V3fl>A ާ8|9}O_#G0āwG SZj}k};42356SB~7!?T0JGl?P MOJ1ʛFR⇦~ Ջ_o9HOh"I@YIeW qԀ6-oXv칹+֌ }WSiKs~agPgibfٴ $G7i 5nk`3&C Jڔa:ᚳ0xPS`Lݒ%K_,JEJ"Vu,(GVEȇߡ~#h ֚MTvIm 8_([Q%R y#S[4v7  ,yIj,;q$~+Kx Ec*CXs$$^>Ͱ<3ﲆ=]:/ J6鲒RӷQ!Rqfv줮L2ͧ52!#kfP@'0hx3:d4`Q|w(N?S}{,ٍGU*+ߧ[1c'_$ T&~S{ @z]!8_7dW~7/58]G-9$G,lIu[{w!$ZÍA?#1uwZ=P 
kernlab/data/reuters.rda0000644000175100001440000003773114366221260015015 0ustar hornikusers
" \/;e* %.5tG=ղ#وt!yWF񩏟pQUwA185X0G#@wHːiYw2ϫ]9EGWߟ[_l+f\ ]yi?b@G78;tg_byIy(d%,%Zq_0-Wk,8{xٺOu_J~~}`3G Y+ߕbvvvm߅`FZ@zc|ZCڎM$w{*ϵ&Ck|kmD+4*Ivu#Vrݬh>ȚtGM+7ҏ73[@ Xv%Jd`Yi^LVvl ,OF‹/GQU9}*d;UЋ- Jot9pqFV2gb $Z+o&a2Wk_y^cʎyTPA}Zvm Ckj`'S 3M16?۵`:]OeȄ@ j&Q^/޸T4b%R0SE^Y'u+O"άo;,r5.I$؏z"/iv({t*lddr20Em640fƦX\'XeQ|zl#bNgcQKd@k@YIlPYLc/·gX\{ÊhZxO\A'p_`b;m C6H&s( o[|OT/%# $%~k1 cm hC4),f̐ZDè!.':Z%&F!;óN|N`hMT iM:ɒu=w!>ZEAX~TMNvӬ vvVwM:hB7PڞXz }ZRmﭧ4 o\Ft- *FҚ[0-u m6I\gsH7iOxl1uO7i*UYt\4y0E;=r6ӿN3? ҋe7W9 S+&CR~*$qƘܪymzMZ[X%.i,ýkݏf2=H6멶;CvԙN2ðJhzK_._HQ8,m׸RI_3ҝu(Yy! ؼ  7 j";T?)=&_U^Z3 9HX~-99&V={ HU(P)M6]*ho)68GanscߕECO^үrz|[#t3$[of1ȞGvVa/\Az7??H[>ͦ*w\svj07眄E(Lt]i7AJO¡Qc:dj~c &x?l"l_ ϯ??_qZ7~l'oɴ(\\A<_$k Pkernlab/data/spirals.rda0000644000175100001440000001115414366221260014770 0ustar hornikusers]xi8n dl!)eP[JQI%)PJ E$Dَ`XfA sϹs]>ױ67!%~Ew GXBXK=ݼκ{/[4{`.V,ot9Z/SX=cmAGVvˇ$O*ڞƪ_Iʖc[hC,t~K‘Wa?q辣Lu} @Nfx=TG3R$pؿGНӖh4^9sv UXrs_k$}WMlf~-g js:XE`R;G >WotrTYbT˰GBe⌗-Nn|dqNͯVm ԭPK_{(h }EJK@; }v"m;Y96XC_܄Tw"2Fc3c!&X^q̽%k  }tVhcɾ2jIٮ,0BMP ~3$`( _}g*ph?gV?GZUe93\OihIn4$pŷhHyQHC%J RYoO)H ?4hYtv ݼuc]& (}jf{y+],)5E1ԉCvÝb+0,!6}엝>5!'`zύX )o@_[p N}>T->KFV,,gZ7ۭpV=Qcq|EݣE eli7Tj1˅uH+a>ˀad5u 5߬/qt󿅄phlwPʆoK !u2U>'#]V҇~mr^nuRBvpbe喇C$۬2^xchuUd.>c:޻.4!hd" =|{Ox ~޲w=U *efLR5+?|u Q1YlHf.y,'O{x+VF@cyfz;Ի}ݾdo %D֏',`2C|e8u$FPeFiw3tsi&H% gN4eԲ5nx:wMwðT6? u8zNF2qa0pyPcj;'CO\u 7bZɔQb tz9ZKD^\DyI>҇]i2HD$= 1^ v,r™OвM8HeQ/yai4z'e*9vH&=>` &'9߰]Cm^%vfHHƹbɶyMQ - ަ4,,ֳ,%oa|s>#8u2i1.[QhdT;5h(9J{kjW yW-d ]~kSN֤e)]O"$׺ٌ oVMt^_݉3ߚ9Lp i0%f_I+cd|Dc+;>N @nR.n+sgCı"1(wYW{5Tdō8 Q>D? u=+Ɖ[ԅ\};j3_BGva; JӋp| &fFPH$W&eTxO>Q<uT@! WCOלLYChx/GHa)~iս=WྊTb :GMC3g"Uy|LFt`Nѽ"j4JT:9 kmO R疸@7kYkx+dߕrCK%$Z;@tZԙ]0"oTWyF[hya^ |Tf{ vٱA&&{ Q@M2-[OܛjRAs%&!XrK$AW-=zNDf°c'ԿC3\:8=1 o>/ecOȪRkE4>[?MW{1/bMWW:ir}7ؑSf|گuR iYD35N&Dx(.~P[ P'f> D8ء'ǕGfȳ Ői~Vu781P"0H2~,Й~<*<*Yr+YGF"]>".Uu!fvX_doEi "!>s߱U,-dՅX.\n-zJ i Qu}WK/vνz ˏ3Sl5c;- `-VZ]`l}Ѝ0g$io:ܐͺ6gc s[2TIkpt=E3(Cېyx]j$ w݌Eⲵ~*±j8hP/gce 6A_n8RLǝ|qTKzǝ|,!f@^p I Wx|t.>护@.E0]wAEoȪ!yJ5 ڌ<ǃ:qioϫVXUJ3ھYSFn)=>h VaI1Iُ#2jBٿpTGRҽSKC[=lyw$ϑ!E'qh(9Y8b X yFR=:G](WO.p۲ZSWbsӊLd6r(: ̰{򢾞5̆-AnkUh`)ڈJ57G MKoGo ?{k H൰c/<(Ұ/-ք2g63i-%06WR 󶏄1Z]26K"+e~J#qq'.(ONH_W B#C źvtDNcYi!~u NEe:a Њ]F߉[0zl:)|=FIXkB`re۱fX?}ciC(X\&L[8ñlV%@/>up>Y/Ytc.mcw,-HqV>8 L0p8d3 Y  Y0j^]̭A99z*Yz~jH"%(7*SvW_'kXd~xhZR*׌ø_[z\U`|Qҷ lM>zɬAq;9WäS'hF߷Z2,MLx¾HH˯7<섓·)b, ,LmF{mu #,(o|l#+axx[𘡎UJU]u~nSŮ6_[Q tWZCZ3'D݃p#WɣA*CGF[<a jkernlab/man/0000755000175100001440000000000014366217164012473 5ustar hornikuserskernlab/man/kfa.Rd0000644000175100001440000001115012117362655013517 0ustar hornikusers\name{kfa} \alias{kfa} \alias{kfa,formula-method} \alias{kfa,matrix-method} \alias{show,kfa-method} \alias{coef,kfa-method} \title{Kernel Feature Analysis} \description{ The Kernel Feature Analysis algorithm is an algorithm for extracting structure from possibly high-dimensional data sets. Similar to \code{kpca} a new basis for the data is found. The data can then be projected on the new basis. } \usage{ \S4method{kfa}{formula}(x, data = NULL, na.action = na.omit, ...) \S4method{kfa}{matrix}(x, kernel = "rbfdot", kpar = list(sigma = 0.1), features = 0, subset = 59, normalize = TRUE, na.action = na.omit) } \arguments{ \item{x}{ The data matrix indexed by row or a formula describing the model. Note, that an intercept is always included, whether given in the formula or not.} \item{data}{an optional data frame containing the variables in the model (when using a formula).} \item{kernel}{the kernel function used in training and predicting. 
    This parameter can be set to any function, of class kernel, which
    computes an inner product in feature space between two vector
    arguments. \pkg{kernlab} provides the most popular kernel functions
    which can be used by setting the kernel parameter to the following
    strings:
    \itemize{
      \item \code{rbfdot} Radial Basis kernel function "Gaussian"
      \item \code{polydot} Polynomial kernel function
      \item \code{vanilladot} Linear kernel function
      \item \code{tanhdot} Hyperbolic tangent kernel function
      \item \code{laplacedot} Laplacian kernel function
      \item \code{besseldot} Bessel kernel function
      \item \code{anovadot} ANOVA RBF kernel function
      \item \code{splinedot} Spline kernel
    }
    The kernel parameter can also be set to a user defined function of
    class kernel by passing the function name as an argument.
  }
  \item{kpar}{the list of hyper-parameters (kernel parameters). This is
    a list which contains the parameters to be used with the kernel
    function. Valid parameters for existing kernels are:
    \itemize{
      \item \code{sigma} inverse kernel width for the Radial Basis
        kernel function "rbfdot" and the Laplacian kernel "laplacedot".
      \item \code{degree, scale, offset} for the Polynomial kernel "polydot"
      \item \code{scale, offset} for the Hyperbolic tangent kernel
        function "tanhdot"
      \item \code{sigma, order, degree} for the Bessel kernel "besseldot".
      \item \code{sigma, degree} for the ANOVA kernel "anovadot".
    }
    Hyper-parameters for user defined kernels can be passed through the
    kpar parameter as well.}
  \item{features}{Number of features (principal components) to
    return. (default: 0, all)}
  \item{subset}{the number of features sampled (used) from the data set}
  \item{normalize}{normalize the features selected (default: TRUE)}
  \item{na.action}{A function to specify the action to be taken if
    \code{NA}s are found. The default action is \code{na.omit}, which
    leads to rejection of cases with missing values on any required
    variable. An alternative is \code{na.fail}, which causes an error if
    \code{NA} cases are found. (NOTE: If given, this argument must be
    named.)}
  \item{\dots}{additional parameters}
}
\details{
Kernel Feature Analysis is similar to Kernel PCA, but instead of
extracting eigenvectors of the training data set in feature space, it
approximates the eigenvectors by selecting training patterns which are
good basis vectors for the training set. It works by choosing a fixed
size subset of the data set and scaling it to unit length (under the
kernel). It then chooses the features that maximize the value of the
inner product (kernel function) with the rest of the patterns.
}
\value{
\code{kfa} returns an object of class \code{kfa} containing the
features selected by the algorithm.
  \item{xmatrix}{contains the features selected}
  \item{alpha}{contains the sparse alpha vector}
The \code{predict} function can be used to embed new data points into
the selected feature base.
}
\references{Alex J. Smola, Olvi L.
Mangasarian and Bernhard Schoelkopf\cr
\emph{Sparse Kernel Feature Analysis}\cr
Data Mining Institute Technical Report 99-04, October 1999\cr
\url{ftp://ftp.cs.wisc.edu/pub/dmi/tech-reports/99-04.ps}
}
\author{Alexandros Karatzoglou\cr
\email{alexandros.karatzoglou@ci.tuwien.ac.at}}
\seealso{\code{\link{kpca}}, \code{\link{kfa-class}}}
\examples{
data(promotergene)
f <- kfa(~.,data=promotergene,features=2,kernel="rbfdot",
         kpar=list(sigma=0.01))
plot(predict(f,promotergene),col=as.numeric(promotergene[,1]))
}
\keyword{cluster}
kernlab/man/ksvm.Rd0000644000175100001440000003646514366221170013745 0ustar hornikusers\name{ksvm}
\alias{ksvm}
\alias{ksvm,formula-method}
\alias{ksvm,vector-method}
\alias{ksvm,matrix-method}
\alias{ksvm,kernelMatrix-method}
\alias{ksvm,list-method}
\alias{show,ksvm-method}
\alias{coef,ksvm-method}
\title{Support Vector Machines}
\description{
Support Vector Machines are an excellent tool for classification,
novelty detection, and regression. \code{ksvm} supports the well-known
C-svc, nu-svc (classification), one-class-svc (novelty), eps-svr and
nu-svr (regression) formulations along with native multi-class
classification formulations and the bound-constraint SVM
formulations.\cr
\code{ksvm} also supports class-probabilities output and confidence
intervals for regression.
}
\usage{
\S4method{ksvm}{formula}(x, data = NULL, ..., subset, na.action = na.omit, scaled = TRUE)

\S4method{ksvm}{vector}(x, ...)

\S4method{ksvm}{matrix}(x, y = NULL, scaled = TRUE, type = NULL,
     kernel ="rbfdot", kpar = "automatic",
     C = 1, nu = 0.2, epsilon = 0.1, prob.model = FALSE,
     class.weights = NULL, cross = 0, fit = TRUE, cache = 40,
     tol = 0.001, shrinking = TRUE, ...,
     subset, na.action = na.omit)

\S4method{ksvm}{kernelMatrix}(x, y = NULL, type = NULL,
     C = 1, nu = 0.2, epsilon = 0.1, prob.model = FALSE,
     class.weights = NULL, cross = 0, fit = TRUE, cache = 40,
     tol = 0.001, shrinking = TRUE, ...)

\S4method{ksvm}{list}(x, y = NULL, type = NULL,
     kernel = "stringdot", kpar = list(length = 4, lambda = 0.5),
     C = 1, nu = 0.2, epsilon = 0.1, prob.model = FALSE,
     class.weights = NULL, cross = 0, fit = TRUE, cache = 40,
     tol = 0.001, shrinking = TRUE, ...,
     na.action = na.omit)
}
\arguments{
  \item{x}{a symbolic description of the model to be fit. When not
    using a formula x can be a matrix or vector containing the training
    data or a kernel matrix of class \code{kernelMatrix} of the training
    data or a list of character vectors (for use with the string
    kernel). Note that the intercept is always excluded, whether given
    in the formula or not.}
  \item{data}{an optional data frame containing the training data, when
    using a formula. By default the data is taken from the environment
    which `ksvm' is called from.}
  \item{y}{a response vector with one label for each row/component of
    \code{x}. Can be either a factor (for classification tasks) or a
    numeric vector (for regression).}
  \item{scaled}{A logical vector indicating the variables to be
    scaled. If \code{scaled} is of length 1, the value is recycled as
    many times as needed and all non-binary variables are scaled.
    Per default, data are scaled internally (both \code{x} and \code{y}
    variables) to zero mean and unit variance. The center and scale
    values are returned and used for later predictions.}
  \item{type}{\code{ksvm} can be used for classification, for
    regression, or for novelty detection.
    Depending on whether \code{y} is a factor or not, the default
    setting for \code{type} is \code{C-svc} or \code{eps-svr},
    respectively, but can be overwritten by setting an explicit value.\cr
    Valid options are:
    \itemize{
      \item \code{C-svc}   C classification
      \item \code{nu-svc}  nu classification
      \item \code{C-bsvc}  bound-constraint svm classification
      \item \code{spoc-svc}  Crammer, Singer native multi-class
      \item \code{kbb-svc}  Weston, Watkins native multi-class
      \item \code{one-svc}  novelty detection
      \item \code{eps-svr}  epsilon regression
      \item \code{nu-svr}   nu regression
      \item \code{eps-bsvr}  bound-constraint svm regression
    }
  }
  \item{kernel}{the kernel function used in training and predicting.
    This parameter can be set to any function, of class kernel, which
    computes the inner product in feature space between two vector
    arguments (see \code{\link{kernels}}). \cr
    kernlab provides the most popular kernel functions which can be
    used by setting the kernel parameter to the following strings:
    \itemize{
      \item \code{rbfdot} Radial Basis kernel "Gaussian"
      \item \code{polydot} Polynomial kernel
      \item \code{vanilladot} Linear kernel
      \item \code{tanhdot} Hyperbolic tangent kernel
      \item \code{laplacedot} Laplacian kernel
      \item \code{besseldot} Bessel kernel
      \item \code{anovadot} ANOVA RBF kernel
      \item \code{splinedot} Spline kernel
      \item \code{stringdot} String kernel
    }
    Setting the kernel parameter to "matrix" treats \code{x} as a
    kernel matrix calling the \code{kernelMatrix} interface.\cr
    The kernel parameter can also be set to a user defined function of
    class kernel by passing the function name as an argument.
  }
  \item{kpar}{the list of hyper-parameters (kernel parameters). This
    is a list which contains the parameters to be used with the kernel
    function. Valid parameters for existing kernels are:
    \itemize{
      \item \code{sigma} inverse kernel width for the Radial Basis
        kernel function "rbfdot" and the Laplacian kernel "laplacedot".
      \item \code{degree, scale, offset} for the Polynomial kernel "polydot"
      \item \code{scale, offset} for the Hyperbolic tangent kernel
        function "tanhdot"
      \item \code{sigma, order, degree} for the Bessel kernel "besseldot".
      \item \code{sigma, degree} for the ANOVA kernel "anovadot".
      \item \code{length, lambda, normalized} for the "stringdot" kernel
        where length is the length of the strings considered, lambda the
        decay factor and normalized a logical parameter determining if
        the kernel evaluations should be normalized.
    }
    Hyper-parameters for user defined kernels can be passed through the
    kpar parameter as well. In the case of a Radial Basis kernel
    function (Gaussian) kpar can also be set to the string "automatic"
    which uses the heuristics in \code{\link{sigest}} to calculate a
    good \code{sigma} value for the Gaussian RBF or Laplace kernel,
    from the data. (default = "automatic").}
  \item{C}{cost of constraints violation (default: 1); this is the
    `C'-constant of the regularization term in the Lagrange
    formulation.}
  \item{nu}{parameter needed for \code{nu-svc}, \code{one-svc}, and
    \code{nu-svr}. The \code{nu} parameter sets the upper bound on the
    training error and the lower bound on the fraction of data points
    to become Support Vectors (default: 0.2).}
  \item{epsilon}{epsilon in the insensitive-loss function used for
    \code{eps-svr}, \code{nu-svr} and \code{eps-bsvm} (default: 0.1)}
  \item{prob.model}{if set to \code{TRUE} builds a model for calculating
    class probabilities or in case of regression, calculates the scaling
    parameter of the Laplacian distribution fitted on the residuals.
    Fitting is done on output data created by performing a 3-fold
    cross-validation on the training data. For details see
    references. (default: \code{FALSE})}
  \item{class.weights}{a named vector of weights for the different
    classes, used for asymmetric class sizes. Not all factor levels
    have to be supplied (default weight: 1). All components have to be
    named.}
  \item{cache}{cache memory in MB (default 40)}
  \item{tol}{tolerance of termination criterion (default: 0.001)}
  \item{shrinking}{option whether to use the shrinking-heuristics
    (default: \code{TRUE})}
  \item{cross}{if an integer value k>0 is specified, a k-fold cross
    validation on the training data is performed to assess the quality
    of the model: the accuracy rate for classification and the Mean
    Squared Error for regression}
  \item{fit}{indicates whether the fitted values should be computed
    and included in the model or not (default: \code{TRUE})}
  \item{\dots}{additional parameters for the low level fitting function}
  \item{subset}{An index vector specifying the cases to be used in the
    training sample. (NOTE: If given, this argument must be named.)}
  \item{na.action}{A function to specify the action to be taken if
    \code{NA}s are found. The default action is \code{na.omit}, which
    leads to rejection of cases with missing values on any required
    variable. An alternative is \code{na.fail}, which causes an error
    if \code{NA} cases are found. (NOTE: If given, this argument must
    be named.)}
}
\value{
  An S4 object of class \code{"ksvm"} containing the fitted model.
  Accessor functions can be used to access the slots of the object
  (see examples) which include:
  \item{alpha}{The resulting support vectors, (alpha vector) (possibly
    scaled).}
  \item{alphaindex}{The index of the resulting support vectors in the
    data matrix. Note that this index refers to the pre-processed data
    (after the possible effect of \code{na.omit} and \code{subset})}
  \item{coef}{The corresponding coefficients times the training labels.}
  \item{b}{The negative intercept.}
  \item{nSV}{The number of Support Vectors}
  \item{obj}{The value of the objective function. In case of
    one-against-one classification this is a vector of values}
  \item{error}{Training error}
  \item{cross}{Cross validation error, (when cross > 0)}
  \item{prob.model}{Contains the width of the Laplacian fitted on the
    residuals in case of regression, or the parameters of the sigmoid
    fitted on the decision values in case of classification.}
}
\details{
  \code{ksvm} uses John Platt's SMO algorithm for solving the SVM QP
  problem and most SVM formulations. On the \code{spoc-svc},
  \code{kbb-svc}, \code{C-bsvc} and \code{eps-bsvr} formulations a
  chunking algorithm based on the TRON QP solver is used. \cr
  For multiclass-classification with \eqn{k} classes, \eqn{k > 2},
  \code{ksvm} uses the `one-against-one'-approach, in which
  \eqn{k(k-1)/2} binary classifiers are trained; the appropriate class
  is found by a voting scheme. The \code{spoc-svc} and the
  \code{kbb-svc} formulations deal with the multiclass-classification
  problems by solving a single quadratic problem involving all the
  classes.\cr
  If the predictor variables include factors, the formula interface
  must be used to get a correct model matrix. \cr
  In classification when \code{prob.model} is \code{TRUE} a 3-fold
  cross validation is performed on the data and a sigmoid function is
  fitted on the resulting decision values \eqn{f}.
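  For example, a minimal sketch on the iris data fitting such a
  probability model and inspecting both the raw decision values and the
  sigmoid-mapped class probabilities (the \code{"decision"} and
  \code{"probabilities"} options of \code{predict} select the two
  outputs):
\preformatted{
## sketch: fit with a probability model, then compare the raw
## decision values f with the fitted probability outputs
m <- ksvm(Species ~ ., data = iris, prob.model = TRUE)
head(predict(m, iris[, -5], type = "decision"))      # raw decision values
head(predict(m, iris[, -5], type = "probabilities")) # class probabilities
}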
  The data can be passed to the \code{ksvm} function in a \code{matrix}
  or a \code{data.frame}, in addition \code{ksvm} also supports input
  in the form of a kernel matrix of class \code{kernelMatrix} or as a
  list of character vectors where a string kernel has to be used.\cr
  The \code{plot} function for binary classification \code{ksvm}
  objects displays a contour plot of the decision values with the
  corresponding support vectors highlighted.\cr
  The predict function can return class probabilities for
  classification problems by setting the \code{type} parameter to
  "probabilities". \cr
  The problem of model selection is partially addressed by an empirical
  observation for the RBF kernels (Gaussian, Laplace) where the optimal
  values of the \eqn{sigma} width parameter are shown to lie between
  the 0.1 and 0.9 quantile of the \eqn{\|x - x'\|} statistics. When
  using an RBF kernel and setting \code{kpar} to "automatic",
  \code{ksvm} uses the \code{sigest} function to estimate the quantiles
  and uses the median of the values.
}
\note{Data is scaled internally by default, usually yielding better
  results.}
\references{
  \itemize{
    \item Chang Chih-Chung, Lin Chih-Jen\cr
      \emph{LIBSVM: a library for Support Vector Machines}\cr
      \url{https://www.csie.ntu.edu.tw/~cjlin/libsvm/}
    \item Chih-Wei Hsu, Chih-Jen Lin\cr
      \emph{BSVM}
      \url{https://www.csie.ntu.edu.tw/~cjlin/bsvm/}
    \item J. Platt\cr
      \emph{Probabilistic outputs for support vector machines and
        comparison to regularized likelihood methods}\cr
      Advances in Large Margin Classifiers, A. Smola, P. Bartlett,
      B. Schoelkopf and D. Schuurmans, Eds. Cambridge, MA: MIT Press, 2000.
    \item H.-T. Lin, C.-J. Lin and R. C. Weng\cr
      \emph{A note on Platt's probabilistic outputs for support vector
        machines}\cr
      \url{https://www.csie.ntu.edu.tw/~htlin/paper/doc/plattprob.pdf}
    \item C.-W. Hsu and C.-J. Lin\cr
      \emph{A comparison on methods for multi-class support vector
        machines}\cr
      IEEE Transactions on Neural Networks, 13(2002) 415-425.\cr
      \url{https://www.csie.ntu.edu.tw/~cjlin/papers/multisvm.pdf}
    \item K. Crammer, Y. Singer\cr
      \emph{On the learnability and design of output codes for
        multiclass problems}\cr
      Computational Learning Theory, 35-46, 2000.\cr
      \url{http://www.learningtheory.org/colt2000/papers/CrammerSinger.pdf}
    \item J. Weston, C. Watkins\cr
      \emph{Multi-class support vector machines}.
      Technical Report CSD-TR-98-04,
      Royal Holloway, University of London, Department of Computer Science.
  }
}
\author{
  Alexandros Karatzoglou (SMO optimizers in C++ by Chih-Chung Chang & Chih-Jen Lin)\cr
  \email{alexandros.karatzoglou@ci.tuwien.ac.at}
}
\seealso{\code{\link{predict.ksvm}}, \code{\link{ksvm-class}}, \code{\link{couple}} }
\keyword{methods}
\keyword{regression}
\keyword{nonlinear}
\keyword{classif}
\keyword{neural}
\examples{
## simple example using the spam data set
data(spam)

## create test and training set
index <- sample(1:dim(spam)[1])
spamtrain <- spam[index[1:floor(dim(spam)[1]/2)], ]
spamtest <- spam[index[((ceiling(dim(spam)[1]/2)) + 1):dim(spam)[1]], ]

## train a support vector machine
filter <- ksvm(type~.,data=spamtrain,kernel="rbfdot",
               kpar=list(sigma=0.05),C=5,cross=3)
filter

## predict mail type on the test set
mailtype <- predict(filter,spamtest[,-58])

## Check results
table(mailtype,spamtest[,58])


## Another example with the famous iris data
data(iris)

## Create a kernel function using the built-in rbfdot function
rbf <- rbfdot(sigma=0.1)
rbf

## train a bound constraint support vector machine
irismodel <- ksvm(Species~.,data=iris,type="C-bsvc",
                  kernel=rbf,C=10,prob.model=TRUE)

irismodel

## get fitted values
fitted(irismodel)

## Test on the training set with probabilities as output
predict(irismodel, iris[,-5], type="probabilities")


## Demo of the plot function
x <- rbind(matrix(rnorm(120),,2),matrix(rnorm(120,mean=3),,2))
y <- matrix(c(rep(1,60),rep(-1,60)))

svp <- ksvm(x,y,type="C-svc")
plot(svp,data=x)


### Use kernelMatrix
K <- as.kernelMatrix(crossprod(t(x)))

svp2 <- ksvm(K, y, type="C-svc")

svp2

# test data
xtest <- rbind(matrix(rnorm(20),,2),matrix(rnorm(20,mean=3),,2))
# test kernel matrix i.e. inner/kernel product of test data with
# Support Vectors

Ktest <- as.kernelMatrix(crossprod(t(xtest),t(x[SVindex(svp2), ])))

predict(svp2, Ktest)


#### Use custom kernel
k <- function(x,y) {(sum(x*y) +1)*exp(-0.001*sum((x-y)^2))}
class(k) <- "kernel"

data(promotergene)

## train svm using custom kernel
gene <- ksvm(Class~.,data=promotergene[c(1:20, 80:100),],kernel=k,
             C=5,cross=5)

gene


#### Use text with string kernels
data(reuters)
is(reuters)

tsv <- ksvm(reuters,rlabels,kernel="stringdot",
            kpar=list(length=5),cross=3,C=10)
tsv


## regression
# create data
x <- seq(-20,20,0.1)
y <- sin(x)/x + rnorm(401,sd=0.03)

# train support vector machine
regm <- ksvm(x,y,epsilon=0.01,kpar=list(sigma=16),cross=3)
plot(x,y,type="l")
lines(x,predict(regm,x),col="red")
}
kernlab/man/inchol-class.Rd0000644000175100001440000000315211304023134015321 0ustar hornikusers\name{inchol-class}
\docType{class}
\alias{inchol-class}
\alias{diagresidues}
\alias{maxresiduals}
\alias{pivots}
\alias{diagresidues,inchol-method}
\alias{maxresiduals,inchol-method}
\alias{pivots,inchol-method}
\title{Class "inchol" }
\description{The reduced Cholesky decomposition object}
\section{Objects from the Class}{Objects can be created by calls of the
  form \code{new("inchol", ...)} or by calling the \code{inchol} function.}
\section{Slots}{
  \describe{
    \item{\code{.Data}:}{Object of class \code{"matrix"} contains
      the decomposed matrix}
    \item{\code{pivots}:}{Object of class \code{"vector"} contains
      the pivots performed}
    \item{\code{diagresidues}:}{Object of class \code{"vector"} contains
      the diagonal residues}
    \item{\code{maxresiduals}:}{Object of class \code{"vector"} contains
      the maximum residues}
  }
}
\section{Extends}{
Class \code{"matrix"}, directly.
}
\section{Methods}{
  \describe{
    \item{diagresidues}{\code{signature(object = "inchol")}: returns
      the diagonal residues}
    \item{maxresiduals}{\code{signature(object = "inchol")}: returns
      the maximum residues}
    \item{pivots}{\code{signature(object = "inchol")}: returns
      the pivots performed}
  }
}
\author{Alexandros Karatzoglou\cr
  \email{alexandros.karatzoglou@ci.tuwien.ac.at}}
\seealso{ \code{\link{inchol}}, \code{\link{csi-class}}, \code{\link{csi}}}
\examples{
data(iris)
datamatrix <- as.matrix(iris[,-5])
# initialize kernel function
rbf <- rbfdot(sigma=0.1)
rbf
Z <- inchol(datamatrix,kernel=rbf)
dim(Z)
pivots(Z)
diagresidues(Z)
maxresiduals(Z)
}
\keyword{classes}
kernlab/man/csi.Rd0000644000175100001440000001231012560414652013530 0ustar hornikusers\name{csi}
\docType{methods}
\alias{csi}
\alias{csi-methods}
\alias{csi,matrix-method}
\title{Cholesky decomposition with Side Information}
\description{
The \code{csi} function in \pkg{kernlab} is an implementation of an
incomplete Cholesky decomposition algorithm which exploits side
information (e.g., classification labels, regression responses) to
compute a low rank decomposition of a kernel matrix from the data.
}
\usage{
\S4method{csi}{matrix}(x, y, kernel="rbfdot", kpar=list(sigma=0.1), rank,
    centering = TRUE, kappa = 0.99, delta = 40, tol = 1e-5)
}
\arguments{
  \item{x}{The data matrix indexed by row}
  \item{y}{the classification labels or regression responses. In
    classification y is a \eqn{m \times n} matrix where \eqn{m} is the
    number of data points and \eqn{n} the number of classes, and
    \eqn{y_i} is 1 if the corresponding x belongs to class i.}
  \item{kernel}{the kernel function used in training and predicting.
    This parameter can be set to any function, of class \code{kernel},
    which computes the inner product in feature space between two
    vector arguments. kernlab provides the most popular kernel
    functions which can be used by setting the kernel parameter to the
    following strings:
    \itemize{
      \item \code{rbfdot} Radial Basis kernel function "Gaussian"
      \item \code{polydot} Polynomial kernel function
      \item \code{vanilladot} Linear kernel function
      \item \code{tanhdot} Hyperbolic tangent kernel function
      \item \code{laplacedot} Laplacian kernel function
      \item \code{besseldot} Bessel kernel function
      \item \code{anovadot} ANOVA RBF kernel function
      \item \code{splinedot} Spline kernel
      \item \code{stringdot} String kernel
    }
    The kernel parameter can also be set to a user defined function of
    class kernel by passing the function name as an argument.
  }
  \item{kpar}{the list of hyper-parameters (kernel parameters). This
    is a list which contains the parameters to be used with the kernel
    function. Valid parameters for existing kernels are:
    \itemize{
      \item \code{sigma} inverse kernel width for the Radial Basis
        kernel function "rbfdot" and the Laplacian kernel "laplacedot".
      \item \code{degree, scale, offset} for the Polynomial kernel "polydot"
      \item \code{scale, offset} for the Hyperbolic tangent kernel
        function "tanhdot"
      \item \code{sigma, order, degree} for the Bessel kernel "besseldot".
      \item \code{sigma, degree} for the ANOVA kernel "anovadot".
    }
    Hyper-parameters for user defined kernels can be passed through the
    kpar parameter as well.
  }
  \item{rank}{maximal rank of the computed kernel matrix}
  \item{centering}{if \code{TRUE} centering is performed (default: TRUE)}
  \item{kappa}{trade-off between approximation of K and prediction of Y
    (default: 0.99)}
  \item{delta}{number of columns of Cholesky performed in advance
    (default: 40)}
  \item{tol}{minimum gain at each iteration (default: 1e-5)}
}
\details{An incomplete Cholesky decomposition calculates \eqn{Z} where
  \eqn{K = ZZ'}, \eqn{K} being the kernel matrix. Since the rank of a
  kernel matrix is usually low, \eqn{Z} tends to be smaller than the
  complete kernel matrix. The decomposed matrix can be used to create
  memory efficient kernel-based algorithms without the need to compute
  and store a complete kernel matrix in memory. \cr
  \code{csi} uses the class labels, or regression responses to compute
  a more appropriate approximation for the problem at hand considering
  the additional information from the response variable.
}
\value{
  An S4 object of class "csi" which is an extension of the class
  "matrix". The object is the decomposed kernel matrix along with the
  slots:
  \item{pivots}{Indices on which pivots were done}
  \item{diagresidues}{Residuals left on the diagonal}
  \item{maxresiduals}{Residuals picked for pivoting}
  \item{predgain}{predicted gain before adding each column}
  \item{truegain}{actual gain after adding each column}
  \item{Q}{QR decomposition of the kernel matrix}
  \item{R}{QR decomposition of the kernel matrix}

  slots can be accessed either by \code{object@slot} or by accessor
  functions with the same name (e.g., \code{pivots(object)})}
\references{
  Francis R. Bach, Michael I. Jordan\cr
  \emph{Predictive low-rank decomposition for kernel methods.}\cr
  Proceedings of the Twenty-second International Conference on Machine
  Learning (ICML) 2005\cr
  \url{http://www.di.ens.fr/~fbach/bach_jordan_csi.pdf}
}
\author{Alexandros Karatzoglou (based on Matlab code by Francis Bach)\cr
  \email{alexandros.karatzoglou@ci.tuwien.ac.at}}
\seealso{\code{\link{inchol}}, \code{\link{chol}}, \code{\link{csi-class}}}
\examples{
data(iris)

## create multidimensional y matrix
yind <- t(matrix(1:3,3,150))
ymat <- matrix(0, 150, 3)
ymat[yind==as.integer(iris[,5])] <- 1

datamatrix <- as.matrix(iris[,-5])
# initialize kernel function
rbf <- rbfdot(sigma=0.1)
rbf
Z <- csi(datamatrix,ymat, kernel=rbf, rank = 30)
dim(Z)
pivots(Z)
# calculate kernel matrix
K <- crossprod(t(Z))
# difference between approximated and real kernel matrix
(K - kernelMatrix(kernel=rbf, datamatrix))[6,]
}
\keyword{methods}
\keyword{algebra}
\keyword{array}
kernlab/man/kcca-class.Rd0000644000175100001440000000345511304023134014752 0ustar hornikusers\name{kcca-class}
\docType{class}
\alias{kcca-class}
\alias{kcor}
\alias{xcoef}
\alias{ycoef}
%%\alias{yvar}
%%\alias{xvar}
\alias{kcor,kcca-method}
\alias{xcoef,kcca-method}
\alias{xvar,kcca-method}
\alias{ycoef,kcca-method}
\alias{yvar,kcca-method}
\title{Class "kcca"}
\description{The "kcca" class}
\section{Objects from the Class}{
Objects can be created by calls of the form \code{new("kcca", ...)}
or by calling the \code{kcca} function.
} \section{Slots}{ \describe{ \item{\code{kcor}:}{Object of class \code{"vector"} describing the correlations} \item{\code{xcoef}:}{Object of class \code{"matrix"} estimated coefficients for the \code{x} variables} \item{\code{ycoef}:}{Object of class \code{"matrix"} estimated coefficients for the \code{y} variables } %% \item{\code{xvar}:}{Object of class \code{"matrix"} holds the %% canonical variates for \code{x}} %% \item{\code{yvar}:}{Object of class \code{"matrix"} holds the %% canonical variates for \code{y}} } } \section{Methods}{ \describe{ \item{kcor}{\code{signature(object = "kcca")}: returns the correlations} \item{xcoef}{\code{signature(object = "kcca")}: returns the estimated coefficients for the \code{x} variables} \item{ycoef}{\code{signature(object = "kcca")}: returns the estimated coefficients for the \code{y} variables } %% \item{xvar}{\code{signature(object = "kcca")}: returns the canonical %% variates for \code{x}} %% \item{yvar}{\code{signature(object = "kcca")}: returns the canonical %% variates for \code{y}} } } \author{Alexandros Karatzoglou \cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{ \code{\link{kcca}}, \code{\link{kpca-class}} } \examples{ ## dummy data x <- matrix(rnorm(30),15) y <- matrix(rnorm(30),15) kcca(x,y,ncomps=2) } \keyword{classes} kernlab/man/spirals.Rd0000644000175100001440000000054311304023134014416 0ustar hornikusers\name{spirals} \alias{spirals} \title{Spirals Dataset} \description{A toy data set representing two spirals with Gaussian noise. The data was created with the \code{mlbench.spirals} function in \code{mlbench}. } \usage{data(spirals)} \format{ A matrix with 300 observations and 2 variables. } \examples{ data(spirals) plot(spirals) } \keyword{datasets} kernlab/man/musk.Rd0000644000175100001440000000257011304023134013722 0ustar hornikusers\name{musk} \alias{musk} \docType{data} \title{Musk data set} \description{ This dataset describes a set of 92 molecules of which 47 are judged by human experts to be musks and the remaining 45 molecules are judged to be non-musks. } \usage{data(musk)} \format{ A data frame with 476 observations on the following 167 variables. Variables 1-162 are "distance features" along rays. The distances are measured in hundredths of Angstroms. The distances may be negative or positive, since they are actually measured relative to an origin placed along each ray. The origin was defined by a "consensus musk" surface that is no longer used. Hence, any experiments with the data should treat these feature values as lying on an arbitrary continuous scale. In particular, the algorithm should not make any use of the zero point or the sign of each feature value. Variable 163 is the distance of the oxygen atom in the molecule to a designated point in 3-space. This is also called OXY-DIS. Variable 164 is the X-displacement from the designated point. Variable 165 is the Y-displacement from the designated point. Variable 166 is the Z-displacement from the designated point. Class: 0 for non-musk, and 1 for musk } \source{ UCI Machine Learning data repository \cr } \examples{ data(musk) muskm <- ksvm(Class~.,data=musk,kernel="rbfdot",C=1000) muskm } \keyword{datasets} kernlab/man/stringdot.Rd0000644000175100001440000000631111304023134014755 0ustar hornikusers\name{stringdot} \alias{stringdot} \title{String Kernel Functions} \description{ String kernels. 
}
\usage{
stringdot(length = 4, lambda = 1.1, type = "spectrum", normalized = TRUE)
}
\arguments{
  \item{length}{The length of the substrings considered}
  \item{lambda}{The decay factor}
  \item{type}{Type of string kernel, currently the following kernels
    are supported: \cr

    \code{spectrum} the kernel considers only matching substrings of
    exactly length \eqn{n} (also known as the string kernel). Each such
    matching substring is given a constant weight. The length parameter
    in this kernel has to be \eqn{length > 1}.\cr

    \code{boundrange} this kernel considers only matching substrings of
    length less than or equal to a given number N. This type of string
    kernel requires a length parameter \eqn{length > 1}\cr

    \code{constant} The kernel considers all matching substrings and
    assigns a constant weight (e.g. 1) to each of them. This
    \code{constant} kernel does not require any additional parameter.\cr

    \code{exponential} Exponential Decay kernel where the substring
    weight decays as the matching substring gets longer. The kernel
    requires a decay factor \eqn{ \lambda > 1}\cr

    \code{string} essentially identical to the spectrum kernel, only
    computed in a more conventional way.\cr

    \code{fullstring} essentially identical to the boundrange kernel,
    only computed in a more conventional way. \cr
  }
  \item{normalized}{normalize string kernel values (default: \code{TRUE})}
}
\details{
  The kernel generating functions are used to initialize a kernel
  function which calculates the dot (inner) product between two feature
  vectors in a Hilbert Space. These functions or their function
  generating names can be passed as a \code{kernel} argument on almost
  all functions in \pkg{kernlab} (e.g., \code{ksvm}, \code{kpca} etc.).

  The string kernels calculate similarities between two strings
  (e.g. texts or sequences) by matching the common substrings in the
  strings. Different types of string kernels exist and are mainly
  distinguished by how the matching is performed, i.e. some string
  kernels count the exact matchings of \eqn{n} characters (spectrum
  kernel) between the strings, others allow gaps (mismatch kernel) etc.
}
\value{
  Returns an S4 object of class \code{stringkernel} which extends the
  \code{function} class. The resulting function implements the given
  kernel calculating the inner (dot) product between two character
  vectors.
  \item{kpar}{a list containing the kernel parameters (hyperparameters)
    used.}
  The kernel parameters can be accessed by the \code{kpar} function.
}
\author{Alexandros Karatzoglou\cr
  \email{alexandros.karatzoglou@ci.tuwien.ac.at}}
\note{ The \code{spectrum} and \code{boundrange} kernels are faster and
  more efficient implementations of the \code{string} and
  \code{fullstring} kernels which will still be included in
  \code{kernlab} for the next two versions.
}
\seealso{ \code{\link{dots} }, \code{\link{kernelMatrix} }, \code{\link{kernelMult}}, \code{\link{kernelPol}}}
\examples{
sk <- stringdot(type="string", length=5)
sk
}
\keyword{symbolmath}
kernlab/man/kmmd-class.Rd0000644000175100001440000000415311304023134014775 0ustar hornikusers\name{kmmd-class}
\docType{class}
\alias{kmmd-class}
\alias{kernelf,kmmd-method}
\alias{H0,kmmd-method}
\alias{AsympH0,kmmd-method}
\alias{Radbound,kmmd-method}
\alias{Asymbound,kmmd-method}
\alias{mmdstats,kmmd-method}
\title{Class "kmmd"}
\description{The Kernel Maximum Mean Discrepancy object class}
\section{Objects from the Class}{
Objects can be created by calls of the form \code{new("kmmd", ...)}
or by calling the \code{kmmd} function
}
\section{Slots}{
  \describe{
    \item{\code{kernelf}:}{Object of class \code{"kfunction"} contains
      the kernel function used}
    \item{\code{xmatrix}:}{Object of class \code{"kernelMatrix"}
      containing the data used}
    \item{\code{H0}}{Object of class \code{"logical"} contains the
      value: is H0 rejected (logical)}
    \item{\code{AsympH0}}{Object of class \code{"logical"} contains the
      value: is H0 rejected according to the asymptotic bound (logical)}
    \item{\code{mmdstats}}{Object of class \code{"vector"} contains the
      test statistics (vector of two)}
    \item{\code{Radbound}}{Object of class \code{"numeric"} contains
      the Rademacher bound}
    \item{\code{Asymbound}}{Object of class \code{"numeric"} contains
      the asymptotic bound}
  }
}
\section{Methods}{
  \describe{
    \item{kernelf}{\code{signature(object = "kmmd")}: returns the
      kernel function used}
    \item{H0}{\code{signature(object = "kmmd")}: returns the value of
      H0 being rejected}
    \item{AsympH0}{\code{signature(object = "kmmd")}: returns the value
      of H0 being rejected according to the asymptotic bound}
    \item{mmdstats}{\code{signature(object = "kmmd")}: returns the
      values of the mmd statistics}
    \item{Radbound}{\code{signature(object = "kmmd")}: returns the
      value of the Rademacher bound}
    \item{Asymbound}{\code{signature(object = "kmmd")}: returns the
      value of the asymptotic bound}
  }
}
\author{Alexandros Karatzoglou\cr
  \email{alexandros.karatzoglou@ci.tuwien.ac.at}}
\seealso{ \code{\link{kmmd}}, }
\examples{
# create data
x <- matrix(runif(300),100)
y <- matrix(runif(300)+1,100)

mmdo <- kmmd(x, y)

H0(mmdo)
}
\keyword{classes}
kernlab/man/kpca-class.Rd0000644000175100001440000000455712117363140015002 0ustar hornikusers\name{kpca-class}
\docType{class}
\alias{kpca-class}
\alias{rotated}
\alias{eig,kpca-method}
\alias{kcall,kpca-method}
\alias{kernelf,kpca-method}
\alias{pcv,kpca-method}
\alias{rotated,kpca-method}
\alias{xmatrix,kpca-method}
\title{Class "kpca"}
\description{The Kernel Principal Components Analysis class}
\section{Objects of class "kpca"}{
Objects can be created by calls of the form \code{new("kpca", ...)}
or by calling the \code{kpca} function.
}
\section{Slots}{
  \describe{
    \item{\code{pcv}:}{Object of class \code{"matrix"} containing the
      principal component vectors}
    \item{\code{eig}:}{Object of class \code{"vector"} containing the
      corresponding eigenvalues}
    \item{\code{rotated}:}{Object of class \code{"matrix"} containing
      the projection of the data on the principal components}
    \item{\code{kernelf}:}{Object of class \code{"function"} containing
      the kernel function used}
    \item{\code{kpar}:}{Object of class \code{"list"} containing the
      kernel parameters used}
    \item{\code{xmatrix}:}{Object of class \code{"matrix"} containing
      the data matrix used}
    \item{\code{kcall}:}{Object of class \code{"ANY"} containing the
      function call}
    \item{\code{n.action}:}{Object of class \code{"ANY"} containing the
      action performed on NA}
  }
}
\section{Methods}{
  \describe{
    \item{eig}{\code{signature(object = "kpca")}: returns the
      eigenvalues}
    \item{kcall}{\code{signature(object = "kpca")}: returns the
      performed call}
    \item{kernelf}{\code{signature(object = "kpca")}: returns the used
      kernel function}
    \item{pcv}{\code{signature(object = "kpca")}: returns the principal
      component vectors}
    \item{predict}{\code{signature(object = "kpca")}: embeds new data}
    \item{rotated}{\code{signature(object = "kpca")}: returns the
      projected data}
    \item{xmatrix}{\code{signature(object = "kpca")}: returns the used
      data matrix}
  }
}
\author{Alexandros Karatzoglou\cr
  \email{alexandros.karatzoglou@ci.tuwien.ac.at}}
\seealso{ \code{\link{ksvm-class}}, \code{\link{kcca-class}} }
\examples{
# another example using the iris data
data(iris)
test <- sample(1:50,20)
kpc <- kpca(~.,data=iris[-test,-5],kernel="rbfdot",
            kpar=list(sigma=0.2),features=2)

# print the principal component vectors
pcv(kpc)
rotated(kpc)
kernelf(kpc)
eig(kpc)
}
\keyword{classes}
kernlab/man/ipop.Rd0000644000175100001440000000532014221633756013730 0ustar hornikusers\name{ipop}
\alias{ipop}
\alias{ipop,ANY,matrix-method}
\title{Quadratic Programming Solver}
\description{
  ipop solves the quadratic programming problem:\cr
  \eqn{\min(c'*x + 1/2 * x' * H * x)}\cr
  subject to: \cr
  \eqn{b <= A * x <= b + r}\cr
  \eqn{l <= x <= u}
}
\usage{
ipop(c, H, A, b, l, u, r, sigf = 7, maxiter = 40, margin = 0.05,
     bound = 10, verb = 0)
}
\arguments{
  \item{c}{Vector or one column matrix appearing in the quadratic function}
  \item{H}{square matrix appearing in the quadratic function, or the
    decomposed form \eqn{Z} of the \eqn{H} matrix where \eqn{Z} is a
    \eqn{n x m} matrix with \eqn{n > m} and \eqn{ZZ' = H}.}
  \item{A}{Matrix defining the constraints under which we minimize the
    quadratic function}
  \item{b}{Vector or one column matrix defining the constraints}
  \item{l}{Lower bound vector or one column matrix}
  \item{u}{Upper bound vector or one column matrix}
  \item{r}{Vector or one column matrix defining constraints}
  \item{sigf}{Precision (default: 7 significant figures)}
  \item{maxiter}{Maximum number of iterations}
  \item{margin}{how close we get to the constraints}
  \item{bound}{Clipping bound for the variables}
  \item{verb}{Display convergence information during runtime}
}
\details{
  ipop uses an interior point method to solve the quadratic programming
  problem. \cr
  The \eqn{H} matrix can also be provided in the decomposed form
  \eqn{Z} where \eqn{ZZ' = H}; in that case the Sherman-Morrison-Woodbury
  formula is used internally.
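  As a minimal sketch (assuming \code{x}, \code{y} and the problem
  objects \code{c}, \code{A}, \code{b}, \code{l}, \code{u}, \code{r}
  are set up as in the example below), a low-rank factor obtained with
  \code{\link{inchol}} can be passed in place of the dense \eqn{H}:
\preformatted{
## sketch: pass a non-square factor Zy (n x m, m < n) such that
## tcrossprod(Zy) approximates H, instead of the dense H matrix
Z  <- inchol(x, kernel = rbfdot(sigma = 0.1)) # incomplete Cholesky of K
Zy <- Z * y    # row-scale by the labels: tcrossprod(Zy) ~ (y y') * K
sv <- ipop(c, Zy, A, b, l, u, r)
}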
} \value{ An S4 object with the following slots \item{primal}{Vector containing the primal solution of the quadratic problem} \item{dual}{The dual solution of the problem} \item{how}{Character string describing the type of convergence} all slots can be accessed through accessor functions (see example) } \references{ R. J. Vanderbei\cr \emph{LOQO: An interior point code for quadratic programming}\cr Optimization Methods and Software 11, 451-484, 1999 \cr \url{https://vanderbei.princeton.edu/ps/loqo5.pdf} } \author{Alexandros Karatzoglou (based on Matlab code by Alex Smola) \cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{\code{solve.QP}, \code{\link{inchol}}, \code{\link{csi}}} \examples{ ## solve the Support Vector Machine optimization problem data(spam) ## sample a scaled part (500 points) of the spam data set m <- 500 set <- sample(1:dim(spam)[1],m) x <- scale(as.matrix(spam[,-58]))[set,] y <- as.integer(spam[set,58]) y[y==2] <- -1 ##set C parameter and kernel C <- 5 rbf <- rbfdot(sigma = 0.1) ## create H matrix etc. H <- kernelPol(rbf,x,,y) c <- matrix(rep(-1,m)) A <- t(y) b <- 0 l <- matrix(rep(0,m)) u <- matrix(rep(C,m)) r <- 0 sv <- ipop(c,H,A,b,l,u,r) sv dual(sv) } \keyword{optimize} kernlab/man/rvm.Rd0000644000175100001440000001565514221633644013575 0ustar hornikusers\name{rvm} \alias{rvm} \alias{rvm-methods} \alias{rvm,formula-method} \alias{rvm,list-method} \alias{rvm,vector-method} \alias{rvm,kernelMatrix-method} \alias{rvm,matrix-method} \alias{show,rvm-method} \alias{predict,rvm-method} \alias{coef,rvm-method} \title{Relevance Vector Machine} \description{ The Relevance Vector Machine is a Bayesian model for regression and classification of identical functional form to the support vector machine. The \code{rvm} function currently supports only regression. } \usage{ \S4method{rvm}{formula}(x, data=NULL, ..., subset, na.action = na.omit) \S4method{rvm}{vector}(x, ...) \S4method{rvm}{matrix}(x, y, type="regression", kernel="rbfdot", kpar="automatic", alpha= ncol(as.matrix(x)), var=0.1, var.fix=FALSE, iterations=100, verbosity = 0, tol = .Machine$double.eps, minmaxdiff = 1e-3, cross = 0, fit = TRUE, ... , subset, na.action = na.omit) \S4method{rvm}{list}(x, y, type = "regression", kernel = "stringdot", kpar = list(length = 4, lambda = 0.5), alpha = 5, var = 0.1, var.fix = FALSE, iterations = 100, verbosity = 0, tol = .Machine$double.eps, minmaxdiff = 1e-3, cross = 0, fit = TRUE, ..., subset, na.action = na.omit) } \arguments{ \item{x}{a symbolic description of the model to be fit. When not using a formula x can be a matrix or vector containing the training data or a kernel matrix of class \code{kernelMatrix} of the training data or a list of character vectors (for use with the string kernel). Note, that the intercept is always excluded, whether given in the formula or not.} \item{data}{an optional data frame containing the variables in the model. By default the variables are taken from the environment which `rvm' is called from.} \item{y}{a response vector with one label for each row/component of \code{x}. Can be either a factor (for classification tasks) or a numeric vector (for regression).} \item{type}{\code{rvm} can only be used for regression at the moment.} \item{kernel}{the kernel function used in training and predicting. This parameter can be set to any function, of class kernel, which computes a dot product between two vector arguments. 
kernlab provides the most popular kernel functions
    which can be used by setting the kernel parameter to the following
    strings:
    \itemize{
      \item \code{rbfdot} Radial Basis kernel "Gaussian"
      \item \code{polydot} Polynomial kernel
      \item \code{vanilladot} Linear kernel
      \item \code{tanhdot} Hyperbolic tangent kernel
      \item \code{laplacedot} Laplacian kernel
      \item \code{besseldot} Bessel kernel
      \item \code{anovadot} ANOVA RBF kernel
      \item \code{splinedot} Spline kernel
      \item \code{stringdot} String kernel
    }
    The kernel parameter can also be set to a user defined function of
    class kernel by passing the function name as an argument.
  }
  \item{kpar}{the list of hyper-parameters (kernel parameters).
    This is a list which contains the parameters to be used with the
    kernel function. Valid parameters for existing kernels are:
    \itemize{
      \item \code{sigma} inverse kernel width for the Radial Basis
      kernel function "rbfdot" and the Laplacian kernel "laplacedot".
      \item \code{degree, scale, offset} for the Polynomial kernel "polydot"
      \item \code{scale, offset} for the Hyperbolic tangent kernel
      function "tanhdot"
      \item \code{sigma, order, degree} for the Bessel kernel "besseldot".
      \item \code{sigma, degree} for the ANOVA kernel "anovadot".
      \item \code{length, lambda, normalized} for the "stringdot" kernel
      where length is the length of the strings considered, lambda the
      decay factor and normalized a logical parameter determining if the
      kernel evaluations should be normalized.
    }
    Hyper-parameters for user defined kernels can be passed through the
    kpar parameter as well.

    In the case of a Radial Basis kernel function (Gaussian) kpar can
    also be set to the string "automatic" which uses the heuristics in
    \code{\link{sigest}} to calculate a good \code{sigma} value for the
    Gaussian RBF or Laplace kernel, from the data.
    (default = "automatic").}
  \item{alpha}{The initial alpha vector. Can be either a vector of
    length equal to the number of data points or a single number.}
  \item{var}{the initial noise variance}
  \item{var.fix}{Keep noise variance fixed during iterations (default: FALSE)}
  \item{iterations}{Number of iterations allowed (default: 100)}
  \item{tol}{tolerance of termination criterion}
  \item{minmaxdiff}{termination criterion: stop when the maximum
    difference falls below this value (default: 1e-3)}
  \item{verbosity}{print information on algorithm convergence (default
    = FALSE)}
  \item{fit}{indicates whether the fitted values should be computed and
    included in the model or not (default: TRUE)}
  \item{cross}{if an integer value k>0 is specified, a k-fold cross
    validation on the training data is performed to assess the quality
    of the model: the Mean Squared Error for regression}
  \item{subset}{An index vector specifying the cases to be used in the
    training sample.  (NOTE: If given, this argument must be named.)}
  \item{na.action}{A function to specify the action to be taken if \code{NA}s are
    found. The default action is \code{na.omit}, which leads to
    rejection of cases with missing values on any required variable. An
    alternative is \code{na.fail}, which causes an error if \code{NA}
    cases are found. (NOTE: If given, this argument must be named.)}
  \item{\dots}{ additional parameters}
}
\details{The Relevance Vector Machine typically leads to sparser models
  than the SVM. It also performs better in many cases (especially in
  regression).
}
\value{
  An S4 object of class "rvm" containing the fitted model.
Accessor functions can be used to access the slots of the object which include : \item{alpha}{The resulting relevance vectors} \item{alphaindex}{ The index of the resulting relevance vectors in the data matrix} \item{nRV}{Number of relevance vectors} \item{RVindex}{The indexes of the relevance vectors} \item{error}{Training error (if \code{fit = TRUE})} ... } \references{ Tipping, M. E.\cr \emph{Sparse Bayesian learning and the relevance vector machine}\cr Journal of Machine Learning Research 1, 211-244\cr \url{https://www.jmlr.org/papers/volume1/tipping01a/tipping01a.pdf} } \author{ Alexandros Karatzoglou \cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{ \code{\link{ksvm}}} \examples{ # create data x <- seq(-20,20,0.1) y <- sin(x)/x + rnorm(401,sd=0.05) # train relevance vector machine foo <- rvm(x, y) foo # print relevance vectors alpha(foo) RVindex(foo) # predict and plot ytest <- predict(foo, x) plot(x, y, type ="l") lines(x, ytest, col="red") } \keyword{regression} \keyword{nonlinear} kernlab/man/specc.Rd0000644000175100001440000001421314366220464014055 0ustar hornikusers\name{specc} \alias{specc} \alias{specc,matrix-method} \alias{specc,formula-method} \alias{specc,list-method} \alias{specc,kernelMatrix-method} \alias{show,specc-method} \title{Spectral Clustering} \description{ A spectral clustering algorithm. Clustering is performed by embedding the data into the subspace of the eigenvectors of an affinity matrix. } \usage{ \S4method{specc}{formula}(x, data = NULL, na.action = na.omit, ...) \S4method{specc}{matrix}(x, centers, kernel = "rbfdot", kpar = "automatic", nystrom.red = FALSE, nystrom.sample = dim(x)[1]/6, iterations = 200, mod.sample = 0.75, na.action = na.omit, ...) \S4method{specc}{kernelMatrix}(x, centers, nystrom.red = FALSE, iterations = 200, ...) \S4method{specc}{list}(x, centers, kernel = "stringdot", kpar = list(length=4, lambda=0.5), nystrom.red = FALSE, nystrom.sample = length(x)/6, iterations = 200, mod.sample = 0.75, na.action = na.omit, ...) } \arguments{ \item{x}{the matrix of data to be clustered, or a symbolic description of the model to be fit, or a kernel Matrix of class \code{kernelMatrix}, or a list of character vectors.} \item{data}{an optional data frame containing the variables in the model. By default the variables are taken from the environment which `specc' is called from.} \item{centers}{Either the number of clusters or a set of initial cluster centers. If the first, a random set of rows in the eigenvectors matrix are chosen as the initial centers.} \item{kernel}{the kernel function used in computing the affinity matrix. This parameter can be set to any function, of class kernel, which computes a dot product between two vector arguments. kernlab provides the most popular kernel functions which can be used by setting the kernel parameter to the following strings: \itemize{ \item \code{rbfdot} Radial Basis kernel function "Gaussian" \item \code{polydot} Polynomial kernel function \item \code{vanilladot} Linear kernel function \item \code{tanhdot} Hyperbolic tangent kernel function \item \code{laplacedot} Laplacian kernel function \item \code{besseldot} Bessel kernel function \item \code{anovadot} ANOVA RBF kernel function \item \code{splinedot} Spline kernel \item \code{stringdot} String kernel } The kernel parameter can also be set to a user defined function of class kernel by passing the function name as an argument. } \item{kpar}{a character string or the list of hyper-parameters (kernel parameters). 
The default character string \code{"automatic"} uses a heuristic to determine a
    suitable value for the width parameter of the RBF kernel.
    The second option \code{"local"} (local scaling) uses a more advanced
    heuristic and sets a width parameter for every point in the data set.
    This is particularly useful when the data incorporates multiple scales.
    A list can also be used containing the parameters to be used with the
    kernel function. Valid parameters for existing kernels are:
    \itemize{
      \item \code{sigma} inverse kernel width for the Radial Basis
      kernel function "rbfdot" and the Laplacian kernel "laplacedot".
      \item \code{degree, scale, offset} for the Polynomial kernel "polydot"
      \item \code{scale, offset} for the Hyperbolic tangent kernel
      function "tanhdot"
      \item \code{sigma, order, degree} for the Bessel kernel "besseldot".
      \item \code{sigma, degree} for the ANOVA kernel "anovadot".
      \item \code{length, lambda, normalized} for the "stringdot" kernel
      where length is the length of the strings considered, lambda the
      decay factor and normalized a logical parameter determining if the
      kernel evaluations should be normalized.
    }
    Hyper-parameters for user defined kernels can be passed through the
    kpar parameter as well.}
  \item{nystrom.red}{use the Nystrom method to calculate eigenvectors.
    When \code{TRUE} a sample of the dataset is used to calculate the
    eigenvalues; thus only an \eqn{n x m} matrix, where \eqn{n} is the
    sample size, is stored in memory (default: \code{FALSE})}
  \item{nystrom.sample}{number of data points to use for estimating the
    eigenvalues when using the Nystrom method (default: dim(x)[1]/6)}
  \item{mod.sample}{proportion of data to use when estimating sigma
    (default: 0.75)}
  \item{iterations}{the maximum number of iterations allowed. }
  \item{na.action}{the action to perform on NA}
  \item{\dots}{additional parameters}
}
\details{
  Spectral clustering works by embedding the data points of the
  partitioning problem into the subspace of the \eqn{k} largest
  eigenvectors of a normalized affinity/kernel matrix.
  Using a simple clustering method like \code{kmeans} on the embedded
  points usually leads to good performance. It can be shown that
  spectral clustering methods boil down to graph partitioning.\cr
  The data can be passed to the \code{specc} function in a \code{matrix} or a
  \code{data.frame}; in addition \code{specc} also supports input in the
  form of a kernel matrix of class \code{kernelMatrix} or as a list of
  character vectors where a string kernel has to be used.}
\value{
  An S4 object of class \code{specc} which extends the class \code{vector}
  containing integers indicating the cluster to which
  each point is allocated. The following slots contain useful information:
  \item{centers}{A matrix of cluster centers.}
  \item{size}{The number of points in each cluster}
  \item{withinss}{The within-cluster sum of squares for each cluster}
  \item{kernelf}{The kernel function used}
}
\references{
  Andrew Y. Ng, Michael I. Jordan, Yair Weiss\cr
  \emph{On Spectral Clustering: Analysis and an Algorithm}\cr
  Neural Information Processing Symposium 2001\cr
  \url{https://papers.neurips.cc/paper/2092-on-spectral-clustering-analysis-and-an-algorithm.pdf}
}
\author{Alexandros Karatzoglou \cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}
}
\seealso{\code{\link{kkmeans}}, \code{\link{kpca}}, \code{\link{kcca}} }
\examples{
## Cluster the spirals data set.
data(spirals) sc <- specc(spirals, centers=2) sc centers(sc) size(sc) withinss(sc) plot(spirals, col=sc) } \keyword{cluster} kernlab/man/kpca.Rd0000644000175100001440000001203014366214206013666 0ustar hornikusers\name{kpca} \alias{kpca} \alias{kpca,formula-method} \alias{kpca,matrix-method} \alias{kpca,kernelMatrix-method} \alias{kpca,list-method} \alias{predict,kpca-method} \title{Kernel Principal Components Analysis} \description{ Kernel Principal Components Analysis is a nonlinear form of principal component analysis.} \usage{ \S4method{kpca}{formula}(x, data = NULL, na.action, ...) \S4method{kpca}{matrix}(x, kernel = "rbfdot", kpar = list(sigma = 0.1), features = 0, th = 1e-4, na.action = na.omit, ...) \S4method{kpca}{kernelMatrix}(x, features = 0, th = 1e-4, ...) \S4method{kpca}{list}(x, kernel = "stringdot", kpar = list(length = 4, lambda = 0.5), features = 0, th = 1e-4, na.action = na.omit, ...) } \arguments{ \item{x}{the data matrix indexed by row or a formula describing the model, or a kernel Matrix of class \code{kernelMatrix}, or a list of character vectors} \item{data}{an optional data frame containing the variables in the model (when using a formula).} \item{kernel}{the kernel function used in training and predicting. This parameter can be set to any function, of class kernel, which computes a dot product between two vector arguments. kernlab provides the most popular kernel functions which can be used by setting the kernel parameter to the following strings: \itemize{ \item \code{rbfdot} Radial Basis kernel function "Gaussian" \item \code{polydot} Polynomial kernel function \item \code{vanilladot} Linear kernel function \item \code{tanhdot} Hyperbolic tangent kernel function \item \code{laplacedot} Laplacian kernel function \item \code{besseldot} Bessel kernel function \item \code{anovadot} ANOVA RBF kernel function \item \code{splinedot} Spline kernel } The kernel parameter can also be set to a user defined function of class kernel by passing the function name as an argument. } \item{kpar}{the list of hyper-parameters (kernel parameters). This is a list which contains the parameters to be used with the kernel function. Valid parameters for existing kernels are : \itemize{ \item \code{sigma} inverse kernel width for the Radial Basis kernel function "rbfdot" and the Laplacian kernel "laplacedot". \item \code{degree, scale, offset} for the Polynomial kernel "polydot" \item \code{scale, offset} for the Hyperbolic tangent kernel function "tanhdot" \item \code{sigma, order, degree} for the Bessel kernel "besseldot". \item \code{sigma, degree} for the ANOVA kernel "anovadot". } Hyper-parameters for user defined kernels can be passed through the kpar parameter as well.} \item{features}{Number of features (principal components) to return. (default: 0 , all)} \item{th}{the value of the eigenvalue under which principal components are ignored (only valid when features = 0). (default : 0.0001) } \item{na.action}{A function to specify the action to be taken if \code{NA}s are found. The default action is \code{na.omit}, which leads to rejection of cases with missing values on any required variable. An alternative is \code{na.fail}, which causes an error if \code{NA} cases are found. 
(NOTE: If given, this argument must be named.)} \item{\dots}{ additional parameters} } \details{Using kernel functions one can efficiently compute principal components in high-dimensional feature spaces, related to input space by some non-linear map.\cr The data can be passed to the \code{kpca} function in a \code{matrix} or a \code{data.frame}, in addition \code{kpca} also supports input in the form of a kernel matrix of class \code{kernelMatrix} or as a list of character vectors where a string kernel has to be used. } \value{ An S4 object containing the principal component vectors along with the corresponding eigenvalues. \item{pcv}{a matrix containing the principal component vectors (column wise)} \item{eig}{The corresponding eigenvalues} \item{rotated}{The original data projected (rotated) on the principal components} \item{xmatrix}{The original data matrix} all the slots of the object can be accessed by accessor functions. } \note{The predict function can be used to embed new data on the new space} \references{ Schoelkopf B., A. Smola, K.-R. Mueller :\cr \emph{Nonlinear component analysis as a kernel eigenvalue problem}\cr Neural Computation 10, 1299-1319\cr \doi{10.1162/089976698300017467}. } \author{Alexandros Karatzoglou \cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{\code{\link{kcca}}, \code{pca}} \examples{ # another example using the iris data(iris) test <- sample(1:150,20) kpc <- kpca(~.,data=iris[-test,-5],kernel="rbfdot", kpar=list(sigma=0.2),features=2) #print the principal component vectors pcv(kpc) #plot the data projection on the components plot(rotated(kpc),col=as.integer(iris[-test,5]), xlab="1st Principal Component",ylab="2nd Principal Component") #embed remaining points emb <- predict(kpc,iris[test,-5]) points(emb,col=as.integer(iris[test,5])) } \keyword{cluster} kernlab/man/specc-class.Rd0000644000175100001440000000315311304023134015141 0ustar hornikusers\name{specc-class} \docType{class} \alias{specc-class} \alias{centers} \alias{size} \alias{withinss} \alias{centers,specc-method} \alias{withinss,specc-method} \alias{size,specc-method} \alias{kernelf,specc-method} \title{Class "specc"} \description{ The Spectral Clustering Class} \section{Objects from the Class}{ Objects can be created by calls of the form \code{new("specc", ...)}. or by calling the function \code{specc}. } \section{Slots}{ \describe{ \item{\code{.Data}:}{Object of class \code{"vector"} containing the cluster assignments} \item{\code{centers}:}{Object of class \code{"matrix"} containing the cluster centers} \item{\code{size}:}{Object of class \code{"vector"} containing the number of points in each cluster} \item{\code{withinss}:}{Object of class \code{"vector"} containing the within-cluster sum of squares for each cluster} \item{\code{kernelf}}{Object of class \code{kernel} containing the used kernel function.} } } \section{Methods}{ \describe{ \item{centers}{\code{signature(object = "specc")}: returns the cluster centers} \item{withinss}{\code{signature(object = "specc")}: returns the within-cluster sum of squares for each cluster} \item{size}{\code{signature(object = "specc")}: returns the number of points in each cluster } } } \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{ \code{\link{specc}}, \code{\link{kpca-class}} } \examples{ ## Cluster the spirals data set. 
data(spirals)
sc <- specc(spirals, centers=2)
centers(sc)
size(sc)
}
\keyword{classes}
kernlab/man/inlearn.Rd0000644000175100001440000000600712117362575014414 0ustar hornikusers\name{inlearn}
\alias{inlearn}
\alias{inlearn,numeric-method}
\title{Onlearn object initialization}
\description{
  Initialization function for the Online Kernel Algorithm object
  \code{onlearn}.
}
\usage{
\S4method{inlearn}{numeric}(d, kernel = "rbfdot",
        kpar = list(sigma = 0.1), type = "novelty", buffersize = 1000)
}
\arguments{
  \item{d}{the dimensionality of the data to be learned}
  \item{kernel}{the kernel function used in training and predicting.
    This parameter can be set to any function, of class kernel, which
    computes a dot product between two vector arguments. kernlab
    provides the most popular kernel functions which can be used by
    setting the kernel parameter to the following strings:
    \itemize{
      \item \code{rbfdot} Radial Basis kernel function "Gaussian"
      \item \code{polydot} Polynomial kernel function
      \item \code{vanilladot} Linear kernel function
      \item \code{tanhdot} Hyperbolic tangent kernel function
      \item \code{laplacedot} Laplacian kernel function
      \item \code{besseldot} Bessel kernel function
      \item \code{anovadot} ANOVA RBF kernel function
    }
    The kernel parameter can also be set to a user defined function of
    class kernel by passing the function name as an argument.
  }
  \item{kpar}{the list of hyper-parameters (kernel parameters).
    This is a list which contains the parameters to be used with the
    kernel function. Valid parameters for existing kernels are:
    \itemize{
      \item \code{sigma} inverse kernel width for the Radial Basis
      kernel function "rbfdot" and the Laplacian kernel "laplacedot".
      \item \code{degree, scale, offset} for the Polynomial kernel "polydot"
      \item \code{scale, offset} for the Hyperbolic tangent kernel
      function "tanhdot"
      \item \code{sigma, order, degree} for the Bessel kernel "besseldot".
      \item \code{sigma, degree} for the ANOVA kernel "anovadot".
    }
    Hyper-parameters for user defined kernels can be passed through the
    \code{kpar} parameter as well.}
  \item{type}{the type of problem to be learned by the online algorithm:
    \code{classification}, \code{regression}, \code{novelty}}
  \item{buffersize}{the size of the buffer to be used}
}
\details{
  The \code{inlearn} function is used to initialize a blank \code{onlearn}
  object.
}
\value{
  The function returns an \code{S4} object of class \code{onlearn} that
  can be used by the \code{onlearn} function.
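  For example (a minimal sketch; the parameter values are illustrative):
  \preformatted{
## initialize a blank model for 2-dimensional classification data
on <- inlearn(2, kernel = "rbfdot", kpar = list(sigma = 0.2),
              type = "classification")
## the object is then updated by onlearn(), one observation at a time
on <- onlearn(on, c(1, 2), 1, nu = 0.03, lambda = 0.1)
  }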
} \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{ \code{\link{onlearn}}, \code{\link{onlearn-class}} } \examples{ ## create toy data set x <- rbind(matrix(rnorm(100),,2),matrix(rnorm(100)+3,,2)) y <- matrix(c(rep(1,50),rep(-1,50)),,1) ## initialize onlearn object on <- inlearn(2, kernel = "rbfdot", kpar = list(sigma = 0.2), type = "classification") ## learn one data point at the time for(i in sample(1:100,100)) on <- onlearn(on,x[i,],y[i],nu=0.03,lambda=0.1) sign(predict(on,x)) } \keyword{classif} \keyword{neural} \keyword{regression} \keyword{ts} kernlab/man/kernelMatrix.Rd0000644000175100001440000001254111304023134015407 0ustar hornikusers\name{kernelMatrix} \alias{kernelMatrix} \alias{kernelMult} \alias{kernelPol} \alias{kernelFast} \alias{kernelPol,kernel-method} \alias{kernelMatrix,kernel-method} \alias{kernelMult,kernel-method} \alias{kernelFast,kernel-method} \alias{kernelMatrix,rbfkernel-method} \alias{kernelMatrix,polykernel-method} \alias{kernelMatrix,vanillakernel-method} \alias{kernelMatrix,tanhkernel-method} \alias{kernelMatrix,laplacekernel-method} \alias{kernelMatrix,anovakernel-method} \alias{kernelMatrix,splinekernel-method} \alias{kernelMatrix,besselkernel-method} \alias{kernelMatrix,stringkernel-method} \alias{kernelMult,rbfkernel,ANY-method} \alias{kernelMult,splinekernel,ANY-method} \alias{kernelMult,polykernel,ANY-method} \alias{kernelMult,tanhkernel,ANY-method} \alias{kernelMult,laplacekernel,ANY-method} \alias{kernelMult,besselkernel,ANY-method} \alias{kernelMult,anovakernel,ANY-method} \alias{kernelMult,vanillakernel,ANY-method} \alias{kernelMult,character,kernelMatrix-method} \alias{kernelMult,stringkernel,ANY-method} \alias{kernelPol,rbfkernel-method} \alias{kernelPol,splinekernel-method} \alias{kernelPol,polykernel-method} \alias{kernelPol,tanhkernel-method} \alias{kernelPol,vanillakernel-method} \alias{kernelPol,anovakernel-method} \alias{kernelPol,besselkernel-method} \alias{kernelPol,laplacekernel-method} \alias{kernelPol,stringkernel-method} \alias{kernelFast,rbfkernel-method} \alias{kernelFast,splinekernel-method} \alias{kernelFast,polykernel-method} \alias{kernelFast,tanhkernel-method} \alias{kernelFast,vanillakernel-method} \alias{kernelFast,anovakernel-method} \alias{kernelFast,besselkernel-method} \alias{kernelFast,laplacekernel-method} \alias{kernelFast,stringkernel-method} \alias{kernelFast,splinekernel-method} \title{Kernel Matrix functions} \description{ \code{kernelMatrix} calculates the kernel matrix \eqn{K_{ij} = k(x_i,x_j)} or \eqn{K_{ij} = k(x_i,y_j)}.\cr \code{kernelPol} computes the quadratic kernel expression \eqn{H = z_i z_j k(x_i,x_j)}, \eqn{H = z_i k_j k(x_i,y_j)}.\cr \code{kernelMult} calculates the kernel expansion \eqn{f(x_i) = \sum_{i=1}^m z_i k(x_i,x_j)}\cr \code{kernelFast} computes the kernel matrix, identical to \code{kernelMatrix}, except that it also requires the squared norm of the first argument as additional input, useful in iterative kernel matrix calculations. } \usage{ \S4method{kernelMatrix}{kernel}(kernel, x, y = NULL) \S4method{kernelPol}{kernel}(kernel, x, y = NULL, z, k = NULL) \S4method{kernelMult}{kernel}(kernel, x, y = NULL, z, blocksize = 256) \S4method{kernelFast}{kernel}(kernel, x, y, a) } \arguments{ \item{kernel}{the kernel function to be used to calculate the kernel matrix. This has to be a function of class \code{kernel}, i.e. which can be generated either one of the build in kernel generating functions (e.g., \code{rbfdot} etc.) 
or a user defined function of class \code{kernel} taking two vector arguments
    and returning a scalar.}
  \item{x}{a data matrix to be used to calculate the kernel matrix, or a
    list of vectors when a \code{stringkernel} is used}
  \item{y}{second data matrix to calculate the kernel matrix, or a
    list of vectors when a \code{stringkernel} is used}
  \item{z}{a suitable vector or matrix}
  \item{k}{a suitable vector or matrix}
  \item{a}{the squared norm of \code{x}, e.g., \code{rowSums(x^2)}}
  \item{blocksize}{the kernel expansion computations are done block-wise
    to avoid storing the kernel matrix in memory. \code{blocksize}
    defines the size of the computational blocks.}
}
\details{
  Common functions used during kernel based computations.\cr
  The \code{kernel} parameter can be set to any function, of class
  kernel, which computes the inner product in feature space between two
  vector arguments. \pkg{kernlab} provides the most popular kernel
  functions which can be initialized by using the following functions:
  \itemize{
    \item \code{rbfdot} Radial Basis kernel function
    \item \code{polydot} Polynomial kernel function
    \item \code{vanilladot} Linear kernel function
    \item \code{tanhdot} Hyperbolic tangent kernel function
    \item \code{laplacedot} Laplacian kernel function
    \item \code{besseldot} Bessel kernel function
    \item \code{anovadot} ANOVA RBF kernel function
    \item \code{splinedot} the Spline kernel
  }
  (see example.)

  \code{kernelFast} is mainly used in situations where columns of the
  kernel matrix are computed per invocation. In these cases,
  evaluating the norm of each row-entry over and over again would
  cause significant computational overhead.
}
\value{
  \code{kernelMatrix} returns a symmetric positive semi-definite matrix.\cr
  \code{kernelPol} returns a matrix.\cr
  \code{kernelMult} usually returns a one-column matrix.
}
\author{Alexandros Karatzoglou \cr
  \email{alexandros.karatzoglou@ci.tuwien.ac.at}}
\seealso{\code{\link{rbfdot}}, \code{\link{polydot}},
  \code{\link{tanhdot}}, \code{\link{vanilladot}}}
\examples{
## use the spam data
data(spam)
dt <- as.matrix(spam[c(10:20,3000:3010),-58])

## initialize kernel function
rbf <- rbfdot(sigma = 0.05)
rbf

## calculate kernel matrix
kernelMatrix(rbf, dt)

yt <- as.matrix(as.integer(spam[c(10:20,3000:3010),58]))
yt[yt==2] <- -1

## calculate the quadratic kernel expression
kernelPol(rbf, dt, ,yt)

## calculate the kernel expansion
kernelMult(rbf, dt, ,yt)
}
\keyword{algebra}
\keyword{array}
kernlab/man/onlearn.Rd0000644000175100001440000000467214221634662014425 0ustar hornikusers\name{onlearn}
\alias{onlearn}
\alias{onlearn,onlearn-method}
\title{Kernel Online Learning algorithms}
\description{
  Online Kernel-based Learning algorithms for classification, novelty
  detection, and regression.
}
\usage{
\S4method{onlearn}{onlearn}(obj, x, y = NULL, nu = 0.2,
        lambda = 1e-04)
}
\arguments{
  \item{obj}{an object of class \code{onlearn} created by the
    initialization function \code{inlearn} containing the kernel to be
    used during learning and the parameters of the learned model}
  \item{x}{vector or matrix containing the data. Factors have
    to be numerically coded. If \code{x} is a matrix the code is
    run internally one sample at a time.}
  \item{y}{the class label in case of classification. Only binary
    classification is supported and class labels have to be -1 or +1.
}
  \item{nu}{the parameter, similar to the \code{nu} parameter in SVM,
    that bounds the training error.}
  \item{lambda}{the learning rate}
}
\details{
  The online algorithms are based on a simple stochastic gradient descent
  method in feature space.
  The state of the algorithm is stored in an object of class
  \code{onlearn} and has to be passed to the function at each iteration.
}
\value{
  The function returns an \code{S4} object of class \code{onlearn}
  containing the model parameters and the last fitted value which can be
  retrieved by the accessor method \code{fit}. The value returned in the
  classification and novelty detection problem is the decision function
  value phi.
  The accessor method \code{alpha} returns the model parameters.
}
\references{ Kivinen J. Smola A.J. Williamson R.C. \cr
  \emph{Online Learning with Kernels}\cr
  IEEE Transactions on Signal Processing vol. 52, Issue 8, 2004\cr
  \url{https://alex.smola.org/papers/2004/KivSmoWil04.pdf}}
\author{Alexandros Karatzoglou\cr
  \email{alexandros.karatzoglou@ci.tuwien.ac.at}}
\seealso{\code{\link{inlearn}}}
\examples{
## create toy data set
x <- rbind(matrix(rnorm(100),,2),matrix(rnorm(100)+3,,2))
y <- matrix(c(rep(1,50),rep(-1,50)),,1)

## initialize onlearn object
on <- inlearn(2,kernel="rbfdot",kpar=list(sigma=0.2),
              type="classification")

ind <- sample(1:100,100)
## learn one data point at a time
for(i in ind)
  on <- onlearn(on,x[i,],y[i],nu=0.03,lambda=0.1)

## or learn all the data
on <- onlearn(on,x[ind,],y[ind],nu=0.03,lambda=0.1)

sign(predict(on,x))
}
\keyword{classif}
\keyword{neural}
\keyword{regression}
\keyword{ts}
kernlab/man/kha-class.Rd0000644000175100001440000000450312117362716014626 0ustar hornikusers\name{kha-class}
\docType{class}
\alias{kha-class}
\alias{eig,kha-method}
\alias{kcall,kha-method}
\alias{kernelf,kha-method}
\alias{pcv,kha-method}
\alias{xmatrix,kha-method}
\alias{eskm,kha-method}
\title{Class "kha"}
\description{ The Kernel Hebbian Algorithm class}
\section{Objects of class "kha"}{
  Objects can be created by calls of the form \code{new("kha", ...)}
  or by calling the \code{kha} function.
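  For instance, a minimal sketch (the \code{kha} function is the usual
  constructor; parameter values below are illustrative only):
  \preformatted{
kp <- kha(~ ., data = iris[, -5], kernel = "rbfdot",
          kpar = list(sigma = 0.2), features = 2,
          eta = 0.001, maxiter = 50)
is(kp)    # "kha"
  }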
} \section{Slots}{ \describe{ \item{\code{pcv}:}{Object of class \code{"matrix"} containing the principal component vectors } \item{\code{eig}:}{Object of class \code{"vector"} containing the corresponding normalization values} \item{\code{eskm}:}{Object of class \code{"vector"} containing the kernel sum} \item{\code{kernelf}:}{Object of class \code{"kfunction"} containing the kernel function used} \item{\code{kpar}:}{Object of class \code{"list"} containing the kernel parameters used } \item{\code{xmatrix}:}{Object of class \code{"matrix"} containing the data matrix used } \item{\code{kcall}:}{Object of class \code{"ANY"} containing the function call } \item{\code{n.action}:}{Object of class \code{"ANY"} containing the action performed on NA } } } \section{Methods}{ \describe{ \item{eig}{\code{signature(object = "kha")}: returns the normalization values } \item{kcall}{\code{signature(object = "kha")}: returns the performed call} \item{kernelf}{\code{signature(object = "kha")}: returns the used kernel function} \item{pcv}{\code{signature(object = "kha")}: returns the principal component vectors } \item{eskm}{\code{signature(object = "kha")}: returns the kernel sum} \item{predict}{\code{signature(object = "kha")}: embeds new data } \item{xmatrix}{\code{signature(object = "kha")}: returns the used data matrix } } } \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{ \code{\link{kha}}, \code{\link{ksvm-class}}, \code{\link{kcca-class}} } \examples{ # another example using the iris data(iris) test <- sample(1:50,20) kpc <- kha(~.,data=iris[-test,-5], kernel="rbfdot", kpar=list(sigma=0.2),features=2, eta=0.001, maxiter=65) #print the principal component vectors pcv(kpc) kernelf(kpc) eig(kpc) } \keyword{classes} kernlab/man/predict.gausspr.Rd0000644000175100001440000000417014221634075016073 0ustar hornikusers\name{predict.gausspr} \alias{predict.gausspr} \alias{predict,gausspr-method} \title{predict method for Gaussian Processes object} \description{Prediction of test data using Gaussian Processes} \usage{ \S4method{predict}{gausspr}(object, newdata, type = "response", coupler = "minpair") } \arguments{ \item{object}{an S4 object of class \code{gausspr} created by the \code{gausspr} function} \item{newdata}{a data frame or matrix containing new data} \item{type}{one of \code{response}, \code{probabilities} indicating the type of output: predicted values or matrix of class probabilities} \item{coupler}{Coupling method used in the multiclass case, can be one of \code{minpair} or \code{pkpd} (see reference for more details).} } \value{ \item{response}{predicted classes (the classes with majority vote) or the response value in regression.} \item{probabilities}{matrix of class probabilities (one column for each class and one row for each input).} } \references{ \itemize{ \item C. K. I. Williams and D. Barber \cr Bayesian classification with Gaussian processes. \cr IEEE Transactions on Pattern Analysis and Machine Intelligence, 20(12):1342-1351, 1998\cr \url{https://homepages.inf.ed.ac.uk/ckiw/postscript/pami_final.ps.gz} \item T.F. Wu, C.J. Lin, R.C. Weng. 
\cr
    \emph{Probability estimates for Multi-class Classification by
      Pairwise Coupling}\cr
    \url{https://www.csie.ntu.edu.tw/~cjlin/papers/svmprob/svmprob.pdf}
  }
}
\author{Alexandros Karatzoglou\cr
  \email{alexandros.karatzoglou@ci.tuwien.ac.at}}
\keyword{methods}
\keyword{regression}
\keyword{classif}
\examples{
## example using the promotergene data set
data(promotergene)

## create test and training set
ind <- sample(1:dim(promotergene)[1],20)
genetrain <- promotergene[-ind, ]
genetest <- promotergene[ind, ]

## train a Gaussian processes model
gene <- gausspr(Class~.,data=genetrain,kernel="rbfdot",
                kpar=list(sigma=0.015))
gene

## predict gene type probabilities on the test set
genetype <- predict(gene,genetest,type="probabilities")
genetype
}
kernlab/man/kmmd.Rd0000644000175100001440000001224114366220452013704 0ustar hornikusers\name{kmmd}
\alias{kmmd}
\alias{kmmd,matrix-method}
\alias{kmmd,list-method}
\alias{kmmd,kernelMatrix-method}
\alias{show,kmmd-method}
\alias{H0}
\alias{Asymbound}
\alias{Radbound}
\alias{mmdstats}
\alias{AsympH0}
\title{Kernel Maximum Mean Discrepancy.}
\description{The Kernel Maximum Mean Discrepancy \code{kmmd} performs
  a non-parametric distribution test.}
\usage{
\S4method{kmmd}{matrix}(x, y, kernel="rbfdot", kpar="automatic", alpha = 0.05,
     asymptotic = FALSE, replace = TRUE, ntimes = 150, frac = 1, ...)

\S4method{kmmd}{kernelMatrix}(x, y, Kxy, alpha = 0.05,
     asymptotic = FALSE, replace = TRUE, ntimes = 100, frac = 1, ...)

\S4method{kmmd}{list}(x, y, kernel="stringdot",
     kpar = list(type = "spectrum", length = 4), alpha = 0.05,
     asymptotic = FALSE, replace = TRUE, ntimes = 150, frac = 1, ...)
}
\arguments{
  \item{x}{data values, in a \code{matrix},
    \code{list}, or \code{kernelMatrix}}
  \item{y}{data values, in a \code{matrix},
    \code{list}, or \code{kernelMatrix}}
  \item{Kxy}{\code{kernelMatrix} between \eqn{x} and \eqn{y} values (only for the
    kernelMatrix interface)}
  \item{kernel}{the kernel function used in training and predicting.
    This parameter can be set to any function, of class kernel, which
    computes a dot product between two vector arguments. \code{kernlab}
    provides the most popular kernel functions which can be used by
    setting the kernel parameter to the following strings:
    \itemize{
      \item \code{rbfdot} Radial Basis kernel function "Gaussian"
      \item \code{polydot} Polynomial kernel function
      \item \code{vanilladot} Linear kernel function
      \item \code{tanhdot} Hyperbolic tangent kernel function
      \item \code{laplacedot} Laplacian kernel function
      \item \code{besseldot} Bessel kernel function
      \item \code{anovadot} ANOVA RBF kernel function
      \item \code{splinedot} Spline kernel
      \item \code{stringdot} String kernel
    }
    The kernel parameter can also be set to a user defined function of
    class kernel by passing the function name as an argument.
  }
  \item{kpar}{the list of hyper-parameters (kernel parameters).
    This is a list which contains the parameters to be used with the
    kernel function. Valid parameters for existing kernels are:
    \itemize{
      \item \code{sigma} inverse kernel width for the Radial Basis
      kernel function "rbfdot" and the Laplacian kernel "laplacedot".
      \item \code{degree, scale, offset} for the Polynomial kernel "polydot"
      \item \code{scale, offset} for the Hyperbolic tangent kernel
      function "tanhdot"
      \item \code{sigma, order, degree} for the Bessel kernel "besseldot".
      \item \code{sigma, degree} for the ANOVA kernel "anovadot".
      \item \code{length, lambda, normalized} for the "stringdot" kernel
      where length is the length of the strings considered, lambda the
      decay factor and normalized a logical parameter determining if the
      kernel evaluations should be normalized.
    }
    Hyper-parameters for user defined kernels can be passed through the
    \code{kpar} parameter as well. In the case of a Radial Basis kernel
    function (Gaussian) kpar can also be set to the string "automatic"
    which uses the heuristics in 'sigest' to calculate a good 'sigma'
    value for the Gaussian RBF or Laplace kernel, from the data.
    (default = "automatic").
  }
  \item{alpha}{the confidence level of the test (default: 0.05)}
  \item{asymptotic}{calculate the bounds asymptotically (suitable for
    smaller datasets) (default: FALSE)}
  \item{replace}{use replace when sampling for computing the asymptotic
    bounds (default: TRUE)}
  \item{ntimes}{number of times repeating the sampling procedure
    (default: 150)}
  \item{frac}{fraction of points to sample (default: 1)}
  \item{\dots}{additional parameters.}
}
\details{\code{kmmd} calculates the kernel maximum mean discrepancy for
  samples from two distributions and conducts a test as to whether the
  samples are from different distributions with level \code{alpha}.
}
\value{
  An S4 object of class \code{kmmd} containing the
  results of whether the H0 hypothesis is rejected or not. H0 being
  that the samples \eqn{x} and \eqn{y} come from the same distribution.
  The object contains the following slots:
  \item{\code{H0}}{is H0 rejected (logical)}
  \item{\code{AsympH0}}{is H0 rejected according to the asymptotic bound (logical)}
  \item{\code{kernelf}}{the kernel function used.}
  \item{\code{mmdstats}}{the test statistics (vector of two)}
  \item{\code{Radbound}}{the Rademacher bound}
  \item{\code{Asymbound}}{the asymptotic bound}

  see \code{kmmd-class} for more details.
}
\references{Gretton, A., K. Borgwardt, M. Rasch, B. Schoelkopf and A. Smola\cr
  \emph{A Kernel Method for the Two-Sample-Problem}\cr
  Neural Information Processing Systems 2006, Vancouver \cr
  \url{https://papers.neurips.cc/paper/3110-a-kernel-method-for-the-two-sample-problem.pdf}
}
\author{Alexandros Karatzoglou \cr
  \email{alexandros.karatzoglou@ci.tuwien.ac.at}}
\seealso{\code{ksvm}}
\examples{
# create data
x <- matrix(runif(300),100)
y <- matrix(runif(300)+1,100)

mmdo <- kmmd(x, y)

mmdo
}
\keyword{htest}
\keyword{nonlinear}
\keyword{nonparametric}
kernlab/man/income.Rd0000644000175100001440000000370611304023134014221 0ustar hornikusers\name{income}
\alias{income}
\title{Income Data}
\description{
  Customer Income Data from a marketing survey.
}
\usage{data(income)}
\format{
  A data frame with 14 categorical variables (8993 observations).
Explanation of the variable names:
  \tabular{rllll}{
    \tab 1 \tab \code{INCOME} \tab annual income of household \tab \cr
    \tab   \tab               \tab (Personal income if single) \tab ordinal\cr
    \tab 2 \tab \code{SEX} \tab sex \tab nominal\cr
    \tab 3 \tab \code{MARITAL.STATUS} \tab marital status \tab nominal\cr
    \tab 4 \tab \code{AGE} \tab age \tab ordinal\cr
    \tab 5 \tab \code{EDUCATION} \tab educational grade \tab ordinal\cr
    \tab 6 \tab \code{OCCUPATION} \tab type of work \tab nominal \cr
    \tab 7 \tab \code{AREA} \tab how long the interviewed person has lived\tab \cr
    \tab   \tab             \tab in the San Francisco/Oakland/San Jose area \tab ordinal\cr
    \tab 8 \tab \code{DUAL.INCOMES} \tab dual incomes (if married) \tab nominal\cr
    \tab 9 \tab \code{HOUSEHOLD.SIZE} \tab persons living in the household \tab ordinal\cr
    \tab 10 \tab \code{UNDER18} \tab persons in household under 18 \tab ordinal\cr
    \tab 11 \tab \code{HOUSEHOLDER} \tab householder status \tab nominal\cr
    \tab 12 \tab \code{HOME.TYPE} \tab type of home \tab nominal\cr
    \tab 13 \tab \code{ETHNIC.CLASS} \tab ethnic classification \tab nominal\cr
    \tab 14 \tab \code{LANGUAGE} \tab language most often spoken at home \tab nominal\cr
  }
}
\details{
  A total of N=9409 questionnaires containing 502 questions were
  filled out by shopping mall customers in the San Francisco Bay area.
  The dataset is an extract from this survey. It consists of
  14 demographic attributes. The dataset is a mixture of nominal and
  ordinal variables with a lot of missing data.
  The goal is to predict the annual income of a household from the
  other 13 demographic attributes.
}
\source{
  Impact Resources, Inc., Columbus, OH (1987).
}
\keyword{datasets}
kernlab/man/sigest.Rd0000644000175100001440000000631712117366220014255 0ustar hornikusers\name{sigest}
\alias{sigest}
\alias{sigest,formula-method}
\alias{sigest,matrix-method}
\title{Hyperparameter estimation for the Gaussian Radial Basis kernel}
\description{
  Estimates a range of values for the "sigma" inverse width parameter in
  the Gaussian Radial Basis kernel for use with Support Vector
  Machines. The estimation is based on the data to be used.
}
\usage{
\S4method{sigest}{formula}(x, data=NULL, frac = 0.5, na.action = na.omit, scaled = TRUE)
\S4method{sigest}{matrix}(x, frac = 0.5, scaled = TRUE, na.action = na.omit)
}
\arguments{
  \item{x}{a symbolic description of the model upon which the estimation
    is based. When not using a formula x is a matrix or vector
    containing the data}
  \item{data}{an optional data frame containing the variables in the model.
    By default the variables are taken from the environment which
    `ksvm' is called from.}
  \item{frac}{Fraction of data to use for estimation. By default half of
    the data is used to estimate the range of the sigma hyperparameter.}
  \item{scaled}{A logical vector indicating the variables to be
    scaled. If \code{scaled} is of length 1, the value is recycled as
    many times as needed and all non-binary variables are scaled.
    Per default, data are scaled internally to zero mean and unit
    variance (since this is the default action in \code{ksvm} as well).
    The center and scale values are returned and used for later
    predictions.}
  \item{na.action}{A function to specify the action to be taken if \code{NA}s are
    found. The default action is \code{na.omit}, which leads to rejection of cases
    with missing values on any required variable. An alternative
    is \code{na.fail}, which causes an error if \code{NA} cases
    are found.
(NOTE: If given, this argument must be named.)} } \details{ \code{sigest} estimates the range of values for the sigma parameter which would return good results when used with a Support Vector Machine (\code{ksvm}). The estimation is based upon the 0.1 and 0.9 quantile of \eqn{\|x -x'\|^2}. Basically any value in between those two bounds will produce good results. } \value{ Returns a vector of length 3 defining the range (0.1 quantile, median and 0.9 quantile) of the sigma hyperparameter. } \references{ B. Caputo, K. Sim, F. Furesjo, A. Smola, \cr \emph{Appearance-based object recognition using SVMs: which kernel should I use?}\cr Proc of NIPS workshop on Statitsical methods for computational experiments in visual processing and computer vision, Whistler, 2002. } \author{Alexandros Karatzoglou \cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{\code{\link{ksvm}}} \examples{ ## estimate good sigma values for promotergene data(promotergene) srange <- sigest(Class~.,data = promotergene) srange s <- srange[2] s ## create test and training set ind <- sample(1:dim(promotergene)[1],20) genetrain <- promotergene[-ind, ] genetest <- promotergene[ind, ] ## train a support vector machine gene <- ksvm(Class~.,data=genetrain,kernel="rbfdot", kpar=list(sigma = s),C=50,cross=3) gene ## predict gene type on the test set promoter <- predict(gene,genetest[,-1]) ## Check results table(promoter,genetest[,1]) } \keyword{classif} \keyword{regression} kernlab/man/ipop-class.Rd0000644000175100001440000000313311304023134015011 0ustar hornikusers\name{ipop-class} \docType{class} \alias{ipop-class} \alias{primal,ipop-method} \alias{dual,ipop-method} \alias{how,ipop-method} \alias{primal} \alias{dual} \alias{how} \title{Class "ipop"} \description{The quadratic problem solver class} \section{Objects from the Class}{ Objects can be created by calls of the form \code{new("ipop", ...)}. or by calling the \code{ipop} function. } \section{Slots}{ \describe{ \item{\code{primal}:}{Object of class \code{"vector"} the primal solution of the problem} \item{\code{dual}:}{Object of class \code{"numeric"} the dual of the problem} \item{\code{how}:}{Object of class \code{"character"} convergence information} } } \section{Methods}{ \describe{ \item{primal}{Object of class \code{ipop}}{Return the primal of the problem} \item{dual}{Object of class \code{ipop}}{Return the dual of the problem} \item{how}{Object of class \code{ipop}}{Return information on convergence} } } \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{ \code{\link{ipop}} } \examples{ ## solve the Support Vector Machine optimization problem data(spam) ## sample a scaled part (300 points) of the spam data set m <- 300 set <- sample(1:dim(spam)[1],m) x <- scale(as.matrix(spam[,-58]))[set,] y <- as.integer(spam[set,58]) y[y==2] <- -1 ##set C parameter and kernel C <- 5 rbf <- rbfdot(sigma = 0.1) ## create H matrix etc. H <- kernelPol(rbf,x,,y) c <- matrix(rep(-1,m)) A <- t(y) b <- 0 l <- matrix(rep(0,m)) u <- matrix(rep(C,m)) r <- 0 sv <- ipop(c,H,A,b,l,u,r) primal(sv) dual(sv) how(sv) } \keyword{classes} kernlab/man/ticdata.Rd0000644000175100001440000002013411304023134014350 0ustar hornikusers\name{ticdata} \alias{ticdata} \title{The Insurance Company Data} \description{ This data set used in the CoIL 2000 Challenge contains information on customers of an insurance company. The data consists of 86 variables and includes product usage data and socio-demographic data derived from zip area codes. 
The data was collected to answer the following question: Can you predict who would be interested in buying a caravan insurance policy and give an explanation why ? } \usage{data(ticdata)} \format{ ticdata: Dataset to train and validate prediction models and build a description (9822 customer records). Each record consists of 86 attributes, containing sociodemographic data (attribute 1-43) and product ownership (attributes 44-86). The sociodemographic data is derived from zip codes. All customers living in areas with the same zip code have the same sociodemographic attributes. Attribute 86, \code{CARAVAN:Number of mobile home policies}, is the target variable. Data Format \tabular{rlll}{ \tab 1 \tab \code{STYPE} \tab Customer Subtype\cr \tab 2 \tab \code{MAANTHUI} \tab Number of houses 1 - 10\cr \tab 3 \tab \code{MGEMOMV} \tab Avg size household 1 - 6\cr \tab 4 \tab \code{MGEMLEEF} \tab Average age\cr \tab 5 \tab \code{MOSHOOFD} \tab Customer main type\cr \tab 6 \tab \code{MGODRK} \tab Roman catholic \cr \tab 7 \tab \code{MGODPR} \tab Protestant ... \cr \tab 8 \tab \code{MGODOV} \tab Other religion \cr \tab 9 \tab \code{MGODGE} \tab No religion \cr \tab 10 \tab \code{MRELGE} \tab Married \cr \tab 11 \tab \code{MRELSA} \tab Living together \cr \tab 12 \tab \code{MRELOV} \tab Other relation \cr \tab 13 \tab \code{MFALLEEN} \tab Singles \cr \tab 14 \tab \code{MFGEKIND} \tab Household without children \cr \tab 15 \tab \code{MFWEKIND} \tab Household with children \cr \tab 16 \tab \code{MOPLHOOG} \tab High level education \cr \tab 17 \tab \code{MOPLMIDD} \tab Medium level education \cr \tab 18 \tab \code{MOPLLAAG} \tab Lower level education \cr \tab 19 \tab \code{MBERHOOG} \tab High status \cr \tab 20 \tab \code{MBERZELF} \tab Entrepreneur \cr \tab 21 \tab \code{MBERBOER} \tab Farmer \cr \tab 22 \tab \code{MBERMIDD} \tab Middle management \cr \tab 23 \tab \code{MBERARBG} \tab Skilled labourers \cr \tab 24 \tab \code{MBERARBO} \tab Unskilled labourers \cr \tab 25 \tab \code{MSKA} \tab Social class A \cr \tab 26 \tab \code{MSKB1} \tab Social class B1 \cr \tab 27 \tab \code{MSKB2} \tab Social class B2 \cr \tab 28 \tab \code{MSKC} \tab Social class C \cr \tab 29 \tab \code{MSKD} \tab Social class D \cr \tab 30 \tab \code{MHHUUR} \tab Rented house \cr \tab 31 \tab \code{MHKOOP} \tab Home owners \cr \tab 32 \tab \code{MAUT1} \tab 1 car \cr \tab 33 \tab \code{MAUT2} \tab 2 cars \cr \tab 34 \tab \code{MAUT0} \tab No car \cr \tab 35 \tab \code{MZFONDS} \tab National Health Service \cr \tab 36 \tab \code{MZPART} \tab Private health insurance \cr \tab 37 \tab \code{MINKM30} \tab Income >30.000 \cr \tab 38 \tab \code{MINK3045} \tab Income 30-45.000 \cr \tab 39 \tab \code{MINK4575} \tab Income 45-75.000 \cr \tab 40 \tab \code{MINK7512} \tab Income 75-122.000 \cr \tab 41 \tab \code{MINK123M} \tab Income <123.000 \cr \tab 42 \tab \code{MINKGEM} \tab Average income \cr \tab 43 \tab \code{MKOOPKLA} \tab Purchasing power class \cr \tab 44 \tab \code{PWAPART} \tab Contribution private third party insurance \cr \tab 45 \tab \code{PWABEDR} \tab Contribution third party insurance (firms) \cr \tab 46 \tab \code{PWALAND} \tab Contribution third party insurance (agriculture) \cr \tab 47 \tab \code{PPERSAUT} \tab Contribution car policies \cr \tab 48 \tab \code{PBESAUT} \tab Contribution delivery van policies \cr \tab 49 \tab \code{PMOTSCO} \tab Contribution motorcycle/scooter policies \cr \tab 50 \tab \code{PVRAAUT} \tab Contribution lorry policies \cr \tab 51 \tab \code{PAANHANG} \tab Contribution trailer policies \cr 
\tab 52 \tab \code{PTRACTOR} \tab Contribution tractor policies \cr \tab 53 \tab \code{PWERKT} \tab Contribution agricultural machines policies \cr \tab 54 \tab \code{PBROM} \tab Contribution moped policies \cr \tab 55 \tab \code{PLEVEN} \tab Contribution life insurances \cr \tab 56 \tab \code{PPERSONG} \tab Contribution private accident insurance policies \cr \tab 57 \tab \code{PGEZONG} \tab Contribution family accidents insurance policies \cr \tab 58 \tab \code{PWAOREG} \tab Contribution disability insurance policies \cr \tab 59 \tab \code{PBRAND} \tab Contribution fire policies \cr \tab 60 \tab \code{PZEILPL} \tab Contribution surfboard policies \cr \tab 61 \tab \code{PPLEZIER} \tab Contribution boat policies \cr \tab 62 \tab \code{PFIETS} \tab Contribution bicycle policies \cr \tab 63 \tab \code{PINBOED} \tab Contribution property insurance policies \cr \tab 64 \tab \code{PBYSTAND} \tab Contribution social security insurance policies \cr \tab 65 \tab \code{AWAPART} \tab Number of private third party insurance 1 - 12 \cr \tab 66 \tab \code{AWABEDR} \tab Number of third party insurance (firms) ... \cr \tab 67 \tab \code{AWALAND} \tab Number of third party insurance (agriculture) \cr \tab 68 \tab \code{APERSAUT} \tab Number of car policies \cr \tab 69 \tab \code{ABESAUT} \tab Number of delivery van policies \cr \tab 70 \tab \code{AMOTSCO} \tab Number of motorcycle/scooter policies \cr \tab 71 \tab \code{AVRAAUT} \tab Number of lorry policies \cr \tab 72 \tab \code{AAANHANG} \tab Number of trailer policies \cr \tab 73 \tab \code{ATRACTOR} \tab Number of tractor policies \cr \tab 74 \tab \code{AWERKT} \tab Number of agricultural machines policies \cr \tab 75 \tab \code{ABROM} \tab Number of moped policies \cr \tab 76 \tab \code{ALEVEN} \tab Number of life insurances \cr \tab 77 \tab \code{APERSONG} \tab Number of private accident insurance policies \cr \tab 78 \tab \code{AGEZONG} \tab Number of family accidents insurance policies \cr \tab 79 \tab \code{AWAOREG} \tab Number of disability insurance policies \cr \tab 80 \tab \code{ABRAND} \tab Number of fire policies \cr \tab 81 \tab \code{AZEILPL} \tab Number of surfboard policies \cr \tab 82 \tab \code{APLEZIER} \tab Number of boat policies \cr \tab 83 \tab \code{AFIETS} \tab Number of bicycle policies \cr \tab 84 \tab \code{AINBOED} \tab Number of property insurance policies \cr \tab 85 \tab \code{ABYSTAND} \tab Number of social security insurance policies \cr \tab 86 \tab \code{CARAVAN} \tab Number of mobile home policies 0 - 1 \cr } Note: All the variables starting with M are zipcode variables. They give information on the distribution of that variable, e.g., Rented house, in the zipcode area of the customer. } \details{ Information about the insurance company customers consists of 86 variables and includes product usage data and socio-demographic data derived from zip area codes. The data was supplied by the Dutch data mining company Sentient Machine Research and is based on a real world business problem. The training set contains over 5000 descriptions of customers, including the information of whether or not they have a caravan insurance policy. The test set contains 4000 customers. The test and data set are merged in the ticdata set. More information about the data set and the CoIL 2000 Challenge along with publications based on the data set can be found at \url{http://www.liacs.nl/~putten/library/cc2000/}. 
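  A minimal sketch of loading the data and tabulating the target
  variable (assuming the target column is named \code{CARAVAN}, as
  listed in the format table above):
  \preformatted{
data(ticdata)
dim(ticdata)              # 9822 records on 86 attributes
table(ticdata$CARAVAN)    # distribution of the target variable
  }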
} \source{ \itemize{ \item UCI KDD Archive:\url{http://kdd.ics.uci.edu} \item Donor: Sentient Machine Research \cr Peter van der Putten \cr Sentient Machine Research \cr Baarsjesweg 224 \cr 1058 AA Amsterdam \cr The Netherlands \cr +31 20 6186927 \cr pvdputten@hotmail.com, putten@liacs.nl } } \references{Peter van der Putten, Michel de Ruiter, Maarten van Someren \emph{CoIL Challenge 2000 Tasks and Results: Predicting and Explaining Caravan Policy Ownership}\cr \url{http://www.liacs.nl/~putten/library/cc2000/}} \keyword{datasets} kernlab/man/ranking-class.Rd0000644000175100001440000000261612117365252015515 0ustar hornikusers\name{ranking-class} \docType{class} \alias{ranking-class} \alias{edgegraph} \alias{convergence} \alias{convergence,ranking-method} \alias{edgegraph,ranking-method} \alias{show,ranking-method} \title{Class "ranking"} \description{Object of the class \code{"ranking"} are created from the \code{ranking} function and extend the class \code{matrix}} \section{Objects from the Class}{ Objects can be created by calls of the form \code{new("ranking", ...)}. } \section{Slots}{ \describe{ \item{\code{.Data}:}{Object of class \code{"matrix"} containing the data ranking and scores} \item{\code{convergence}:}{Object of class \code{"matrix"} containing the convergence matrix} \item{\code{edgegraph}:}{Object of class \code{"matrix"} containing the edgegraph} } } \section{Extends}{ Class \code{"matrix"}, directly. } \section{Methods}{ \describe{ \item{show}{\code{signature(object = "ranking")}: displays the ranking score matrix} } } \author{Alexandros Karatzoglou \cr \email{alexandros.karatzoglou@ci.tuwien.ac.at} } \seealso{ \code{\link{ranking}} } \examples{ data(spirals) ## create data set to be ranked ran<-spirals[rowSums(abs(spirals)<0.55)==2,] ## rank points according to "relevance" to point 54 (up left) ranked<-ranking(ran,54,kernel="rbfdot", kpar=list(sigma=100),edgegraph=TRUE) ranked edgegraph(ranked)[1:10,1:10] } \keyword{classes} kernlab/man/as.kernelMatrix.Rd0000644000175100001440000000230411304023134016005 0ustar hornikusers\name{as.kernelMatrix} \docType{methods} \alias{kernelMatrix-class} \alias{as.kernelMatrix} \alias{as.kernelMatrix-methods} \alias{as.kernelMatrix,matrix-method} \title{Assing kernelMatrix class to matrix objects} \description{\code{as.kernelMatrix} in package \pkg{kernlab} can be used to coerce the kernelMatrix class to matrix objects representing a kernel matrix. These matrices can then be used with the kernelMatrix interfaces which most of the functions in \pkg{kernlab} support.} \usage{ \S4method{as.kernelMatrix}{matrix}(x, center = FALSE) } \arguments{ \item{x}{matrix to be assigned the \code{kernelMatrix} class } \item{center}{center the kernel matrix in feature space (default: FALSE) } } \author{ Alexandros Karatzoglou \cr \email{alexandros.karatzoglou@ci.tuwien.ac.at} } \seealso{\code{\link{kernelMatrix}}, \code{\link{dots}}} \keyword{methods} \examples{ ## Create toy data x <- rbind(matrix(rnorm(10),,2),matrix(rnorm(10,mean=3),,2)) y <- matrix(c(rep(1,5),rep(-1,5))) ### Use as.kernelMatrix to label the cov. matrix as a kernel matrix ### which is eq. 
to using a linear kernel K <- as.kernelMatrix(crossprod(t(x))) K svp2 <- ksvm(K, y, type="C-svc") svp2 } kernlab/man/gausspr-class.Rd0000644000175100001440000001041212055335061015535 0ustar hornikusers\name{gausspr-class} \docType{class} \alias{gausspr-class} \alias{alpha,gausspr-method} \alias{cross,gausspr-method} \alias{error,gausspr-method} \alias{kcall,gausspr-method} \alias{kernelf,gausspr-method} \alias{kpar,gausspr-method} \alias{lev,gausspr-method} \alias{type,gausspr-method} \alias{alphaindex,gausspr-method} \alias{xmatrix,gausspr-method} \alias{ymatrix,gausspr-method} \alias{scaling,gausspr-method} \title{Class "gausspr"} \description{The Gaussian Processes object class} \section{Objects from the Class}{ Objects can be created by calls of the form \code{new("gausspr", ...)} or by calling the \code{gausspr} function. } \section{Slots}{ \describe{ \item{\code{tol}:}{Object of class \code{"numeric"} containing the tolerance of the termination criterion} \item{\code{kernelf}:}{Object of class \code{"kfunction"} containing the kernel function used} \item{\code{kpar}:}{Object of class \code{"list"} containing the kernel parameters used } \item{\code{kcall}:}{Object of class \code{"list"} containing the function call used } \item{\code{type}:}{Object of class \code{"character"} containing the type of problem } \item{\code{terms}:}{Object of class \code{"ANY"} containing the terms representation of the symbolic model used (when using a formula)} \item{\code{xmatrix}:}{Object of class \code{"input"} containing the data matrix used } \item{\code{ymatrix}:}{Object of class \code{"output"} containing the response matrix} \item{\code{fitted}:}{Object of class \code{"output"} containing the fitted values } \item{\code{lev}:}{Object of class \code{"vector"} containing the levels of the response (in case of classification) } \item{\code{nclass}:}{Object of class \code{"numeric"} containing the number of classes (in case of classification) } \item{\code{alpha}:}{Object of class \code{"listI"} containing the computed alpha values } \item{\code{alphaindex}}{Object of class \code{"list"} containing the indexes for the alphas in various classes (in multi-class problems).} \item{\code{sol}}{Object of class \code{"matrix"} containing the solution to the Gaussian Process formulation; it is used to compute the variance in regression problems.} \item{\code{scaling}}{Object of class \code{"ANY"} containing the scaling coefficients of the data (when \code{scaled = TRUE} is used).} \item{\code{nvar}:}{Object of class \code{"numeric"} containing the computed variance} \item{\code{error}:}{Object of class \code{"numeric"} containing the training error} \item{\code{cross}:}{Object of class \code{"numeric"} containing the cross validation error} \item{\code{n.action}:}{Object of class \code{"ANY"} containing the action performed on NAs } } } \section{Methods}{ \describe{ \item{alpha}{\code{signature(object = "gausspr")}: returns the alpha vector} \item{cross}{\code{signature(object = "gausspr")}: returns the cross validation error } \item{error}{\code{signature(object = "gausspr")}: returns the training error } \item{fitted}{\code{signature(object = "vm")}: returns the fitted values } \item{kcall}{\code{signature(object = "gausspr")}: returns the call performed} \item{kernelf}{\code{signature(object = "gausspr")}: returns the kernel function used} \item{kpar}{\code{signature(object = "gausspr")}: returns the kernel parameters used} \item{lev}{\code{signature(object = "gausspr")}: returns the response levels (in classification) }
\item{type}{\code{signature(object = "gausspr")}: returns the type of problem} \item{xmatrix}{\code{signature(object = "gausspr")}: returns the data matrix used} \item{ymatrix}{\code{signature(object = "gausspr")}: returns the response matrix used} \item{scaling}{\code{signature(object = "gausspr")}: returns the scaling coefficients of the data (when \code{scaled = TRUE} is used)} } } \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{ \code{\link{gausspr}}, \code{\link{ksvm-class}}, \code{\link{vm-class}} } \examples{ # train model data(iris) test <- gausspr(Species~.,data=iris,var=2) test alpha(test) error(test) lev(test) } \keyword{classes} kernlab/man/kernel-class.Rd0000644000175100001440000000422311304023134015323 0ustar hornikusers\name{kernel-class} \docType{class} \alias{rbfkernel-class} \alias{polykernel-class} \alias{vanillakernel-class} \alias{tanhkernel-class} \alias{anovakernel-class} \alias{besselkernel-class} \alias{laplacekernel-class} \alias{splinekernel-class} \alias{stringkernel-class} \alias{fourierkernel-class} \alias{kfunction-class} \alias{kernel-class} \alias{kpar,kernel-method} \title{Class "kernel" "rbfkernel" "polykernel", "tanhkernel", "vanillakernel"} \description{ The built-in kernel classes in \pkg{kernlab}} \section{Objects from the Class}{ Objects can be created by calls of the form \code{new("rbfkernel")}, \code{new{"polykernel"}}, \code{new{"tanhkernel"}}, \code{new{"vanillakernel"}}, \code{new{"anovakernel"}}, \code{new{"besselkernel"}}, \code{new{"laplacekernel"}}, \code{new{"splinekernel"}}, \code{new{"stringkernel"}} or by calling the \code{rbfdot}, \code{polydot}, \code{tanhdot}, \code{vanilladot}, \code{anovadot}, \code{besseldot}, \code{laplacedot}, \code{splinedot}, \code{stringdot} functions etc.. } \section{Slots}{ \describe{ \item{\code{.Data}:}{Object of class \code{"function"} containing the kernel function } \item{\code{kpar}:}{Object of class \code{"list"} containing the kernel parameters } } } \section{Extends}{ Class \code{"kernel"}, directly. Class \code{"function"}, by class \code{"kernel"}. } \section{Methods}{ \describe{ \item{kernelMatrix}{\code{signature(kernel = "rbfkernel", x = "matrix")}: computes the kernel matrix} \item{kernelMult}{\code{signature(kernel = "rbfkernel", x = "matrix")}: computes the quadratic kernel expression} \item{kernelPol}{\code{signature(kernel = "rbfkernel", x = "matrix")}: computes the kernel expansion} \item{kernelFast}{\code{signature(kernel = "rbfkernel", x = "matrix"),,a}: computes parts or the full kernel matrix, mainly used in kernel algorithms where columns of the kernel matrix are computed per invocation } } } \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at} } \seealso{ \code{\link{dots}} } \examples{ rbfkernel <- rbfdot(sigma = 0.1) rbfkernel is(rbfkernel) kpar(rbfkernel) } \keyword{classes} kernlab/man/dots.Rd0000644000175100001440000001005711304023134013713 0ustar hornikusers\name{dots} \alias{dots} \alias{kernels} \alias{rbfdot} \alias{polydot} \alias{tanhdot} \alias{vanilladot} \alias{laplacedot} \alias{besseldot} \alias{anovadot} \alias{fourierdot} \alias{splinedot} \alias{kpar} \alias{kfunction} \alias{show,kernel-method} \title{Kernel Functions} \description{ The kernel generating functions provided in kernlab. 
\cr The Gaussian RBF kernel \eqn{k(x,x') = \exp(-\sigma \|x - x'\|^2)} \cr The Polynomial kernel \eqn{k(x,x') = (scale \langle x, x' \rangle + offset)^{degree}}\cr The Linear kernel \eqn{k(x,x') = \langle x, x' \rangle}\cr The Hyperbolic tangent kernel \eqn{k(x, x') = \tanh(scale \langle x, x' \rangle + offset)}\cr The Laplacian kernel \eqn{k(x,x') = \exp(-\sigma \|x - x'\|)} \cr The Bessel kernel \eqn{k(x,x') = (- Bessel_{(\nu+1)}^n \sigma \|x - x'\|^2)} \cr The ANOVA RBF kernel \eqn{k(x,x') = \sum_{1\leq i_1 \ldots < i_D \leq N} \prod_{d=1}^D k(x_{i_d}, {x'}_{i_d})} where \eqn{k(x,x')} is a Gaussian RBF kernel. \cr The Spline kernel \eqn{k(x,x') = \prod_{d=1}^D \left(1 + x_i x_j + x_i x_j \min(x_i, x_j) - \frac{x_i + x_j}{2} \min(x_i,x_j)^2 + \frac{\min(x_i,x_j)^3}{3}\right)} \cr The String kernels (see \code{stringdot}). } \usage{ rbfdot(sigma = 1) polydot(degree = 1, scale = 1, offset = 1) tanhdot(scale = 1, offset = 1) vanilladot() laplacedot(sigma = 1) besseldot(sigma = 1, order = 1, degree = 1) anovadot(sigma = 1, degree = 1) splinedot() } \arguments{ \item{sigma}{The inverse kernel width used by the Gaussian, the Laplacian, the Bessel and the ANOVA kernel } \item{degree}{The degree of the polynomial, Bessel or ANOVA kernel function. This has to be a positive integer.} \item{scale}{The scaling parameter of the polynomial and tangent kernel is a convenient way of normalizing patterns without the need to modify the data itself} \item{offset}{The offset used in a polynomial or hyperbolic tangent kernel} \item{order}{The order of the Bessel function to be used as a kernel} } \details{ The kernel generating functions are used to initialize a kernel function which calculates the dot (inner) product between two feature vectors in a Hilbert Space. These functions can be passed as a \code{kernel} argument to almost all functions in \pkg{kernlab} (e.g., \code{ksvm}, \code{kpca}, etc.). Although using one of the existing kernel functions as a \code{kernel} argument in various functions in \pkg{kernlab} has the advantage that optimized code is used to calculate various kernel expressions, any other function implementing a dot product of class \code{kernel} can also be used as a kernel argument. This allows the user to use, test and develop special kernels for a given data set or algorithm. For details on the string kernels see \code{stringdot}. } \value{ Returns an S4 object of class \code{kernel} which extends the \code{function} class. The resulting function implements the given kernel calculating the inner (dot) product between two vectors. \item{kpar}{a list containing the kernel parameters (hyperparameters) used.} The kernel parameters can be accessed by the \code{kpar} function. } \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \note{If the offset in the Polynomial kernel is set to 0, we obtain homogeneous polynomial kernels; for positive values, we have inhomogeneous kernels. Note that for negative values the kernel does not satisfy Mercer's condition and thus the optimizers may fail. \cr In the Hyperbolic tangent kernel, if the offset is negative the likelihood of obtaining a kernel matrix that is not positive definite is much higher (since then even some diagonal elements may be negative); hence if this kernel has to be used, the offset should always be positive. Note, however, that this is no guarantee that the kernel matrix will always be positive definite.
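A small numerical illustration of this point (a sketch added for clarity, not part of the original text; it uses only the \code{tanhdot} and \code{kernelMatrix} functions of the package): \preformatted{
set.seed(1)
x <- matrix(rnorm(20), 10, 2)
## hyperbolic tangent kernel with a negative offset
tk <- tanhdot(scale = 1, offset = -1)
K  <- kernelMatrix(tk, x)
## any negative eigenvalues indicate an indefinite (non-Mercer) kernel matrix
eigen(K, only.values = TRUE)$values
}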
} \seealso{\code{stringdot}, \code{\link{kernelMatrix}}, \code{\link{kernelMult}}, \code{\link{kernelPol}}} \examples{ rbfkernel <- rbfdot(sigma = 0.1) rbfkernel kpar(rbfkernel) ## create two vectors x <- rnorm(10) y <- rnorm(10) ## calculate dot product rbfkernel(x,y) } \keyword{symbolmath} kernlab/man/spam.Rd0000644000175100001440000000412514227556630013724 0ustar hornikusers\name{spam} \alias{spam} \title{Spam E-mail Database} \description{A data set collected at Hewlett-Packard Labs that classifies 4601 e-mails as spam or non-spam. In addition to this class label there are 57 variables indicating the frequency of certain words and characters in the e-mail.} \usage{data(spam)} \format{A data frame with 4601 observations and 58 variables. The first 48 variables contain the frequency of the variable name (e.g., business) in the e-mail. If the variable name starts with num (e.g., num650) then it indicates the frequency of the corresponding number (e.g., 650). The variables 49-54 indicate the frequency of the characters `;', `(', `[', `!', `$', and `#'. The variables 55-57 contain the average, longest and total run-length of capital letters. Variable 58 indicates the type of the mail and is either \code{"nonspam"} or \code{"spam"}, i.e. unsolicited commercial e-mail.} \details{ The data set contains 2788 e-mails classified as \code{"nonspam"} and 1813 classified as \code{"spam"}. The ``spam'' concept is diverse: advertisements for products/web sites, make money fast schemes, chain letters, pornography... This collection of spam e-mails came from the collectors' postmaster and individuals who had filed spam. The collection of non-spam e-mails came from filed work and personal e-mails, and hence the word 'george' and the area code '650' are indicators of non-spam. These are useful when constructing a personalized spam filter. One would either have to blind such non-spam indicators or get a very wide collection of non-spam to generate a general purpose spam filter. } \source{ \itemize{ \item Creators: Mark Hopkins, Erik Reeber, George Forman, Jaap Suermondt at Hewlett-Packard Labs, 1501 Page Mill Rd., Palo Alto, CA 94304 \item Donor: George Forman (gforman at nospam hpl.hp.com) 650-857-7835 } These data have been taken from the UCI Repository Of Machine Learning Databases at \url{http://www.ics.uci.edu/~mlearn/MLRepository.html}} \references{ T. Hastie, R. Tibshirani, J.H. Friedman. \emph{The Elements of Statistical Learning.} Springer, 2001. } \keyword{datasets} kernlab/man/inchol.Rd0000644000175100001440000001025414221633672014231 0ustar hornikusers\name{inchol} \alias{inchol} \alias{inchol,matrix-method} \title{Incomplete Cholesky decomposition} \description{ \code{inchol} computes the incomplete Cholesky decomposition of the kernel matrix from a data matrix. } \usage{ inchol(x, kernel="rbfdot", kpar=list(sigma=0.1), tol = 0.001, maxiter = dim(x)[1], blocksize = 50, verbose = 0) } \arguments{ \item{x}{The data matrix indexed by row} \item{kernel}{the kernel function used in training and predicting. This parameter can be set to any function, of class \code{kernel}, which computes the inner product in feature space between two vector arguments.
kernlab provides the most popular kernel functions which can be used by setting the kernel parameter to the following strings: \itemize{ \item \code{rbfdot} Radial Basis kernel function "Gaussian" \item \code{polydot} Polynomial kernel function \item \code{vanilladot} Linear kernel function \item \code{tanhdot} Hyperbolic tangent kernel function \item \code{laplacedot} Laplacian kernel function \item \code{besseldot} Bessel kernel function \item \code{anovadot} ANOVA RBF kernel function \item \code{splinedot} Spline kernel } The kernel parameter can also be set to a user defined function of class kernel by passing the function name as an argument. } \item{kpar}{the list of hyper-parameters (kernel parameters). This is a list which contains the parameters to be used with the kernel function. Valid parameters for existing kernels are: \itemize{ \item \code{sigma} inverse kernel width for the Radial Basis kernel function "rbfdot" and the Laplacian kernel "laplacedot". \item \code{degree, scale, offset} for the Polynomial kernel "polydot" \item \code{scale, offset} for the Hyperbolic tangent kernel function "tanhdot" \item \code{sigma, order, degree} for the Bessel kernel "besseldot". \item \code{sigma, degree} for the ANOVA kernel "anovadot". } Hyper-parameters for user defined kernels can be passed through the kpar parameter as well. } \item{tol}{algorithm stops when remaining pivots bring less accuracy than \code{tol} (default: 0.001)} \item{maxiter}{maximum number of iterations and columns in \eqn{Z}} \item{blocksize}{add this many columns to the matrix per iteration} \item{verbose}{print info on algorithm convergence} } \details{An incomplete Cholesky decomposition calculates \eqn{Z} where \eqn{K = ZZ'}, \eqn{K} being the kernel matrix. Since the rank of a kernel matrix is usually low, \eqn{Z} tends to be smaller than the complete kernel matrix. The decomposed matrix can be used to create memory-efficient kernel-based algorithms without the need to compute and store a complete kernel matrix in memory.} \value{ An S4 object of class "inchol" which is an extension of the class "matrix". The object is the decomposed kernel matrix along with the slots: \item{pivots}{Indices on which pivots were done} \item{diagresidues}{Residuals left on the diagonal} \item{maxresiduals}{Residuals picked for pivoting} slots can be accessed either by \code{object@slot} or by accessor functions with the same name (e.g., \code{pivots(object)})} \references{ Francis R. Bach, Michael I. Jordan\cr \emph{Kernel Independent Component Analysis}\cr Journal of Machine Learning Research 3, 1-48\cr \url{https://www.jmlr.org/papers/volume3/bach02a/bach02a.pdf} } \author{Alexandros Karatzoglou (based on Matlab code by S.V.N.
(Vishy) Vishwanathan and Alex Smola)\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{\code{\link{csi}}, \code{\link{inchol-class}}, \code{\link{chol}}} \examples{ data(iris) datamatrix <- as.matrix(iris[,-5]) # initialize kernel function rbf <- rbfdot(sigma=0.1) rbf Z <- inchol(datamatrix,kernel=rbf) dim(Z) pivots(Z) # calculate kernel matrix K <- crossprod(t(Z)) # difference between approximated and real kernel matrix (K - kernelMatrix(kernel=rbf, datamatrix))[6,] } \keyword{methods} \keyword{algebra} \keyword{array} kernlab/man/lssvm.Rd0000644000175100001440000002010212117365064014116 0ustar hornikusers\name{lssvm} \docType{methods} \alias{lssvm} \alias{lssvm-methods} \alias{lssvm,formula-method} \alias{lssvm,vector-method} \alias{lssvm,matrix-method} \alias{lssvm,list-method} \alias{lssvm,kernelMatrix-method} \alias{show,lssvm-method} \alias{coef,lssvm-method} \alias{predict,lssvm-method} \title{Least Squares Support Vector Machine} \description{ The \code{lssvm} function is an implementation of the Least Squares SVM. \code{lssvm} includes a reduced version of Least Squares SVM using a decomposition of the kernel matrix which is calculated by the \code{csi} function. } \usage{ \S4method{lssvm}{formula}(x, data=NULL, ..., subset, na.action = na.omit, scaled = TRUE) \S4method{lssvm}{vector}(x, ...) \S4method{lssvm}{matrix}(x, y, scaled = TRUE, kernel = "rbfdot", kpar = "automatic", type = NULL, tau = 0.01, reduced = TRUE, tol = 0.0001, rank = floor(dim(x)[1]/3), delta = 40, cross = 0, fit = TRUE, ..., subset, na.action = na.omit) \S4method{lssvm}{kernelMatrix}(x, y, type = NULL, tau = 0.01, tol = 0.0001, rank = floor(dim(x)[1]/3), delta = 40, cross = 0, fit = TRUE, ...) \S4method{lssvm}{list}(x, y, scaled = TRUE, kernel = "stringdot", kpar = list(length=4, lambda = 0.5), type = NULL, tau = 0.01, reduced = TRUE, tol = 0.0001, rank = floor(dim(x)[1]/3), delta = 40, cross = 0, fit = TRUE, ..., subset) } \arguments{ \item{x}{a symbolic description of the model to be fit, a matrix or vector containing the training data when a formula interface is not used, or a \code{kernelMatrix} or a list of character vectors.} \item{data}{an optional data frame containing the variables in the model. By default the variables are taken from the environment which `lssvm' is called from.} \item{y}{a response vector with one label for each row/component of \code{x}. Can be either a factor (for classification tasks) or a numeric vector (for classification, or for regression, which is currently not supported).} \item{scaled}{A logical vector indicating the variables to be scaled. If \code{scaled} is of length 1, the value is recycled as many times as needed and all non-binary variables are scaled. Per default, data are scaled internally to zero mean and unit variance. The center and scale values are returned and used for later predictions.} \item{type}{Type of problem. Either "classification" or "regression". Depending on whether \code{y} is a factor or not, the default setting for \code{type} is "classification" or "regression" respectively, but can be overwritten by setting an explicit value. (regression is currently not supported)\cr} \item{kernel}{the kernel function used in training and predicting. This parameter can be set to any function, of class kernel, which computes a dot product between two vector arguments.
kernlab provides the most popular kernel functions which can be used by setting the kernel parameter to the following strings: \itemize{ \item \code{rbfdot} Radial Basis kernel "Gaussian" \item \code{polydot} Polynomial kernel \item \code{vanilladot} Linear kernel \item \code{tanhdot} Hyperbolic tangent kernel \item \code{laplacedot} Laplacian kernel \item \code{besseldot} Bessel kernel \item \code{anovadot} ANOVA RBF kernel \item \code{splinedot} Spline kernel \item \code{stringdot} String kernel } Setting the kernel parameter to "matrix" treats \code{x} as a kernel matrix calling the \code{kernelMatrix} interface.\cr The kernel parameter can also be set to a user defined function of class kernel by passing the function name as an argument. } \item{kpar}{ the list of hyper-parameters (kernel parameters). This is a list which contains the parameters to be used with the kernel function. Valid parameters for existing kernels are: \itemize{ \item \code{sigma} inverse kernel width for the Radial Basis kernel function "rbfdot" and the Laplacian kernel "laplacedot". \item \code{degree, scale, offset} for the Polynomial kernel "polydot" \item \code{scale, offset} for the Hyperbolic tangent kernel function "tanhdot" \item \code{sigma, order, degree} for the Bessel kernel "besseldot". \item \code{sigma, degree} for the ANOVA kernel "anovadot". \item \code{length, lambda, normalized} for the "stringdot" kernel where length is the length of the strings considered, lambda the decay factor and normalized a logical parameter determining if the kernel evaluations should be normalized. } Hyper-parameters for user defined kernels can be passed through the kpar parameter as well.\cr \code{kpar} can also be set to the string "automatic" which uses the heuristics in \code{\link{sigest}} to calculate a good \code{sigma} value for the Gaussian RBF or Laplace kernel, from the data. (default = "automatic"). } \item{tau}{the regularization parameter (default 0.01) } \item{reduced}{if set to \code{FALSE} the full linear problem of the lssvm is solved, when \code{TRUE} a reduced method using \code{csi} is used.} \item{rank}{the maximal rank of the decomposed kernel matrix, see \code{csi}} \item{delta}{number of columns of the Cholesky factorization performed in advance, see \code{csi} (default 40)} \item{tol}{tolerance of termination criterion for the \code{csi} function, lower tolerance leads to more precise approximation but may increase the training time and the decomposed matrix size (default: 0.0001)} \item{fit}{indicates whether the fitted values should be computed and included in the model or not (default: 'TRUE')} \item{cross}{if an integer value k>0 is specified, a k-fold cross validation on the training data is performed to assess the quality of the model: the Mean Squared Error for regression} \item{subset}{An index vector specifying the cases to be used in the training sample. (NOTE: If given, this argument must be named.)} \item{na.action}{A function to specify the action to be taken if \code{NA}s are found. The default action is \code{na.omit}, which leads to rejection of cases with missing values on any required variable. An alternative is \code{na.fail}, which causes an error if \code{NA} cases are found. (NOTE: If given, this argument must be named.)} \item{\dots}{ additional parameters} } \details{Least Squares Support Vector Machines are a reformulation of the standard SVM that leads to solving linear KKT systems. The algorithm is based on the minimization of a classical penalized least-squares cost function.
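For orientation, the primal problem can be written in the standard LS-SVM form of Suykens and Vandewalle (reproduced here for reference; the exact correspondence between the constant \eqn{\gamma} below and the \code{tau} argument is an implementation detail of \code{lssvm}): \deqn{\min_{w,b,e} \frac{1}{2}\|w\|^2 + \frac{\gamma}{2}\sum_{i=1}^n e_i^2 \quad \textrm{subject to} \quad y_i(\langle w, \phi(x_i)\rangle + b) = 1 - e_i, \quad i = 1,\ldots,n,} so that all training points enter through equality constraints and the solution follows from a linear system rather than a quadratic program.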
The current implementation approximates the kernel matrix by an incomplete Cholesky factorization obtained by the \code{\link{csi}} function, thus the solution is an approximation to the exact solution of the lssvm optimization problem. The quality of the solution depends on the approximation and can be influenced by the "rank", "delta", and "tol" parameters. } \value{ An S4 object of class \code{"lssvm"} containing the fitted model. Accessor functions can be used to access the slots of the object (see examples) which include: \item{alpha}{the parameters of the \code{"lssvm"}} \item{coef}{the model coefficients (identical to alpha)} \item{b}{the model offset.} \item{xmatrix}{the training data used by the model} } \references{ J. A. K. Suykens and J. Vandewalle\cr \emph{Least Squares Support Vector Machine Classifiers}\cr Neural Processing Letters vol. 9, issue 3, June 1999\cr } \author{Alexandros Karatzoglou \cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{\code{\link{ksvm}}, \code{\link{gausspr}}, \code{\link{csi}} } \examples{ ## simple example data(iris) lir <- lssvm(Species~.,data=iris) lir lirr <- lssvm(Species~.,data= iris, reduced = FALSE) lirr ## Using the kernelMatrix interface iris <- unique(iris) rbf <- rbfdot(0.5) k <- kernelMatrix(rbf, as.matrix(iris[,-5])) klir <- lssvm(k, iris[, 5]) klir pre <- predict(klir, k) } \keyword{classif} \keyword{nonlinear} \keyword{methods} kernlab/man/ksvm-class.Rd0000644000175100001440000001532112117364353015042 0ustar hornikusers\name{ksvm-class} \docType{class} \alias{ksvm-class} \alias{SVindex} \alias{alphaindex} \alias{prob.model} \alias{scaling} \alias{prior} \alias{show} \alias{param} \alias{b} \alias{obj} \alias{nSV} \alias{coef,vm-method} \alias{SVindex,ksvm-method} \alias{alpha,ksvm-method} \alias{alphaindex,ksvm-method} \alias{cross,ksvm-method} \alias{error,ksvm-method} \alias{param,ksvm-method} \alias{fitted,ksvm-method} \alias{prior,ksvm-method} \alias{prob.model,ksvm-method} \alias{kernelf,ksvm-method} \alias{kpar,ksvm-method} \alias{lev,ksvm-method} \alias{kcall,ksvm-method} \alias{scaling,ksvm-method} \alias{type,ksvm-method} \alias{xmatrix,ksvm-method} \alias{ymatrix,ksvm-method} \alias{b,ksvm-method} \alias{obj,ksvm-method} \alias{nSV,ksvm-method} \title{Class "ksvm" } \description{An S4 class containing the output (model) of the \code{ksvm} Support Vector Machines function } \section{Objects from the Class}{ Objects can be created by calls of the form \code{new("ksvm", ...)} or by calls to the \code{ksvm} function.
} \section{Slots}{ \describe{ \item{\code{type}:}{Object of class \code{"character"} containing the support vector machine type ("C-svc", "nu-svc", "C-bsvc", "spoc-svc", "one-svc", "eps-svr", "nu-svr", "eps-bsvr")} \item{\code{param}:}{Object of class \code{"list"} containing the Support Vector Machine parameters (C, nu, epsilon)} \item{\code{kernelf}:}{Object of class \code{"function"} containing the kernel function} \item{\code{kpar}:}{Object of class \code{"list"} containing the kernel function parameters (hyperparameters)} \item{\code{kcall}:}{Object of class \code{"ANY"} containing the \code{ksvm} function call} \item{\code{scaling}:}{Object of class \code{"ANY"} containing the scaling information performed on the data} \item{\code{terms}:}{Object of class \code{"ANY"} containing the terms representation of the symbolic model used (when using a formula)} \item{\code{xmatrix}:}{Object of class \code{"input"} (\code{"list"} for multiclass problems or \code{"matrix"} for binary classification and regression problems) containing the support vectors calculated from the data matrix used during computations (possibly scaled and without NA). In the case of multi-class classification each list entry contains the support vectors from each binary classification problem from the one-against-one method.} \item{\code{ymatrix}:}{Object of class \code{"output"} the response \code{"matrix"} or \code{"factor"} or \code{"vector"} or \code{"logical"}} \item{\code{fitted}:}{Object of class \code{"output"} with the fitted values, predictions using the training set.} \item{\code{lev}:}{Object of class \code{"vector"} with the levels of the response (in the case of classification)} \item{\code{prob.model}:}{Object of class \code{"list"} with the class prob. model} \item{\code{prior}:}{Object of class \code{"list"} with the prior of the training set} \item{\code{nclass}:}{Object of class \code{"numeric"} containing the number of classes (in the case of classification)} \item{\code{alpha}:}{Object of class \code{"listI"} containing the resulting alpha vector (\code{"list"} or \code{"matrix"} in case of multiclass classification) (support vectors)} \item{\code{coef}:}{Object of class \code{"ANY"} containing the resulting coefficients} \item{\code{alphaindex}:}{Object of class \code{"list"} containing the indexes of the non-zero alphas (support vectors)} \item{\code{b}:}{Object of class \code{"numeric"} containing the resulting offset } \item{\code{SVindex}:}{Object of class \code{"vector"} containing the indexes of the support vectors} \item{\code{nSV}:}{Object of class \code{"numeric"} containing the number of support vectors } \item{\code{obj}:}{Object of class \code{vector} containing the value of the objective function.
When using one-against-one in multiclass classification this is a vector.} \item{\code{error}:}{Object of class \code{"numeric"} containing the training error} \item{\code{cross}:}{Object of class \code{"numeric"} containing the cross-validation error } \item{\code{n.action}:}{Object of class \code{"ANY"} containing the action performed for NAs } } } \section{Methods}{ \describe{ \item{SVindex}{\code{signature(object = "ksvm")}: return the indexes of support vectors} \item{alpha}{\code{signature(object = "ksvm")}: returns the complete alpha vector (with zero values)} \item{alphaindex}{\code{signature(object = "ksvm")}: returns the indexes of non-zero alphas (support vectors)} \item{cross}{\code{signature(object = "ksvm")}: returns the cross-validation error } \item{error}{\code{signature(object = "ksvm")}: returns the training error } \item{obj}{\code{signature(object = "ksvm")}: returns the value of the objective function} \item{fitted}{\code{signature(object = "vm")}: returns the fitted values (predict on training set) } \item{kernelf}{\code{signature(object = "ksvm")}: returns the kernel function} \item{kpar}{\code{signature(object = "ksvm")}: returns the kernel parameters (hyperparameters)} \item{lev}{\code{signature(object = "ksvm")}: returns the levels in case of classification } \item{prob.model}{\code{signature(object="ksvm")}: returns class prob. model values} \item{param}{\code{signature(object="ksvm")}: returns the parameters of the SVM in a list (C, epsilon, nu etc.)} \item{prior}{\code{signature(object="ksvm")}: returns the prior of the training set} \item{kcall}{\code{signature(object="ksvm")}: returns the \code{ksvm} function call} \item{scaling}{\code{signature(object = "ksvm")}: returns the scaling values } \item{show}{\code{signature(object = "ksvm")}: prints the object information} \item{type}{\code{signature(object = "ksvm")}: returns the problem type} \item{xmatrix}{\code{signature(object = "ksvm")}: returns the data matrix used} \item{ymatrix}{\code{signature(object = "ksvm")}: returns the response vector} } } \author{Alexandros Karatzoglou \cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{ \code{\link{ksvm}}, \code{\link{rvm-class}}, \code{\link{gausspr-class}} } \examples{ ## simple example using the promotergene data set data(promotergene) ## train a support vector machine gene <- ksvm(Class~.,data=promotergene,kernel="rbfdot", kpar=list(sigma=0.015),C=50,cross=4) gene # the kernel function kernelf(gene) # the alpha values alpha(gene) # the coefficients coef(gene) # the fitted values fitted(gene) # the cross validation error cross(gene) } \keyword{classes} kernlab/man/kfa-class.Rd0000644000175100001440000000371511304023134014611 0ustar hornikusers\name{kfa-class} \docType{class} \alias{kfa-class} \alias{alpha,kfa-method} \alias{alphaindex,kfa-method} \alias{kcall,kfa-method} \alias{kernelf,kfa-method} \alias{predict,kfa-method} \alias{xmatrix,kfa-method} \title{Class "kfa"} \description{The class of the object returned by the Kernel Feature Analysis \code{kfa} function} \section{Objects from the Class}{ Objects can be created by calls of the form \code{new("kfa", ...)} or by calling the \code{kfa} method. The objects contain the features along with the alpha values.
} \section{Slots}{ \describe{ \item{\code{alpha}:}{Object of class \code{"matrix"} containing the alpha values } \item{\code{alphaindex}:}{Object of class \code{"vector"} containing the indexes of the selected features} \item{\code{kernelf}:}{Object of class \code{"kfunction"} containing the kernel function used} \item{\code{xmatrix}:}{Object of class \code{"matrix"} containing the selected features} \item{\code{kcall}:}{Object of class \code{"call"} containing the \code{kfa} function call} \item{\code{terms}:}{Object of class \code{"ANY"} containing the formula terms} } } \section{Methods}{ \describe{ \item{alpha}{\code{signature(object = "kfa")}: returns the alpha values } \item{alphaindex}{\code{signature(object = "kfa")}: returns the index of the selected features} \item{kcall}{\code{signature(object = "kfa")}: returns the function call } \item{kernelf}{\code{signature(object = "kfa")}: returns the kernel function used } \item{predict}{\code{signature(object = "kfa")}: used to embed additional data points onto the feature base} \item{xmatrix}{\code{signature(object = "kfa")}: returns the selected features. } } } \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{\code{\link{kfa}}, \code{\link{kpca-class}} } \examples{ data(promotergene) f <- kfa(~.,data=promotergene) } \keyword{classes} kernlab/man/lssvm-class.Rd0000644000175100001440000001040611304023134015211 0ustar hornikusers\name{lssvm-class} \docType{class} \alias{lssvm-class} \alias{alpha,lssvm-method} \alias{b,lssvm-method} \alias{cross,lssvm-method} \alias{error,lssvm-method} \alias{kcall,lssvm-method} \alias{kernelf,lssvm-method} \alias{kpar,lssvm-method} \alias{param,lssvm-method} \alias{lev,lssvm-method} \alias{type,lssvm-method} \alias{alphaindex,lssvm-method} \alias{xmatrix,lssvm-method} \alias{ymatrix,lssvm-method} \alias{scaling,lssvm-method} \alias{nSV,lssvm-method} \title{Class "lssvm"} \description{The Least Squares Support Vector Machine object class} \section{Objects from the Class}{ Objects can be created by calls of the form \code{new("lssvm", ...)}
or by calling the \code{lssvm} function. } \section{Slots}{ \describe{ \item{\code{kernelf}:}{Object of class \code{"kfunction"} contains the kernel function used} \item{\code{kpar}:}{Object of class \code{"list"} contains the kernel parameters used } \item{\code{param}:}{Object of class \code{"list"} contains the regularization parameter used.} \item{\code{kcall}:}{Object of class \code{"call"} contains the function call used } \item{\code{type}:}{Object of class \code{"character"} contains the type of problem } \item{\code{coef}:}{Object of class \code{"ANY"} contains the model parameters } \item{\code{terms}:}{Object of class \code{"ANY"} contains the terms representation of the symbolic model used (when using a formula)} \item{\code{xmatrix}:}{Object of class \code{"matrix"} containing the data matrix used } \item{\code{ymatrix}:}{Object of class \code{"output"} containing the response matrix} \item{\code{fitted}:}{Object of class \code{"output"} containing the fitted values } \item{\code{b}:}{Object of class \code{"numeric"} containing the offset } \item{\code{lev}:}{Object of class \code{"vector"} containing the levels of the response (in case of classification) } \item{\code{scaling}:}{Object of class \code{"ANY"} containing the scaling information performed on the data} \item{\code{nclass}:}{Object of class \code{"numeric"} containing the number of classes (in case of classification) } \item{\code{alpha}:}{Object of class \code{"listI"} containing the computed alpha values } \item{\code{alphaindex}}{Object of class \code{"list"} containing the indexes for the alphas in various classes (in multi-class problems).} \item{\code{error}:}{Object of class \code{"numeric"} containing the training error} \item{\code{cross}:}{Object of class \code{"numeric"} containing the cross validation error} \item{\code{n.action}:}{Object of class \code{"ANY"} containing the action performed on NAs } \item{\code{nSV}:}{Object of class \code{"numeric"} containing the number of model parameters } } } \section{Methods}{ \describe{ \item{alpha}{\code{signature(object = "lssvm")}: returns the alpha vector} \item{cross}{\code{signature(object = "lssvm")}: returns the cross validation error } \item{error}{\code{signature(object = "lssvm")}: returns the training error } \item{fitted}{\code{signature(object = "vm")}: returns the fitted values } \item{kcall}{\code{signature(object = "lssvm")}: returns the call performed} \item{kernelf}{\code{signature(object = "lssvm")}: returns the kernel function used} \item{kpar}{\code{signature(object = "lssvm")}: returns the kernel parameters used} \item{param}{\code{signature(object = "lssvm")}: returns the regularization parameter used} \item{lev}{\code{signature(object = "lssvm")}: returns the response levels (in classification) } \item{type}{\code{signature(object = "lssvm")}: returns the type of problem} \item{scaling}{\code{signature(object = "lssvm")}: returns the scaling values } \item{xmatrix}{\code{signature(object = "lssvm")}: returns the data matrix used} \item{ymatrix}{\code{signature(object = "lssvm")}: returns the response matrix used} } } \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{ \code{\link{lssvm}}, \code{\link{ksvm-class}} } \examples{ # train model data(iris) test <- lssvm(Species~.,data=iris,var=2) test alpha(test) error(test) lev(test) } \keyword{classes} kernlab/man/ranking.Rd0000644000175100001440000001253414366220457014413 0ustar hornikusers\name{ranking} \alias{ranking} \alias{ranking,matrix-method}
\alias{ranking,list-method} \alias{ranking,kernelMatrix-method} \title{Ranking} \description{ A universal ranking algorithm which assigns importance/ranking to data points given a query. } \usage{ \S4method{ranking}{matrix}(x, y, kernel ="rbfdot", kpar = list(sigma = 1), scale = FALSE, alpha = 0.99, iterations = 600, edgegraph = FALSE, convergence = FALSE ,...) \S4method{ranking}{kernelMatrix}(x, y, alpha = 0.99, iterations = 600, convergence = FALSE,...) \S4method{ranking}{list}(x, y, kernel = "stringdot", kpar = list(length = 4, lambda = 0.5), alpha = 0.99, iterations = 600, convergence = FALSE, ...) } \arguments{ \item{x}{a matrix containing the data to be ranked, or the kernel matrix of the data to be ranked, or a list of character vectors} \item{y}{The index of the query point in the data matrix, or a vector of length equal to the rows of the data matrix having a one at the index of the query points and zero at all the other points.} \item{kernel}{the kernel function used in training and predicting. This parameter can be set to any function, of class kernel, which computes a dot product between two vector arguments. kernlab provides the most popular kernel functions which can be used by setting the kernel parameter to the following strings: \itemize{ \item \code{rbfdot} Radial Basis kernel function "Gaussian" \item \code{polydot} Polynomial kernel function \item \code{vanilladot} Linear kernel function \item \code{tanhdot} Hyperbolic tangent kernel function \item \code{laplacedot} Laplacian kernel function \item \code{besseldot} Bessel kernel function \item \code{anovadot} ANOVA RBF kernel function \item \code{splinedot} Spline kernel } The kernel parameter can also be set to a user defined function of class kernel by passing the function name as an argument. } \item{kpar}{the list of hyper-parameters (kernel parameters). This is a list which contains the parameters to be used with the kernel function. Valid parameters for existing kernels are: \itemize{ \item \code{sigma} inverse kernel width for the Radial Basis kernel function "rbfdot" and the Laplacian kernel "laplacedot". \item \code{degree, scale, offset} for the Polynomial kernel "polydot" \item \code{scale, offset} for the Hyperbolic tangent kernel function "tanhdot" \item \code{sigma, order, degree} for the Bessel kernel "besseldot". \item \code{sigma, degree} for the ANOVA kernel "anovadot". } Hyper-parameters for user defined kernels can be passed through the kpar parameter as well.} \item{scale}{If TRUE the data matrix columns are scaled to zero mean and unit variance.} \item{alpha}{ The \code{alpha} parameter takes values between 0 and 1 and is used to control the authoritative scores received from the unlabeled points. For 0, no global structure is found and the algorithm ranks the points similarly to the original distance metric.} \item{iterations}{Maximum number of iterations} \item{edgegraph}{Construct edgegraph (only supported with the RBF kernel)} \item{convergence}{Include convergence matrix in results} \item{\dots}{Additional arguments} } \details{ A simple universal ranking algorithm which exploits the intrinsic global geometric structure of the data. In many real-world applications this should be superior to a local method in which the data are simply ranked by pairwise Euclidean distances. First, a weighted network is defined on the data and an authoritative score is assigned to each query.
The query points act as source nodes that continually pump their authoritative scores to the remaining points via the weighted network, and the remaining points further spread the scores they received to their neighbors. This spreading process is repeated until convergence, and the points are ranked according to their score at the end of the iterations. } \value{ An S4 object of class \code{ranking} which extends the \code{matrix} class. The first column of the returned matrix contains the original index of the points in the data matrix, the second column contains the final score received by each point, and the third column the ranking of the point. The object contains the following slots : \item{edgegraph}{Containing the edgegraph of the data points. } \item{convergence}{Containing the convergence matrix} } \references{ D. Zhou, J. Weston, A. Gretton, O. Bousquet, B. Schoelkopf \cr \emph{Ranking on Data Manifolds}\cr Advances in Neural Information Processing Systems 16.\cr MIT Press Cambridge Mass. 2004 \cr \url{https://papers.neurips.cc/paper/2447-ranking-on-data-manifolds.pdf} } \author{Alexandros Karatzoglou \cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{ \code{\link{ranking-class}}, \code{\link{specc}} } \examples{ data(spirals) ## create data from spirals ran <- spirals[rowSums(abs(spirals) < 0.55) == 2,] ## rank points according to similarity to the most upper left point ranked <- ranking(ran, 54, kernel = "rbfdot", kpar = list(sigma = 100), edgegraph = TRUE) ranked[54, 2] <- max(ranked[-54, 2]) c<-1:86 op <- par(mfrow = c(1, 2),pty="s") plot(ran) plot(ran, cex=c[ranked[,3]]/40) } \keyword{cluster} \keyword{classif} kernlab/man/predict.kqr.Rd0000644000175100001440000000214112117365174015203 0ustar hornikusers\name{predict.kqr} \alias{predict.kqr} \alias{predict,kqr-method} \title{Predict method for kernel Quantile Regression object} \description{Prediction of test data for kernel quantile regression} \usage{ \S4method{predict}{kqr}(object, newdata) } \arguments{ \item{object}{an S4 object of class \code{kqr} created by the \code{kqr} function} \item{newdata}{a data frame, matrix, or kernelMatrix containing new data} } \value{The value of the quantile given by the computed \code{kqr} model in a vector of length equal to the number of rows of \code{newdata}. } \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \keyword{methods} \keyword{regression} \examples{ # create data x <- sort(runif(300)) y <- sin(pi*x) + rnorm(300,0,sd=exp(sin(2*pi*x))) # first calculate the median qrm <- kqr(x, y, tau = 0.5, C=0.15) # predict and plot plot(x, y) ytest <- predict(qrm, x) lines(x, ytest, col="blue") # calculate 0.9 quantile qrm <- kqr(x, y, tau = 0.9, kernel = "rbfdot", kpar= list(sigma=10), C=0.15) ytest <- predict(qrm, x) lines(x, ytest, col="red") } kernlab/man/promotergene.Rd0000644000175100001440000000311514221633377015466 0ustar hornikusers\name{promotergene} \alias{promotergene} \docType{data} \title{E. coli promoter gene sequences (DNA)} \description{ Promoters have a region where a protein (RNA polymerase) must make contact and the helical DNA sequence must have a valid conformation so that the two pieces of the contact region spatially align. The data contains DNA sequences of promoters and non-promoters. } \usage{data(promotergene)} \format{ A data frame with 106 observations and 58 variables. The first variable \code{Class} is a factor with levels \code{+} for a promoter gene and \code{-} for a non-promoter gene.
The remaining 57 variables \code{V2 to V58} are factors describing the sequence. The DNA bases are coded as follows: \code{a} adenine, \code{c} cytosine, \code{g} guanine, \code{t} thymine. } \source{ UCI Machine Learning data repository \cr \url{https://archive.ics.uci.edu/ml/machine-learning-databases/molecular-biology/promoter-gene-sequences/} } \references{ Towell, G., Shavlik, J. and Noordewier, M. \cr \emph{Refinement of Approximate Domain Theories by Knowledge-Based Artificial Neural Networks.} \cr In Proceedings of the Eighth National Conference on Artificial Intelligence (AAAI-90) } \examples{ data(promotergene) ## Create classification model using Gaussian Processes prom <- gausspr(Class~.,data=promotergene,kernel="rbfdot", kpar=list(sigma=0.02),cross=4) prom ## Create model using Support Vector Machines promsv <- ksvm(Class~.,data=promotergene,kernel="laplacedot", kpar="automatic",C=60,cross=4) promsv } \keyword{datasets} kernlab/man/kha.Rd0000644000175100001440000001161514221633615013522 0ustar hornikusers\name{kha} \alias{kha} \alias{kha,formula-method} \alias{kha,matrix-method} \alias{predict,kha-method} \encoding{latin1} \title{Kernel Principal Components Analysis} \description{ The Kernel Hebbian Algorithm is a nonlinear iterative algorithm for principal component analysis.} \usage{ \S4method{kha}{formula}(x, data = NULL, na.action, ...) \S4method{kha}{matrix}(x, kernel = "rbfdot", kpar = list(sigma = 0.1), features = 5, eta = 0.005, th = 1e-4, maxiter = 10000, verbose = FALSE, na.action = na.omit, ...) } \arguments{ \item{x}{ The data matrix indexed by row or a formula describing the model. Note that an intercept is always included, whether given in the formula or not.} \item{data}{an optional data frame containing the variables in the model (when using a formula).} \item{kernel}{the kernel function used in training and predicting. This parameter can be set to any function, of class kernel, which computes the inner product in feature space between two vector arguments (see \code{\link{kernels}}). \pkg{kernlab} provides the most popular kernel functions which can be used by setting the kernel parameter to the following strings: \itemize{ \item \code{rbfdot} Radial Basis kernel function "Gaussian" \item \code{polydot} Polynomial kernel function \item \code{vanilladot} Linear kernel function \item \code{tanhdot} Hyperbolic tangent kernel function \item \code{laplacedot} Laplacian kernel function \item \code{besseldot} Bessel kernel function \item \code{anovadot} ANOVA RBF kernel function \item \code{splinedot} Spline kernel } The kernel parameter can also be set to a user defined function of class kernel by passing the function name as an argument. } \item{kpar}{the list of hyper-parameters (kernel parameters). This is a list which contains the parameters to be used with the kernel function. Valid parameters for existing kernels are: \itemize{ \item \code{sigma} inverse kernel width for the Radial Basis kernel function "rbfdot" and the Laplacian kernel "laplacedot". \item \code{degree, scale, offset} for the Polynomial kernel "polydot" \item \code{scale, offset} for the Hyperbolic tangent kernel function "tanhdot" \item \code{sigma, order, degree} for the Bessel kernel "besseldot". \item \code{sigma, degree} for the ANOVA kernel "anovadot". } Hyper-parameters for user defined kernels can be passed through the kpar parameter as well.} \item{features}{Number of features (principal components) to return.
(default: 5)} \item{eta}{The Hebbian learning rate (default: 0.005)} \item{th}{the smallest value of the convergence step (default: 0.0001) } \item{maxiter}{the maximum number of iterations.} \item{verbose}{print convergence every 100 iterations. (default: FALSE)} \item{na.action}{A function to specify the action to be taken if \code{NA}s are found. The default action is \code{na.omit}, which leads to rejection of cases with missing values on any required variable. An alternative is \code{na.fail}, which causes an error if \code{NA} cases are found. (NOTE: If given, this argument must be named.)} \item{\dots}{ additional parameters} } \details{The original form of KPCA can only be used on small data sets since it requires the estimation of the eigenvectors of a full kernel matrix. The Kernel Hebbian Algorithm iteratively estimates the Kernel Principal Components with only linear order memory complexity. (see ref. for more details) } \value{ An S4 object containing the principal component vectors along with the corresponding normalization values. \item{pcv}{a matrix containing the principal component vectors (column wise)} \item{eig}{The normalization values} \item{xmatrix}{The original data matrix} all the slots of the object can be accessed by accessor functions. } \note{The predict function can be used to embed new data on the new space} \references{Kwang In Kim, M.O. Franz and B. Schölkopf\cr \emph{Kernel Hebbian Algorithm for Iterative Kernel Principal Component Analysis}\cr Max-Planck-Institut für biologische Kybernetik, Tübingen (109)\cr \url{https://is.mpg.de/fileadmin/user_upload/files/publications/pdf2302.pdf} } \author{Alexandros Karatzoglou \cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{\code{\link{kpca}}, \code{\link{kfa}}, \code{\link{kcca}}, \code{pca}} \examples{ # another example using the iris data(iris) test <- sample(1:150,70) kpc <- kha(~.,data=iris[-test,-5],kernel="rbfdot", kpar=list(sigma=0.2),features=2, eta=0.001, maxiter=65) #print the principal component vectors pcv(kpc) #plot the data projection on the components plot(predict(kpc,iris[,-5]),col=as.integer(iris[,5]), xlab="1st Principal Component",ylab="2nd Principal Component") } \keyword{cluster} kernlab/man/kqr.Rd0000644000175100001440000002055314221633732013555 0ustar hornikusers\name{kqr} \alias{kqr} \alias{kqr,formula-method} \alias{kqr,vector-method} \alias{kqr,matrix-method} \alias{kqr,list-method} \alias{kqr,kernelMatrix-method} \alias{coef,kqr-method} \alias{show,kqr-method} \title{Kernel Quantile Regression.} \description{The Kernel Quantile Regression algorithm \code{kqr} performs non-parametric Quantile Regression.} \usage{ \S4method{kqr}{formula}(x, data=NULL, ..., subset, na.action = na.omit, scaled = TRUE) \S4method{kqr}{vector}(x,...) \S4method{kqr}{matrix}(x, y, scaled = TRUE, tau = 0.5, C = 0.1, kernel = "rbfdot", kpar = "automatic", reduced = FALSE, rank = dim(x)[1]/6, fit = TRUE, cross = 0, na.action = na.omit) \S4method{kqr}{kernelMatrix}(x, y, tau = 0.5, C = 0.1, fit = TRUE, cross = 0) \S4method{kqr}{list}(x, y, tau = 0.5, C = 0.1, kernel = "stringdot", kpar = list(length=4, lambda=0.5), fit = TRUE, cross = 0) } \arguments{ \item{x}{The data or a symbolic description of the model to be fit. When not using a formula, x can be a matrix or vector containing the training data, or a kernel matrix of class \code{kernelMatrix} of the training data, or a list of character vectors (for use with the string kernel).
Note that the intercept is always excluded, whether given in the formula or not.} \item{data}{an optional data frame containing the variables in the model. By default the variables are taken from the environment which \code{kqr} is called from.} \item{y}{a numeric vector or a column matrix containing the response.} \item{scaled}{A logical vector indicating the variables to be scaled. If \code{scaled} is of length 1, the value is recycled as many times as needed and all non-binary variables are scaled. Per default, data are scaled internally (both \code{x} and \code{y} variables) to zero mean and unit variance. The center and scale values are returned and used for later predictions. (default: TRUE)} \item{tau}{the quantile to be estimated, this is generally a number strictly between 0 and 1. For 0.5 the median is calculated. (default: 0.5)} \item{C}{the cost regularization parameter. This parameter controls the smoothness of the fitted function, essentially higher values for C lead to less smooth functions. (default: 0.1)} \item{kernel}{the kernel function used in training and predicting. This parameter can be set to any function, of class kernel, which computes a dot product between two vector arguments. \code{kernlab} provides the most popular kernel functions which can be used by setting the kernel parameter to the following strings: \itemize{ \item \code{rbfdot} Radial Basis kernel function "Gaussian" \item \code{polydot} Polynomial kernel function \item \code{vanilladot} Linear kernel function \item \code{tanhdot} Hyperbolic tangent kernel function \item \code{laplacedot} Laplacian kernel function \item \code{besseldot} Bessel kernel function \item \code{anovadot} ANOVA RBF kernel function \item \code{splinedot} Spline kernel \item \code{stringdot} String kernel } The kernel parameter can also be set to a user defined function of class kernel by passing the function name as an argument. } \item{kpar}{the list of hyper-parameters (kernel parameters). This is a list which contains the parameters to be used with the kernel function. Valid parameters for existing kernels are: \itemize{ \item \code{sigma} inverse kernel width for the Radial Basis kernel function "rbfdot" and the Laplacian kernel "laplacedot". \item \code{degree, scale, offset} for the Polynomial kernel "polydot" \item \code{scale, offset} for the Hyperbolic tangent kernel function "tanhdot" \item \code{sigma, order, degree} for the Bessel kernel "besseldot". \item \code{sigma, degree} for the ANOVA kernel "anovadot". \item \code{length, lambda, normalized} for the "stringdot" kernel where length is the length of the strings considered, lambda the decay factor and normalized a logical parameter determining if the kernel evaluations should be normalized. } Hyper-parameters for user defined kernels can be passed through the \code{kpar} parameter as well. In the case of a Radial Basis kernel function (Gaussian) kpar can also be set to the string "automatic" which uses the heuristics in 'sigest' to calculate a good 'sigma' value for the Gaussian RBF or Laplace kernel, from the data. (default = "automatic"). } \item{reduced}{use an incomplete Cholesky decomposition to calculate a decomposed form \eqn{Z} of the kernel matrix \eqn{K} (where \eqn{K = ZZ'}) and perform the calculations with \eqn{Z}. This might be useful when using \code{kqr} with large datasets since normally an \eqn{n \times n} kernel matrix would be computed.
Setting \code{reduced} to \code{TRUE} makes use of \code{csi} to compute a decomposed form instead, and thus only an \eqn{n \times m} matrix where \eqn{m < n} and \eqn{n} the sample size is stored in memory (default: FALSE)} \item{rank}{the rank m of the decomposed matrix calculated when using an incomplete Cholesky decomposition. This parameter is only taken into account when \code{reduced} is \code{TRUE} (default: dim(x)[1]/6)} \item{fit}{indicates whether the fitted values should be computed and included in the model or not (default: 'TRUE')} \item{cross}{if an integer value k>0 is specified, a k-fold cross validation on the training data is performed to assess the quality of the model: the pinball loss for quantile regression} \item{subset}{An index vector specifying the cases to be used in the training sample. (NOTE: If given, this argument must be named.)} \item{na.action}{A function to specify the action to be taken if \code{NA}s are found. The default action is \code{na.omit}, which leads to rejection of cases with missing values on any required variable. An alternative is \code{na.fail}, which causes an error if \code{NA} cases are found. (NOTE: If given, this argument must be named.)} \item{\dots}{additional parameters.} } \details{In quantile regression a function is fitted to the data so that it satisfies the property that a portion \eqn{\tau} of the data \eqn{y} lies below the estimate. While the error bars of many regression problems can be viewed as such estimates, quantile regression estimates this quantity directly. Kernel quantile regression is similar to nu-Support Vector Regression in that it minimizes a regularized loss function in RKHS. The difference between nu-SVR and kernel quantile regression is in the type of loss function used, which in the case of quantile regression is the pinball loss \deqn{L_\tau(\xi) = \tau \xi \textrm{ if } \xi \ge 0, \textrm{ and } (\tau - 1) \xi \textrm{ otherwise}} (see the reference for details). Minimizing the regularized loss boils down to a quadratic problem which is solved using an interior point QP solver \code{ipop} implemented in \code{kernlab}. } \value{ An S4 object of class \code{kqr} containing the fitted model along with information. Accessor functions can be used to access the slots of the object which include : \item{alpha}{The resulting model parameters which can be also accessed by \code{coef}.} \item{kernelf}{the kernel function used.} \item{error}{Training error (if fit == TRUE)} see \code{kqr-class} for more details. } \references{Ichiro Takeuchi, Quoc V. Le, Timothy D. Sears, Alexander J.
Smola\cr \emph{Nonparametric Quantile Estimation}\cr Journal of Machine Learning Research 7, 2006, 1231-1264 \cr \url{https://www.jmlr.org/papers/volume7/takeuchi06a/takeuchi06a.pdf} } \author{Alexandros Karatzoglou \cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{\code{\link{predict.kqr}}, \code{\link{kqr-class}}, \code{\link{ipop}}, \code{\link{rvm}}, \code{\link{ksvm}}} \examples{ # create data x <- sort(runif(300)) y <- sin(pi*x) + rnorm(300,0,sd=exp(sin(2*pi*x))) # first calculate the median qrm <- kqr(x, y, tau = 0.5, C=0.15) # predict and plot plot(x, y) ytest <- predict(qrm, x) lines(x, ytest, col="blue") # calculate 0.9 quantile qrm <- kqr(x, y, tau = 0.9, kernel = "rbfdot", kpar = list(sigma=10), C=0.15) ytest <- predict(qrm, x) lines(x, ytest, col="red") # calculate 0.1 quantile qrm <- kqr(x, y, tau = 0.1, C=0.15) ytest <- predict(qrm, x) lines(x, ytest, col="green") # print first 10 model coefficients coef(qrm)[1:10] } \keyword{regression} \keyword{nonlinear} \keyword{methods} kernlab/man/reuters.Rd0000644000175100001440000000111711304023134014430 0ustar hornikusers\name{reuters} \alias{reuters} \alias{rlabels} \title{Reuters Text Data} \description{A small sample from the Reuters news data set.} \usage{data(reuters)} \format{ A list of 40 text documents along with the labels. \code{reuters} contains the text documents and \code{rlabels} the labels in a vector. } \details{ This dataset contains a list of 40 text documents along with the labels. The data consist of 20 documents from the \code{acq} category and 20 documents from the \code{crude} category. The labels are stored in \code{rlabels}. } \source{Reuters} \keyword{datasets} kernlab/man/vm-class.Rd0000644000175100001440000000732511304023134014473 0ustar hornikusers\name{vm-class} \docType{class} \alias{vm-class} \alias{cross} \alias{alpha} \alias{error} \alias{type} \alias{kernelf} \alias{xmatrix} \alias{ymatrix} \alias{lev} \alias{kcall} \alias{alpha,vm-method} \alias{cross,vm-method} \alias{error,vm-method} \alias{fitted,vm-method} \alias{kernelf,vm-method} \alias{kpar,vm-method} \alias{lev,vm-method} \alias{kcall,vm-method} \alias{type,vm-method} \alias{xmatrix,vm-method} \alias{ymatrix,vm-method} \title{Class "vm" } \description{An S4 VIRTUAL class used as a base for the various vector machine classes in \pkg{kernlab}} \section{Objects from the Class}{ Objects from the class cannot be created directly but only contained in other classes.
} \section{Slots}{ \describe{ \item{\code{alpha}:}{Object of class \code{"listI"} containing the resulting alpha vector (list in case of multiclass classification) (support vectors)} \item{\code{type}:}{Object of class \code{"character"} containing the vector machine type e.g., ("C-svc", "nu-svc", "C-bsvc", "spoc-svc", "one-svc", "eps-svr", "nu-svr", "eps-bsvr")} \item{\code{kernelf}:}{Object of class \code{"function"} containing the kernel function} \item{\code{kpar}:}{Object of class \code{"list"} containing the kernel function parameters (hyperparameters)} \item{\code{kcall}:}{Object of class \code{"call"} containing the function call} \item{\code{terms}:}{Object of class \code{"ANY"} containing the terms representation of the symbolic model used (when using a formula)} \item{\code{xmatrix}:}{Object of class \code{"input"} the data matrix used during computations (support vectors) (possibly scaled and without NA)} \item{\code{ymatrix}:}{Object of class \code{"output"} the response matrix/vector } \item{\code{fitted}:}{Object of class \code{"output"} with the fitted values, predictions using the training set.} \item{\code{lev}:}{Object of class \code{"vector"} with the levels of the response (in the case of classification)} \item{\code{nclass}:}{Object of class \code{"numeric"} containing the number of classes (in the case of classification)} \item{\code{error}:}{Object of class \code{"vector"} containing the training error} \item{\code{cross}:}{Object of class \code{"vector"} containing the cross-validation error } \item{\code{n.action}:}{Object of class \code{"ANY"} containing the action performed for NA } } } \section{Methods}{ \describe{ \item{alpha}{\code{signature(object = "vm")}: returns the complete alpha vector (with zero values)} \item{cross}{\code{signature(object = "vm")}: returns the cross-validation error } \item{error}{\code{signature(object = "vm")}: returns the training error } \item{fitted}{\code{signature(object = "vm")}: returns the fitted values (predict on training set) } \item{kernelf}{\code{signature(object = "vm")}: returns the kernel function} \item{kpar}{\code{signature(object = "vm")}: returns the kernel parameters (hyperparameters)} \item{lev}{\code{signature(object = "vm")}: returns the levels in case of classification } \item{kcall}{\code{signature(object="vm")}: returns the function call} \item{type}{\code{signature(object = "vm")}: returns the problem type} \item{xmatrix}{\code{signature(object = "vm")}: returns the data matrix used (support vectors)} \item{ymatrix}{\code{signature(object = "vm")}: returns the response vector} } } \author{Alexandros Karatzoglou \cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{ \code{\link{ksvm-class}}, \code{\link{rvm-class}}, \code{\link{gausspr-class}} } \keyword{classes} kernlab/man/kcca.Rd0000644000175100001440000000724313561515565013667 0ustar hornikusers\name{kcca} \alias{kcca} \alias{kcca,matrix-method} \title{Kernel Canonical Correlation Analysis} \description{ Computes the canonical correlation analysis in feature space. } \usage{ \S4method{kcca}{matrix}(x, y, kernel="rbfdot", kpar=list(sigma=0.1), gamma = 0.1, ncomps = 10, ...) } %- maybe also 'usage' for other objects documented here. \arguments{ \item{x}{a matrix containing data indexed by row} \item{y}{a matrix containing data indexed by row} \item{kernel}{the kernel function used in training and predicting. This parameter can be set to any function, of class kernel, which computes an inner product in feature space between two vector arguments.
kernlab provides the most popular kernel functions which can be used by setting the kernel parameter to the following strings: \itemize{ \item \code{rbfdot} Radial Basis kernel function "Gaussian" \item \code{polydot} Polynomial kernel function \item \code{vanilladot} Linear kernel function \item \code{tanhdot} Hyperbolic tangent kernel function \item \code{laplacedot} Laplacian kernel function \item \code{besseldot} Bessel kernel function \item \code{anovadot} ANOVA RBF kernel function \item \code{splinedot} Spline kernel } The kernel parameter can also be set to a user defined function of class kernel by passing the function name as an argument. } \item{kpar}{the list of hyper-parameters (kernel parameters). This is a list which contains the parameters to be used with the kernel function. Valid parameters for existing kernels are : \itemize{ \item \code{sigma} inverse kernel width for the Radial Basis kernel function "rbfdot" and the Laplacian kernel "laplacedot". \item \code{degree, scale, offset} for the Polynomial kernel "polydot" \item \code{scale, offset} for the Hyperbolic tangent kernel function "tanhdot" \item \code{sigma, order, degree} for the Bessel kernel "besseldot". \item \code{sigma, degree} for the ANOVA kernel "anovadot". } Hyper-parameters for user defined kernels can be passed through the kpar parameter as well.} \item{gamma}{regularization parameter (default : 0.1)} \item{ncomps}{number of canonical components (default : 10) } \item{\dots}{additional parameters for the \code{kpca} function} } \details{ The kernel version of canonical correlation analysis. Kernel Canonical Correlation Analysis (KCCA) is a non-linear extension of CCA. Given two random variables, KCCA aims at extracting the information which is shared by the two random variables. More precisely given \eqn{x} and \eqn{y} the purpose of KCCA is to provide nonlinear mappings \eqn{f(x)} and \eqn{g(y)} such that their correlation is maximized. } \value{ An S4 object containing the following slots: \item{kcor}{Correlation coefficients in feature space} \item{xcoef}{estimated coefficients for the \code{x} variables in the feature space} \item{ycoef}{estimated coefficients for the \code{y} variables in the feature space} %% \item{xvar}{The canonical variates for \code{x}} %% \item{yvar}{The canonical variates for \code{y}} } \references{ Malte Kuss, Thore Graepel \cr \emph{The Geometry Of Kernel Canonical Correlation Analysis}\cr \url{https://www.microsoft.com/en-us/research/publication/the-geometry-of-kernel-canonical-correlation-analysis/}} \author{ Alexandros Karatzoglou \cr \email{alexandros.karatzoglou@ci.tuwien.ac.at} } \seealso{\code{\link{cancor}}, \code{\link{kpca}}, \code{\link{kfa}}, \code{\link{kha}}} \examples{ ## dummy data x <- matrix(rnorm(30),15) y <- matrix(rnorm(30),15) kcca(x,y,ncomps=2) } \keyword{multivariate} kernlab/man/gausspr.Rd0000644000175100001440000001661514221634017014445 0ustar hornikusers\name{gausspr} \alias{gausspr} \alias{gausspr,formula-method} \alias{gausspr,vector-method} \alias{gausspr,matrix-method} \alias{coef,gausspr-method} \alias{show,gausspr-method} %- Also NEED an '\alias' for EACH other topic documented here. \title{ Gaussian processes for regression and classification} \description{ \code{gausspr} is an implementation of Gaussian processes for classification and regression. } \usage{ \S4method{gausspr}{formula}(x, data=NULL, ..., subset, na.action = na.omit, scaled = TRUE) \S4method{gausspr}{vector}(x,...) 
\S4method{gausspr}{matrix}(x, y, scaled = TRUE, type= NULL, kernel="rbfdot", kpar="automatic", var=1, variance.model = FALSE, tol=0.0005, cross=0, fit=TRUE, ... , subset, na.action = na.omit) } %- maybe also 'usage' for other objects documented here. \arguments{ \item{x}{a symbolic description of the model to be fit or a matrix or vector when a formula interface is not used. When not using a formula x is a matrix or vector containing the variables in the model} \item{data}{an optional data frame containing the variables in the model. By default the variables are taken from the environment which `gausspr' is called from.} \item{y}{a response vector with one label for each row/component of \code{x}. Can be either a factor (for classification tasks) or a numeric vector (for regression).} \item{type}{Type of problem. Either "classification" or "regression". Depending on whether \code{y} is a factor or not, the default setting for \code{type} is \code{classification} or \code{regression}, respectively, but can be overwritten by setting an explicit value.\cr} \item{scaled}{A logical vector indicating the variables to be scaled. If \code{scaled} is of length 1, the value is recycled as many times as needed and all non-binary variables are scaled. Per default, data are scaled internally (both \code{x} and \code{y} variables) to zero mean and unit variance. The center and scale values are returned and used for later predictions.} \item{kernel}{the kernel function used in training and predicting. This parameter can be set to any function, of class kernel, which computes a dot product between two vector arguments. kernlab provides the most popular kernel functions which can be used by setting the kernel parameter to the following strings: \itemize{ \item \code{rbfdot} Radial Basis kernel function "Gaussian" \item \code{polydot} Polynomial kernel function \item \code{vanilladot} Linear kernel function \item \code{tanhdot} Hyperbolic tangent kernel function \item \code{laplacedot} Laplacian kernel function \item \code{besseldot} Bessel kernel function \item \code{anovadot} ANOVA RBF kernel function \item \code{splinedot} Spline kernel } The kernel parameter can also be set to a user defined function of class kernel by passing the function name as an argument. } \item{kpar}{the list of hyper-parameters (kernel parameters). This is a list which contains the parameters to be used with the kernel function. Valid parameters for existing kernels are : \itemize{ \item \code{sigma} inverse kernel width for the Radial Basis kernel function "rbfdot" and the Laplacian kernel "laplacedot". \item \code{degree, scale, offset} for the Polynomial kernel "polydot" \item \code{scale, offset} for the Hyperbolic tangent kernel function "tanhdot" \item \code{sigma, order, degree} for the Bessel kernel "besseldot". \item \code{sigma, degree} for the ANOVA kernel "anovadot". 
} Hyper-parameters for user defined kernels can be passed through the kpar parameter as well.} \item{var}{the initial noise variance (only for regression) (default : 1)} \item{variance.model}{build model for variance or standard deviation estimation (only for regression) (default : FALSE)} \item{tol}{tolerance of termination criterion (default: 0.0005)} \item{fit}{indicates whether the fitted values should be computed and included in the model or not (default: 'TRUE')} \item{cross}{if an integer value k>0 is specified, a k-fold cross validation on the training data is performed to assess the quality of the model: the Mean Squared Error for regression} \item{subset}{An index vector specifying the cases to be used in the training sample. (NOTE: If given, this argument must be named.)} \item{na.action}{A function to specify the action to be taken if \code{NA}s are found. The default action is \code{na.omit}, which leads to rejection of cases with missing values on any required variable. An alternative is \code{na.fail}, which causes an error if \code{NA} cases are found. (NOTE: If given, this argument must be named.)} \item{\dots}{ additional parameters} } \details{ A Gaussian process is specified by a mean and a covariance function. The mean is a function of \eqn{x} (which is often the zero function), and the covariance is a function \eqn{C(x,x')} which expresses the expected covariance between the value of the function \eqn{y} at the points \eqn{x} and \eqn{x'}. The actual function \eqn{y(x)} in any data modeling problem is assumed to be a single sample from this Gaussian distribution. Laplace approximation is used for the parameter estimation in Gaussian processes for classification.\cr The predict function can return class probabilities for classification problems by setting the \code{type} parameter to "probabilities". For regression, setting the \code{type} parameter to "variance" or "sdeviation" returns the estimated variance or standard deviation at each predicted point. } \value{ An S4 object of class "gausspr" containing the fitted model along with information. Accessor functions can be used to access the slots of the object which include: \item{alpha}{The resulting model parameters} \item{error}{Training error (if fit == TRUE)} } \references{ C. K. I. Williams and D. Barber \cr Bayesian classification with Gaussian processes.
\cr IEEE Transactions on Pattern Analysis and Machine Intelligence, 20(12):1342-1351, 1998\cr \url{https://homepages.inf.ed.ac.uk/ckiw/postscript/pami_final.ps.gz} } \author{Alexandros Karatzoglou \cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{\code{\link{predict.gausspr}}, \code{\link{rvm}}, \code{\link{ksvm}}, \code{\link{gausspr-class}}, \code{\link{lssvm}} } \examples{ # train model data(iris) test <- gausspr(Species~.,data=iris,var=2) test alpha(test) # predict on the training set predict(test,iris[,-5]) # class probabilities predict(test, iris[,-5], type="probabilities") # create regression data x <- seq(-20,20,0.1) y <- sin(x)/x + rnorm(401,sd=0.03) # regression with Gaussian processes foo <- gausspr(x, y) foo # predict and plot ytest <- predict(foo, x) plot(x, y, type = "l") lines(x, ytest, col="red") # predict and variance x <- c(-4, -3, -2, -1, 0, 0.5, 1, 2) y <- c(-2, 0, -0.5, 1, 2, 1, 0, -1) plot(x,y) foo2 <- gausspr(x, y, variance.model = TRUE) xtest <- seq(-4,2,0.2) lines(xtest, predict(foo2, xtest)) lines(xtest, predict(foo2, xtest)+2*predict(foo2,xtest, type="sdeviation"), col="red") lines(xtest, predict(foo2, xtest)-2*predict(foo2,xtest, type="sdeviation"), col="red") } \keyword{classif} \keyword{regression} \keyword{nonlinear} \keyword{methods} kernlab/man/onlearn-class.Rd0000644000175100001440000000672412117365114015523 0ustar hornikusers\name{onlearn-class} \docType{class} \alias{onlearn-class} \alias{alpha,onlearn-method} \alias{b,onlearn-method} \alias{buffer,onlearn-method} \alias{fit,onlearn-method} \alias{kernelf,onlearn-method} \alias{kpar,onlearn-method} \alias{predict,onlearn-method} \alias{rho,onlearn-method} \alias{rho} \alias{show,onlearn-method} \alias{type,onlearn-method} \alias{xmatrix,onlearn-method} \alias{buffer} \title{Class "onlearn"} \description{ The class of objects used by the Kernel-based Online learning algorithms} \section{Objects from the Class}{ Objects can be created by calls of the form \code{new("onlearn", ...)} or by calls to the function \code{inlearn}.
} \section{Slots}{ \describe{ \item{\code{kernelf}:}{Object of class \code{"function"} containing the used kernel function} \item{\code{buffer}:}{Object of class \code{"numeric"} containing the size of the buffer} \item{\code{kpar}:}{Object of class \code{"list"} containing the hyperparameters of the kernel function.} \item{\code{xmatrix}:}{Object of class \code{"matrix"} containing the data points (similar to support vectors) } \item{\code{fit}:}{Object of class \code{"numeric"} containing the decision function value of the last data point} \item{\code{onstart}:}{Object of class \code{"numeric"} used for indexing } \item{\code{onstop}:}{Object of class \code{"numeric"} used for indexing} \item{\code{alpha}:}{Object of class \code{"ANY"} containing the model parameters} \item{\code{rho}:}{Object of class \code{"numeric"} containing model parameter} \item{\code{b}:}{Object of class \code{"numeric"} containing the offset} \item{\code{pattern}:}{Object of class \code{"factor"} used for dealing with factors} \item{\code{type}:}{Object of class \code{"character"} containing the problem type (classification, regression, or novelty)} } } \section{Methods}{ \describe{ \item{alpha}{\code{signature(object = "onlearn")}: returns the model parameters} \item{b}{\code{signature(object = "onlearn")}: returns the offset } \item{buffer}{\code{signature(object = "onlearn")}: returns the buffer size} \item{fit}{\code{signature(object = "onlearn")}: returns the last decision function value} \item{kernelf}{\code{signature(object = "onlearn")}: return the kernel function used} \item{kpar}{\code{signature(object = "onlearn")}: returns the hyper-parameters used} \item{onlearn}{\code{signature(obj = "onlearn")}: the learning function} \item{predict}{\code{signature(object = "onlearn")}: the predict function} \item{rho}{\code{signature(object = "onlearn")}: returns model parameter} \item{show}{\code{signature(object = "onlearn")}: show function} \item{type}{\code{signature(object = "onlearn")}: returns the type of problem} \item{xmatrix}{\code{signature(object = "onlearn")}: returns the stored data points} } } \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{ \code{\link{onlearn}}, \code{\link{inlearn}} } \examples{ ## create toy data set x <- rbind(matrix(rnorm(100),,2),matrix(rnorm(100)+3,,2)) y <- matrix(c(rep(1,50),rep(-1,50)),,1) ## initialize onlearn object on <- inlearn(2,kernel="rbfdot",kpar=list(sigma=0.2), type="classification") ## learn one data point at a time for(i in sample(1:100,100)) on <- onlearn(on,x[i,],y[i],nu=0.03,lambda=0.1) sign(predict(on,x)) } \keyword{classes} kernlab/man/couple.Rd0000644000175100001440000000372214366220445014251 0ustar hornikusers\name{couple} \alias{couple} \title{Probabilities Coupling function} \description{ \code{couple} is used to link class-probability estimates produced by pairwise coupling in multi-class classification problems. } \usage{ couple(probin, coupler = "minpair") } \arguments{ \item{probin}{ The pairwise coupled class-probability estimates} \item{coupler}{The type of coupler to use. Currently \code{minpair}, \code{pkpd} and \code{vote} are supported (see reference for more details). If \code{vote} is selected the returned value is a primitive estimate based on the given votes.} } \details{ As binary classification problems are much easier to solve, many techniques exist to decompose multi-class classification problems into many binary classification problems (voting, error codes, etc.).
Pairwise coupling (one against one) constructs a rule for discriminating between every pair of classes and then selects the class with the most winning two-class decisions. By using Platt's probability output for SVMs one can get a class probability for each of the \eqn{k(k-1)/2} models created in the pairwise classification. The couple method implements various techniques to combine these probabilities. } \value{ A matrix with the resulting probability estimates. } \references{ Ting-Fan Wu, Chih-Jen Lin, Ruby C. Weng\cr \emph{Probability Estimates for Multi-class Classification by Pairwise Coupling}\cr Neural Information Processing Symposium 2003 \cr \url{https://papers.neurips.cc/paper/2454-probability-estimates-for-multi-class-classification-by-pairwise-coupling.pdf} } \author{Alexandros Karatzoglou \cr \email{alexandros.karatzoglou@ci.tuwien.ac.at} } \seealso{ \code{\link{predict.ksvm}}, \code{\link{ksvm}}} \examples{ ## create artificial pairwise probabilities pairs <- matrix(c(0.82,0.12,0.76,0.1,0.9,0.05),2) couple(pairs) couple(pairs, coupler="pkpd") couple(pairs, coupler = "vote") } \keyword{classif} kernlab/man/plot.Rd0000644000175100001440000000216511304023134013721 0ustar hornikusers\name{plot} \alias{plot.ksvm} \alias{plot,ksvm,missing-method} \alias{plot,ksvm-method} \title{plot method for support vector object} \description{Plot a binary classification support vector machine object. The \code{plot} function returns a contour plot of the decision values. } \usage{ \S4method{plot}{ksvm}(object, data=NULL, grid = 50, slice = list()) } \arguments{ \item{object}{a \code{ksvm} classification object created by the \code{ksvm} function} \item{data}{a data frame or matrix containing data to be plotted} \item{grid}{granularity for the contour plot.} \item{slice}{a list of named numeric values for the dimensions held constant (only needed if more than two variables are used). Dimensions not specified are fixed at 0.
} } \seealso{\code{\link{ksvm}}} \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \keyword{methods} \keyword{regression} \keyword{classif} \examples{ ## Demo of the plot function x <- rbind(matrix(rnorm(120),,2),matrix(rnorm(120,mean=3),,2)) y <- matrix(c(rep(1,60),rep(-1,60))) svp <- ksvm(x,y,type="C-svc") plot(svp,data=x) } kernlab/man/prc-class.Rd0000644000175100001440000000353311304023134014632 0ustar hornikusers\name{prc-class} \docType{class} \alias{prc-class} \alias{eig} \alias{pcv} \alias{eig,prc-method} \alias{kcall,prc-method} \alias{kernelf,prc-method} \alias{pcv,prc-method} \alias{xmatrix,prc-method} \title{Class "prc"} \description{Principal Components Class} \section{Objects of class "prc"}{Objects from the class cannot be created directly but only contained in other classes.} \section{Slots}{ \describe{ \item{\code{pcv}:}{Object of class \code{"matrix"} containing the principal component vectors } \item{\code{eig}:}{Object of class \code{"vector"} containing the corresponding eigenvalues} \item{\code{kernelf}:}{Object of class \code{"kfunction"} containing the kernel function used} \item{\code{kpar}:}{Object of class \code{"list"} containing the kernel parameters used } \item{\code{xmatrix}:}{Object of class \code{"input"} containing the data matrix used } \item{\code{kcall}:}{Object of class \code{"ANY"} containing the function call } \item{\code{n.action}:}{Object of class \code{"ANY"} containing the action performed on NA } } } \section{Methods}{ \describe{ \item{eig}{\code{signature(object = "prc")}: returns the eigenvalues } \item{kcall}{\code{signature(object = "prc")}: returns the performed call} \item{kernelf}{\code{signature(object = "prc")}: returns the used kernel function} \item{pcv}{\code{signature(object = "prc")}: returns the principal component vectors } \item{predict}{\code{signature(object = "prc")}: embeds new data } \item{xmatrix}{\code{signature(object = "prc")}: returns the used data matrix } } } \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{ \code{\link{kpca-class}},\code{\link{kha-class}}, \code{\link{kfa-class}} } \keyword{classes} kernlab/man/predict.ksvm.Rd0000644000175100001440000000513214366217302015366 0ustar hornikusers\name{predict.ksvm} \alias{predict.ksvm} \alias{predict,ksvm-method} \title{predict method for support vector object} \description{Prediction of test data using support vector machines} \usage{ \S4method{predict}{ksvm}(object, newdata, type = "response", coupler = "minpair") } \arguments{ \item{object}{an S4 object of class \code{ksvm} created by the \code{ksvm} function} \item{newdata}{a data frame or matrix containing new data} \item{type}{one of \code{response}, \code{probabilities}, \code{votes}, \code{decision} indicating the type of output: predicted values, matrix of class probabilities, matrix of vote counts, or matrix of decision values.} \item{coupler}{Coupling method used in the multiclass case, can be one of \code{minpair} or \code{pkpd} (see reference for more details).} } \value{ If \code{type(object)} is \code{C-svc}, \code{nu-svc}, \code{C-bsvc} or \code{spoc-svc} the vector returned depends on the argument \code{type}: \item{response}{predicted classes (the classes with majority vote).} \item{probabilities}{matrix of class probabilities (one column for each class and one row for each input).} \item{votes}{matrix of vote counts (one column for each class and one row for each new input)} If \code{type(object)} is \code{eps-svr},
\code{eps-bsvr} or \code{nu-svr} a vector of predicted values is returned. If \code{type(object)} is \code{one-classification} a vector of logical values is returned. } \references{ \itemize{ \item T.F. Wu, C.J. Lin, R.C. Weng. \cr \emph{Probability estimates for Multi-class Classification by Pairwise Coupling}\cr \url{https://www.csie.ntu.edu.tw/~cjlin/papers/svmprob/svmprob.pdf} \item H.T. Lin, C.J. Lin, R.C. Weng (2007), A note on Platt's probabilistic outputs for support vector machines. \emph{Machine Learning}, \bold{68}, 267--276. \doi{10.1007/s10994-007-5018-6}. } } \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \keyword{methods} \keyword{regression} \keyword{classif} \examples{ ## example using the promotergene data set data(promotergene) ## create test and training set ind <- sample(1:dim(promotergene)[1],20) genetrain <- promotergene[-ind, ] genetest <- promotergene[ind, ] ## train a support vector machine gene <- ksvm(Class~.,data=genetrain,kernel="rbfdot", kpar=list(sigma=0.015),C=70,cross=4,prob.model=TRUE) gene ## predict gene type probabilities on the test set genetype <- predict(gene,genetest,type="probabilities") genetype } kernlab/man/kqr-class.Rd0000644000175100001440000001051412117363316014654 0ustar hornikusers\name{kqr-class} \docType{class} \alias{kqr-class} \alias{alpha,kqr-method} \alias{cross,kqr-method} \alias{error,kqr-method} \alias{kcall,kqr-method} \alias{kernelf,kqr-method} \alias{kpar,kqr-method} \alias{param,kqr-method} \alias{alphaindex,kqr-method} \alias{b,kqr-method} \alias{xmatrix,kqr-method} \alias{ymatrix,kqr-method} \alias{scaling,kqr-method} \title{Class "kqr"} \description{The Kernel Quantile Regression object class} \section{Objects from the Class}{ Objects can be created by calls of the form \code{new("kqr", ...)}
or by calling the \code{kqr} function. } \section{Slots}{ \describe{ \item{\code{kernelf}:}{Object of class \code{"kfunction"} contains the kernel function used} \item{\code{kpar}:}{Object of class \code{"list"} contains the kernel parameter used } \item{\code{coef}:}{Object of class \code{"ANY"} containing the model parameters} \item{\code{param}:}{Object of class \code{"list"} contains the cost parameter C and tau parameter used } \item{\code{kcall}:}{Object of class \code{"list"} contains the used function call } \item{\code{terms}:}{Object of class \code{"ANY"} contains the terms representation of the symbolic model used (when using a formula)} \item{\code{xmatrix}:}{Object of class \code{"input"} containing the data matrix used } \item{\code{ymatrix}:}{Object of class \code{"output"} containing the response matrix} \item{\code{fitted}:}{Object of class \code{"output"} containing the fitted values } \item{\code{alpha}:}{Object of class \code{"listI"} containing the computed alpha values } \item{\code{b}:}{Object of class \code{"numeric"} containing the offset of the model.} \item{\code{scaling}}{Object of class \code{"ANY"} containing the scaling coefficients of the data (when \code{scaled = TRUE} is used).} \item{\code{error}:}{Object of class \code{"numeric"} containing the training error} \item{\code{cross}:}{Object of class \code{"numeric"} containing the cross validation error} \item{\code{n.action}:}{Object of class \code{"ANY"} containing the action performed on NA } \item{\code{nclass}:}{Inherited from class \code{vm}, not used in kqr} \item{\code{lev}:}{Inherited from class \code{vm}, not used in kqr} \item{\code{type}:}{Inherited from class \code{vm}, not used in kqr} } } \section{Methods}{ \describe{ \item{coef}{\code{signature(object = "kqr")}: returns the coefficients (alpha) of the model} \item{alpha}{\code{signature(object = "kqr")}: returns the alpha vector (identical to \code{coef})} \item{b}{\code{signature(object = "kqr")}: returns the offset beta of the model.} \item{cross}{\code{signature(object = "kqr")}: returns the cross validation error } \item{error}{\code{signature(object = "kqr")}: returns the training error } \item{fitted}{\code{signature(object = "vm")}: returns the fitted values } \item{kcall}{\code{signature(object = "kqr")}: returns the call performed} \item{kernelf}{\code{signature(object = "kqr")}: returns the kernel function used} \item{kpar}{\code{signature(object = "kqr")}: returns the kernel parameter used} \item{param}{\code{signature(object = "kqr")}: returns the cost regularization parameter C and tau used} \item{xmatrix}{\code{signature(object = "kqr")}: returns the data matrix used} \item{ymatrix}{\code{signature(object = "kqr")}: returns the response matrix used} \item{scaling}{\code{signature(object = "kqr")}: returns the scaling coefficients of the data (when \code{scaled = TRUE} is used)} } } \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{ \code{\link{kqr}}, \code{\link{vm-class}}, \code{\link{ksvm-class}} } \examples{ # create data x <- sort(runif(300)) y <- sin(pi*x) + rnorm(300,0,sd=exp(sin(2*pi*x))) # first calculate the median qrm <- kqr(x, y, tau = 0.5, C=0.15) # predict and plot plot(x, y) ytest <- predict(qrm, x) lines(x, ytest, col="blue") # calculate 0.9 quantile qrm <- kqr(x, y, tau = 0.9, kernel = "rbfdot", kpar = list(sigma = 10), C = 0.15) ytest <- predict(qrm, x) lines(x, ytest, col="red") # print model coefficients and other information coef(qrm) b(qrm) error(qrm)
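# illustrative sketch: the pinball loss can also be estimated by k-fold
# cross validation (pass cross > 0 when fitting, then use the cross()
# accessor); cross = 3 here is an arbitrary choice
qrcv <- kqr(x, y, tau = 0.5, C = 0.15, cross = 3)
cross(qrcv)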
kernelf(qrm) } \keyword{classes} kernlab/man/kkmeans.Rd0000644000175100001440000001345214250171226014406 0ustar hornikusers\name{kkmeans} \alias{kkmeans} \alias{kkmeans,matrix-method} \alias{kkmeans,formula-method} \alias{kkmeans,list-method} \alias{kkmeans,kernelMatrix-method} \title{Kernel k-means} \description{ A weighted kernel version of the famous k-means algorithm. } \usage{ \S4method{kkmeans}{formula}(x, data = NULL, na.action = na.omit, ...) \S4method{kkmeans}{matrix}(x, centers, kernel = "rbfdot", kpar = "automatic", alg="kkmeans", p=1, na.action = na.omit, ...) \S4method{kkmeans}{kernelMatrix}(x, centers, ...) \S4method{kkmeans}{list}(x, centers, kernel = "stringdot", kpar = list(length=4, lambda=0.5), alg ="kkmeans", p = 1, na.action = na.omit, ...) } \arguments{ \item{x}{the matrix of data to be clustered, or a symbolic description of the model to be fit, or a kernel matrix of class \code{kernelMatrix}, or a list of character vectors.} \item{data}{an optional data frame containing the variables in the model. By default the variables are taken from the environment which `kkmeans' is called from.} \item{centers}{Either the number of clusters or a matrix of initial cluster centers. If the former, a random initial partitioning is used.} \item{kernel}{the kernel function used in training and predicting. This parameter can be set to any function, of class kernel, which computes an inner product in feature space between two vector arguments (see \code{\link{kernels}}). \pkg{kernlab} provides the most popular kernel functions which can be used by setting the kernel parameter to the following strings: \itemize{ \item \code{rbfdot} Radial Basis kernel "Gaussian" \item \code{polydot} Polynomial kernel \item \code{vanilladot} Linear kernel \item \code{tanhdot} Hyperbolic tangent kernel \item \code{laplacedot} Laplacian kernel \item \code{besseldot} Bessel kernel \item \code{anovadot} ANOVA RBF kernel \item \code{splinedot} Spline kernel \item \code{stringdot} String kernel } Setting the kernel parameter to "matrix" treats \code{x} as a kernel matrix calling the \code{kernelMatrix} interface.\cr The kernel parameter can also be set to a user defined function of class kernel by passing the function name as an argument. } \item{kpar}{a character string or the list of hyper-parameters (kernel parameters). The default character string \code{"automatic"} uses a heuristic to determine a suitable value for the width parameter of the RBF kernel.\cr A list can also be used containing the parameters to be used with the kernel function. Valid parameters for existing kernels are: \itemize{ \item \code{sigma} inverse kernel width for the Radial Basis kernel function "rbfdot" and the Laplacian kernel "laplacedot". \item \code{degree, scale, offset} for the Polynomial kernel "polydot" \item \code{scale, offset} for the Hyperbolic tangent kernel function "tanhdot" \item \code{sigma, order, degree} for the Bessel kernel "besseldot". \item \code{sigma, degree} for the ANOVA kernel "anovadot". \item \code{length, lambda, normalized} for the "stringdot" kernel where length is the length of the strings considered, lambda the decay factor and normalized a logical parameter determining if the kernel evaluations should be normalized. } Hyper-parameters for user defined kernels can be passed through the kpar parameter as well.} \item{alg}{the algorithm to use. Options currently include \code{kkmeans} and \code{kerninghan}.
} \item{p}{a parameter used to keep the affinity matrix positive semidefinite} \item{na.action}{The action to perform on NA} \item{\dots}{additional parameters} } \details{ \code{kernel k-means} uses the 'kernel trick' (i.e. implicitly projecting all data into a non-linear feature space with the use of a kernel) in order to deal with one of the major drawbacks of \code{k-means}, namely that it cannot capture clusters that are not linearly separable in input space. \cr The algorithm is implemented using the triangle inequality to avoid unnecessary and computationally expensive distance calculations. This leads to a significant speedup, particularly on large data sets with a high number of clusters. \cr With a particular choice of weights this algorithm becomes equivalent to the Kernighan-Lin and norm-cut graph partitioning algorithms. \cr The function also supports input in the form of a kernel matrix or a list of characters for text clustering.\cr The data can be passed to the \code{kkmeans} function in a \code{matrix} or a \code{data.frame}, in addition \code{kkmeans} also supports input in the form of a kernel matrix of class \code{kernelMatrix} or as a list of character vectors where a string kernel has to be used. } \value{ An S4 object of class \code{specc} which extends the class \code{vector} containing integers indicating the cluster to which each point is allocated. The following slots contain useful information: \item{centers}{A matrix of cluster centers.} \item{size}{The number of points in each cluster} \item{withinss}{The within-cluster sum of squares for each cluster} \item{kernelf}{The kernel function used} } \references{ Inderjit Dhillon, Yuqiang Guan, Brian Kulis\cr A Unified view of Kernel k-means, Spectral Clustering and Graph Partitioning\cr UTCS Technical Report\cr \url{https://people.bu.edu/bkulis/pubs/spectral_techreport.pdf} } \author{ Alexandros Karatzoglou \cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{\code{\link{specc}}, \code{\link{kpca}}, \code{\link{kcca}} } \examples{ ## Cluster the iris data set. data(iris) sc <- kkmeans(as.matrix(iris[,-5]), centers=3) sc centers(sc) size(sc) withinss(sc) } \keyword{cluster} kernlab/man/csi-class.Rd0000644000175100001440000000545411304023134014630 0ustar hornikusers\name{csi-class} \docType{class} \alias{csi-class} \alias{Q} \alias{R} \alias{predgain} \alias{truegain} \alias{diagresidues,csi-method} \alias{maxresiduals,csi-method} \alias{pivots,csi-method} \alias{predgain,csi-method} \alias{truegain,csi-method} \alias{Q,csi-method} \alias{R,csi-method} \title{Class "csi"} \description{The reduced Cholesky decomposition object} \section{Objects from the Class}{Objects can be created by calls of the form \code{new("csi", ...)}
or by calling the \code{csi} function.} \section{Slots}{ \describe{ \item{\code{.Data}:}{Object of class \code{"matrix"} contains the decomposed matrix} \item{\code{pivots}:}{Object of class \code{"vector"} contains the pivots performed} \item{\code{diagresidues}:}{Object of class \code{"vector"} contains the diagonal residues} \item{\code{maxresiduals}:}{Object of class \code{"vector"} contains the maximum residues} \item{predgain}{Object of class \code{"vector"} contains the predicted gain before adding each column} \item{truegain}{Object of class \code{"vector"} contains the actual gain after adding each column} \item{Q}{Object of class \code{"matrix"} contains Q from the QR decomposition of the kernel matrix} \item{R}{Object of class \code{"matrix"} contains R from the QR decomposition of the kernel matrix} } } \section{Extends}{ Class \code{"matrix"}, directly. } \section{Methods}{ \describe{ \item{diagresidues}{\code{signature(object = "csi")}: returns the diagonal residues} \item{maxresiduals}{\code{signature(object = "csi")}: returns the maximum residues} \item{pivots}{\code{signature(object = "csi")}: returns the pivots performed} \item{predgain}{\code{signature(object = "csi")}: returns the predicted gain before adding each column} \item{truegain}{\code{signature(object = "csi")}: returns the actual gain after adding each column} \item{Q}{\code{signature(object = "csi")}: returns Q from the QR decomposition of the kernel matrix} \item{R}{\code{signature(object = "csi")}: returns R from the QR decomposition of the kernel matrix} } } \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{ \code{\link{csi}}, \code{\link{inchol-class}}} \examples{ data(iris) ## create multidimensional y matrix yind <- t(matrix(1:3,3,150)) ymat <- matrix(0, 150, 3) ymat[yind==as.integer(iris[,5])] <- 1 datamatrix <- as.matrix(iris[,-5]) # initialize kernel function rbf <- rbfdot(sigma=0.1) rbf Z <- csi(datamatrix,ymat, kernel=rbf, rank = 30) dim(Z) pivots(Z) # calculate kernel matrix K <- crossprod(t(Z)) # difference between approximated and real kernel matrix (K - kernelMatrix(kernel=rbf, datamatrix))[6,] } \keyword{classes} kernlab/man/rvm-class.Rd0000644000175100001440000001100211304023134014650 0ustar hornikusers\name{rvm-class} \docType{class} \alias{rvm-class} \alias{RVindex} \alias{mlike} \alias{nvar} \alias{RVindex,rvm-method} \alias{alpha,rvm-method} \alias{cross,rvm-method} \alias{error,rvm-method} \alias{kcall,rvm-method} \alias{kernelf,rvm-method} \alias{kpar,rvm-method} \alias{lev,rvm-method} \alias{mlike,rvm-method} \alias{nvar,rvm-method} \alias{type,rvm-method} \alias{xmatrix,rvm-method} \alias{ymatrix,rvm-method} \title{Class "rvm"} \description{Relevance Vector Machine Class} \section{Objects from the Class}{ Objects can be created by calls of the form \code{new("rvm", ...)} or by calling the \code{rvm} function.
} \section{Slots}{ \describe{ \item{\code{tol}:}{Object of class \code{"numeric"} contains tolerance of termination criteria used.} \item{\code{kernelf}:}{Object of class \code{"kfunction"} contains the kernel function used } \item{\code{kpar}:}{Object of class \code{"list"} contains the hyperparameters used} \item{\code{kcall}:}{Object of class \code{"call"} contains the function call} \item{\code{type}:}{Object of class \code{"character"} contains the type of problem} \item{\code{terms}:}{Object of class \code{"ANY"} containing the terms representation of the symbolic model used (when using a formula interface)} \item{\code{xmatrix}:}{Object of class \code{"matrix"} contains the data matrix used during computation} \item{\code{ymatrix}:}{Object of class \code{"output"} contains the response matrix} \item{\code{fitted}:}{Object of class \code{"output"} with the fitted values (predict on training set).} \item{\code{lev}:}{Object of class \code{"vector"} contains the levels of the response (in classification)} \item{\code{nclass}:}{Object of class \code{"numeric"} contains the number of classes (in classification)} \item{\code{alpha}:}{Object of class \code{"listI"} containing the resulting alpha vector} \item{\code{coef}:}{Object of class \code{"ANY"} containing the resulting model parameters} \item{\code{nvar}:}{Object of class \code{"numeric"} containing the calculated variance (in case of regression)} \item{\code{mlike}:}{Object of class \code{"numeric"} containing the computed maximum likelihood} \item{\code{RVindex}:}{Object of class \code{"vector"} containing the indexes of the resulting relevance vectors } \item{\code{nRV}:}{Object of class \code{"numeric"} containing the number of relevance vectors} \item{\code{cross}:}{Object of class \code{"numeric"} containing the resulting cross validation error } \item{\code{error}:}{Object of class \code{"numeric"} containing the training error} \item{\code{n.action}:}{Object of class \code{"ANY"} containing the action performed on NA} } } \section{Methods}{ \describe{ \item{RVindex}{\code{signature(object = "rvm")}: returns the index of the relevance vectors } \item{alpha}{\code{signature(object = "rvm")}: returns the resulting alpha vector} \item{cross}{\code{signature(object = "rvm")}: returns the resulting cross validation error} \item{error}{\code{signature(object = "rvm")}: returns the training error } \item{fitted}{\code{signature(object = "vm")}: returns the fitted values } \item{kcall}{\code{signature(object = "rvm")}: returns the function call } \item{kernelf}{\code{signature(object = "rvm")}: returns the used kernel function } \item{kpar}{\code{signature(object = "rvm")}: returns the parameters of the kernel function} \item{lev}{\code{signature(object = "rvm")}: returns the levels of the response (in classification)} \item{mlike}{\code{signature(object = "rvm")}: returns the estimated maximum likelihood} \item{nvar}{\code{signature(object = "rvm")}: returns the calculated variance (in regression)} \item{type}{\code{signature(object = "rvm")}: returns the type of problem} \item{xmatrix}{\code{signature(object = "rvm")}: returns the data matrix used during computation} \item{ymatrix}{\code{signature(object = "rvm")}: returns the used response } } } \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{ \code{\link{rvm}}, \code{\link{ksvm-class}} } \examples{ # create data x <- seq(-20,20,0.1) y <- sin(x)/x + rnorm(401,sd=0.05) # train relevance vector machine foo <- rvm(x, y) foo alpha(foo)
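# the estimated maximum likelihood can be inspected with the documented
# mlike() accessor (see the Methods section above)
mlike(foo)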
RVindex(foo) fitted(foo) kernelf(foo) nvar(foo) ## show slots slotNames(foo) } \keyword{classes} kernlab/DESCRIPTION0000644000175100001440000000272614366231470013431 0ustar hornikusersPackage: kernlab Version: 0.9-32 Title: Kernel-Based Machine Learning Lab Authors@R: c(person("Alexandros", "Karatzoglou", role = c("aut", "cre"), email = "alexandros.karatzoglou@gmail.com"), person("Alex", "Smola", role = "aut"), person("Kurt", "Hornik", role = "aut", email = "Kurt.Hornik@R-project.org", comment = c(ORCID = "0000-0003-4198-9911")), person("National ICT Australia (NICTA)", role = "cph"), person(c("Michael", "A."), "Maniscalco", role = c("ctb", "cph")), person(c("Choon", "Hui"), "Teo", role = "ctb")) Description: Kernel-based machine learning methods for classification, regression, clustering, novelty detection, quantile regression and dimensionality reduction. Among other methods 'kernlab' includes Support Vector Machines, Spectral Clustering, Kernel PCA, Gaussian Processes and a QP solver. Depends: R (>= 2.10) Imports: methods, stats, grDevices, graphics LazyLoad: Yes License: GPL-2 NeedsCompilation: yes Packaged: 2023-01-31 14:16:15 UTC; hornik Author: Alexandros Karatzoglou [aut, cre], Alex Smola [aut], Kurt Hornik [aut] (), National ICT Australia (NICTA) [cph], Michael A. Maniscalco [ctb, cph], Choon Hui Teo [ctb] Maintainer: Alexandros Karatzoglou Repository: CRAN Date/Publication: 2023-01-31 15:26:48 UTC kernlab/build/0000755000175100001440000000000014366221257013015 5ustar hornikuserskernlab/build/vignette.rds0000644000175100001440000000045514366221257015360 0ustar hornikusersuPN0t$T!¥?TR!Um\MI$vp"n|9eCakhwv^BBK|%_oaC|{{vG$~I,Y4n>>ʤ\$<.~)an*_VBmsWPHnn(7y՚}TTCqG.`(yL+%3ʒl4֠Ɗ_pN |R 'k^smWqğ%hpǶ;.Xm&L(E; ekernlab/build/partial.rdb0000644000175100001440000000007514366221216015137 0ustar hornikusersb```b`abb`b1 H020piּb C".X7kernlab/src/0000755000175100001440000000000014366221257012505 5ustar hornikuserskernlab/src/dtrqsol.c0000644000175100001440000000333611304023134014325 0ustar hornikusers#include #include extern double mymax(double, double); /* LEVEL 1 BLAS */ /*extern double ddot_(int *, double *, int *, double *, int *);*/ void dtrqsol(int n, double *x, double *p, double delta, double *sigma) { /* c ********** c c Subroutine dtrqsol c c This subroutine computes the largest (non-negative) solution c of the quadratic trust region equation c c ||x + sigma*p|| = delta. c c The code is only guaranteed to produce a non-negative solution c if ||x|| <= delta, and p != 0. If the trust region equation has c no solution, sigma = 0. c c parameters: c c n is an integer variable. c On entry n is the number of variables. c On exit n is unchanged. c c x is a double precision array of dimension n. c On entry x must contain the vector x. c On exit x is unchanged. c c p is a double precision array of dimension n. c On entry p must contain the vector p. c On exit p is unchanged. c c delta is a double precision variable. c On entry delta specifies the scalar delta. c On exit delta is unchanged. c c sigma is a double precision variable. c On entry sigma need not be specified. c On exit sigma contains the non-negative solution. c c ********** */ int inc = 1; double dsq = delta*delta, ptp, ptx, rad, xtx; ptx = F77_CALL(ddot)(&n, p, &inc, x, &inc); ptp = F77_CALL(ddot)(&n, p, &inc, p, &inc); xtx = F77_CALL(ddot)(&n, x, &inc, x, &inc); /* Guard against abnormal cases. 
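In exact arithmetic the discriminant ptx*ptx + ptp*(dsq - xtx) is non-negative whenever ||x|| <= delta, but floating-point rounding can push it slightly below zero, hence the clamp to zero (mymax) below before taking the square root.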
*/ rad = ptx*ptx + ptp*(dsq - xtx); rad = sqrt(mymax(rad, 0)); if (ptx > 0) *sigma = (dsq - xtx)/(ptx + rad); else if (rad > 0) *sigma = (rad - ptx)/ptp; else *sigma = 0; } kernlab/src/esa.h0000644000175100001440000001062112234152620013415 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the Suffix Array based String Kernel. * * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. * * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/ESA.h // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 #ifndef ESA_H #define ESA_H #include "datatype.h" #include "errorcode.h" #include "lcp.h" #include "ctable.h" #include "ilcpfactory.h" #include "isafactory.h" #include #include //#define SLINK // #define SSARRAY // does not yet work correctly, CW class ESA { private: int _verb; public: UInt32 size; //' The length of #text# SYMBOL *text; //' Text corresponds to SA #ifdef SSARRAY int *suftab; //' Suffix Array #else UInt32 *suftab; //' Suffix Array #endif LCP lcptab; //' LCP array ChildTable childtab; //' Child table (fields merged) UInt32 *suflink; //' Suffix link table. Two fields: l,r //' --- for bucket table --- UInt32 bcktab_depth; //' Number of char defining each bucket UInt32 bcktab_size; //' size of bucket table UInt32 *bcktab_val; //' value column of bucket table UInt32 *bcktab_key4; //' 4-bytes key column of Bucket table UInt32 *coef4; UInt32 hash_value4; UInt64 *bcktab_key8; //' 8-bytes key column of Bucket table UInt64 *coef8; UInt64 hash_value8; //' --- /// Constructors ESA(const UInt32 & size_, SYMBOL *text_, int verb=INFO); /// Destructor virtual ~ESA(); /// Construct child table ErrorCode ConstructChildTable(); /// Get suffix link interval ErrorCode GetSuflink(const UInt32 &i, const UInt32 &j, UInt32 &sl_i, UInt32 &sl_j); /// Find the suffix link ErrorCode FindSuflink(const UInt32 &parent_i, const UInt32 &parent_j, const UInt32 &child_i, const UInt32 &child_j, UInt32 &sl_i, UInt32 &sl_j); /// Construct suffix link table ErrorCode ConstructSuflink(); /// Construct bucket table ErrorCode ConstructBcktab(const UInt32 &alphabet_size=256); /// Get all non-singleton child-intervals ErrorCode GetChildIntervals(const UInt32 &lb, const UInt32 &rb, std::vector<std::pair<UInt32, UInt32> > &q); /// Get intervals by index ErrorCode GetIntervalByIndex(const UInt32 &parent_i, const UInt32 &parent_j, const UInt32 &start_idx, UInt32 &child_i, UInt32 &child_j); /// Get intervals by character ErrorCode GetIntervalByChar(const UInt32 &parent_i, const UInt32 &parent_j, const SYMBOL &start_ch, const UInt32 &depth, UInt32 &child_i, UInt32 &child_j); /// Get lcp value ErrorCode GetLcp(const UInt32 &i, const UInt32 &j, UInt32 &val); /// Compare pattern to text[suftab[idx]..length].
ErrorCode Compare(const UInt32 &idx, const UInt32 &depth, SYMBOL *pattern, const UInt32 &p_len, UInt32 &matched_len); /// Find longest substring of pattern in enhanced suffix array. ErrorCode Match(const UInt32 &i, const UInt32 &j, SYMBOL *pattern, const UInt32 p_len, UInt32 &lb, UInt32 &rb, UInt32 &matched_len); /// Similar to Match() but returns also floor interval of [lb..rb] ErrorCode ExactSuffixMatch(const UInt32 &i, const UInt32 &j, const UInt32 &offset, SYMBOL *pattern, const UInt32 p_len, UInt32 &lb, UInt32 &rb, UInt32 &matched_len, UInt32 &floor_lb, UInt32 &floor_rb, UInt32 &floor_len); }; #endif kernlab/src/stack.h0000644000175100001440000000623313333062601013754 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the MSufSort suffix sorting algorithm (Version 2.2). * * The Initial Developer of the Original Code is * Michael A. Maniscalco * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. * * Contributor(s): * * Michael A. Maniscalco * * ***** END LICENSE BLOCK ***** */ #ifndef MSUFSORT_STACK_H #define MSUFSORT_STACK_H //============================================================================================= // A quick and dirty stack class for use with the MSufSort algorithm // // Author: M.A. Maniscalco // Date: 7/30/04 // email: michael@www.michael-maniscalco.com // //============================================================================================= #include "memory.h" template <class T> class Stack { public: Stack(unsigned int initialSize, unsigned int maxExpandSize, bool preAllocate = false): m_initialSize(initialSize), m_maxExpandSize(maxExpandSize), m_preAllocate(preAllocate) { Initialize(); } virtual ~Stack(){SetSize(0);} void Push(T value); T & Pop(); T & Top(); void SetSize(unsigned int stackSize); void Initialize(); unsigned int Count(); void Clear(); T * m_stack; T * m_stackPtr; T * m_endOfStack; unsigned int m_stackSize; unsigned int m_initialSize; unsigned int m_maxExpandSize; bool m_preAllocate; }; template <class T> inline void Stack<T>::Clear() { m_stackPtr = m_stack; } template <class T> inline unsigned int Stack<T>::Count() { return (unsigned int)(m_stackPtr - m_stack); } template <class T> inline void Stack<T>::Initialize() { m_stack = m_endOfStack = m_stackPtr = 0; m_stackSize = 0; if (m_preAllocate) SetSize(m_initialSize); } template <class T> inline void Stack<T>::Push(T value) { if (m_stackPtr >= m_endOfStack) { unsigned int newSize = (m_stackSize < m_maxExpandSize) ?
m_stackSize + m_maxExpandSize : (m_stackSize << 1); SetSize(newSize); } *(m_stackPtr++) = value; } template <class T> inline T & Stack<T>::Pop() { return *(--m_stackPtr); } template <class T> inline T & Stack<T>::Top() { return *(m_stackPtr - 1); } template <class T> inline void Stack<T>::SetSize(unsigned int stackSize) { if (m_stackSize == stackSize) return; T * newStack = 0; if (stackSize) { newStack = new T[stackSize]; unsigned int bytesToCopy = (unsigned int)(m_stackPtr - m_stack) * (unsigned int)sizeof(T); if (bytesToCopy) memcpy((void *)newStack, m_stack, bytesToCopy); m_stackPtr = &newStack[m_stackPtr - m_stack]; m_endOfStack = &newStack[stackSize]; m_stackSize = stackSize; } if (m_stack) delete [] m_stack; m_stack = newStack; } #endif kernlab/src/errorcode.h0000644000175100001440000000374312234152620014636 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the Suffix Array based String Kernel. * * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. * * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/ErrorCode.cpp // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 #ifndef _ERRORCODE_H_ #define _ERRORCODE_H_ #include "datatype.h" #include // Verbosity level enum verbosity {QUIET, INFO, DEBUG1}; #define ErrorCode UInt32 /** * for general use */ #define NOERROR 0 #define GENERAL_ERROR 1 #define MEM_ALLOC_FAILED 2 #define INVALID_PARAM 3 #define ARRAY_EMPTY 4 #define OPERATION_FAILED 5 /** * SuffixArray */ #define MATCH_NOT_FOUND 101 #define PARTIAL_MATCH 102 /** * LCP */ #define LCP_COMPACT_FAILED 201 #define CHECKERROR(i) { \ if((i) != NOERROR) { \ exit(EXIT_FAILURE); \ } \ } // #define MESSAGE(msg) { std::cout<<(msg)< * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/LCP.h // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 // 11 Oct 2006 #ifndef LCP_H #define LCP_H #include "datatype.h" #include "errorcode.h" #include #include #include #include #include /** * LCP array class */ class LCP { private: /// Compacted array /* std::vector _p_array; */ /* std::vector _idx_array; */ /* std::vector _val_array; */ Byte1 *_p_array; UInt32 *_idx_array; UInt32 *_val_array; UInt32 _size; bool _is_compact; UInt32 *_beg; UInt32 *_end; UInt32 *_cache; /* typedef std::vector::const_iterator const_itr; */ /* const_itr _beg; */ /* const_itr _end; */ /* const_itr _cache; */ UInt32 _dist; public: /// Original array - 4bytes //std::vector array; UInt32 *array; /// Constructors LCP(const UInt32 &size); /// Destructors virtual ~LCP(); /// Methods /// Compact 4n bytes array into (1n+8p) bytes arrays ErrorCode compact(void); /// Retrieve lcp array value // ErrorCode
lcp(const UInt32 &idx, UInt32 &value); UInt32 operator[] (const UInt32& idx); friend std::ostream& operator << (std::ostream& os, LCP& lcp); }; #endif kernlab/src/dspcg.c0000644000175100001440000001631214221631215013741 0ustar hornikusers#include #ifndef USE_FC_LEN_T # define USE_FC_LEN_T #endif #include extern void *xmalloc(size_t); extern double mymin(double, double); extern double mymax(double, double); /* LEVEL 1 BLAS */ /*extern double dnrm2_(int *, double *, int *);*/ /* LEVEL 2 BLAS */ /*extern int dsymv_(char *, int *, double *, double *, int *, double *, int *, double *, double *, int *);*/ /*extern void dtrsv_(char *, char *, char *, int *, double *, int *, double *, int *);*/ /* MINPACK 2 */ extern void dprsrch(int, double *, double *, double *, double *, double *, double *); extern double dprecond(int, double *, double *); extern void dtrpcg(int, double*, double *, double, double *, double, double, double *, int *, int *); void dspcg(int n, double *x, double *xl, double *xu, double *A, double *g, double delta, double rtol, double *s, int *info) { /* c ********* c c Subroutine dspcg c c This subroutine generates a sequence of approximate minimizers c for the subproblem c c min { q(x) : xl <= x <= xu }. c c The quadratic is defined by c c q(x[0]+s) = 0.5*s'*A*s + g'*s, c c where x[0] is a base point provided by the user, A is a symmetric c positive semidefinite dense matrix, and g is a vector. c c At each stage we have an approximate minimizer x[k], and generate c a direction p[k] by solving the subproblem c c min { q(x[k]+p) : || p || <= delta, s(fixed) = 0 }, c c where fixed is the set of variables fixed at x[k], delta is the c trust region bound. c c B = A(free:free), c c where free is the set of free variables at x[k]. Given p[k], c the next minimizer x[k+1] is generated by a projected search. c c The starting point for this subroutine is x[1] = x[0] + s, where c x[0] is a base point and s is the Cauchy step. c c The subroutine converges when the step s satisfies c c || (g + A*s)[free] || <= rtol*|| g[free] || c c In this case the final x is an approximate minimizer in the c face defined by the free variables. c c The subroutine terminates when the trust region bound does c not allow further progress, that is, || L'*p[k] || = delta. c In this case the final x satisfies q(x) < q(x[k]). c c parameters: c c n is an integer variable. c On entry n is the number of variables. c On exit n is unchanged. c c x is a double precision array of dimension n. c On entry x specifies the vector x. c On exit x is the final minimizer. c c xl is a double precision array of dimension n. c On entry xl is the vector of lower bounds. c On exit xl is unchanged. c c xu is a double precision array of dimension n. c On entry xu is the vector of upper bounds. c On exit xu is unchanged. c c A is a double precision array of dimension n*n. c On entry A specifies the matrix A. c On exit A is unchanged. c c g is a double precision array of dimension n. c On entry g must contain the vector g. c On exit g is unchanged. c c delta is a double precision variable. c On entry delta is the trust region size. c On exit delta is unchanged. c c rtol is a double precision variable. c On entry rtol specifies the accuracy of the final minimizer. c On exit rtol is unchanged. c c s is a double precision array of dimension n. c On entry s is the Cauchy step. c On exit s contain the final step. c c info is an integer variable. c On entry info need not be specified. c On exit info is set as follows: c c info = 1 Convergence. 
The final step s satisfies c || (g + A*s)[free] || <= rtol*|| g[free] ||, c and the final x is an approximate minimizer c in the face defined by the free variables. c c info = 2 Termination. The trust region bound does c not allow further progress. */ int i, j, nfaces, nfree, inc = 1, infotr, iters = 0, itertr; double gfnorm, gfnormf, stol = 1e-16, alpha; double one = 1, zero = 0; double *B = (double *) xmalloc(sizeof(double)*n*n); double *L = (double *) xmalloc(sizeof(double)*n*n); double *w = (double *) xmalloc(sizeof(double)*n); double *wa = (double *) xmalloc(sizeof(double)*n); double *wxl = (double *) xmalloc(sizeof(double)*n); double *wxu = (double *) xmalloc(sizeof(double)*n); int *indfree = (int *) xmalloc(sizeof(int)*n); double *gfree = (double *) xmalloc(sizeof(double)*n); /* Compute A*(x[1] - x[0]) and store in w. */ F77_CALL(dsymv)("U", &n, &one, A, &n, s, &inc, &zero, w, &inc FCONE); /* Compute the Cauchy point. */ for (j=0;j * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/I_WeightFactory.h // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 #ifndef I_WEIGHTFACTORY_H #define I_WEIGHTFACTORY_H #include "datatype.h" #include "errorcode.h" /// Weight Factory interface for string kernel class I_WeightFactory { public: /// Constructor I_WeightFactory(){} /// Destructor virtual ~I_WeightFactory(){} /// Compute edge weight between floor interval and the end of matched substring. virtual ErrorCode ComputeWeight(const UInt32 &floor_len, const UInt32 &x_len, Real &weight) = 0; }; #endif kernlab/src/isafactory.h0000644000175100001440000000306412234152620015012 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the Suffix Array based String Kernel. * * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. 
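// [Editor's sketch -- illustration only, not part of the kernlab sources.
// The I_WeightFactory interface above is the extension point of the suffix
// array string kernel: the kernel calls ComputeWeight() once per matched
// substring, passing the floor-interval length (gamma in VisSmo02) and the
// match length (tau), and sums the returned weights.  The class name
// DemoWeight is hypothetical; ErrorCode, UInt32, Real and NOERROR come from
// the package headers.]
#include "iweightfactory.h"

class DemoWeight : public I_WeightFactory {
public:
	// Weight every extra matched character equally, as ConstantWeight does.
	ErrorCode ComputeWeight(const UInt32 &floor_len, const UInt32 &x_len,
	                        Real &weight) {
		weight = Real(x_len - floor_len);
		return NOERROR;
	}
};
// [End editor's sketch.]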
* * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/I_SAFactory.h // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 //' Interface for Enhanced Suffix Array construction algorithms #ifndef I_SAFACTORY_H #define I_SAFACTORY_H #include "datatype.h" #include "errorcode.h" class I_SAFactory { public: ///Constructor I_SAFactory(){} ///Destructor virtual ~I_SAFactory(){} ///Methods virtual ErrorCode ConstructSA(SYMBOL *text, const UInt32 &len, UInt32 *&array) = 0; }; #endif kernlab/src/dtrpcg.c0000644000175100001440000001532714221631100014122 0ustar hornikusers#include #include #include #ifndef USE_FC_LEN_T # define USE_FC_LEN_T #endif #include extern void *xmalloc(size_t); /* LEVEL 1 BLAS */ /* extern int daxpy_(int *, double *, double *, int *, double *, int *); */ /* extern double ddot_(int *, double *, int *, double *, int *); */ /* extern double dnrm2_(int *, double *, int *); */ /* extern int dscal_(int *, double *, double *, int *); */ /* LEVEL 2 BLAS */ /* extern int dtrsv_(char *, char *, char *, int *, double *, int *, double *, int *); */ /* extern int dsymv_(char *, int *, double *, double *, int *, double *, int *, double *, double *, int *); */ /* MINPACK 2 */ extern void dtrqsol(int, double *, double *, double , double *); void dtrpcg(int n, double *A, double *g, double delta, double *L, double tol, double stol, double *w, int *iters, int *info) { /* c ********* c c Subroutine dtrpcg c c Given a dense symmetric positive semidefinite matrix A, this c subroutine uses a preconditioned conjugate gradient method to find c an approximate minimizer of the trust region subproblem c c min { q(s) : || L'*s || <= delta }. c c where q is the quadratic c c q(s) = 0.5*s'*A*s + g'*s, c c This subroutine generates the conjugate gradient iterates for c the equivalent problem c c min { Q(w) : || w || <= delta }. c c where Q is the quadratic defined by c c Q(w) = q(s), w = L'*s. c c Termination occurs if the conjugate gradient iterates leave c the trust region, a negative curvature direction is generated, c or one of the following two convergence tests is satisfied. c c Convergence in the original variables: c c || grad q(s) || <= tol c c Convergence in the scaled variables: c c || grad Q(w) || <= stol c c Note that if w = L'*s, then L*grad Q(w) = grad q(s). c c parameters: c c n is an integer variable. c On entry n is the number of variables. c On exit n is unchanged. c c A is a double precision array of dimension n*n. c On entry A specifies the matrix A. c On exit A is unchanged. c c g is a double precision array of dimension n. c On entry g must contain the vector g. c On exit g is unchanged. c c delta is a double precision variable. c On entry delta is the trust region size. c On exit delta is unchanged. c c L is a double precision array of dimension n*n. c On entry L need not to be specified. c On exit the lower triangular part of L contains the matrix L. c c tol is a double precision variable. c On entry tol specifies the convergence test c in the un-scaled variables. c On exit tol is unchanged c c stol is a double precision variable. c On entry stol specifies the convergence test c in the scaled variables. c On exit stol is unchanged c c w is a double precision array of dimension n. c On entry w need not be specified. c On exit w contains the final conjugate gradient iterate. c c iters is an integer variable. 
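// [Editor's sketch -- not part of the kernlab sources. dtrqsol, declared
// above, computes the step to the trust-region boundary: given w with
// ||w|| <= delta and a nonzero direction p, it returns the sigma >= 0 with
// ||w + sigma*p|| = delta, i.e. the positive root of
//     (p'p)*sigma^2 + 2*(w'p)*sigma + (w'w - delta^2) = 0.
// A standalone version of that computation; boundary_step is a hypothetical
// name, and the branch on w'p picks the cancellation-free form of the
// quadratic formula.]
#include <math.h>

static double boundary_step(int n, const double *w, const double *p,
                            double delta)
{
	double ptp = 0, wtp = 0, wtw = 0;
	for (int i = 0; i < n; i++) {
		ptp += p[i]*p[i];
		wtp += w[i]*p[i];
		wtw += w[i]*w[i];
	}
	/* radicand is nonnegative because ||w|| <= delta */
	double rad = sqrt(wtp*wtp + ptp*(delta*delta - wtw));
	if (wtp > 0)
		return (delta*delta - wtw) / (wtp + rad);
	else
		return (rad - wtp) / ptp;
}
// [End editor's sketch.]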
c On entry iters need not be specified. c On exit iters is set to the number of conjugate c gradient iterations. c c info is an integer variable. c On entry info need not be specified. c On exit info is set as follows: c c info = 1 Convergence in the original variables. c || grad q(s) || <= tol c c info = 2 Convergence in the scaled variables. c || grad Q(w) || <= stol c c info = 3 Negative curvature direction generated. c In this case || w || = delta and a direction c c of negative curvature w can be recovered by c solving L'*w = p. c c info = 4 Conjugate gradient iterates exit the c trust region. In this case || w || = delta. c c info = 5 Failure to converge within itermax(n) iterations. c c ********** */ int i, inc = 1; double one = 1, zero = 0, alpha, malpha, beta, ptq, rho; double *p, *q, *t, *r, *z, sigma, rtr, rnorm, rnorm0, tnorm; p = (double *) xmalloc(sizeof(double)*n); q = (double *) xmalloc(sizeof(double)*n); t = (double *) xmalloc(sizeof(double)*n); r = (double *) xmalloc(sizeof(double)*n); z = (double *) xmalloc(sizeof(double)*n); /* Initialize the iterate w and the residual r. Initialize the residual t of grad q to -g. Initialize the residual r of grad Q by solving L*r = -g. Note that t = L*r. */ for (i=0;i 0) alpha = rho/ptq; else alpha = 0; dtrqsol(n, w, p, delta, &sigma); /* Exit if there is negative curvature or if the iterates exit the trust region. */ if (ptq <= 0 || alpha >= sigma) { F77_CALL(daxpy)(&n, &sigma, p, &inc, w, &inc); if (ptq <= 0) *info = 3; else *info = 4; goto return0; } /* Update w and the residuals r and t. Note that t = L*r. */ malpha = -alpha; F77_CALL(daxpy)(&n, &alpha, p, &inc, w, &inc); F77_CALL(daxpy)(&n, &malpha, q, &inc, r, &inc); F77_CALL(daxpy)(&n, &malpha, z, &inc, t,&inc); /* Exit if the residual convergence test is satisfied. */ rtr = F77_CALL(ddot)(&n, r, &inc, r, &inc); rnorm = sqrt(rtr); tnorm = sqrt(F77_CALL(ddot)(&n, t, &inc, t, &inc)); if (tnorm <= tol) { *info = 1; goto return0; } if (rnorm <= stol) { *info = 2; goto return0; } /* Compute p = r + beta*p and update rho. */ beta = rtr/rho; F77_CALL(dscal)(&n, &beta, p, &inc); F77_CALL(daxpy)(&n, &one, r, &inc, p, &inc); rho = rtr; } /* iters > itermax = n */ *info = 5; return0: free(p); free(q); free(r); free(t); free(z); } kernlab/src/inductionsort.h0000644000175100001440000000554312234152620015556 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the MSufSort suffix sorting algorithm (Version 2.2). * * The Initial Developer of the Original Code is * Michael A. Maniscalco * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. * * Contributor(s): * * Michael A. 
Maniscalco * * ***** END LICENSE BLOCK ***** */ #ifndef MSUFSORT_INDUCTION_SORTING_H #define MSUFSORT_INDUCTION_SORTING_H #include "introsort.h" class InductionSortObject { public: InductionSortObject(unsigned int inductionPosition = 0, unsigned int inductionValue = 0, unsigned int suffixIndex = 0); bool operator <= (InductionSortObject & object); bool operator == (InductionSortObject & object); InductionSortObject& operator = (InductionSortObject & object); bool operator >= (InductionSortObject & object); bool operator > (InductionSortObject & object); bool operator < (InductionSortObject & object); unsigned int m_sortValue[2]; }; inline bool InductionSortObject::operator <= (InductionSortObject & object) { if (m_sortValue[0] < object.m_sortValue[0]) return true; else if (m_sortValue[0] == object.m_sortValue[0]) return (m_sortValue[1] <= object.m_sortValue[1]); return false; } inline bool InductionSortObject::operator == (InductionSortObject & object) { return ((m_sortValue[0] == object.m_sortValue[0]) && (m_sortValue[1] == object.m_sortValue[1])); } inline bool InductionSortObject::operator >= (InductionSortObject & object) { if (m_sortValue[0] > object.m_sortValue[0]) return true; else if (m_sortValue[0] == object.m_sortValue[0]) return (m_sortValue[1] >= object.m_sortValue[1]); return false; } inline InductionSortObject & InductionSortObject::operator = (InductionSortObject & object) { m_sortValue[0] = object.m_sortValue[0]; m_sortValue[1] = object.m_sortValue[1]; return *this; } inline bool InductionSortObject::operator > (InductionSortObject & object) { if (m_sortValue[0] > object.m_sortValue[0]) return true; else if (m_sortValue[0] == object.m_sortValue[0]) return (m_sortValue[1] > object.m_sortValue[1]); return false; } inline bool InductionSortObject::operator < (InductionSortObject & object) { if (m_sortValue[0] < object.m_sortValue[0]) return true; else if (m_sortValue[0] == object.m_sortValue[0]) return (m_sortValue[1] < object.m_sortValue[1]); return false; } #endif kernlab/src/lcp.cpp0000644000175100001440000001271513561512465013775 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the Suffix Array based String Kernel. * * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. 
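// [Editor's note -- illustration only, not part of the kernlab sources.
// The InductionSortObject comparison operators above implement one
// lexicographic order on a 64-bit sort key held as two 32-bit words:
// compare m_sortValue[0] first and fall back to m_sortValue[1] on a tie.
// In modern C++ the same order can be stated once with std::tie; Key64 is
// a hypothetical stand-in for the two-word key.]
#include <tuple>

struct Key64 {
	unsigned int hi, lo;   // plays the role of m_sortValue[0] / m_sortValue[1]
};

inline bool operator< (const Key64 &a, const Key64 &b)
{
	// tuples of references compare lexicographically
	return std::tie(a.hi, a.lo) < std::tie(b.hi, b.lo);
}
// [End editor's note.]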
* * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/LCP.cpp // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 // 11 Oct 2006 #ifndef LCP_CPP #define LCP_CPP #include "lcp.h" // Threshold for compacting LCP[] const Real THRESHOLD = 0.3; LCP::LCP(const UInt32 &size): _p_array(0), _idx_array(0), _val_array(0), _size(size), _is_compact(false), _beg(0), _end(0), _cache(0), _dist(0), array(new UInt32[size]){ } LCP::~LCP() { if(array) {delete [] array; array = 0;} if(_p_array) {delete [] _p_array; _p_array = 0;} if(_idx_array) {delete [] _idx_array; _idx_array = 0;} if(_val_array) {delete [] _val_array; _val_array = 0;} } /** * Compact initial/original lcp array of n elements (i.e. 4n bytes) * into a n byte array with 8 bytes of secondary storage. * */ ErrorCode LCP::compact(void){ // Validate pre-conditions //assert(!array.empty() && array.size() == _size); assert(array); // Already compact. Nothing to do if (_is_compact) return NOERROR; // Count number of lcp-values >= 255. // UInt32 idx_len = std::count_if(array.begin(), array.end(), // std::bind2nd(std::greater(),254)); #ifdef _RWSTD_NO_CLASS_PARTIAL_SPEC UInt32 idx_len = 0; std::count_if(array, array + _size, std::bind2nd(std::greater(),254), idx_len); #else UInt32 idx_len = std::count_if(array, array + _size, std::bind(std::greater(), std::placeholders::_1, 254)); #endif // Compact iff idx_len/|array| > THRESHOLD if((Real)idx_len/_size > THRESHOLD) { //std::cout<< "Not compacting " << std::endl; return NOERROR; } // std::cout<< "Compacting with : " << idx_len << std::endl; // We know how much space to use // _p_array.resize(_size); // _idx_array.resize(idx_len); // _val_array.resize(idx_len); _p_array = new Byte1[_size]; _idx_array = new UInt32[idx_len]; _val_array = new UInt32[idx_len]; // Hold pointers for later. Avoids function calls // _beg = _idx_array.begin(); // _end = _idx_array.end(); // _cache = _idx_array.begin(); _beg = _idx_array; _end = _idx_array + idx_len; _cache = _idx_array; _dist = 0; for(UInt32 i=0, j=0; i<_size; i++) { if(array[i] < 255){ _p_array[i] = array[i]; }else { _p_array[i] = 255; _idx_array[j] = i; _val_array[j] = array[i]; j++; } } //array.resize(0); // array.clear(); delete [] array; array = 0; _is_compact = true; return NOERROR; } /** * Retrieve lcp array values. * * \param idx - (IN) Index of lcp array */ UInt32 LCP::operator [] (const UInt32 &idx) { // input is valid? // assert (idx >= 0 && idx < _size); if(!_is_compact){ // LCP array has not been compacted yet! return array[idx]; } if(_p_array[idx] < 255){ // Found in primary index return (UInt32) _p_array[idx]; } // svnvish: BUGBUG // Do some caching here. 
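	// [Editor's note: the caching below exploits the mostly sequential
	//  access pattern of kernel evaluation.  _cache remembers where the
	//  last escaped index was found in _idx_array and _dist its offset
	//  from _beg, so when the next escaped index is queried the answer
	//  costs O(1); only on a miss does the code fall back to a binary
	//  search with std::lower_bound.]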
// // Now search in secondary index as last resort // std::pair< const_itr, const_itr > p = equal_range(_beg, _end, idx); // return _val_array[std::distance(_beg, p.first)]; if (++_cache == _end){ _cache = _beg; _dist = 0; }else{ _dist++; } UInt32 c_idx = *(_cache); if (c_idx == idx){ return _val_array[_dist]; } // _cache = std::equal_range(_beg, _end, idx).first; _cache = std::lower_bound(_beg, _end, idx); #ifdef _RWSTD_NO_CLASS_PARTIAL_SPEC _dist = 0; std::distance(_beg, _cache, _dist); #else _dist = std::distance(_beg, _cache); #endif //std::cout << "here" << std::endl; // _cache = equal_range(_beg, _end, idx).first; // _dist = std::distance(_beg, _cache); return _val_array[_dist]; // if (c_idx > idx){ // _cache = equal_range(_beg, _cache, idx).first; // }else{ // _cache = equal_range(_cache, _end, idx).first; // } // //_cache = p.first; // _dist = std::distance(_beg, _cache); // return _val_array[_dist]; } /** * Dump array elements to output stream. * * \param os - (IN) Output stream * \param lcp - (IN) LCP object. */ std::ostream& operator << (std::ostream& os, LCP& lcp){ for( UInt32 i = 0; i < lcp._size; i++ ){ os << "lcp[ " << i << "]: " << lcp[i] << std::endl; } return os; } #endif kernlab/src/cweight.cpp0000644000175100001440000000427412234152620014637 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the Suffix Array based String Kernel. * * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. * * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/ConstantWeight.cpp // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 // 12 Jul 2006 // 12 Oct 2006 #ifndef CWEIGHT_CPP #define CWEIGHT_CPP #include "cweight.h" #include /** * Constant weight function. Computes number of common substrings. Every * matched substring is of same weight (i.e. 1) * W(y,t) := tau - gamma * * \param floor_len - (IN) Length of floor interval of matched substring. * (cf. gamma in VisSmo02). * \param x_len - (IN) Length of the matched substring. * (cf. tau in visSmo02). * \param weight - (OUT) The weight value. * */ ErrorCode ConstantWeight::ComputeWeight(const UInt32 &floor_len, const UInt32 &x_len, Real &weight) { //' Input validation assert(x_len >= floor_len); //' x_len == floor_len when the substring found ends on an interval. weight = (x_len - floor_len); // std::cout << "floor_len : " << floor_len // << " x_len : " << x_len // << " weight : " << weight << std::endl; return NOERROR; } #endif kernlab/src/expdecayweight.cpp0000644000175100001440000000557312234152620016222 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. 
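// [Editor's sketch -- not part of the kernlab sources. LCP::compact() and
// LCP::operator[] above store lcp values below 255 directly in a byte
// array and escape larger ones with the marker 255, moving them to
// parallel (index, value) arrays searched by std::lower_bound; that is the
// "4n bytes -> 1n + 8p bytes" layout. A self-contained demo of the same
// idea; CompactArray is a hypothetical name.]
#include <vector>
#include <algorithm>
#include <cstdint>

struct CompactArray {
	std::vector<uint8_t>  small;  // value, or 255 as escape marker
	std::vector<uint32_t> idx;    // positions of escaped values, ascending
	std::vector<uint32_t> big;    // the escaped values themselves

	void build(const std::vector<uint32_t> &a) {
		small.resize(a.size());
		for (uint32_t i = 0; i < a.size(); i++) {
			if (a[i] < 255) small[i] = (uint8_t)a[i];
			else { small[i] = 255; idx.push_back(i); big.push_back(a[i]); }
		}
	}
	uint32_t get(uint32_t i) const {
		if (small[i] < 255) return small[i];   // primary hit
		// escaped: i is guaranteed to be present in idx
		return big[std::lower_bound(idx.begin(), idx.end(), i) - idx.begin()];
	}
};
// [End editor's sketch.]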
If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the Suffix Array based String Kernel. * * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. * * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/ExpDecayWeight.cpp // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 // 12 Jul 2006 #ifndef EXPDECAYWEIGHT_CPP #define EXPDECAYWEIGHT_CPP #include #include #include "expdecayweight.h" using namespace std; /** * Exponential Decay weight function. * W(y,t) := (lambda^{-gamma} - lambda^{-tau}) / (lambda - 1) * * \param floor_len - (IN) Length of floor interval of matched substring. * (cf. gamma in VisSmo02). * \param x_len - (IN) Length of the matched substring. * (cf. tau in visSmo02). * \param weight - (OUT) The weight value. * */ ErrorCode ExpDecayWeight::ComputeWeight(const UInt32 &floor_len, const UInt32 &x_len, Real &weight) // ErrorCode // ExpDecayWeight::ComputeWeight(const Real &floor_len, const Real &x_len, Real &weight) { //' Input validation assert(x_len >= floor_len); //' x_len == floor_len when the substring found ends on an interval. if(floor_len == x_len) { //' substring ended on an interval, so, get the val from val[] weight = 0.0; } else { //weight = (pow(-(floor_len-1), lambda) - pow(-x_len, lambda)) / (1-lambda); //weight = (pow(lambda,((Real)floor_len)) - pow(lambda, (Real)x_len+1)) / (1-lambda); // double a=floor_len*-1.0; // double b=x_len*-1.0; // weight = (pow(lambda,a) - pow(lambda, b)) / (lambda-1); weight = (pow(lambda,Real(-1.0*floor_len)) - pow(lambda, Real(-1.0*x_len))) / (lambda-1); } // std::cout << "floor_len : " << floor_len // << " x_len : " << x_len // << " pow1 : " << pow(lambda,-((Real)floor_len)) // << " pow2 : " << pow(lambda,-(Real)x_len) // << " weight : " << weight << std::endl; return NOERROR; } #endif kernlab/src/wkasailcp.h0000644000175100001440000000337712234152620014633 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the Suffix Array based String Kernel. * * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. 
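// [Editor's check -- not part of the kernlab sources. The closed form used
// by ExpDecayWeight above, W = (lambda^-gamma - lambda^-tau)/(lambda - 1),
// is the geometric series sum_{k=gamma+1}^{tau} lambda^-k: one
// exponentially decaying term per extra matched character.  A small
// numeric verification (gamma_ carries a trailing underscore to avoid the
// old libm gamma()):]
#include <math.h>
#include <stdio.h>

int main(void)
{
	double lambda = 2.0;
	unsigned gamma_ = 3, tau = 7;   /* floor_len and x_len */

	double closed = (pow(lambda, -(double)gamma_) -
	                 pow(lambda, -(double)tau)) / (lambda - 1);
	double direct = 0;
	for (unsigned k = gamma_ + 1; k <= tau; k++)
		direct += pow(lambda, -(double)k);

	printf("closed=%.12f direct=%.12f\n", closed, direct); /* both 0.1171875 */
	return 0;
}
// [End editor's check.]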
* * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/W_kasai_lcp.h // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 #ifndef W_KASAI_LCP_H #define W_KASAI_LCP_H #include "datatype.h" #include "errorcode.h" #include "ilcpfactory.h" #include "lcp.h" /** * Kasai et al's LCP array computation algorithm is * is slightly faster than Manzini's algorithm. However, * it needs inverse suffix array which costs extra memory. */ class W_kasai_lcp : public I_LCPFactory { public: /// Constructor W_kasai_lcp(){} /// Desctructor virtual ~W_kasai_lcp(){} /// Compute LCP array. ErrorCode ComputeLCP(const SYMBOL *text, const UInt32 &len, const UInt32 *sa, LCP& lcp); }; #endif kernlab/src/init.c0000644000175100001440000000234413271617375013623 0ustar hornikusers#include #include #include // for NULL #include /* .Call calls */ extern SEXP fullsubstringk(SEXP, SEXP, SEXP, SEXP, SEXP, SEXP); extern SEXP smo_optim(SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP); extern SEXP stringtv(SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP); extern SEXP subsequencek(SEXP, SEXP, SEXP, SEXP, SEXP, SEXP); extern SEXP substringk(SEXP, SEXP, SEXP, SEXP, SEXP, SEXP); extern SEXP tron_optim(SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP); static const R_CallMethodDef CallEntries[] = { {"fullsubstringk", (DL_FUNC) &fullsubstringk, 6}, {"smo_optim", (DL_FUNC) &smo_optim, 23}, {"stringtv", (DL_FUNC) &stringtv, 7}, {"subsequencek", (DL_FUNC) &subsequencek, 6}, {"substringk", (DL_FUNC) &substringk, 6}, {"tron_optim", (DL_FUNC) &tron_optim, 27}, {NULL, NULL, 0} }; void R_init_kernlab(DllInfo *dll) { R_registerRoutines(dll, NULL, CallEntries, NULL, NULL); R_useDynamicSymbols(dll, FALSE); } kernlab/src/ilcpfactory.h0000644000175100001440000000304512234152620015164 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the Suffix Array based String Kernel. * * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. 
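// [Editor's sketch -- not part of the kernlab sources. W_kasai_lcp above
// implements Kasai et al.'s O(n) LCP construction: walking the suffixes in
// text order, the matched prefix can shrink by at most one character from
// one suffix to the next, so the total comparison work is linear. A compact
// standalone version; kasai_lcp is a hypothetical name and sa must be a
// valid suffix array (a permutation of 0..n-1).]
#include <string>
#include <vector>

// lcp[r] = longest common prefix of the suffixes ranked r and r-1 (lcp[0]=0)
std::vector<int> kasai_lcp(const std::string &s, const std::vector<int> &sa)
{
	int n = (int)s.size(), h = 0;
	std::vector<int> rank(n), lcp(n, 0);
	for (int r = 0; r < n; r++) rank[sa[r]] = r;    // inverse suffix array
	for (int i = 0; i < n; i++) {                   // suffixes in text order
		if (rank[i] > 0) {
			int j = sa[rank[i] - 1];                // predecessor in SA order
			while (i + h < n && j + h < n && s[i + h] == s[j + h]) h++;
			lcp[rank[i]] = h;
			if (h > 0) h--;                         // Kasai's key invariant
		} else {
			h = 0;
		}
	}
	return lcp;
}
// [End editor's sketch.]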
 *
 * Contributor(s):
 *
 *   Choon Hui Teo <ChoonHui.Teo@rsise.anu.edu.au>
 *   S V N Vishwanathan <SVN.Vishwanathan@nicta.com.au>
 *
 * ***** END LICENSE BLOCK ***** */

// File    : sask/Code/I_LCPFactory.h
//
// Authors : Choon Hui Teo      (ChoonHui.Teo@rsise.anu.edu.au)
//           S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au)
//
// Created : 09 Feb 2006
//
// Updated : 24 Apr 2006

#ifndef ILCPFACTORY_H
#define ILCPFACTORY_H

#include "datatype.h"
#include "errorcode.h"
#include "lcp.h"

class I_LCPFactory
{
 public:
  /// Constructor
  I_LCPFactory(){}

  /// Destructor
  virtual ~I_LCPFactory(){}

  /// Methods
  virtual ErrorCode ComputeLCP(const SYMBOL *text, const UInt32 &length,
			       const UInt32 *sa, LCP& lcp) = 0;
};
#endif
kernlab/src/dgpnrm.c0000644000175100001440000000217211304023134014123 0ustar hornikusers#include <math.h>

double dgpnrm(int n, double *x, double *xl, double *xu, double *g)
{
/*
c     **********
c
c     Function dgpnrm
c
c     This function computes the infinite norm of the
c     projected gradient at x.
c
c     parameters:
c
c     n is an integer variable.
c       On entry n is the number of variables.
c       On exit n is unchanged.
c
c     x is a double precision array of dimension n.
c       On entry x specifies the vector x.
c       On exit x is unchanged.
c
c     xl is a double precision array of dimension n.
c       On entry xl is the vector of lower bounds.
c       On exit xl is unchanged.
c
c     xu is a double precision array of dimension n.
c       On entry xu is the vector of upper bounds.
c       On exit xu is unchanged.
c
c     g is a double precision array of dimension n.
c       On entry g specifies the gradient g.
c       On exit g is unchanged.
c
c     **********
*/
	int i;
	double norm = 0;

	for (i=0;i<n;i++)
		if (!((g[i] <= 0 && x[i] == xu[i]) || (g[i] >= 0 && x[i] == xl[i])))
			if (fabs(g[i]) > norm)
				norm = fabs(g[i]);
	return norm;
}
kernlab/src/dbreakpt.c0000644000175100001440000000417111304023134014430 0ustar hornikusersextern double mymin(double, double);
extern double mymax(double, double);

void dbreakpt(int n, double *x, double *xl, double *xu, double *w,
	      int *nbrpt, double *brptmin, double *brptmax)
{
/*
c     **********
c
c     Subroutine dbreakpt
c
c     This subroutine computes the number of break-points, and
c     the minimal and maximal break-points of the projection of
c     x + alpha*w on the n-dimensional interval [xl,xu].
c
c     parameters:
c
c     n is an integer variable.
c       On entry n is the number of variables.
c       On exit n is unchanged.
c
c     x is a double precision array of dimension n.
c       On entry x specifies the vector x.
c       On exit x is unchanged.
c
c     xl is a double precision array of dimension n.
c       On entry xl is the vector of lower bounds.
c       On exit xl is unchanged.
c
c     xu is a double precision array of dimension n.
c       On entry xu is the vector of upper bounds.
c       On exit xu is unchanged.
c
c     w is a double precision array of dimension n.
c       On entry w specifies the vector w.
c       On exit w is unchanged.
c
c     nbrpt is an integer variable.
c       On entry nbrpt need not be specified.
c       On exit nbrpt is the number of break points.
c
c     brptmin is a double precision variable
c       On entry brptmin need not be specified.
c       On exit brptmin is minimal break-point.
c
c     brptmax is a double precision variable
c       On entry brptmax need not be specified.
c       On exit brptmax is maximal break-point.
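// [Editor's sketch -- not part of the kernlab sources. dgpnrm above returns
// the infinity norm of the gradient projected onto the feasible directions
// of the box [xl, xu]: a component is zeroed exactly when its variable sits
// at a bound and the gradient pushes further outside, which is the
// first-order optimality measure used by the TRON solver.  Componentwise
// (proj_grad_component is a hypothetical name):]
static double proj_grad_component(double x, double xl, double xu, double g)
{
	if (x == xl) return (g < 0) ? g : 0;  /* at lower bound keep only g < 0 */
	if (x == xu) return (g > 0) ? g : 0;  /* at upper bound keep only g > 0 */
	return g;                             /* free variable: keep g */
}
// [End editor's sketch.]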
c c ********** */ int i; double brpt; *nbrpt = 0; for (i=0;i 0) { (*nbrpt)++; brpt = (xu[i] - x[i])/w[i]; if (*nbrpt == 1) *brptmin = *brptmax = brpt; else { *brptmin = mymin(brpt, *brptmin); *brptmax = mymax(brpt, *brptmax); } } else if (x[i] > xl[i] && w[i] < 0) { (*nbrpt)++; brpt = (xl[i] - x[i])/w[i]; if (*nbrpt == 1) *brptmin = *brptmax = brpt; else { *brptmin = mymin(brpt, *brptmin); *brptmax = mymax(brpt, *brptmax); } } if (*nbrpt == 0) *brptmin = *brptmax = 0; } kernlab/src/dcauchy.c0000644000175100001440000001172214221630254014263 0ustar hornikusers#include #ifndef USE_FC_LEN_T # define USE_FC_LEN_T #endif #include extern void *xmalloc(size_t); /* LEVEL 1 BLAS */ /* extern double ddot_(int *, double *, int *, double *, int *); extern double dnrm2_(int *, double *, int *); */ /* LEVEL 2 BLAS */ /* extern int dsymv_(char *, int *, double *, double *, int *, double *, int *, double *, double *, int *); */ /* MINPACK 2 */ extern void dbreakpt(int, double *, double *, double *, double *, int *, double *, double *); extern void dgpstep(int, double *, double *, double *, double, double *, double *); void dcauchy(int n, double *x, double *xl, double *xu, double *A, double *g, double delta, double *alpha, double *s, double *wa) { /* c ********** c c Subroutine dcauchy c c This subroutine computes a Cauchy step that satisfies a trust c region constraint and a sufficient decrease condition. c c The Cauchy step is computed for the quadratic c c q(s) = 0.5*s'*A*s + g'*s, c c where A is a symmetric matrix , and g is a vector. Given a c parameter alpha, the Cauchy step is c c s[alpha] = P[x - alpha*g] - x, c c with P the projection onto the n-dimensional interval [xl,xu]. c The Cauchy step satisfies the trust region constraint and the c sufficient decrease condition c c || s || <= delta, q(s) <= mu_0*(g'*s), c c where mu_0 is a constant in (0,1). c c parameters: c c n is an integer variable. c On entry n is the number of variables. c On exit n is unchanged. c c x is a double precision array of dimension n. c On entry x specifies the vector x. c On exit x is unchanged. c c xl is a double precision array of dimension n. c On entry xl is the vector of lower bounds. c On exit xl is unchanged. c c xu is a double precision array of dimension n. c On entry xu is the vector of upper bounds. c On exit xu is unchanged. c c A is a double precision array of dimension n*n. c On entry A specifies the matrix A. c On exit A is unchanged. c c g is a double precision array of dimension n. c On entry g specifies the gradient g. c On exit g is unchanged. c c delta is a double precision variable. c On entry delta is the trust region size. c On exit delta is unchanged. c c alpha is a double precision variable. c On entry alpha is the current estimate of the step. c On exit alpha defines the Cauchy step s[alpha]. c c s is a double precision array of dimension n. c On entry s need not be specified. c On exit s is the Cauchy step s[alpha]. c c ********** */ double one = 1, zero = 0; /* Constant that defines sufficient decrease. Interpolation and extrapolation factors. */ double mu0 = 0.01, interpf = 0.1, extrapf = 10; int search, interp, nbrpt, nsteps = 1, i, inc = 1; double alphas, brptmax, brptmin, gts, q; /* FIXME KH 2019-11-09: double *wa = (double *) xmalloc(sizeof(double)*n); */ /* Find the minimal and maximal break-point on x - alpha*g. 
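   [Editor's note, continuing this comment: the break-points bound the
   projected search that follows.  dcauchy first tries the incoming alpha;
   if the step s = P[x - alpha*g] - x leaves the trust region or fails the
   sufficient decrease test q(s) <= mu0*g'*s, alpha is repeatedly reduced
   (alpha *= interpf); otherwise alpha is grown (alpha *= extrapf) up to
   brptmax for as long as the test and the bound ||s|| <= delta keep
   holding.]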
*/ for (i=0;i delta) interp = 1; else { F77_CALL(dsymv)("U", &n, &one, A, &n, s, &inc, &zero, wa, &inc FCONE); gts = F77_CALL(ddot)(&n, g, &inc, s, &inc); q = 0.5*F77_CALL(ddot)(&n, s, &inc, wa, &inc) + gts; interp = q >= mu0*gts ? 1 : 0; } /* Either interpolate or extrapolate to find a successful step. */ if (interp) { /* Reduce alpha until a successful step is found. */ search = 1; while (search) { /* This is a crude interpolation procedure that will be replaced in future versions of the code. */ nsteps++; (*alpha) *= interpf; dgpstep(n, x, xl, xu, -(*alpha), g, s); if (F77_CALL(dnrm2)(&n, s, &inc) <= delta) { F77_CALL(dsymv)("U", &n, &one, A, &n, s, &inc, &zero, wa, &inc FCONE); gts = F77_CALL(ddot)(&n, g, &inc, s, &inc); q = 0.5 * F77_CALL(ddot)(&n, s, &inc, wa, &inc) + gts; search = q > mu0*gts ? 1 : 0; } } } else { search = 1; alphas = *alpha; /* Increase alpha until a successful step is found. */ while (search && (*alpha) <= brptmax) { /* This is a crude extrapolation procedure that will be replaced in future versions of the code. */ nsteps++; alphas = *alpha; (*alpha) *= extrapf; dgpstep(n, x, xl, xu, -(*alpha), g, s); if (F77_CALL(dnrm2)(&n, s, &inc) <= delta) { F77_CALL(dsymv)("U", &n, &one, A, &n, s, &inc, &zero, wa, &inc FCONE); gts = F77_CALL(ddot)(&n, g, &inc, s, &inc); q = 0.5 * F77_CALL(ddot)(&n, s, &inc, wa, &inc) + gts; search = q < mu0*gts ? 1 : 0; } else search = 0; } *alpha = alphas; dgpstep(n, x, xl, xu, -(*alpha), g, s); } /* FIXME KH 2019-11-09: free(wa); */ } kernlab/src/svm.cpp0000644000175100001440000025315612652176277014041 0ustar hornikusers#include #include #include #include #include #include #include #include #include #include #include "svm.h" typedef float Qfloat; typedef signed char schar; #ifndef min template inline T min(T x,T y) { return (x inline T max(T x,T y) { return (x>y)?x:y; } #endif template inline void swap(T& x, T& y) { T t=x; x=y; y=t; } template inline void clone(T*& dst, S* src, int n) { dst = new T[n]; memcpy((void *)dst,(void *)src,sizeof(T)*n); } inline double powi(double base, int times) { double tmp = base, ret = 1.0; for(int t=times; t>0; t/=2) { if(t%2==1) ret*=tmp; tmp = tmp * tmp; } return ret; } #define INF HUGE_VAL # define TAU 1e-12 #define Malloc(type,n) (type *)malloc((n)*sizeof(type)) #if 0 void info(char *fmt,...) { va_list ap; va_start(ap,fmt); //vprintf(fmt,ap); va_end(ap); } void info_flush() { fflush(stdout); } #else void info(char *fmt,...) 
{} void info_flush() {} #endif // // Kernel Cache // // l is the number of total data items // size is the cache size limit in bytes // class Cache { public: Cache(int l,long int size, int qpsize); ~Cache(); // request data [0,len) // return some position p where [p,len) need to be filled // (p >= len if nothing needs to be filled) int get_data(const int index, Qfloat **data, int len); void swap_index(int i, int j); // future_option private: int l; long int size; struct head_t { head_t *prev, *next; // a cicular list Qfloat *data; int len; // data[0,len) is cached in this entry }; head_t *head; head_t lru_head; void lru_delete(head_t *h); void lru_insert(head_t *h); }; Cache::Cache(int l_,long int size_,int qpsize):l(l_),size(size_) { head = (head_t *)calloc(l,sizeof(head_t)); // initialized to 0 size /= sizeof(Qfloat); size -= l * sizeof(head_t) / sizeof(Qfloat); size = max(size, (long int) qpsize*l); // cache must be large enough for 'qpsize' columns lru_head.next = lru_head.prev = &lru_head; } Cache::~Cache() { for(head_t *h = lru_head.next; h != &lru_head; h=h->next) free(h->data); free(head); } void Cache::lru_delete(head_t *h) { // delete from current location h->prev->next = h->next; h->next->prev = h->prev; } void Cache::lru_insert(head_t *h) { // insert to last position h->next = &lru_head; h->prev = lru_head.prev; h->prev->next = h; h->next->prev = h; } int Cache::get_data(const int index, Qfloat **data, int len) { head_t *h = &head[index]; if(h->len) lru_delete(h); int more = len - h->len; if(more > 0) { // free old space while(size < more) { head_t *old = lru_head.next; lru_delete(old); free(old->data); size += old->len; old->data = 0; old->len = 0; } // allocate new space h->data = (Qfloat *)realloc(h->data,sizeof(Qfloat)*len); size -= more; swap(h->len,len); } lru_insert(h); *data = h->data; return len; } void Cache::swap_index(int i, int j) { if(i==j) return; if(head[i].len) lru_delete(&head[i]); if(head[j].len) lru_delete(&head[j]); swap(head[i].data,head[j].data); swap(head[i].len,head[j].len); if(head[i].len) lru_insert(&head[i]); if(head[j].len) lru_insert(&head[j]); if(i>j) swap(i,j); for(head_t *h = lru_head.next; h!=&lru_head; h=h->next) { if(h->len > i) { if(h->len > j) swap(h->data[i],h->data[j]); else { // give up lru_delete(h); free(h->data); size += h->len; h->data = 0; h->len = 0; } } } } // // Kernel evaluation // // the static method k_function is for doing single kernel evaluation // the constructor of Kernel prepares to calculate the l*l kernel matrix // the member function get_Q is for getting one column from the Q Matrix // class QMatrix { public: virtual Qfloat *get_Q(int column, int len) const = 0; virtual double *get_QD() const = 0; virtual void swap_index(int i, int j) const = 0; virtual ~QMatrix() {} }; class Kernel: public QMatrix{ public: Kernel(int l, svm_node * const * x, const svm_parameter& param); virtual ~Kernel(); static double k_function(const svm_node *x, const svm_node *y, const svm_parameter& param); virtual Qfloat *get_Q(int column, int len) const = 0; virtual double *get_QD() const = 0; virtual void swap_index(int i, int j) const // no so const... 
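	// [Editor's note: swap_index is called by the solver's shrinking
	//  heuristic, which permutes data points so the active set stays
	//  contiguous.  The Kernel base class must mirror that permutation in
	//  its own state -- the input vectors x and, for the RBF/Laplace/Bessel
	//  kernels, the cached squared norms in x_square -- otherwise later
	//  kernel evaluations would read values belonging to the wrong point.]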
{ swap(x[i],x[j]); if(x_square) swap(x_square[i],x_square[j]); } protected: double (Kernel::*kernel_function)(int i, int j) const; private: const svm_node **x; double *x_square; // svm_parameter const int kernel_type; const int degree; const double gamma; const double coef0; const double lim; const double *K; const int m; static double dot(const svm_node *px, const svm_node *py); static double anova(const svm_node *px, const svm_node *py, const double sigma, const int degree); double kernel_linear(int i, int j) const { return dot(x[i],x[j]); } double kernel_poly(int i, int j) const { return powi(gamma*dot(x[i],x[j])+coef0,degree); } double kernel_rbf(int i, int j) const { return exp(-gamma*(x_square[i]+x_square[j]-2*dot(x[i],x[j]))); } double kernel_sigmoid(int i, int j) const { return tanh(gamma*dot(x[i],x[j])+coef0); } double kernel_laplace(int i, int j) const { return exp(-gamma*sqrt(fabs(x_square[i]+x_square[j]-2*dot(x[i],x[j])))); } double kernel_bessel(int i, int j) const { double bkt = gamma*sqrt(fabs(x_square[i]+x_square[j]-2*dot(x[i],x[j]))); if (bkt < 0.000001){ return 1 ; } else { return(powi(((jn((int)degree, bkt)/powi(bkt,((int)degree)))/lim),(int) coef0)); } } double kernel_anova(int i, int j) const { return anova(x[i], x[j], gamma, degree); } double kernel_spline(int i, int j) const { double result=1.0; double min; double t1,t4; const svm_node *px = x[i], *py= x[j]; // px = x[i]; // py = x[j]; while(px->index != -1 && py->index != -1) { if(px->index == py->index) { min=((px->valuevalue)?px->value:py->value); t1 = (px->value * py->value); t4 = min*min; result*=( 1.0 + t1 + (t1*min) ) - ( ((px->value+py->value)/2.0) * t4) + ((t4 * min)/3.0); } ++px; ++py; } return result; } double kernel_R(int i, int j) const { return *(K + m*i +j); } }; Kernel::Kernel(int l, svm_node * const * x_, const svm_parameter& param) :kernel_type(param.kernel_type), degree(param.degree), gamma(param.gamma), coef0(param.coef0), lim(param.lim), K(param.K), m(param.m) { switch(kernel_type) { case LINEAR: kernel_function = &Kernel::kernel_linear; break; case POLY: kernel_function = &Kernel::kernel_poly; break; case RBF: kernel_function = &Kernel::kernel_rbf; break; case SIGMOID: kernel_function = &Kernel::kernel_sigmoid; break; case LAPLACE: kernel_function = &Kernel::kernel_laplace; break; case BESSEL: kernel_function = &Kernel::kernel_bessel; break; case ANOVA: kernel_function = &Kernel::kernel_anova; break; case SPLINE: kernel_function = &Kernel::kernel_spline; break; case R: kernel_function = &Kernel::kernel_R; break; } clone(x,x_,l); if(kernel_type == RBF || kernel_type == LAPLACE || kernel_type == BESSEL) { x_square = new double[l]; for(int i=0;iindex != -1 && py->index != -1) { if(px->index == py->index) { sum += px->value * py->value; ++px; ++py; } else { if(px->index > py->index) ++py; else ++px; } } return sum; } double Kernel::anova(const svm_node *px, const svm_node *py, const double sigma, const int degree) { double sum = 0; double tv; while(px->index != -1 && py->index != -1) { if(px->index == py->index) { tv = (px->value - py->value) * (px->value - py->value); sum += exp( - sigma * tv); ++px; ++py; } else { if(px->index > py->index) { sum += exp( - sigma * (py->value * py->value)); ++py; } else { sum += exp( - sigma * (px->value * px->value)); ++px; } } } return (powi(sum,degree)); } double Kernel::k_function(const svm_node *x, const svm_node *y, const svm_parameter& param) { switch(param.kernel_type) { case LINEAR: return dot(x,y); case POLY: return 
powi(param.gamma*dot(x,y)+param.coef0,param.degree); case RBF: { double sum = 0; while(x->index != -1 && y->index !=-1) { if(x->index == y->index) { double d = x->value - y->value; sum += d*d; ++x; ++y; } else { if(x->index > y->index) { sum += y->value * y->value; ++y; } else { sum += x->value * x->value; ++x; } } } while(x->index != -1) { sum += x->value * x->value; ++x; } while(y->index != -1) { sum += y->value * y->value; ++y; } return exp(-param.gamma*sum); } case SIGMOID: return tanh(param.gamma*dot(x,y)+param.coef0); default: return 0; /* Unreachable */ } } // Generalized SMO+SVMlight algorithm // Solves: // // min 0.5(\alpha^T Q \alpha) + p^T \alpha // // y^T \alpha = \delta // y_i = +1 or -1 // 0 <= alpha_i <= Cp for y_i = 1 // 0 <= alpha_i <= Cn for y_i = -1 // // Given: // // Q, p, y, Cp, Cn, and an initial feasible point \alpha // l is the size of vectors and matrices // eps is the stopping criterion // // solution will be put in \alpha, objective value will be put in obj // class Solver { public: Solver() {}; virtual ~Solver() {}; struct SolutionInfo { double obj; double rho; double upper_bound_p; double upper_bound_n; double r; // for Solver_NU }; void Solve(int l, const QMatrix& Q, const double *p_, const schar *y_, double *alpha_, double Cp, double Cn, double eps, SolutionInfo* si, int shrinking); protected: int active_size; schar *y; double *G; // gradient of objective function enum { LOWER_BOUND, UPPER_BOUND, FREE }; char *alpha_status; // LOWER_BOUND, UPPER_BOUND, FREE double *alpha; const QMatrix *Q; const double *QD; double eps; double Cp,Cn; double *p; int *active_set; double *G_bar; // gradient, if we treat free variables as 0 int l; bool unshrink; // XXX double get_C(int i) { return (y[i] > 0)? Cp : Cn; } void update_alpha_status(int i) { if(alpha[i] >= get_C(i)) alpha_status[i] = UPPER_BOUND; else if(alpha[i] <= 0) alpha_status[i] = LOWER_BOUND; else alpha_status[i] = FREE; } bool is_upper_bound(int i) { return alpha_status[i] == UPPER_BOUND; } bool is_lower_bound(int i) { return alpha_status[i] == LOWER_BOUND; } bool is_free(int i) { return alpha_status[i] == FREE; } void swap_index(int i, int j); void reconstruct_gradient(); virtual int select_working_set(int &i, int &j); virtual double calculate_rho(); virtual void do_shrinking(); private: bool be_shrunk(int i, double Gmax1, double Gmax2); }; void Solver::swap_index(int i, int j) { Q->swap_index(i,j); swap(y[i],y[j]); swap(G[i],G[j]); swap(alpha_status[i],alpha_status[j]); swap(alpha[i],alpha[j]); swap(p[i],p[j]); swap(active_set[i],active_set[j]); swap(G_bar[i],G_bar[j]); } void Solver::reconstruct_gradient() { // reconstruct inactive elements of G from G_bar and free variables if(active_size == l) return; int i,j; int nr_free = 0; for(j=active_size;j 2*active_size*(l-active_size)) { for(i=active_size;iget_Q(i,active_size); for(j=0;jget_Q(i,l); double alpha_i = alpha[i]; for(j=active_size;jl = l; this->Q = &Q; QD=Q.get_QD(); clone(p, p_,l); clone(y, y_,l); clone(alpha,alpha_,l); this->Cp = Cp; this->Cn = Cn; this->eps = eps; unshrink = false; // initialize alpha_status { alpha_status = new char[l]; for(int i=0;iINT_MAX/100 ? 
INT_MAX : 100*l); int counter = min(l,1000)+1; while(iter < max_iter) { // show progress and do shrinking if(--counter == 0) { counter = min(l,1000); if(shrinking) do_shrinking(); } int i,j; if(select_working_set(i,j)!=0) { // reconstruct the whole gradient reconstruct_gradient(); // reset active set size and check active_size = l; if(select_working_set(i,j)!=0) break; else counter = 1; // do shrinking next iteration } ++iter; // update alpha[i] and alpha[j], handle bounds carefully const Qfloat *Q_i = Q.get_Q(i,active_size); const Qfloat *Q_j = Q.get_Q(j,active_size); double C_i = get_C(i); double C_j = get_C(j); double old_alpha_i = alpha[i]; double old_alpha_j = alpha[j]; if(y[i]!=y[j]) { double quad_coef = QD[i]+QD[j]+2*Q_i[j]; if (quad_coef <= 0) quad_coef = TAU; double delta = (-G[i]-G[j])/quad_coef; double diff = alpha[i] - alpha[j]; alpha[i] += delta; alpha[j] += delta; if(diff > 0) { if(alpha[j] < 0) { alpha[j] = 0; alpha[i] = diff; } } else { if(alpha[i] < 0) { alpha[i] = 0; alpha[j] = -diff; } } if(diff > C_i - C_j) { if(alpha[i] > C_i) { alpha[i] = C_i; alpha[j] = C_i - diff; } } else { if(alpha[j] > C_j) { alpha[j] = C_j; alpha[i] = C_j + diff; } } } else { double quad_coef = QD[i]+QD[j]-2*Q_i[j]; if (quad_coef <= 0) quad_coef = TAU; double delta = (G[i]-G[j])/quad_coef; double sum = alpha[i] + alpha[j]; alpha[i] -= delta; alpha[j] += delta; if(sum > C_i) { if(alpha[i] > C_i) { alpha[i] = C_i; alpha[j] = sum - C_i; } } else { if(alpha[j] < 0) { alpha[j] = 0; alpha[i] = sum; } } if(sum > C_j) { if(alpha[j] > C_j) { alpha[j] = C_j; alpha[i] = sum - C_j; } } else { if(alpha[i] < 0) { alpha[i] = 0; alpha[j] = sum; } } } // update G double delta_alpha_i = alpha[i] - old_alpha_i; double delta_alpha_j = alpha[j] - old_alpha_j; for(int k=0;k= max_iter) { if(active_size < l) { // reconstruct the whole gradient to calculate objective value reconstruct_gradient(); active_size = l; } } // calculate rho si->rho = calculate_rho(); // calculate objective value { double v = 0; int i; for(i=0;iobj = v/2; } // put back the solution { for(int i=0;iupper_bound_p = Cp; si->upper_bound_n = Cn; delete[] p; delete[] y; delete[] alpha; delete[] alpha_status; delete[] active_set; delete[] G; delete[] G_bar; } // return 1 if already optimal, return 0 otherwise int Solver::select_working_set(int &out_i, int &out_j) { // return i,j such that // i: maximizes -y_i * grad(f)_i, i in I_up(\alpha) // j: minimizes the decrease of obj value // (if quadratic coefficeint <= 0, replace it with tau) // -y_j*grad(f)_j < -y_i*grad(f)_i, j in I_low(\alpha) double Gmax = -INF; double Gmax2 = -INF; int Gmax_idx = -1; int Gmin_idx = -1; double obj_diff_min = INF; for(int t=0;t= Gmax) { Gmax = -G[t]; Gmax_idx = t; } } else { if(!is_lower_bound(t)) if(G[t] >= Gmax) { Gmax = G[t]; Gmax_idx = t; } } int i = Gmax_idx; const Qfloat *Q_i = NULL; if(i != -1) // NULL Q_i not accessed: Gmax=-INF if i=-1 Q_i = Q->get_Q(i,active_size); for(int j=0;j= Gmax2) Gmax2 = G[j]; if (grad_diff > 0) { double obj_diff; double quad_coef = QD[i]+QD[j]-2.0*y[i]*Q_i[j]; if (quad_coef > 0) obj_diff = -(grad_diff*grad_diff)/quad_coef; else obj_diff = -(grad_diff*grad_diff)/TAU; if (obj_diff <= obj_diff_min) { Gmin_idx=j; obj_diff_min = obj_diff; } } } } else { if (!is_upper_bound(j)) { double grad_diff= Gmax-G[j]; if (-G[j] >= Gmax2) Gmax2 = -G[j]; if (grad_diff > 0) { double obj_diff; double quad_coef = QD[i]+QD[j]+2.0*y[i]*Q_i[j]; if (quad_coef > 0) obj_diff = -(grad_diff*grad_diff)/quad_coef; else obj_diff = -(grad_diff*grad_diff)/TAU; if 
(obj_diff <= obj_diff_min) { Gmin_idx=j; obj_diff_min = obj_diff; } } } } } if(Gmax+Gmax2 < eps) return 1; out_i = Gmax_idx; out_j = Gmin_idx; return 0; } bool Solver::be_shrunk(int i, double Gmax1, double Gmax2) { if(is_upper_bound(i)) { if(y[i]==+1) return(-G[i] > Gmax1); else return(-G[i] > Gmax2); } else if(is_lower_bound(i)) { if(y[i]==+1) return(G[i] > Gmax2); else return(G[i] > Gmax1); } else return(false); } void Solver::do_shrinking() { int i; double Gmax1 = -INF; // max { -y_i * grad(f)_i | i in I_up(\alpha) } double Gmax2 = -INF; // max { y_i * grad(f)_i | i in I_low(\alpha) } // find maximal violating pair first for(i=0;i= Gmax1) Gmax1 = -G[i]; } if(!is_lower_bound(i)) { if(G[i] >= Gmax2) Gmax2 = G[i]; } } else { if(!is_upper_bound(i)) { if(-G[i] >= Gmax2) Gmax2 = -G[i]; } if(!is_lower_bound(i)) { if(G[i] >= Gmax1) Gmax1 = G[i]; } } } if(unshrink == false && Gmax1 + Gmax2 <= eps*10) { unshrink = true; reconstruct_gradient(); active_size = l; } for(i=0;i i) { if (!be_shrunk(active_size, Gmax1, Gmax2)) { swap_index(i,active_size); break; } active_size--; } } } double Solver::calculate_rho() { double r; int nr_free = 0; double ub = INF, lb = -INF, sum_free = 0; for(int i=0;i0) r = sum_free/nr_free; else r = (ub+lb)/2; return r; } // // Solver for nu-svm classification and regression // // additional constraint: e^T \alpha = constant // class Solver_NU: public Solver { public: Solver_NU() {} void Solve(int l, const QMatrix& Q, const double *p, const schar *y, double *alpha, double Cp, double Cn, double eps, SolutionInfo* si, int shrinking) { this->si = si; Solver::Solve(l,Q,p,y,alpha,Cp,Cn,eps,si,shrinking); } private: SolutionInfo *si; int select_working_set(int &i, int &j); double calculate_rho(); bool be_shrunk(int i, double Gmax1, double Gmax2, double Gmax3, double Gmax4); void do_shrinking(); }; // return 1 if already optimal, return 0 otherwise int Solver_NU::select_working_set(int &out_i, int &out_j) { // return i,j such that y_i = y_j and // i: maximizes -y_i * grad(f)_i, i in I_up(\alpha) // j: minimizes the decrease of obj value // (if quadratic coefficeint <= 0, replace it with tau) // -y_j*grad(f)_j < -y_i*grad(f)_i, j in I_low(\alpha) double Gmaxp = -INF; double Gmaxp2 = -INF; int Gmaxp_idx = -1; double Gmaxn = -INF; double Gmaxn2 = -INF; int Gmaxn_idx = -1; int Gmin_idx = -1; double obj_diff_min = INF; for(int t=0;t= Gmaxp) { Gmaxp = -G[t]; Gmaxp_idx = t; } } else { if(!is_lower_bound(t)) if(G[t] >= Gmaxn) { Gmaxn = G[t]; Gmaxn_idx = t; } } int ip = Gmaxp_idx; int in = Gmaxn_idx; const Qfloat *Q_ip = NULL; const Qfloat *Q_in = NULL; if(ip != -1) // NULL Q_ip not accessed: Gmaxp=-INF if ip=-1 Q_ip = Q->get_Q(ip,active_size); if(in != -1) Q_in = Q->get_Q(in,active_size); for(int j=0;j= Gmaxp2) Gmaxp2 = G[j]; if (grad_diff > 0) { double obj_diff; double quad_coef = QD[ip]+QD[j]-2*Q_ip[j]; if (quad_coef > 0) obj_diff = -(grad_diff*grad_diff)/quad_coef; else obj_diff = -(grad_diff*grad_diff)/TAU; if (obj_diff <= obj_diff_min) { Gmin_idx=j; obj_diff_min = obj_diff; } } } } else { if (!is_upper_bound(j)) { double grad_diff=Gmaxn-G[j]; if (-G[j] >= Gmaxn2) Gmaxn2 = -G[j]; if (grad_diff > 0) { double obj_diff; double quad_coef = QD[in]+QD[j]-2*Q_in[j]; if (quad_coef > 0) obj_diff = -(grad_diff*grad_diff)/quad_coef; else obj_diff = -(grad_diff*grad_diff)/TAU; if (obj_diff <= obj_diff_min) { Gmin_idx=j; obj_diff_min = obj_diff; } } } } } if(max(Gmaxp+Gmaxp2,Gmaxn+Gmaxn2) < eps) return 1; if (y[Gmin_idx] == +1) out_i = Gmaxp_idx; else out_i = Gmaxn_idx; out_j = Gmin_idx; return 
0; } bool Solver_NU::be_shrunk(int i, double Gmax1, double Gmax2, double Gmax3, double Gmax4) { if(is_upper_bound(i)) { if(y[i]==+1) return(-G[i] > Gmax1); else return(-G[i] > Gmax4); } else if(is_lower_bound(i)) { if(y[i]==+1) return(G[i] > Gmax2); else return(G[i] > Gmax3); } else return(false); } void Solver_NU::do_shrinking() { double Gmax1 = -INF; // max { -y_i * grad(f)_i | y_i = +1, i in I_up(\alpha) } double Gmax2 = -INF; // max { y_i * grad(f)_i | y_i = +1, i in I_low(\alpha) } double Gmax3 = -INF; // max { -y_i * grad(f)_i | y_i = -1, i in I_up(\alpha) } double Gmax4 = -INF; // max { y_i * grad(f)_i | y_i = -1, i in I_low(\alpha) } // find maximal violating pair first int i; for(i=0;i Gmax1) Gmax1 = -G[i]; } else if(-G[i] > Gmax4) Gmax4 = -G[i]; } if(!is_lower_bound(i)) { if(y[i]==+1) { if(G[i] > Gmax2) Gmax2 = G[i]; } else if(G[i] > Gmax3) Gmax3 = G[i]; } } if(unshrink == false && max(Gmax1+Gmax2,Gmax3+Gmax4) <= eps*10) { unshrink = true; reconstruct_gradient(); active_size = l; } for(i=0;i i) { if (!be_shrunk(active_size, Gmax1, Gmax2, Gmax3, Gmax4)) { swap_index(i,active_size); break; } active_size--; } } } double Solver_NU::calculate_rho() { int nr_free1 = 0,nr_free2 = 0; double ub1 = INF, ub2 = INF; double lb1 = -INF, lb2 = -INF; double sum_free1 = 0, sum_free2 = 0; for(int i=0;i 0) r1 = sum_free1/nr_free1; else r1 = (ub1+lb1)/2; if(nr_free2 > 0) r2 = sum_free2/nr_free2; else r2 = (ub2+lb2)/2; si->r = (r1+r2)/2; return (r1-r2)/2; } /////////////////// BSVM code class Solver_SPOC { public: Solver_SPOC() {}; ~Solver_SPOC() {}; void Solve(int l, const Kernel& Q, double *alpha_, short *y_, double *C_, double eps, int shrinking, int nr_class); private: int active_size; double *G; // gradient of objective function short *y; bool *alpha_status; // free:true, bound:false double *alpha; const Kernel *Q; double eps; double *C; int *active_set; int l, nr_class; bool unshrinked; double get_C(int i, int m) { if (y[i] == m) return C[m]; return 0; } void update_alpha_status(int i, int m) { if(alpha[i*nr_class+m] >= get_C(i, m)) alpha_status[i*nr_class+m] = false; else alpha_status[i*nr_class+m] = true; } void swap_index(int i, int j); double select_working_set(int &q); void solve_sub_problem(double A, double *B, double C, double *nu); void reconstruct_gradient(); void do_shrinking(); }; void Solver_SPOC::swap_index(int i, int j) { Q->swap_index(i, j); swap(y[i], y[j]); swap(active_set[i], active_set[j]); for (int m=0;mget_Q(i,l); double alpha_i_m = alpha[i*nr_class+m]; for (int j=active_size;jl = l; this->nr_class = nr_class; this->Q = &Q; clone(y,y_,l); clone(alpha,alpha_,l*nr_class); C = C_; this->eps = eps; unshrinked = false; int i, m, q, old_q = -1; // initialize alpha_status { alpha_status = new bool[l*nr_class]; for(i=0;i 0) solve_sub_problem(A, B, C[y[q]], nu); else { i = 0; for (m=1;m B[i]) i = m; nu[i] = -C[y[q]]; } nu[y[q]] += C[y[q]]; for (m=0;m 1e-12) #endif { alpha[q*nr_class+m] = nu[m]; update_alpha_status(q, m); for (i=0;i 0) nSV++; } //info("\noptimization finished, #iter = %d, obj = %lf\n",iter, obj); // info("nSV = %d, nFREE = %d\n",nSV,nFREE); // put back the solution { for(int i=0;i vio_q) { q = i; vio_q = lb - ub; } } return vio_q; } void Solver_SPOC::do_shrinking() { int i, m; double Gm = select_working_set(i); if (Gm < eps) return; // shrink for (i=0;i= th) goto out; for (m++;m= th) goto out; --active_size; swap_index(i, active_size); --i; out: ; } // unshrink, check all variables again before final iterations if (unshrinked || Gm > 10*eps) return; unshrinked = 
true; reconstruct_gradient(); for (i=l-1;i>=active_size;i--) { double *G_i = &G[i*nr_class]; double th = G_i[y[i]] - Gm/2; for (m=0;m= th) goto out1; for (m++;m= th) goto out1; swap_index(i, active_size); ++active_size; ++i; out1: ; } } int compar(const void *a, const void *b) { if (*(double *)a > *(double *)b) return -1; else if (*(double *)a < *(double *)b) return 1; return 0; } void Solver_SPOC::solve_sub_problem(double A, double *B, double C, double *nu) { int r; double *D; clone(D, B, nr_class+1); qsort(D, nr_class, sizeof(double), compar); D[nr_class] = -INF; double phi = D[0] - A*C; for (r=0;phi<(r+1)*D[r+1];r++) phi += D[r+1]; delete[] D; phi /= (r+1); for (r=0;r 0)? Cp : Cn; } void update_alpha_status(int i) { if(alpha[i] >= get_C(i)) alpha_status[i] = UPPER_BOUND; else if(alpha[i] <= 0) alpha_status[i] = LOWER_BOUND; else alpha_status[i] = FREE; } bool is_upper_bound(int i) { return alpha_status[i] == UPPER_BOUND; } bool is_lower_bound(int i) { return alpha_status[i] == LOWER_BOUND; } bool is_free(int i) { return alpha_status[i] == FREE; } virtual void swap_index(int i, int j); virtual void reconstruct_gradient(); virtual void shrink_one(int k); virtual void unshrink_one(int k); double select_working_set(int &q); void do_shrinking(); private: double Cp, Cn; double *b; schar *y; }; void Solver_B::swap_index(int i, int j) { Q->swap_index(i,j); swap(y[i],y[j]); swap(G[i],G[j]); swap(alpha_status[i],alpha_status[j]); swap(alpha[i],alpha[j]); swap(b[i],b[j]); swap(active_set[i],active_set[j]); swap(G_bar[i],G_bar[j]); } void Solver_B::reconstruct_gradient() { // reconstruct inactive elements of G from G_bar and free variables if(active_size == l) return; int i; for(i=active_size;iget_Q(i,l); double alpha_i = alpha[i]; for(int j=active_size;jl = l; this->Q = &Q; b = b_; clone(y, y_, l); clone(alpha,alpha_,l); this->Cp = Cp; this->Cn = Cn; this->eps = eps; this->qpsize = qpsize; unshrinked = false; // initialize alpha_status { alpha_status = new char[l]; for(int i=0;i1e-12) { alpha[working_set[i]] = qp.x[i]; Qfloat *QB_i = QB[i]; for(j=0;jobj = v/2; } // juggle everything back /*{ for(int i=0;iupper_bound = new double[2]; si->upper_bound[0] = Cp; si->upper_bound[1] = Cn; // info("\noptimization finished, #iter = %d\n",iter); // put back the solution { for(int i=0;i= positive_max[j]) break; positive_max[j-1] = positive_max[j]; positive_set[j-1] = positive_set[j]; } positive_max[j-1] = v; positive_set[j-1] = i; } } for (i=0;i0) continue; } if (v > positive_max[0]) { for (j=1;j= -Gm) continue; } else continue; --active_size; shrink_one(k); --k; // look at the newcomer } // unshrink, check all variables again before final iterations if (unshrinked || Gm > eps*10) return; unshrinked = true; reconstruct_gradient(); for(k=l-1;k>=active_size;k--) { if (is_lower_bound(k)) { if (G[k] > Gm) continue; } else if (is_upper_bound(k)) { if (G[k] < -Gm) continue; } else continue; unshrink_one(k); active_size++; ++k; // look at the newcomer } } class Solver_B_linear : public Solver_B { public: Solver_B_linear() {}; ~Solver_B_linear() {}; int Solve(int l, svm_node * const * x_, double *b_, schar *y_, double *alpha_, double *w, double Cp, double Cn, double eps, SolutionInfo* si, int shrinking, int qpsize); private: double get_C(int i) { return (y[i] > 0)? 
Cp : Cn; } void swap_index(int i, int j); void reconstruct_gradient(); double dot(int i, int j); double Cp, Cn; double *b; schar *y; double *w; const svm_node **x; }; double Solver_B_linear::dot(int i, int j) { const svm_node *px = x[i], *py = x[j]; double sum = 0; while(px->index != -1 && py->index != -1) { if(px->index == py->index) { sum += px->value * py->value; ++px; ++py; } else { if(px->index > py->index) ++py; else ++px; } } return sum; } void Solver_B_linear::swap_index(int i, int j) { swap(y[i],y[j]); swap(G[i],G[j]); swap(alpha_status[i],alpha_status[j]); swap(alpha[i],alpha[j]); swap(b[i],b[j]); swap(active_set[i],active_set[j]); swap(x[i], x[j]); } void Solver_B_linear::reconstruct_gradient() { int i; for(i=active_size;iindex != -1;px++) sum += w[px->index]*px->value; sum += w[0]; G[i] = y[i]*sum + b[i]; } } int Solver_B_linear::Solve(int l, svm_node * const * x_, double *b_, schar *y_, double *alpha_, double *w, double Cp, double Cn, double eps, SolutionInfo* si, int shrinking, int qpsize) { this->l = l; clone(x, x_, l); clone(b, b_, l); clone(y, y_, l); clone(alpha,alpha_,l); this->Cp = Cp; this->Cn = Cn; this->eps = eps; this->qpsize = qpsize; this->w = w; unshrinked = false; // initialize alpha_status { alpha_status = new char[l]; for(int i=0;iindex != -1;px++) sum += w[px->index]*px->value; sum += w[0]; G[i] += y[i]*sum; } } // optimization step int iter = 0; int counter = min(l*2/qpsize,2000/qpsize)+1; while(1) { // show progress and do shrinking if(--counter == 0) { counter = min(l*2/qpsize, 2000/qpsize); if(shrinking) do_shrinking(); // info("."); } int i,j,q; if (select_working_set(q) < eps) { // reconstruct the whole gradient reconstruct_gradient(); // reset active set size and check active_size = l; // info("*");info_flush(); if (select_working_set(q) < eps) break; else counter = 1; // do shrinking next iteration } if (counter == min(l*2/qpsize, 2000/qpsize)) { bool same = true; for (i=0;i1e-12) { alpha[Bi] = qp.x[i]; update_alpha_status(Bi); double yalpha = y[Bi]*d; for (const svm_node *px = x[Bi];px->index != -1;px++) w[px->index] += yalpha*px->value; w[0] += yalpha; } } for(j=0;jindex != -1;px++) sum += w[px->index]*px->value; sum += w[0]; G[j] = y[j]*sum + b[j]; } } // calculate objective value { double v = 0; int i; for(i=0;iobj = v/2; } // juggle everything back /*{ for(int i=0;iupper_bound = new double[2]; si->upper_bound[0] = Cp; si->upper_bound[1] = Cn; // info("\noptimization finished, #iter = %d\n",iter); // put back the solution { for(int i=0;iget_Q(real_i[i],real_l); double alpha_i = alpha[i], t; int y_i = y[i], yy_i = yy[i], ub, k; t = 2*alpha_i; ub = start2[yy_i*nr_class+y_i+1]; for (j=start2[yy_i*nr_class+y_i];jl = l; this->nr_class = nr_class; this->real_l = l/(nr_class - 1); this->Q = &Q; this->lin = lin; clone(y,y_,l); clone(alpha,alpha_,l); C = C_; this->eps = eps; this->qpsize = qpsize; unshrinked = false; // initialize alpha_status { alpha_status = new char[l]; for(int i=0;i 1e-12) { alpha[Bi] = qp.x[i]; Qfloat *QB_i = QB[i]; int y_Bi = y[Bi], yy_Bi = yy[Bi], ub, k; double t = 2*d; ub = start1[yy_Bi*nr_class+y_Bi+1]; for (j=start1[yy_Bi*nr_class+y_Bi];jobj = v/4; } clone(si->upper_bound,C,nr_class); //info("\noptimization finished, #iter = %d\n",iter); // put back the solution { for(int i=0;i0;i--) swap_index(start2[i], start2[i-1]); t = s + 1; for (i=nr_class*nr_class;i>t;i--) swap_index(start1[i], start1[i-1]); t = nr_class*nr_class; for (i=s+1;i<=t;i++) start1[i]++; for (i=0;i<=s;i++) start2[i]++; } // // Q matrices for various formulations 
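//
// Note on how the solvers above use these classes: once the most
// violating index i is found, select_working_set() fetches row i via
// get_Q(i, active_size) and scores every candidate j by the estimated
// objective decrease grad_diff^2 / quad_coef, where
// quad_coef = QD[i] + QD[j] - 2*Q_i[j] (clamped to TAU when it is not
// positive). A minimal sketch of the row-serving contract each Q class
// below implements, assuming the Cache and kernel_function members
// defined in this file (each class then folds in its own label signs or
// the +1 bias term):
//
//   Qfloat *get_Q(int i, int len)        // row i, columns [0, len)
//   {
//       Qfloat *data;
//       int start;
//       // ask the LRU cache; `start` columns of the row already exist
//       if ((start = cache->get_data(i, &data, len)) < len)
//           for (int j = start; j < len; j++)  // fill the missing tail only
//               data[j] = (Qfloat)((this->*kernel_function)(i, j));
//       return data;                     // valid until this row is evicted
//   }
//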
// class BSVC_Q: public Kernel { public: BSVC_Q(const svm_problem& prob, const svm_parameter& param, const schar *y_) :Kernel(prob.l, prob.x, param) { clone(y,y_,prob.l); cache = new Cache(prob.l,(int)(param.cache_size*(1<<20)),param.qpsize); QD = new double[1]; QD[0] = 1; } Qfloat *get_Q(int i, int len) const { Qfloat *data; int start; if((start = cache->get_data(i,&data,len)) < len) { for(int j=start;j*kernel_function)(i,j) + 1); } return data; } double *get_QD() const { return QD; } void swap_index(int i, int j) const { cache->swap_index(i,j); Kernel::swap_index(i,j); swap(y[i],y[j]); } ~BSVC_Q() { delete[] y; delete cache; delete[] QD; } private: schar *y; Cache *cache; double *QD; }; class BONE_CLASS_Q: public Kernel { public: BONE_CLASS_Q(const svm_problem& prob, const svm_parameter& param) :Kernel(prob.l, prob.x, param) { cache = new Cache(prob.l,(int)(param.cache_size*(1<<20)),param.qpsize); QD = new double[1]; QD[0] = 1; } Qfloat *get_Q(int i, int len) const { Qfloat *data; int start; if((start = cache->get_data(i,&data,len)) < len) { for(int j=start;j*kernel_function)(i,j) + 1; } return data; } double *get_QD() const { return QD; } ~BONE_CLASS_Q() { delete cache; delete[] QD; } private: Cache *cache; double *QD; }; class BSVR_Q: public Kernel { public: BSVR_Q(const svm_problem& prob, const svm_parameter& param) :Kernel(prob.l, prob.x, param) { l = prob.l; cache = new Cache(l,(int)(param.cache_size*(1<<20)),param.qpsize); QD = new double[1]; QD[0] = 1; sign = new schar[2*l]; index = new int[2*l]; for(int k=0;kget_data(real_i,&data,l) < l) { for(int j=0;j*kernel_function)(real_i,j) + 1; } // reorder and copy Qfloat *buf = buffer[next_buffer]; next_buffer = (next_buffer+1)%q; schar si = sign[i]; for(int j=0;j*kernel_function)(i,i); } Qfloat *get_Q(int i, int len) const { Qfloat *data; int start; if((start = cache->get_data(i,&data,len)) < len) { for(int j=start;j*kernel_function)(i,j)); } return data; } double *get_QD() const { return QD; } void swap_index(int i, int j) const { cache->swap_index(i,j); Kernel::swap_index(i,j); swap(y[i],y[j]); swap(QD[i],QD[j]); } ~SVC_Q() { delete[] y; delete cache; delete[] QD; } private: schar *y; Cache *cache; double *QD; }; class ONE_CLASS_Q: public Kernel { public: ONE_CLASS_Q(const svm_problem& prob, const svm_parameter& param) :Kernel(prob.l, prob.x, param) { cache = new Cache(prob.l,(long int)(param.cache_size*(1<<20)),param.qpsize); QD = new double[prob.l]; for(int i=0;i*kernel_function)(i,i); } Qfloat *get_Q(int i, int len) const { Qfloat *data; int start; if((start = cache->get_data(i,&data,len)) < len) { for(int j=start;j*kernel_function)(i,j); } return data; } double *get_QD() const { return QD; } void swap_index(int i, int j) const { cache->swap_index(i,j); Kernel::swap_index(i,j); swap(QD[i],QD[j]); } ~ONE_CLASS_Q() { delete cache; delete[] QD; } private: Cache *cache; double *QD; }; class SVR_Q: public Kernel { public: SVR_Q(const svm_problem& prob, const svm_parameter& param) :Kernel(prob.l, prob.x, param) { l = prob.l; cache = new Cache(l,(long int)(param.cache_size*(1<<20)),param.qpsize); QD = new double[2*l]; sign = new schar[2*l]; index = new int[2*l]; for(int k=0;k*kernel_function)(k,k); QD[k+l]=QD[k]; } buffer[0] = new Qfloat[2*l]; buffer[1] = new Qfloat[2*l]; next_buffer = 0; } void swap_index(int i, int j) const { swap(sign[i],sign[j]); swap(index[i],index[j]); swap(QD[i],QD[j]); } Qfloat *get_Q(int i, int len) const { Qfloat *data; int real_i = index[i]; if(cache->get_data(real_i,&data,l) < l) { for(int 
j=0;j*kernel_function)(real_i,j); } // reorder and copy Qfloat *buf = buffer[next_buffer]; next_buffer = 1 - next_buffer; schar si = sign[i]; for(int j=0;jsvm_type; if(svm_type != C_BSVC && svm_type != EPSILON_BSVR && svm_type != KBB && svm_type != SPOC) return "unknown svm type"; // kernel_type int kernel_type = param->kernel_type; if(kernel_type != LINEAR && kernel_type != POLY && kernel_type != RBF && kernel_type != SIGMOID && kernel_type != R && kernel_type != LAPLACE&& kernel_type != BESSEL&& kernel_type != ANOVA) return "unknown kernel type"; // cache_size,eps,C,nu,p,shrinking if(kernel_type != LINEAR) if(param->cache_size <= 0) return "cache_size <= 0"; if(param->eps <= 0) return "eps <= 0"; if(param->C <= 0) return "C <= 0"; if(svm_type == EPSILON_BSVR) if(param->p < 0) return "p < 0"; if(param->shrinking != 0 && param->shrinking != 1) return "shrinking != 0 and shrinking != 1"; if(svm_type == C_BSVC || svm_type == KBB || svm_type == SPOC) if(param->qpsize < 2) return "qpsize < 2"; if(kernel_type == LINEAR) if (param->Cbegin <= 0) return "Cbegin <= 0"; if(kernel_type == LINEAR) if (param->Cstep <= 1) return "Cstep <= 1"; return NULL; } const char *svm_check_parameter(const svm_problem *prob, const svm_parameter *param) { // svm_type int svm_type = param->svm_type; if(svm_type != C_SVC && svm_type != NU_SVC && svm_type != ONE_CLASS && svm_type != EPSILON_SVR && svm_type != NU_SVR) return "unknown svm type"; // kernel_type int kernel_type = param->kernel_type; if(kernel_type != LINEAR && kernel_type != POLY && kernel_type != RBF && kernel_type != SIGMOID && kernel_type != R && kernel_type != LAPLACE&& kernel_type != BESSEL&& kernel_type != ANOVA&& kernel_type != SPLINE) return "unknown kernel type"; // cache_size,eps,C,nu,p,shrinking if(param->cache_size <= 0) return "cache_size <= 0"; if(param->eps <= 0) return "eps <= 0"; if(svm_type == C_SVC || svm_type == EPSILON_SVR || svm_type == NU_SVR) if(param->C <= 0) return "C <= 0"; if(svm_type == NU_SVC || svm_type == ONE_CLASS || svm_type == NU_SVR) if(param->nu < 0 || param->nu > 1) return "nu < 0 or nu > 1"; if(svm_type == EPSILON_SVR) if(param->p < 0) return "p < 0"; if(param->shrinking != 0 && param->shrinking != 1) return "shrinking != 0 and shrinking != 1"; // check whether nu-svc is feasible if(svm_type == NU_SVC) { int l = prob->l; int max_nr_class = 16; int nr_class = 0; int *label = Malloc(int,max_nr_class); int *count = Malloc(int,max_nr_class); int i; for(i=0;iy[i]; int j; for(j=0;jnu*(n1+n2)/2 > min(n1,n2)) { free(label); free(count); return "specified nu is infeasible"; } } } } return NULL; } #include #include #include extern "C" { struct svm_node ** sparsify (double *x, int r, int c) { struct svm_node** sparse; int i, ii, count; sparse = (struct svm_node **) malloc (r * sizeof(struct svm_node *)); for (i = 0; i < r; i++) { /* determine nr. 
of non-zero elements */ for (count = ii = 0; ii < c; ii++) if (x[i * c + ii] != 0) count++; /* allocate memory for column elements */ sparse[i] = (struct svm_node *) malloc ((count + 1) * sizeof(struct svm_node)); /* set column elements */ for (count = ii = 0; ii < c; ii++) if (x[i * c + ii] != 0) { sparse[i][count].index = ii; sparse[i][count].value = x[i * c + ii]; count++; } /* set termination element */ sparse[i][count].index = -1; } return sparse; } struct svm_node ** transsparse (double *x, int r, int *rowindex, int *colindex) { struct svm_node** sparse; int i, ii, count = 0, nnz = 0; sparse = (struct svm_node **) malloc (r * sizeof(struct svm_node*)); for (i = 0; i < r; i++) { /* allocate memory for column elements */ nnz = rowindex[i+1] - rowindex[i]; sparse[i] = (struct svm_node *) malloc ((nnz + 1) * sizeof(struct svm_node)); /* set column elements */ for (ii = 0; ii < nnz; ii++) { sparse[i][ii].index = colindex[count]; sparse[i][ii].value = x[count]; count++; } /* set termination element */ sparse[i][ii].index = -1; } return sparse; } void tron_run(const svm_problem *prob, const svm_parameter* param, double *alpha, double *weighted_C, Solver_B::SolutionInfo* sii, int nr_class, int *count) { int l = prob->l; int i; double Cp = param->C; double Cn = param->C; if(param->nr_weight > 0) { Cp = param->C*param->weight[0]; Cn = param->C*param->weight[1]; } switch(param->svm_type) { case C_BSVC: { // double *alpha = new double[l]; double *minus_ones = new double[l]; schar *y = new schar[l]; for(i=0;iy[i] > 0) y[i] = +1; else y[i]=-1; } if (param->kernel_type == LINEAR) { double *w = new double[prob->n+1]; for (i=0;i<=prob->n;i++) w[i] = 0; Solver_B_linear s; int totaliter = 0; double Cpj = param->Cbegin, Cnj = param->Cbegin*Cn/Cp; while (Cpj < Cp) { totaliter += s.Solve(l, prob->x, minus_ones, y, alpha, w, Cpj, Cnj, param->eps, sii, param->shrinking, param->qpsize); if (Cpj*param->Cstep >= Cp) { for (i=0;i<=prob->n;i++) w[i] = 0; for (i=0;i= Cpj) alpha[i] = Cp; else if (y[i] == -1 && alpha[i] >= Cnj) alpha[i] = Cn; else alpha[i] *= Cp/Cpj; double yalpha = y[i]*alpha[i]; for (const svm_node *px = prob->x[i];px->index != -1;px++) w[px->index] += yalpha*px->value; w[0] += yalpha; } } else { for (i=0;iCstep; for (i=0;i<=prob->n;i++) w[i] *= param->Cstep; } Cpj *= param->Cstep; Cnj *= param->Cstep; } totaliter += s.Solve(l, prob->x, minus_ones, y, alpha, w, Cp, Cn, param->eps, sii, param->shrinking, param->qpsize); //info("\noptimization finished, #iter = %d\n",totaliter); delete[] w; } else { Solver_B s; s.Solve(l, BSVC_Q(*prob,*param,y), minus_ones, y, alpha, Cp, Cn, param->eps, sii, param->shrinking, param->qpsize); } // double sum_alpha=0; // for(i=0;iC*prob->l)); // for(i=0;ip - prob->y[i]; y[i] = 1; alpha2[i+l] = 0; linear_term[i+l] = param->p + prob->y[i]; y[i+l] = -1; } if (param->kernel_type == LINEAR) { double *w = new double[prob->n+1]; for (i=0;i<=prob->n;i++) w[i] = 0; struct svm_node **x = new svm_node*[2*l]; for (i=0;ix[i]; Solver_B_linear s; int totaliter = 0; double Cj = param->Cbegin; while (Cj < param->C) { totaliter += s.Solve(2*l, x, linear_term, y, alpha, w, Cj, Cj, param->eps, sii, param->shrinking, param->qpsize); if (Cj*param->Cstep >= param->C) { for (i=0;i<=prob->n;i++) w[i] = 0; for (i=0;i<2*l;i++) { if (alpha[i] >= Cj) alpha[i] = param->C; else alpha[i] *= param->C/Cj; double yalpha = y[i]*alpha[i]; for (const svm_node *px = x[i];px->index != -1;px++) w[px->index] += yalpha*px->value; w[0] += yalpha; } } else { for (i=0;i<2*l;i++) alpha[i] *= param->Cstep; for 
(i=0;i<=prob->n;i++) w[i] *= param->Cstep; } Cj *= param->Cstep; } totaliter += s.Solve(2*l, x, linear_term, y, alpha2, w, param->C, param->C, param->eps, sii, param->shrinking, param->qpsize); //info("\noptimization finished, #iter = %d\n",totaliter); } else { Solver_B s; s.Solve(2*l, BSVR_Q(*prob,*param), linear_term, y, alpha2, param->C, param->C, param->eps, sii, param->shrinking, param->qpsize); } double sum_alpha = 0; for(i=0;iC*l)); delete[] y; delete[] alpha2; delete[] linear_term; } break; case KBB: { Solver_B::SolutionInfo si; int i=0 , j=0 ,k=0 , ll = l*(nr_class - 1); double *alpha2 = Malloc(double, ll); short *y = new short[ll]; for (i=0;iy[q]; else q += count[j]; } Solver_MB s; s.Solve(ll, BONE_CLASS_Q(*prob,*param), -2, alpha2, y, weighted_C, 2*param->eps, &si, param->shrinking, param->qpsize, nr_class, count); //info("obj = %f, rho = %f\n",si.obj,0.0); int *start = Malloc(int,nr_class); start[0] = 0; for(i=1;iy[i]; } Solver_SPOC s; s.Solve(l, ONE_CLASS_Q(*prob, *param), alpha, y, weighted_C, param->eps, param->shrinking, nr_class); free(weighted_C); delete[] y; } break; } } SEXP tron_optim(SEXP x, SEXP r, SEXP c, SEXP y, SEXP K, SEXP colindex, SEXP rowindex, SEXP sparse, SEXP nclass, SEXP countc, SEXP kernel_type, SEXP svm_type, SEXP cost, SEXP eps, SEXP gamma, SEXP degree, SEXP coef0, SEXP Cbegin, SEXP Cstep, SEXP weightlabels, SEXP weights, SEXP nweights, SEXP weightedc, SEXP cache, SEXP epsilon, SEXP qpsize, SEXP shrinking ) { struct svm_parameter param; struct svm_problem prob; int i ,*count = NULL; double *alpha2 = NULL; SEXP alpha3 = NULL; int nr_class; const char* s; struct Solver_B::SolutionInfo si; param.svm_type = *INTEGER(svm_type); param.kernel_type = *INTEGER(kernel_type); param.degree = *INTEGER(degree); param.gamma = *REAL(gamma); param.coef0 = *REAL(coef0); param.cache_size = *REAL(cache); param.eps = *REAL(epsilon); param.C = *REAL(cost); param.Cbegin = *REAL(Cbegin); param.Cstep = *REAL(Cstep); param.K = REAL(K); param.qpsize = *INTEGER(qpsize); nr_class = *INTEGER(nclass); param.nr_weight = *INTEGER(nweights); if (param.nr_weight > 0) { param.weight = (double *) malloc (sizeof(double) * param.nr_weight); memcpy (param.weight, REAL(weights), param.nr_weight * sizeof(double)); param.weight_label = (int *) malloc (sizeof(int) * param.nr_weight); memcpy (param.weight_label, INTEGER(weightlabels), param.nr_weight * sizeof(int)); } param.p = *REAL(eps); param.shrinking = *INTEGER(shrinking); param.lim = 1/(gammafn(param.degree+1)*powi(2,param.degree)); /* set problem */ prob.l = *INTEGER(r); prob.n = *INTEGER(c); prob.y = (double *) malloc (sizeof(double) * prob.l); memcpy(prob.y, REAL(y), prob.l*sizeof(double)); if (*INTEGER(sparse) > 0) prob.x = transsparse(REAL(x), *INTEGER(r), INTEGER(rowindex), INTEGER(colindex)); else prob.x = sparsify(REAL(x), *INTEGER(r), *INTEGER(c)); s = svm_check_parameterb(&prob, ¶m); //if (s) //printf("%s",s); //else { double *weighted_C = Malloc(double, nr_class); memcpy(weighted_C, REAL(weightedc), nr_class*sizeof(double)); if(param.svm_type == 7) { alpha2 = (double *) malloc (sizeof(double) * prob.l*nr_class); } if(param.svm_type == 8) { count = Malloc(int, nr_class); memcpy(count, INTEGER(countc), nr_class*sizeof(int)); alpha2 = (double *) malloc (sizeof(double) * prob.l*(nr_class-1)); } if(param.svm_type == 5||param.svm_type==6) { alpha2 = (double *) malloc (sizeof(double) * prob.l); } tron_run(&prob, ¶m, alpha2, weighted_C , &si, nr_class, count); //} /* clean up memory */ if (param.nr_weight > 0) { free(param.weight); 
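/* (prob.x, released together with the result vector a few lines below,
   holds rows in the libsvm svm_node convention produced by sparsify()
   and transsparse() above: one {index, value} pair per nonzero, 0-based
   indices here, terminated by index == -1. A sketch of what one dense
   row becomes, assuming a 3-column problem:

       double dense[3] = { 0.5, 0.0, 2.0 };
       struct svm_node row[] = { {0, 0.5}, {2, 2.0}, {-1, 0.0} };

   This is the layout Solver_B_linear::dot() and the kernel evaluators
   walk.) */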
free(param.weight_label); } if(param.svm_type == 7) { PROTECT(alpha3 = allocVector(REALSXP, (nr_class*prob.l + 1))); UNPROTECT(1); for (i = 0; i < prob.l; i++) free (prob.x[i]); for (i = 0; i l; int i; switch(param->svm_type) { case C_SVC: { double Cp,Cn; double *minus_ones = new double[l]; schar *y = new schar[l]; for(i=0;iy[i] > 0) y[i] = +1; else y[i]=-1; } if(param->nr_weight > 0) { Cp = C*param->weight[0]; Cn = C*param->weight[1]; } else Cp = Cn = C; Solver s; //have to weight cost parameter for multiclass. problems s.Solve(l, SVC_Q(*prob,*param,y), minus_ones, y, alpha, Cp, Cn, param->eps, si, param->shrinking); delete[] minus_ones; delete[] y; } break; case NU_SVC: { schar *y = new schar[l]; double nu = param->nu; double sum_pos = nu*l/2; double sum_neg = nu*l/2; for(i=0;iy[i]>0) { y[i] = +1; alpha[i] = min(1.0,sum_pos); sum_pos -= alpha[i]; } else { y[i] = -1; alpha[i] = min(1.0,sum_neg); sum_neg -= alpha[i]; } double *zeros = new double[l]; for(i=0;ieps, si, param->shrinking); double r = si->r; //info("C = %f\n",1/r); for(i=0;irho /= r; si->obj /= (r*r); si->upper_bound_p = 1/r; si->upper_bound_n = 1/r; delete[] y; delete[] zeros; } break; case ONE_CLASS: { double *zeros = new double[l]; schar *ones = new schar[l]; int n = (int)(param->nu*l); // # of alpha's at upper bound // set initial alpha probably usefull for smo for(i=0;inu * l - n; for(i=n+1;ieps, si, param->shrinking); delete[] zeros; delete[] ones; } break; case EPSILON_SVR: { double *alpha2 = new double[2*l]; double *linear_term = new double[2*l]; schar *y = new schar[2*l]; for(i=0;ip - prob->y[i]; y[i] = 1; alpha2[i+l] = 0; linear_term[i+l] = param->p + prob->y[i]; y[i+l] = -1; } Solver s; s.Solve(2*l, SVR_Q(*prob,*param), linear_term, y, alpha2, param->C, param->C, param->eps, si, param->shrinking); double sum_alpha = 0; for(i=0;iC*l)); delete[] alpha2; delete[] linear_term; delete[] y; } break; case NU_SVR: { double C = param->C; double *alpha2 = new double[2*l]; double *linear_term = new double[2*l]; schar *y = new schar[2*l]; double sum = C * param->nu * l / 2; for(i=0;iy[i]; y[i] = 1; linear_term[i+l] = prob->y[i]; y[i+l] = -1; } Solver_NU s; s.Solve(2*l, SVR_Q(*prob,*param), linear_term, y, alpha2, C, C, param->eps, si, param->shrinking); //info("epsilon = %f\n",-si->r); for(i=0;i 0) { param.weight = (double *) malloc (sizeof(double) * param.nr_weight); memcpy (param.weight, REAL(weights), param.nr_weight * sizeof(double)); param.weight_label = (int *) malloc (sizeof(int) * param.nr_weight); memcpy (param.weight_label, INTEGER(weightlabels), param.nr_weight * sizeof(int)); } param.p = *REAL(eps); param.shrinking = *INTEGER(shrinking); param.lim = 1/(gammafn(param.degree+1)*powi(2,param.degree)); /* set problem */ prob.l = *INTEGER(r); prob.y = REAL(y); prob.n = *INTEGER(c); if (*INTEGER(sparse) > 0) prob.x = transsparse(REAL(x), *INTEGER(r), INTEGER(rowindex), INTEGER(colindex)); else prob.x = sparsify(REAL(x), *INTEGER(r), *INTEGER(c)); double *alpha2 = (double *) malloc (sizeof(double) * prob.l); s = svm_check_parameter(&prob, ¶m); //if (s) { //printf("%s",s); //} //else { solve_smo(&prob, ¶m, alpha2, &si, *REAL(cost), REAL(linear_term)); //} PROTECT(alpha = allocVector(REALSXP, prob.l+2)); /* clean up memory */ if (param.nr_weight > 0) { free(param.weight); free(param.weight_label); } for (i = 0; i < prob.l; i++) {free (prob.x[i]); REAL(alpha)[i] = *(alpha2+i); } free (prob.x); REAL(alpha)[prob.l] = si.rho; REAL(alpha)[prob.l+1] = si.obj; free(alpha2); UNPROTECT(1); return alpha; } } 
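/*
 * Caller-side note for the .Call() entry points above: smo_optim()
 * returns a REALSXP of length l+2 whose slots [0, l) hold the alpha
 * values, slot l holds rho and slot l+1 the objective value. A minimal
 * consumer sketch (hypothetical C caller; kernlab's R code is the real
 * consumer):
 *
 *   SEXP res = smo_optim(x, r, c, y, K, ..., shrinking);
 *   double *v     = REAL(res);
 *   int     l     = LENGTH(res) - 2;
 *   double *alpha = v;         // alpha[0..l-1]
 *   double  rho   = v[l];      // offset term b
 *   double  obj   = v[l+1];    // final objective value
 */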
kernlab/src/brweight.cpp0000644000175100001440000000435112233654617015030 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the Suffix Array based String Kernel. * * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. * * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/BoundedRangeWeight.cpp // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 // 12 Jul 2006 #ifndef BRWEIGHT_CPP #define BRWEIGHT_CPP #include "brweight.h" #include #define MIN(x,y) (((x) < (y)) ? (x) : (y)) #define MAX(x,y) (((x) > (y)) ? (x) : (y)) /** * Bounded Range weight function. * W(y,t) := max(0,min(tau,n)-gamma) * * \param floor_len - (IN) Length of floor interval of matched substring. * (cf. gamma in VisSmo02). * \param x_len - (IN) Length of the matched substring. * (cf. tau in visSmo02). * \param weight - (OUT) The weight value. * */ ErrorCode BoundedRangeWeight::ComputeWeight(const UInt32 &floor_len, const UInt32 &x_len, Real &weight) { //' Input validation assert(x_len >= floor_len); //' x_len == floor_len when the substring found ends on an interval. Real tau = (Real)x_len; Real gamma = (Real)floor_len; weight = MAX(0,MIN(tau,n)-gamma); // std::cout << "floor_len:"< * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/ConstantWeight.h // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 // 12 Jul 2006 // 12 Oct 2006 #ifndef CWEIGHT_H #define CWEIGHT_H #include "datatype.h" #include "errorcode.h" #include "iweightfactory.h" #include //' Constant weight class class ConstantWeight : public I_WeightFactory { public: /// Constructor ConstantWeight(){} /// Destructor virtual ~ConstantWeight(){} /// Compute weight ErrorCode ComputeWeight(const UInt32 &floor_len, const UInt32 &x_len, Real &weight); }; #endif kernlab/src/ctable.h0000644000175100001440000000430312234152620014075 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the Suffix Array based String Kernel. * * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). 
* Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. * * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/ChildTable.h // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 #ifndef CTABLE_H #define CTABLE_H #include #include #include "datatype.h" #include "errorcode.h" #include "lcp.h" // using namespace std; /** * ChildTable represents the parent-child relationship between * the lcp-intervals of suffix array. * Reference: AboKurOhl04 */ class ChildTable : public std::vector { private: // childtab needs lcptab to differentiate between up, down, and // nextlIndex values. LCP& _lcptab; public: // Constructors ChildTable(const UInt32 &size, LCP& lcptab): std::vector(size), _lcptab(lcptab){} // Destructor virtual ~ChildTable() {} // Get first l-index of an l-[i..j] interval ErrorCode l_idx(const UInt32 &i, const UInt32 &j, UInt32 &idx); // .up field ErrorCode up(const UInt32 &idx, UInt32 &val); // .down field ErrorCode down(const UInt32 &idx, UInt32 &val); // .next field can be retrieved by accessing the array directly. friend std::ostream& operator << (std::ostream& os, const ChildTable& ct); }; #endif kernlab/src/esa.cpp0000644000175100001440000007370512761213650013770 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the Suffix Array based String Kernel. * * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. * * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/ESA.cpp // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 // 11 Oct 2006 #ifndef ESA_CPP #define ESA_CPP #include #include #include #include #include #include #include #include #include "esa.h" #ifdef SSARRAY #ifdef __cplusplus extern "C" { #endif #include "sarray.h" #ifdef __cplusplus } #endif #else #include "wmsufsort.h" #endif #include "wkasailcp.h" #define MIN(x,y) (((x) < (y)) ? 
(x):(y)) ESA::ESA(const UInt32 & size_, SYMBOL *text_, int verb): _verb(verb), size(size_), text(text_), suftab(0), lcptab(size_), childtab(size_, lcptab), suflink(0), bcktab_depth(0), bcktab_size(0), bcktab_val(0), bcktab_key4(0), coef4(0), bcktab_key8(0), coef8(0) { I_SAFactory* sa_fac = 0; I_LCPFactory* lcp_fac = 0; //' input validation assert(size > 0); // if(text[size-1] != SENTINEL) // text = (SYMBOL*)(std::string(text)+SENTINEL).c_str(); assert(text[size-1] == SENTINEL); // CW Sanity test for (unsigned int i = 0; i < size-1 ; i++) { assert(text[i] != 0); } // for (int i = 0; i < size ; i++) { // printf("%c : %i\n", text[i], (int) text[i]); // } #if SSARRAY suftab = new int[size]; for (unsigned int i = 0; i < size - 1 ; i++) { suftab[i] = text[i]; } suftab[size-1] = 0; ssarray((int*) suftab); #else //' Construct Suffix Array if(!sa_fac){ sa_fac = new W_msufsort(); } // CW Try // size = 10; // text[size-1] = 0; suftab = new UInt32[size]; sa_fac->ConstructSA(text, size, suftab); if(sa_fac) { delete sa_fac; sa_fac = NULL; } #endif //' Compute LCP array if(!lcp_fac){ lcp_fac = new W_kasai_lcp(); } // CW lcp_fac->ComputeLCP(text, size, suftab, lcptab); lcp_fac->ComputeLCP(text, size, (UInt32 *) suftab, lcptab); if(lcp_fac) { delete lcp_fac; lcp_fac = NULL; } //' Compress LCP array lcptab.compact(); //' Construct Child Table ConstructChildTable(); #ifdef SLINK //' Construct Suffix link table //' The suffix link interval, (l-1)-[p..q] of interval l-[i..j] can be retrieved //' by following method: //' Let k be the firstlIndex of l-[i..j], p = suflink[2*k], q = suflink[2*k+1]. suflink = new UInt32[2 * size + 2]; //' extra space for extra sentinel char! memset(suflink,0,sizeof(UInt32)*(2 * size +2)); ConstructSuflink(); #else //' Threshold for constructing bucket table if(size >= 1024) ConstructBcktab(); //' Otherwise, just do plain binary search to search for suffix link interval #endif } ESA::~ESA() { //if(text) { delete text; text = 0;} if(suflink) { delete [] suflink; suflink=0; } if(suftab) { delete [] suftab; suftab=0; } if(bcktab_val) { delete [] bcktab_val; bcktab_val=0; } if(bcktab_key4) { delete [] bcktab_key4; bcktab_key4=0;} if(coef4) { delete [] coef4; coef4 = 0; } if(bcktab_key8) { delete [] bcktab_key8; bcktab_key8=0;} if(coef8) { delete [] coef8; coef8 = 0; } } /// The lcp-interval structure. Used in ESA::ConstructChildTable() class lcp_interval { public: UInt32 lcp; UInt32 lb; UInt32 rb; std::vector child; /// Constructors lcp_interval(){} lcp_interval(const UInt32 &lcp_, const UInt32 lb_, const UInt32 &rb_, lcp_interval *itv) { lcp = lcp_; lb = lb_; rb = rb_; if(itv) child.push_back(itv); } /// Destructor ~lcp_interval(){ for(UInt32 i=0; i< child.size(); i++) delete child[i]; child.clear(); } }; /** * Construct 3-fields-merged child table. */ ErrorCode ESA::ConstructChildTable(){ // Input validation assert(text); assert(suftab); //' stack for lcp-intervals std::stack lit; //' Refer to: Abo05::Algorithm 4.5.2. lcp_interval *lastInterval = 0; lcp_interval *new_itv = 0; lit.push(new lcp_interval(0, 0, 0, 0)); //' root interval // Variables to handle 0-idx bool first = true; UInt32 prev_0idx = 0; UInt32 first0idx = 0; // Loop thru and process each index. for(UInt32 idx = 1; idx < size + 1; idx++) { UInt32 tmp_lb = idx - 1; //svnvish: BUGBUG // We just assume that the lcp of size + 1 is zero. 
// This simplifies the logic of the code UInt32 lcp_idx = 0; if(idx < size){ lcp_idx = lcptab[idx]; } while (lcp_idx < lit.top()->lcp){ lastInterval = lit.top(); lit.pop(); lastInterval->rb = idx - 1; // svnvish: Begin process UInt32 n_child = lastInterval->child.size(); UInt32 i = lastInterval->lb; UInt32 j = lastInterval->rb; // idx -1 ? //Step 1: Set childtab[i].down or childtab[j+1].up to first l-index UInt32 first_l_index = i+1; if(n_child && (lastInterval->child[0]->lb == i)) first_l_index = lastInterval->child[0]->rb+1; //svnvish: BUGBUG // ec = childtab.Set_Up(lastInterval->rb+1, first_l_index); // ec = childtab.Set_Down(lastInterval->lb, first_l_index); childtab[lastInterval->rb] = first_l_index; childtab[lastInterval->lb] = first_l_index; // Now we need to set the NextlIndex fields The main problem here // is that the child intervals might not be contiguous UInt32 ptr = i+1; UInt32 child_count = 0; while(ptr < j){ UInt32 first = j; UInt32 last = j; // Get next child to process if(n_child - child_count){ first = lastInterval->child[child_count]->lb; last = lastInterval->child[child_count]->rb; child_count++; } // Eat away singleton intervals while(ptr < first){ childtab[ptr] = ptr + 1; ptr++; } // Handle an child interval and make appropriate entries in // child table ptr = last + 1; if(last < j){ childtab[first] = ptr; } } //' Free lcp_intervals for(UInt32 child_cnt = 0; child_cnt < n_child; child_cnt++) { delete lastInterval->child[child_cnt]; lastInterval->child[child_cnt] = 0; } // svnvish: End process tmp_lb = lastInterval->lb; if(lcp_idx <= lit.top()->lcp) { lit.top()->child.push_back(lastInterval); lastInterval = 0; } }// while if(lcp_idx > lit.top()->lcp) { new_itv = new lcp_interval(lcp_idx, tmp_lb,0, lastInterval); lit.push(new_itv); new_itv = 0; lastInterval = 0; } // Handle the 0-indices. // 0-indices := { i | LCP[i]=0, \forall i = 0,...,n-1} if((idx < size) && (lcp_idx == 0)) { // svnvish: BUGBUG // ec = childtab.Set_NextlIndex(prev_0_index,k); childtab[prev_0idx] = idx; prev_0idx = idx; // Handle first 0-index specially // Store in childtab[(size-1)+1].up if(first){ // svnvish: BUGBUG // ec = childtab.Set_Up(size,k); CHECKERROR(ec); first0idx = idx; first = false; } } } // for childtab[size-1] = first0idx; // svnvish: All remaining elements in the stack are ignored. // chteo: Free all remaining elements in the stack. while(!lit.empty()) { lastInterval = lit.top(); delete lastInterval; lit.pop(); } assert(lit.empty()); return NOERROR; } #ifdef SLINK /** * Get suffix link interval, [sl_i..sl_j], of a given interval, [i..j]. * * \param i - (IN) Left bound of interval [i..j] * \param j - (IN) Right bound of interval [i..j] * \param sl_i - (OUT) Left bound of suffix link interval [sl_i..sl_j] * \param sl_j - (OUT) Right bound of suffix link interval [sl_i..sl_j] */ ErrorCode ESA::GetSuflink(const UInt32 &i, const UInt32 &j, UInt32 &sl_i, UInt32 &sl_j) { //' Input validation assert(i=0 && j= (j-i)); return NOERROR; } #elif defined(LSEARCH) /** * "Linear" Search version of GetSuflink. Suffix link intervals are not stored * explicitly but searched when needed. * * Note: Slow!!! especially in the case of long and similar texts. 
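 *
 * (A suffix link maps the lcp-interval whose prefix is a.w, for a single
 *  character a, to the interval whose prefix is w, so a traversal can
 *  drop the first character of the current match without restarting
 *  from the root.)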
*/ ErrorCode ESA::GetSuflink(const UInt32 &i, const UInt32 &j, UInt32 &sl_i, UInt32 &sl_j) { //' Variables SYMBOL ch; UInt32 lcp=0; UInt32 final_lcp = 0; UInt32 lb = 0, rb = size-1; //' root interval //' First suflink interval char := Second char of original interval ch = text[suftab[i]+1]; //' lcp of suffix link interval := lcp of original interval - 1 final_lcp = 0; GetLcp(i,j,final_lcp); final_lcp = (final_lcp > 0) ? final_lcp-1 : 0; //' Searching for suffix link interval sl_i = lb; sl_j = rb; while(lcp < final_lcp) { GetIntervalByChar(lb,rb,ch,lcp,sl_i, sl_j); GetLcp(sl_i, sl_j, lcp); lb = sl_i; rb = sl_j; ch = text[suftab[i]+lcp+1]; } assert(sl_j > sl_i); assert((sl_j-sl_i) >= (j-i)); return NOERROR; } #else /** * Construct bucket table. * * \param alpahabet_size - Size of alphabet set */ ErrorCode ESA::ConstructBcktab(const UInt32 &alphabet_size) { UInt32 MAX_DEPTH = 8; //' when alphabet_size is 256 UInt32 sizeof_uint4 = 4; //' 4 bytes integer UInt32 sizeof_uint8 = 8; //' 8 bytes integer UInt32 sizeof_key = sizeof_uint8; //' Step 1: Determine the bcktab_depth for(bcktab_depth = MAX_DEPTH; bcktab_depth >0; bcktab_depth--) { bcktab_size = 0; for(UInt32 i=0; i < size; i++) if(lcptab[i] < bcktab_depth) bcktab_size++; if(bcktab_depth <= 4) sizeof_key = sizeof_uint4; if(bcktab_size <= size/(sizeof_key + sizeof_uint4)) break; } //' Step 2: Allocate memory for bcktab_key and bcktab_val. //' Step 3: Precompute coefficients for computing hash values of prefixes later. //' Step 4: Collect the prefixes with lcp <= bcktab_depth and //' convert them into hash value. if(sizeof_key == sizeof_uint4) { //' (2) bcktab_key4 = new UInt32[bcktab_size]; bcktab_val = new UInt32[bcktab_size]; assert(bcktab_key4 && bcktab_val); //' (3) coef4 = new UInt32[4]; coef4[0] = 1; for(UInt32 i=1; i < 4; i++) coef4[i] = coef4[i-1]*alphabet_size; //' (4) for(UInt32 i=0, k=0; i < size; i++) { if(lcptab[i] < bcktab_depth) { UInt32 c = MIN((size-suftab[i]), bcktab_depth); hash_value4 = 0; for(UInt32 j=0; j < c; j++) hash_value4 += text[suftab[i]+j]*coef4[bcktab_depth-1-j]; bcktab_key4[k] = hash_value4; bcktab_val[k] = i; k++; } } } else { //' (2) bcktab_key8 = new UInt64[bcktab_size]; bcktab_val = new UInt32[bcktab_size]; assert(bcktab_key8 && bcktab_val); //' (3) coef8 = new UInt64[9]; coef8[0] = 1; for(UInt32 i=1; i < 9; i++) coef8[i] = coef8[i-1]*alphabet_size; //' (4) for(UInt32 i=0, k=0; i < size; i++) { if(lcptab[i] < bcktab_depth) { UInt32 c = MIN( (size-suftab[i]), bcktab_depth); hash_value8 = 0; for(UInt32 j=0; j < c; j++) hash_value8 += text[suftab[i]+j]*coef8[bcktab_depth-1-j]; bcktab_key8[k] = hash_value8; bcktab_val[k] = i; k++; } } } //' check if bcktab in ascending order // for(UInt32 ii=1; ii= 1); //' the interval [i..j] must has at least 2 suffixes. //' Variables UInt32 left=0, mid=0, right=0, tmp_right=0; UInt32 llcp=0, mlcp=0, rlcp=0; UInt32 orig_lcp = 0; UInt32 c = 0; UInt32 offset = 0; GetLcp(i, j, orig_lcp); if(orig_lcp <= 1) { sl_i = 0; sl_j = size-1; return NOERROR; } //' Default left = 0; right = size-1; //' Make use of bcktab here. Maximum lcp value is always 1 less than bcktab_depth. //' This is because including lcp values equal to bcktab_depth will violate //' the constraint of prefix uniqueness. 
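//'
//' (Sketch of the bcktab lookup used below, matching ConstructBcktab()
//'  above: the prefix text[suftab[i]+1 .. suftab[i]+offset] is hashed as
//'
//'      h = sum_{k=0}^{offset-1} text[suftab[i]+1+k] * alphabet_size^(bcktab_depth-1-k)
//'
//'  so std::lower_bound on the sorted key array finds the first suffix
//'  carrying that prefix, and adding coef[bcktab_depth-offset] to h
//'  before std::upper_bound brackets the last one, yielding left/right
//'  bounds without a scan.)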
offset = MIN(orig_lcp-1, bcktab_depth); assert(offset>=0); if(bcktab_key4) { hash_value4 = 0; for(UInt32 cnt=0; cnt < offset; cnt++) hash_value4 += coef4[bcktab_depth-1-cnt]*text[suftab[i]+1+cnt]; //' lower bound return the exact position of of target, if found one UInt32 *p = std::lower_bound(bcktab_key4, bcktab_key4+bcktab_size, hash_value4); left = bcktab_val[p - bcktab_key4]; //' this hash value is used to find the right bound of target interval hash_value4 += coef4[bcktab_depth-offset]; //' upper bound return the smallest value > than target. UInt32 *q = std::upper_bound(p, bcktab_key4+bcktab_size, hash_value4); if(q == bcktab_key4+bcktab_size) right = size-1; else right = bcktab_val[q - bcktab_key4] - 1; } else if(bcktab_key8) { hash_value8 = 0; for(UInt32 cnt=0; cnt < offset; cnt++) hash_value8 += coef8[bcktab_depth-1-cnt]*text[suftab[i]+1+cnt]; //' lower bound return the exact position of of target, if found one UInt64 *p = std::lower_bound(bcktab_key8, bcktab_key8+bcktab_size, hash_value8); left = bcktab_val[p - bcktab_key8]; //' this hash value is used to find the right bound of target interval hash_value8 += coef8[bcktab_depth-offset]; //' upper bound return the smallest value > than target. UInt64 *q = std::upper_bound(p, bcktab_key8+bcktab_size, hash_value8); if(q == bcktab_key8+bcktab_size) right = size-1; else right = bcktab_val[q - bcktab_key8] - 1; } tmp_right = right; assert(right <= size-1); assert(right > left); offset = 0; //' Compute LEFT boundary of suflink interval Compare(left, offset, &text[suftab[i]+1+offset], orig_lcp-1-offset, llcp); llcp += offset; if(llcp < orig_lcp-1) { Compare(right, offset, &text[suftab[i]+1+offset], orig_lcp-1-offset, rlcp); rlcp += offset; c = MIN(llcp,rlcp); while(right-left > 1){ mid = (left + right)/2; Compare(mid, c, &text[suftab[i]+1+c], orig_lcp-1-c, mlcp); mlcp += c; //' if target not found yet... if(mlcp < orig_lcp-1) { if(text[suftab[mid]+mlcp] < text[suftab[i]+mlcp+1]) { left = mid; llcp = mlcp; } else { right = mid; rlcp = mlcp; } } else { //' mlcp == orig_lcp-1 assert(mlcp == orig_lcp-1); //' target found, but want to make sure it is the LEFTmost... right = mid; rlcp = mlcp; } c = MIN(llcp, rlcp); } sl_i = right; llcp = rlcp; } else { sl_i = left; } //' Compute RIGHT boundary of suflink interval right = tmp_right; left = sl_i; Compare(right, offset, &text[suftab[i]+1+offset], orig_lcp-1-offset, rlcp); rlcp += offset; if(rlcp < orig_lcp-1) { c = MIN(llcp,rlcp); while(right-left > 1){ mid = (left + right)/2; Compare(mid, c, &text[suftab[i]+1+c], orig_lcp-1-c, mlcp); mlcp += c; //' if target not found yet... if(mlcp < orig_lcp-1) { if(text[suftab[mid]+mlcp] < text[suftab[i]+mlcp+1]) { //' target is on the right half left = mid; llcp = mlcp; } else { //' target is on the left half right = mid; rlcp = mlcp; } } else { //' mlcp == orig_lcp-1 assert(mlcp == orig_lcp-1); //' target found, but want to make sure it is the RIGHTmost... left = mid; llcp = mlcp; } c = MIN(llcp, rlcp); } sl_j = left; } else { sl_j = right; } assert(sl_i < sl_j); return NOERROR; } #endif /** * Find suffix link interval, [p..q], for a child interval, [c_i..c_j], given its * parent interval [p_i..p_j]. * * Pre : 1. Suffix link interval for parent interval has been computed. * 2. [child_i..child_j] is not a singleton interval. * * * \param parent_i - (IN) Left bound of parent interval. * \param parent_j - (IN) Right bound of parent interval. * \param child_i - (IN) Left bound of child interval. * \param child_j - (IN) Right bound of child interval. 
* \param sl_i - (OUT) Left bound of suffix link interval of child interval * \param sl_j - (OUT) Right bound of suffix link interval of child interval */ ErrorCode ESA::FindSuflink(const UInt32 &parent_i, const UInt32 &parent_j, const UInt32 &child_i, const UInt32 &child_j, UInt32 &sl_i, UInt32 &sl_j) { assert(child_i != child_j); //' Variables SYMBOL ch; UInt32 tmp_i = 0; UInt32 tmp_j = 0; UInt32 lcp_child = 0; UInt32 lcp_parent = 0; UInt32 lcp_sl = 0; //' Step 1: Get suffix link interval of parent interval and its lcp value. //' 2: Get lcp values of parent and child intervals. //' Shortcut! if(parent_i ==0 && parent_j == size-1) { //' this is root interval //' (1) sl_i = 0; sl_j = size-1; lcp_sl = 0; //' (2) lcp_parent = 0; GetLcp(child_i,child_j,lcp_child); assert(lcp_child > 0); } else { //' (1) GetSuflink(parent_i,parent_j,sl_i,sl_j); GetLcp(sl_i, sl_j, lcp_sl); //' (2) GetLcp(parent_i,parent_j,lcp_parent); GetLcp(child_i,child_j,lcp_child); assert(lcp_child > 0); } //' Traversing down the subtree of [sl_i..sl_j] and looking for //' the suffix link interval of child interval. while (lcp_sl < lcp_child-1) { //' The character that we want to look for in suflink interval. ch = text[suftab[child_i]+lcp_sl+1]; tmp_i = sl_i; tmp_j = sl_j; GetIntervalByChar(tmp_i, tmp_j, ch, lcp_sl, sl_i, sl_j); assert(sl_i > q; //' The interval queue std::pair interval; //' Step 0: Push root onto queue. And define itself as its suflink. q.push(std::make_pair((unsigned int)0,size-1)); UInt32 idx = 0; childtab.l_idx(0,size-1,idx); suflink[idx+idx] = 0; //' left bound of suffix link interval suflink[idx+idx+1] = size-1; //' right bound of suffix link interval //' Step 1: Breadth first traversal. while (!q.empty()) { //' Step 1.1: Pop interval from queue. interval = q.front(); q.pop(); //' Step 1.2: For each non-singleton child-intervals, [p..q], "find" its //' suffix link interval and then "push" it onto the interval queue. UInt32 i=0,j=0, sl_i=0, sl_j=0, start_idx=interval.first; do { //' Notes: interval.first := left bound of suffix link interval //' interval.second := right bound of suffix link interval assert(interval.first>=0 && interval.second < size); GetIntervalByIndex(interval.first, interval.second, start_idx, i, j); if(j > i) { //' [i..j] is non-singleton interval FindSuflink(interval.first, interval.second, i,j, sl_i, sl_j); assert(!(sl_i == i && sl_j == j)); //' Store suflink of [i..j] UInt32 idx=0; childtab.l_idx(i, j, idx); suflink[idx+idx] = sl_i; suflink[idx+idx+1] = sl_j; //' Push suflink interval onto queue q.push(std::make_pair(i,j)); } start_idx = j+1; //' prepare to get next child interval }while(start_idx < interval.second); } return NOERROR; } /** * Get all child-intervals, including singletons. * Store all non-singleton intervals onto #q#, where interval is defined as * (i,j) where i and j are left and right bounds. * * \param lb - (IN) Left bound of current interval. * \param rb - (IN) Right bound of current interval. * \param q - (OUT) Storage for intervals. 
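 *
 * Usage sketch (hypothetical caller; enumerates one level of the
 * lcp-interval tree, assuming the members used elsewhere in this file):
 *
 *   std::vector<std::pair<UInt32, UInt32> > kids;
 *   esa.GetChildIntervals(0, esa.size - 1, kids);  // children of root
 *   for (UInt32 k = 0; k < kids.size(); k++)
 *       visit(kids[k].first, kids[k].second);      // visit() hypothetical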
*/ ErrorCode ESA::GetChildIntervals(const UInt32 &lb, const UInt32 &rb, std::vector > &q) { //' Variables UInt32 k=0; //' general index UInt32 i=0,j=0; //' for interval [i..j] //' Input validation assert(rb-lb >= 1); k = lb; do { assert(lb>=0 && rb 0) { if(j > i) { // chteo: saved 1 operation ;) [260906] //' Non-singleton interval q.push_back(std::make_pair(i,j)); } k = j+1; }while(k < rb); return NOERROR; } /** * Given an l-interval, l-[i..j] and a starting index, idx \in [i..j], * return the child-interval, k-[p..q], of l-[i..j] where p == idx. * * Reference: Abo05::algorithm 4.6.4 * * Pre: #start_idx# is a l-index or equal to parent_i. * * \param parent_i - (IN) Left bound of parent interval. * \param parent_j - (IN) Right bound of parent interval. * \param start_idx - (IN) Predefined left bound of child interval. * \param child_i - (OUT) Left bound of child interval. * \param child_j - (OUT) Right bound of child interval. * * Time complexity: O(|alphabet set|) */ ErrorCode ESA::GetIntervalByIndex(const UInt32 &parent_i, const UInt32 &parent_j, const UInt32 &start_idx, UInt32 &child_i, UInt32 &child_j) { //' Variables UInt32 lcp_child_i = 0; UInt32 lcp_child_j = 0; //' Input validation assert( (parent_i < parent_j) && (parent_i >= 0) && (parent_j < size) && (start_idx >= parent_i) && (start_idx < parent_j)); child_i = start_idx; //' #start_idx# is not and l-index, i.e. #start_idx == #parent_i# if(child_i == parent_i) { childtab.l_idx(parent_i,parent_j,child_j); child_j--; return NOERROR; } //' #start_idx# is a l-index // svnvish:BUGBUG child_j = childtab[child_i]; lcp_child_i = lcptab[child_i]; lcp_child_j = lcptab[child_j]; if(child_i < child_j && lcp_child_i == lcp_child_j) child_j--; else { //' child_i is the left bound of last child interval child_j = parent_j; } return NOERROR; } /** * Given an l-interval, l-[i..j] and a starting character, ch \in alphabet set, * return the child-interval, k-[p..q], of l-[i..j] such that text[sa[p]+depth] == ch. * * Reference: Abo05::algorithm 4.6.4 * * Post: Return [i..j]. If interval was found, i<=j, otherwise, i>j. * * \param parent_i - (IN) Left bound of parent interval. * \param parent_j - (IN) Right bound of parent interval. * \param ch - (IN) Starting character of left bound (suffix) of child interval. * \param depth - (IN) The position where #ch# is located in #text# * i.e. ch = text[suftab[parent_i]+depth]. * \param child_i - (OUT) Left bound of child interval. * \param child_j - (OUT) Right bound of child interval. * * Time complexity: O(|alphabet set|) */ ErrorCode ESA::GetIntervalByChar(const UInt32 &parent_i, const UInt32 &parent_j, const SYMBOL &ch, const UInt32 &depth, UInt32 &child_i, UInt32 &child_j) { //' Input validation assert(parent_i < parent_j && parent_i >= 0 && parent_j < size); //' Variables UInt32 idx = 0; UInt32 idx_next = 0; UInt32 lcp_idx = 0; UInt32 lcp_idx_next = 0; UInt32 lcp = 0; //' #depth# is actually equal to the following statement! //ec = GetLcp(parent_i, parent_j, lcp); CHECKERROR(ec); lcp = depth; //' Step 1: Check if #ch# falls in the initial range. if(text[suftab[parent_i]+lcp] > ch || text[suftab[parent_j]+lcp] < ch) { //' No child interval starts with #ch#, so, return undefined interval. child_i = 1; child_j = 0; return NOERROR; } //' Step 2: #ch# is in the initial range, but not necessarily exists in the range. 
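//' (Recall: an l-index of l-[i..j] is a position k in (i..j] with
//'  lcptab[k] == l; consecutive l-indices delimit the child intervals,
//'  which is exactly what the childtab walk below steps through.)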
//' Step 2.1: Get first l-index childtab.l_idx(parent_i, parent_j, idx); assert(idx > parent_i && idx <= parent_j); if(text[suftab[idx-1]+lcp] == ch) { child_i = parent_i; child_j = idx-1; return NOERROR; } //' Step 3: Look for child interval which starts with #ch# // svnvish: BUGBUG //ec = childtab.NextlIndex(idx, idx_next); CHECKERROR(ec); idx_next = childtab[idx]; lcp_idx = lcptab[idx]; lcp_idx_next = lcptab[idx_next]; while(idx < idx_next && lcp_idx == lcp_idx_next && text[suftab[idx]+lcp] < ch) { idx = idx_next; // svnvish: BUGBUG // ec = childtab.NextlIndex(idx, idx_next); CHECKERROR(ec); idx_next = childtab[idx]; lcp_idx = lcptab[idx]; lcp_idx_next = lcptab[idx_next]; } if(text[suftab[idx]+lcp] == ch) { child_i = idx; if(idx < idx_next && lcp_idx == lcp_idx_next) child_j = idx_next - 1; else child_j = parent_j; return NOERROR; } //' Child interval starts with #ch# not found child_i = 1; child_j = 0; return NOERROR; } /** * Return the lcp value of a given interval, l-[i..j]. * * Pre: [i..j] \subseteq [0..size] * * \param i - (IN) Left bound of the interval. * \param j - (IN) Right bound of the interval. * \param val - (OUT) The lcp value of the interval. */ ErrorCode ESA::GetLcp(const UInt32 &i, const UInt32 &j, UInt32 &val) { //' Input validation assert(i < j && i >= 0 && j < size); //' Variables UInt32 up, down; //' 0-[0..size-1]. This is a shortcut! if(i == 0 && j == size) { val = 0; } else { childtab.up(j+1,up); if( (i < up) && (up <= j)) { val = lcptab[up]; } else { childtab.down(i,down); val = lcptab[down]; } } return NOERROR; } /** * Compare #pattern# string to text[suftab[#idx#]..size] and return the * length of the substring matched. * * \param idx - (IN) The index of esa. * \param depth - (IN) The start position of matching mechanism. * \param pattern - (IN) The pattern string. * \param p_len - (IN) The length of #pattern#. * \param matched_len - (OUT) The length of matched substring. */ ErrorCode ESA::Compare(const UInt32 &idx, const UInt32 &depth, SYMBOL *pattern, const UInt32 &p_len, UInt32 &matched_len) { //' Variables UInt32 min=0; min = (p_len < size-(suftab[idx]+depth)) ? p_len : size-(suftab[idx]+depth); matched_len = 0; for(UInt32 k=0; k < min; k++) { if(text[suftab[idx]+depth+k] == pattern[k]) matched_len++; else break; } return NOERROR; } /** * Find the longest matching of text and pattern. * * Note: undefinded interval := [i..j] where i>j * * Post: Return "floor" and "ceil" of longest substring of pattern that exists in text. * Otherwise, that is, no substring of pattern ever exists in text, * return the starting interval, [i..j]. * * \param i - (IN) Left bound of the starting interval. * \param j - (IN) Right bound of the starting interval. * \param offset - (IN) The number of characters between the head of suffix and the * position to start matching. * \param pattern - (IN) The pattern string to match to esa. * \param p_len - (IN) The length of #pattern# * \param lb - (OUT) The left bound of the interval containing * longest matched suffix. * \param rb - (OUT) The right bound of the interval containing * longest matched suffix. * \param matched_len - (OUT) The length of the longest matched suffix. * \param floor_lb - (OUT) Left bound of floor interval of [lb..rb]. * \param floor_rb - (OUT) Right bound of floor interval of [lb..rb]. * \param floor_len - (OUT) The lcp value of floor interval. 
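 *
 * Usage sketch (hypothetical caller): match a whole pattern against the
 * array, starting from the root interval:
 *
 *   UInt32 lb, rb, mlen, flb, frb, flen;
 *   esa.ExactSuffixMatch(0, esa.size - 1, 0, pat, plen,
 *                        lb, rb, mlen, flb, frb, flen);
 *   // mlen == plen iff pat occurs in text; suftab[lb..rb] lists the hits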
*/ ErrorCode ESA::ExactSuffixMatch(const UInt32 &i, const UInt32 &j, const UInt32 &offset, SYMBOL *pattern, const UInt32 p_len, UInt32 &lb, UInt32 &rb, UInt32 &matched_len, UInt32 &floor_lb, UInt32 &floor_rb, UInt32 &floor_len) { //' Input validation assert(i != j); //' Variables UInt32 min, lcp; bool queryFound = true; //' Initial setting. floor_lb = lb = i; floor_rb = rb = j; matched_len = offset; //' Step 1: Get lcp of floor/starting interval. GetLcp(floor_lb, floor_rb, lcp); floor_len = lcp; //' Step 2: Skipping #offset# characters while(lcp < matched_len) { floor_lb = lb; floor_rb = rb; floor_len = lcp; GetIntervalByChar(floor_lb, floor_rb, pattern[lcp], lcp, lb, rb); // printf("lb, rb : %i, %i\n", lb, rb); assert(lb <= rb); if(lb == rb) break; GetLcp(lb, rb, lcp); } //' Step 3: Continue matching from the point (either an interval or singleton) we stopped. while( (lb<=rb) && queryFound ) { if(lb != rb) { GetLcp(lb, rb, lcp); min = (lcp < p_len) ? lcp : p_len; while(matched_len < min) { queryFound = (text[suftab[lb]+matched_len] == pattern[matched_len]); if(queryFound) matched_len++; else return NOERROR; } assert(matched_len == min); //' Full pattern found! if(matched_len == p_len) return NOERROR; floor_lb = lb; floor_rb = rb; floor_len = lcp; GetIntervalByChar(floor_lb, floor_rb,pattern[matched_len],matched_len,lb,rb); }else { //' lb == rb, i.e. singleton interval. min = (p_len < size-suftab[lb]) ? p_len : size-suftab[lb]; while(matched_len rb) { lb = floor_lb; rb = floor_rb; } return NOERROR; } #endif kernlab/src/expdecayweight.h0000644000175100001440000000342012234152620015654 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the Suffix Array based String Kernel. * * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. * * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/ExpDecayWeight.h // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 // 12 Jul 2006 #ifndef EXPDECAYWEIGHT_H #define EXPDECAYWEIGHT_H #include "datatype.h" #include "errorcode.h" #include "iweightfactory.h" #include class ExpDecayWeight : public I_WeightFactory { public: Real lambda; /// Constructors //' NOTE: lambda shouldn't be equal to 1, othexrwise there will be //' divide-by-zero error. 
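//' (A sketch of why, assuming the geometric form implemented in
//'  expdecayweight.cpp: summing lambda^-k over match lengths
//'  k = gamma+1, ..., tau telescopes to
//'      (lambda^-gamma - lambda^-tau) / (lambda - 1),
//'  which is undefined at lambda == 1.)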
ExpDecayWeight(const Real &lambda_=2.0):lambda(lambda_) {} /// Destructor virtual ~ExpDecayWeight(){} /// Compute weight ErrorCode ComputeWeight(const UInt32 &floor_len, const UInt32 &x_len, Real &weight); }; #endif kernlab/src/Makevars0000644000175100001440000000006011470002321014153 0ustar hornikusersPKG_LIBS = $(LAPACK_LIBS) $(BLAS_LIBS) $(FLIBS) kernlab/src/dprecond.c0000644000175100001440000000207614221630477014452 0ustar hornikusers#include #include #include #ifndef USE_FC_LEN_T # define USE_FC_LEN_T #endif #include /* LAPACK */ /* extern int dpotf2_(char *, int *, double *, int *, int *); */ double dcholfact(int n, double *A, double *L) { /* if A is p.d. , A = L*L' if A is p.s.d. , A + lambda*I = L*L'; */ int indef, i; static double lambda = 1e-3/512/512; memcpy(L, A, sizeof(double)*n*n); F77_CALL(dpotf2)("L", &n, L, &n, &indef FCONE); if (indef != 0) { memcpy(L, A, sizeof(double)*n*n); for (i=0;i #include #ifndef USE_FC_LEN_T # define USE_FC_LEN_T #endif #include /* LEVEL 1 BLAS */ /*extern double ddot_(int *, double *, int *, double *, int *); */ /* LEVEL 2 BLAS */ /*extern int dsymv_(char *, int *, double *, double *, int *, double *, int *, double *, double *, int *);*/ /* MINPACK 2 */ extern void dtron(int, double *, double *, double *, double, double, double, double, int, double); struct BQP { double eps; int n; double *x, *C, *Q, *p; }; int nfev, inc = 1; double one = 1, zero = 0, *A, *g0; int uhes(int n, double *x, double **H) { *H = A; return 0; } int ugrad(int n, double *x, double *g) { /* evaluate the gradient g = A*x + g0 */ memcpy(g, g0, sizeof(double)*n); F77_CALL(dsymv)("U", &n, &one, A, &n, x, &inc, &one, g, &inc FCONE); return 0; } int ufv(int n, double *x, double *f) { /* evaluate the function value f(x) = 0.5*x'*A*x + g0'*x */ double *t = (double *) malloc(sizeof(double)*n); F77_CALL(dsymv)("U", &n, &one, A, &n, x, &inc, &zero, t, &inc FCONE); *f = F77_CALL(ddot)(&n, x, &inc, g0, &inc) + 0.5 * F77_CALL(ddot)(&n, x, &inc, t, &inc); free(t); return ++nfev; } void solvebqp(struct BQP *qp) { /* driver for positive semidefinite quadratic programing version of tron */ int i, n, maxfev; double *x, *xl, *xu; double frtol, fatol, fmin, gtol, cgtol; n = qp->n; maxfev = 1000; /* ? */ nfev = 0; x = qp->x; xu = qp->C; A = qp->Q; g0 = qp->p; xl = (double *) malloc(sizeof(double)*n); for (i=0;ieps; dtron(n, x, xl, xu, gtol, frtol, fatol, fmin, maxfev, cgtol); free(xl); } kernlab/src/wmsufsort.h0000644000175100001440000000347512234152620014725 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the Suffix Array based String Kernel. * * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. 
* * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/W_msufsort.h // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 // 13 Jul 2007 : use MSufSort v3.1 instead of v2.2 // Wrapper for Michael Maniscalco's MSufSort version 3.1 algorithm #ifndef W_MSUFSORT_H #define W_MSUFSORT_H #include "datatype.h" #include "isafactory.h" #include "msufsort.h" class W_msufsort : public I_SAFactory { public: ///Variables //'Declaration of object POINTERS, no initialization needed. //'If Declaration of objects, initialize them in member initialization list. MSufSort *msuffixsorter; ///Constructor W_msufsort(); ///Destructor virtual ~W_msufsort(); ///Methods ErrorCode ConstructSA(SYMBOL *text, const UInt32 &len, UInt32 *&array); }; #endif kernlab/src/dtron.c0000644000175100001440000001714714221630764014006 0ustar hornikusers#include #include #include #include #ifndef USE_FC_LEN_T # define USE_FC_LEN_T #endif #include extern void *xmalloc(size_t); extern double mymin(double, double); extern double mymax(double, double); extern int ufv(int, double *, double *); extern int ugrad(int, double *, double *); extern int uhes(int, double *, double **); /* LEVEL 1 BLAS */ /*extern double dnrm2_(int *, double *, int *);*/ /*extern double ddot_(int *, double *, int *, double *, int *);*/ /* LEVEL 2 BLAS */ /*extern int dsymv_(char *, int *, double *, double *, int *, double *, int *, double *, double *, int *);*/ /* MINPACK 2 */ extern double dgpnrm(int, double *, double *, double *, double *); extern void dcauchy(int, double *, double *, double *, double *, double *, double, double *, double *, double *); extern void dspcg(int, double *, double *, double *, double *, double *, double, double, double *, int *); void dtron(int n, double *x, double *xl, double *xu, double gtol, double frtol, double fatol, double fmin, int maxfev, double cgtol) { /* c ********* c c Subroutine dtron c c The optimization problem of BSVM is a bound-constrained quadratic c optimization problem and its Hessian matrix is positive semidefinite. c We modified the optimization solver TRON by Chih-Jen Lin and c Jorge More' into this version which is suitable for this c special case. c c This subroutine implements a trust region Newton method for the c solution of large bound-constrained quadratic optimization problems c c min { f(x)=0.5*x'*A*x + g0'*x : xl <= x <= xu } c c where the Hessian matrix A is dense and positive semidefinite. The c user must define functions which evaluate the function, gradient, c and the Hessian matrix. c c The user must choose an initial approximation x to the minimizer, c lower bounds, upper bounds, quadratic terms, linear terms, and c constants about termination criterion. c c parameters: c c n is an integer variable. c On entry n is the number of variables. c On exit n is unchanged. c c x is a double precision array of dimension n. c On entry x specifies the vector x. c On exit x is the final minimizer. c c xl is a double precision array of dimension n. c On entry xl is the vector of lower bounds. c On exit xl is unchanged. c c xu is a double precision array of dimension n. c On entry xu is the vector of upper bounds. c On exit xu is unchanged. c c gtol is a double precision variable. c On entry gtol specifies the relative error of the projected c gradient. c On exit gtol is unchanged. c c frtol is a double precision variable. 
c On entry frtol specifies the relative error desired in the c function. Convergence occurs if the estimate of the c relative error between f(x) and f(xsol), where xsol c is a local minimizer, is less than frtol. c On exit frtol is unchanged. c c fatol is a double precision variable. c On entry fatol specifies the absolute error desired in the c function. Convergence occurs if the estimate of the c absolute error between f(x) and f(xsol), where xsol c is a local minimizer, is less than fatol. c On exit fatol is unchanged. c c fmin is a double precision variable. c On entry fmin specifies a lower bound for the function. c The subroutine exits with a warning if f < fmin. c On exit fmin is unchanged. c c maxfev is an integer variable. c On entry maxfev specifies the limit of function evaluations. c On exit maxfev is unchanged. c c cgtol is a double precision variable. c On entry gqttol specifies the convergence criteria for c subproblems. c On exit gqttol is unchanged. c c ********** */ /* Parameters for updating the iterates. */ double eta0 = 1e-4, eta1 = 0.25, eta2 = 0.75; /* Parameters for updating the trust region size delta. */ double sigma1 = 0.25, sigma2 = 0.5, sigma3 = 4; double p5 = 0.5, one = 1; double gnorm, gnorm0, delta, snorm; double alphac = 1, alpha, f, fc, prered, actred, gs; int search = 1, iter = 1, info, inc = 1; double *xc = (double *) xmalloc(sizeof(double)*n); double *s = (double *) xmalloc(sizeof(double)*n); double *wa = (double *) xmalloc(sizeof(double)*n); double *g = (double *) xmalloc(sizeof(double)*n); double *A = NULL; uhes(n, x, &A); ugrad(n, x, g); ufv(n, x, &f); gnorm0 = F77_CALL(dnrm2)(&n, g, &inc); delta = 1000*gnorm0; gnorm = dgpnrm(n, x, xl, xu, g); if (gnorm <= gtol*gnorm0) { /* //printf("CONVERGENCE: GTOL TEST SATISFIED\n"); */ search = 0; } while (search) { /* Save the best function value and the best x. */ fc = f; memcpy(xc, x, sizeof(double)*n); /* Compute the Cauchy step and store in s. */ dcauchy(n, x, xl, xu, A, g, delta, &alphac, s, wa); /* Compute the projected Newton step. */ dspcg(n, x, xl, xu, A, g, delta, cgtol, s, &info); if (ufv(n, x, &f) > maxfev) { /* //printf("ERROR: NFEV > MAXFEV\n"); */ search = 0; continue; } /* Compute the predicted reduction. */ memcpy(wa, g, sizeof(double)*n); F77_CALL(dsymv)("U", &n, &p5, A, &n, s, &inc, &one, wa, &inc FCONE); prered = -F77_CALL(ddot)(&n, s, &inc, wa, &inc); /* Compute the actual reduction. */ actred = fc - f; /* On the first iteration, adjust the initial step bound. */ snorm = F77_CALL(dnrm2)(&n, s, &inc); if (iter == 1) delta = mymin(delta, snorm); /* Compute prediction alpha*snorm of the step. */ gs = F77_CALL(ddot)(&n, g, &inc, s, &inc); if (f - fc - gs <= 0) alpha = sigma3; else alpha = mymax(sigma1, -0.5*(gs/(f - fc - gs))); /* Update the trust region bound according to the ratio of actual to predicted reduction. */ if (actred < eta0*prered) /* Reduce delta. Step is not successful. */ delta = mymin(mymax(alpha, sigma1)*snorm, sigma2*delta); else { if (actred < eta1*prered) /* Reduce delta. Step is not sufficiently successful. */ delta = mymax(sigma1*delta, mymin(alpha*snorm, sigma2*delta)); else if (actred < eta2*prered) /* The ratio of actual to predicted reduction is in the interval (eta1,eta2). We are allowed to either increase or decrease delta. */ delta = mymax(sigma1*delta, mymin(alpha*snorm, sigma3*delta)); else /* The ratio of actual to predicted reduction exceeds eta2. Do not decrease delta. */ delta = mymax(delta, mymin(alpha*snorm, sigma3*delta)); } /* Update the iterate. 
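The step is accepted, and the gradient and its projected norm refreshed, only when the actual reduction exceeds eta0 times the predicted reduction; otherwise x and f are rolled back to the saved iterate xc, fc below.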
*/ if (actred > eta0*prered) { /* Successful iterate. */ iter++; /* uhes(n, x, &A); */ ugrad(n, x, g); gnorm = dgpnrm(n, x, xl, xu, g); if (gnorm <= gtol*gnorm0) { /* //printf("CONVERGENCE: GTOL = %g TEST SATISFIED\n", gnorm/gnorm0); */ search = 0; continue; } } else { /* Unsuccessful iterate. */ memcpy(x, xc, sizeof(double)*n); f = fc; } /* Test for convergence */ if (f < fmin) { //printf("WARNING: F .LT. FMIN\n"); search = 0; /* warning */ continue; } if (fabs(actred) <= fatol && prered <= fatol) { //printf("CONVERGENCE: FATOL TEST SATISFIED\n"); search = 0; continue; } if (fabs(actred) <= frtol*fabs(f) && prered <= frtol*fabs(f)) { /* //printf("CONVERGENCE: FRTOL TEST SATISFIED\n"); */ search = 0; continue; } } free(g); free(xc); free(s); free(wa); } kernlab/src/brweight.h0000644000175100001440000000325412234152620014462 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the Suffix Array based String Kernel. * * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. * * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/BoundedRangeWeight.h // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 // 12 Jul 2006 #ifndef BRWEIGHT_H #define BRWEIGHT_H #include "datatype.h" #include "errorcode.h" #include "iweightfactory.h" #include //' Bounded Range weight class class BoundedRangeWeight : public I_WeightFactory { Real n; public: /// Constructor BoundedRangeWeight(const Real &n_=1): n(n_){} /// Destructor virtual ~BoundedRangeWeight(){} /// Compute weight ErrorCode ComputeWeight(const UInt32 &floor_len, const UInt32 &x_len, Real &weight); }; #endif kernlab/src/Makevars.win0000644000175100001440000000006011470002335014754 0ustar hornikusersPKG_LIBS = $(LAPACK_LIBS) $(BLAS_LIBS) $(FLIBS) kernlab/src/msufsort.h0000644000175100001440000006500412761213650014540 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the MSufSort suffix sorting algorithm (Version 2.2). * * The Initial Developer of the Original Code is * Michael A. Maniscalco * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. * * Contributor(s): * * Michael A. 
Maniscalco * * ***** END LICENSE BLOCK ***** */ #ifndef MSUFSORT_H #define MSUFSORT_H //==================================================================// // // // v // // MSufSort Version 2.2 // // Author: Michael A Maniscalco // // Date: Nov. 3, 2005 // // // // Notes: // // // //==================================================================// #include "stdio.h" #include "stack.h" #include "introsort.h" #include "inductionsort.h" //==================================================================// // Test app defines: // //==================================================================// #define SHOW_PROGRESS // display progress during sort #define CHECK_SORT // verify that sorting is correct. // #define SORT_16_BIT_SYMBOLS // enable 16 bit symbols. #define USE_INDUCTION_SORTING // enable induction sorting feature. #define USE_ENHANCED_INDUCTION_SORTING // enable enhanced induction sorting feature. #define USE_TANDEM_REPEAT_SORTING // enable the tandem repeat sorting feature. //#define USE_ALT_SORT_ORDER // enable alternative sorting order #define ENDIAN_SWAP_16(value) ((value >> 8) | (value << 8)) #define SUFFIX_SORTED 0x80000000 // flag marks suffix as sorted. #define END_OF_CHAIN 0x3ffffffe // marks the end of a chain #define SORTED_BY_ENHANCED_INDUCTION 0x3fffffff // marks suffix which will be sorted by enhanced induction sort. #ifdef SORT_16_BIT_SYMBOLS #define SYMBOL_TYPE unsigned short #else #define SYMBOL_TYPE unsigned char #endif class MSufSort { public: MSufSort(); virtual ~MSufSort(); unsigned int Sort(SYMBOL_TYPE * source, unsigned int sourceLength); unsigned int GetElapsedSortTime(); unsigned int GetMemoryUsage(); unsigned int ISA(unsigned int index); bool VerifySort(); static void ReverseAltSortOrder(SYMBOL_TYPE * data, unsigned int nBytes); private: int CompareStrings(SYMBOL_TYPE * stringA, SYMBOL_TYPE * stringB, int len); bool IsTandemRepeat2(); bool IsTandemRepeat(); void PassTandemRepeat(); bool IsSortedByInduction(); bool IsSortedByEnhancedInduction(unsigned int suffixIndex); void ProcessSuffixesSortedByInduction(); // MarkSuffixAsSorted // Sets the final inverse suffix array value for a given suffix. // Also invokes the OnSortedSuffix member function. void MarkSuffixAsSorted(unsigned int suffixIndex, unsigned int & sortedIndex); void MarkSuffixAsSorted2(unsigned int suffixIndex, unsigned int & sortedIndex); void MarkSuffixAsSortedByEnhancedInductionSort(unsigned int suffixIndex); // PushNewChainsOntoStack: // Moves all new suffix chains onto the stack of partially sorted // suffixes. (makes them ready for further sub sorting). void PushNewChainsOntoStack(bool originalChains = false); void PushTandemBypassesOntoStack(); // OnSortedSuffix: // Event which is invoked with each sorted suffix at the time of // its sorting. virtual void OnSortedSuffix(unsigned int suffixIndex); // Initialize: // Initializes this object just before sorting begins. void Initialize(); // InitialSort: // This is the first sorting pass which makes the initial suffix // chains from the given source string. Pushes these chains onto // the stack for further sorting. void InitialSort(); // Value16: // Returns the two 8 bit symbols located // at positions N and N + 1 where N = the sourceIndex. unsigned short Value16(unsigned int sourceIndex); // ProcessChain: // Sorts the suffixes of a given chain by the next two symbols of // each suffix in the chain. This creates zero or more new suffix // chains with each sorted by two more symbols than the original // chain. 
Then pushes these new chains onto the chain stack for // further sorting. void ProcessNextChain(); void AddToSuffixChain(unsigned int suffixIndex, unsigned short suffixChain); void AddToSuffixChain(unsigned int firstSuffixIndex, unsigned int lastSuffixIndex, unsigned short suffixChain); void ProcessSuffixesSortedByEnhancedInduction(unsigned short suffixId); void ResolveTandemRepeatsNotSortedWithInduction(); unsigned int m_sortTime; Stack m_chainMatchLengthStack; Stack m_chainCountStack; Stack m_chainHeadStack; unsigned int m_endOfSuffixChain[0x10000]; unsigned int m_startOfSuffixChain[0x10000]; // m_source: // Address of the string to sort. SYMBOL_TYPE * m_source; // m_sourceLength: // The length of the string pointed to by m_source. unsigned int m_sourceLength; unsigned int m_sourceLengthMinusOne; // m_ISA: // The address of the working space which, when the sort is // completed, will contain the inverse suffix array for the // source string. unsigned int * m_ISA; // m_nextSortedSuffixValue: unsigned int m_nextSortedSuffixValue; // unsigned int m_numSortedSuffixes; // m_newChainIds // Array containing the valid chain numbers in m_newChain array. unsigned short m_newChainIds[0x10000]; // m_numNewChains: // The number of new suffix chain ids stored in m_numChainIds. unsigned int m_numNewChains; Stack m_suffixesSortedByInduction; unsigned int m_suffixMatchLength; unsigned int m_currentSuffixIndex; // m_firstSortedPosition: // For use with enhanced induction sorting. unsigned int m_firstSortedPosition[0x10000]; unsigned int m_firstSuffixByEnhancedInductionSort[0x10000]; unsigned int m_lastSuffixByEnhancedInductionSort[0x10000]; unsigned int m_currentSuffixChainId; #ifdef SHOW_PROGRESS // ShowProgress: // Update the progress indicator. void ShowProgress(); // m_nextProgressUpdate: // Indicates when to update the progress indicator. unsigned int m_nextProgressUpdate; // m_progressUpdateIncrement: // Indicates how many suffixes should be sorted before // incrementing the progress indicator. unsigned int m_progressUpdateIncrement; #endif // members used if alternate sorting order should be applied. SYMBOL_TYPE m_forwardAltSortOrder[256]; static SYMBOL_TYPE m_reverseAltSortOrder[256]; // for tandem repeat sorting bool m_hasTandemRepeatSortedByInduction; unsigned int m_firstUnsortedTandemRepeat; unsigned int m_lastUnsortedTandemRepeat; bool m_hasEvenLengthTandemRepeats; unsigned int m_tandemRepeatDepth; unsigned int m_firstSortedTandemRepeat; unsigned int m_lastSortedTandemRepeat; unsigned int m_tandemRepeatLength; }; //inline unsigned short MSufSort::Value16(unsigned int sourceIndex) //{ // return (sourceIndex < m_sourceLengthMinusOne) ? *(unsigned short *)(m_source + sourceIndex) : m_source[sourceIndex]; //} // fix by Brian Ripley inline unsigned short MSufSort::Value16(unsigned int sourceIndex) { union {unsigned short u; unsigned char b[2];} u16; u16.b[0] = m_source[sourceIndex]; u16.b[1] = (sourceIndex < m_sourceLengthMinusOne) ? 
m_source[sourceIndex + 1] : 0; return u16.u; } inline bool MSufSort::IsSortedByInduction() { unsigned int n = m_currentSuffixIndex + m_suffixMatchLength - 1; #ifndef USE_INDUCTION_SORTING if (n < m_sourceLengthMinusOne) return false; #endif if ((m_ISA[n] & SUFFIX_SORTED) && ((m_ISA[n] & 0x3fffffff) < m_nextSortedSuffixValue)) { InductionSortObject i(0, m_ISA[n], m_currentSuffixIndex); m_suffixesSortedByInduction.Push(i); } else if ((m_ISA[n + 1] & SUFFIX_SORTED) && ((m_ISA[n + 1] & 0x3fffffff) < m_nextSortedSuffixValue)) { InductionSortObject i(1, m_ISA[n + 1], m_currentSuffixIndex); m_suffixesSortedByInduction.Push(i); } else return false; return true; } inline bool MSufSort::IsSortedByEnhancedInduction(unsigned int suffixIndex) { if (suffixIndex > 0) if (m_ISA[suffixIndex - 1] == SORTED_BY_ENHANCED_INDUCTION) return true; return false; } inline bool MSufSort::IsTandemRepeat() { #ifndef USE_TANDEM_REPEAT_SORTING return false; #else if ((!m_tandemRepeatDepth) && (m_currentSuffixIndex + m_suffixMatchLength) == (m_ISA[m_currentSuffixIndex] + 1)) return true; #ifndef SORT_16_BIT_SYMBOLS if ((!m_tandemRepeatDepth) && ((m_currentSuffixIndex + m_suffixMatchLength) == (m_ISA[m_currentSuffixIndex]))) { m_hasEvenLengthTandemRepeats = true; return false; } #endif return false; #endif } inline void MSufSort::PassTandemRepeat() { unsigned int nextIndex; unsigned int lastIndex; // unsigned int firstIndex = m_currentSuffixIndex; while ((m_currentSuffixIndex + m_suffixMatchLength) == ((nextIndex = m_ISA[m_currentSuffixIndex]) + 1)) { lastIndex = m_currentSuffixIndex; m_currentSuffixIndex = nextIndex; } if (IsSortedByInduction()) { m_hasTandemRepeatSortedByInduction = true; m_currentSuffixIndex = m_ISA[m_currentSuffixIndex]; } else { if (m_firstUnsortedTandemRepeat == END_OF_CHAIN) m_firstUnsortedTandemRepeat = m_lastUnsortedTandemRepeat = lastIndex; else m_lastUnsortedTandemRepeat = (m_ISA[m_lastUnsortedTandemRepeat] = lastIndex); } } inline void MSufSort::PushNewChainsOntoStack(bool originalChains) { // Moves all new suffix chains onto the stack of partially sorted // suffixes. (makes them ready for further sub sorting). #ifdef SORT_16_BIT_SYMBOLS unsigned int newSuffixMatchLength = m_suffixMatchLength + 1; #else unsigned int newSuffixMatchLength = m_suffixMatchLength + 2; #endif if (m_numNewChains) { if (m_hasEvenLengthTandemRepeats) { m_chainCountStack.Push(m_numNewChains - 1); m_chainMatchLengthStack.Push(newSuffixMatchLength); m_chainCountStack.Push(1); m_chainMatchLengthStack.Push(newSuffixMatchLength - 1); } else { m_chainCountStack.Push(m_numNewChains); m_chainMatchLengthStack.Push(newSuffixMatchLength); } if (m_numNewChains > 1) IntroSort(m_newChainIds, m_numNewChains); while (m_numNewChains) { unsigned short chainId = m_newChainIds[--m_numNewChains]; chainId = ENDIAN_SWAP_16(chainId); // unsigned int n = m_startOfSuffixChain[chainId]; m_chainHeadStack.Push(m_startOfSuffixChain[chainId]); m_startOfSuffixChain[chainId] = END_OF_CHAIN; m_ISA[m_endOfSuffixChain[chainId]] = END_OF_CHAIN; } } m_hasEvenLengthTandemRepeats = false; if (m_firstUnsortedTandemRepeat != END_OF_CHAIN) { // Tandem repeats with a terminating suffix that did not get // sorted via induction has occurred (at least once). // We have a suffix chain (indicated by m_firstTandemRepeatWithoutSuffix) // of the suffix in each tandem repeat which immediately proceeded the // terminating suffix in each chain. We want to sort them relative to // each other and then process the tandem repeats. 
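// Concretely: in a text like "abababab" with m_suffixMatchLength == 2, the // suffix at offset 0 can chain to the one at offset 2, that one to offset 4, // and so on; IsTandemRepeat() recognizes exactly this pattern // (m_ISA[i] + 1 == i + m_suffixMatchLength), and such runs are resolved here // in one pass instead of being re-partitioned two symbols at a time.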
unsigned int tandemRepeatLength = m_suffixMatchLength - 1; unsigned int numChains = m_chainHeadStack.Count(); m_chainHeadStack.Push(m_firstUnsortedTandemRepeat); m_chainCountStack.Push(1); m_chainMatchLengthStack.Push((m_suffixMatchLength << 1) - 1); m_ISA[m_lastUnsortedTandemRepeat] = END_OF_CHAIN; m_firstUnsortedTandemRepeat = END_OF_CHAIN; m_tandemRepeatDepth = 1; while (m_chainHeadStack.Count() > numChains) ProcessNextChain(); m_suffixMatchLength = tandemRepeatLength + 1; ResolveTandemRepeatsNotSortedWithInduction(); m_tandemRepeatDepth = 0; } } inline void MSufSort::AddToSuffixChain(unsigned int suffixIndex, unsigned short suffixChain) { if (m_startOfSuffixChain[suffixChain] == END_OF_CHAIN) { m_endOfSuffixChain[suffixChain] = m_startOfSuffixChain[suffixChain] = suffixIndex; m_newChainIds[m_numNewChains++] = ENDIAN_SWAP_16(suffixChain); } else m_endOfSuffixChain[suffixChain] = m_ISA[m_endOfSuffixChain[suffixChain]] = suffixIndex; } inline void MSufSort::AddToSuffixChain(unsigned int firstSuffixIndex, unsigned int lastSuffixIndex, unsigned short suffixChain) { if (m_startOfSuffixChain[suffixChain] == END_OF_CHAIN) { m_startOfSuffixChain[suffixChain] = firstSuffixIndex; m_endOfSuffixChain[suffixChain] = lastSuffixIndex; m_newChainIds[m_numNewChains++] = ENDIAN_SWAP_16(suffixChain); } else { m_ISA[m_endOfSuffixChain[suffixChain]] = firstSuffixIndex; m_endOfSuffixChain[suffixChain] = lastSuffixIndex; } } inline void MSufSort::OnSortedSuffix(unsigned int suffixIndex) { // Event which is invoked with each sorted suffix at the time of // its sorting. m_numSortedSuffixes++; #ifdef SHOW_PROGRESS if (m_numSortedSuffixes >= m_nextProgressUpdate) { m_nextProgressUpdate += m_progressUpdateIncrement; ShowProgress(); } #endif } #ifdef SORT_16_BIT_SYMBOLS inline void MSufSort::MarkSuffixAsSorted(unsigned int suffixIndex, unsigned int & sortedIndex) { // Sets the final inverse suffix array value for a given suffix. // Also invokes the OnSortedSuffix member function. if (m_tandemRepeatDepth) { // we are processing a list of suffixes which we the second to last in tandem repeats // that were not terminated via induction. These suffixes are not actually to be // marked as sorted yet. Instead, they are to be linked together in sorted order. if (m_firstSortedTandemRepeat == END_OF_CHAIN) m_firstSortedTandemRepeat = m_lastSortedTandemRepeat = suffixIndex; else m_lastSortedTandemRepeat = (m_ISA[m_lastSortedTandemRepeat] = suffixIndex); return; } m_ISA[suffixIndex] = (sortedIndex++ | SUFFIX_SORTED); #ifdef SHOW_PROGRESS OnSortedSuffix(suffixIndex); #endif #ifdef USE_ENHANCED_INDUCTION_SORTING if ((suffixIndex) && (m_ISA[suffixIndex - 1] == SORTED_BY_ENHANCED_INDUCTION)) { suffixIndex--; unsigned short symbol = Value16(suffixIndex); m_ISA[suffixIndex] = (m_firstSortedPosition[symbol]++ | SUFFIX_SORTED); #ifdef SHOW_PROGRESS OnSortedSuffix(suffixIndex); #endif if ((suffixIndex) && (m_ISA[suffixIndex - 1] == SORTED_BY_ENHANCED_INDUCTION)) { suffixIndex--; symbol = ENDIAN_SWAP_16(symbol); if (m_firstSuffixByEnhancedInductionSort[symbol] == END_OF_CHAIN) m_firstSuffixByEnhancedInductionSort[symbol] = m_lastSuffixByEnhancedInductionSort[symbol] = suffixIndex; else { m_ISA[m_lastSuffixByEnhancedInductionSort[symbol]] = suffixIndex; m_lastSuffixByEnhancedInductionSort[symbol] = suffixIndex; } } } #endif } inline void MSufSort::MarkSuffixAsSorted2(unsigned int suffixIndex, unsigned int & sortedIndex) { // Sets the final inverse suffix array value for a given suffix. // Also invokes the OnSortedSuffix member function. 
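// (The rank is stored with the high bit set, see the SUFFIX_SORTED define: // e.g. rank 5 becomes 0x80000005, and consumers recover it by masking with // 0x3fffffff, as in ProcessSuffixesSortedByInduction() below. One m_ISA word // thus serves first as a chain link and later as the final rank.)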
if (m_tandemRepeatDepth) { // we are processing a list of suffixes which we the second to last in tandem repeats // that were not terminated via induction. These suffixes are not actually to be // marked as sorted yet. Instead, they are to be linked together in sorted order. if (m_firstSortedTandemRepeat == END_OF_CHAIN) m_firstSortedTandemRepeat = m_lastSortedTandemRepeat = suffixIndex; else m_lastSortedTandemRepeat = (m_ISA[m_lastSortedTandemRepeat] = suffixIndex); return; } m_ISA[suffixIndex] = (sortedIndex++ | SUFFIX_SORTED); #ifdef SHOW_PROGRESS OnSortedSuffix(suffixIndex); #endif #ifdef USE_ENHANCED_INDUCTION_SORTING if ((suffixIndex) && (m_ISA[suffixIndex - 1] == SORTED_BY_ENHANCED_INDUCTION)) { unsigned short symbol = Value16(suffixIndex); symbol = ENDIAN_SWAP_16(symbol); suffixIndex--; if (m_firstSuffixByEnhancedInductionSort[symbol] == END_OF_CHAIN) m_firstSuffixByEnhancedInductionSort[symbol] = m_lastSuffixByEnhancedInductionSort[symbol] = suffixIndex; else { m_ISA[m_lastSuffixByEnhancedInductionSort[symbol]] = suffixIndex; m_lastSuffixByEnhancedInductionSort[symbol] = suffixIndex; } } #endif } #else inline void MSufSort::MarkSuffixAsSorted(unsigned int suffixIndex, unsigned int & sortedIndex) { // Sets the final inverse suffix array value for a given suffix. // Also invokes the OnSortedSuffix member function. if (m_tandemRepeatDepth) { // we are processing a list of suffixes which we the second to last in tandem repeats // that were not terminated via induction. These suffixes are not actually to be // marked as sorted yet. Instead, they are to be linked together in sorted order. if (m_firstSortedTandemRepeat == END_OF_CHAIN) m_firstSortedTandemRepeat = m_lastSortedTandemRepeat = suffixIndex; else m_lastSortedTandemRepeat = (m_ISA[m_lastSortedTandemRepeat] = suffixIndex); return; } m_ISA[suffixIndex] = (sortedIndex++ | SUFFIX_SORTED); #ifdef SHOW_PROGRESS OnSortedSuffix(suffixIndex); #endif #ifdef USE_ENHANCED_INDUCTION_SORTING if ((suffixIndex) && (m_ISA[suffixIndex - 1] == SORTED_BY_ENHANCED_INDUCTION)) { suffixIndex--; unsigned short symbol = Value16(suffixIndex); m_ISA[suffixIndex] = (m_firstSortedPosition[symbol]++ | SUFFIX_SORTED); #ifdef SHOW_PROGRESS OnSortedSuffix(suffixIndex); #endif if ((suffixIndex) && (m_ISA[suffixIndex - 1] == SORTED_BY_ENHANCED_INDUCTION)) { suffixIndex--; unsigned short symbol2 = symbol; symbol = Value16(suffixIndex); m_ISA[suffixIndex] = (m_firstSortedPosition[symbol]++ | SUFFIX_SORTED); #ifdef SHOW_PROGRESS OnSortedSuffix(suffixIndex); #endif if ((suffixIndex) && (m_ISA[suffixIndex - 1] == SORTED_BY_ENHANCED_INDUCTION)) { if (m_source[suffixIndex] < m_source[suffixIndex + 1]) symbol2 = ENDIAN_SWAP_16(symbol); else symbol2 = ENDIAN_SWAP_16(symbol2); suffixIndex--; if (m_firstSuffixByEnhancedInductionSort[symbol2] == END_OF_CHAIN) m_firstSuffixByEnhancedInductionSort[symbol2] = m_lastSuffixByEnhancedInductionSort[symbol2] = suffixIndex; else { m_ISA[m_lastSuffixByEnhancedInductionSort[symbol2]] = suffixIndex; m_lastSuffixByEnhancedInductionSort[symbol2] = suffixIndex; } } } } #endif } inline void MSufSort::MarkSuffixAsSorted2(unsigned int suffixIndex, unsigned int & sortedIndex) { // Sets the final inverse suffix array value for a given suffix. // Also invokes the OnSortedSuffix member function. if (m_tandemRepeatDepth) { // we are processing a list of suffixes which we the second to last in tandem repeats // that were not terminated via induction. These suffixes are not actually to be // marked as sorted yet. 
Instead, they are to be linked together in sorted order. if (m_firstSortedTandemRepeat == END_OF_CHAIN) m_firstSortedTandemRepeat = m_lastSortedTandemRepeat = suffixIndex; else m_lastSortedTandemRepeat = (m_ISA[m_lastSortedTandemRepeat] = suffixIndex); return; } m_ISA[suffixIndex] = (sortedIndex++ | SUFFIX_SORTED); #ifdef SHOW_PROGRESS OnSortedSuffix(suffixIndex); #endif #ifdef USE_ENHANCED_INDUCTION_SORTING if ((suffixIndex) && (m_ISA[suffixIndex - 1] == SORTED_BY_ENHANCED_INDUCTION)) { unsigned short symbol; if (m_source[suffixIndex] < m_source[suffixIndex + 1]) symbol = Value16(suffixIndex); else symbol = Value16(suffixIndex + 1); symbol = ENDIAN_SWAP_16(symbol); suffixIndex--; if (m_firstSuffixByEnhancedInductionSort[symbol] == END_OF_CHAIN) m_firstSuffixByEnhancedInductionSort[symbol] = m_lastSuffixByEnhancedInductionSort[symbol] = suffixIndex; else { m_ISA[m_lastSuffixByEnhancedInductionSort[symbol]] = suffixIndex; m_lastSuffixByEnhancedInductionSort[symbol] = suffixIndex; } } #endif } #endif inline void MSufSort::ProcessNextChain() { // Sorts the suffixes of a given chain by the next two symbols of // each suffix in the chain. This creates zero or more new suffix // chains with each sorted by two more symbols than the original // chain. Then pushes these new chains onto the chain stack for // further sorting. while (--m_chainCountStack.Top() < 0) { m_chainCountStack.Pop(); m_chainMatchLengthStack.Pop(); } m_suffixMatchLength = m_chainMatchLengthStack.Top(); m_currentSuffixIndex = m_chainHeadStack.Pop(); #ifdef USE_ENHANCED_INDUCTION_SORTING if (m_chainMatchLengthStack.Count() == 1) { // one of the original buckets from InitialSort(). This is important // when enhanced induction sorting is enabled. unsigned short chainId = Value16(m_currentSuffixIndex); unsigned short temp = chainId; chainId = ENDIAN_SWAP_16(chainId); while (m_currentSuffixChainId <= chainId) ProcessSuffixesSortedByEnhancedInduction(m_currentSuffixChainId++); m_nextSortedSuffixValue = m_firstSortedPosition[temp]; } #endif if (m_ISA[m_currentSuffixIndex] == END_OF_CHAIN) MarkSuffixAsSorted(m_currentSuffixIndex, m_nextSortedSuffixValue); // only one suffix in bucket so it is sorted. 
else { do { if (IsTandemRepeat()) PassTandemRepeat(); else if ((m_currentSuffixIndex != END_OF_CHAIN) && (IsSortedByInduction())) m_currentSuffixIndex = m_ISA[m_currentSuffixIndex]; else { unsigned int firstSuffixIndex = m_currentSuffixIndex; unsigned int lastSuffixIndex = m_currentSuffixIndex; unsigned short targetSymbol = Value16(m_currentSuffixIndex + m_suffixMatchLength); unsigned int nextSuffix; do { nextSuffix = m_ISA[lastSuffixIndex = m_currentSuffixIndex]; if ((m_currentSuffixIndex = nextSuffix) == END_OF_CHAIN) break; else if (IsTandemRepeat()) { PassTandemRepeat(); break; } else if (IsSortedByInduction()) { m_currentSuffixIndex = m_ISA[nextSuffix]; break; } } while (Value16(m_currentSuffixIndex + m_suffixMatchLength) == targetSymbol); AddToSuffixChain(firstSuffixIndex, lastSuffixIndex, targetSymbol); } } while (m_currentSuffixIndex != END_OF_CHAIN); ProcessSuffixesSortedByInduction(); PushNewChainsOntoStack(); } } inline void MSufSort::ProcessSuffixesSortedByInduction() { unsigned int numSuffixes = m_suffixesSortedByInduction.Count(); if (numSuffixes) { InductionSortObject * objects = m_suffixesSortedByInduction.m_stack; if (numSuffixes > 1) IntroSort(objects, numSuffixes); if (m_hasTandemRepeatSortedByInduction) { // During the last pass some suffixes which were sorted via induction were also // determined to be the terminal suffix in a tandem repeat. So when we mark // the suffixes as sorted (where were sorted via induction) we make chain together // the preceding suffix in the tandem repeat (if there is one). unsigned int firstTandemRepeatIndex = END_OF_CHAIN; unsigned int lastTandemRepeatIndex = END_OF_CHAIN; unsigned int tandemRepeatLength = m_suffixMatchLength - 1; m_hasTandemRepeatSortedByInduction = false; for (unsigned int i = 0; i < numSuffixes; i++) { unsigned int suffixIndex = (objects[i].m_sortValue[1] & 0x3fffffff); if ((suffixIndex >= tandemRepeatLength) && (m_ISA[suffixIndex - tandemRepeatLength] == suffixIndex)) { // this suffix was a terminating suffix in a tandem repeat. // add the preceding suffix in the tandem repeat to the list. if (firstTandemRepeatIndex == END_OF_CHAIN) firstTandemRepeatIndex = lastTandemRepeatIndex = (suffixIndex - tandemRepeatLength); else lastTandemRepeatIndex = (m_ISA[lastTandemRepeatIndex] = (suffixIndex - tandemRepeatLength)); } MarkSuffixAsSorted(suffixIndex, m_nextSortedSuffixValue); } // now process each suffix in the tandem repeat list making each as sorted. // build a new list for tandem repeats which preceded each in the list until there are // no preceding tandem suffix for any suffix in the list. while (firstTandemRepeatIndex != END_OF_CHAIN) { m_ISA[lastTandemRepeatIndex] = END_OF_CHAIN; unsigned int suffixIndex = firstTandemRepeatIndex; firstTandemRepeatIndex = END_OF_CHAIN; while (suffixIndex != END_OF_CHAIN) { if ((suffixIndex >= tandemRepeatLength) && (m_ISA[suffixIndex - tandemRepeatLength] == suffixIndex)) { // this suffix was a terminating suffix in a tandem repeat. // add the preceding suffix in the tandem repeat to the list. if (firstTandemRepeatIndex == END_OF_CHAIN) firstTandemRepeatIndex = lastTandemRepeatIndex = (suffixIndex - tandemRepeatLength); else lastTandemRepeatIndex = (m_ISA[lastTandemRepeatIndex] = (suffixIndex - tandemRepeatLength)); } unsigned int nextSuffix = m_ISA[suffixIndex]; MarkSuffixAsSorted(suffixIndex, m_nextSortedSuffixValue); suffixIndex = nextSuffix; } } // finished. } else { // This is the typical branch on the condition. 
There were no tandem repeats // encountered during the last chain that were terminated with a suffix that // was sorted via induction. In this case we just mark the suffixes as sorted // and we are done. for (unsigned int i = 0; i < numSuffixes; i++) MarkSuffixAsSorted(objects[i].m_sortValue[1] & 0x3fffffff, m_nextSortedSuffixValue); } m_suffixesSortedByInduction.Clear(); } } inline void MSufSort::ProcessSuffixesSortedByEnhancedInduction(unsigned short suffixId) { // if (m_firstSuffixByEnhancedInductionSort[suffixId] != END_OF_CHAIN) { unsigned int currentSuffixIndex = m_firstSuffixByEnhancedInductionSort[suffixId]; unsigned int lastSuffixIndex = m_lastSuffixByEnhancedInductionSort[suffixId]; m_firstSuffixByEnhancedInductionSort[suffixId] = END_OF_CHAIN; m_lastSuffixByEnhancedInductionSort[suffixId] = END_OF_CHAIN; do { unsigned short symbol = Value16(currentSuffixIndex); unsigned int nextIndex = m_ISA[currentSuffixIndex]; MarkSuffixAsSorted2(currentSuffixIndex, m_firstSortedPosition[symbol]); if (currentSuffixIndex == lastSuffixIndex) { if (m_firstSuffixByEnhancedInductionSort[suffixId] == END_OF_CHAIN) return; currentSuffixIndex = m_firstSuffixByEnhancedInductionSort[suffixId]; lastSuffixIndex = m_lastSuffixByEnhancedInductionSort[suffixId]; m_firstSuffixByEnhancedInductionSort[suffixId] = END_OF_CHAIN; m_lastSuffixByEnhancedInductionSort[suffixId] = END_OF_CHAIN; } else currentSuffixIndex = nextIndex; } while (true); } } #ifdef SHOW_PROGRESS inline void MSufSort::ShowProgress() { // Update the progress indicator. //double p = ((double)(m_numSortedSuffixes & 0x3fffffff) / m_sourceLength) * 100; // printf("Progress: %.2f%% %c", p, 13); } #endif #endif kernlab/src/dprsrch.c0000644000175100001440000001050614221630630014305 0ustar hornikusers#include #include #ifndef USE_FC_LEN_T # define USE_FC_LEN_T #endif #include extern double mymin(double, double); extern double mymax(double, double); extern void *xmalloc(size_t); /* LEVEL 1 BLAS */ /*extern double ddot_(int *, double *, int *, double *, int *);*/ /*extern int daxpy_(int *, double *, double *, int *, double *, int *);*/ /* LEVEL 2 BLAS */ /*extern int dsymv_(char *, int *, double *, double *, int *, double *, int *, double *, double *, int *);*/ /* MINPACK 2 */ extern void dbreakpt(int, double *, double *, double *, double *, int *, double *, double *); extern void dgpstep(int, double *, double *, double *, double, double *, double *); void dprsrch(int n, double *x, double *xl, double *xu, double *A, double *g, double *w) { /* c ********** c c Subroutine dprsrch c c This subroutine uses a projected search to compute a step c that satisfies a sufficient decrease condition for the quadratic c c q(s) = 0.5*s'*A*s + g'*s, c c where A is a symmetric matrix and g is a vector. Given the c parameter alpha, the step is c c s[alpha] = P[x + alpha*w] - x, c c where w is the search direction and P the projection onto the c n-dimensional interval [xl,xu]. The final step s = s[alpha] c satisfies the sufficient decrease condition c c q(s) <= mu_0*(g'*s), c c where mu_0 is a constant in (0,1). c c The search direction w must be a descent direction for the c quadratic q at x such that the quadratic is decreasing c in the ray x + alpha*w for 0 <= alpha <= 1. c c parameters: c c n is an integer variable. c On entry n is the number of variables. c On exit n is unchanged. c c x is a double precision array of dimension n. c On entry x specifies the vector x. c On exit x is set to the final point P[x + alpha*w]. 
c c xl is a double precision array of dimension n. c On entry xl is the vector of lower bounds. c On exit xl is unchanged. c c xu is a double precision array of dimension n. c On entry xu is the vector of upper bounds. c On exit xu is unchanged. c c A is a double precision array of dimension n*n. c On entry A specifies the matrix A c On exit A is unchanged. c c g is a double precision array of dimension n. c On entry g specifies the vector g. c On exit g is unchanged. c c w is a double prevision array of dimension n. c On entry w specifies the search direction. c On exit w is the step s[alpha]. c c ********** */ double one = 1, zero = 0; /* Constant that defines sufficient decrease. */ /* Interpolation factor. */ double mu0 = 0.01, interpf = 0.5; double *wa1 = (double *) xmalloc(sizeof(double)*n); double *wa2 = (double *) xmalloc(sizeof(double)*n); /* Set the initial alpha = 1 because the quadratic function is decreasing in the ray x + alpha*w for 0 <= alpha <= 1 */ double alpha = 1, brptmin, brptmax, gts, q; int search = 1, nbrpt, nsteps = 0, i, inc = 1; /* Find the smallest break-point on the ray x + alpha*w. */ dbreakpt(n, x, xl, xu, w, &nbrpt, &brptmin, &brptmax); /* Reduce alpha until the sufficient decrease condition is satisfied or x + alpha*w is feasible. */ while (search && alpha > brptmin) { /* Calculate P[x + alpha*w] - x and check the sufficient decrease condition. */ nsteps++; dgpstep(n, x, xl, xu, alpha, w, wa1); F77_CALL(dsymv)("U", &n, &one, A, &n, wa1, &inc, &zero, wa2, &inc FCONE); gts = F77_CALL(ddot)(&n, g, &inc, wa1, &inc); q = 0.5*F77_CALL(ddot)(&n, wa1, &inc, wa2, &inc) + gts; if (q <= mu0*gts) search = 0; else /* This is a crude interpolation procedure that will be replaced in future versions of the code. */ alpha *= interpf; } /* Force at least one more constraint to be added to the active set if alpha < brptmin and the full step is not successful. There is sufficient decrease because the quadratic function is decreasing in the ray x + alpha*w for 0 <= alpha <= 1. */ if (alpha < 1 && alpha < brptmin) alpha = brptmin; /* Compute the final iterate and step. */ dgpstep(n, x, xl, xu, alpha, w, wa1); F77_CALL(daxpy)(&n, &alpha, w, &inc, x, &inc); for (i=0;i * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/StringKernel.cpp // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 // 12 Jul 2006 // 10 Aug 2006 // 11 Oct 2006 #ifndef STRINGKERNEL_CPP #define STRINGKERNEL_CPP #include #include #include #include #include #include #include #include #include "stringkernel.h" StringKernel::StringKernel(): esa(0), weigher(0), val(0), lvs(0) {} /** * Construct string kernel given constructed enhanced suffix array. * * \param esa_ - ESA instance. */ StringKernel::StringKernel(ESA *esa_, int weightfn, Real param, int verb): esa(esa_), val(new Real[esa_->size + 1]), lvs(0), _verb(verb) { switch (weightfn) { case CONSTANT: weigher = new ConstantWeight(); break; case EXPDECAY: weigher = new ExpDecayWeight(param); break; case KSPECTRUM: weigher = new KSpectrumWeight(param); break; case BOUNDRANGE: weigher = new BoundedRangeWeight(param); break; default: weigher = new ConstantWeight(); //int nothing = 0; } } /** * Construct string kernel when given only text and its length. * * \param text - (IN) The text which SuffixArray and StringKernel correspond to. * \param text_length - (IN) The length of #_text#. * \param verb - (IN) Verbosity level. 
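* * A typical call sequence, mirroring the stringtv() entry point at the end of * this file (a sketch; CONSTANT weighting assumed): * StringKernel sk(text_size, (SYMBOL*)text, CONSTANT, 1.0, 0); * sk.Set_Lvs(); //' leaf weights lvs[i] = i * sk.PrecomputeVal(); //' one-off traversal filling val[] * Real k = 0.0; * sk.Compute_K((SYMBOL*)pattern, pattern_size, k); //' k(text, pattern)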
*/ StringKernel::StringKernel(const UInt32 &size, SYMBOL *text, int weightfn, Real param, int verb): lvs(0), _verb(verb) { // Build ESA. esa = new ESA(size, text, verb); // Allocate memory space for #val# val = new Real[esa->size + 1]; // Instantiate weigher. switch (weightfn) { case CONSTANT: weigher = new ConstantWeight(); break; case EXPDECAY: weigher = new ExpDecayWeight(param); break; case KSPECTRUM: weigher = new KSpectrumWeight(param); break; case BOUNDRANGE: weigher = new BoundedRangeWeight(param); break; default: weigher = new ConstantWeight(); //int nothing = 0; } } /** * StringKernel destructor. * */ StringKernel::~StringKernel() { //' Delete objects and release allocated memory space. if (esa) { delete esa; esa = 0; } if (val) { delete [] val; val = 0; } if (lvs) { delete [] lvs; lvs = 0; } if (weigher) { delete weigher; weigher = 0; } } /** * An iterative auxiliary function used in PrecomputeVal(). * * Note: Every lcp-interval can be represented by its first l-index. * Hence, 'val' is stored in val[] at the index := first l-index. * * Pre: val[] is initialised to 0. * * @param left Left bound of current interval * @param right Right bound of current interval */ void StringKernel::IterativeCompute(const UInt32 &left, const UInt32 &right) { //std::cout << "In IterativeCompute() " << std::endl; //' Variables queue<pair<UInt32, UInt32> > q; vector<pair<UInt32, UInt32> > childlist; pair<UInt32, UInt32> p; UInt32 lb = 0; UInt32 rb = 0; UInt32 floor_len = 0; UInt32 x_len = 0; Real cur_val = 0.0; Real edge_weight = 0.0; //' Step 1: At root, 0-[0..size-1]. Store all non-single child-intervals onto #q#. lb = left; //' Should be equal to 0. rb = right; //' Should be equal to size-1. esa->GetChildIntervals(lb, rb, childlist); for (UInt32 jj = 0; jj < childlist.size(); jj++) q.push(childlist[jj]); //' Step 2: Do breadth-first traversal. For every interval, compute val and add //' it to all its non-singleton child-intervals' val-entries in val[]. //' Start with child-interval [i..j] of 0-[0..size-1]. //' assert(j != size-1) while (!q.empty()) { //' Step 2.1: Get an interval from queue, #q#. p = q.front(); q.pop(); //' Step 2.2: Get the lcp of floor interval. UInt32 a = 0, b = 0; a = esa->lcptab[p.first]; //svnvish: BUGBUG // Glorious hack. We have to remove it later. // This gives the lcp of parent interval if (p.second < esa->size - 1) { b = esa->lcptab[p.second + 1]; } else { b = 0; } floor_len = (a > b) ? a : b; //' Step 2.3: Get the lcp of current interval. esa->GetLcp(p.first, p.second, x_len); //' Step 2.4: Compute val of current interval. weigher->ComputeWeight(floor_len, x_len, edge_weight); cur_val = edge_weight * (lvs[p.second + 1] - lvs[p.first]); //' Step 2.5: Add #cur_val# to val[]. UInt32 firstlIndex1 = 0; esa->childtab.l_idx(p.first, p.second, firstlIndex1); val[firstlIndex1] += cur_val; // std::cout << "p.first:"<<p.first<<" p.second:"<<p.second<<" cur_val:"<<cur_val<<std::endl; //' Step 2.6: Get the child-intervals of current interval. childlist.clear(); esa->GetChildIntervals(p.first, p.second, childlist); //' Step 2.7: (a) Add #cur_val# to child-intervals' val-entries in val[]. //' (b) Push child-interval onto #q#. for (UInt32 kk = 0; kk < childlist.size(); kk++) { //' (a) UInt32 firstlIndex2 = 0; pair<UInt32, UInt32> tmp_p = childlist[kk]; if (esa->text[esa->suftab[tmp_p.first]] == SENTINEL) continue; esa->childtab.l_idx(tmp_p.first, tmp_p.second, firstlIndex2); // assert( val[firstlIndex2] == 0 ); val[firstlIndex2] = val[firstlIndex1]; // cur_val; //' (b) q.push(make_pair(tmp_p.first, tmp_p.second)); } } //std::cout << "Out IterativeCompute() " << std::endl; } /** * Precomputation of val(t) of string kernel.
* Observation: Every internal node of a suffix tree can be represented by at * least one index of the corresponding lcp array. So, the val * of a node is stored in val[] at the index corresponding to that of * the first representative lcp value in lcp[]. */ void StringKernel::PrecomputeVal() { //' Memory space requirement check. assert(val != 0); //' Initialise all val entries to zero! memset(val, 0, sizeof(Real)*(esa->size + 1)); //' Start iterative precomputation of val[] IterativeCompute(0, esa->size - 1); } /** * Compute k(text,x) by performing Chang and Lawler's matching statistics collection * algorithm on the enhanced suffix array. * * \param x - (IN) The input string which is to be evaluated together with * the text in esa. * \param x_len - (IN) The length of #x#. * \param value - (OUT) The computed value of k(text,x). */ void StringKernel::Compute_K(SYMBOL *x, const UInt32 &x_len, Real &value) { //' Variables UInt32 floor_i = 0; UInt32 floor_j = 0; UInt32 i = 0; UInt32 j = 0; UInt32 lb = 0; UInt32 rb = 0; UInt32 matched_len = 0; UInt32 offset = 0; UInt32 floor_len = 0; UInt32 firstlIndex = 0; Real edge_weight = 0.0; //' Initialisation value = 0.0; lb = 0; rb = esa->size - 1; //' for each suffix, xprime[k..xprime_len-1], find longest match in text for (UInt32 k = 0; k < x_len; k++) { //' Step 1: Matching esa->ExactSuffixMatch(lb, rb, offset, &x[k], x_len - k, i, j, matched_len, floor_i, floor_j, floor_len); //' Step 2: Get suffix link for [floor_i..floor_j] esa->GetSuflink(floor_i, floor_j, lb, rb); assert((floor_j - floor_i) <= (rb - lb)); //' Range check //' Step 3: Compute contribution of this matched substring esa->childtab.l_idx(floor_i, floor_j, firstlIndex); assert(firstlIndex > floor_i && firstlIndex <= floor_j); assert(floor_len <= matched_len); weigher->ComputeWeight(floor_len, matched_len, edge_weight); value += val[firstlIndex] + edge_weight * (lvs[j + 1] - lvs[i]); // std::cout << "i:"<<i<<" j:"<<j<<" matched_len:"<<matched_len<<std::endl; //' Step 4: Prepare for the next suffix: by the matching statistics property //' the next match is at least matched_len-1 characters long, so skip them. offset = (matched_len > 0) ? matched_len - 1 : 0; } } /** * Assign leaf weights according to the position of each matched substring in * text, where text is a concatenation of m documents. * * \param leafWeight - (IN) The weight of each document. * \param len - (IN) The length of each document. * \param m - (IN) The number of documents. */ void StringKernel::Set_Lvs(const Real *leafWeight, const UInt32 *len, const UInt32 &m) { //' Clean up previous lvs, if any. if (lvs) { delete [] lvs; lvs = 0; } //' Compute cumulative document lengths. UInt32 pos = 0; UInt32 *clen = new UInt32[m]; partial_sum(len, len + m, clen); assert(clen[m-1] == esa->size); //' Allocate memory space for lvs[] lvs = new (nothrow) Real[esa->size + 1]; assert(lvs); //' Assign leaf weight to lvs element according to its position in text. for (UInt32 j = 0; j < esa->size; j++) { pos = esa->suftab[j]; UInt32 *p = upper_bound(clen, clen + m, pos); //' O(log n) lvs[j + 1] = leafWeight[p - clen]; } //' Compute cumulative lvs[]. To be used in matching statistics computation later. lvs[0] = 0.0; partial_sum(lvs, lvs + esa->size + 1, lvs); //chteo: [101006] delete [] clen; clen = 0; } /** * Set lvs[i] = i, for i = 0 to esa->size * Memory space for lvs[] will be allocated. */ void StringKernel::Set_Lvs() { //' Clean up previous lvs, if any. if (lvs) { delete [] lvs; lvs = 0; } //' Allocate memory space for lvs[] lvs = new (nothrow) Real[esa->size + 1]; //' Check if memory correctly allocated. assert(lvs != 0); //' Range := [0..esa->size] UInt32 localsize = esa->size; for (UInt32 i = 0; i <= localsize; i++) lvs[i] = i; } #endif #include <R.h> #include <Rdefines.h> #include <Rinternals.h> extern "C" { SEXP stringtv(SEXP rtext, // text document SEXP ltext, // list or vector of text documents to compute kvalues against SEXP nltext, // number of text documents in ltext SEXP vnchar, // number of characters in text SEXP vnlchar, // characters per document in ltext SEXP stype, // type of kernel SEXP param) // parameter for kernel { // R interface for text and list of text computation. Should return a vector of computed kernel values.
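// (stype arrives 1-based from the R side, presumably via .Call("stringtv", ...), // and is shifted by one below, so under the constructor's switch ordering // 1 = CONSTANT, 2 = EXPDECAY, 3 = KSPECTRUM and 4 = BOUNDRANGE.)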
// Construct ESASK UInt32 text_size = *INTEGER(vnchar); int number_ltext = *INTEGER(nltext); unsigned int *ltext_size = (unsigned int *) malloc (sizeof(unsigned int) * number_ltext); memcpy(ltext_size, INTEGER(vnlchar), number_ltext*sizeof(int)); int weightfn = *INTEGER(stype); const char *text = CHAR(STRING_ELT(rtext,0)); Real kparam = *REAL(param); double kVal; SEXP alpha; PROTECT(alpha = allocVector(REALSXP, number_ltext)); // Check if stringlength reported from R is correct if(strlen(text)!= text_size) text_size= strlen(text); StringKernel sk(text_size, (SYMBOL*)text, (weightfn - 1), kparam, 0); sk.Set_Lvs(); sk.PrecomputeVal(); for (int i=0; i<number_ltext; i++) { const char *pattern = CHAR(STRING_ELT(ltext, i)); sk.Compute_K((SYMBOL*)pattern, ltext_size[i], kVal); REAL(alpha)[i] = kVal; } free(ltext_size); UNPROTECT(1); return alpha; } } kernlab/src/datatype.h /* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the Suffix Array based String Kernel. * * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. * * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/DataType.h // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 // 11 Oct 2006 #ifndef DATATYPE_H #define DATATYPE_H // #define UInt32 unsigned int // #define UInt64 unsigned long long // #define Byte1 unsigned char // #define Byte2 unsigned short // #define Real double typedef unsigned int UInt32; // Seems that even using __extension__ g++ 4.6 will complain that // ISO C++ 1998 does not support 'long long' ... /* #if defined __GNUC__ && __GNUC__ >= 2 __extension__ typedef unsigned long long UInt64; #else typedef unsigned long long UInt64; #endif */ #include <stdint.h> typedef uint64_t UInt64; typedef unsigned char Byte1; typedef unsigned short Byte2; typedef double Real; // #define SENTINEL '\n' // #define SENTINEL2 '\0' const char SENTINEL = '\n'; const char SENTINEL2 = '\0'; #ifndef UNICODE // # define SYMBOL Byte1 typedef Byte1 SYMBOL; #else // # define SYMBOL Byte2 typedef Byte2 SYMBOL; #endif #endif kernlab/src/inductionsort.cpp0000644000175100001440000000264612234152620016112 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the MSufSort suffix sorting algorithm (Version 2.2). * * The Initial Developer of the Original Code is * Michael A. Maniscalco * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. * * Contributor(s): * * Michael A. Maniscalco * * ***** END LICENSE BLOCK ***** */ #include "inductionsort.h" InductionSortObject::InductionSortObject(unsigned int inductionPosition, unsigned int inductionValue, unsigned int suffixIndex) { // sort value is 64 bits long. // bits are ... // 63 - 60: induction position (0 - 15) // 59 - 30: induction value at induction position (0 - (2^30 - 1)) // 29 - 0: suffix index for the suffix sorted by induction (0 - (2^30) - 1) m_sortValue[0] = inductionPosition << 28; m_sortValue[0] |= ((inductionValue & 0x3fffffff) >> 2); m_sortValue[1] = (inductionValue << 30); m_sortValue[1] |= suffixIndex; } kernlab/src/wkasailcp.cpp0000644000175100001440000000452112234152620015156 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0.
If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the Suffix Array based String Kernel. * * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. * * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/W_kasai_lcp.cpp // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 // 11 Oct 2006 #ifndef W_KASAI_LCP_CPP #define W_KASAI_LCP_CPP #include "wkasailcp.h" #include <vector> /** * Compute LCP array. Algorithm adapted from Manzini's SWAT2004 paper. * Modification: array indexing changed from 1-based to 0-based. * * \param text - (IN) The text which corresponds to SA. * \param len - (IN) Length of text. * \param sa - (IN) Suffix array. * \param lcp - (OUT) Computed LCP array. */ ErrorCode W_kasai_lcp::ComputeLCP(const SYMBOL *text, const UInt32 &len, const UInt32 *sa, LCP& lcp) { //chteo: [111006:0141] //std::vector<UInt32> isa(len); UInt32 *isa = new UInt32[len]; //' Step 1: Compute inverse suffix array for(UInt32 i=0; i<len; i++) isa[sa[i]] = i; //' Step 2: Compute LCP values in O(n) time: walk the text positions in order, //' reusing the previous common prefix length h, which shrinks by at most one. UInt32 h = 0; lcp.Set(0, 0); for(UInt32 i=0; i<len; i++) { UInt32 k = isa[i]; if(k > 0) { UInt32 j = sa[k-1]; while(i+h < len && j+h < len && text[i+h] == text[j+h]) h++; lcp.Set(k, h); } if(h>0) h--; } //chteo: [111006:0141] delete [] isa; isa = 0; return NOERROR; } #endif kernlab/src/introsort.h0000644000175100001440000001560012234152620014710 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the MSufSort suffix sorting algorithm (Version 2.2). * * The Initial Developer of the Original Code is * Michael A. Maniscalco * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. * * Contributor(s): * * Michael A. Maniscalco * * ***** END LICENSE BLOCK ***** */ #ifndef TERNARY_INTRO_SORT_H #define TERNARY_INTRO_SORT_H //======================================================================// // Class: IntroSort // // // // Template based implementation of Introspective sorting algorithm // // using a ternary quicksort. // // // // Author: M.A. Maniscalco // // Date: January 20, 2005 // // // //======================================================================// // *** COMPILER WARNING DISABLED *** // Disable a warning which appears in MSVC // "conversion from '__w64 int' to ''" // Just plain annoying ... Restored at end of this file.
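// Usage sketch, assuming an element type providing <=, >= and == as noted // below: unsigned short keys[] = { 3, 1, 2 }; IntroSort(keys, 3); leaves // keys == { 1, 2, 3 }. Partition() runs a ternary quicksort, bailing out to // HeapSort() past MAX_DEPTH_BEFORE_HEAPSORT, and IntroSort() always finishes // with an InsertionSort() pass over the short runs Partition() leaves behind.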
#ifdef WIN32 #pragma warning (disable : 4244) #endif #define MIN_LENGTH_FOR_QUICKSORT 32 #define MAX_DEPTH_BEFORE_HEAPSORT 128 //===================================================================== // IntroSort class declaration // Notes: Any object used with this class must implement the following // operators: <=, >=, == //===================================================================== template <class T> void IntroSort(T * array, unsigned int count); template <class T> void Partition(T * left, unsigned int count, unsigned int depth = 0); template <class T> T SelectPivot(T value1, T value2, T value3); template <class T> void Swap(T * valueA, T * valueB); template <class T> void InsertionSort(T * array, unsigned int count); template <class T> void HeapSort(T * array, int length); template <class T> void HeapSort(T * array, int k, int N); template <class T> inline void IntroSort(T * array, unsigned int count) { // Public method used to invoke the sort. // Call quick sort partition method if there are enough // elements to warrant it or insertion sort otherwise. if (count >= MIN_LENGTH_FOR_QUICKSORT) Partition(array, count); InsertionSort(array, count); } template <class T> inline void Swap(T * valueA, T * valueB) { // do the ol' "switch-a-me-do" on two values. T temp = *valueA; *valueA = *valueB; *valueB = temp; } template <class T> inline T SelectPivot(T value1, T value2, T value3) { // middle of three method. if (value1 < value2) return ((value2 < value3) ? value2 : (value1 < value3) ? value3 : value1); return ((value1 < value3) ? value1 : (value2 < value3) ? value3 : value2); } template <class T> inline void Partition(T * left, unsigned int count, unsigned int depth) { if (++depth > MAX_DEPTH_BEFORE_HEAPSORT) { // If enough recursion has happened then we bail to heap sort since it looks // as if we are experiencing a 'worst case' for quick sort. This should not // happen very often at all. HeapSort(left, count); return; } T * right = left + count - 1; T * startingLeft = left; T * startingRight = right; T * equalLeft = left; T * equalRight = right; // select the pivot value. T pivot = SelectPivot(left[0], right[0], left[((right - left) >> 1)]); // do three way partitioning. do { while ((left < right) && (*left <= pivot)) if (*(left++) == pivot) Swap(equalLeft++, left - 1); // equal to pivot value. move to far left. while ((left < right) && (*right >= pivot)) if (*(right--) == pivot) Swap(equalRight--, right + 1); // equal to pivot value. move to far right. if (left >= right) { if (left == right) { if (*left >= pivot) left--; if (*right <= pivot) right++; } else { left--; right++; } break; // done partitioning } // left and right are ready for swapping Swap(left++, right--); } while (true); // move values that were equal to pivot from the far left into the middle. // these values are now placed in their final sorted position. if (equalLeft > startingLeft) while (equalLeft > startingLeft) Swap(--equalLeft, left--); // move values that were equal to pivot from the far right into the middle. // these values are now placed in their final sorted position. if (equalRight < startingRight) while (equalRight < startingRight) Swap(++equalRight, right++); // Calculate new partition sizes ... unsigned int leftSize = left - startingLeft + 1; unsigned int rightSize = startingRight - right + 1; // Partition left (less than pivot) if there are enough values to warrant it // otherwise do insertion sort on the values.
    if (leftSize >= MIN_LENGTH_FOR_QUICKSORT)
        Partition(startingLeft, leftSize, depth);

    // Partition right (greater than pivot) if there are enough values to warrant it
    // otherwise do insertion sort on the values.
    if (rightSize >= MIN_LENGTH_FOR_QUICKSORT)
        Partition(right, rightSize, depth);
}

template <class T>
inline void InsertionSort(T * array, unsigned int count)
{
    // A basic insertion sort.
    if (count < 3)
    {
        if ((count == 2) && (array[0] > array[1]))
            Swap(array, array + 1);
        return;
    }

    T * ptr2, * ptr3 = array + 1, * ptr4 = array + count;

    if (array[0] > array[1])
        Swap(array, array + 1);

    while (true)
    {
        while ((++ptr3 < ptr4) && (ptr3[0] >= ptr3[-1]));

        if (ptr3 >= ptr4)
            break;

        if (ptr3[-2] <= ptr3[0])
        {
            if (ptr3[-1] > ptr3[0])
                Swap(ptr3, ptr3 - 1);
        }
        else
        {
            ptr2 = ptr3 - 1;
            T v = *ptr3;
            while ((ptr2 >= array) && (ptr2[0] > v))
            {
                ptr2[1] = ptr2[0];
                ptr2--;
            }
            ptr2[1] = v;
        }
    }
}

template <class T>
inline void HeapSort(T * array, int length)
{
    // A basic heapsort.
    for (int k = length >> 1; k > 0; k--)
        HeapSort(array, k, length);

    do
    {
        Swap(array, array + (--length));
        HeapSort(array, 1, length);
    } while (length > 1);
}

template <class T>
inline void HeapSort(T * array, int k, int N)
{
    // A basic heapsort (sift-down of element k within the first N entries).
    T temp = array[k - 1];
    int n = N >> 1;
    int j = (k << 1);

    while (k <= n)
    {
        if ((j < N) && (array[j - 1] < array[j]))
            j++;
        if (temp >= array[j - 1])
            break;
        else
        {
            array[k - 1] = array[j - 1];
            k = j;
            j <<= 1;
        }
    }
    array[k - 1] = temp;
}

// Restore the default warning which appears in MSVC for
// warning #4244 which was disabled at top of this file.
#ifdef WIN32
#pragma warning (default : 4244)
#endif

#endif
kernlab/src/ctable.cpp0000644000175100001440000000661712234152620014442 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 2.0
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/.
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is the Suffix Array based String Kernel.
 *
 * The Initial Developer of the Original Code is
 * Statistical Machine Learning Program (SML), National ICT Australia (NICTA).
 * Portions created by the Initial Developer are Copyright (C) 2006
 * the Initial Developer. All Rights Reserved.
 *
 * Contributor(s):
 *
 *   Choon Hui Teo <ChoonHui.Teo@rsise.anu.edu.au>
 *   S V N Vishwanathan <SVN.Vishwanathan@nicta.com.au>
 *
 * ***** END LICENSE BLOCK ***** */


// File    : sask/Code/ChildTable.cpp
//
// Authors : Choon Hui Teo      (ChoonHui.Teo@rsise.anu.edu.au)
//           S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au)
//
// Created : 09 Feb 2006
//
// Updated : 24 Apr 2006


#ifndef CTABLE_CPP
#define CTABLE_CPP

#include "ctable.h"
#include <cassert>

/**
 *  Return the value of idx-th "up" field of child table.
 *    val = childtab[idx - 1];
 *
 *  \param idx - (IN)  The index of child table.
 *  \param val - (OUT) The value of idx-th entry in child table's "up" field.
 */
ErrorCode
ChildTable::up(const UInt32 &idx, UInt32 &val){

  if(idx == size()) {
    // Special case: To get the first 0-index
    val = (*this)[idx-1];
    return NOERROR;
  }

  // svnvish: BUGBUG
  // Do we need to do this in production code?
  UInt32 lcp_idx = 0, lcp_prev_idx = 0;
  lcp_idx = _lcptab[idx];
  lcp_prev_idx = _lcptab[idx-1];
  assert(lcp_prev_idx > lcp_idx);

  val = (*this)[idx-1];
  return NOERROR;
}

/**
 *  Return the value of idx-th "down" field of child table. Deprecated.
 *  Instead use val = childtab[idx];
 *
 *  \param idx - (IN)  The index of child table.
 *  \param val - (OUT) The value of idx-th entry in child table's "down" field.
 */
ErrorCode
ChildTable::down(const UInt32 &idx, UInt32 &val){

  // For a l-interval, l-[i..j], childtab[i].down == childtab[j+1].up
  // If l-[i..j] is last child-interval of its parent OR 0-[0..n],
  //   childtab[i].nextlIndex == childtab[i].down

  // svnvish: BUGBUG
  // Do we need to do this in production code?
  // UInt32 lcp_idx = 0, lcp_nextidx = 0;
  // lcp_nextidx = _lcptab[(*this)[idx]];
  // lcp_idx = _lcptab[idx];
  // assert(lcp_nextidx > lcp_idx);

  // childtab[i].down := childtab[i].nextlIndex
  val = (*this)[idx];
  return NOERROR;
}

/**
 *  Return the first l-index of a given l-[i..j] interval.
 *
 *  \param i   - (IN)  Left bound of l-[i..j]
 *  \param j   - (IN)  Right bound of l-[i..j]
 *  \param idx - (OUT) The first l-index.
 */
ErrorCode
ChildTable::l_idx(const UInt32 &i, const UInt32 &j, UInt32 &idx){

  UInt32 up = (*this)[j];
  if(i < up && up <= j){
    idx = up;
  }else {
    idx = (*this)[i];
  }
  return NOERROR;
}

/**
 *  Dump array elements to output stream
 *
 *  \param os - (IN) Output stream.
 *  \param ct - (IN) ChildTable object.
 */
std::ostream& operator << (std::ostream& os, const ChildTable& ct){
  for( UInt32 i = 0; i < ct.size(); i++ ){
    os << "ct[ " << i << "]: " << ct[i] << std::endl;
  }
  return os;
}
#endif
kernlab/src/misc.c0000644000175100001440000000055311304023134013576 0ustar hornikusers#include <stdlib.h>
#include <math.h>

void *xmalloc(size_t size)
{
    void *ptr = (void *) malloc(size);
    return ptr;
}

double mymax(double a, double b)
{
    if (a > b) return a;
    return b;
}

double mymin(double a, double b)
{
    if (a < b) return a;
    return b;
}

double sign(double a, double b)
{
    if (b >= 0) return fabs(a);
    return -fabs(a);
}
kernlab/src/kspectrumweight.h0000644000175100001440000000326212234152620016073 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 2.0
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/.
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is the Suffix Array based String Kernel.
 *
 * The Initial Developer of the Original Code is
 * Statistical Machine Learning Program (SML), National ICT Australia (NICTA).
 * Portions created by the Initial Developer are Copyright (C) 2006
 * the Initial Developer. All Rights Reserved.
* * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/KSpectrumWeight.h // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 // 12 Jul 2006 #ifndef KSPECTRUMWEIGHT_H #define KSPECTRUMWEIGHT_H #include "datatype.h" #include "errorcode.h" #include "iweightfactory.h" #include //' K-spectrum weight class class KSpectrumWeight : public I_WeightFactory { Real k; public: /// Constructor KSpectrumWeight(const Real & k_=5.0):k(k_) {} /// Destructor virtual ~KSpectrumWeight(){} /// Compute weight ErrorCode ComputeWeight(const UInt32 &floor_len, const UInt32 &x_len, Real &weight); }; #endif kernlab/src/wmsufsort.cpp0000644000175100001440000000442512234152620015254 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the Suffix Array based String Kernel. * * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. * * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/W_msufsort.cpp // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 //' Wrapper for Michael Maniscalco's MSufSort version 2.2 algorithm #ifndef W_MSUFSORT_CPP #define W_MSUFSORT_CPP #include #include #include #include "wmsufsort.h" W_msufsort::W_msufsort() { msuffixsorter = new MSufSort(); } W_msufsort::~W_msufsort() { delete msuffixsorter; } /** * Construct Suffix Array using Michael Maniscalco's algorithm * * \param _text - (IN) The text which resultant SA corresponds to. * \param _len - (IN) The length of the text. * \param _sa - (OUT) Suffix array instance. */ ErrorCode W_msufsort::ConstructSA(SYMBOL *text, const UInt32 &len, UInt32 *&array){ //' A temporary copy of text SYMBOL *text_copy = new SYMBOL[len]; //' chteo: BUGBUG //' redundant? assert(text_copy != NULL); memcpy(text_copy, text, sizeof(SYMBOL) * len); msuffixsorter->Sort(text_copy, len); //' Code adapted from MSufSort::verifySort() for (UInt32 i = 0; i < len; i++) { UInt32 tmp = msuffixsorter->ISA(i)-1; array[tmp] = i; } //' Deallocate the memory allocated for #text_copy# delete [] text_copy; return NOERROR; } #endif kernlab/src/dgpstep.c0000644000175100001440000000275111304023134014303 0ustar hornikusersvoid dgpstep(int n, double *x, double *xl, double *xu, double alpha, double *w, double *s) { /* c ********** c c Subroutine dgpstep c c This subroutine computes the gradient projection step c c s = P[x + alpha*w] - x, c c where P is the projection on the n-dimensional interval [xl,xu]. c c parameters: c c n is an integer variable. c On entry n is the number of variables. c On exit n is unchanged. c c x is a double precision array of dimension n. 
c         On entry x specifies the vector x.
c         On exit x is unchanged.
c
c       xl is a double precision array of dimension n.
c         On entry xl is the vector of lower bounds.
c         On exit xl is unchanged.
c
c       xu is a double precision array of dimension n.
c         On entry xu is the vector of upper bounds.
c         On exit xu is unchanged.
c
c       alpha is a double precision variable.
c         On entry alpha specifies the scalar alpha.
c         On exit alpha is unchanged.
c
c       w is a double precision array of dimension n.
c         On entry w specifies the vector w.
c         On exit w is unchanged.
c
c       s is a double precision array of dimension n.
c         On entry s need not be specified.
c         On exit s contains the gradient projection step.
c
c     **********
*/
	int i;

	/* Componentwise: clip x[i] + alpha*w[i] back to [xl[i], xu[i]]
	   and report the resulting step relative to x[i]. */
	for (i=0;i<n;i++) {
		if (x[i] + alpha*w[i] < xl[i])
			s[i] = xl[i] - x[i];
		else if (x[i] + alpha*w[i] > xu[i])
			s[i] = xu[i] - x[i];
		else
			s[i] = alpha*w[i];
	}
}
kernlab/src/stringkernel.h0000644000175100001440000000542612761213650015367 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 2.0
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/.
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is the Suffix Array based String Kernel.
 *
 * The Initial Developer of the Original Code is
 * Statistical Machine Learning Program (SML), National ICT Australia (NICTA).
 * Portions created by the Initial Developer are Copyright (C) 2006
 * the Initial Developer. All Rights Reserved.
 *
 * Contributor(s):
 *
 *   Choon Hui Teo <ChoonHui.Teo@rsise.anu.edu.au>
 *   S V N Vishwanathan <SVN.Vishwanathan@nicta.com.au>
 *
 * ***** END LICENSE BLOCK ***** */


// File    : sask/Code/StringKernel.h
//
// Authors : Choon Hui Teo      (ChoonHui.Teo@rsise.anu.edu.au)
//           S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au)
//
// Created : 09 Feb 2006
//
// Updated : 24 Apr 2006
//           12 Jul 2006
//           10 Aug 2006


#ifndef STRINGKERNEL_H
#define STRINGKERNEL_H

#include "datatype.h"
#include "errorcode.h"
#include "esa.h"
#include "isafactory.h"
#include "ilcpfactory.h"
#include "iweightfactory.h"
//#include "W_msufsort.h"
#include "wkasailcp.h"
#include "cweight.h"
#include "expdecayweight.h"
#include "brweight.h"
#include "kspectrumweight.h"

//' Types of substring weighting functions
enum WeightFunction{CONSTANT, EXPDECAY, KSPECTRUM, BOUNDRANGE};

using namespace std;

class StringKernel {

 public:
  /// Variables
  ESA              *esa;
  I_WeightFactory  *weigher;
  Real             *val;  //' val array. Storing precomputed val(t) values.
  Real             *lvs;  //' leaves array. Storing weights for leaves.
/// Constructors StringKernel(); //' Given contructed suffix array StringKernel(ESA *esa_, int weightfn, Real param, int verb=INFO); //' Given text, build suffix array for it StringKernel(const UInt32 &size, SYMBOL *text, int weightfn, Real param, int verb=INFO); /// Destructor virtual ~StringKernel(); //' Methods /// Precompute the contribution of each intervals (or internal nodes) void PrecomputeVal(); /// Compute Kernel matrix void Compute_K(SYMBOL *xprime, const UInt32 &xprime_len, Real &value); /// Set leaves array, lvs[] void Set_Lvs(const Real *leafWeight, const UInt32 *len, const UInt32 &m); /// Set leaves array as lvs[i]=i for i=0 to esa->length void Set_Lvs(); private: int _verb; /// An iterative auxiliary function used in PrecomputeVal() void IterativeCompute(const UInt32 &left, const UInt32 &right); }; #endif kernlab/src/msufsort.cpp0000644000175100001440000002410012774377717015106 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the MSufSort suffix sorting algorithm (Version 2.2). * * The Initial Developer of the Original Code is * Michael A. Maniscalco * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. * * Contributor(s): * * Michael A. Maniscalco * * ***** END LICENSE BLOCK ***** */ #include "msufsort.h" #include #include #include #include //============================================================================= // MSufSort. //============================================================================= SYMBOL_TYPE MSufSort::m_reverseAltSortOrder[256]; // chteo: Changed the member initialisation order to get rid of compilation warning [181006] // MSufSort::MSufSort():m_ISA(0), m_chainHeadStack(8192, 0x20000, true), m_suffixesSortedByInduction(120000, 1000000, true), // m_chainMatchLengthStack(8192, 0x10000, true), m_chainCountStack(8192, 0x10000, true) MSufSort::MSufSort():m_chainMatchLengthStack(8192, 0x10000, true), m_chainCountStack(8192, 0x10000, true), m_chainHeadStack(8192, 0x20000, true), m_ISA(0), m_suffixesSortedByInduction(120000, 1000000, true) { // constructor. unsigned char array[10] = {'a', 'e', 'i', 'o', 'u', 'A', 'E', 'I', 'O', 'U'}; int n = 0; for (; n < 10; n++) { m_forwardAltSortOrder[array[n]] = n; m_reverseAltSortOrder[n] = array[n]; } for (int i = 0; i < 256; i++) { bool unresolved = true; for (int j = 0; j < 10; j++) if (array[j] == i) unresolved = false; if (unresolved) { m_forwardAltSortOrder[i] = n; m_reverseAltSortOrder[n++] = i; } } } MSufSort::~MSufSort() { // destructor. // delete the inverse suffix array if allocated. 
if (m_ISA) delete [] m_ISA; m_ISA = 0; } void MSufSort::ReverseAltSortOrder(SYMBOL_TYPE * data, unsigned int nBytes) { #ifndef SORT_16_BIT_SYMBOLS for (unsigned int i = 0; i < nBytes; i++) data[i] = m_reverseAltSortOrder[data[i]]; #endif } unsigned int MSufSort::GetElapsedSortTime() { return m_sortTime; } unsigned int MSufSort::GetMemoryUsage() { /* unsigned int ret = 5 * m_sourceLength; ret += (m_chainStack.m_stackSize * 4); ret += (m_suffixesSortedByInduction.m_stackSize * 8); ret += sizeof(*this); */ return 0; } unsigned int MSufSort::Sort(SYMBOL_TYPE * source, unsigned int sourceLength) { ///tch: //printf("\nIn MSufSort::Sort()\n"); // set the member variables to the source string and its length. m_source = source; m_sourceLength = sourceLength; m_sourceLengthMinusOne = sourceLength - 1; Initialize(); unsigned int start = clock(); InitialSort(); while (m_chainHeadStack.Count()) ProcessNextChain(); while (m_currentSuffixChainId <= 0xffff) ProcessSuffixesSortedByEnhancedInduction(m_currentSuffixChainId++); unsigned int finish = clock(); m_sortTime = finish - start; ///tch: //printf("\nFinished MSufSort::Sort()\nPress any key to continue...\n"); //printf("%s\n",m_source); //system("pause"); //getchar(); // printf(" %c", 13); return ISA(0); } void MSufSort::Initialize() { // Initializes this object just before sorting begins. if (m_ISA) delete [] m_ISA; m_ISA = new unsigned int[m_sourceLength + 1]; memset(m_ISA, 0, sizeof(unsigned int) * (m_sourceLength + 1)); m_nextSortedSuffixValue = 0; m_numSortedSuffixes = 0; m_suffixMatchLength = 0; m_currentSuffixChainId = 0; m_tandemRepeatDepth = 0; m_firstSortedTandemRepeat = END_OF_CHAIN; m_hasTandemRepeatSortedByInduction = false; m_hasEvenLengthTandemRepeats = false; m_firstUnsortedTandemRepeat = END_OF_CHAIN; for (unsigned int i = 0; i < 0x10000; i++) m_startOfSuffixChain[i] = m_endOfSuffixChain[i] = m_firstSuffixByEnhancedInductionSort[i] = END_OF_CHAIN; for (unsigned int i = 0; i < 0x10000; i++) m_firstSortedPosition[i] = 0; m_numNewChains = 0; #ifdef SHOW_PROGRESS m_progressUpdateIncrement = (unsigned int)(m_sourceLength / 100); m_nextProgressUpdate = 1; #endif } void MSufSort::InitialSort() { // This is the first sorting pass which makes the initial suffix // chains from the given source string. Pushes these chains onto // the stack for further sorting. 
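// (Descriptive note on the pass below: when USE_ENHANCED_INDUCTION_SORTING
// is defined, the text is scanned right to left and each suffix is bucketed
// by its two-symbol prefix, Value16(), into a linked chain threaded through
// m_ISA.  Suffixes that the useEIS test recognises as sortable later by
// induction from an already sorted neighbour are not chained at all; they
// are tagged SORTED_BY_ENHANCED_INDUCTION instead, which keeps the chains
// that need explicit sorting short.  m_firstSortedPosition accumulates the
// bucket counts, which the prefix-sum loop at the end of this function
// turns into the starting rank of each bucket in the final order.)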
#ifndef SORT_16_BIT_SYMBOLS #ifdef USE_ALT_SORT_ORDER for (unsigned int suffixIndex = 0; suffixIndex < m_sourceLength; suffixIndex++) m_source[suffixIndex] = m_forwardAltSortOrder[m_source[suffixIndex]]; #endif #endif #ifdef USE_ENHANCED_INDUCTION_SORTING m_ISA[m_sourceLength - 1] = m_ISA[m_sourceLength - 2] = SORTED_BY_ENHANCED_INDUCTION; m_firstSortedPosition[Value16(m_sourceLength - 1)]++; m_firstSortedPosition[Value16(m_sourceLength - 2)]++; for (int suffixIndex = m_sourceLength - 3; suffixIndex >= 0; suffixIndex--) { unsigned short symbol = Value16(suffixIndex); m_firstSortedPosition[symbol]++; #ifdef SORT_16_BIT_SYMBOLS unsigned short valA = ENDIAN_SWAP_16(m_source[suffixIndex]); unsigned short valB = ENDIAN_SWAP_16(m_source[suffixIndex + 1]); if ((suffixIndex == m_sourceLengthMinusOne) || (valA > valB)) m_ISA[suffixIndex] = SORTED_BY_ENHANCED_INDUCTION; else AddToSuffixChain(suffixIndex, symbol); #else bool useEIS = false; if ((m_source[suffixIndex] > m_source[suffixIndex + 1]) || ((m_source[suffixIndex] < m_source[suffixIndex + 1]) && (m_source[suffixIndex] > m_source[suffixIndex + 2]))) useEIS = true; if (!useEIS) { if (m_endOfSuffixChain[symbol] == END_OF_CHAIN) { m_endOfSuffixChain[symbol] = m_startOfSuffixChain[symbol] = suffixIndex; m_newChainIds[m_numNewChains++] = ENDIAN_SWAP_16(symbol); } else { m_ISA[suffixIndex] = m_startOfSuffixChain[symbol]; m_startOfSuffixChain[symbol] = suffixIndex; } } else m_ISA[suffixIndex] = SORTED_BY_ENHANCED_INDUCTION; #endif } #else for (unsigned int suffixIndex = 0; suffixIndex < m_sourceLength; suffixIndex++) { unsigned short symbol = Value16(suffixIndex); AddToSuffixChain(suffixIndex, symbol); } #endif #ifdef USE_ENHANCED_INDUCTION_SORTING unsigned int n = 1; for (unsigned int i = 0; i < 0x10000; i++) { unsigned short p = ENDIAN_SWAP_16(i); unsigned int temp = m_firstSortedPosition[p]; if (temp) { m_firstSortedPosition[p] = n; n += temp; } } #endif MarkSuffixAsSorted(m_sourceLength, m_nextSortedSuffixValue); PushNewChainsOntoStack(true); } void MSufSort::ResolveTandemRepeatsNotSortedWithInduction() { unsigned int tandemRepeatLength = m_suffixMatchLength - 1; unsigned int startOfFinalList = END_OF_CHAIN; while (m_firstSortedTandemRepeat != END_OF_CHAIN) { unsigned int stopLoopAtIndex = startOfFinalList; m_ISA[m_lastSortedTandemRepeat] = startOfFinalList; startOfFinalList = m_firstSortedTandemRepeat; unsigned int suffixIndex = m_firstSortedTandemRepeat; m_firstSortedTandemRepeat = END_OF_CHAIN; while (suffixIndex != stopLoopAtIndex) { if ((suffixIndex >= tandemRepeatLength) && (m_ISA[suffixIndex - tandemRepeatLength] == suffixIndex)) { if (m_firstSortedTandemRepeat == END_OF_CHAIN) m_firstSortedTandemRepeat = m_lastSortedTandemRepeat = (suffixIndex - tandemRepeatLength); else m_lastSortedTandemRepeat = (m_ISA[m_lastSortedTandemRepeat] = (suffixIndex - tandemRepeatLength)); } suffixIndex = m_ISA[suffixIndex]; } } m_tandemRepeatDepth--; if (!m_tandemRepeatDepth) { while (startOfFinalList != END_OF_CHAIN) { unsigned int next = m_ISA[startOfFinalList]; MarkSuffixAsSorted(startOfFinalList, m_nextSortedSuffixValue); startOfFinalList = next; } } else { m_firstSortedTandemRepeat = startOfFinalList; } } unsigned int MSufSort::ISA(unsigned int index) { return (m_ISA[index] & 0x3fffffff); } int MSufSort::CompareStrings(SYMBOL_TYPE * stringA, SYMBOL_TYPE * stringB, int len) { #ifdef SORT_16_BIT_SYMBOLS while (len) { unsigned short valA = ENDIAN_SWAP_16(stringA[0]); unsigned short valB = ENDIAN_SWAP_16(stringB[0]); if (valA > valB) return 1; if (valA < 
valB) return -1; stringA++; stringB++; len--; } #else while (len) { if (stringA[0] > stringB[0]) return 1; if (stringA[0] < stringB[0]) return -1; stringA++; stringB++; len--; } #endif return 0; } bool MSufSort::VerifySort() { //printf("\n\nVerifying sort\n\n"); bool error = false; int progressMax = m_sourceLength; int progressValue = 0; int progressUpdateStep = progressMax / 100; int nextProgressUpdate = 1; unsigned int * suffixArray = new unsigned int[m_sourceLength]; for (unsigned int i = 0; ((!error) && (i < m_sourceLength)); i++) { if (!(m_ISA[i] & 0x80000000)) error = true; unsigned int n = (m_ISA[i] & 0x3fffffff) - 1; suffixArray[n] = i; } // all ok so far. // now compare the suffixes in lexicographically sorted order to confirm the sort was good. for (unsigned int suffixIndex = 0; ((!error) && (suffixIndex < (m_sourceLength - 1))); suffixIndex++) { if (++progressValue == nextProgressUpdate) { nextProgressUpdate += progressUpdateStep; //printf("Verify sort: %.2f%% complete%c", ((double)progressValue / progressMax) * 100, 13); } SYMBOL_TYPE * ptrA = &m_source[suffixArray[suffixIndex]]; SYMBOL_TYPE * ptrB = &m_source[suffixArray[suffixIndex + 1]]; int maxLen = (ptrA < ptrB) ? m_sourceLength - (ptrB - m_source) : m_sourceLength - (ptrA - m_source); int c = CompareStrings(ptrA, ptrB, maxLen); if (c > 0) error = true; else if ((c == 0) && (ptrB > ptrA)) error = true; } //printf(" %c", 13); delete [] suffixArray; return !error; } kernlab/src/stringk.c0000644000175100001440000001100611304023134014307 0ustar hornikusers#include #include #include #include #include #include #include #include #include #include double ***cache ; double kaux (const char *u, int p, const char *v, int q, int n, double lambda) { register int j; double tmp; /* case 1: if a full substring length is processed, return*/ if (n == 0) return (1.0); /* check, if the value was already computed in a previous computation */ if (cache [n] [p] [q] != -1.0) return (cache [n] [p] [q]); /* case 2: at least one substring is to short */ if (p < n || q < n) return (0.0); /* case 3: recursion */ for (j= 0, tmp = 0; j < q; j++) { if (v [j] == u [p - 1]) tmp += kaux (u, p - 1, v, j, n - 1, lambda) * pow (lambda, (float) (q - j + 1)); } cache [n] [p] [q] = lambda * kaux (u, p - 1, v, q, n, lambda) + tmp; return (cache [n] [p] [q]); } double seqk (const char *u, int p, const char *v, int q, int n, double lambda) { register int j; double kp; /* the simple case: (at least) one string is to short */ if (p < n || q < n) return (0.0); /* the recursion: use kaux for the t'th substrings*/ for (j = 0, kp = 0.0; j < q; j++) { if (v [j] == u [p - 1]) kp += kaux (u, p - 1, v, j, n - 1, lambda) * lambda * lambda; } return (seqk (u, p - 1, v, q, n, lambda) + kp); } /* recursively computes the subsequence kernel between s1 and s2 where subsequences of exactly length n are considered */ SEXP subsequencek(SEXP s1, SEXP s2, SEXP l1, SEXP l2, SEXP nr, SEXP lambdar) { const char *u = CHAR(STRING_ELT(s1, 0)); const char *v = CHAR(STRING_ELT(s2, 0)); int p = *INTEGER(l1); int q = *INTEGER(l2); int n = *INTEGER(nr); double lambda = *REAL(lambdar); int i, j, k; SEXP ret; /* allocate memory for auxiallary cache variable */ cache = (double ***) malloc (n * sizeof (double **)); for (i = 1; i < n; i++) { cache [i] = (double **) malloc (p * sizeof (double *)); for (j = 0; j < p; j++) { cache [i] [j] = (double *) malloc (q * sizeof (double)); for (k = 0; k < q; k++) cache [i] [j] [k] = -1.0; } } PROTECT(ret = allocVector(REALSXP, 1)); /* invoke recursion */ 
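	/* Worked example of what the recursion returns (illustrative): for
	   u = "cat", v = "car" and n = 2 the only common subsequence of
	   length 2 is "ca".  It spans two symbols in each string, so each
	   occurrence contributes a factor lambda^2, and
	   seqk("cat", 3, "car", 3, 2, lambda) evaluates to lambda^4,
	   i.e. 0.0625 for lambda = 0.5. */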
REAL(ret)[0] = seqk (u, p, v, q, n, lambda); /* free memory */ for (i = 1; i < n; i++) { for (j = 0; j < p; j++) free (cache [i] [j]); free (cache [i]); } free (cache); UNPROTECT(1); return (ret); } /* computes the substring kernel between s1 and s2 where substrings up to length n are considered */ SEXP fullsubstringk (SEXP s1, SEXP s2, SEXP l1, SEXP l2, SEXP nr, SEXP lambdar) { const char *u = CHAR(STRING_ELT(s1, 0)); const char *v = CHAR(STRING_ELT(s2, 0)); int p = *INTEGER(l1); int q = *INTEGER(l2); int n = *INTEGER(nr); double lambda = *REAL(lambdar); register int i, j, k; double ret, tmp; SEXP retk; /* computes the substring kernel */ for (ret = 0.0, i = 0; i < p; i++) { for (j = 0; j < q; j++) if (u [i] == v [j]) { for (k = 0, tmp = lambda * lambda; /* starting condition */ (i + k < p) && (j + k < q) && (u [i + k] == v [j + k]) && (k < n); /* stop conditions */ k++, tmp *= (lambda * lambda)) /* update per iteration */ ret += tmp; } } PROTECT(retk = allocVector(REALSXP, 1)); REAL(retk)[0] = ret; UNPROTECT(1); return (retk); } /* computes the substring kernel between s1 and s2 where substrings of exactly length n are considered */ SEXP substringk (SEXP s1, SEXP s2, SEXP l1, SEXP l2, SEXP nr, SEXP lambdar) { const char *u = CHAR(STRING_ELT(s1, 0)); const char *v = CHAR(STRING_ELT(s2, 0)); int p = *INTEGER(l1); int q = *INTEGER(l2); int n = *INTEGER(nr); double lambda = *REAL(lambdar); SEXP retk; register int i, j, k; double ret, tmp; /* computes the substring kernel */ for (ret = 0.0, i = 0; i < p; i++) { for (j = 0; j < q; j++) { for (k = 0, tmp = lambda * lambda; /* starting condition */ (i + k < p) && (j + k < q) && (u [i + k] == v [j + k]) && (k < n); /* stop conditions */ k++, tmp *= (lambda * lambda)); /* update per iteration */ if (k == n) ret += tmp; /* update features in case of full match */ } } PROTECT(retk = allocVector(REALSXP, 1)); REAL(retk)[0] = ret; UNPROTECT(1); return (retk); } kernlab/src/kspectrumweight.cpp0000644000175100001440000000652312234152620016431 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the Suffix Array based String Kernel. * * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. * * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/KSpectrumWeight.cpp // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 // 12 Jul 2006 #ifndef KSPECTRUMWEIGHT_CPP #define KSPECTRUMWEIGHT_CPP #include "kspectrumweight.h" #include /** * K-spectrum weight function. Compute number of common (exactly) k character substring. * * \param floor_len - (IN) Length of floor interval of matched substring. (cf. gamma in VisSmo02). * \param x_len - (IN) Length of the matched substring. (cf. tau in VisSmo02). 
* \param weight - (OUT) The weight value. * */ ErrorCode KSpectrumWeight::ComputeWeight(const UInt32 &floor_len, const UInt32 &x_len, Real &weight) { //' Input validation assert(x_len >= floor_len); //' x_len == floor_len when the substring found ends on an interval. weight = 0.0; if(floor_len < k && x_len >= k) weight = 1.0; // std::cout << "floor_len : " << floor_len // << " x_len : " << x_len // << " weight : " << weight << std::endl; return NOERROR; } #endif //' Question: Why return only 0 or 1? //' Answer : In k-spectrum method, any length of matched substring other than k //' does not play a significant role in the string kernel. So, returning 1 //' means that the substring weight equals to # of suffix in the current interval. //' When 0 is returned, it means that substring weight equals to the floor //' interval entry in val[]. (See the definition of substring weight in //' StringKernel.cpp) //' Question: Why is the following a correct implementation of k-spectrum ? //' Answer : [Val precomputation phase] Every Interval with lcp < k has val := 0. //' For intervals with (lcp==k) or (lcp>k but floor_lcp= k but floor interval //' has val := 0 (floor_lcp < k). Hence, returning weight:=1 will make substring //' weight equals to the size of the immediate ceil interval (# of substring in common). kernlab/vignettes/0000755000175100001440000000000014366221257013726 5ustar hornikuserskernlab/vignettes/A.cls0000644000175100001440000001273612055335060014611 0ustar hornikusers\def\fileversion{1.0} \def\filename{A} \def\filedate{2004/10/08} %% %% \NeedsTeXFormat{LaTeX2e} \ProvidesClass{A}[\filedate\space\fileversion\space A class ] %% options \LoadClass[10pt,a4paper,twoside]{article} \newif\if@notitle \@notitlefalse \DeclareOption{notitle}{\@notitletrue} \ProcessOptions %% required packages \RequirePackage{graphicx,a4wide,color,hyperref,ae,fancyvrb,thumbpdf} \RequirePackage[T1]{fontenc} \usepackage[authoryear,round,longnamesfirst]{natbib} \bibpunct{(}{)}{;}{a}{}{,} \bibliographystyle{jss} %% paragraphs \setlength{\parskip}{0.7ex plus0.1ex minus0.1ex} \setlength{\parindent}{0em} %% commands \let\code=\texttt \let\proglang=\textsf \newcommand{\pkg}[1]{{\normalfont\fontseries{b}\selectfont #1}} \newcommand{\email}[1]{\href{mailto:#1}{\normalfont\texttt{#1}}} \newcommand{\E}{\mathsf{E}} \newcommand{\VAR}{\mathsf{VAR}} \newcommand{\COV}{\mathsf{COV}} \newcommand{\Prob}{\mathsf{P}} %% for all publications \newcommand{\Plaintitle}[1]{\def\@Plaintitle{#1}} \newcommand{\Shorttitle}[1]{\def\@Shorttitle{#1}} \newcommand{\Plainauthor}[1]{\def\@Plainauthor{#1}} \newcommand{\Keywords}[1]{\def\@Keywords{#1}} \newcommand{\Plainkeywords}[1]{\def\@Plainkeywords{#1}} \newcommand{\Abstract}[1]{\def\@Abstract{#1}} %% defaults \author{Firstname Lastname\\Affiliation} \title{Title} \Abstract{---!!!---an abstract is required---!!!---} \Plainauthor{\@author} \Plaintitle{\@title} \Shorttitle{\@title} \Keywords{---!!!---at least one keyword is required---!!!---} \Plainkeywords{\@Keywords} %% Sweave(-like) %\DefineVerbatimEnvironment{Sinput}{Verbatim}{fontshape=sl} %\DefineVerbatimEnvironment{Soutput}{Verbatim}{} %\DefineVerbatimEnvironment{Scode}{Verbatim}{fontshape=sl} %\newenvironment{Schunk}{}{} \DefineVerbatimEnvironment{Code}{Verbatim}{} \DefineVerbatimEnvironment{CodeInput}{Verbatim}{fontshape=sl} \DefineVerbatimEnvironment{CodeOutput}{Verbatim}{} \newenvironment{CodeChunk}{}{} \setkeys{Gin}{width=0.8\textwidth} %% new \maketitle \def\maketitle{ \begingroup \def\thefootnote{\fnsymbol{footnote}} \def\@makefnmark{\hbox 
to 0pt{$^{\@thefnmark}$\hss}} \long\def\@makefntext##1{\parindent 1em\noindent \hbox to1.8em{\hss $\m@th ^{\@thefnmark}$}##1} \@maketitle \@thanks \endgroup \setcounter{footnote}{0} \thispagestyle{empty} \markboth{\centerline{\@Shorttitle}}{\centerline{\@Plainauthor}} \pagestyle{myheadings} \let\maketitle\relax \let\@maketitle\relax \gdef\@thanks{}\gdef\@author{}\gdef\@title{}\let\thanks\relax } \def\@maketitle{\vbox{\hsize\textwidth \linewidth\hsize {\centering {\LARGE\bf \@title\par} \def\And{\end{tabular}\hfil\linebreak[0]\hfil \begin{tabular}[t]{c}\large\bf\rule{\z@}{24pt}\ignorespaces}% \begin{tabular}[t]{c}\large\bf\rule{\z@}{24pt}\@author\end{tabular}% \vskip 0.3in minus 0.1in \hrule \begin{abstract} \@Abstract \end{abstract}} \textit{Keywords}:~\@Keywords. \vskip 0.1in minus 0.05in \hrule \vskip 0.2in minus 0.1in }} %% sections, subsections, and subsubsections \newlength{\preXLskip} \newlength{\preLskip} \newlength{\preMskip} \newlength{\preSskip} \newlength{\postMskip} \newlength{\postSskip} \setlength{\preXLskip}{1.8\baselineskip plus 0.5ex minus 0ex} \setlength{\preLskip}{1.5\baselineskip plus 0.3ex minus 0ex} \setlength{\preMskip}{1\baselineskip plus 0.2ex minus 0ex} \setlength{\preSskip}{.8\baselineskip plus 0.2ex minus 0ex} \setlength{\postMskip}{.5\baselineskip plus 0ex minus 0.1ex} \setlength{\postSskip}{.3\baselineskip plus 0ex minus 0.1ex} \newcommand{\jsssec}[2][default]{\vskip \preXLskip% \pdfbookmark[1]{#1}{Section.\thesection.#1}% \refstepcounter{section}% \centerline{\textbf{\Large \thesection. #2}} \nopagebreak \vskip \postMskip \nopagebreak} \newcommand{\jsssecnn}[1]{\vskip \preXLskip% \centerline{\textbf{\Large #1}} \nopagebreak \vskip \postMskip \nopagebreak} \newcommand{\jsssubsec}[2][default]{\vskip \preMskip% \pdfbookmark[2]{#1}{Subsection.\thesubsection.#1}% \refstepcounter{subsection}% \textbf{\large \thesubsection. 
#2} \nopagebreak \vskip \postSskip \nopagebreak} \newcommand{\jsssubsecnn}[1]{\vskip \preMskip% \textbf{\large #1} \nopagebreak \vskip \postSskip \nopagebreak} \newcommand{\jsssubsubsec}[2][default]{\vskip \preSskip% \pdfbookmark[3]{#1}{Subsubsection.\thesubsubsection.#1}% \refstepcounter{subsubsection}% {\large \textit{#2}} \nopagebreak \vskip \postSskip \nopagebreak} \newcommand{\jsssubsubsecnn}[1]{\vskip \preSskip% {\textit{\large #1}} \nopagebreak \vskip \postSskip \nopagebreak} \newcommand{\jsssimplesec}[2][default]{\vskip \preLskip% %% \pdfbookmark[1]{#1}{Section.\thesection.#1}% \refstepcounter{section}% \textbf{\large #1} \nopagebreak \vskip \postSskip \nopagebreak} \newcommand{\jsssimplesecnn}[1]{\vskip \preLskip% \textbf{\large #1} \nopagebreak \vskip \postSskip \nopagebreak} \renewcommand{\section}{\secdef \jsssec \jsssecnn} \renewcommand{\subsection}{\secdef \jsssubsec \jsssubsecnn} \renewcommand{\subsubsection}{\secdef \jsssubsubsec \jsssubsubsecnn} %% colors \definecolor{Red}{rgb}{0.7,0,0} \definecolor{Blue}{rgb}{0,0,0.8} \hypersetup{% hyperindex = {true}, colorlinks = {true}, linktocpage = {true}, plainpages = {false}, linkcolor = {Blue}, citecolor = {Blue}, urlcolor = {Red}, pdfstartview = {Fit}, pdfpagemode = {UseOutlines}, pdfview = {XYZ null null null} } \AtBeginDocument{ \hypersetup{% pdfauthor = {\@Plainauthor}, pdftitle = {\@Plaintitle}, pdfkeywords = {\@Plainkeywords} } } \if@notitle %% \AtBeginDocument{\maketitle} \else \AtBeginDocument{\maketitle} \fi kernlab/vignettes/jss.bib0000644000175100001440000003421314130771476015210 0ustar hornikusers@Article{kernlab:Karatzoglou+Smola+Hornik:2004, author = {Alexandros Karatzoglou and Alex Smola and Kurt Hornik and Achim Zeileis}, title = {kernlab -- An \proglang{S4} Package for Kernel Methods in \proglang{R}}, year = 2004, journal = {Journal of Statistical Software}, volume = 11, number = 9, pages = {1--20}, doi = {10.18637/jss.v011.i09}, } @Book{kernlab:Schoelkopf+Smola:2002, author = {Bernhard Sch\"olkopf and Alex Smola}, title = {Learning with Kernels}, publisher = {MIT Press}, year = 2002, } @Book{kernlab:Chambers:1998, Author = {John M. Chambers}, title = {Programming with Data}, Publisher = {Springer, New York}, Year = 1998, note = {ISBN 0-387-98503-4}, } @Book{kernlab:Hastie:2001, author = {T. Hastie and R. Tibshirani and J. H. Friedman}, title = {The Elements of Statistical Learning}, publisher = {Springer}, Year = 2001, } @Article{kernlab:Vanderbei:1999, author = {Robert Vanderbei}, title = {{LOQO}: An Interior Point Code for Quadratic Programming}, journal = {Optimization Methods and Software}, year = 1999, volume = 12, pages = {251--484}, url = {http://www.sor.princeton.edu/~rvdb/ps/loqo6.pdf}, } @Misc{kernlab:Leisch+Dimitriadou, author = {Fiedrich Leisch and Evgenia Dimitriadou}, title = {\pkg{mlbench}---{A} Collection for Artificial and Real-world Machine Learning Benchmarking Problems}, howpublished = {\textsf{R} package, Version 0.5-6}, note = {Available from \url{https://CRAN.R-project.org}}, year = 2001, month = 12, } @Misc{kernlab:Roever:2004, author = {Christian Roever and Nils Raabe and Karsten Luebke and Uwe Ligges}, title = { \pkg{klaR} -- Classification and Visualization}, howpublished = {\textsf{R} package, Version 0.3-3}, note = {Available from \url{http://cran.R-project.org}}, year = 2004, month = 7, } @Article{kernlab:Hsu+Lin:2002, author = {C.-W. 
Hsu and Chih-Jen Lin}, title = {A Comparison of Methods for Multi-class Support Vector Machines}, journal = {IEEE Transactions on Neural Networks}, year = 2002, volume = 13, pages = {415--425}, url = {http://www.csie.ntu.edu.tw/~cjlin/papers/multisvm.ps.gz}, } @Misc{kernlab:Chang+Lin:2001, author = {Chih-Chung Chang and Chih-Jen Lin}, title = {{LIBSVM}: A Library for Support Vector Machines}, note = {Software available at \url{http://www.csie.ntu.edu.tw/~cjlin/libsvm}}, year = 2001, } @Article{kernlab:Platt:2000, Author = {J. C. Platt}, Title = {Probabilistic Outputs for Support Vector Machines and Comparison to Regularized Likelihood Methods}, Journal = {Advances in Large Margin Classifiers, A. Smola, P. Bartlett, B. Sch\"olkopf and D. Schuurmans, Eds.}, Year = 2000, publisher = {Cambridge, MA: MIT Press}, url = {http://citeseer.nj.nec.com/platt99probabilistic.html}, } @Article{kernlab:Platt:1998, Author = {J. C. Platt}, Title = {Probabilistic Outputs for Support Vector Machines and Comparison to Regularized Likelihood Methods}, Journal = {B. Sch\"olkopf, C. J. C. Burges, A. J. Smola, editors, Advances in Kernel Methods --- Support Vector Learning}, Year = 1998, publisher = {Cambridge, MA: MIT Press}, url = {http://research.microsoft.com/~jplatt/abstracts/smo.html}, } @Article{kernlab:Keerthi:2002, Author = {S. S. Kerthi and E. G. Gilbert}, Title = {Convergence of a Generalized {SMO} Algorithm for {SVM} Classifier Design}, Journal = {Machine Learning}, pages = {351--360}, Year = 2002, volume = 46, url = {http://guppy.mpe.nus.edu.sg/~mpessk/svm/conv_ml.ps.gz}, } @Article{kernlab:Olvi:2000, Author = {Alex J. Smola and Olvi L. Mangasarian and Bernhard Sch\"olkopf}, Title = {Sparse Kernel Feature Analysis}, Journal = {24th Annual Conference of Gesellschaft f\"ur Klassifikation}, publisher = {University of Passau}, Year = 2000, url = {ftp://ftp.cs.wisc.edu/pub/dmi/tech-reports/99-04.ps}, } @Unpublished{kernlab:Lin:2001, Author = {H.-T. Lin and Chih-Jen Lin and R. C. Weng}, Title = {A Note on {Platt's} Probabilistic Outputs for Support Vector Machines}, Year = 2001, note = {Available at \url{http://www.csie.ntu.edu.tw/~cjlin/papers/plattprob.ps}}, } @Unpublished{kernlab:Weng:2004, Author = {C.-J Lin and R C. Weng}, Title = {Probabilistic Predictions for Support Vector Regression}, Year = 2004, note = {Available at \url{http://www.csie.ntu.edu.tw/~cjlin/papers/svrprob.pdf}}, } @Article{kernlab:Crammer:2000, Author = {K. Crammer and Y. 
Singer}, Title = {On the Learnability and Design of Output Codes for Multiclass Prolems}, Year = 2000, Journal = {Computational Learning Theory}, Pages = {35--46}, url = {http://www.cs.huji.ac.il/~kobics/publications/mlj01.ps.gz}, } @Article{kernlab:joachim:1999, Author = {Thorsten Joachims}, Title = {Making Large-scale {SVM} Learning Practical}, Journal = {In Advances in Kernel Methods --- Support Vector Learning}, Chapter = 11, Year = 1999, publisher = {MIT Press}, url = {http://www-ai.cs.uni-dortmund.de/DOKUMENTE/joachims_99a.ps.gz}, } @Article{kernlab:Meyer:2001, author = {David Meyer}, title = {Support Vector Machines}, journal = {R News}, year = 2001, volume = 1, number = 3, pages = {23--26}, month = {September}, url = {http://CRAN.R-project.org/doc/Rnews/}, note = {\url{http://CRAN.R-project.org/doc/Rnews/}} } @ARTICLE{kernlab:meyer+leisch+hornik:2003, AUTHOR = {David Meyer and Friedrich Leisch and Kurt Hornik}, TITLE = {The Support Vector Machine under Test}, JOURNAL = {Neurocomputing}, YEAR = 2003, MONTH = {September}, PAGES = {169--186}, VOLUME = 55, } @Book{kernlab:Vapnik:1998, author = {Vladimir Vapnik}, Title = {Statistical Learning Theory}, Year = 1998, publisher = {Wiley, New York}, } @Book{kernlab:Vapnik2:1995, author = {Vladimir Vapnik}, Title = {The Nature of Statistical Learning Theory}, Year = 1995, publisher = {Springer, NY}, } @Article{kernlab:Wu:2003, Author = {Ting-Fan Wu and Chih-Jen Lin and Ruby C. Weng}, Title = {Probability Estimates for Multi-class Classification by Pairwise Coupling}, Year = 2003, Journal = {Advances in Neural Information Processing}, Publisher = {MIT Press Cambridge Mass.}, Volume = 16, url = {http://books.nips.cc/papers/files/nips16/NIPS2003_0538.pdf}, } @Article{kernlab:Williams:1995, Author = {Christopher K. I. Williams and Carl Edward Rasmussen}, Title = {Gaussian Processes for Regression}, Year = 1995, Journal = {Advances in Neural Information Processing}, Publisher = {MIT Press Cambridge Mass.}, Volume = 8, url = {http://books.nips.cc/papers/files/nips08/0514.pdf}, } @Article{kernlab:Schoelkopf:1998, Author = {B. Sch\"olkopf and A. Smola and K. R. M\"uller}, Title = {Nonlinear Component Analysis as a Kernel Eigenvalue Problem}, Journal = {Neural Computation}, Volume = 10, Pages = {1299--1319}, Year = 1998, url = {http://mlg.anu.edu.au/~smola/papers/SchSmoMul98.pdf}, } @Article{kernlab:Tipping:2001, Author = {M. E. Tipping}, Title = {Sparse Bayesian Learning and the Relevance Vector Machine}, Journal = {Journal of Machine Learning Research}, Volume = 1, Year = 2001, Pages = {211--244}, url = {http://www.jmlr.org/papers/volume1/tipping01a/tipping01a.pdf}, } @Article{kernlab:Zhou:2003, Author = {D. Zhou and J. Weston and A. Gretton and O. Bousquet and B. Sch\"olkopf}, Title = {Ranking on Data Manifolds}, Journal = {Advances in Neural Information Processing Systems}, Volume = 16, Year = 2003, Publisher = {MIT Press Cambridge Mass.}, url = {http://www.kyb.mpg.de/publications/pdfs/pdf2334.pdf}, } @Article{kernlab:Andrew:2001, Author = {Andrew Y. Ng and Michael I. Jordan and Yair Weiss}, Title = {On Spectral Clustering: Analysis and an Algorithm}, Journal = {Advances in Neural Information Processing Systems}, Volume = 14, Publisher = {MIT Press Cambridge Mass.}, url = {http://www.nips.cc/NIPS2001/papers/psgz/AA35.ps.gz}, } @Article{kernlab:Caputo:2002, Author = {B. Caputo and K. Sim and F. Furesjo and A. 
Smola}, Title = {Appearance-based Object Recognition using {SVMs}: Which Kernel Should {I} Use?}, Journal = {Proc of NIPS workshop on Statistical methods for computational experiments in visual processing and computer vision, Whistler, 2002}, Year = 2002, } @Article{kernlab:Putten:2000, Author = {Peter van der Putten and Michel de Ruiter and Maarten van Someren}, Title = {CoIL Challenge 2000 Tasks and Results: Predicting and Explaining Caravan Policy Ownership}, Journal = {Coil Challenge 2000}, Year = 2000, url = {http://www.liacs.nl/~putten/library/cc2000/}, } @Article{kernlab:Hsu:2002, Author = {C.-W. Hsu and Chih-Jen Lin}, Title = {A Simple Decomposition Method for Support Vector Machines}, Journal = {Machine Learning}, Year = 2002, Pages = {291--314}, volume = 46, url = {http://www.csie.ntu.edu.tw/~cjlin/papers/decomp.ps.gz}, } @Article{kernlab:Knerr:1990, Author = {S. Knerr and L. Personnaz and G. Dreyfus}, Title = {Single-layer Learning Revisited: A Stepwise Procedure for Building and Training a Neural Network.}, Journal = {J. Fogelman, editor, Neurocomputing: Algorithms, Architectures and Applications}, Publisher = {Springer-Verlag}, Year = 1990, } @Article{kernlab:Kressel:1999, Author = {U. Kre{\ss}el}, Title = {Pairwise Classification and Support Vector Machines}, Year = 1999, Journal = {B. Sch\"olkopf, C. J. C. Burges, A. J. Smola, editors, Advances in Kernel Methods --- Support Vector Learning}, Pages = {255--268}, Publisher = {Cambridge, MA, MIT Press}, } @Article{kernlab:Hsu2:2002, Title = {A Comparison of Methods for Multi-class Support Vector Machines}, Author = {C.-W. Hsu and Chih-Jen Lin}, Journal = {IEEE Transactions on Neural Networks}, Volume = 13, Year = 2002, Pages = {1045--1052}, url = {http://www.csie.ntu.edu.tw/~cjlin/papers/multisvm.ps.gz}, } @Article{kernlab:Tax:1999, Title = {Support Vector Domain Description}, Author = {David M. J. Tax and Robert P. W. Duin}, Journal = {Pattern Recognition Letters}, Volume = 20, Pages = {1191--1199}, Year = 1999, Publisher = {Elsevier}, url = {http://www.ph.tn.tudelft.nl/People/bob/papers/prl_99_svdd.pdf}, } @Article{kernlab:Williamson:1999, Title = {Estimating the Support of a High-Dimensonal Distribution}, Author = {B. Sch\"olkopf and J. Platt and J. Shawe-Taylor and A. J. Smola and R. C. Williamson}, Journal = {Microsoft Research, Redmond, WA}, Volume = {TR 87}, Year = 1999, url = {http://research.microsoft.com/research/pubs/view.aspx?msr_tr_id=MSR-TR-99-87}, } @Article{kernlab:Smola1:2000, Title = {New Support Vector Algorithms}, Author = {B. Sch\"olkopf and A. J. Smola and R. C. Williamson and P. L. Bartlett}, Journal = {Neural Computation}, Volume = 12, Year = 2000, Pages = {1207--1245}, url = {http://caliban.ingentaselect.com/vl=3338649/cl=47/nw=1/rpsv/cgi-bin/cgi?body=linker&reqidx=0899-7667(2000)12:5L.1207}, } @Article{kernlab:Wright:1999, Title = {Modified {Cholesky} Factorizations in Interior-point Algorithms for Linear Programming}, Author = {S. Wright}, Journal = {Journal in Optimization}, Volume = 9, publisher = {SIAM}, Year = 1999, Pages = {1159--1191}, ur = {http://www-unix.mcs.anl.gov/~wright/papers/P600.pdf}, } @Article{kernlab:more:1999, Title = {Newton's Method for Large-scale Bound Constrained Problems}, Author = {Chih-Jen Lin and J. J. More}, Journal = {SIAM Journal on Optimization}, volume = 9, pages = {1100--1127}, Year = 1999, } @Article{kernlab:Ng:2001, Title = {On Spectral Clustering: Analysis and an Algorithm}, Author = {Andrew Y. Ng and Michael I. 
Jordan and Yair Weiss}, Journal = {Neural Information Processing Symposium 2001}, Year = 2001, url = {http://www.nips.cc/NIPS2001/papers/psgz/AA35.ps.gz} } @Article{kernlab:kuss:2003, Title = {The Geometry of Kernel Canonical Correlation Analysis}, Author = {Malte Kuss and Thore Graepel}, Journal = {MPI-Technical Reports}, url = {http://www.kyb.mpg.de/publication.html?publ=2233}, Year = 2003, } %% Mathias Seeger gp pub. @Article{kernlab:Kivinen:2004, Title = {Online Learning with Kernels}, Author = {Jyrki Kivinen and Alexander Smola and Robert Williamson}, Journal ={IEEE Transactions on Signal Processing}, volume = 52, Year = 2004, url = {http://mlg.anu.edu.au/~smola/papers/KivSmoWil03.pdf}, } kernlab/vignettes/kernlab.Rnw0000644000175100001440000014230512055335060016030 0ustar hornikusers\documentclass{A} \usepackage{amsfonts,thumbpdf,alltt} \newenvironment{smallverbatim}{\small\verbatim}{\endverbatim} \newenvironment{smallexample}{\begin{alltt}\small}{\end{alltt}} \SweaveOpts{engine=R,eps=FALSE} %\VignetteIndexEntry{kernlab - An S4 Package for Kernel Methods in R} %\VignetteDepends{kernlab} %\VignetteKeywords{kernel methods, support vector machines, quadratic programming, ranking, clustering, S4, R} %\VignettePackage{kernlab} <>= library(kernlab) options(width = 70) @ \title{\pkg{kernlab} -- An \proglang{S4} Package for Kernel Methods in \proglang{R}} \Plaintitle{kernlab - An S4 Package for Kernel Methods in R} \author{Alexandros Karatzoglou\\Technische Universit\"at Wien \And Alex Smola\\Australian National University, NICTA \And Kurt Hornik\\Wirtschaftsuniversit\"at Wien } \Plainauthor{Alexandros Karatzoglou, Alex Smola, Kurt Hornik} \Abstract{ \pkg{kernlab} is an extensible package for kernel-based machine learning methods in \proglang{R}. It takes advantage of \proglang{R}'s new \proglang{S4} object model and provides a framework for creating and using kernel-based algorithms. The package contains dot product primitives (kernels), implementations of support vector machines and the relevance vector machine, Gaussian processes, a ranking algorithm, kernel PCA, kernel CCA, kernel feature analysis, online kernel methods and a spectral clustering algorithm. Moreover it provides a general purpose quadratic programming solver, and an incomplete Cholesky decomposition method. } \Keywords{kernel methods, support vector machines, quadratic programming, ranking, clustering, \proglang{S4}, \proglang{R}} \Plainkeywords{kernel methods, support vector machines, quadratic programming, ranking, clustering, S4, R} \begin{document} \section{Introduction} Machine learning is all about extracting structure from data, but it is often difficult to solve problems like classification, regression and clustering in the space in which the underlying observations have been made. Kernel-based learning methods use an implicit mapping of the input data into a high dimensional feature space defined by a kernel function, i.e., a function returning the inner product $ \langle \Phi(x),\Phi(y) \rangle$ between the images of two data points $x, y$ in the feature space. The learning then takes place in the feature space, provided the learning algorithm can be entirely rewritten so that the data points only appear inside dot products with other points. This is often referred to as the ``kernel trick'' \citep{kernlab:Schoelkopf+Smola:2002}. 
More precisely, if a projection $\Phi: X \rightarrow H$ is used, the dot product $\langle\Phi(x),\Phi(y)\rangle$ can be represented by a kernel function~$k$ \begin{equation} \label{eq:kernel} k(x,y)= \langle \Phi(x),\Phi(y) \rangle, \end{equation} which is computationally simpler than explicitly projecting $x$ and $y$ into the feature space~$H$. One interesting property of kernel-based systems is that, once a valid kernel function has been selected, one can practically work in spaces of any dimension without paying any computational cost, since feature mapping is never effectively performed. In fact, one does not even need to know which features are being used. Another advantage is the that one can design and use a kernel for a particular problem that could be applied directly to the data without the need for a feature extraction process. This is particularly important in problems where a lot of structure of the data is lost by the feature extraction process (e.g., text processing). The inherent modularity of kernel-based learning methods allows one to use any valid kernel on a kernel-based algorithm. \subsection{Software review} The most prominent kernel based learning algorithm is without doubt the support vector machine (SVM), so the existence of many support vector machine packages comes as little surprise. Most of the existing SVM software is written in \proglang{C} or \proglang{C++}, e.g.\ the award winning \pkg{libsvm}\footnote{\url{http://www.csie.ntu.edu.tw/~cjlin/libsvm/}} \citep{kernlab:Chang+Lin:2001}, \pkg{SVMlight}\footnote{\url{http://svmlight.joachims.org}} \citep{kernlab:joachim:1999}, \pkg{SVMTorch}\footnote{\url{http://www.torch.ch}}, Royal Holloway Support Vector Machines\footnote{\url{http://svm.dcs.rhbnc.ac.uk}}, \pkg{mySVM}\footnote{\url{http://www-ai.cs.uni-dortmund.de/SOFTWARE/MYSVM/index.eng.html}}, and \pkg{M-SVM}\footnote{\url{http://www.loria.fr/~guermeur/}} with many packages providing interfaces to \proglang{MATLAB} (such as \pkg{libsvm}), and even some native \proglang{MATLAB} toolboxes\footnote{ \url{http://www.isis.ecs.soton.ac.uk/resources/svminfo/}}\,\footnote{ \url{http://asi.insa-rouen.fr/~arakotom/toolbox/index}}\,\footnote{ \url{http://www.cis.tugraz.at/igi/aschwaig/software.html}}. Putting SVM specific software aside and considering the abundance of other kernel-based algorithms published nowadays, there is little software available implementing a wider range of kernel methods with some exceptions like the \pkg{Spider}\footnote{\url{http://www.kyb.tuebingen.mpg.de/bs/people/spider/}} software which provides a \proglang{MATLAB} interface to various \proglang{C}/\proglang{C++} SVM libraries and \proglang{MATLAB} implementations of various kernel-based algorithms, \pkg{Torch} \footnote{\url{http://www.torch.ch}} which also includes more traditional machine learning algorithms, and the occasional \proglang{MATLAB} or \proglang{C} program found on a personal web page where an author includes code from a published paper. \subsection[R software]{\proglang{R} software} The \proglang{R} package \pkg{e1071} offers an interface to the award winning \pkg{libsvm} \citep{kernlab:Chang+Lin:2001}, a very efficient SVM implementation. \pkg{libsvm} provides a robust and fast SVM implementation and produces state of the art results on most classification and regression problems \citep{kernlab:Meyer+Leisch+Hornik:2003}. 
The \proglang{R} interface provided in \pkg{e1071} adds all standard \proglang{R} functionality like object orientation and formula interfaces to \pkg{libsvm}. Another SVM related \proglang{R} package which was made recently available is \pkg{klaR} \citep{kernlab:Roever:2004} which includes an interface to \pkg{SVMlight}, a popular SVM implementation along with other classification tools like Regularized Discriminant Analysis. However, most of the \pkg{libsvm} and \pkg{klaR} SVM code is in \proglang{C++}. Therefore, if one would like to extend or enhance the code with e.g.\ new kernels or different optimizers, one would have to modify the core \proglang{C++} code. \section[kernlab]{\pkg{kernlab}} \pkg{kernlab} aims to provide the \proglang{R} user with basic kernel functionality (e.g., like computing a kernel matrix using a particular kernel), along with some utility functions commonly used in kernel-based methods like a quadratic programming solver, and modern kernel-based algorithms based on the functionality that the package provides. Taking advantage of the inherent modularity of kernel-based methods, \pkg{kernlab} aims to allow the user to switch between kernels on an existing algorithm and even create and use own kernel functions for the kernel methods provided in the package. \subsection[S4 objects]{\proglang{S4} objects} \pkg{kernlab} uses \proglang{R}'s new object model described in ``Programming with Data'' \citep{kernlab:Chambers:1998} which is known as the \proglang{S4} class system and is implemented in the \pkg{methods} package. In contrast with the older \proglang{S3} model for objects in \proglang{R}, classes, slots, and methods relationships must be declared explicitly when using the \proglang{S4} system. The number and types of slots in an instance of a class have to be established at the time the class is defined. The objects from the class are validated against this definition and have to comply to it at any time. \proglang{S4} also requires formal declarations of methods, unlike the informal system of using function names to identify a certain method in \proglang{S3}. An \proglang{S4} method is declared by a call to \code{setMethod} along with the name and a ``signature'' of the arguments. The signature is used to identify the classes of one or more arguments of the method. Generic functions can be declared using the \code{setGeneric} function. Although such formal declarations require package authors to be more disciplined than when using the informal \proglang{S3} classes, they provide assurance that each object in a class has the required slots and that the names and classes of data in the slots are consistent. An example of a class used in \pkg{kernlab} is shown below. Typically, in a return object we want to include information on the result of the method along with additional information and parameters. Usually \pkg{kernlab}'s classes include slots for the kernel function used and the results and additional useful information. 
\begin{smallexample} setClass("specc", representation("vector", # the vector containing the cluster centers="matrix", # the cluster centers size="vector", # size of each cluster kernelf="function", # kernel function used withinss = "vector"), # within cluster sum of squares prototype = structure(.Data = vector(), centers = matrix(), size = matrix(), kernelf = ls, withinss = vector())) \end{smallexample} Accessor and assignment function are defined and used to access the content of each slot which can be also accessed with the \verb|@| operator. \subsection{Namespace} Namespaces were introduced in \proglang{R} 1.7.0 and provide a means for packages to control the way global variables and methods are being made available. Due to the number of assignment and accessor function involved, a namespace is used to control the methods which are being made visible outside the package. Since \proglang{S4} methods are being used, the \pkg{kernlab} namespace also imports methods and variables from the \pkg{methods} package. \subsection{Data} The \pkg{kernlab} package also includes data set which will be used to illustrate the methods included in the package. The \code{spam} data set \citep{kernlab:Hastie:2001} set collected at Hewlett-Packard Labs contains data on 2788 and 1813 e-mails classified as non-spam and spam, respectively. The 57 variables of each data vector indicate the frequency of certain words and characters in the e-mail. Another data set included in \pkg{kernlab}, the \code{income} data set \citep{kernlab:Hastie:2001}, is taken by a marketing survey in the San Francisco Bay concerning the income of shopping mall customers. It consists of 14 demographic attributes (nominal and ordinal variables) including the income and 8993 observations. The \code{ticdata} data set \citep{kernlab:Putten:2000} was used in the 2000 Coil Challenge and contains information on customers of an insurance company. The data consists of 86 variables and includes product usage data and socio-demographic data derived from zip area codes. The data was collected to answer the following question: Can you predict who would be interested in buying a caravan insurance policy and give an explanation why? The \code{promotergene} is a data set of E. Coli promoter gene sequences (DNA) with 106 observations and 58 variables available at the UCI Machine Learning repository. Promoters have a region where a protein (RNA polymerase) must make contact and the helical DNA sequence must have a valid conformation so that the two pieces of the contact region spatially align. The data contains DNA sequences of promoters and non-promoters. The \code{spirals} data set was created by the \code{mlbench.spirals} function in the \pkg{mlbench} package \citep{kernlab:Leisch+Dimitriadou}. This two-dimensional data set with 300 data points consists of two spirals where Gaussian noise is added to each data point. \subsection{Kernels} A kernel function~$k$ calculates the inner product of two vectors $x$, $x'$ in a given feature mapping $\Phi: X \rightarrow H$. The notion of a kernel is obviously central in the making of any kernel-based algorithm and consequently also in any software package containing kernel-based methods. Kernels in \pkg{kernlab} are \proglang{S4} objects of class \code{kernel} extending the \code{function} class with one additional slot containing a list with the kernel hyper-parameters. 
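To make the role of the feature map concrete, consider a small worked
example: the polynomial kernel (\code{polydot}, described below) with scale
one, offset zero and degree two. For $x, x' \in \mathbb{R}^2$ it corresponds
to the explicit feature map $\Phi(x) = (x_1^2, \sqrt{2}\, x_1 x_2, x_2^2)$,
since
\begin{equation}
\langle \Phi(x), \Phi(x') \rangle
 = x_1^2 {x'_1}^2 + 2 x_1 x_2 x'_1 x'_2 + x_2^2 {x'_2}^2
 = ( x_1 x'_1 + x_2 x'_2 )^2
 = \langle x, x' \rangle^2 .
\end{equation}
Evaluating the right hand side only costs a dot product in two dimensions,
whereas forming $\Phi$ explicitly requires three; for higher degrees and
input dimensions this gap grows rapidly, which is precisely the point of
the ``kernel trick''.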
Package \pkg{kernlab} includes 7 different kernel classes which all contain the class \code{kernel} and are used to implement the existing kernels. These classes are used in the function dispatch mechanism of the kernel utility functions described below. Existing kernel functions are initialized by ``creator'' functions. All kernel functions take two feature vectors as parameters and return the scalar dot product of the vectors. An example of the functionality of a kernel in \pkg{kernlab}:

<<>>=
## create an RBF kernel function with sigma hyper-parameter 0.05
rbf <- rbfdot(sigma = 0.05)
rbf
## create two random feature vectors
x <- rnorm(10)
y <- rnorm(10)
## compute dot product between x,y
rbf(x, y)
@

The package includes implementations of the following kernels:
\begin{itemize}
\item the linear \code{vanilladot} kernel implements the simplest of all kernel functions
\begin{equation}
k(x,x') = \langle x, x' \rangle
\end{equation}
which is useful especially when dealing with large sparse data vectors~$x$, as is usually the case in text categorization.
\item the Gaussian radial basis function kernel \code{rbfdot}
\begin{equation}
k(x,x') = \exp(-\sigma \|x - x'\|^2)
\end{equation}
which is a general purpose kernel and is typically used when no further prior knowledge is available about the data.
\item the polynomial kernel \code{polydot}
\begin{equation}
k(x, x') = \left( \mathrm{scale} \cdot \langle x, x' \rangle + \mathrm{offset} \right)^\mathrm{degree}
\end{equation}
which is used in classification of images.
\item the hyperbolic tangent kernel \code{tanhdot}
\begin{equation}
k(x, x') = \tanh \left( \mathrm{scale} \cdot \langle x, x' \rangle + \mathrm{offset} \right)
\end{equation}
which is mainly used as a proxy for neural networks.
\item the Bessel function of the first kind kernel \code{besseldot}
\begin{equation}
k(x, x') = \frac{\mathrm{Bessel}_{(\nu+1)}^n(\sigma \|x - x'\|)}{(\|x-x'\|)^{-n(\nu+1)}}
\end{equation}
which is a general purpose kernel, typically used when no further prior knowledge is available, and is mainly popular in the Gaussian process community.
\item the Laplace radial basis kernel \code{laplacedot}
\begin{equation}
k(x, x') = \exp(-\sigma \|x - x'\|)
\end{equation}
which is a general purpose kernel and is typically used when no further prior knowledge is available.
\item the ANOVA radial basis kernel \code{anovadot}, which performs well in multidimensional regression problems,
\begin{equation}
k(x, x') = \left(\sum_{k=1}^{n}\exp(-\sigma(x^k-{x'}^k)^2)\right)^{d}
\end{equation}
where $x^k$ is the $k$th component of $x$.
\end{itemize}

\subsection{Kernel utility methods}

The package also includes methods for computing commonly used kernel expressions (e.g., the Gram matrix). These methods are written in such a way that they take functions (i.e., kernels) and matrices (i.e., vectors of patterns) as arguments. These can be either the kernel functions already included in \pkg{kernlab} or any other function implementing a valid dot product (taking two vector arguments and returning a scalar). In case one of the already implemented kernels is used, the function calls a vectorized implementation of the corresponding function. Moreover, in the case of symmetric matrices (e.g., the dot product matrix of a Support Vector Machine) they only require one argument rather than having to pass the same matrix twice (for rows and columns).
The computations for the kernels already available in the package are vectorized whenever possible, which guarantees good performance and acceptable memory requirements. Users can define their own kernel by creating a function which takes two vectors as arguments (the data points) and returns a scalar (the dot product). This function can then be passed as an argument to the kernel utility methods. For a user-defined kernel the dispatch mechanism calls a generic method implementation which calculates the expression by passing the kernel function through a pair of \code{for} loops.
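As a sketch of this mechanism (the kernel function below is a made-up example for illustration, following the pattern used in the package documentation):

<<>>=
## a user-defined kernel: any function of two vectors returning a scalar
k <- function(x, y) {
  (sum(x * y) + 1) * exp(-0.001 * sum((x - y)^2))
}
class(k) <- "kernel"
## the generic implementation computes the kernel matrix entry by entry
K <- kernelMatrix(k, matrix(rnorm(20), 4))
dim(K)
@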
The kernel methods included are:
\begin{description}
\item[\code{kernelMatrix}] This is the most commonly used function. It computes $k(x, x')$, i.e., it computes the matrix $K$ where $K_{ij} = k(x_i, x_j)$ and each $x_i$ is a \emph{row} vector of the data matrix. In particular,
\begin{verbatim}
K <- kernelMatrix(kernel, x)
\end{verbatim}
computes the matrix $K_{ij} = k(x_i, x_j)$ where the $x_i$ are the rows of $X$ and
\begin{verbatim}
K <- kernelMatrix(kernel, x1, x2)
\end{verbatim}
computes the matrix $K_{ij} = k(x1_i, x2_j)$.
\item[\code{kernelFast}] This method differs from \code{kernelMatrix} for \code{rbfdot}, \code{besseldot}, and the \code{laplacedot} kernel, which are all RBF kernels. It is identical to \code{kernelMatrix}, except that it also requires the squared norm of the first argument as additional input. It is mainly used in kernel algorithms where columns of the kernel matrix are computed per invocation. In these cases, evaluating the norm of each column entry over and over again, as is done in a \code{kernelMatrix} invocation on an RBF kernel, would cause significant computational overhead. Its invocation is via
\begin{verbatim}
K <- kernelFast(kernel, x1, x2, a)
\end{verbatim}
Here $a$ is a vector containing the squared norms of $x1$.
\item[\code{kernelMult}] is a convenient way of computing kernel expansions. It returns the vector $f = (f(x_1), \dots, f(x_m))$ where
\begin{equation}
f(x_i) = \sum_{j=1}^{m} k(x_i, x_j) \alpha_j, \mbox{~hence~} f = K \alpha.
\end{equation}
The need for such a function arises from the fact that $K$ may sometimes be larger than the memory available. Therefore, it is convenient to compute $K$ only in stripes and discard each stripe after the corresponding part of $K \alpha$ has been computed. The parameter \code{blocksize} determines the number of rows in the stripes. In particular,
\begin{verbatim}
f <- kernelMult(kernel, x, alpha)
\end{verbatim}
computes $f_i = \sum_{j=1}^m k(x_i, x_j) \alpha_j$ and
\begin{verbatim}
f <- kernelMult(kernel, x1, x2, alpha)
\end{verbatim}
computes $f_i = \sum_{j=1}^m k(x1_i, x2_j) \alpha_j$.
\item[\code{kernelPol}] is a method very similar to \code{kernelMatrix} with the only difference that rather than computing $K_{ij} = k(x_i, x_j)$ it computes $K_{ij} = y_i y_j k(x_i, x_j)$. This means that
\begin{verbatim}
K <- kernelPol(kernel, x, y)
\end{verbatim}
computes the matrix $K_{ij} = y_i y_j k(x_i, x_j)$ where the $x_i$ are the rows of $x$ and the $y_i$ are elements of the vector~$y$. Moreover,
\begin{verbatim}
K <- kernelPol(kernel, x1, x2, y1, y2)
\end{verbatim}
computes the matrix $K_{ij} = y1_i y2_j k(x1_i, x2_j)$. Both \code{x1} and \code{x2} may be matrices and \code{y1} and \code{y2} vectors.
\end{description}

An example using these functions:
<<>>=
## create a quadratic polynomial kernel function
poly <- polydot(degree = 2)
## create artificial data set
x <- matrix(rnorm(60), 6, 10)
y <- matrix(rnorm(40), 4, 10)
## compute kernel matrices
kx <- kernelMatrix(poly, x)
kxy <- kernelMatrix(poly, x, y)
@

\section{Kernel methods}

Providing a solid base for creating kernel-based methods is part of what we are trying to achieve with this package, the other goal being to provide a wider range of kernel-based methods in \proglang{R}. In the rest of the paper we present the kernel-based methods available in \pkg{kernlab}. All the methods in \pkg{kernlab} can be used with any of the kernels included in the package as well as with any valid user-defined kernel. User-defined kernel functions can be passed to existing kernel methods in the \code{kernel} argument.

\subsection{Support vector machine}

Support vector machines \citep{kernlab:Vapnik:1998} have gained prominence in the field of machine learning and pattern classification and regression. The solutions to classification and regression problems sought by kernel-based algorithms such as the SVM are linear functions in the feature space:
\begin{equation}
f(x) = w^\top \Phi(x)
\end{equation}
for some weight vector $w \in F$. The kernel trick can be exploited whenever the weight vector~$w$ can be expressed as a linear combination of the training points, $w = \sum_{i=1}^{n} \alpha_i \Phi(x_i)$, implying that $f$ can be written as
\begin{equation}
f(x) = \sum_{i=1}^{n}\alpha_i k(x_i, x)
\end{equation}

A very important issue that arises is that of choosing a kernel~$k$ for a given learning task. Intuitively, we wish to choose a kernel that induces the ``right'' metric in the space. Support Vector Machines choose a function $f$ that is linear in the feature space by optimizing some criterion over the sample. In the case of the 2-norm Soft Margin classification the optimization problem takes the form:
\begin{eqnarray}
\nonumber \mathrm{minimize} && t(w,\xi) = \frac{1}{2}{\|w\|}^2+\frac{C}{m}\sum_{i=1}^{m}\xi_i \\
\mbox{subject to~} && y_i ( \langle x_i , w \rangle +b ) \geq 1- \xi_i \qquad (i=1,\dots,m)\\
\nonumber && \xi_i \ge 0 \qquad (i=1,\dots, m)
\end{eqnarray}

Based on similar methodology, SVMs deal with the problem of novelty detection (or one-class classification) and regression. \pkg{kernlab}'s implementation of support vector machines, \code{ksvm}, is based on the optimizers found in \pkg{bsvm}\footnote{\url{http://www.csie.ntu.edu.tw/~cjlin/bsvm}} \citep{kernlab:Hsu:2002} and \pkg{libsvm} \citep{kernlab:Chang+Lin:2001}, which includes a very efficient version of Sequential Minimal Optimization (SMO). SMO decomposes the SVM Quadratic Problem (QP) without using any numerical QP optimization steps. Instead, it solves the smallest possible optimization problem, involving two elements $\alpha_i$, because they must obey one linear equality constraint. At every step, SMO chooses two $\alpha_i$ to jointly optimize, finds the optimal values for these $\alpha_i$ analytically, thus avoiding numerical QP optimization, and updates the SVM to reflect the new optimal values.
The SVM implementations available in \code{ksvm} include the C-SVM classification algorithm along with the $\nu$-SVM classification formulation, which is equivalent to the former but has a more natural ($\nu$) model parameter taking values in $[0,1]$, proportional to the fraction of support vectors found in the data set and the training error.

For classification problems which include more than two classes (multi-class), a one-against-one or pairwise classification method \citep{kernlab:Knerr:1990, kernlab:Kressel:1999} is used. This method constructs ${k \choose 2}$ classifiers where each one is trained on data from two classes. Prediction is done by voting, where each classifier gives a prediction and the class which is predicted more often wins (``Max Wins''). This method has been shown to produce robust results when used with SVMs \citep{kernlab:Hsu2:2002}.

Furthermore, the \code{ksvm} implementation provides the ability to produce class probabilities as output instead of class labels. This is done by an improved implementation \citep{kernlab:Lin:2001} of Platt's a posteriori probabilities \citep{kernlab:Platt:2000} where a sigmoid function
\begin{equation}
P(y=1\mid f) = \frac{1}{1+ e^{Af+B}}
\end{equation}
is fitted to the decision values~$f$ of the binary SVM classifiers; $A$ and $B$ are estimated by minimizing the negative log-likelihood function. To extend the class probabilities to the multi-class case, each binary classifier's class probability output is combined by the \code{couple} method, which implements methods for combining class probabilities proposed in \citep{kernlab:Wu:2003}.

In order to create a similar probability output for regression, following \cite{kernlab:Weng:2004}, we suppose that the SVM is trained on data from the model
\begin{equation}
y_i = f(x_i) + \delta_i
\end{equation}
where $f(x_i)$ is the underlying function and $\delta_i$ is independent and identically distributed random noise. Given a test datum $x$, the distribution of $y$ given $x$ allows one to draw probabilistic inferences about $y$, e.g.\ one can construct a predictive interval $\Phi = \Phi(x)$ such that $y \in \Phi$ with a certain probability. If $\hat{f}$ is the estimated (predicted) function of the SVM on new data, then $\eta = \eta(x) = y - \hat{f}(x)$ is the prediction error and $y \in \Phi$ is equivalent to $\eta \in \Phi$. Empirical observations show that the distribution of the residuals $\eta$ can be modeled both by a Gaussian and a Laplacian distribution with zero mean. In this implementation the Laplacian with zero mean is used:
\begin{equation}
p(z) = \frac{1}{2\sigma}e^{-\frac{|z|}{\sigma}}
\end{equation}
Assuming that the $\eta$ are independent, the scale parameter $\sigma$ is estimated by maximizing the likelihood. The data for the estimation is produced by a three-fold cross-validation. For the Laplace distribution the maximum likelihood estimate is:
\begin{equation}
\sigma = \frac{\sum_{i=1}^m|\eta_i|}{m}
\end{equation}

Another approach for multi-class classification supported by the \code{ksvm} function is the one proposed in \cite{kernlab:Crammer:2000}.
This algorithm works by solving a single optimization problem including the data from all classes:
\begin{eqnarray}
\nonumber \mathrm{minimize} && t(w_n,\xi) = \frac{1}{2}\sum_{n=1}^k{\|w_n\|}^2+\frac{C}{m}\sum_{i=1}^{m}\xi_i \\
\mbox{subject to~} && \langle x_i , w_{y_i} \rangle - \langle x_i , w_{n} \rangle \geq b_i^n - \xi_i \qquad (i=1,\dots,m) \\
\mbox{where} && b_i^n = 1 - \delta_{y_i,n}
\end{eqnarray}
where the decision function is
\begin{equation}
\mathrm{argmax}_{n=1,\dots,k} \langle x_i , w_{n} \rangle
\end{equation}

This optimization problem is solved by a decomposition method proposed in \cite{kernlab:Hsu:2002} where optimal working sets are found (that is, sets of $\alpha_i$ values which have a high probability of being non-zero). The QP sub-problems are then solved by a modified version of the \pkg{TRON}\footnote{\url{http://www-unix.mcs.anl.gov/~more/tron/}} \citep{kernlab:more:1999} optimization software.

One-class classification or novelty detection \citep{kernlab:Williamson:1999, kernlab:Tax:1999}, where essentially an SVM detects outliers in a data set, is another algorithm supported by \code{ksvm}. SVM novelty detection works by creating a spherical decision boundary around a set of data points, described by a set of support vectors on the sphere's boundary. The $\nu$ parameter is used to control the volume of the sphere and consequently the number of outliers found. As before, the value of $\nu$ represents the fraction of outliers found. Furthermore, $\epsilon$-SVM \citep{kernlab:Vapnik2:1995} and $\nu$-SVM \citep{kernlab:Smola1:2000} regression are also available.

The problem of model selection is partially addressed by an empirical observation for the popular Gaussian RBF kernel \citep{kernlab:Caputo:2002}, where the optimal values of the hyper-parameter $\sigma$ are shown to lie in between the 0.1 and 0.9 quantile of the $\|x - x'\|^2$ statistic. The \code{sigest} function uses a sample of the training set to estimate the quantiles and returns a vector containing the values of the quantiles. Pretty much any value within this interval leads to good performance.
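As a quick illustration of \code{sigest} (a sketch on simulated data; the returned quantile values depend on the random sample drawn):

<<>>=
## estimate a range of suitable sigma values for an RBF kernel
xtoy <- matrix(rnorm(200), 100, 2)
sigest(xtoy)
@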
An example for the \code{ksvm} function is shown below.
<<>>=
## simple example using the promotergene data set
data(promotergene)
## create test and training set
tindex <- sample(1:dim(promotergene)[1], 5)
genetrain <- promotergene[-tindex, ]
genetest <- promotergene[tindex, ]
## train a support vector machine
gene <- ksvm(Class ~ ., data = genetrain, kernel = "rbfdot",
             kpar = "automatic", C = 60, cross = 3, prob.model = TRUE)
gene
predict(gene, genetest)
predict(gene, genetest, type = "probabilities")
@

\begin{figure}
\centering
<<>>=
set.seed(123)
x <- rbind(matrix(rnorm(120), , 2), matrix(rnorm(120, mean = 3), , 2))
y <- matrix(c(rep(1, 60), rep(-1, 60)))
svp <- ksvm(x, y, type = "C-svc")
plot(svp, data = x)
@
\caption{A contour plot of the SVM decision values for a toy binary classification problem using the \code{plot} function}
\label{fig:ksvm Plot}
\end{figure}

\subsection{Relevance vector machine}

The relevance vector machine \citep{kernlab:Tipping:2001} is a probabilistic sparse kernel model identical in functional form to the SVM, making predictions based on a function of the form
\begin{equation}
y(x) = \sum_{n=1}^{N} \alpha_n K(\mathbf{x},\mathbf{x}_n) + a_0
\end{equation}
where the $\alpha_n$ are the model ``weights'' and $K(\cdotp,\cdotp)$ is a kernel function. It adopts a Bayesian approach to learning by introducing a prior over the weights $\alpha$,
\begin{equation}
p(\alpha, \beta) = \prod_{i=1}^m N(\alpha_i \mid 0 , \beta_i^{-1}) \mathrm{Gamma}(\beta_i\mid a_\beta , b_\beta)
\end{equation}
governed by a set of hyper-parameters $\beta$, one associated with each weight, whose most probable values are iteratively estimated from the data. Sparsity is achieved because in practice the posterior distribution of many of the weights is sharply peaked around zero. Furthermore, unlike the SVM classifier, the non-zero weights in the RVM are not associated with examples close to the decision boundary, but rather appear to represent ``prototypical'' examples. These examples are termed \emph{relevance vectors}.

\pkg{kernlab} currently has an implementation of the RVM based on a type~II maximum likelihood method which can be used for regression. The function returns an \proglang{S4} object containing the model parameters along with indexes for the relevance vectors and the kernel function and hyper-parameters used.
<<>>=
x <- seq(-20, 20, 0.5)
y <- sin(x)/x + rnorm(81, sd = 0.03)
y[41] <- 1
@
<<>>=
rvmm <- rvm(x, y, kernel = "rbfdot", kpar = list(sigma = 0.1))
rvmm
ytest <- predict(rvmm, x)
@
\begin{figure}
\centering
<<>>=
plot(x, y, cex = 0.5)
lines(x, ytest, col = "red")
points(x[RVindex(rvmm)], y[RVindex(rvmm)], pch = 21)
@
\caption{Relevance vector regression on data points created by the $\mathrm{sinc}(x)$ function; relevance vectors are shown circled.}
\label{fig:RVM sigmoid}
\end{figure}

\subsection{Gaussian processes}

Gaussian processes \citep{kernlab:Williams:1995} are based on the ``prior'' assumption that adjacent observations should convey information about each other. In particular, it is assumed that the observed variables are normal, and that the coupling between them takes place by means of the covariance matrix of a normal distribution. Using the kernel matrix as the covariance matrix is a convenient way of extending Bayesian modeling of linear estimators to nonlinear situations. Furthermore it represents the counterpart of the ``kernel trick'' in methods minimizing the regularized risk.

For regression estimation we assume that rather than observing $t(x_i)$ we observe $y_i = t(x_i) + \xi_i$ where $\xi_i$ is assumed to be independent Gaussian distributed noise with zero mean. The posterior distribution is given by
\begin{equation}
p(\mathbf{y}\mid \mathbf{t}) = \left[ \prod_i p(y_i - t(x_i)) \right] \frac{1}{\sqrt{(2\pi)^m \det(K)}} \exp \left(-\frac{1}{2}\mathbf{t}^T K^{-1} \mathbf{t} \right)
\end{equation}
and after substituting $\mathbf{t} = K\mathbf{\alpha}$ and taking logarithms
\begin{equation}
\ln{p(\mathbf{\alpha} \mid \mathbf{y})} = - \frac{1}{2\sigma^2}\| \mathbf{y} - K \mathbf{\alpha} \|^2 -\frac{1}{2}\mathbf{\alpha}^T K \mathbf{\alpha} +c
\end{equation}
and maximizing $\ln{p(\mathbf{\alpha} \mid \mathbf{y})}$ for $\mathbf{\alpha}$ to obtain the maximum a posteriori approximation yields
\begin{equation}
\mathbf{\alpha} = (K + \sigma^2\mathbf{1})^{-1} \mathbf{y}
\end{equation}
Knowing $\mathbf{\alpha}$ allows for prediction of $y$ at a new location $x$ through $y = K(x,x_i)\mathbf{\alpha}$. In a similar fashion Gaussian processes can be used for classification. \code{gausspr} is the function in \pkg{kernlab} implementing Gaussian processes for classification and regression.
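As a brief illustration, \code{gausspr} can be applied to the noisy $\mathrm{sinc}$ data generated for the relevance vector machine example above (a sketch; the kernel parameter is chosen for illustration only):

<<>>=
## Gaussian process regression on the sinc data from the RVM example
gpr <- gausspr(x, y, kernel = "rbfdot", kpar = list(sigma = 0.1))
gpr
## predictions at the training inputs
head(predict(gpr, x))
@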
\subsection{Ranking}

The success of Google has vividly demonstrated the value of a good ranking algorithm in real world problems. \pkg{kernlab} includes a ranking algorithm based on work published in \citep{kernlab:Zhou:2003}. This algorithm exploits the geometric structure of the data, in contrast to the more naive approach which uses the Euclidean distances or inner products of the data. Since real world data are usually highly structured, this algorithm should perform better than a simpler approach based on a Euclidean distance measure.

First, a weighted network is defined on the data and an authoritative score is assigned to every point. The query points act as source nodes that continually pump their scores to the remaining points via the weighted network, and the remaining points further spread the score to their neighbors. The spreading process is repeated until convergence and the points are ranked according to the scores they received.

Suppose we are given a set of data points $X = \{x_1, \dots, x_{s}, x_{s+1}, \dots, x_{m}\}$ in $\mathbf{R}^n$ where the first $s$ points are the query points and the rest are the points to be ranked. The algorithm works by connecting the two nearest points iteratively until a connected graph $G = (X, E)$ is obtained, where $E$ is the set of edges. The affinity matrix $K$ is defined e.g.\ by $K_{ij} = \exp(-\sigma\|x_i - x_j \|^2)$ if there is an edge $e(i,j) \in E$ and $0$ otherwise, including the diagonal elements. The matrix is normalized as $L = D^{-1/2}KD^{-1/2}$ where $D_{ii} = \sum_{j=1}^m K_{ij}$, and
\begin{equation}
f(t+1) = \alpha Lf(t) + (1 - \alpha)y
\end{equation}
is iterated until convergence, where $\alpha$ is a parameter in $[0,1)$. The points are then ranked according to their final scores $f_{i}(t_f)$.

\pkg{kernlab} includes an \proglang{S4} method implementing the ranking algorithm. The algorithm can be used both with an edge-graph, where the structure of the data is taken into account, and without, which is equivalent to ranking the data by their distance in the projected space.

\begin{figure}
\centering
<<>>=
data(spirals)
ran <- spirals[rowSums(abs(spirals) < 0.55) == 2, ]
ranked <- ranking(ran, 54, kernel = "rbfdot",
                  kpar = list(sigma = 100), edgegraph = TRUE)
ranked[54, 2] <- max(ranked[-54, 2])
c <- 1:86
op <- par(mfrow = c(1, 2), pty = "s")
plot(ran)
plot(ran, cex = c[ranked[, 3]]/40)
@
\caption{The points on the left are ranked according to their similarity to the uppermost left point. Points with a higher rank appear bigger. Instead of ranking the points on simple Euclidean distance, the structure of the data is recognized and all points on the upper structure are given a higher rank, although they are further away in distance than points in the lower structure.}
\label{fig:Ranking}
\end{figure}

\subsection{Online learning with kernels}

The \code{onlearn} function in \pkg{kernlab} implements the online kernel algorithms for classification, novelty detection and regression described in \citep{kernlab:Kivinen:2004}. In batch learning, it is typically assumed that all the examples are immediately available and are drawn independently from some distribution $P$. One natural measure of quality for some $f$ in that case is the expected risk
\begin{equation}
R[f,P] := E_{(x,y)\sim P}[l(f(x),y)]
\end{equation}
Since usually $P$ is unknown, a standard approach is to instead minimize the empirical risk
\begin{equation}
R_{emp}[f,P] := \frac{1}{m}\sum_{t=1}^m l(f(x_t),y_t)
\end{equation}
Minimizing $R_{emp}[f]$ may lead to overfitting (complex functions that fit well on the training data but do not generalize to unseen data).
One way to avoid this is to penalize complex functions by instead minimizing the regularized risk
\begin{equation}
R_{reg}[f,S] := R_{reg,\lambda}[f,S] := R_{emp}[f] + \frac{\lambda}{2}\|f\|_{H}^2
\end{equation}
where $\lambda > 0$ and $\|f\|_{H} = {\langle f,f \rangle}_{H}^{\frac{1}{2}}$ does indeed measure the complexity of $f$ in a sensible way. The constant $\lambda$ needs to be chosen appropriately for each problem. Since in online learning one is interested in dealing with one example at a time, the definition of an instantaneous regularized risk on a single example is needed:
\begin{equation}
R_{inst}[f,x,y] := R_{inst,\lambda}[f,x,y] := R_{reg,\lambda}[f,((x,y))]
\end{equation}

The implemented algorithms are classical stochastic gradient descent algorithms performing gradient descent on the instantaneous risk. The general form of the update rule is:
\begin{equation}
f_{t+1} = f_t - \eta_t \partial_f R_{inst,\lambda}[f,x_t,y_t]|_{f=f_t}
\end{equation}
where $f_t \in H$, $\partial_f$ is shorthand for $\partial/\partial f$ (the gradient with respect to $f$) and $\eta_t > 0$ is the learning rate. Due to the learning taking place in a \textit{reproducing kernel Hilbert space} $H$, the kernel $k$ used has the property $\langle f,k(x,\cdotp)\rangle_H = f(x)$ and therefore
\begin{equation}
\partial_f l(f(x_t),y_t) = l'(f(x_t),y_t)k(x_t,\cdotp)
\end{equation}
where $l'(z,y) := \partial_z l(z,y)$. Since $\partial_f\|f\|_H^2 = 2f$, the update becomes
\begin{equation}
f_{t+1} := (1 - \eta_t\lambda)f_t -\eta_t l'( f_t(x_t),y_t)k(x_t,\cdotp)
\end{equation}

The \code{onlearn} function implements the online learning algorithm for regression, classification and novelty detection. The online nature of the algorithm requires a different approach to the use of the function. An object is used to store the state of the algorithm at each iteration $t$; this object is passed to the function as an argument and is returned at each iteration $t+1$ containing the model parameter state at this step. An empty object of class \code{onlearn} is initialized using the \code{inlearn} function.
<<>>=
## create toy data set
x <- rbind(matrix(rnorm(90), , 2), matrix(rnorm(90) + 3, , 2))
y <- matrix(c(rep(1, 45), rep(-1, 45)), , 1)
## initialize onlearn object
on <- inlearn(2, kernel = "rbfdot", kpar = list(sigma = 0.2),
              type = "classification")
ind <- sample(1:90, 90)
## learn one data point at the time
for(i in ind)
  on <- onlearn(on, x[i, ], y[i], nu = 0.03, lambda = 0.1)
sign(predict(on, x))
@

\subsection{Spectral clustering}

Spectral clustering \citep{kernlab:Ng:2001} is a promising alternative to common clustering algorithms that has emerged recently. In this method one uses the top eigenvectors of a matrix created by some similarity measure to cluster the data. Similarly to the ranking algorithm, an affinity matrix is created from the data as
\begin{equation}
K_{ij}=\exp(-\sigma\|x_i - x_j \|^2)
\end{equation}
and normalized as $L = D^{-1/2}KD^{-1/2}$ where $D_{ii} = \sum_{j=1}^m K_{ij}$. Then the top $k$ eigenvectors (where $k$ is the number of clusters to be found) of the affinity matrix are used to form an $n \times k$ matrix $Y$ where each column is normalized again to unit length. Treating each row of this matrix as a data point, \code{kmeans} is finally used to cluster the points. \pkg{kernlab} includes an \proglang{S4} method called \code{specc} implementing this algorithm which can be used through a formula interface or a matrix interface.
The \proglang{S4} object returned by the method extends the class ``vector'' and contains the assigned cluster for each point along with information on the centers, size, and within-cluster sum of squares for each cluster. In case a Gaussian RBF kernel is being used, a model selection process can be used to determine the optimal value of the $\sigma$ hyper-parameter. For a good value of $\sigma$ the values of $Y$ tend to cluster tightly, and it turns out that the within-cluster sum of squares is a good indicator for the ``quality'' of the sigma parameter found. We then iterate through the sigma values to find an optimal value for $\sigma$.

\begin{figure}
\centering
<<>>=
data(spirals)
sc <- specc(spirals, centers = 2)
plot(spirals, pch = (23 - 2*sc))
@
\caption{Clustering the two spirals data set with \code{specc}}
\label{fig:Spectral Clustering}
\end{figure}

\subsection{Kernel principal components analysis}

Principal component analysis (PCA) is a powerful technique for extracting structure from possibly high-dimensional data sets. PCA is an orthogonal transformation of the coordinate system in which we describe the data. The new coordinates by which we represent the data are called principal components. Kernel PCA \citep{kernlab:Schoelkopf:1998} performs a nonlinear transformation of the coordinate system by finding principal components which are nonlinearly related to the input variables. Given a set of centered observations $x_k$, $k=1,\dots,M$, $x_k \in \mathbf{R}^N$, PCA diagonalizes the covariance matrix $C = \frac{1}{M}\sum_{j=1}^M x_jx_{j}^T$ by solving the eigenvalue problem $\lambda\mathbf{v}=C\mathbf{v}$. The same computation can be done in a dot product space $F$ which is related to the input space by a possibly nonlinear map $\Phi:\mathbf{R}^N \rightarrow F$, $x \mapsto \mathbf{X}$. Assuming that we deal with centered data and use the covariance matrix in $F$,
\begin{equation}
\hat{C}=\frac{1}{M}\sum_{j=1}^M \Phi(x_j)\Phi(x_j)^T
\end{equation}
the kernel principal components are then computed by taking the eigenvectors of the centered kernel matrix $K_{ij} = \langle \Phi(x_i),\Phi(x_j) \rangle$.

\code{kpca}, the function implementing KPCA in \pkg{kernlab}, can be used both with a formula and a matrix interface, and returns an \proglang{S4} object of class \code{kpca} containing the principal components and the corresponding eigenvalues, along with the projection of the training data on the new coordinate system. Furthermore, the \code{predict} function can be used to embed new data points into the new coordinate system.

\begin{figure}
\centering
<<>>=
data(spam)
train <- sample(1:dim(spam)[1], 400)
kpc <- kpca(~., data = spam[train, -58], kernel = "rbfdot",
            kpar = list(sigma = 0.001), features = 2)
kpcv <- pcv(kpc)
plot(rotated(kpc), col = as.integer(spam[train, 58]),
     xlab = "1st Principal Component", ylab = "2nd Principal Component")
@
\caption{Projection of the spam data on two kernel principal components using an RBF kernel}
\label{fig:KPCA}
\end{figure}

\subsection{Kernel feature analysis}

Whilst KPCA leads to very good results, there are nevertheless some issues to be addressed. First, the standard version of KPCA is computationally expensive, scaling as $O(m^3)$; second, the resulting feature extractors are given as a dense expansion in terms of the training patterns. Sparse solutions are often achieved in supervised learning settings by using an $l_1$ penalty on the expansion coefficients.
An algorithm can be derived using the same approach in feature extraction, requiring only $n$ basis functions to compute the first $n$ features. Kernel feature analysis \citep{kernlab:Olvi:2000} is computationally simple and scales approximately one order of magnitude better on large data sets than standard KPCA. Choosing $\Omega [f] = \sum_{i=1}^m |\alpha_i |$ this yields
\begin{equation}
F_{LP} = \{ \mathbf{w} \,\vert\, \mathbf{w} = \sum_{i=1}^m \alpha_i \Phi(x_i) \mbox{~with~} \sum_{i=1}^m |\alpha_i | \leq 1 \}
\end{equation}
This setting leads to the first ``principal vector'' in the $l_1$ context
\begin{equation}
\mathbf{\nu}^1 = \mathrm{argmax}_{\mathbf{\nu} \in F_{LP}} \frac{1}{m} \sum_{i=1}^m \left\langle \mathbf{\nu},\mathbf{\Phi}(x_i) - \frac{1}{m}\sum_{j=1}^m\mathbf{\Phi}(x_j) \right\rangle^2
\end{equation}
Subsequent ``principal vectors'' can be defined by enforcing optimality with respect to the remaining orthogonal subspaces. Due to the $l_1$ constraint the solution has the favorable property of being sparse in terms of the coefficients $\alpha_i$.

The function \code{kfa} in \pkg{kernlab} implements Kernel Feature Analysis by using a projection pursuit technique on a sample of the data. Results are then returned in an \proglang{S4} object.

\begin{figure}
\centering
<<>>=
data(promotergene)
f <- kfa(~., data = promotergene, features = 2, kernel = "rbfdot",
         kpar = list(sigma = 0.013))
plot(predict(f, promotergene), col = as.numeric(promotergene[, 1]),
     xlab = "1st Feature", ylab = "2nd Feature")
@
\caption{Projection of the promoter gene data on two features using an RBF kernel}
\label{fig:KFA}
\end{figure}

\subsection{Kernel canonical correlation analysis}

Canonical correlation analysis (CCA) is concerned with describing the linear relations between variables. If we have two data sets $x_1$ and $x_2$, then the classical CCA attempts to find linear combinations of the variables which give the maximum correlation between the combinations. I.e., if
\begin{eqnarray*}
&& y_1 = \mathbf{w_1}\mathbf{x_1} = \sum_j w_{1j} x_{1j} \\
&& y_2 = \mathbf{w_2}\mathbf{x_2} = \sum_j w_{2j} x_{2j}
\end{eqnarray*}
one wishes to find those values of $\mathbf{w_1}$ and $\mathbf{w_2}$ which maximize the correlation between $y_1$ and $y_2$. Similarly to the KPCA algorithm, CCA can be extended and used in a dot product space~$F$ which is related to the input space by a possibly nonlinear map $\Phi:\mathbf{R}^N \rightarrow F$, $x \mapsto \mathbf{X}$, as
\begin{eqnarray*}
&& y_1 = \mathbf{w_1}\mathbf{\Phi(x_1)} = \sum_j w_{1j} \Phi(x_{1j}) \\
&& y_2 = \mathbf{w_2}\mathbf{\Phi(x_2)} = \sum_j w_{2j} \Phi(x_{2j})
\end{eqnarray*}

Following \citep{kernlab:kuss:2003}, the \pkg{kernlab} implementation of KCCA projects the data vectors on a new coordinate system using KPCA and uses linear CCA to retrieve the correlation coefficients. The \code{kcca} method in \pkg{kernlab} returns an \proglang{S4} object containing the correlation coefficients for each data set and the corresponding correlation along with the kernel used.

\subsection{Interior point code quadratic optimizer}

In many kernel-based algorithms, learning implies the minimization of some risk function. Typically we have to deal with quadratic or general convex problems for support vector machines of the type
\begin{equation}
\begin{array}{ll}
\mathrm{minimize} & f(x) \\
\mbox{subject to~} & c_i(x) \leq 0 \mbox{~for all~} i \in [n].
\end{array}
\end{equation}
$f$ and $c_i$ are convex functions and $n \in \mathbf{N}$. \pkg{kernlab} provides the \proglang{S4} method \code{ipop} implementing an optimizer of the interior point family \citep{kernlab:Vanderbei:1999} which solves the quadratic programming problem
\begin{equation}
\begin{array}{ll}
\mathrm{minimize} & c^\top x+\frac{1}{2}x^\top H x \\
\mbox{subject to~} & b \leq Ax \leq b + r\\
& l \leq x \leq u \\
\end{array}
\end{equation}
This optimizer can be used in regression, classification, and novelty detection in SVMs.
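As a small sketch of its use (the numbers are illustrative and not taken from any of the included data sets), consider minimizing $-x_1 - x_2 + \frac{1}{2}(x_1^2 + x_2^2)$ subject to $0 \leq x_1 + x_2 \leq 1$ and $0 \leq x_1, x_2 \leq 1$; the constrained optimum lies at $x_1 = x_2 = 0.5$:

<<>>=
cvec <- c(-1, -1)          # linear term c
H <- diag(2)               # quadratic term H
A <- matrix(c(1, 1), 1)    # constraint matrix A
b <- 0; r <- 1             # b <= A x <= b + r
l <- c(0, 0); u <- c(1, 1) # l <= x <= u
qp <- ipop(cvec, H, A, b, l, u, r)
primal(qp)
@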
\subsection{Incomplete Cholesky decomposition}

When dealing with kernel-based algorithms, calculating a full kernel matrix should be avoided since it is already an $O(N^2)$ operation. Fortunately, the fact that kernel matrices are positive semidefinite is a strong constraint and good approximations can be found with small computational cost. The Cholesky decomposition factorizes a positive semidefinite $N \times N$ matrix $K$ as $K=ZZ^T$, where $Z$ is an upper triangular $N \times N$ matrix. Exploiting the fact that kernel matrices are usually of low rank, an \emph{incomplete Cholesky decomposition} \citep{kernlab:Wright:1999} finds a matrix $\tilde{Z}$ of size $N \times M$ where $M\ll N$ such that the norm of $K-\tilde{Z}\tilde{Z}^T$ is smaller than a given tolerance $\theta$. The main difference of the incomplete Cholesky decomposition from the standard Cholesky decomposition is that pivots which are below a certain threshold are simply skipped. If $L$ is the number of skipped pivots, we obtain a $\tilde{Z}$ with only $M = N - L$ columns. The algorithm works by picking the column from $K$ to be added by maximizing a lower bound on the reduction of the error of the approximation. \pkg{kernlab} has an implementation of an incomplete Cholesky factorization called \code{inchol} which computes the decomposed matrix $\tilde{Z}$ from the original data for any given kernel without the need to compute a full kernel matrix beforehand. This has the advantage that no full kernel matrix has to be stored in memory.

\section{Conclusions}

In this paper we described \pkg{kernlab}, a flexible and extensible kernel methods package for \proglang{R} with existing modern kernel algorithms along with tools for constructing new kernel-based algorithms. It provides a unified framework for using and creating kernel-based algorithms in \proglang{R} while using all of \proglang{R}'s modern facilities, like \proglang{S4} classes and namespaces. Our aim for the future is to extend the package and add more kernel-based methods as well as kernel-relevant tools. Sources and binaries for the latest version of \pkg{kernlab} are available at CRAN\footnote{\url{http://CRAN.R-project.org}} under the GNU General Public License.

A shorter version of this introduction to the \proglang{R} package \pkg{kernlab} is published as \cite{kernlab:Karatzoglou+Smola+Hornik:2004} in the \emph{Journal of Statistical Software}.

\bibliography{jss}

\end{document}
kernlab/R/0000755000175100001440000000000014221633213012104 5ustar hornikuserskernlab/R/sigest.R0000644000175100001440000000465612676465031013545 0ustar hornikusers## sigma estimation for RBF kernels ## author: alexandros setGeneric("sigest", function(x, ...) standardGeneric("sigest")) setMethod("sigest",signature(x="formula"), function (x, data=NULL, frac = 0.5, na.action = na.omit, scaled = TRUE){ call <- match.call() m <- match.call(expand.dots = FALSE) if (is.matrix(eval(m$data, parent.frame()))) m$data <- as.data.frame(data) ## m$...
<- NULL m$formula <- m$x m$x <- NULL m$scaled <- NULL m$frac <- NULL m[[1L]] <- quote(stats::model.frame) m <- eval(m, parent.frame()) Terms <- attr(m, "terms") attr(Terms, "intercept") <- 0 x <- model.matrix(Terms, m) if (length(scaled) == 1) scaled <- rep(scaled, ncol(x)) if (any(scaled)) { remove <- unique(c(which(labels(Terms) %in% names(attr(x, "contrasts"))), which(!scaled) ) ) scaled <- !attr(x, "assign") %in% remove } ret <- sigest(x, scaled = scaled, frac = frac, na.action = na.action) return (ret) }) setMethod("sigest",signature(x="matrix"), function (x, frac = 0.5, scaled = TRUE, na.action = na.omit) { x <- na.action(x) if (length(scaled) == 1) scaled <- rep(scaled, ncol(x)) if (any(scaled)) { co <- !apply(x[,scaled, drop = FALSE], 2, var) if (any(co)) { scaled <- rep(FALSE, ncol(x)) warning(paste("Variable(s)", paste("`",colnames(x[,scaled, drop = FALSE])[co], "'", sep="", collapse=" and "), "constant. Cannot scale data.") ) } else { xtmp <- scale(x[,scaled]) x[,scaled] <- xtmp } } m <- dim(x)[1] n <- floor(frac*m) index <- sample(1:m, n, replace = TRUE) index2 <- sample(1:m, n, replace = TRUE) temp <- x[index,, drop=FALSE] - x[index2,,drop=FALSE] dist <- rowSums(temp^2) srange <- 1/quantile(dist[dist!=0],probs=c(0.9,0.5,0.1)) ## ds <- sort(dist[dist!=0]) ## sl <- ds[ceiling(0.2*length(ds))] ## su <- ds[ceiling(0.8*length(ds))] ## srange <- c(1/su,1/median(ds), 1/sl) ## names(srange) <- NULL return(srange) }) kernlab/R/ksvm.R0000644000175100001440000034643414221633101013221 0ustar hornikusers## Support Vector Machines ## author : alexandros karatzoglou ## updated : 08.02.06 setGeneric("ksvm", function(x, ...) standardGeneric("ksvm")) setMethod("ksvm",signature(x="formula"), function (x, data=NULL, ..., subset, na.action = na.omit, scaled = TRUE){ cl <- match.call() m <- match.call(expand.dots = FALSE) if (is.matrix(eval(m$data, parent.frame()))) m$data <- as.data.frame(data) m$... <- NULL m$formula <- m$x m$x <- NULL m$scaled <- NULL m[[1L]] <- quote(stats::model.frame) m <- eval(m, parent.frame()) Terms <- attr(m, "terms") attr(Terms, "intercept") <- 0 ## no intercept x <- model.matrix(Terms, m) y <- model.extract(m, "response") if (length(scaled) == 1) scaled <- rep(scaled, ncol(x)) if (any(scaled)) { remove <- unique(c(which(labels(Terms) %in% names(attr(x, "contrasts"))), which(!scaled) ) ) scaled <- !attr(x, "assign") %in% remove } ret <- ksvm(x, y, scaled = scaled, ...) kcall(ret) <- cl attr(Terms,"intercept") <- 0 ## no intercept terms(ret) <- Terms if (!is.null(attr(m, "na.action"))) n.action(ret) <- attr(m, "na.action") return (ret) }) setMethod("ksvm",signature(x="vector"), function(x, ...) { x <- t(t(x)) ret <- ksvm(x, ...) return(ret) }) setMethod("ksvm",signature(x="matrix"), function (x, y = NULL, scaled = TRUE, type = NULL, kernel = "rbfdot", kpar = "automatic", C = 1, nu = 0.2, epsilon = 0.1, prob.model = FALSE, class.weights = NULL, cross = 0, fit = TRUE, cache = 40, tol = 0.001, shrinking = TRUE, ... ,subset ,na.action = na.omit) { ## Comment out sparse code, future impl. 
will be based on "Matrix" ## sparse <- inherits(x, "matrix.csr") ## if (sparse) { ## if (!require(SparseM)) ## stop("Need SparseM package for handling of sparse structures!") ## } sparse <- FALSE if(is.character(kernel)){ kernel <- match.arg(kernel,c("rbfdot","polydot","tanhdot","vanilladot","laplacedot","besseldot","anovadot","splinedot","matrix")) if(kernel == "matrix") if(dim(x)[1]==dim(x)[2]) return(ksvm(as.kernelMatrix(x), y = y, type = type, C = C, nu = nu, epsilon = epsilon, prob.model = prob.model, class.weights = class.weights, cross = cross, fit = fit, cache = cache, tol = tol, shrinking = shrinking, ...)) else stop(" kernel matrix not square!") if(is.character(kpar)) if((kernel == "tanhdot" || kernel == "vanilladot" || kernel == "polydot"|| kernel == "besseldot" || kernel== "anovadot"|| kernel=="splinedot") && kpar=="automatic" ) { cat (" Setting default kernel parameters ","\n") kpar <- list() } } ## subsetting and na-handling for matrices ret <- new("ksvm") if (!missing(subset)) x <- x[subset,] if (is.null(y)) x <- na.action(x) else { df <- na.action(data.frame(y, x)) y <- df[,1] x <- as.matrix(df[,-1]) } n.action(ret) <- na.action if (is.null(type)) type(ret) <- if (is.null(y)) "one-svc" else if (is.factor(y)) "C-svc" else "eps-svr" if(!is.null(type)) type(ret) <- match.arg(type,c("C-svc", "nu-svc", "kbb-svc", "spoc-svc", "C-bsvc", "one-svc", "eps-svr", "eps-bsvr", "nu-svr")) ## ## scaling, subsetting, and NA handling ## if (sparse) { ## scale <- rep(FALSE, ncol(x)) ## if(!is.null(y)) na.fail(y) ## x <- t(t(x)) ## make shure that col-indices are sorted ## } x.scale <- y.scale <- NULL ## scaling if (length(scaled) == 1) scaled <- rep(scaled, ncol(x)) if (any(scaled)) { co <- !apply(x[,scaled, drop = FALSE], 2, var) if (any(co)) { scaled <- rep(FALSE, ncol(x)) warning(paste("Variable(s)", paste("`",colnames(x[,scaled, drop = FALSE])[co], "'", sep="", collapse=" and "), "constant. 
Cannot scale data.") ) } else { xtmp <- scale(x[,scaled]) x[,scaled] <- xtmp x.scale <- attributes(xtmp)[c("scaled:center","scaled:scale")] if (is.numeric(y)&&(type(ret)!="C-svc"&&type(ret)!="nu-svc"&&type(ret)!="C-bsvc"&&type(ret)!="spoc-svc"&&type(ret)!="kbb-svc")) { y <- scale(y) y.scale <- attributes(y)[c("scaled:center","scaled:scale")] y <- as.vector(y) } } } ncols <- ncol(x) m <- nrows <- nrow(x) if (!is.function(kernel)) if (!is.list(kpar)&&is.character(kpar)&&(is(kernel, "rbfkernel") || is(kernel, "laplacedot") || kernel == "laplacedot"|| kernel=="rbfdot")){ kp <- match.arg(kpar,"automatic") if(kp=="automatic") kpar <- list(sigma=mean(sigest(x,scaled=FALSE)[c(1,3)])) #cat("Using automatic sigma estimation (sigest) for RBF or laplace kernel","\n") } if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") if (!is(y,"vector") && !is.factor (y) & is(y,"matrix") & !(type(ret)=="one-svc")) stop("y must be a vector or a factor.") if(!(type(ret)=="one-svc")) if(is(y,"vector") | is(y,"factor") ) ym <- length(y) else if(is(y,"matrix")) ym <- dim(y)[1] else stop("y must be a matrix or a vector") if ((type(ret) != "one-svc") && ym != m) stop("x and y don't match.") if(nu > 1|| nu <0) stop("nu must be between 0 an 1.") weightlabels <- NULL nweights <- 0 weight <- 0 wl <- 0 ## in case of classification: transform factors into integers if (type(ret) == "one-svc") # one class classification --> set dummy y <- 1 else if (is.factor(y)) { lev(ret) <- levels (y) y <- as.integer (y) if (!is.null(class.weights)) { weightlabels <- match (names(class.weights),lev(ret)) if (any(is.na(weightlabels))) stop ("At least one level name is missing or misspelled.") } } else { if ((type(ret) =="C-svc" || type(ret) == "nu-svc" ||type(ret) == "C-bsvc" || type(ret) == "spoc-svc" || type(ret) == "kbb-svc") && any(as.integer (y) != y)) stop ("dependent variable has to be of factor or integer type for classification mode.") if (type(ret) != "eps-svr" || type(ret) != "nu-svr"|| type(ret)!="eps-bsvr") lev(ret) <- sort(unique (y)) } ## initialize nclass(ret) <- length (unique(y)) p <- 0 K <- 0 svindex <- problem <- NULL sigma <- 0.1 degree <- offset <- scale <- 1 switch(is(kernel)[1], "rbfkernel" = { sigma <- kpar(kernel)$sigma ktype <- 2 }, "tanhkernel" = { sigma <- kpar(kernel)$scale offset <- kpar(kernel)$offset ktype <- 3 }, "polykernel" = { degree <- kpar(kernel)$degree sigma <- kpar(kernel)$scale offset <- kpar(kernel)$offset ktype <- 1 }, "vanillakernel" = { ktype <- 0 }, "laplacekernel" = { ktype <- 5 sigma <- kpar(kernel)$sigma }, "besselkernel" = { ktype <- 6 sigma <- kpar(kernel)$sigma degree <- kpar(kernel)$order offset <- kpar(kernel)$degree }, "anovakernel" = { ktype <- 7 sigma <- kpar(kernel)$sigma degree <- kpar(kernel)$degree }, "splinekernel" = { ktype <- 8 }, { ktype <- 4 } ) prior(ret) <- list(NULL) ## C classification if(type(ret) == "C-svc"){ indexes <- lapply(sort(unique(y)), function(kk) which(y == kk)) for (i in 1:(nclass(ret)-1)) { jj <- i+1 for(j in jj:nclass(ret)) { p <- p+1 ## prepare the data li <- length(indexes[[i]]) lj <- length(indexes[[j]]) if(y[indexes[[i]][1]] < y[indexes[[j]]][1]) { yd <- c(rep(-1,li),rep(1,lj)) if(!is.null(class.weights)){ weight <- class.weights[weightlabels[c(j,i)]] wl <- c(1,0) nweights <- 2 } } else { yd <- c(rep(1,li),rep(-1,lj)) if(!is.null(class.weights)){ weight <- class.weights[weightlabels[c(i,j)]] wl <- c(0,1) 
nweigths <- 2 } } boolabel <- yd >= 0 prior1 <- sum(boolabel) md <- length(yd) prior0 <- md - prior1 prior(ret)[[p]] <- list(prior1 = prior1, prior0 = prior0) if(ktype==4) K <- kernelMatrix(kernel,x[c(indexes[[i]],indexes[[j]]), ,drop=FALSE]) resv <- .Call(smo_optim, as.double(t(x[c(indexes[[i]],indexes[[j]]), ,drop=FALSE])), as.integer(li+lj), as.integer(ncol(x)), as.double(yd), as.double(K), as.integer(if (sparse) x[c(indexes[[i]],indexes[[j]]), ,drop=FALSE]@ia else 0), as.integer(if (sparse) x[c(indexes[[i]],indexes[[j]]), ,drop=FALSE]@ja else 0), as.integer(sparse), as.double(matrix(rep(-1,m))), ##linear term as.integer(ktype), as.integer(0), as.double(C), as.double(nu), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.integer(wl), ##weightlabel as.double(weight), as.integer(nweights), as.double(cache), as.double(tol), as.integer(shrinking)) reind <- sort(c(indexes[[i]],indexes[[j]]),method="quick",index.return=TRUE)$ix tmpres <- resv[c(-(li+lj+1),-(li+lj+2))][reind] ## alpha svind <- tmpres > 0 alpha(ret)[p] <- list(tmpres[svind]) ## coefficients alpha*y coef(ret)[p] <- list(alpha(ret)[[p]]*yd[reind][svind]) ## store SV indexes from current problem for later use in predict alphaindex(ret)[p] <- list(c(indexes[[i]],indexes[[j]])[reind][svind]) ## store Support Vectors xmatrix(ret)[p] <- list(x[c(indexes[[i]],indexes[[j]]), ,drop=FALSE][reind,,drop=FALSE][svind, ,drop=FALSE]) ## save the indexes from all the SV in a vector (use unique?) svindex <- c(svindex,alphaindex(ret)[[p]]) ## store betas in a vector b(ret) <- c(b(ret), resv[li+lj+1]) ## store objective function values in a vector obj(ret) <- c(obj(ret), resv[li+lj+2]) ## used to reconstruct indexes for the patterns matrix x from "indexes" (really usefull ?) problem[p] <- list(c(i,j)) ##store C in return object param(ret)$C <- C ## margin(ret)[p] <- (min(kernelMult(kernel,xd[1:li,],,alpha(ret)[[p]][1:li])) - max(kernelMult(kernel,xd[li:(li+lj),],,alpha(ret)[[p]][li:(li+lj)])))/2 } } } ## nu classification if(type(ret) == "nu-svc"){ indexes <- lapply(sort(unique(y)), function(kk) which(y == kk)) for (i in 1:(nclass(ret)-1)) { jj <- i+1 for(j in jj:nclass(ret)) { p <- p+1 ##prepare data li <- length(indexes[[i]]) lj <- length(indexes[[j]]) if(y[indexes[[i]][1]] < y[indexes[[j]]][1]) { yd <- c(rep(-1,li),rep(1,lj)) if(!is.null(class.weights)){ weight <- class.weights[weightlabels[c(j,i)]] wl <- c(1,0) nweights <- 2 } } else { yd <- c(rep(1,li),rep(-1,lj)) if(!is.null(class.weights)){ weight <- class.weights[weightlabels[c(i,j)]] wl <- c(0,1) nweigths <- 2 } } boolabel <- yd >= 0 prior1 <- sum(boolabel) md <- length(yd) prior0 <- md - prior1 prior(ret)[[p]] <- list(prior1 = prior1, prior0 = prior0) if(ktype==4) K <- kernelMatrix(kernel,x[c(indexes[[i]],indexes[[j]]), ,drop=FALSE]) resv <- .Call(smo_optim, as.double(t(x[c(indexes[[i]],indexes[[j]]), ,drop=FALSE])), as.integer(li+lj), as.integer(ncol(x)), as.double(yd), as.double(K), as.integer(if (sparse) x[c(indexes[[i]],indexes[[j]]), ,drop=FALSE]@ia else 0), as.integer(if (sparse) x[c(indexes[[i]],indexes[[j]]), ,drop=FALSE]@ja else 0), as.integer(sparse), as.double(matrix(rep(-1,m))), #linear term as.integer(ktype), as.integer(1), as.double(C), as.double(nu), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.integer(wl), #weightlabl. 
as.double(weight), as.integer(nweights), as.double(cache), as.double(tol), as.integer(shrinking)) reind <- sort(c(indexes[[i]],indexes[[j]]),method="quick",index.return=TRUE)$ix tmpres <- resv[c(-(li+lj+1),-(li+lj+2))][reind] svind <- tmpres != 0 alpha(ret)[p] <- coef(ret)[p] <- list(tmpres[svind]) ##store SV indexes from current problem for later use in predict alphaindex(ret)[p] <- list(c(indexes[[i]],indexes[[j]])[reind][svind]) ## store Support Vectors xmatrix(ret)[p] <- list(x[c(indexes[[i]],indexes[[j]]), ,drop=FALSE][reind,,drop=FALSE][svind,,drop=FALSE]) ##save the indexes from all the SV in a vector (use unique!) svindex <- c(svindex,alphaindex(ret)[[p]]) ## store betas in a vector b(ret) <- c(b(ret), resv[li+lj+1]) ## store objective function values in a vector obj(ret) <- c(obj(ret), resv[li+lj+2]) ## used to reconstruct indexes for the patterns matrix x from "indexes" problem[p] <- list(c(i,j)) param(ret)$nu <- nu ## margin(ret)[p] <- (min(kernelMult(kernel,xd[1:li,],,alpha(ret)[[p]][1:li])) - max(kernelMult(kernel,xd[li:(li+lj),],,alpha(ret)[[p]][li:(li+lj)])))/2 } } } ## Bound constraint C classification if(type(ret) == "C-bsvc"){ if(!is.null(class.weights)) weightedC <- class.weights[weightlabels] * rep(C,nclass(ret)) else weightedC <- rep(C,nclass(ret)) indexes <- lapply(sort(unique(y)), function(kk) which(y == kk)) for (i in 1:(nclass(ret)-1)) { jj <- i+1 for(j in jj:nclass(ret)) { p <- p+1 ##prepare data li <- length(indexes[[i]]) lj <- length(indexes[[j]]) if(y[indexes[[i]][1]] < y[indexes[[j]]][1]) { yd <- c(rep(-1,li),rep(1,lj)) if(!is.null(class.weights)){ weight <- class.weights[weightlabels[c(j,i)]] wl <- c(1,0) nweights <- 2 } } else { yd <- c(rep(1,li),rep(-1,lj)) if(!is.null(class.weights)){ weight <- class.weights[weightlabels[c(i,j)]] wl <- c(0,1) nweigths <- 2 } } boolabel <- yd >= 0 prior1 <- sum(boolabel) md <- length(yd) prior0 <- md - prior1 prior(ret)[[p]] <- list(prior1 = prior1, prior0 = prior0) if(ktype==4) K <- kernelMatrix(kernel,x[c(indexes[[i]],indexes[[j]]), ,drop=FALSE]) resv <- .Call(tron_optim, as.double(t(x[c(indexes[[i]],indexes[[j]]), ,drop=FALSE])), as.integer(li+lj), as.integer(ncol(x)), as.double(yd), as.double(K), as.integer(if (sparse) x[c(indexes[[i]],indexes[[j]]), ,drop=FALSE]@ia else 0), as.integer(if (sparse) x[c(indexes[[i]],indexes[[j]]), ,drop=FALSE]@ja else 0), as.integer(sparse), as.integer(2), as.double(0), ##countc as.integer(ktype), as.integer(5), as.double(C), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.double(1), ## cost value of alpha seeding as.double(2), ## step value of alpha seeding as.integer(wl), ##weightlabel as.double(weight), as.integer(nweights), as.double(weightedC), as.double(cache), as.double(tol), as.integer(10), ##qpsize as.integer(shrinking)) reind <- sort(c(indexes[[i]],indexes[[j]]),method="quick",index.return=TRUE)$ix svind <- resv[-(li+lj+1)][reind] > 0 alpha(ret)[p] <- list(resv[-(li+lj+1)][reind][svind]) ## nonzero alpha*y coef(ret)[p] <- list(alpha(ret)[[p]] * yd[reind][svind]) ## store SV indexes from current problem for later use in predict alphaindex(ret)[p] <- list(c(indexes[[i]],indexes[[j]])[reind][svind]) ## store Support Vectors xmatrix(ret)[p] <- list(x[c(indexes[[i]],indexes[[j]]), ,drop=FALSE][reind,,drop = FALSE][svind,,drop = FALSE]) ## save the indexes from all the SV in a vector (use unique?) svindex <- c(svindex,alphaindex(ret)[[p]]) ## store betas in a vector b(ret) <- - sapply(coef(ret),sum) ## store obj. 
values in vector obj(ret) <- c(obj(ret), resv[(li+lj+1)]) ## used to reconstruct indexes for the patterns matrix x from "indexes" (really usefull ?) problem[p] <- list(c(i,j)) ##store C in return object param(ret)$C <- C ## margin(ret)[p] <- (min(kernelMult(kernel,xd[1:li,],,alpha(ret)[[p]][1:li])) - max(kernelMult(kernel,xd[li:(li+lj),],,alpha(ret)[[p]][li:(li+lj)])))/2 } } } ## SPOC multiclass classification if(type(ret) =="spoc-svc") { if(!is.null(class.weights)) weightedC <- class.weights[weightlabels] * rep(C,nclass(ret)) else weightedC <- rep(C,nclass(ret)) yd <- sort(y,method="quick", index.return = TRUE) xd <- matrix(x[yd$ix,],nrow=dim(x)[1]) count <- 0 if(ktype==4) K <- kernelMatrix(kernel,x) resv <- .Call(tron_optim, as.double(t(xd)), as.integer(nrow(xd)), as.integer(ncol(xd)), as.double(rep(yd$x-1,2)), as.double(K), as.integer(if (sparse) xd@ia else 0), as.integer(if (sparse) xd@ja else 0), as.integer(sparse), as.integer(nclass(ret)), as.integer(count), as.integer(ktype), as.integer(7), as.double(C), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.double(C), as.double(2), #Cstep as.integer(0), #weightlabel as.double(0), as.integer(0), as.double(weightedC), as.double(cache), as.double(tol), as.integer(10), #qpsize as.integer(shrinking)) reind <- sort(yd$ix,method="quick",index.return=TRUE)$ix alpha(ret) <- t(matrix(resv[-(nclass(ret)*nrow(xd) + 1)],nclass(ret)))[reind,,drop=FALSE] coef(ret) <- lapply(1:nclass(ret), function(x) alpha(ret)[,x][alpha(ret)[,x]!=0]) names(coef(ret)) <- lev(ret) alphaindex(ret) <- lapply(sort(unique(y)), function(x) which(alpha(ret)[,x]!=0)) xmatrix(ret) <- x obj(ret) <- resv[(nclass(ret)*nrow(xd) + 1)] names(alphaindex(ret)) <- lev(ret) svindex <- which(rowSums(alpha(ret)!=0)!=0) b(ret) <- 0 param(ret)$C <- C } ## KBB multiclass classification if(type(ret) =="kbb-svc") { if(!is.null(class.weights)) weightedC <- weightlabels * rep(C,nclass(ret)) else weightedC <- rep(C,nclass(ret)) yd <- sort(y,method="quick", index.return = TRUE) x <- x[yd$ix,,drop=FALSE] count <- sapply(unique(yd$x), function(c) length(yd$x[yd$x==c])) if(ktype==4) K <- kernelMatrix(kernel,x) resv <- .Call(tron_optim, as.double(t(x)), as.integer(nrow(x)), as.integer(ncol(x)), as.double(yd$x-1), as.double(K), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.integer(nclass(ret)), as.integer(count), as.integer(ktype), as.integer(8), as.double(C), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.double(C), #Cbegin as.double(2), #Cstep as.integer(0), #weightlabl. 
as.double(0), as.integer(0), as.double(weightedC), as.double(cache), as.double(tol), as.integer(10), #qpsize as.integer(shrinking)) reind <- sort(yd$ix,method="quick",index.return=TRUE)$ix alpha(ret) <- matrix(resv[-(nrow(x)*(nclass(ret)-1)+1)],nrow(x))[reind,,drop=FALSE] xmatrix(ret) <- x<- x[reind,,drop=FALSE] coef(ret) <- lapply(1:(nclass(ret)-1), function(x) alpha(ret)[,x][alpha(ret)[,x]!=0]) alphaindex(ret) <- lapply(sort(unique(y)), function(x) which((y == x) & (rowSums(alpha(ret))!=0))) svindex <- which(rowSums(alpha(ret)!=0)!=0) b(ret) <- - sapply(coef(ret),sum) obj(ret) <- resv[(nrow(x)*(nclass(ret)-1)+1)] param(ret)$C <- C } ## Novelty detection if(type(ret) =="one-svc") { if(ktype==4) K <- kernelMatrix(kernel,x) resv <- .Call(smo_optim, as.double(t(x)), as.integer(nrow(x)), as.integer(ncol(x)), as.double(matrix(rep(1,m))), as.double(K), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.double(matrix(rep(-1,m))), as.integer(ktype), as.integer(2), as.double(C), as.double(nu), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.integer(0), #weightlabl. as.double(0), as.integer(0), as.double(cache), as.double(tol), as.integer(shrinking)) tmpres <- resv[c(-(m+1),-(m+2))] alpha(ret) <- coef(ret) <- tmpres[tmpres != 0] svindex <- alphaindex(ret) <- which(tmpres != 0) xmatrix(ret) <- x[svindex,,drop=FALSE] b(ret) <- resv[(m+1)] obj(ret) <- resv[(m+2)] param(ret)$nu <- nu } ## epsilon regression if(type(ret) =="eps-svr") { if(ktype==4) K <- kernelMatrix(kernel,x) resv <- .Call(smo_optim, as.double(t(x)), as.integer(nrow(x)), as.integer(ncol(x)), as.double(y), as.double(K), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.double(matrix(rep(-1,m))), as.integer(ktype), as.integer(3), as.double(C), as.double(nu), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.integer(0), #weightlabl. 
as.double(0), as.integer(0), as.double(cache), as.double(tol), as.integer(shrinking)) tmpres <- resv[c(-(m+1),-(m+2))] alpha(ret) <- coef(ret) <- tmpres[tmpres != 0] svindex <- alphaindex(ret) <- which(tmpres != 0) xmatrix(ret) <- x[svindex, ,drop=FALSE] b(ret) <- resv[(m+1)] obj(ret) <- resv[(m+2)] param(ret)$epsilon <- epsilon param(ret)$C <- C } ## nu regression if(type(ret) =="nu-svr") { if(ktype==4) K <- kernelMatrix(kernel,x) resv <- .Call(smo_optim, as.double(t(x)), as.integer(nrow(x)), as.integer(ncol(x)), as.double(y), as.double(K), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.double(matrix(rep(-1,m))), as.integer(ktype), as.integer(4), as.double(C), as.double(nu), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.integer(0), as.double(0), as.integer(0), as.double(cache), as.double(tol), as.integer(shrinking)) tmpres <- resv[c(-(m+1),-(m+2))] alpha(ret) <- coef(ret) <- tmpres[tmpres!=0] svindex <- alphaindex(ret) <- which(tmpres != 0) xmatrix(ret) <- x[svindex,,drop=FALSE] b(ret) <- resv[(m+1)] obj(ret) <- resv[(m+2)] param(ret)$epsilon <- epsilon param(ret)$nu <- nu } ## bound constraint eps regression if(type(ret) =="eps-bsvr") { if(ktype==4) K <- kernelMatrix(kernel,x) resv <- .Call(tron_optim, as.double(t(x)), as.integer(nrow(x)), as.integer(ncol(x)), as.double(y), as.double(K), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.integer(2), as.integer(0), as.integer(ktype), as.integer(6), as.double(C), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.double(1), #Cbegin as.double(2), #Cstep as.integer(0), #weightlabl. as.double(0), as.integer(0), as.double(0), as.double(cache), as.double(tol), as.integer(10), #qpsize as.integer(shrinking)) tmpres <- resv[-(m + 1)] alpha(ret) <- coef(ret) <- tmpres[tmpres!=0] svindex <- alphaindex(ret) <- which(tmpres != 0) xmatrix(ret) <- x[svindex,,drop=FALSE] b(ret) <- -sum(alpha(ret)) obj(ret) <- resv[(m + 1)] param(ret)$epsilon <- epsilon param(ret)$C <- C } kcall(ret) <- match.call() kernelf(ret) <- kernel ymatrix(ret) <- y SVindex(ret) <- sort(unique(svindex),method="quick") nSV(ret) <- length(unique(svindex)) if(nSV(ret)==0) stop("No Support Vectors found. 
You may want to change your parameters") fitted(ret) <- if (fit) predict(ret, x) else NULL if(any(scaled)) scaling(ret) <- list(scaled = scaled, x.scale = x.scale, y.scale = y.scale) if (fit){ if(type(ret)=="C-svc"||type(ret)=="nu-svc"||type(ret)=="spoc-svc"||type(ret)=="kbb-svc"||type(ret)=="C-bsvc") error(ret) <- 1 - .classAgreement(table(y,as.integer(fitted(ret)))) if(type(ret)=="one-svc") error(ret) <- sum(!fitted(ret))/m if(type(ret)=="eps-svr"||type(ret)=="nu-svr"||type(ret)=="eps-bsvr"){ if (!is.null(scaling(ret)$y.scale)){ scal <- scaling(ret)$y.scale$"scaled:scale" fitted(ret) <- fitted(ret) # / scaling(ret)$y.scale$"scaled:scale" + scaling(ret)$y.scale$"scaled:center" } else scal <- 1 error(ret) <- drop(crossprod(fitted(ret) - y)/m) } } cross(ret) <- -1 if(cross == 1) cat("\n","cross should be >1 no cross-validation done!","\n","\n") else if (cross > 1) { cerror <- 0 suppressWarnings(vgr<-split(sample(1:m,m),1:cross)) for(i in 1:cross) { cind <- unsplit(vgr[-i],factor(rep((1:cross)[-i],unlist(lapply(vgr[-i],length))))) if(type(ret)=="C-svc"||type(ret)=="nu-svc"||type(ret)=="spoc-svc"||type(ret)=="kbb-svc"||type(ret)=="C-bsvc") { if(is.null(class.weights)) cret <- ksvm(x[cind,],y[cind],type = type(ret),kernel=kernel,kpar = NULL, C=C, nu=nu, tol=tol, scaled=FALSE, cross = 0, fit = FALSE ,cache = cache) else cret <- ksvm(x[cind,],as.factor(lev(ret)[y[cind]]),type = type(ret),kernel=kernel,kpar = NULL, C=C, nu=nu, tol=tol, scaled=FALSE, cross = 0, fit = FALSE, class.weights = class.weights,cache = cache) cres <- predict(cret, x[vgr[[i]],,drop=FALSE]) cerror <- (1 - .classAgreement(table(y[vgr[[i]]],as.integer(cres))))/cross + cerror } if(type(ret)=="one-svc") { cret <- ksvm(x[cind,],type=type(ret),kernel=kernel,kpar = NULL,C=C,nu=nu,epsilon=epsilon,tol=tol,scaled=FALSE, cross = 0, fit = FALSE, cache = cache, prob.model = FALSE) cres <- predict(cret, x[vgr[[i]],, drop=FALSE]) cerror <- (1 - sum(cres)/length(cres))/cross + cerror } if(type(ret)=="eps-svr"||type(ret)=="nu-svr"||type(ret)=="eps-bsvr") { cret <- ksvm(x[cind,],y[cind],type=type(ret),kernel=kernel,kpar = NULL,C=C,nu=nu,epsilon=epsilon,tol=tol,scaled=FALSE, cross = 0, fit = FALSE, cache = cache, prob.model = FALSE) cres <- predict(cret, x[vgr[[i]],,drop=FALSE]) if (!is.null(scaling(ret)$y.scale)) scal <- scaling(ret)$y.scale$"scaled:scale" else scal <- 1 cerror <- drop((scal^2)*crossprod(cres - y[vgr[[i]]])/m) + cerror } } cross(ret) <- cerror } prob.model(ret) <- list(NULL) if(prob.model) { if(type(ret)=="C-svc"||type(ret)=="nu-svc"||type(ret)=="C-bsvc") { p <- 0 for (i in 1:(nclass(ret)-1)) { jj <- i+1 for(j in jj:nclass(ret)) { p <- p+1 ##prepare data li <- length(indexes[[i]]) lj <- length(indexes[[j]]) if(y[indexes[[i]][1]] < y[indexes[[j]]][1]) { yd <- c(rep(-1,li),rep(1,lj)) if(!is.null(class.weights)){ weight <- weightlabels[c(j,i)] wl <- c(1,0) nweights <- 2 } } else { yd <- c(rep(1,li),rep(-1,lj)) if(!is.null(class.weights)){ weight <- weightlabels[c(i,j)] wl <- c(0,1) nweigths <- 2 } } m <- li+lj suppressWarnings(vgr <- split(c(sample(1:li,li),sample((li+1):(li+lj),lj)),1:3)) pres <- yres <- NULL for(k in 1:3) { cind <- unsplit(vgr[-k],factor(rep((1:3)[-k],unlist(lapply(vgr[-k],length))))) if(is.null(class.weights)) cret <- ksvm(x[c(indexes[[i]],indexes[[j]]), ,drop=FALSE][cind,],yd[cind],type = type(ret),kernel=kernel,kpar = NULL, C=C, nu=nu, tol=tol, scaled=FALSE, cross = 0, fit = FALSE ,cache = cache, prob.model = FALSE) else cret <- ksvm(x[c(indexes[[i]],indexes[[j]]), 
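                  ## Platt scaling: decision values are collected on three
                  ## cross-validated folds and .probPlatt below fits a sigmoid
                  ## p(y=1|f) = 1/(1+exp(A*f+B)) to them. A minimal usage sketch
                  ## (illustrative only; `X` and `labs` are hypothetical data):
                  ##   fit <- ksvm(X, labs, type = "C-svc", kernel = "rbfdot", prob.model = TRUE)
                  ##   predict(fit, X, type = "probabilities")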
,drop=FALSE][cind,],
                               as.factor(lev(ret)[y[c(indexes[[i]],indexes[[j]])][cind]]),
                               type = type(ret), kernel=kernel, kpar = NULL,
                               C=C, nu=nu, tol=tol, scaled=FALSE, cross = 0, fit = FALSE,
                               class.weights = class.weights, cache = cache, prob.model = FALSE)
              yres <- c(yres, yd[vgr[[k]]])
              pres <- rbind(pres, predict(cret, x[c(indexes[[i]],indexes[[j]]), ,drop=FALSE][vgr[[k]],],type="decision"))
            }
            prob.model(ret)[[p]] <- .probPlatt(pres,yres)
          }
        }
      }
      if(type(ret) == "eps-svr"||type(ret) == "nu-svr"||type(ret)=="eps-bsvr"){
        suppressWarnings(vgr<-split(sample(1:m,m),1:3))
        pres <- NULL
        for(i in 1:3)
          {
            cind <- unsplit(vgr[-i],factor(rep((1:3)[-i],unlist(lapply(vgr[-i],length)))))
            cret <- ksvm(x[cind,],y[cind],type=type(ret),kernel=kernel,kpar = NULL,
                         C=C,nu=nu,epsilon=epsilon,tol=tol,scaled=FALSE,
                         cross = 0, fit = FALSE, cache = cache, prob.model = FALSE)
            cres <- predict(cret, x[vgr[[i]],])
            if (!is.null(scaling(ret)$y.scale))
              cres <- cres * scaling(ret)$y.scale$"scaled:scale" + scaling(ret)$y.scale$"scaled:center"
            pres <- rbind(pres, cres)
          }
        pres[abs(pres) > (5*sd(pres))] <- 0
        prob.model(ret) <- list(sum(abs(pres))/dim(pres)[1])
      }
    }
  return(ret)
})

## kernelmatrix interface

setMethod("ksvm",signature(x="kernelMatrix"),
function (x, y = NULL, type = NULL, C = 1, nu = 0.2, epsilon = 0.1,
          prob.model = FALSE, class.weights = NULL, cross = 0, fit = TRUE,
          cache = 40, tol = 0.001, shrinking = TRUE, ...)
{
  sparse <- FALSE
  ## subsetting and na-handling for matrices
  ret <- new("ksvm")
  if (is.null(type))
    type(ret) <- if (is.null(y)) "one-svc" else if (is.factor(y)) "C-svc" else "eps-svr"
  if(!is.null(type))
    type(ret) <- match.arg(type,c("C-svc", "nu-svc", "kbb-svc", "spoc-svc",
                                  "C-bsvc", "one-svc", "eps-svr", "eps-bsvr", "nu-svr"))
  ncols <- ncol(x)
  m <- nrows <- nrow(x)
  if (!is(y,"vector") && !is.factor (y) & !is(y,"matrix") & !(type(ret)=="one-svc"))
    stop("y must be a vector or a factor.")
  if(!(type(ret)=="one-svc"))
    if(is(y,"vector") | is(y,"factor"))
      ym <- length(y)
    else if(is(y,"matrix"))
      ym <- dim(y)[1]
    else
      stop("y must be a matrix or a vector")
  if ((type(ret) != "one-svc") && ym != m)
    stop("x and y don't match.")
  if(nu > 1 || nu < 0)
    stop("nu must be between 0 and 1.")
  weightlabels <- NULL
  nweights <- 0
  weight <- 0
  wl <- 0
  ## in case of classification: transform factors into integers
  if (type(ret) == "one-svc") # one class classification --> set dummy
    y <- 1
  else if (is.factor(y)) {
    lev(ret) <- levels (y)
    y <- as.integer (y)
    if (!is.null(class.weights)) {
      if (is.null(names (class.weights)))
        stop ("Weights have to be specified along with their according level names !")
      weightlabels <- match (names(class.weights),lev(ret))
      if (any(is.na(weightlabels)))
        stop ("At least one level name is missing or misspelled.")
    }
  }
  else {
    if ((type(ret) =="C-svc" || type(ret) == "nu-svc" ||type(ret) == "C-bsvc" || type(ret) == "spoc-svc" || type(ret) == "kbb-svc") && any(as.integer (y) != y))
      stop ("dependent variable has to be of factor or integer type for classification mode.")
    if (type(ret) != "eps-svr" && type(ret) != "nu-svr" && type(ret) != "eps-bsvr")
      lev(ret) <- sort(unique (y))
  }
  ## initialize
  nclass(ret) <- length (unique(y))
  p <- 0
  svindex <- problem <- NULL
  sigma <- 0.1
  degree <- offset <- scale <- 1
  ktype <- 4
  prior(ret) <- list(NULL)

  ## C classification
  if(type(ret) == "C-svc"){
    indexes <- lapply(sort(unique(y)), function(kk) which(y == kk))
    for (i in 1:(nclass(ret)-1)) {
      jj <- i+1
      for(j in jj:nclass(ret)) {
        p <- p+1
        ## prepare data
        li <- length(indexes[[i]])
        lj <- length(indexes[[j]])
        if(y[indexes[[i]][1]] < y[indexes[[j]]][1])
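          ## (the class whose integer code sorts first is labelled -1 in this
          ## pairwise subproblem, keeping the decision values consistently oriented)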
          {
            yd <- c(rep(-1,li),rep(1,lj))
            if(!is.null(class.weights)){
              weight <- class.weights[weightlabels[c(j,i)]]
              wl <- c(1,0)
              nweights <- 2
            }
          }
          else {
            yd <- c(rep(1,li),rep(-1,lj))
            if(!is.null(class.weights)){
              weight <- class.weights[weightlabels[c(i,j)]]
              wl <- c(0,1)
              nweights <- 2
            }
          }
          boolabel <- yd >= 0
          prior1 <- sum(boolabel)
          md <- length(yd)
          prior0 <- md - prior1
          prior(ret)[[p]] <- list(prior1 = prior1, prior0 = prior0)
          xdd <- matrix(1,li+lj,1)
          resv <- .Call(smo_optim,
                        as.double(t(xdd)), as.integer(nrow(xdd)), as.integer(ncol(xdd)),
                        as.double(yd),
                        as.double(as.vector(x[c(indexes[[i]],indexes[[j]]),c(indexes[[i]],indexes[[j]]),drop=FALSE])),
                        as.integer(if (sparse) x[c(indexes[[i]],indexes[[j]]),c(indexes[[i]],indexes[[j]]),drop=FALSE]@ia else 0),
                        as.integer(if (sparse) x[c(indexes[[i]],indexes[[j]]),c(indexes[[i]],indexes[[j]]),drop=FALSE]@ja else 0),
                        as.integer(sparse),
                        as.double(matrix(rep(-1,m))), ## linear term
                        as.integer(ktype), as.integer(0),
                        as.double(C), as.double(nu), as.double(epsilon), as.double(sigma),
                        as.integer(degree), as.double(offset),
                        as.integer(wl), ## weightlabel
                        as.double(weight), as.integer(nweights),
                        as.double(cache), as.double(tol), as.integer(shrinking))
          reind <- sort(c(indexes[[i]],indexes[[j]]),method="quick",index.return=TRUE)$ix
          tmpres <- resv[c(-(li+lj+1),-(li+lj+2))][reind]
          ## alpha
          svind <- tmpres > 0
          alpha(ret)[p] <- list(tmpres[svind])
          ## coefficients alpha*y
          coef(ret)[p] <- list(alpha(ret)[[p]]*yd[reind][svind])
          ## store SV indexes from current problem for later use in predict
          alphaindex(ret)[p] <- list(c(indexes[[i]],indexes[[j]])[reind][svind])
          ## store Support Vectors
          ## xmatrix(ret)[p] <- list(xd[svind, svind,drop=FALSE])
          ## save the indexes from all the SV in a vector (use unique?)
          svindex <- c(svindex,alphaindex(ret)[[p]])
          ## store betas in a vector
          b(ret) <- c(b(ret), resv[li+lj+1])
          ## store objective function values in vector
          obj(ret) <- c(obj(ret), resv[li+lj+2])
          ## used to reconstruct indexes for the patterns matrix x from "indexes" (really useful?)
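          ## (the pair (i,j) stored below lets predict() map binary machine p
          ## back to its two classes)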
problem[p] <- list(c(i,j)) ##store C in return object param(ret)$C <- C } } } ## nu classification if(type(ret) == "nu-svc"){ indexes <- lapply(sort(unique(y)), function(kk) which(y == kk)) for (i in 1:(nclass(ret)-1)) { jj <- i+1 for(j in jj:nclass(ret)) { p <- p+1 ##prepare data li <- length(indexes[[i]]) lj <- length(indexes[[j]]) ##xd <- matrix(0,(li+lj),(li+lj)) ##xdi <- 1:(li+lj) <= li ##xd[xdi,rep(TRUE,li+lj)] <- x[indexes[[i]],c(indexes[[i]],indexes[[j]])] ##xd[xdi == FALSE,rep(TRUE,li+lj)] <- x[indexes[[j]],c(indexes[[i]],indexes[[j]])] if(y[indexes[[i]][1]] < y[indexes[[j]]][1]) { yd <- c(rep(-1,li),rep(1,lj)) if(!is.null(class.weights)){ weight <- class.weights[weightlabels[c(j,i)]] wl <- c(1,0) nweights <- 2 } } else { yd <- c(rep(1,li),rep(-1,lj)) if(!is.null(class.weights)){ weight <- class.weights[weightlabels[c(i,j)]] wl <- c(0,1) nweigths <- 2 } } boolabel <- yd >= 0 prior1 <- sum(boolabel) md <- length(yd) prior0 <- md - prior1 prior(ret)[[p]] <- list(prior1 = prior1, prior0 = prior0) xdd <- matrix(1,li+lj,1) resv <- .Call(smo_optim, as.double(t(xdd)), as.integer(nrow(xdd)), as.integer(ncol(xdd)), as.double(yd), as.double(x[c(indexes[[i]],indexes[[j]]),c(indexes[[i]],indexes[[j]]),drop=FALSE]), as.integer(if (sparse) x[c(indexes[[i]],indexes[[j]]),c(indexes[[i]],indexes[[j]]),drop=FALSE]@ia else 0), as.integer(if (sparse) x[c(indexes[[i]],indexes[[j]]),c(indexes[[i]],indexes[[j]]),drop=FALSE]@ja else 0), as.integer(sparse), as.double(matrix(rep(-1,m))), #linear term as.integer(ktype), as.integer(1), as.double(C), as.double(nu), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.integer(wl), #weightlabl. as.double(weight), as.integer(nweights), as.double(cache), as.double(tol), as.integer(shrinking)) reind <- sort(c(indexes[[i]],indexes[[j]]),method="quick",index.return=TRUE)$ix tmpres <- resv[c(-(li+lj+1),-(li+lj+2))][reind] alpha(ret)[p] <- coef(ret)[p] <- list(tmpres[tmpres != 0]) ##store SV indexes from current problem for later use in predict alphaindex(ret)[p] <- list(c(indexes[[i]],indexes[[j]])[reind][tmpres != 0]) ## store Support Vectors ## xmatrix(ret)[p] <- list(xd[tmpres != 0,tmpres != 0,drop=FALSE]) ##save the indexes from all the SV in a vector (use unique!) 
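## (duplicates across the pairwise subproblems are collapsed later through
## unique() when SVindex is filled)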
svindex <- c(svindex,alphaindex(ret)[[p]]) ## store betas in a vector b(ret) <- c(b(ret), resv[li+lj+1]) ## store objective function values in vector obj(ret) <- c(obj(ret), resv[li+lj+2]) ## used to reconstruct indexes for the patterns matrix x from "indexes" problem[p] <- list(c(i,j)) param(ret)$nu <- nu ## margin(ret)[p] <- (min(kernelMult(kernel,xd[1:li,],,alpha(ret)[[p]][1:li])) - max(kernelMult(kernel,xd[li:(li+lj),],,alpha(ret)[[p]][li:(li+lj)])))/2 } } } ## Bound constraint C classification if(type(ret) == "C-bsvc"){ if(!is.null(class.weights)) weightedC <- class.weights[weightlabels] * rep(C,nclass(ret)) else weightedC <- rep(C,nclass(ret)) indexes <- lapply(sort(unique(y)), function(kk) which(y == kk)) for (i in 1:(nclass(ret)-1)) { jj <- i+1 for(j in jj:nclass(ret)) { p <- p+1 ##prepare data li <- length(indexes[[i]]) lj <- length(indexes[[j]]) if(y[indexes[[i]][1]] < y[indexes[[j]]][1]) { yd <- c(rep(-1,li),rep(1,lj)) if(!is.null(class.weights)){ weight <- class.weights[weightlabels[c(j,i)]] wl <- c(1,0) nweights <- 2 } } else { yd <- c(rep(1,li),rep(-1,lj)) if(!is.null(class.weights)){ weight <- class.weights[weightlabels[c(i,j)]] wl <- c(0,1) nweigths <- 2 } } boolabel <- yd >= 0 prior1 <- sum(boolabel) md <- length(yd) prior0 <- md - prior1 prior(ret)[[p]] <- list(prior1 = prior1, prior0 = prior0) xdd <- matrix(rnorm(li+lj),li+lj,1) resv <- .Call(tron_optim, as.double(t(xdd)), as.integer(nrow(xdd)), as.integer(ncol(xdd)), as.double(yd), as.double(x[c(indexes[[i]],indexes[[j]]),c(indexes[[i]],indexes[[j]]),drop=FALSE]), as.integer(if (sparse) x[c(indexes[[i]],indexes[[j]]),c(indexes[[i]],indexes[[j]]),drop=FALSE]@ia else 0), as.integer(if (sparse) x[c(indexes[[i]],indexes[[j]]),c(indexes[[i]],indexes[[j]]),drop=FALSE]@ja else 0), as.integer(sparse), as.integer(2), as.double(0), ##countc as.integer(ktype), as.integer(5), as.double(C), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.double(1), ## cost value of alpha seeding as.double(2), ## step value of alpha seeding as.integer(wl), ##weightlabel as.double(weight), as.integer(nweights), as.double(weightedC), as.double(cache), as.double(tol), as.integer(10), ##qpsize as.integer(shrinking)) reind <- sort(c(indexes[[i]],indexes[[j]]),method="quick",index.return=TRUE)$ix alpha(ret)[p] <- list(resv[-(li+lj+1)][reind][resv[-(li+lj+1)][reind] > 0]) ## nonzero alpha*y coef(ret)[p] <- list(alpha(ret)[[p]] * yd[reind][resv[-(li+lj+1)][reind] > 0]) ## store SV indexes from current problem for later use in predict alphaindex(ret)[p] <- list(c(indexes[[i]],indexes[[j]])[reind][resv[-(li+lj+1)][reind] > 0]) ## store Support Vectors ## xmatrix(ret)[p] <- list(xd[resv > 0 ,resv > 0,drop = FALSE]) ## save the indexes from all the SV in a vector (use unique?) svindex <- c(svindex,alphaindex(ret)[[p]]) ## store betas in a vector b(ret) <- - sapply(coef(ret),sum) ## store objective function values vector obj(ret) <- c(obj(ret), resv[(li+lj+1)]) ## used to reconstruct indexes for the patterns matrix x from "indexes" (really usefull ?) 
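## (the bound-constrained formulation absorbs the offset into the problem,
## which is why b was recovered above as the negative sum of the coefficients)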
problem[p] <- list(c(i,j)) ##store C in return object param(ret)$C <- C } } } ## SPOC multiclass classification if(type(ret) =="spoc-svc") { if(!is.null(class.weights)) weightedC <- class.weights[weightlabels] * rep(C,nclass(ret)) else weightedC <- rep(C,nclass(ret)) yd <- sort(y,method="quick", index.return = TRUE) x <- matrix(x[yd$ix,yd$ix],nrow=dim(x)[1]) count <- 0 xdd <- matrix(1,m,1) resv <- .Call(tron_optim, as.double(t(xdd)), as.integer(nrow(xdd)), as.integer(ncol(xdd)), as.double(rep(yd$x-1,2)), as.double(x), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.integer(nclass(ret)), as.integer(count), as.integer(ktype), as.integer(7), as.double(C), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.double(C), as.double(2), #Cstep as.integer(0), #weightlabel as.double(0), as.integer(0), as.double(weightedC), as.double(cache), as.double(tol), as.integer(10), #qpsize as.integer(shrinking)) reind <- sort(yd$ix,method="quick",index.return=TRUE)$ix alpha(ret) <- t(matrix(resv[-(nclass(ret)*nrow(xdd)+1)],nclass(ret)))[reind,,drop=FALSE] coef(ret) <- lapply(1:nclass(ret), function(x) alpha(ret)[,x][alpha(ret)[,x]!=0]) names(coef(ret)) <- lev(ret) alphaindex(ret) <- lapply(sort(unique(y)), function(x) which(alpha(ret)[,x]!=0)) ## xmatrix(ret) <- x names(alphaindex(ret)) <- lev(ret) svindex <- which(rowSums(alpha(ret)!=0)!=0) b(ret) <- 0 obj(ret) <- resv[(nclass(ret)*nrow(xdd)+1)] param(ret)$C <- C } ## KBB multiclass classification if(type(ret) =="kbb-svc") { if(!is.null(class.weights)) weightedC <- class.weights[weightlabels] * rep(C,nclass(ret)) else weightedC <- rep(C,nclass(ret)) yd <- sort(y,method="quick", index.return = TRUE) x <- matrix(x[yd$ix,yd$ix],nrow=dim(x)[1]) count <- sapply(unique(yd$x), function(c) length(yd$x[yd$x==c])) xdd <- matrix(1,m,1) resv <- .Call(tron_optim, as.double(t(xdd)), as.integer(nrow(xdd)), as.integer(ncol(xdd)), as.double(yd$x-1), as.double(x), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.integer(nclass(ret)), as.integer(count), as.integer(ktype), as.integer(8), as.double(C), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.double(1), #Cbegin as.double(2), #Cstep as.integer(0), #weightlabl. as.double(0), as.integer(0), as.double(weightedC), as.double(cache), as.double(tol), as.integer(10), #qpsize as.integer(shrinking)) reind <- sort(yd$ix,method="quick",index.return=TRUE)$ix alpha(ret) <- matrix(resv[-(nrow(x)*(nclass(ret)-1) + 1)],nrow(x))[reind,,drop=FALSE] coef(ret) <- lapply(1:(nclass(ret)-1), function(x) alpha(ret)[,x][alpha(ret)[,x]!=0]) alphaindex(ret) <- lapply(sort(unique(y)), function(x) which((y == x) & (rowSums(alpha(ret))!=0))) svindex <- which(rowSums(alpha(ret)!=0)!=0) b(ret) <- - sapply(coef(ret),sum) obj(ret) <- resv[(nrow(x)*(nclass(ret)-1) + 1)] param(ret)$C <- C } ## Novelty detection if(type(ret) =="one-svc") { xdd <- matrix(1,m,1) resv <- .Call(smo_optim, as.double(t(xdd)), as.integer(nrow(xdd)), as.integer(ncol(xdd)), as.double(matrix(rep(1,m))), as.double(x), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.double(matrix(rep(-1,m))), as.integer(ktype), as.integer(2), as.double(C), as.double(nu), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.integer(0), #weightlabl. 
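## (solver code 2 above selects the one-class problem: every target is +1 and
## nu bounds the fraction of points treated as outliers)
## A minimal usage sketch of the kernelMatrix interface (illustrative only;
## `X` and `Xtest` are hypothetical data matrices):
##   rbf <- rbfdot(sigma = 0.1)
##   fit <- ksvm(kernelMatrix(rbf, X), type = "one-svc", nu = 0.2)
##   Kt  <- kernelMatrix(rbf, Xtest, X)
##   predict(fit, as.kernelMatrix(Kt[, SVindex(fit), drop = FALSE]))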
as.double(0), as.integer(0), as.double(cache), as.double(tol), as.integer(shrinking)) tmpres <- resv[c(-(m+1),-(m+2))] alpha(ret) <- coef(ret) <- tmpres[tmpres != 0] svindex <- alphaindex(ret) <- which(tmpres != 0) ## xmatrix(ret) <- x[svindex,svindex,drop=FALSE] b(ret) <- resv[(m+1)] obj(ret) <- resv[(m+2)] param(ret)$nu <- nu } ## epsilon regression if(type(ret) =="eps-svr") { xdd <- matrix(1,m,1) resv <- .Call(smo_optim, as.double(t(xdd)), as.integer(nrow(xdd)), as.integer(ncol(xdd)), as.double(y), as.double(x), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.double(matrix(rep(-1,m))), as.integer(ktype), as.integer(3), as.double(C), as.double(nu), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.integer(0), #weightlabl. as.double(0), as.integer(0), as.double(cache), as.double(tol), as.integer(shrinking)) tmpres <- resv[c(-(m+1),-(m+2))] alpha(ret) <- coef(ret) <- tmpres[tmpres != 0] svindex <- alphaindex(ret) <- which(tmpres != 0) ## xmatrix(ret) <- x[svindex,svindex ,drop=FALSE] b(ret) <- resv[(m+1)] obj(ret) <- resv[(m+2)] param(ret)$epsilon <- epsilon param(ret)$C <- C } ## nu regression if(type(ret) =="nu-svr") { xdd <- matrix(1,m,1) resv <- .Call(smo_optim, as.double(t(xdd)), as.integer(nrow(xdd)), as.integer(ncol(xdd)), as.double(y), as.double(x), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.double(matrix(rep(-1,m))), as.integer(ktype), as.integer(4), as.double(C), as.double(nu), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.integer(0), as.double(0), as.integer(0), as.double(cache), as.double(tol), as.integer(shrinking)) tmpres <- resv[c(-(m+1),-(m+2))] alpha(ret) <- coef(ret) <- tmpres[tmpres!=0] svindex <- alphaindex(ret) <- which(tmpres != 0) ## xmatrix(ret) <- x[svindex,svindex,drop=FALSE] b(ret) <- resv[(m+1)] obj(ret) <- resv[(m+2)] param(ret)$epsilon <- epsilon param(ret)$nu <- nu } ## bound constraint eps regression if(type(ret) =="eps-bsvr") { xdd <- matrix(1,m,1) resv <- .Call(tron_optim, as.double(t(xdd)), as.integer(nrow(xdd)), as.integer(ncol(xdd)), as.double(y), as.double(x), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.integer(2), as.integer(0), as.integer(ktype), as.integer(6), as.double(C), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.double(1), #Cbegin as.double(2), #Cstep as.integer(0), #weightlabl. as.double(0), as.integer(0), as.double(0), as.double(cache), as.double(tol), as.integer(10), #qpsize as.integer(shrinking)) tmpres <- resv[-(m+1)] alpha(ret) <- coef(ret) <- tmpres[tmpres!=0] svindex <- alphaindex(ret) <- which(tmpres != 0) ## xmatrix(ret) <- x[svindex,,drop=FALSE] b(ret) <- -sum(alpha(ret)) obj(ret) <- resv[(m+1)] param(ret)$epsilon <- epsilon param(ret)$C <- C } kcall(ret) <- match.call() kernelf(ret) <- " Kernel matrix used as input." ymatrix(ret) <- y SVindex(ret) <- unique(sort(svindex,method="quick")) nSV(ret) <- length(unique(svindex)) if(nSV(ret)==0) stop("No Support Vectors found. 
You may want to change your parameters") fitted(ret) <- if (fit) predict(ret, as.kernelMatrix(x[,SVindex(ret),drop = FALSE])) else NULL if (fit){ if(type(ret)=="C-svc"||type(ret)=="nu-svc"||type(ret)=="spoc-svc"||type(ret)=="kbb-svc"||type(ret)=="C-bsvc") error(ret) <- 1 - .classAgreement(table(y,as.integer(fitted(ret)))) if(type(ret)=="one-svc") error(ret) <- sum(!fitted(ret))/m if(type(ret)=="eps-svr"||type(ret)=="nu-svr"||type(ret)=="eps-bsvr") error(ret) <- drop(crossprod(fitted(ret) - y)/m) } cross(ret) <- -1 if(cross == 1) cat("\n","cross should be >1 no cross-validation done!","\n","\n") else if (cross > 1) { cerror <- 0 suppressWarnings(vgr <- split(sample(1:m,m),1:cross)) for(i in 1:cross) { cind <- unsplit(vgr[-i],factor(rep((1:cross)[-i],unlist(lapply(vgr[-i],length))))) if(type(ret)=="C-svc"||type(ret)=="nu-svc"||type(ret)=="spoc-svc"||type(ret)=="kbb-svc"||type(ret)=="C-bsvc") { if(is.null(class.weights)) cret <- ksvm(as.kernelMatrix(x[cind,cind]),y[cind],type = type(ret), C=C, nu=nu, tol=tol, cross = 0, fit = FALSE ,cache = cache) else cret <- ksvm(as.kernelMatrix(x[cind,cind]), as.factor(lev(ret)[y[cind]]),type = type(ret), C=C, nu=nu, tol=tol, cross = 0, fit = FALSE, class.weights = class.weights,cache = cache) cres <- predict(cret, as.kernelMatrix(x[vgr[[i]], cind,drop = FALSE][,SVindex(cret),drop=FALSE])) cerror <- (1 - .classAgreement(table(y[vgr[[i]]],as.integer(cres))))/cross + cerror } if(type(ret)=="one-svc") { cret <- ksvm(as.kernelMatrix(x[cind,cind]),type = type(ret), C=C, nu=nu, tol=tol, cross = 0, fit = FALSE ,cache = cache) cres <- predict(cret, as.kernelMatrix(x[vgr[[i]], cind,drop = FALSE][,SVindex(cret),drop=FALSE])) cerror <- (1 - sum(cres)/length(cres))/cross + cerror } if(type(ret)=="eps-svr"||type(ret)=="nu-svr"||type(ret)=="eps-bsvr") { cret <- ksvm(as.kernelMatrix(x[cind,cind]),y[cind],type=type(ret), C=C,nu=nu,epsilon=epsilon,tol=tol, cross = 0, fit = FALSE, cache = cache, prob.model = FALSE) cres <- predict(cret, as.kernelMatrix(x[vgr[[i]], cind,drop = FALSE][,SVindex(cret),drop=FALSE])) cerror <- drop(crossprod(cres - y[vgr[[i]]])/m) + cerror } } cross(ret) <- cerror } prob.model(ret) <- list(NULL) if(prob.model) { if(type(ret)=="C-svc"||type(ret)=="nu-svc"||type(ret)=="C-bsvc") { p <- 0 for (i in 1:(nclass(ret)-1)) { jj <- i+1 for(j in jj:nclass(ret)) { p <- p+1 ##prepare data li <- length(indexes[[i]]) lj <- length(indexes[[j]]) if(y[indexes[[i]][1]] < y[indexes[[j]]][1]) { yd <- c(rep(-1,li),rep(1,lj)) if(!is.null(class.weights)){ weight <- weightlabels[c(j,i)] wl <- c(1,0) nweights <- 2 } } else { yd <- c(rep(1,li),rep(-1,lj)) if(!is.null(class.weights)){ weight <- weightlabels[c(i,j)] wl <- c(0,1) nweigths <- 2 } } m <- li+lj suppressWarnings(vgr <- split(c(sample(1:li,li),sample((li+1):(li+lj),lj)),1:3)) pres <- yres <- NULL for(k in 1:3) { cind <- unsplit(vgr[-k],factor(rep((1:3)[-k],unlist(lapply(vgr[-k],length))))) if(is.null(class.weights)) cret <- ksvm(as.kernelMatrix(x[c(indexes[[i]],indexes[[j]]),c(indexes[[i]],indexes[[j]]),drop=FALSE][cind,cind]),yd[cind],type = type(ret), C=C, nu=nu, tol=tol, cross = 0, fit = FALSE ,cache = cache, prob.model=FALSE) else cret <- ksvm(as.kernelMatrix(x[c(indexes[[i]],indexes[[j]]),c(indexes[[i]],indexes[[j]]),drop=FALSE][cind,cind]), as.factor(lev(ret)[y[c(indexes[[i]],indexes[[j]])][cind]]),type = type(ret), C=C, nu=nu, tol=tol, cross = 0, fit = FALSE, class.weights = class.weights,cache = cache, prob.model=FALSE) yres <- c(yres,yd[vgr[[k]]]) pres <- rbind(pres,predict(cret, 
as.kernelMatrix(x[c(indexes[[i]],indexes[[j]]),c(indexes[[i]],indexes[[j]]),drop=FALSE][vgr[[k]], cind,drop = FALSE][,SVindex(cret),drop = FALSE]),type="decision")) } prob.model(ret)[[p]] <- .probPlatt(pres,yres) } } } if(type(ret) == "eps-svr"||type(ret) == "nu-svr"||type(ret)=="eps-bsvr"){ suppressWarnings(vgr<-split(sample(1:m,m),1:3)) pres <- NULL for(i in 1:3) { cind <- unsplit(vgr[-i],factor(rep((1:3)[-i],unlist(lapply(vgr[-i],length))))) cret <- ksvm(as.kernelMatrix(x[cind,cind]),y[cind],type=type(ret), C=C, nu=nu, epsilon=epsilon, tol=tol, cross = 0, fit = FALSE, cache = cache, prob.model = FALSE) cres <- predict(cret, as.kernelMatrix(x[vgr[[i]], cind, drop = FALSE][,SVindex(cret), drop = FALSE])) pres <- rbind(pres,predict(cret, as.kernelMatrix(x[vgr[[i]],cind , drop = FALSE][,SVindex(cret) ,drop = FALSE]),type="decision")) } pres[abs(pres) > (5*sd(pres))] <- 0 prob.model(ret) <- list(sum(abs(pres))/dim(pres)[1]) } } return(ret) }) .classAgreement <- function (tab) { n <- sum(tab) if (!is.null(dimnames(tab))) { lev <- intersect(colnames(tab), rownames(tab)) p0 <- sum(diag(tab[lev, lev])) / n } else { m <- min(dim(tab)) p0 <- sum(diag(tab[1:m, 1:m])) / n } return(p0) } ## List Interface setMethod("ksvm",signature(x="list"), function (x, y = NULL, type = NULL, kernel = "stringdot", kpar = list(length = 4, lambda = 0.5), C = 1, nu = 0.2, epsilon = 0.1, prob.model = FALSE, class.weights = NULL, cross = 0, fit = TRUE, cache = 40, tol = 0.001, shrinking = TRUE, ... ,na.action = na.omit) { ret <- new("ksvm") if (is.null(y)) x <- na.action(x) n.action(ret) <- na.action sparse <- FALSE if (is.null(type)) type(ret) <- if (is.null(y)) "one-svc" else if (is.factor(y)) "C-svc" else "eps-svr" if(!is.null(type)) type(ret) <- match.arg(type,c("C-svc", "nu-svc", "kbb-svc", "spoc-svc", "C-bsvc", "one-svc", "eps-svr", "eps-bsvr", "nu-svr")) m <- length(x) if(is.character(kernel)){ kernel <- match.arg(kernel,c("rbfdot","polydot","tanhdot","vanilladot","laplacedot","besseldot","anovadot","splinedot","stringdot")) if(is.character(kpar)) if(kernel == "tanhdot" || kernel == "vanilladot" || kernel == "polydot"|| kernel == "besseldot" || kernel== "anovadot"|| kernel=="splinedot" || kernel == "rbfdot" || kernel == "laplacedot" ) { stop("List interface supports only the stringdot kernel.") } } if(is(kernel,"kernel") & !is(kernel,"stringkernel")) stop("List interface supports only the stringdot kernel.") if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") if (!is(y,"vector") && !is.factor(y) & !is(y,"matrix") & !(type(ret)=="one-svc")) stop("y must be a vector or a factor.") if(!(type(ret)=="one-svc")) if(is(y,"vector") | is(y,"factor")) ym <- length(y) else if(is(y,"matrix")) ym <- dim(y)[1] else stop("y must be a matrix or a vector") if ((type(ret) != "one-svc") && ym != m) stop("x and y don't match.") if(nu > 1|| nu <0) stop("nu must be between 0 an 1.") weightlabels <- NULL nweights <- 0 weight <- 0 wl <- 0 ## in case of classification: transform factors into integers if (type(ret) == "one-svc") # one class classification --> set dummy y <- 1 else if (is.factor(y)) { lev(ret) <- levels (y) y <- as.integer (y) if (!is.null(class.weights)) { if (is.null(names (class.weights))) stop ("Weights have to be specified along with their according level names !") weightlabels <- match (names(class.weights),lev(ret)) if (any(is.na(weightlabels))) stop ("At least one level 
name is missing or misspelled.") } } else { if ((type(ret) =="C-svc" || type(ret) == "nu-svc" ||type(ret) == "C-bsvc" || type(ret) == "spoc-svc" || type(ret) == "kbb-svc") && any(as.integer (y) != y)) stop ("dependent variable has to be of factor or integer type for classification mode.") if (type(ret) != "eps-svr" || type(ret) != "nu-svr"|| type(ret)!="eps-bsvr") lev(ret) <- sort(unique (y)) } ## initialize if (type(ret) =="C-svc" || type(ret) == "nu-svc" ||type(ret) == "C-bsvc" || type(ret) == "spoc-svc" || type(ret) == "kbb-svc") nclass(ret) <- length (unique(y)) p <- 0 K <- 0 svindex <- problem <- NULL ktype <- 4 prior(ret) <- list(NULL) sigma <- 0.1 degree <- offset <- scale <- 1 ## C classification if(type(ret) == "C-svc"){ indexes <- lapply(sort(unique(y)), function(kk) which(y == kk)) for (i in 1:(nclass(ret)-1)) { jj <- i+1 for(j in jj:nclass(ret)) { p <- p+1 ##prepare data li <- length(indexes[[i]]) lj <- length(indexes[[j]]) if(y[indexes[[i]][1]] < y[indexes[[j]]][1]) { yd <- c(rep(-1,li),rep(1,lj)) if(!is.null(class.weights)){ weight <- class.weights[weightlabels[c(j,i)]] wl <- c(1,0) nweights <- 2 } } else { yd <- c(rep(1,li),rep(-1,lj)) if(!is.null(class.weights)){ weight <- class.weights[weightlabels[c(i,j)]] wl <- c(0,1) nweigths <- 2 } } boolabel <- yd >= 0 prior1 <- sum(boolabel) md <- length(yd) prior0 <- md - prior1 prior(ret)[[p]] <- list(prior1 = prior1, prior0 = prior0) K <- kernelMatrix(kernel,x[c(indexes[[i]],indexes[[j]])]) xdd <- matrix(1,li+lj,1) resv <- .Call(smo_optim, as.double(t(xdd)), as.integer(nrow(xdd)), as.integer(ncol(xdd)), as.double(yd), as.double(K), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.double(matrix(rep(-1,m))), ##linear term as.integer(ktype), as.integer(0), as.double(C), as.double(nu), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.integer(wl), ##weightlabel as.double(weight), as.integer(nweights), as.double(cache), as.double(tol), as.integer(shrinking)) reind <- sort(c(indexes[[i]],indexes[[j]]),method="quick",index.return=TRUE)$ix tmpres <- resv[c(-(li+lj+1),-(li+lj+2))][reind] ## alpha alpha(ret)[p] <- list(tmpres[tmpres > 0]) ## coefficients alpha*y coef(ret)[p] <- list(alpha(ret)[[p]]*yd[reind][tmpres > 0]) ## store SV indexes from current problem for later use in predict alphaindex(ret)[p] <- list(c(indexes[[i]],indexes[[j]])[reind][tmpres>0]) ## store Support Vectors xmatrix(ret)[p] <- list(x[c(indexes[[i]],indexes[[j]])][reind][tmpres > 0]) ## save the indexes from all the SV in a vector (use unique?) svindex <- c(svindex,alphaindex(ret)[[p]]) ## store betas in a vector b(ret) <- c(b(ret), resv[li+lj+1]) obj(ret) <- c(obj(ret),resv[li+lj+2]) ## used to reconstruct indexes for the patterns matrix x from "indexes" (really usefull ?) 
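          ## (the (i,j) pair is stored below, as in the other interfaces)
          ## A minimal usage sketch of the list interface (illustrative only;
          ## the strings and labels are hypothetical):
          ##   txts <- list("gacgga", "cgtacg", "gatgca", "acgcat")
          ##   labs <- factor(c("a", "b", "a", "b"))
          ##   fit  <- ksvm(txts, labs, kernel = "stringdot",
          ##                kpar = list(length = 4, lambda = 0.5), C = 1)
          ##   predict(fit, txts)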
problem[p] <- list(c(i,j)) ##store C in return object param(ret)$C <- C ## margin(ret)[p] <- (min(kernelMult(kernel,xd[1:li,],,alpha(ret)[[p]][1:li])) - max(kernelMult(kernel,xd[li:(li+lj),],,alpha(ret)[[p]][li:(li+lj)])))/2 } } } ## nu classification if(type(ret) == "nu-svc"){ indexes <- lapply(sort(unique(y)), function(kk) which(y == kk)) for (i in 1:(nclass(ret)-1)) { jj <- i+1 for(j in jj:nclass(ret)) { p <- p+1 ##prepare data li <- length(indexes[[i]]) lj <- length(indexes[[j]]) if(y[indexes[[i]][1]] < y[indexes[[j]]][1]) { yd <- c(rep(-1,li),rep(1,lj)) if(!is.null(class.weights)){ weight <- class.weights[weightlabels[c(j,i)]] wl <- c(1,0) nweights <- 2 } } else { yd <- c(rep(1,li),rep(-1,lj)) if(!is.null(class.weights)){ weight <- class.weights[weightlabels[c(i,j)]] wl <- c(0,1) nweigths <- 2 } } boolabel <- yd >= 0 prior1 <- sum(boolabel) md <- length(yd) prior0 <- md - prior1 prior(ret)[[p]] <- list(prior1 = prior1, prior0 = prior0) K <- kernelMatrix(kernel,x[c(indexes[[i]],indexes[[j]])]) xdd <- matrix(1,li+lj,1) resv <- .Call(smo_optim, as.double(t(xdd)), as.integer(nrow(xdd)), as.integer(ncol(xdd)), as.double(yd), as.double(K), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.double(matrix(rep(-1,m))), #linear term as.integer(ktype), as.integer(1), as.double(C), as.double(nu), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.integer(wl), #weightlabl. as.double(weight), as.integer(nweights), as.double(cache), as.double(tol), as.integer(shrinking)) reind <- sort(c(indexes[[i]],indexes[[j]]),method="quick",index.return=TRUE)$ix tmpres <- resv[c(-(li+lj+1),-(li+lj+2))][reind] alpha(ret)[p] <- coef(ret)[p] <- list(tmpres[tmpres != 0]) ##store SV indexes from current problem for later use in predict alphaindex(ret)[p] <- list(c(indexes[[i]],indexes[[j]])[reind][tmpres!=0]) ## store Support Vectors xmatrix(ret)[p] <- list(x[c(indexes[[i]],indexes[[j]])][reind][tmpres != 0]) ##save the indexes from all the SV in a vector (use unique!) 
svindex <- c(svindex,alphaindex(ret)[[p]]) ## store betas in a vector b(ret) <- c(b(ret), resv[li+lj+1]) obj(ret) <- c(obj(ret), resv[li+lj+2]) ## used to reconstruct indexes for the patterns matrix x from "indexes" problem[p] <- list(c(i,j)) param(ret)$nu <- nu ## margin(ret)[p] <- (min(kernelMult(kernel,xd[1:li,],,alpha(ret)[[p]][1:li])) - max(kernelMult(kernel,xd[li:(li+lj),],,alpha(ret)[[p]][li:(li+lj)])))/2 } } } ## Bound constraint C classification if(type(ret) == "C-bsvc"){ if(!is.null(class.weights)) weightedC <- class.weights[weightlabels] * rep(C,nclass(ret)) else weightedC <- rep(C,nclass(ret)) indexes <- lapply(sort(unique(y)), function(kk) which(y == kk)) for (i in 1:(nclass(ret)-1)) { jj <- i+1 for(j in jj:nclass(ret)) { p <- p+1 ##prepare data li <- length(indexes[[i]]) lj <- length(indexes[[j]]) if(y[indexes[[i]][1]] < y[indexes[[j]]][1]) { yd <- c(rep(-1,li),rep(1,lj)) if(!is.null(class.weights)){ weight <- class.weights[weightlabels[c(j,i)]] wl <- c(1,0) nweights <- 2 } } else { yd <- c(rep(1,li),rep(-1,lj)) if(!is.null(class.weights)){ weight <- class.weights[weightlabels[c(i,j)]] wl <- c(0,1) nweigths <- 2 } } boolabel <- yd >= 0 prior1 <- sum(boolabel) md <- length(yd) prior0 <- md - prior1 prior(ret)[[p]] <- list(prior1 = prior1, prior0 = prior0) K <- kernelMatrix(kernel,x[c(indexes[[i]],indexes[[j]])]) xdd <- matrix(1,li+lj,1) resv <- .Call(tron_optim, as.double(t(xdd)), as.integer(nrow(xdd)), as.integer(ncol(xdd)), as.double(yd), as.double(K), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.integer(2), as.double(0), ##countc as.integer(ktype), as.integer(5), as.double(C), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.double(1), ## cost value of alpha seeding as.double(2), ## step value of alpha seeding as.integer(wl), ##weightlabel as.double(weight), as.integer(nweights), as.double(weightedC), as.double(cache), as.double(tol), as.integer(10), ##qpsize as.integer(shrinking)) reind <- sort(c(indexes[[i]],indexes[[j]]),method="quick",index.return=TRUE)$ix alpha(ret)[p] <- list(resv[-(li+lj+1)][reind][resv[-(li+lj+1)][reind] > 0]) ## nonzero alpha*y coef(ret)[p] <- list(alpha(ret)[[p]] * yd[reind][resv[-(li+lj+1)][reind] > 0]) ## store SV indexes from current problem for later use in predict alphaindex(ret)[p] <- list(c(indexes[[i]],indexes[[j]])[reind][resv[-(li+lj+1)][reind] > 0]) ## store Support Vectors xmatrix(ret)[p] <- list(x[c(indexes[[i]],indexes[[j]])][reind][resv[-(li+lj+1)][reind] > 0]) ## save the indexes from all the SV in a vector (use unique?) svindex <- c(svindex,alphaindex(ret)[[p]]) ## store betas in a vector b(ret) <- - sapply(coef(ret),sum) obj(ret) <- c(obj(ret),resv[(li+lj+1)]) ## used to reconstruct indexes for the patterns matrix x from "indexes" (really usefull ?) 
problem[p] <- list(c(i,j)) ##store C in return object param(ret)$C <- C ## margin(ret)[p] <- (min(kernelMult(kernel,xd[1:li,],,alpha(ret)[[p]][1:li])) - max(kernelMult(kernel,xd[li:(li+lj),],,alpha(ret)[[p]][li:(li+lj)])))/2 } } } ## SPOC multiclass classification if(type(ret) =="spoc-svc") { if(!is.null(class.weights)) weightedC <- class.weights[weightlabels] * rep(C,nclass(ret)) else weightedC <- rep(C,nclass(ret)) yd <- sort(y,method="quick", index.return = TRUE) x <- x[yd$ix] count <- 0 K <- kernelMatrix(kernel,x) xdd <- matrix(1,length(x),1) resv <- .Call(tron_optim, as.double(t(xdd)), as.integer(nrow(xdd)), as.integer(ncol(xdd)), as.double(rep(yd$x-1,2)), as.double(K), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.integer(nclass(ret)), as.integer(count), as.integer(ktype), as.integer(7), as.double(C), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.double(C), as.double(2), #Cstep as.integer(0), #weightlabel as.double(0), as.integer(0), as.double(weightedC), as.double(cache), as.double(tol), as.integer(10), #qpsize as.integer(shrinking)) reind <- sort(yd$ix,method="quick",index.return=TRUE)$ix alpha(ret) <- t(matrix(resv[-(nclass(ret)*nrow(xdd) + 1)],nclass(ret)))[reind,,drop=FALSE] coef(ret) <- lapply(1:nclass(ret), function(x) alpha(ret)[,x][alpha(ret)[,x]!=0]) names(coef(ret)) <- lev(ret) alphaindex(ret) <- lapply(1:nclass(ret), function(x) which(alpha(ret)[,x]!=0)) names(alphaindex(ret)) <- lev(ret) xmatrix(ret) <- x svindex <- which(rowSums(alpha(ret)!=0)!=0) b(ret) <- 0 obj(ret) <- resv[(nclass(ret)*nrow(xdd) + 1)] param(ret)$C <- C } ## KBB multiclass classification if(type(ret) =="kbb-svc") { if(!is.null(class.weights)) weightedC <- weightlabels * rep(C,nclass(ret)) else weightedC <- rep(C,nclass(ret)) yd <- sort(y,method="quick", index.return = TRUE) x <- x[yd$ix] count <- sapply(unique(yd$x), function(c) length(yd$x[yd$x==c])) K <- kernelMatrix(kernel,x) xdd <- matrix(1,length(x),1) resv <- .Call(tron_optim, as.double(t(xdd)), as.integer(nrow(xdd)), as.integer(ncol(xdd)), as.double(yd$x-1), as.double(K), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.integer(nclass(ret)), as.integer(count), as.integer(ktype), as.integer(8), as.double(C), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.double(1), #Cbegin as.double(2), #Cstep as.integer(0), #weightlabl. 
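                    ## (solver code 8 above appears to select the kbb multiclass
                    ## formulation, in the spirit of Weston & Watkins, while code 7
                    ## in the spoc branch selects the Crammer & Singer one; this
                    ## reading is inferred from the call sites)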
as.double(0), as.integer(0), as.double(weightedC), as.double(cache), as.double(tol), as.integer(10), #qpsize as.integer(shrinking)) reind <- sort(yd$ix,method="quick",index.return=TRUE)$ix alpha(ret) <- matrix(resv[-((nclass(ret)-1)*length(x)+1)],length(x))[reind,,drop=FALSE] xmatrix(ret) <- x<- x[reind] coef(ret) <- lapply(1:(nclass(ret)-1), function(x) alpha(ret)[,x][alpha(ret)[,x]!=0]) alphaindex(ret) <- lapply(sort(unique(y)), function(x) which((y == x) & (rowSums(alpha(ret))!=0))) svindex <- which(rowSums(alpha(ret)!=0)!=0) b(ret) <- - sapply(coef(ret),sum) obj(ret) <- resv[((nclass(ret)-1)*length(x)+1)] param(ret)$C <- C } ## Novelty detection if(type(ret) =="one-svc") { K <- kernelMatrix(kernel,x) xdd <- matrix(1,length(x),1) resv <- .Call(smo_optim, as.double(t(xdd)), as.integer(nrow(xdd)), as.integer(ncol(xdd)), as.double(matrix(rep(1,m))), as.double(K), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.double(matrix(rep(-1,m))), as.integer(ktype), as.integer(2), as.double(C), as.double(nu), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.integer(0), #weightlabl. as.double(0), as.integer(0), as.double(cache), as.double(tol), as.integer(shrinking)) tmpres <- resv[c(-(m+1),-(m+2))] alpha(ret) <- coef(ret) <- tmpres[tmpres != 0] svindex <- alphaindex(ret) <- which(tmpres !=0) xmatrix(ret) <- x[svindex] b(ret) <- resv[(m+1)] obj(ret) <- resv[(m+2)] param(ret)$nu <- nu } ## epsilon regression if(type(ret) =="eps-svr") { K <- kernelMatrix(kernel,x) xdd <- matrix(1,length(x),1) resv <- .Call(smo_optim, as.double(t(xdd)), as.integer(nrow(xdd)), as.integer(ncol(xdd)), as.double(y), as.double(K), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.double(matrix(rep(-1,m))), as.integer(ktype), as.integer(3), as.double(C), as.double(nu), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.integer(0), #weightlabl. 
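                    ## (same return convention as the other smo_optim calls: the
                    ## first m slots hold the alphas, followed by the offset b and
                    ## the objective value)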
as.double(0), as.integer(0), as.double(cache), as.double(tol), as.integer(shrinking)) tmpres <- resv[c(-(m+1),-(m+2))] alpha(ret) <- coef(ret) <- tmpres[tmpres != 0] svindex <- alphaindex(ret) <- which(tmpres != 0) xmatrix(ret) <- x[svindex] b(ret) <- resv[(m+1)] obj(ret) <- resv[(m+2)] param(ret)$epsilon <- epsilon param(ret)$C <- C } ## nu regression if(type(ret) =="nu-svr") { K <- kernelMatrix(kernel,x) xdd <- matrix(1,length(x),1) resv <- .Call(smo_optim, as.double(t(xdd)), as.integer(nrow(xdd)), as.integer(ncol(xdd)), as.double(y), as.double(K), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.double(matrix(rep(-1,m))), as.integer(ktype), as.integer(4), as.double(C), as.double(nu), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.integer(0), as.double(0), as.integer(0), as.double(cache), as.double(tol), as.integer(shrinking)) tmpres <- resv[c(-(m+1),-(m+2))] alpha(ret) <- coef(ret) <- tmpres[tmpres!=0] svindex <- alphaindex(ret) <- which(tmpres != 0) xmatrix(ret) <- x[svindex] b(ret) <- resv[(m+1)] obj(ret) <- resv[(m+2)] param(ret)$epsilon <- epsilon param(ret)$nu <- nu } ## bound constraint eps regression if(type(ret) =="eps-bsvr") { K <- kernelMatrix(kernel,x) xdd <- matrix(1,length(x),1) resv <- .Call(tron_optim, as.double(t(xdd)), as.integer(nrow(xdd)), as.integer(ncol(xdd)), as.double(y), as.double(K), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.integer(2), as.integer(0), as.integer(ktype), as.integer(6), as.double(C), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.double(1), #Cbegin as.double(2), #Cstep as.integer(0), #weightlabl. as.double(0), as.integer(0), as.double(0), as.double(cache), as.double(tol), as.integer(10), #qpsize as.integer(shrinking)) tmpres <- resv[-(m+1)] alpha(ret) <- coef(ret) <- tmpres[tmpres!=0] svindex <- alphaindex(ret) <- which(tmpres != 0) xmatrix(ret) <- x[svindex] b(ret) <- -sum(alpha(ret)) obj(ret) <- resv[(m+1)] param(ret)$epsilon <- epsilon param(ret)$C <- C } kcall(ret) <- match.call() kernelf(ret) <- kernel ymatrix(ret) <- y SVindex(ret) <- unique(svindex) nSV(ret) <- length(unique(svindex)) if(type(ret)=="eps-svr"||type(ret)=="nu-svr"||type(ret)=="eps-bsvr") nclass(ret) <- m if(type(ret)=="one-svc") nclass(ret) <- 1 if(nSV(ret)==0) stop("No Support Vectors found. 
You may want to change your parameters") fitted(ret) <- if (fit) { if((type(ret)=="C-svc"||type(ret)=="nu-svc"||type(ret)=="C-bsvc") & nclass(ret) > 2) predict(ret, x) else if((type(ret)=="C-svc"||type(ret)=="nu-svc"||type(ret)=="C-bsvc"||type(ret)=="spoc-bsvc"||type(ret)=="kbb-bsvc")) predict(ret,as.kernelMatrix(K[reind,reind][,SVindex(ret), drop=FALSE])) else predict(ret,as.kernelMatrix(K[,SVindex(ret), drop=FALSE])) } else NULL if (fit){ if(type(ret)=="C-svc"||type(ret)=="nu-svc"||type(ret)=="spoc-svc"||type(ret)=="kbb-svc"||type(ret)=="C-bsvc") error(ret) <- 1 - .classAgreement(table(y,as.integer(fitted(ret)))) if(type(ret)=="one-svc") error(ret) <- sum(!fitted(ret))/m if(type(ret)=="eps-svr"||type(ret)=="nu-svr"||type(ret)=="eps-bsvr") error(ret) <- drop(crossprod(fitted(ret) - y)/m) } cross(ret) <- -1 if(!((type(ret)=="C-svc"||type(ret)=="nu-svc"||type(ret)=="C-bsvc") & nclass(ret) > 2)) { if((type(ret)=="C-svc"||type(ret)=="nu-svc"||type(ret)=="C-bsvc"||type(ret)=="spoc-bsvc"||type(ret)=="kbb-bsvc")) K <- as.kernelMatrix(K[reind,reind]) if(cross == 1) cat("\n","cross should be >1 no cross-validation done!","\n","\n") else if (cross > 1) { cerror <- 0 suppressWarnings(vgr <- split(sample(1:dim(K)[1],dim(K)[1]),1:cross)) for(i in 1:cross) { cind <- unsplit(vgr[-i],factor(rep((1:cross)[-i],unlist(lapply(vgr[-i],length))))) if(type(ret)=="C-svc"||type(ret)=="nu-svc"||type(ret)=="spoc-svc"||type(ret)=="kbb-svc"||type(ret)=="C-bsvc") { if(is.null(class.weights)) cret <- ksvm(as.kernelMatrix(K[cind,cind]),y[cind],type = type(ret), C=C, nu=nu, tol=tol, cross = 0, fit = FALSE ,cache = cache) else cret <- ksvm(as.kernelMatrix(K[cind,cind]),as.factor(lev(ret)[y[cind]]),type = type(ret), C=C, nu=nu, tol=tol, cross = 0, fit = FALSE, class.weights = class.weights,cache = cache) cres <- predict(cret, as.kernelMatrix(K[vgr[[i]], cind,drop = FALSE][,SVindex(cret),drop=FALSE])) cerror <- (1 - .classAgreement(table(y[vgr[[i]]],as.integer(cres))))/cross + cerror } if(type(ret)=="one-svc") { cret <- ksvm(as.kernelMatrix(K[cind,cind]), type = type(ret), C=C, nu=nu, tol=tol, cross = 0, fit = FALSE ,cache = cache) cres <- predict(cret, as.kernelMatrix(K[vgr[[i]], cind,drop = FALSE][,SVindex(cret),drop=FALSE])) cerror <- (1 - sum(cres)/length(cres))/cross + cerror } if(type(ret)=="eps-svr"||type(ret)=="nu-svr"||type(ret)=="eps-bsvr") { cret <- ksvm(as.kernelMatrix(K[cind,cind]),y[cind],type=type(ret), C=C,nu=nu,epsilon=epsilon,tol=tol, cross = 0, fit = FALSE, cache = cache, prob.model = FALSE) cres <- predict(cret, as.kernelMatrix(K[vgr[[i]], cind,drop = FALSE][,SVindex(cret),drop=FALSE])) cerror <- drop(crossprod(cres - y[vgr[[i]]])/m) + cerror } } cross(ret) <- cerror } prob.model(ret) <- list(NULL) if(prob.model) { if(type(ret)=="C-svc"||type(ret)=="nu-svc"||type(ret)=="C-bsvc") { p <- 0 for (i in 1:(nclass(ret)-1)) { jj <- i+1 for(j in jj:nclass(ret)) { p <- p+1 ##prepare data li <- length(indexes[[i]]) lj <- length(indexes[[j]]) if(y[indexes[[i]][1]] < y[indexes[[j]]][1]) { yd <- c(rep(-1,li),rep(1,lj)) if(!is.null(class.weights)){ weight <- weightlabels[c(j,i)] wl <- c(1,0) nweights <- 2 } } else { yd <- c(rep(1,li),rep(-1,lj)) if(!is.null(class.weights)){ weight <- weightlabels[c(i,j)] wl <- c(0,1) nweigths <- 2 } } m <- li+lj suppressWarnings(vgr <- split(c(sample(1:li,li),sample((li+1):(li+lj),lj)),1:3)) pres <- yres <- NULL for(k in 1:3) { cind <- unsplit(vgr[-k],factor(rep((1:3)[-k],unlist(lapply(vgr[-k],length))))) cret <- 
ksvm(as.kernelMatrix(as.kernelMatrix(K[c(indexes[[i]],indexes[[j]]),c(indexes[[i]],indexes[[j]]),drop=FALSE][cind,cind])), yd[cind], type = type(ret), C=C, nu=nu, tol=tol, cross = 0, fit = FALSE, cache = cache, prob.model=FALSE) yres <- c(yres,yd[vgr[[k]]]) pres <- rbind(pres,predict(cret, as.kernelMatrix(K[c(indexes[[i]],indexes[[j]]),c(indexes[[i]],indexes[[j]]),drop=FALSE][vgr[[k]], cind,drop = FALSE][,SVindex(cret),drop = FALSE]),type="decision")) } prob.model(ret)[[p]] <- .probPlatt(pres,yres) } } } if(type(ret) == "eps-svr"||type(ret) == "nu-svr"||type(ret)=="eps-bsvr"){ suppressWarnings(vgr<-split(sample(1:m,m),1:3)) pres <- NULL for(i in 1:3) { cind <- unsplit(vgr[-i],factor(rep((1:3)[-i],unlist(lapply(vgr[-i],length))))) cret <- ksvm(as.kernelMatrix(K[cind,cind]),y[cind],type=type(ret), C=C, nu=nu, epsilon=epsilon, tol=tol, cross = 0, fit = FALSE, cache = cache, prob.model = FALSE) cres <- predict(cret, as.kernelMatrix(K[vgr[[i]], cind, drop = FALSE][,SVindex(cret), drop = FALSE])) pres <- rbind(pres,predict(cret, as.kernelMatrix(K[vgr[[i]],cind , drop = FALSE][,SVindex(cret) ,drop = FALSE]),type="decision")) } pres[abs(pres) > (5*sd(pres))] <- 0 prob.model(ret) <- list(sum(abs(pres))/dim(pres)[1]) } } } else{ if(cross == 1) cat("\n","cross should be >1 no cross-validation done!","\n","\n") else if (cross > 1) { cerror <- 0 suppressWarnings(vgr<-split(sample(1:m,m),1:cross)) for(i in 1:cross) { cind <- unsplit(vgr[-i],factor(rep((1:cross)[-i],unlist(lapply(vgr[-i],length))))) if(type(ret)=="C-svc"||type(ret)=="nu-svc"||type(ret)=="spoc-svc"||type(ret)=="kbb-svc"||type(ret)=="C-bsvc") { if(is.null(class.weights)) cret <- ksvm(x[cind],y[cind],type = type(ret),kernel=kernel,kpar = NULL, C=C, nu=nu, tol=tol, cross = 0, fit = FALSE ,cache = cache) else cret <- ksvm(x[cind],as.factor(lev(ret)[y[cind]]),type = type(ret),kernel=kernel,kpar = NULL, C=C, nu=nu, tol=tol, cross = 0, fit = FALSE, class.weights = class.weights,cache = cache) cres <- predict(cret, x[vgr[[i]]]) cerror <- (1 - .classAgreement(table(y[vgr[[i]]],as.integer(cres))))/cross + cerror } if(type(ret)=="eps-svr"||type(ret)=="nu-svr"||type(ret)=="eps-bsvr") { cret <- ksvm(x[cind],y[cind],type=type(ret),kernel=kernel,kpar = NULL,C=C,nu=nu,epsilon=epsilon,tol=tol, cross = 0, fit = FALSE, cache = cache, prob.model = FALSE) cres <- predict(cret, x[vgr[[i]]]) cerror <- drop(crossprod(cres - y[vgr[[i]]])/m)/cross + cerror } } cross(ret) <- cerror } prob.model(ret) <- list(NULL) if(prob.model) { if(type(ret)=="C-svc"||type(ret)=="nu-svc"||type(ret)=="C-bsvc") { p <- 0 for (i in 1:(nclass(ret)-1)) { jj <- i+1 for(j in jj:nclass(ret)) { p <- p+1 ##prepare data li <- length(indexes[[i]]) lj <- length(indexes[[j]]) if(y[indexes[[i]][1]] < y[indexes[[j]]][1]) { yd <- c(rep(-1,li),rep(1,lj)) if(!is.null(class.weights)){ weight <- weightlabels[c(j,i)] wl <- c(1,0) nweights <- 2 } } else { yd <- c(rep(1,li),rep(-1,lj)) if(!is.null(class.weights)){ weight <- weightlabels[c(i,j)] wl <- c(0,1) nweigths <- 2 } } m <- li+lj suppressWarnings(vgr <- split(c(sample(1:li,li),sample((li+1):(li+lj),lj)),1:3)) pres <- yres <- NULL for(k in 1:3) { cind <- unsplit(vgr[-k],factor(rep((1:3)[-k],unlist(lapply(vgr[-k],length))))) if(is.null(class.weights)) cret <- ksvm(x[c(indexes[[i]], indexes[[j]])][cind],yd[cind],type = type(ret),kernel=kernel,kpar = NULL, C=C, nu=nu, tol=tol, cross = 0, fit = FALSE ,cache = cache, prob.model=FALSE) else cret <- ksvm(x[c(indexes[[i]], indexes[[j]])][cind],as.factor(lev(ret)[y[cind]]),type = type(ret),kernel=kernel,kpar 
= NULL, C=C, nu=nu, tol=tol, cross = 0, fit = FALSE,
                             class.weights = class.weights, cache = cache, prob.model=FALSE)
                yres <- c(yres,yd[vgr[[k]]])
                pres <- rbind(pres,predict(cret, x[c(indexes[[i]], indexes[[j]])][vgr[[k]]],type="decision"))
              }
              prob.model(ret)[[p]] <- .probPlatt(pres,yres)
            }
          }
        }
        if(type(ret) == "eps-svr"||type(ret) == "nu-svr"||type(ret)=="eps-bsvr"){
          suppressWarnings(vgr<-split(sample(1:m,m),1:3))
          pres <- NULL
          for(i in 1:3)
            {
              cind <- unsplit(vgr[-i],factor(rep((1:3)[-i],unlist(lapply(vgr[-i],length)))))
              cret <- ksvm(x[cind],y[cind],type=type(ret),kernel=kernel,kpar = NULL,
                           C=C,nu=nu,epsilon=epsilon,tol=tol,
                           cross = 0, fit = FALSE, cache = cache, prob.model = FALSE)
              cres <- predict(cret, x[vgr[[i]]])
              pres <- rbind(pres,predict(cret, x[vgr[[i]]],type="decision"))
            }
          pres[abs(pres) > (5*sd(pres))] <- 0
          prob.model(ret) <- list(sum(abs(pres))/dim(pres)[1])
        }
      }
    }
  return(ret)
})

##**************************************************************#
## predict for matrix, data.frame input

setMethod("predict", signature(object = "ksvm"),
function (object, newdata, type = "response", coupler = "minpair")
{
  type <- match.arg(type,c("response","probabilities","votes","decision"))
  if (missing(newdata) && type=="response" & !is.null(fitted(object)))
    return(fitted(object))
  else if(missing(newdata))
    stop("Missing data !")
  if(!is(newdata,"list")){
    if (!is.null(terms(object)) & !is(newdata,"kernelMatrix")) {
      if(!is.matrix(newdata))
        newdata <- model.matrix(delete.response(terms(object)), as.data.frame(newdata), na.action = n.action(object))
    }
    else
      newdata <- if (is.vector(newdata)) t(t(newdata)) else as.matrix(newdata)
    newnrows <- nrow(newdata)
    newncols <- ncol(newdata)
    if(!is(newdata,"kernelMatrix") && !is.null(xmatrix(object))){
      if(is(xmatrix(object),"list") && is(xmatrix(object)[[1]],"matrix"))
        oldco <- ncol(xmatrix(object)[[1]])
      if(is(xmatrix(object),"matrix"))
        oldco <- ncol(xmatrix(object))
      if (oldco != newncols)
        stop ("test vector does not match model !")
    }
  }
  else
    newnrows <- length(newdata)
  p <- 0
  if (is.list(scaling(object)))
    newdata[,scaling(object)$scaled] <- scale(newdata[,scaling(object)$scaled, drop = FALSE],
                                              center = scaling(object)$x.scale$"scaled:center",
                                              scale = scaling(object)$x.scale$"scaled:scale")
  if(type == "response" || type =="decision" || type=="votes")
    {
      if(type(object)=="C-svc"||type(object)=="nu-svc"||type(object)=="C-bsvc")
        {
          predres <- 1:newnrows
          if(type=="decision")
            votematrix <- matrix(0,nclass(object)*(nclass(object)-1)/2,newnrows)
          else
            votematrix <- matrix(0,nclass(object),newnrows)
          for(i in 1:(nclass(object)-1))
            {
              jj <- i+1
              for(j in jj:nclass(object))
                {
                  p <- p+1
                  if(is(newdata,"kernelMatrix"))
                    ret <- newdata[,which(SVindex(object)%in%alphaindex(object)[[p]]), drop=FALSE] %*% coef(object)[[p]] - b(object)[p]
                  else
                    ret <- kernelMult(kernelf(object),newdata,xmatrix(object)[[p]],coef(object)[[p]]) - b(object)[p]
                  if(type=="decision")
                    votematrix[p,] <- ret
                  else{
                    votematrix[i,ret<0] <- votematrix[i,ret<0] + 1
                    votematrix[j,ret>0] <- votematrix[j,ret>0] + 1
                  }
                }
            }
          if(type == "decision")
            predres <- t(votematrix)
          else
            predres <- sapply(predres, function(x) which.max(votematrix[,x]))
        }
      if(type(object) == "spoc-svc")
        {
          predres <- 1:newnrows
          votematrix <- matrix(0,nclass(object),newnrows)
          for(i in 1:nclass(object)){
            if(is(newdata,"kernelMatrix"))
              votematrix[i,] <- newdata[,which(SVindex(object)%in%alphaindex(object)[[i]]), drop=FALSE] %*% coef(object)[[i]]
            else if (is(newdata,"list"))
              votematrix[i,] <- kernelMult(kernelf(object),newdata,xmatrix(object)[alphaindex(object)[[i]]],coef(object)[[i]])
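            ## (each class owns one linear machine in the spoc formulation; the
            ## class with the largest response wins through which.max below,
            ## while the pairwise types above tally one vote per binary machine)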
else votematrix[i,] <- kernelMult(kernelf(object),newdata,xmatrix(object)[alphaindex(object)[[i]],,drop=FALSE],coef(object)[[i]]) } predres <- sapply(predres, function(x) which.max(votematrix[,x])) } if(type(object) == "kbb-svc") { predres <- 1:newnrows votematrix <- matrix(0,nclass(object),newnrows) A <- rowSums(alpha(object)) for(i in 1:nclass(object)) { for(k in (1:i)[-i]) if(is(newdata,"kernelMatrix")) votematrix[k,] <- votematrix[k,] - (newdata[,which(SVindex(object)%in%alphaindex(object)[[i]]), drop=FALSE] %*% alpha(object)[,k][alphaindex(object)[[i]]] + sum(alpha(object)[,k][alphaindex(object)[[i]]])) else if (is(newdata,"list")) votematrix[k,] <- votematrix[k,] - (kernelMult(kernelf(object),newdata,xmatrix(object)[alphaindex(object)[[i]]],alpha(object)[,k][alphaindex(object)[[i]]]) + sum(alpha(object)[,k][alphaindex(object)[[i]]])) else votematrix[k,] <- votematrix[k,] - (kernelMult(kernelf(object),newdata,xmatrix(object)[alphaindex(object)[[i]],,drop=FALSE],alpha(object)[,k][alphaindex(object)[[i]]]) + sum(alpha(object)[,k][alphaindex(object)[[i]]])) if(is(newdata,"kernelMatrix")) votematrix[i,] <- votematrix[i,] + (newdata[,which(SVindex(object)%in%alphaindex(object)[[i]]), drop=FALSE] %*% A[alphaindex(object)[[i]]] + sum(A[alphaindex(object)[[i]]])) else if (is(newdata,"list")) votematrix[i,] <- votematrix[i,] + (kernelMult(kernelf(object),newdata,xmatrix(object)[alphaindex(object)[[i]]],A[alphaindex(object)[[i]]]) + sum(A[alphaindex(object)[[i]]])) else votematrix[i,] <- votematrix[i,] + (kernelMult(kernelf(object),newdata,xmatrix(object)[alphaindex(object)[[i]],,drop=FALSE],A[alphaindex(object)[[i]]]) + sum(A[alphaindex(object)[[i]]])) if(i <= (nclass(object)-1)) for(kk in i:(nclass(object)-1)) if(is(newdata,"kernelMatrix")) votematrix[kk+1,] <- votematrix[kk+1,] - (newdata[,which(SVindex(object)%in%alphaindex(object)[[i]]), drop=FALSE] %*% alpha(object)[,kk][alphaindex(object)[[i]]] + sum(alpha(object)[,kk][alphaindex(object)[[i]]])) else if (is(newdata,"list")) votematrix[kk+1,] <- votematrix[kk+1,] - (kernelMult(kernelf(object),newdata,xmatrix(object)[alphaindex(object)[[i]]],alpha(object)[,kk][alphaindex(object)[[i]]]) + sum(alpha(object)[,kk][alphaindex(object)[[i]]])) else votematrix[kk+1,] <- votematrix[kk+1,] - (kernelMult(kernelf(object),newdata,xmatrix(object)[alphaindex(object)[[i]],,drop=FALSE],alpha(object)[,kk][alphaindex(object)[[i]]]) + sum(alpha(object)[,kk][alphaindex(object)[[i]]])) } predres <- sapply(predres, function(x) which.max(votematrix[,x])) } } if(type == "probabilities") { if(is.null(prob.model(object)[[1]])) stop("ksvm object contains no probability model. 
Make sure you set the parameter prob.model in ksvm during training.") if(type(object)=="C-svc"||type(object)=="nu-svc"||type(object)=="C-bsvc") { binprob <- matrix(0, newnrows, nclass(object)*(nclass(object) - 1)/2) for(i in 1:(nclass(object)-1)) { jj <- i+1 for(j in jj:nclass(object)) { p <- p+1 if(is(newdata,"kernelMatrix")) binprob[,p] <- 1 - .SigmoidPredict(as.vector(newdata[,which(SVindex(object)%in%alphaindex(object)[[p]]), drop=FALSE] %*% coef(object)[[p]] - b(object)[p]), prob.model(object)[[p]]$A, prob.model(object)[[p]]$B) else binprob[,p] <- 1 - .SigmoidPredict(as.vector(kernelMult(kernelf(object),newdata,xmatrix(object)[[p]],coef(object)[[p]]) - b(object)[p]), prob.model(object)[[p]]$A, prob.model(object)[[p]]$B) } } multiprob <- couple(binprob, coupler = coupler) } else stop("probability estimates only supported for C-svc, C-bsvc and nu-svc") } if(type(object) == "one-svc") { if(is(newdata,"kernelMatrix")) ret <- newdata %*% coef(object) - b(object) else ret <- kernelMult(kernelf(object),newdata,xmatrix(object),coef(object)) - b(object) ##one-class-classification: return TRUE/FALSE (probabilities ?)
if(type=="decision") return(ret) else { ret[ret>0]<-1 return(ret == 1) } } else { if(type(object)=="eps-svr"||type(object)=="nu-svr"||type(object)=="eps-bsvr") { if(is(newdata,"kernelMatrix")) predres <- newdata %*% coef(object) - b(object) else predres <- kernelMult(kernelf(object),newdata,xmatrix(object),coef(object)) - b(object) } else { ##classification & votes : return votematrix
if(type == "votes") return(votematrix) ##classification & probabilities : return probability matrix
if(type == "probabilities") { colnames(multiprob) <- lev(object) return(multiprob) } if(is.numeric(lev(object)) && type == "response") return(lev(object)[predres]) if (is.character(lev(object)) && type!="decision") { ##classification & type response: return factors
if(type == "response") return(factor (lev(object)[predres], levels = lev(object))) } } } if (!is.null(scaling(object)$y.scale) & !is(newdata,"kernelMatrix") & !is(newdata,"list")) ## return raw values, possibly scaled back
return(predres * scaling(object)$y.scale$"scaled:scale" + scaling(object)$y.scale$"scaled:center") else ##else: return raw values
return(predres) })
#****************************************************************************************#
setMethod("show","ksvm", function(object){ cat("Support Vector Machine object of class \"ksvm\"","\n") cat("\n") cat(paste("SV type:", type(object))) switch(type(object), "C-svc" = cat(paste(" (classification)", "\n")), "nu-svc" = cat(paste(" (classification)", "\n")), "C-bsvc" = cat(paste(" (classification)", "\n")), "one-svc" = cat(paste(" (novelty detection)", "\n")), "spoc-svc" = cat(paste(" (classification)", "\n")), "kbb-svc" = cat(paste(" (classification)", "\n")), "eps-svr" = cat(paste(" (regression)","\n")), "nu-svr" = cat(paste(" (regression)","\n")) ) switch(type(object), "C-svc" = cat(paste(" parameter : cost C =",param(object)$C, "\n")), "nu-svc" = cat(paste(" parameter : nu =", param(object)$nu, "\n")), "C-bsvc" = cat(paste(" parameter : cost C =",param(object)$C, "\n")), "one-svc" = cat(paste(" parameter : nu =", param(object)$nu, "\n")), "spoc-svc" = cat(paste(" parameter : cost C =",param(object)$C, "\n")), "kbb-svc" = cat(paste(" parameter : cost C =",param(object)$C, "\n")), "eps-svr" = cat(paste(" parameter : epsilon =",param(object)$epsilon, " cost C =", param(object)$C,"\n")), "nu-svr" = cat(paste(" parameter : epsilon =", param(object)$epsilon, " nu =",
param(object)$nu,"\n")) ) cat("\n") show(kernelf(object)) cat(paste("\nNumber of Support Vectors :", nSV(object),"\n")) cat("\nObjective Function Value :", round(obj(object),4),"\n") ## if(type(object)=="C-svc" || type(object) == "nu-svc") ## cat(paste("Margin width :",margin(object),"\n")) if(!is.null(fitted(object))) cat(paste("Training error :", round(error(object),6),"\n")) if(cross(object)!= -1) cat("Cross validation error :",round(cross(object),6),"\n") if(!is.null(prob.model(object)[[1]])&&(type(object)=="eps-svr" ||type(object)=="nu-svr"||type(object)=="eps-bsvr")) cat("Laplace distr. width :",round(prob.model(object)[[1]],6),"\n") if(!is.null(prob.model(object)[[1]]) & (type(object) == "C-svc"| type(object) == "nu-svc"| type(object) == "C-bsvc")) cat("Probability model included.","\n") ##train error & loss }) setMethod("plot", signature(x = "ksvm", y = "missing"), function(x, data = NULL, grid = 50, slice = list(), ...) { if (type(x) =="C-svc" || type(x) == "nu-svc") { if(nclass(x) > 2) stop("plot function only supports binary classification") if (!is.null(terms(x))&&!is.null(data)) { if(!is.matrix(data)) sub <- model.matrix(delete.response(terms(x)), as.data.frame(data), na.action = n.action(x)) } else if(!is.null(data)) sub <- as.matrix(data) else sub <- xmatrix(x)[[1]] ## sub <- sub[,!colnames(xmatrix(x)[[1]])%in%names(slice)] xr <- seq(min(sub[,2]), max(sub[,2]), length = grid) yr <- seq(min(sub[,1]), max(sub[,1]), length = grid) sc <- 0 # if(is.null(data)) # { # sc <- 1 # data <- xmatrix(x)[[1]] # } if(is.data.frame(data) || !is.null(terms(x))){ lis <- c(list(yr), list(xr), slice) names(lis)[1:2] <- setdiff(colnames(sub),names(slice)) new <- expand.grid(lis)[,labels(terms(x))] } else new <- expand.grid(xr,yr) if(sc== 1) scaling(x) <- NULL preds <- predict(x, new ,type = "decision") if(is.null(terms(x))) xylb <- colnames(sub) else xylb <- names(lis) lvl <- 37 mymax <- max(abs(preds)) mylevels <- pretty(c(0, mymax), 15) nl <- length(mylevels)-2 mycols <- c(hcl(0, 100 * (nl:0/nl)^1.3, 90 - 40 *(nl:0/nl)^1.3), rev(hcl(260, 100 * (nl:0/nl)^1.3, 90 - 40 *(nl:0/nl)^1.3))) mylevels <- c(-rev(mylevels[-1]), mylevels) index <- max(which(mylevels < min(preds))):min(which(mylevels > max(preds))) mycols <- mycols[index] mylevels <- mylevels[index] #FIXME# previously the plot code assumed that the y values are either #FIXME# -1 or 1, but this is not generally true. If generated from a #FIXME# factor, they are typically 1 and 2. Maybe ymatrix should be #FIXME# changed? ymat <- ymatrix(x) ymean <- mean(unique(ymat)) filled.contour(xr, yr, matrix(as.numeric(preds), nrow = length(xr), byrow = TRUE), col = mycols, levels = mylevels, plot.axes = { axis(1) axis(2) if(!is.null(data)){ points(sub[-SVindex(x),2], sub[-SVindex(x),1], pch = ifelse(ymat[-SVindex(x)] < ymean, 2, 1)) points(sub[SVindex(x),2], sub[SVindex(x),1], pch = ifelse(ymat[SVindex(x)] < ymean, 17, 16))} else{ ## points(sub[-SVindex(x),], pch = ifelse(ymat[-SVindex(x)] < ymean, 2, 1)) points(sub, pch = ifelse(ymat[SVindex(x)] < ymean, 17, 16)) }}, nlevels = lvl, plot.title = title(main = "SVM classification plot", xlab = xylb[2], ylab = xylb[1]), ... 
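## any additional graphical parameters supplied by the caller are handed
## on to filled.contour() through the ... argument above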
) } else { stop("Only plots of classification ksvm objects supported") } }) setGeneric(".probPlatt", function(deci, yres) standardGeneric(".probPlatt")) setMethod(".probPlatt",signature(deci="ANY"), function(deci,yres) { if (is.matrix(deci)) deci <- as.vector(deci) if (!is.vector(deci)) stop("input should be matrix or vector") yres <- as.vector(yres) ## Create label and count priors boolabel <- yres >= 0 prior1 <- sum(boolabel) m <- length(yres) prior0 <- m - prior1 ## set parameters (should be on the interface I guess) maxiter <- 100 minstep <- 1e-10 sigma <- 1e-3 eps <- 1e-5 ## Construct target support hiTarget <- (prior1 + 1)/(prior1 + 2) loTarget <- 1/(prior0 + 2) length <- prior1 + prior0 t <- rep(loTarget, m) t[boolabel] <- hiTarget ##Initial Point & Initial Fun Value A <- 0 B <- log((prior0 + 1)/(prior1 + 1)) fval <- 0 fApB <- deci*A + B bindex <- fApB >= 0 p <- q <- rep(0,m) fval <- sum(t[bindex]*fApB[bindex] + log(1 + exp(-fApB[bindex]))) fval <- fval + sum((t[!bindex] - 1)*fApB[!bindex] + log(1+exp(fApB[!bindex]))) for (it in 1:maxiter) { h11 <- h22 <- sigma h21 <- g1 <- g2 <- 0 fApB <- deci*A + B bindex <- fApB >= 0 p[bindex] <- exp(-fApB[bindex])/(1 + exp(-fApB[bindex])) q[bindex] <- 1/(1+exp(-fApB[bindex])) bindex <- fApB < 0 p[bindex] <- 1/(1 + exp(fApB[bindex])) q[bindex] <- exp(fApB[bindex])/(1 + exp(fApB[bindex])) d2 <- p*q h11 <- h11 + sum(d2*deci^2) h22 <- h22 + sum(d2) h21 <- h21 + sum(deci*d2) d1 <- t - p g1 <- g1 + sum(deci*d1) g2 <- g2 + sum(d1) ## Stopping Criteria if (abs(g1) < eps && abs(g2) < eps) break ## Finding Newton Direction -inv(t(H))%*%g det <- h11*h22 - h21^2 dA <- -(h22*g1 - h21*g2) / det dB <- -(-h21*g1 + h11*g2) / det gd <- g1*dA + g2*dB ## Line Search stepsize <- 1 while(stepsize >= minstep) { newA <- A + stepsize * dA newB <- B + stepsize * dB ## New function value newf <- 0 fApB <- deci * newA + newB bindex <- fApB >= 0 newf <- sum(t[bindex] * fApB[bindex] + log(1 + exp(-fApB[bindex]))) newf <- newf + sum((t[!bindex] - 1)*fApB[!bindex] + log(1 + exp(fApB[!bindex]))) ## Check decrease if (newf < (fval + 0.0001 * stepsize * gd)) { A <- newA B <- newB fval <- newf break } else stepsize <- stepsize/2 } if (stepsize < minstep) { cat("line search fails", A, B, g1, g2, dA, dB, gd) ret <- .SigmoidPredict(deci, A, B) return(ret) } } if(it >= maxiter -1) cat("maximum number of iterations reached",g1,g2) ret <- list(A=A, B=B) return(ret) }) ## Sigmoid predict function .SigmoidPredict <- function(deci, A, B) { fApB <- deci*A +B k <- length(deci) ret <- rep(0,k) bindex <- fApB >= 0 ret[bindex] <- exp(-fApB[bindex])/(1 + exp(-fApB[bindex])) ret[!bindex] <- 1/(1 + exp(fApB[!bindex])) return(ret) } kernlab/R/kernels.R0000644000175100001440000026556713271622147013727 0ustar hornikusers## kernel functions ## Functions for computing a kernel value, matrix, matrix-vector ## product and quadratic form ## ## author : alexandros karatzoglou ## Define the kernel objects, ## functions with an additional slot for the kernel parameter list. ## kernel functions take two vector arguments and return a scalar (dot product) rbfdot<- function(sigma=1) { rval <- function(x,y=NULL) { if(!is(x,"vector")) stop("x must be a vector") if(!is(y,"vector")&&!is.null(y)) stop("y must a vector") if (is(x,"vector") && is.null(y)){ return(1) } if (is(x,"vector") && is(y,"vector")){ if (!length(x)==length(y)) stop("number of dimension must be the same on both data points") return(exp(sigma*(2*crossprod(x,y) - crossprod(x) - crossprod(y)))) # sigma/2 or sigma ?? 
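## (2<x,y> - <x,x> - <y,y> = -||x - y||^2, so the value above is
##  exp(-sigma * ||x - y||^2) computed without forming x - y explicitly)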
} } return(new("rbfkernel",.Data=rval,kpar=list(sigma=sigma))) } setClass("rbfkernel",prototype=structure(.Data=function(){},kpar=list()),contains=c("kernel")) laplacedot<- function(sigma=1) { rval <- function(x,y=NULL) { if(!is(x,"vector")) stop("x must be a vector") if(!is(y,"vector")&&!is.null(y)) stop("y must a vector") if (is(x,"vector") && is.null(y)){ return(1) } if (is(x,"vector") && is(y,"vector")){ if (!length(x)==length(y)) stop("number of dimension must be the same on both data points") return(exp(-sigma*sqrt(-(round(2*crossprod(x,y) - crossprod(x) - crossprod(y),9))))) } } return(new("laplacekernel",.Data=rval,kpar=list(sigma=sigma))) } setClass("laplacekernel",prototype=structure(.Data=function(){},kpar=list()),contains=c("kernel")) besseldot<- function(sigma = 1, order = 1, degree = 1) { rval <- function(x,y=NULL) { if(!is(x,"vector")) stop("x must be a vector") if(!is(y,"vector")&&!is.null(y)) stop("y must a vector") if (is(x,"vector") && is.null(y)){ return(1) } if (is(x,"vector") && is(y,"vector")){ if (!length(x)==length(y)) stop("number of dimension must be the same on both data points") lim <- 1/(gamma(order+1)*2^(order)) bkt <- sigma*sqrt(-(2*crossprod(x,y) - crossprod(x) - crossprod(y))) if(bkt < 10e-5) res <- lim else res <- besselJ(bkt,order)*(bkt^(-order)) return((res/lim)^degree) } } return(new("besselkernel",.Data=rval,kpar=list(sigma=sigma ,order = order ,degree = degree))) } setClass("besselkernel",prototype=structure(.Data=function(){},kpar=list()),contains=c("kernel")) anovadot<- function(sigma = 1, degree = 1) { rval <- function(x,y=NULL) { if(!is(x,"vector")) stop("x must be a vector") if(!is(y,"vector")&&!is.null(y)) stop("y must a vector") if (is(x,"vector") && is.null(y)){ return(1) } if (is(x,"vector") && is(y,"vector")){ if (!length(x)==length(y)) stop("number of dimension must be the same on both data points") res <- sum(exp(- sigma * (x - y)^2)) return((res)^degree) } } return(new("anovakernel",.Data=rval,kpar=list(sigma=sigma ,degree = degree))) } setClass("anovakernel",prototype=structure(.Data=function(){},kpar=list()),contains=c("kernel")) splinedot<- function() { rval <- function(x,y=NULL) { if(!is(x,"vector")) stop("x must be a vector") if(!is(y,"vector")&&!is.null(y)) stop("y must a vector") if (is(x,"vector") && is.null(y)){ return(1) } if (is(x,"vector") && is(y,"vector")){ if (!length(x)==length(y)) stop("number of dimension must be the same on both data points") minv <- pmin(x,y) res <- 1 + x*y*(1+minv) - ((x+y)/2)*minv^2 + (minv^3)/3 fres <- prod(res) return(fres) } } return(new("splinekernel",.Data=rval,kpar=list())) } setClass("splinekernel",prototype=structure(.Data=function(){},kpar=list()),contains=c("kernel")) fourierdot <- function(sigma = 1) { rval <- function(x,y=NULL) { if(!is(x,"vector")) stop("x must be a vector") if(!is(y,"vector")&&!is.null(y)) stop("y must a vector") if (is(x,"vector") && is.null(y)){ return(1) } if (is(x,"vector") && is(y,"vector")){ if (!length(x)==length(y)) stop("number of dimension must be the same on both data points") res <- (1 - sigma^2)/2*(1 - 2*sigma*cos(x - y) + sigma^2) fres <- prod(res) return(fres) } } return(new("fourierkernel",.Data=rval,kpar=list())) } setClass("fourierkernel",prototype=structure(.Data=function(){},kpar=list(sigma = 1)),contains=c("kernel")) tanhdot <- function(scale = 1, offset = 1) { rval<- function(x, y = NULL) { if(!is(x,"vector")) stop("x must be a vector") if(!is(y,"vector")&&!is.null(y)) stop("y must be a vector") if (is(x,"vector") && is.null(y)){ 
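## FIXME: unlike rbfdot() above, this branch does not return() its value,
## so calling the kernel function with a single argument falls through and
## yields NULL instead of k(x, x)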
tanh(scale*crossprod(x)+offset) } if (is(x,"vector") && is(y,"vector")){ if (!length(x)==length(y)) stop("number of dimension must be the same on both data points") tanh(scale*crossprod(x,y)+offset) } } return(new("tanhkernel",.Data=rval,kpar=list(scale=scale,offset=offset))) } setClass("tanhkernel",prototype=structure(.Data=function(){},kpar=list()),contains=c("kernel")) setClass("polykernel",prototype=structure(.Data=function(){},kpar=list()),contains=c("kernel")) polydot <- function(degree = 1, scale = 1, offset = 1) { rval<- function(x, y = NULL) { if(!is(x,"vector")) stop("x must be a vector") if(!is(y,"vector")&&!is.null(y)) stop("y must be a vector") if (is(x,"vector") && is.null(y)){ (scale*crossprod(x)+offset)^degree } if (is(x,"vector") && is(y,"vector")){ if (!length(x)==length(y)) stop("number of dimension must be the same on both data points") (scale*crossprod(x,y)+offset)^degree } } return(new("polykernel",.Data=rval,kpar=list(degree=degree,scale=scale,offset=offset))) } setClass("vanillakernel",prototype=structure(.Data=function(){},kpar=list()),contains=c("kernel")) vanilladot <- function( ) { rval<- function(x, y = NULL) { if(!is(x,"vector")) stop("x must be a vector") if(!is(y,"vector")&&!is.null(y)) stop("y must be a vector") if (is(x,"vector") && is.null(y)){ crossprod(x) } if (is(x,"vector") && is(y,"vector")){ if (!length(x)==length(y)) stop("number of dimension must be the same on both data points") crossprod(x,y) } } return(new("vanillakernel",.Data=rval,kpar=list())) } setClass("stringkernel",prototype=structure(.Data=function(){},kpar=list(length = 4, lambda = 1.1, type = "spectrum", normalized = TRUE)),contains=c("kernel")) stringdot <- function(length = 4, lambda = 1.1, type = "spectrum", normalized = TRUE) { type <- match.arg(type,c("sequence","string","fullstring","exponential","constant","spectrum", "boundrange")) ## need to do this to set the length parameters if(type == "spectrum" | type == "boundrange") lambda <- length switch(type, "sequence" = { rval<- function(x, y = NULL) { if(!is(x,"vector")) stop("x must be a vector") if(!is(y,"vector")&&!is.null(y)) stop("y must be a vector") if (is(x,"vector") && is.null(y) && normalized == FALSE) return(.Call(subsequencek, as.character(x), as.character(x), as.integer(nchar(x)), as.integer(nchar(x)), as.integer(length), as.double(lambda))) if (is(x,"vector") && is(y,"vector") && normalized == FALSE) return(.Call(subsequencek, as.character(x), as.character(y), as.integer(nchar(x)), as.integer(nchar(y)), as.integer(length), as.double(lambda))) if (is(x,"vector") && is.null(y) && normalized == TRUE) return(1) if (is(x,"vector") && is(y,"vector") && normalized == TRUE) return(.Call(subsequencek, as.character(x), as.character(y), as.integer(nchar(x)), as.integer(nchar(y)), as.integer(length), as.double(lambda)) / sqrt(.Call(subsequencek, as.character(x), as.character(x), as.integer(nchar(x)), as.integer(nchar(x)), as.integer(length), as.double(lambda)) * .Call(subsequencek, as.character(y), as.character(y), as.integer(nchar(y)), as.integer(nchar(y)), as.integer(length), as.double(lambda)))) } }, "exponential" = { rval <- function(x,y=NULL) { if(!is(x,"vector")) stop("x must be a vector") if(!is(y,"vector")&&!is.null(y)) stop("y must be a vector") x <- paste(x,"\n",sep="") if(!is.null(y)) y <- paste(y,"\n",sep="") if (normalized == FALSE){ if(is.null(y)) y <- x return(.Call(stringtv, as.character(x), as.character(y), as.integer(1), as.integer(nchar(x)), as.integer(nchar(y)), as.integer(2), as.double(lambda))) } if 
(is(x,"vector") && is.null(y) && normalized == TRUE) return(1) if (is(x,"vector") && is(y,"vector") && normalized == TRUE) return(.Call(stringtv, as.character(x), as.character(y), as.integer(1), as.integer(nchar(x)), as.integer(nchar(y)), as.integer(2), as.double(lambda)) / sqrt(.Call(stringtv, as.character(x), as.character(x), as.integer(1), as.integer(nchar(x)), as.integer(nchar(x)), as.integer(2), as.double(lambda)) * .Call(stringtv, as.character(y), as.character(y), as.integer(1), as.integer(nchar(y)), as.integer(nchar(y)), as.integer(2), as.double(lambda)))) } }, "constant" = { rval <- function(x,y=NULL) { if(!is(x,"vector")) stop("x must be a vector") if(!is(y,"vector")&&!is.null(y)) stop("y must be a vector") x <- paste(x,"\n",sep="") if(!is.null(y)) y <- paste(y,"\n",sep="") if (normalized == FALSE){ if(is.null(y)) y <- x return(.Call(stringtv, as.character(x), as.character(y), as.integer(1), as.integer(nchar(x)), as.integer(nchar(y)), as.integer(1), as.double(lambda))) } if (is(x,"vector") && is.null(y) && normalized == TRUE) return(1) if (is(x,"vector") && is(y,"vector") && normalized == TRUE) return(.Call(stringtv, as.character(x), as.character(y), as.integer(1), as.integer(nchar(x)), as.integer(nchar(y)), as.integer(1), as.double(lambda)) / sqrt(.Call(stringtv, as.character(x), as.character(x), as.integer(1), as.integer(nchar(x)), as.integer(nchar(x)), as.integer(1), as.double(lambda)) * .Call(stringtv, as.character(y), as.character(y), as.integer(1), as.integer(nchar(y)), as.integer(nchar(y)), as.integer(1), as.double(lambda)))) } }, "spectrum" = { rval <- function(x,y=NULL) { if(!is(x,"vector")) stop("x must be a vector") if(!is(y,"vector")&&!is.null(y)) stop("y must be a vector") x <- paste(x,"\n",sep="") if(!is.null(y)) y <- paste(y,"\n",sep="") n <- nchar(x) m <- nchar(y) if(n < length | m < length){ warning("String length smaller than length parameter value") return(0)} if (normalized == FALSE){ if(is.null(y)) y <- x return(.Call(stringtv, as.character(x), as.character(y), as.integer(1), as.integer(n), as.integer(m), as.integer(3), as.double(length))) } if (is(x,"vector") && is.null(y) && normalized == TRUE) return(1) if (is(x,"vector") && is(y,"vector") && normalized == TRUE) return(.Call(stringtv, as.character(x), as.character(y), as.integer(1), as.integer(n), as.integer(m), as.integer(3), as.double(length)) / sqrt(.Call(stringtv, as.character(x), as.character(x), as.integer(1), as.integer(n), as.integer(n), as.integer(3), as.double(lambda)) * .Call(stringtv, as.character(y), as.character(y), as.integer(1), as.integer(m), as.integer(m), as.integer(3), as.double(length)))) } }, "boundrange" = { rval <- function(x,y=NULL) { if(!is(x,"vector")) stop("x must be a vector") if(!is(y,"vector")&&!is.null(y)) stop("y must be a vector") x <- paste(x,"\n",sep="") if(!is.null(y)) y <- paste(y,"\n",sep="") if (normalized == FALSE){ if(is.null(y)) y <- x return(.Call(stringtv, as.character(x), as.character(y), as.integer(1), as.integer(nchar(x)), as.integer(nchar(y)), as.integer(4), as.double(lambda))) } if (is(x,"vector") && is.null(y) && normalized == TRUE) return(1) if (is(x,"vector") && is(y,"vector") && normalized == TRUE) return(.Call(stringtv, as.character(x), as.character(y), as.integer(1), as.integer(nchar(x)), as.integer(nchar(y)), as.integer(4), as.double(lambda)) / sqrt(.Call(stringtv, as.character(x), as.character(x), as.integer(1), as.integer(nchar(x)), as.integer(nchar(x)), as.integer(4), as.double(lambda)) * .Call(stringtv, as.character(y), as.character(y), 
as.integer(1), as.integer(nchar(y)), as.integer(nchar(y)), as.integer(4), as.double(lambda)))) } }, "string" = { rval<- function(x, y = NULL) { if(!is(x,"vector")) stop("x must be a vector") if(!is(y,"vector")&&!is.null(y)) stop("y must be a vector") if (is(x,"vector") && is.null(y) && normalized == FALSE) return(.Call(substringk, as.character(x), as.character(x), as.integer(nchar(x)), as.integer(nchar(x)), as.integer(length), as.double(lambda))) if (is(x,"vector") && is(y,"vector") && normalized == FALSE) return(.Call(substringk, as.character(x), as.character(y), as.integer(nchar(x)), as.integer(nchar(y)), as.integer(length), as.double(lambda))) if (is(x,"vector") && is.null(y) && normalized == TRUE) return(1) if (is(x,"vector") && is(y,"vector") && normalized == TRUE) return(.Call(substringk, as.character(x), as.character(y), as.integer(nchar(x)), as.integer(nchar(y)), as.integer(length), as.double(lambda)) / sqrt(.Call(substringk, as.character(x), as.character(x), as.integer(nchar(x)), as.integer(nchar(x)), as.integer(length), as.double(lambda)) * .Call(substringk, as.character(y), as.character(y), as.integer(nchar(y)), as.integer(nchar(y)), as.integer(length), as.double(lambda)))) } }, "fullstring" = { rval<- function(x, y = NULL) { if(!is(x,"vector")) stop("x must be a vector") if(!is(y,"vector")&&!is.null(y)) stop("y must be a vector") if (is(x,"vector") && is.null(y) && normalized == FALSE) return(.Call(fullsubstringk, as.character(x), as.character(x), as.integer(nchar(x)), as.integer(nchar(x)), as.integer(length), as.double(lambda))) if (is(x,"vector") && is(y,"vector") && normalized == FALSE) return(.Call(fullsubstringk, as.character(x), as.character(y), as.integer(nchar(x)), as.integer(nchar(y)), as.integer(length), as.double(lambda))) if (is(x,"vector") && is.null(y) && normalized == TRUE) return(1) if (is(x,"vector") && is(y,"vector") && normalized == TRUE) return(.Call(fullsubstringk, as.character(x), as.character(y), as.integer(nchar(x)), as.integer(nchar(y)), as.integer(length), as.double(lambda)) / sqrt(.Call(fullsubstringk, as.character(x), as.character(x), as.integer(nchar(x)), as.integer(nchar(x)), as.integer(length), as.double(lambda)) * .Call(fullsubstringk, as.character(y), as.character(y), as.integer(nchar(y)), as.integer(nchar(y)), as.integer(length), as.double(lambda)))) } }) return(new("stringkernel",.Data=rval,kpar=list(length=length, lambda =lambda, type = type, normalized = normalized))) } ## show method for kernel functions setMethod("show",signature(object="kernel"), function(object) { switch(class(object), "rbfkernel" = cat(paste("Gaussian Radial Basis kernel function.", "\n","Hyperparameter :" ,"sigma = ", kpar(object)$sigma,"\n")), "laplacekernel" = cat(paste("Laplace kernel function.", "\n","Hyperparameter :" ,"sigma = ", kpar(object)$sigma,"\n")), "besselkernel" = cat(paste("Bessel kernel function.", "\n","Hyperparameter :" ,"sigma = ", kpar(object)$sigma,"order = ",kpar(object)$order, "degree = ", kpar(object)$degree,"\n")), "anovakernel" = cat(paste("Anova RBF kernel function.", "\n","Hyperparameter :" ,"sigma = ", kpar(object)$sigma, "degree = ", kpar(object)$degree,"\n")), "tanhkernel" = cat(paste("Hyperbolic Tangent kernel function.", "\n","Hyperparameters :","scale = ", kpar(object)$scale," offset = ", kpar(object)$offset,"\n")), "polykernel" = cat(paste("Polynomial kernel function.", "\n","Hyperparameters :","degree = ",kpar(object)$degree," scale = ", kpar(object)$scale," offset = ", kpar(object)$offset,"\n")), "vanillakernel" = cat(paste("Linear 
(vanilla) kernel function.", "\n")), "splinekernel" = cat(paste("Spline kernel function.", "\n")), "stringkernel" = { if(kpar(object)$type =="spectrum" | kpar(object)$type =="boundrange") cat(paste("String kernel function.", " Type = ", kpar(object)$type, "\n","Hyperparameters :","sub-sequence/string length = ",kpar(object)$length, "\n")) else if(kpar(object)$type =="exponential" | kpar(object)$type =="constant") cat(paste("String kernel function.", " Type = ", kpar(object)$type, "\n","Hyperparameters :"," lambda = ", kpar(object)$lambda, "\n")) else cat(paste("String kernel function.", " Type = ", kpar(object)$type, "\n","Hyperparameters :","sub-sequence/string length = ",kpar(object)$length," lambda = ", kpar(object)$lambda, "\n")) if(kpar(object)$normalized == TRUE) cat(" Normalized","\n") if(kpar(object)$normalized == FALSE) cat(" Not Normalized","\n")} ) }) ## create accessor function as in "S4 Classes in 15 pages more or less", well..
if (!isGeneric("kpar")){ if (is.function(kpar)) fun <- kpar else fun <- function(object) standardGeneric("kpar") setGeneric("kpar",fun) } setMethod("kpar","kernel", function(object) object@kpar) ## Functions that return useful kernel calculations (kernel matrix etc.)
## kernelMatrix function takes two or three arguments
kernelMatrix <- function(kernel, x, y=NULL) { if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) if(!is(x,"matrix")) stop("x must be a matrix") if(!is(y,"matrix")&&!is.null(y)) stop("y must be a matrix") n <- nrow(x) res1 <- matrix(rep(0,n*n), ncol = n) if(is.null(y)){ for(i in 1:n) { for(j in i:n) { res1[i,j] <- kernel(x[i,],x[j,]) } } res1 <- res1 + t(res1) diag(res1) <- diag(res1)/2 } if (is(y,"matrix")){ m<-dim(y)[1] res1 <- matrix(0,dim(x)[1],dim(y)[1]) for(i in 1:n) { for(j in 1:m) { res1[i,j] <- kernel(x[i,],y[j,]) } } } return(as.kernelMatrix(res1)) } setGeneric("kernelMatrix",function(kernel, x, y = NULL) standardGeneric("kernelMatrix")) kernelMatrix.rbfkernel <- function(kernel, x, y = NULL) { if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) if(!is(y,"matrix")&&!is.null(y)) stop("y must be a matrix or a vector") sigma = kpar(kernel)$sigma n <- dim(x)[1] dota <- rowSums(x*x)/2 if (is(x,"matrix") && is.null(y)){ res <- crossprod(t(x)) for (i in 1:n) res[i,]<- exp(2*sigma*(res[i,] - dota - rep(dota[i],n))) return(as.kernelMatrix(res)) } if (is(x,"matrix") && is(y,"matrix")){ if (!(dim(x)[2]==dim(y)[2])) stop("matrixes must have the same number of columns") m <- dim(y)[1] dotb <- rowSums(y*y)/2 res <- x%*%t(y) for( i in 1:m) res[,i]<- exp(2*sigma*(res[,i] - dota - rep(dotb[i],n))) return(as.kernelMatrix(res)) } } setMethod("kernelMatrix",signature(kernel="rbfkernel"),kernelMatrix.rbfkernel) kernelMatrix.laplacekernel <- function(kernel, x, y = NULL) { if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) if(!is(y,"matrix")&&!is.null(y)) stop("y must be a matrix or a vector") sigma = kpar(kernel)$sigma n <- dim(x)[1] dota <- rowSums(x*x)/2 if (is(x,"matrix") && is.null(y)){ res <- crossprod(t(x)) for (i in 1:n) res[i,]<- exp(-sigma*sqrt(round(-2*(res[i,] - dota - rep(dota[i],n)),9))) return(as.kernelMatrix(res)) } if (is(x,"matrix") && is(y,"matrix")){ if (!(dim(x)[2]==dim(y)[2])) stop("matrixes must have the same number of columns") m <- dim(y)[1] dotb <- rowSums(y*y)/2 res <- x%*%t(y) for( i in 1:m) res[,i]<- exp(-sigma*sqrt(round(-2*(res[,i] - dota - rep(dotb[i],n)),9))) return(as.kernelMatrix(res)) } }
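## A minimal usage sketch for the vectorised kernelMatrix() methods above
## (made-up data, wrapped in if(FALSE) so nothing runs at load time): each
## method should agree with evaluating the kernel function elementwise.
if (FALSE) {
    xs <- matrix(rnorm(12), ncol = 3)   # four made-up points in R^3
    kern <- laplacedot(sigma = 0.5)
    K <- kernelMatrix(kern, xs)         # 4 x 4 kernel matrix
    stopifnot(abs(K[1, 2] - kern(xs[1, ], xs[2, ])) < 1e-10,  # matches elementwise value
              abs(K[1, 2] - K[2, 1]) < 1e-10,                 # symmetric
              abs(K[1, 1] - 1) < 1e-10)                       # unit self-similarity
}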
setMethod("kernelMatrix",signature(kernel="laplacekernel"),kernelMatrix.laplacekernel) kernelMatrix.besselkernel <- function(kernel, x, y = NULL) { if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) if(!is(y,"matrix")&&!is.null(y)) stop("y must be a matrix or a vector") sigma = kpar(kernel)$sigma nu = kpar(kernel)$order ni = kpar(kernel)$degree n <- dim(x)[1] lim <- 1/(gamma(nu+1)*2^(nu)) dota <- rowSums(x*x)/2 if (is(x,"matrix") && is.null(y)){ res <- crossprod(t(x)) for (i in 1:n){ xx <- sigma*sqrt(round(-2*(res[i,] - dota - rep(dota[i],n)),9)) res[i,] <- besselJ(xx,nu)*(xx^(-nu)) res[i,which(xx<10e-5)] <- lim } return(as.kernelMatrix((res/lim)^ni)) } if (is(x,"matrix") && is(y,"matrix")){ if (!(dim(x)[2]==dim(y)[2])) stop("matrixes must have the same number of columns") m <- dim(y)[1] dotb <- rowSums(y*y)/2 res <- x%*%t(y) for( i in 1:m){ xx <- sigma*sqrt(round(-2*(res[,i] - dota - rep(dotb[i],n)),9)) res[,i] <- besselJ(xx,nu)*(xx^(-nu)) res[which(xx<10e-5),i] <- lim } return(as.kernelMatrix((res/lim)^ni)) } } setMethod("kernelMatrix",signature(kernel="besselkernel"),kernelMatrix.besselkernel) kernelMatrix.anovakernel <- function(kernel, x, y = NULL) { if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) if(!is(y,"matrix")&&!is.null(y)) stop("y must be a matrix or a vector") sigma = kpar(kernel)$sigma degree = kpar(kernel)$degree n <- dim(x)[1] if (is(x,"matrix") && is.null(y)){ a <- matrix(0, dim(x)[2], n) res <- matrix(0, n ,n) for (i in 1:n) { a[rep(TRUE,dim(x)[2]), rep(TRUE,n)] <- x[i,] res[i,]<- colSums(exp( - sigma*(a - t(x))^2))^degree } return(as.kernelMatrix(res)) } if (is(x,"matrix") && is(y,"matrix")){ if (!(dim(x)[2]==dim(y)[2])) stop("matrixes must have the same number of columns") m <- dim(y)[1] b <- matrix(0, dim(x)[2],m) res <- matrix(0, dim(x)[1],m) for( i in 1:n) { b[rep(TRUE,dim(x)[2]), rep(TRUE,m)] <- x[i,] res[i,]<- colSums(exp( - sigma*(b - t(y))^2))^degree } return(as.kernelMatrix(res)) } } setMethod("kernelMatrix",signature(kernel="anovakernel"),kernelMatrix.anovakernel) kernelMatrix.polykernel <- function(kernel, x, y = NULL) { if(!is(y,"matrix")&&!is.null(y)) stop("y must be a matrix") scale = kpar(kernel)$scale offset = kpar(kernel)$offset degree = kpar(kernel)$degree if (is(x,"matrix") && is.null(y)) { res <- (scale*crossprod(t(x))+offset)^degree return(as.kernelMatrix(res)) } if (is(x,"matrix") && is(y,"matrix")){ if (!(dim(x)[2]==dim(y)[2])) stop("matrixes must have the same number of columns") res <- (scale*crossprod(t(x),t(y)) + offset)^degree return(as.kernelMatrix(res)) } } setMethod("kernelMatrix",signature(kernel="polykernel"),kernelMatrix.polykernel) kernelMatrix.vanilla <- function(kernel, x, y = NULL) { if(!is(y,"matrix")&&!is.null(y)) stop("y must be a matrix") if (is(x,"matrix") && is.null(y)){ res <- crossprod(t(x)) return(as.kernelMatrix(res)) } if (is(x,"matrix") && is(y,"matrix")){ if (!(dim(x)[2]==dim(y)[2])) stop("matrixes must have the same number of columns") res <- crossprod(t(x),t(y)) return(as.kernelMatrix(res)) } } setMethod("kernelMatrix",signature(kernel="vanillakernel"),kernelMatrix.vanilla) kernelMatrix.tanhkernel <- function(kernel, x, y = NULL) { if(!is(y,"matrix")&&!is.null(y)) stop("y must be a matrix") if (is(x,"matrix") && is.null(y)){ scale = kpar(kernel)$scale offset = kpar(kernel)$offset res <- tanh(scale*crossprod(t(x)) + offset) return(as.kernelMatrix(res)) } if (is(x,"matrix") && is(y,"matrix")){ if (!(dim(x)[2]==dim(y)[2])) stop("matrixes must have the same number of 
columns") res <- tanh(scale*crossprod(t(x),t(y)) + offset) return(as.kernelMatrix(res)) } } setMethod("kernelMatrix",signature(kernel="tanhkernel"),kernelMatrix.tanhkernel) kernelMatrix.splinekernel <- function(kernel, x, y = NULL) { if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) if(!is(y,"matrix")&&!is.null(y)) stop("y must be a matrix or a vector") sigma = kpar(kernel)$sigma degree = kpar(kernel)$degree n <- dim(x)[1] if (is(x,"matrix") && is.null(y)){ a <- matrix(0, dim(x)[2], n) res <- matrix(0, n ,n) x <- t(x) for (i in 1:n) { dr <- x + x[,i] dp <- x * x[,i] dm <- pmin(x,x[,i]) res[i,] <- apply((1 + dp + dp*dm - (dr/2)*dm^2 + (dm^3)/3),2, prod) } return(as.kernelMatrix(res)) } if (is(x,"matrix") && is(y,"matrix")){ if (!(dim(x)[2]==dim(y)[2])) stop("matrixes must have the same number of columns") m <- dim(y)[1] b <- matrix(0, dim(x)[2],m) res <- matrix(0, dim(x)[1],m) x <- t(x) y <- t(y) for( i in 1:n) { dr <- y + x[,i] dp <- y * x[,i] dm <- pmin(y,x[,i]) res[i,] <- apply((1 + dp + dp*dm - (dr/2)*dm^2 + (dm^3)/3),2, prod) } return(as.kernelMatrix(res)) } } setMethod("kernelMatrix",signature(kernel="splinekernel"),kernelMatrix.splinekernel) kernelMatrix.stringkernel <- function(kernel, x, y=NULL) { n <- length(x) res1 <- matrix(rep(0,n*n), ncol = n) normalized = kpar(kernel)$normalized if(is(x,"list")) x <- sapply(x,paste,collapse="") if(is(y,"list")) y <- sapply(y,paste,collapse="") if (kpar(kernel)$type == "sequence" |kpar(kernel)$type == "string"|kpar(kernel)$type == "fullstring") { resdiag <- rep(0,n) if(normalized == TRUE) kernel <- stringdot(length = kpar(kernel)$length, type = kpar(kernel)$type, lambda = kpar(kernel)$lambda, normalized = FALSE) ## y is null if(is.null(y)){ if(normalized == TRUE){ ## calculate diagonal elements first, and use them to normalize for (i in 1:n) resdiag[i] <- kernel(x[[i]],x[[i]]) for(i in 1:n) { for(j in (i:n)[-1]) { res1[i,j] <- kernel(x[[i]],x[[j]])/sqrt(resdiag[i]*resdiag[j]) } } res1 <- res1 + t(res1) diag(res1) <- rep(1,n) } else{ for (i in 1:n) resdiag[i] <- kernel(x[[i]],x[[i]]) for(i in 1:n) { for(j in (i:n)[-1]) { res1[i,j] <- kernel(x[[i]],x[[j]]) } } res1 <- res1 + t(res1) diag(res1) <- resdiag } } if (!is.null(y)){ m <- length(y) res1 <- matrix(0,n,m) resdiag1 <- rep(0,m) if(normalized == TRUE){ for(i in 1:n) resdiag[i] <- kernel(x[[i]],x[[i]]) for(i in 1:m) resdiag1[i] <- kernel(y[[i]],y[[i]]) for(i in 1:n) { for(j in 1:m) { res1[i,j] <- kernel(x[[i]],y[[j]])/sqrt(resdiag[i]*resdiag1[j]) } } } else{ for(i in 1:n) { for(j in 1:m) { res1[i,j] <- kernel(x[[i]],y[[j]]) } } } } return(as.kernelMatrix(res1)) } else { switch(kpar(kernel)$type, "exponential" = sktype <- 2, "constant" = sktype <- 1, "spectrum" = sktype <- 3, "boundrange" = sktype <- 4) if(sktype==3 &(any(nchar(x) < kpar(kernel)$length)|any(nchar(x) < kpar(kernel)$length))) stop("spectral kernel does not accept strings shorter than the length parameter") if(is(x,"list")) x <- unlist(x) if(is(y,"list")) y <- unlist(y) x <- paste(x,"\n",sep="") if(!is.null(y)) y <- paste(y,"\n",sep="") if(is.null(y)) ret <- matrix(0, length(x),length(x)) else ret <- matrix(0,length(x),length(y)) if(is.null(y)){ for(i in 1:length(x)) ret[i,i:length(x)] <- .Call(stringtv, as.character(x[i]), as.character(x[i:length(x)]), as.integer(length(x) - i + 1), as.integer(nchar(x[i])), as.integer(nchar(x[i:length(x)])), as.integer(sktype), as.double(kpar(kernel)$lambda)) ret <- ret + t(ret) diag(ret) <- diag(ret)/2 } else for(i in 1:length(x)) ret[i,] <- .Call(stringtv, 
as.character(x[i]), as.character(y), as.integer(length(y)), as.integer(nchar(x[i])), as.integer(nchar(y)), as.integer(sktype), as.double(kpar(kernel)$lambda)) if(normalized == TRUE){ if(is.null(y)) ret <- t((1/sqrt(diag(ret)))*t(ret*(1/sqrt(diag(ret))))) else{ norm1 <- rep(0,length(x)) norm2 <- rep(0,length(y)) for( i in 1:length(x)) norm1[i] <- .Call(stringtv, as.character(x[i]), as.character(x[i]), as.integer(1), as.integer(nchar(x[i])), as.integer(nchar(x[i])), as.integer(sktype), as.double(kpar(kernel)$lambda)) for( i in 1:length(y)) norm2[i] <- .Call(stringtv, as.character(y[i]), as.character(y[i]), as.integer(1), as.integer(nchar(y[i])), as.integer(nchar(y[i])), as.integer(sktype), as.double(kpar(kernel)$lambda)) ret <- t((1/sqrt(norm2))*t(ret*(1/sqrt(norm1)))) } } } return(as.kernelMatrix(ret)) } setMethod("kernelMatrix",signature(kernel="stringkernel"),kernelMatrix.stringkernel) ## kernelMult computes kernel matrix - vector product ## function computing * z ( %*% z) kernelMult <- function(kernel, x, y=NULL, z, blocksize = 128) { # if(is.function(kernel)) ker <- deparse(substitute(kernel)) # kernel <- do.call(kernel, kpar) if(!is(x,"matrix")) stop("x must be a matrix") if(!is(y,"matrix")&&!is.null(y)) stop("y must be a matrix") if(!is(z,"matrix")&&!is(z,"vector")) stop("z must ba a matrix or a vector") n <- nrow(x) if(is.null(y)) { ## check if z,x match z <- as.matrix(z) if(is.null(y)&&!dim(z)[1]==n) stop("z columns/length do not match x columns") res1 <- matrix(rep(0,n*n), ncol = n) for(i in 1:n) { for(j in i:n) { res1[j,i] <- kernel(x[i,],x[j,]) } } res1 <- res1 + t(res1) diag(res1) <- diag(res1)/2 } if (is(y,"matrix")) { m <- dim(y)[1] z <- as.matrix(z) if(!dim(z)[1] == m) stop("z has wrong dimension") res1 <- matrix(rep.int(0,m*n),ncol=m) for(i in 1:n) { for(j in 1:m) { res1[i,j] <- kernel(x[i,],y[j,]) } } } return(res1%*%z) } setGeneric("kernelMult", function(kernel, x, y=NULL, z, blocksize = 256) standardGeneric("kernelMult")) kernelMult.character <- function(kernel, x, y=NULL, z, blocksize = 256) { return(x%*%z) } setMethod("kernelMult",signature(kernel="character", x="kernelMatrix"),kernelMult.character) kernelMult.rbfkernel <- function(kernel, x, y=NULL, z, blocksize = 256) { if(!is(y,"matrix")&&!is.null(y)&&!is(y,"vector")) stop("y must be a matrix or a vector") if(!is(z,"matrix")&&!is(z,"vector")) stop("z must be a matrix or a vector") if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) sigma <- kpar(kernel)$sigma n <- dim(x)[1] m <- dim(x)[2] nblocks <- floor(n/blocksize) lowerl <- 1 upperl <- 0 dota <- as.matrix(rowSums(x^2)) if (is.null(y)) { z <- as.matrix(z) if(!dim(z)[1]==n) stop("z rows must equal x rows") res <- matrix(rep(0,dim(z)[2]*n), ncol = dim(z)[2]) if(nblocks > 0) { dotab <- rep(1,blocksize)%*%t(dota) for(i in 1:nblocks) { upperl = upperl + blocksize res[lowerl:upperl,] <- exp(sigma*(2*x[lowerl:upperl,]%*%t(x) - dotab - dota[lowerl:upperl]%*%t(rep.int(1,n))))%*%z lowerl <- upperl + 1 } } if(lowerl <= n) res[lowerl:n,] <- exp(sigma*(2*x[lowerl:n,]%*%t(x) - rep.int(1,n+1-lowerl)%*%t(dota) - dota[lowerl:n]%*%t(rep.int(1,n))))%*%z } if(is(y,"matrix")) { n2 <- dim(y)[1] z <- as.matrix(z) if(!dim(z)[1]==n2) stop("z length must equal y rows") res <- matrix(rep(0,dim(z)[2]*n), ncol = dim(z)[2]) dotb <- as.matrix(rowSums(y*y)) if(nblocks > 0) { dotbb <- rep(1,blocksize)%*%t(dotb) for(i in 1:nblocks) { upperl = upperl + blocksize res[lowerl:upperl,] <- exp(sigma*(2*x[lowerl:upperl,]%*%t(y) - dotbb - dota[lowerl:upperl]%*%t(rep.int(1,n2))))%*%z 
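## slide the row window forward; multiplying block by block keeps the full
## kernel matrix from ever being held in memory at once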
lowerl <- upperl + 1 } } if(lowerl <= n) res[lowerl:n,] <- exp(sigma*(2*x[lowerl:n,]%*%t(y) - rep.int(1,n+1-lowerl)%*%t(dotb) - dota[lowerl:n]%*%t(rep.int(1,n2))))%*%z } return(res) } setMethod("kernelMult",signature(kernel="rbfkernel"),kernelMult.rbfkernel) kernelMult.laplacekernel <- function(kernel, x, y=NULL, z, blocksize = 256) { if(!is(y,"matrix")&&!is.null(y)&&!is(y,"vector")) stop("y must be a matrix or a vector") if(!is(z,"matrix")&&!is(z,"vector")) stop("z must be a matrix or a vector") if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) sigma <- kpar(kernel)$sigma n <- dim(x)[1] m <- dim(x)[2] nblocks <- floor(n/blocksize) lowerl <- 1 upperl <- 0 dota <- as.matrix(rowSums(x^2)) if (is.null(y)) { z <- as.matrix(z) if(!dim(z)[1]==n) stop("z rows must equal x rows") res <- matrix(rep(0,dim(z)[2]*n), ncol = dim(z)[2]) if(nblocks > 0) { dotab <- rep(1,blocksize)%*%t(dota) for(i in 1:nblocks) { upperl = upperl + blocksize res[lowerl:upperl,] <- exp(-sigma*sqrt(-round(2*x[lowerl:upperl,]%*%t(x) - dotab - dota[lowerl:upperl]%*%t(rep.int(1,n)),9)))%*%z lowerl <- upperl + 1 } } if(lowerl <= n) res[lowerl:n,] <- exp(-sigma*sqrt(-round(2*x[lowerl:n,]%*%t(x) - rep.int(1,n+1-lowerl)%*%t(dota) - dota[lowerl:n]%*%t(rep.int(1,n)),9)))%*%z } if(is(y,"matrix")) { n2 <- dim(y)[1] z <- as.matrix(z) if(!dim(z)[1]==n2) stop("z length must equal y rows") res <- matrix(rep(0,dim(z)[2]*n), ncol = dim(z)[2]) dotb <- as.matrix(rowSums(y*y)) if(nblocks > 0) { dotbb <- rep(1,blocksize)%*%t(dotb) for(i in 1:nblocks) { upperl = upperl + blocksize res[lowerl:upperl,] <- exp(-sigma*sqrt(-round(2*x[lowerl:upperl,]%*%t(y) - dotbb - dota[lowerl:upperl]%*%t(rep.int(1,n2)),9)))%*%z lowerl <- upperl + 1 } } if(lowerl <= n) res[lowerl:n,] <- exp(-sigma*sqrt(-round(2*x[lowerl:n,]%*%t(y) - rep.int(1,n+1-lowerl)%*%t(dotb) - dota[lowerl:n]%*%t(rep.int(1,n2)),9)))%*%z } return(res) } setMethod("kernelMult",signature(kernel="laplacekernel"),kernelMult.laplacekernel) kernelMult.besselkernel <- function(kernel, x, y=NULL, z, blocksize = 256) { if(!is(y,"matrix")&&!is.null(y)&&!is(y,"vector")) stop("y must be a matrix") if(!is(z,"matrix")&&!is(z,"vector")) stop("z must be a matrix or a vector") if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) sigma <- kpar(kernel)$sigma nu <- kpar(kernel)$order ni <- kpar(kernel)$degree n <- dim(x)[1] m <- dim(x)[2] nblocks <- floor(n/blocksize) lowerl <- 1 upperl <- 0 lim <- 1/(gamma(nu+1)*2^(nu)) dota <- as.matrix(rowSums(x^2)) if (is.null(y)) { z <- as.matrix(z) if(!dim(z)[1]==n) stop("z rows must equal x rows") res <- matrix(rep(0,dim(z)[2]*n), ncol = dim(z)[2]) if(nblocks > 0) { dotab <- rep(1,blocksize)%*%t(dota) for(i in 1:nblocks) { upperl = upperl + blocksize xx <- sigma*sqrt(-round(2*x[lowerl:upperl,]%*%t(x) - dotab - dota[lowerl:upperl]%*%t(rep.int(1,n)),9)) res1 <- besselJ(xx,nu)*(xx^(-nu)) res1[which(xx<10e-5)] <- lim res[lowerl:upperl,] <- ((res1/lim)^ni)%*%z lowerl <- upperl + 1 } } if(lowerl <= n) { xx <- sigma*sqrt(-round(2*x[lowerl:n,]%*%t(x) - rep.int(1,n+1-lowerl)%*%t(dota) - dota[lowerl:n]%*%t(rep.int(1,n)),9)) res1 <- besselJ(xx,nu)*(xx^(-nu)) res1[which(xx<10e-5)] <- lim res[lowerl:n,] <- ((res1/lim)^ni)%*%z } } if(is(y,"matrix")) { n2 <- dim(y)[1] z <- as.matrix(z) if(!dim(z)[1]==n2) stop("z length must equal y rows") res <- matrix(rep(0,dim(z)[2]*n), ncol = dim(z)[2]) dotb <- as.matrix(rowSums(y*y)) if(nblocks > 0) { dotbb <- rep(1,blocksize)%*%t(dotb) for(i in 1:nblocks) { upperl = upperl + blocksize xx <- 
sigma*sqrt(-round(2*x[lowerl:upperl,]%*%t(y) - dotbb - dota[lowerl:upperl]%*%t(rep.int(1,n2)),9)) res1 <- besselJ(xx,nu)*(xx^(-nu)) res1[which(xx < 10e-5)] <- lim res[lowerl:upperl,] <- ((res1/lim)^ni)%*%z lowerl <- upperl + 1 } } if(lowerl <= n) { xx <- sigma*sqrt(-round(2*x[lowerl:n,]%*%t(y) - rep.int(1,n+1-lowerl)%*%t(dotb) - dota[lowerl:n]%*%t(rep.int(1,n2)),9)) res1 <- besselJ(xx,nu)*(xx^(-nu)) res1[which(xx < 10e-5)] <- lim res[lowerl:n,] <- ((res1/lim)^ni)%*%z } } return(res) } setMethod("kernelMult",signature(kernel="besselkernel"),kernelMult.besselkernel) kernelMult.anovakernel <- function(kernel, x, y=NULL, z, blocksize = 256) { if(!is(y,"matrix")&&!is.null(y)&&!is(y,"vector")) stop("y must be a matrix or a vector") if(!is(z,"matrix")&&!is(z,"vector")) stop("z must be a matrix or a vector") if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) sigma <- kpar(kernel)$sigma degree <- kpar(kernel)$degree n <- dim(x)[1] m <- dim(x)[2] nblocks <- floor(n/blocksize) lowerl <- 1 upperl <- 0 if (is.null(y)) { z <- as.matrix(z) if(!dim(z)[1]==n) stop("z rows must equal x rows") res <- matrix(rep(0,dim(z)[2]*n), ncol = dim(z)[2]) if(nblocks > 0) { a <- matrix(0,m,blocksize) re <- matrix(0, n, blocksize) for(i in 1:nblocks) { upperl = upperl + blocksize for(j in 1:n) { a[rep(TRUE,m),rep(TRUE,blocksize)] <- x[j,] re[j,] <- colSums(exp( - sigma*(a - t(x[lowerl:upperl,]))^2))^degree } res[lowerl:upperl,] <- t(re)%*%z lowerl <- upperl + 1 } } if(lowerl <= n){ a <- matrix(0,m,n-lowerl+1) re <- matrix(0,n,n-lowerl+1) for(j in 1:n) { a[rep(TRUE,m),rep(TRUE,n-lowerl+1)] <- x[j,] re[j,] <- colSums(exp( - sigma*(a - t(x[lowerl:n,,drop=FALSE]))^2))^degree } res[lowerl:n,] <- t(re)%*%z } } if(is(y,"matrix")) { n2 <- dim(y)[1] nblocks <- floor(n2/blocksize) z <- as.matrix(z) if(!dim(z)[1]==n2) stop("z length must equal y rows") res <- matrix(rep(0,dim(z)[2]*n), ncol = dim(z)[2]) if(nblocks > 0) { b <- matrix(0, m, blocksize) re <- matrix(0, n, blocksize) for(i in 1:nblocks) { upperl = upperl + blocksize for(j in 1:n) { b[rep(TRUE,dim(x)[2]), rep(TRUE,blocksize)] <- x[j,] re[j,]<- colSums(exp( - sigma*(b - t(y[lowerl:upperl,]))^2)^degree) } res[,1] <- res[,1] + re %*%z[lowerl:upperl,] lowerl <- upperl + 1 } } if(lowerl <= n) { b <- matrix(0, dim(x)[2], n2-lowerl+1) re <- matrix(0, n, n2-lowerl+1) for( i in 1:n) { b[rep(TRUE,dim(x)[2]),rep(TRUE,n2-lowerl+1)] <- x[i,] re[i,]<- colSums(exp( - sigma*(b - t(y[lowerl:n2,,drop=FALSE]))^2)^degree) } res[,1] <- res[,1] + re%*%z[lowerl:n2] } } return(res) } setMethod("kernelMult",signature(kernel="anovakernel"),kernelMult.anovakernel) kernelMult.splinekernel <- function(kernel, x, y=NULL, z, blocksize = 256) { if(!is(y,"matrix")&&!is.null(y)&&!is(y,"vector")) stop("y must be a matrix or a vector") if(!is(z,"matrix")&&!is(z,"vector")) stop("z must be a matrix or a vector") n <- dim(x)[1] m <- dim(x)[2] if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) nblocks <- floor(n/blocksize) lowerl <- 1 upperl <- 0 if (is.null(y)) { z <- as.matrix(z) if(!dim(z)[1]==n) stop("z rows must equal x rows") res <- matrix(rep(0,dim(z)[2]*n), ncol = dim(z)[2]) x <- t(x) if(nblocks > 0) { re <- matrix(0, dim(z)[1], blocksize) for(i in 1:nblocks) { upperl = upperl + blocksize for (j in lowerl:upperl) { dr <- x + x[ , j] dp <- x * x[ , j] dm <- pmin(x,x[,j]) re[,j-(i-1)*blocksize] <- apply((1 + dp + dp*dm - (dr/2)*dm^2 + (dm^3)/3),2, prod) } res[lowerl:upperl,] <- crossprod(re,z) lowerl <- upperl + 1 } } if(lowerl <= n){ a <- 
matrix(0,m,n-lowerl+1) re <- matrix(0,dim(z)[1],n-lowerl+1) for(j in lowerl:(n-lowerl+1)) { dr <- x + x[ , j] dp <- x * x[ , j] dm <- pmin(x,x[,j]) re[,j-nblocks*blocksize] <- apply((1 + dp + dp*dm - (dr/2)*dm^2 + (dm^3)/3),2, prod) } res[lowerl:n,] <- crossprod(re,z) } } if(is(y,"matrix")) { n2 <- dim(y)[1] nblocks <- floor(n2/blocksize) z <- as.matrix(z) if(!dim(z)[1]==n2) stop("z length must equal y rows") res <- matrix(rep(0,dim(z)[2]*n), ncol = dim(z)[2]) x <- t(x) y <- t(y) if(nblocks > 0) { re <- matrix(0, dim(z)[1], blocksize) for(i in 1:nblocks) { upperl = upperl + blocksize for(j in lowerl:upperl) { dr <- y + x[ , j] dp <- y * x[ , j] dm <- pmin(y,x[,j]) re[,j-(i-1)*blocksize] <- apply((1 + dp + dp*dm - (dr/2)*dm^2 + (dm^3)/3),2, prod) } res[lowerl:upperl] <- crossprod(re, z) lowerl <- upperl + 1 } } if(lowerl <= n) { b <- matrix(0, dim(x)[2], n-lowerl+1) re <- matrix(0, dim(z)[1], n-lowerl+1) for(j in lowerl:(n-lowerl+1)) { dr <- y + x[, j] dp <- y * x[, j] dm <- pmin(y,x[,j]) re[,j-nblocks*blocksize] <- apply((1 + dp + dp*dm - (dr/2)*dm^2 + (dm^3)/3),2, prod) } res[lowerl:n] <- crossprod(re, z) } } return(res) } setMethod("kernelMult",signature(kernel="splinekernel"),kernelMult.splinekernel) kernelMult.polykernel <- function(kernel, x, y=NULL, z, blocksize = 256) { if(!is(y,"matrix")&&!is.null(y)&&!is(y,"vector")) stop("y must be a matrix") if(!is(z,"matrix")&&!is(z,"vector")) stop("z must be a matrix or a vector") if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) degree <- kpar(kernel)$degree scale <- kpar(kernel)$scale offset <- kpar(kernel)$offset n <- dim(x)[1] m <- dim(x)[2] nblocks <- floor(n/blocksize) lowerl <- 1 upperl <- 0 if (is.null(y)) { z <- as.matrix(z) if(!dim(z)[1]==n) stop("z rows must equal x rows") res <- matrix(rep(0,dim(z)[2]*n), ncol = dim(z)[2]) if(nblocks > 0) for(i in 1:nblocks) { upperl = upperl + blocksize res[lowerl:upperl,] <- ((scale*x[lowerl:upperl,]%*%t(x) + offset)^degree) %*% z lowerl <- upperl + 1 } if(lowerl <= n) res[lowerl:n,] <- ((scale*x[lowerl:n,]%*%t(x) +offset)^degree)%*%z } if(is(y,"matrix")) { n2 <- dim(y)[1] z <- as.matrix(z) if(!dim(z)[1]==n2) stop("z length must equal y rows") res <- matrix(rep(0,dim(z)[2]*n), ncol = dim(z)[2]) if(nblocks > 0) for(i in 1:nblocks) { upperl = upperl + blocksize res[lowerl:upperl,] <- ((scale*x[lowerl:upperl,]%*%t(y) + offset)^degree)%*%z lowerl <- upperl + 1 } if(lowerl <= n) res[lowerl:n,] <- ((scale*x[lowerl:n,]%*%t(y) + offset)^degree)%*%z } return(res) } setMethod("kernelMult",signature(kernel="polykernel"),kernelMult.polykernel) kernelMult.tanhkernel <- function(kernel, x, y=NULL, z, blocksize = 256) { if(!is(y,"matrix")&&!is.null(y)&&!is(y,"vector")) stop("y must be a matrix or a vector") if(!is(z,"matrix")&&!is(z,"vector")) stop("z must be a matrix or a vector") if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) scale <- kpar(kernel)$scale offset <- kpar(kernel)$offset n <- dim(x)[1] m <- dim(x)[2] nblocks <- floor(n/blocksize) lowerl <- 1 upperl <- 0 if (is.null(y)) { z <- as.matrix(z) if(!dim(z)[1]==n) stop("z rows must equal x rows") res <- matrix(rep(0,dim(z)[2]*n), ncol = dim(z)[2]) if(nblocks > 0) for(i in 1:nblocks) { upperl = upperl + blocksize res[lowerl:upperl,] <- tanh(scale*x[lowerl:upperl,]%*%t(x) + offset) %*% z lowerl <- upperl + 1 } if(lowerl <= n) res[lowerl:n,] <- tanh(scale*x[lowerl:n,]%*%t(x) +offset)%*%z } if(is(y,"matrix")) { n2 <- dim(y)[1] z <- as.matrix(z) if(!dim(z)[1]==n2) stop("z length must equal y rows") res <- 
matrix(rep(0,dim(z)[2]*n), ncol = dim(z)[2]) if(nblocks > 0) for(i in 1:nblocks) { upperl = upperl + blocksize res[lowerl:upperl,] <- tanh(scale*x[lowerl:upperl,]%*%t(y) + offset)%*%z lowerl <- upperl + 1 } if(lowerl <= n) res[lowerl:n,] <- tanh(scale*x[lowerl:n,]%*%t(y) + offset)%*%z } return(res) } setMethod("kernelMult",signature(kernel="tanhkernel"),kernelMult.tanhkernel) kernelMult.vanillakernel <- function(kernel, x, y=NULL, z, blocksize = 256) { if(!is(y,"matrix")&&!is.null(y)&&!is(y,"vector")) stop("y must be a matrix or vector") if(!is(z,"matrix")&&!is(z,"vector")) stop("z must be a matrix or a vector") n <- dim(x)[1] m <- dim(x)[2] if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) if (is.null(y)) { z <- as.matrix(z) if(!dim(z)[1]==n) stop("z rows must equal x rows") res <- t(crossprod(crossprod(x,z),t(x))) } if(is(y,"matrix")) { n2 <- dim(y)[1] z <- as.matrix(z) if(!dim(z)[1]==n2) stop("z length must equal y rows") res <- t(crossprod(crossprod(y,z),t(x))) } return(res) } setMethod("kernelMult",signature(kernel="vanillakernel"),kernelMult.vanillakernel) kernelMult.stringkernel <- function(kernel, x, y=NULL, z, blocksize = 256) { if(!is(z,"matrix")&&!is(z,"vector")) stop("z must be a matrix or a vector") normalized = kpar(kernel)$normalized n <- length(x) res1 <- matrix(rep(0,n*n), ncol = n) resdiag <- rep(0,n) if(is(x,"list")) x <- sapply(x,paste,collapse="") if(is(y,"list")) y <- sapply(y,paste,collapse="") if (kpar(kernel)$type == "sequence" |kpar(kernel)$type == "string"|kpar(kernel)$type == "fullstring") { if(normalized == TRUE) kernel <- stringdot(length = kpar(kernel)$length, type = kpar(kernel)$type, lambda = kpar(kernel)$lambda, normalized = FALSE) ## y is null if(is.null(y)){ if(normalized == TRUE){ z <- as.matrix(z) if(dim(z)[1]!= n) stop("z rows must be equal to x length") dz <- dim(z)[2] vres <- matrix(0,n,dz) ## calculate diagonal elements first, and use them to normalize for (i in 1:n) resdiag[i] <- kernel(x[[i]],x[[i]]) for(i in 1:n) { for(j in (i:n)[-1]) { res1[i,j] <- kernel(x[[i]],x[[j]])/sqrt(resdiag[i]*resdiag[j]) } } res1 <- res1 + t(res1) diag(res1) <- rep(1,n) vres <- res1 %*% z } else{ z <- as.matrix(z) if(dim(z)[1]!= n) stop("z rows must be equal to x length") dz <- dim(z)[2] vres <- matrix(0,n,dz) ## calculate diagonal elements first, and use them to normalize for (i in 1:n) resdiag[i] <- kernel(x[[i]],x[[i]]) for(i in 1:n) { for(j in (i:n)[-1]) { res1[i,j] <- kernel(x[[i]],x[[j]]) } } res1 <- res1 + t(res1) diag(res1) <- resdiag vres <- res1 %*% z } } if (!is.null(y)){ if(normalized == TRUE){ nblocks <- floor(n/blocksize) lowerl <- 1 upperl <- 0 m <- length(y) z <- as.matrix(z) if(dim(z)[1]!= m) stop("z rows must be equal to y length") resdiag1 <- rep(0,m) dz <- dim(z)[2] vres <- matrix(0,n,dz) for(i in 1:n) resdiag[i] <- kernel(x[[i]],x[[i]]) for(i in 1:m) resdiag1[i] <- kernel(y[[i]],y[[i]]) if (nblocks > 0){ res1 <- matrix(0,blocksize,m) for(k in 1:nblocks){ upperl <- upperl + blocksize for(i in lowerl:(upperl)) { for(j in 1:m) { res1[i - (k-1)*blocksize,j] <- kernel(x[[i]],y[[j]])/sqrt(resdiag[i]*resdiag1[j]) } } vres[lowerl:upperl,] <- res1 %*% z lowerl <- upperl +1 } } if(lowerl <= n) { res1 <- matrix(0,n-lowerl+1,m) for(i in lowerl:n) { for(j in 1:m) { res1[i - nblocks*blocksize,j] <- kernel(x[[i]],y[[j]])/sqrt(resdiag[i]*resdiag1[j]) } } vres[lowerl:n,] <- res1 %*% z } } else { nblocks <- floor(n/blocksize) lowerl <- 1 upperl <- 0 m <- length(y) z <- as.matrix(z) if(dim(z)[1]!= m) stop("z rows must be equal to y length") 
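## unnormalised cross case: fill a block of raw k(x_i, y_j) values at a
## time and multiply it into z before moving on to the next block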
dz <- dim(z)[2] vres <- matrix(0,n,dz) if (nblocks > 0){ res1 <- matrix(0,blocksize,m) for(k in 1:nblocks){ upperl <- upperl + blocksize for(i in lowerl:(upperl)) { for(j in 1:m) { res1[i - (k-1)*blocksize, j] <- kernel(x[[i]],y[[j]]) } } vres[lowerl:upperl,] <- res1 %*% z lowerl <- upperl +1 } } if(lowerl <= n) { res1 <- matrix(0,n-lowerl+1,m) for(i in lowerl:n) { for(j in 1:m) { res1[i - nblocks*blocksize,j] <- kernel(x[[i]],y[[j]]) } } vres[lowerl:n,] <- res1 %*% z } } } } else { switch(kpar(kernel)$type, "exponential" = sktype <- 2, "constant" = sktype <- 1, "spectrum" = sktype <- 3, "boundrange" = sktype <- 4) if(sktype==3 &(any(nchar(x) < kpar(kernel)$length)|any(nchar(x) < kpar(kernel)$length))) stop("spectral kernel does not accept strings shorter than the length parameter") x <- paste(x,"\n",sep="") if(!is.null(y)) y <- paste(y,"\n",sep="") ## y is null if(is.null(y)){ if(normalized == TRUE){ nblocks <- floor(n/blocksize) lowerl <- 1 upperl <- 0 z <- as.matrix(z) if(dim(z)[1]!= n) stop("z rows must be equal to y length") dz <- dim(z)[2] vres <- matrix(0,n,dz) for (i in 1:n) resdiag[i] <- .Call(stringtv, as.character(x[i]), as.character(x[i]), as.integer(1), as.integer(nchar(x[i])), as.integer(nchar(x[i])), as.integer(sktype), as.double(kpar(kernel)$lambda)) if (nblocks > 0){ res1 <- matrix(0,blocksize,n) for(k in 1:nblocks){ upperl <- upperl + blocksize for(i in lowerl:(upperl)) { res1[i - (k-1)*blocksize, ] <- .Call(stringtv, as.character(x[i]), as.character(x), as.integer(length(x)), as.integer(nchar(x[i])), as.integer(nchar(x)), as.integer(sktype), as.double(kpar(kernel)$lambda)) / sqrt(resdiag[i]*resdiag) } vres[lowerl:upperl,] <- res1 %*% z lowerl <- upperl +1 } } if(lowerl <= n) { res1 <- matrix(0,n-lowerl+1,n) for(i in lowerl:n) { res1[i - nblocks*blocksize,] <- .Call(stringtv, as.character(x[i]), as.character(x), as.integer(length(x)), as.integer(nchar(x[i])), as.integer(nchar(x)), as.integer(sktype), as.double(kpar(kernel)$lambda)) / sqrt(resdiag[i]*resdiag) } vres[lowerl:n,] <- res1 %*% z } } else { nblocks <- floor(n/blocksize) lowerl <- 1 upperl <- 0 z <- as.matrix(z) if(dim(z)[1]!= n) stop("z rows must be equal to y length") dz <- dim(z)[2] vres <- matrix(0,n,dz) if (nblocks > 0){ res1 <- matrix(0,blocksize,n) for(k in 1:nblocks){ upperl <- upperl + blocksize for(i in lowerl:(upperl)) { res1[i - (k-1)*blocksize, ] <- .Call(stringtv, as.character(x[i]), as.character(x), as.integer(length(x)), as.integer(nchar(x[i])), as.integer(nchar(x)), as.integer(sktype), as.double(kpar(kernel)$lambda)) } vres[lowerl:upperl,] <- res1 %*% z lowerl <- upperl +1 } } if(lowerl <= n) { res1 <- matrix(0,n-lowerl+1,n) for(i in lowerl:n) { res1[i - nblocks*blocksize,] <- .Call(stringtv, as.character(x[i]), as.character(x), as.integer(length(x)), as.integer(nchar(x[i])), as.integer(nchar(x)), as.integer(sktype), as.double(kpar(kernel)$lambda)) } vres[lowerl:n,] <- res1 %*% z } } } if (!is.null(y)){ if(normalized == TRUE){ nblocks <- floor(n/blocksize) lowerl <- 1 upperl <- 0 m <- length(y) z <- as.matrix(z) if(dim(z)[1]!= m) stop("z rows must be equal to y length") resdiag1 <- rep(0,m) dz <- dim(z)[2] vres <- matrix(0,n,dz) for(i in 1:n) resdiag[i] <- .Call(stringtv, as.character(x[i]), as.character(x[i]), as.integer(1), as.integer(nchar(x[i])), as.integer(nchar(x[i])), as.integer(sktype), as.double(kpar(kernel)$lambda)) for(i in 1:m) resdiag1[i] <- .Call(stringtv, as.character(y[i]), as.character(y[i]), as.integer(1), as.integer(nchar(y[i])), as.integer(nchar(y[i])), as.integer(sktype), 
as.double(kpar(kernel)$lambda)) if (nblocks > 0){ res1 <- matrix(0,blocksize,m) for(k in 1:nblocks){ upperl <- upperl + blocksize for(i in lowerl:(upperl)) { res1[i - (k-1)*blocksize, ] <- .Call(stringtv, as.character(x[i]), as.character(y), as.integer(length(y)), as.integer(nchar(x[i])), as.integer(nchar(y)), as.integer(sktype), as.double(kpar(kernel)$lambda)) / sqrt(resdiag[i]*resdiag1) } vres[lowerl:upperl,] <- res1 %*% z lowerl <- upperl +1 } } if(lowerl <= n) { res1 <- matrix(0,n-lowerl+1,m) for(i in lowerl:n) { res1[i - nblocks*blocksize,] <- .Call(stringtv, as.character(x[i]), as.character(y), as.integer(length(y)), as.integer(nchar(x[i])), as.integer(nchar(y)), as.integer(sktype), as.double(kpar(kernel)$lambda)) / sqrt(resdiag[i]*resdiag1) } vres[lowerl:n,] <- res1 %*% z } } else { nblocks <- floor(n/blocksize) lowerl <- 1 upperl <- 0 m <- length(y) z <- as.matrix(z) if(dim(z)[1]!= m) stop("z rows must be equal to y length") dz <- dim(z)[2] vres <- matrix(0,n,dz) if (nblocks > 0){ res1 <- matrix(0,blocksize,m) for(k in 1:nblocks){ upperl <- upperl + blocksize for(i in lowerl:(upperl)) { res1[i - (k-1)*blocksize, ] <- .Call(stringtv, as.character(x[i]), as.character(y), as.integer(length(y)), as.integer(nchar(x[i])), as.integer(nchar(y)), as.integer(sktype), as.double(kpar(kernel)$lambda)) } vres[lowerl:upperl,] <- res1 %*% z lowerl <- upperl +1 } } if(lowerl <= n) { res1 <- matrix(0,n-lowerl+1,m) for(i in lowerl:n) { res1[i - nblocks*blocksize,] <- .Call(stringtv, as.character(x[i]), as.character(y), as.integer(length(y)), as.integer(nchar(x[i])), as.integer(nchar(y)), as.integer(sktype), as.double(kpar(kernel)$lambda)) } vres[lowerl:n,] <- res1 %*% z } } } } return(vres) } setMethod("kernelMult",signature(kernel="stringkernel"),kernelMult.stringkernel) ## kernelPol return the quadratic form of a kernel matrix ## kernelPol returns the scalar product of x y componentwise with polarities ## of z and k kernelPol <- function(kernel, x, y=NULL, z, k=NULL) { if(!is(x,"matrix")) stop("x must be a matrix") if(!is(y,"matrix")&&!is.null(y)) stop("y must be a matrix") if(!is(z,"matrix")&&!is(z,"vector")) stop("z must ba a matrix or a vector") n <- nrow(x) z <- as.matrix(z) if(!dim(z)[1]==n) stop("z must have the length equal to x colums") res1 <- matrix(rep(0,n*n), ncol = n) if (is.null(y)) { for(i in 1:n) { for(j in i:n) { res1[i,j] <- kernel(x[i,],x[j,])*z[j]*z[i] } } res1 <- res1 + t(res1) diag(res1) <- diag(res1)/2 } if (is(x,"matrix") && is(y,"matrix")){ m <- dim(y)[1] if(is.null(k)) stop("k not specified!") k <- as.matrix(k) if(!dim(x)[2]==dim(y)[2]) stop("matrixes must have the same number of columns") if(!dim(z)[2]==dim(k)[2]) stop("z and k vectors must have the same number of columns") if(!dim(x)[1]==dim(z)[1]) stop("z and x must have the same number of rows") if(!dim(y)[1]==dim(k)[1]) stop("y and k must have the same number of rows") res1 <- matrix(0,dim(x)[1],dim(y)[1]) for(i in 1:n) { for(j in 1:m) { res1[i,j] <- kernel(x[i,],y[j,])*z[i]*k[j] } } } return(res1) } setGeneric("kernelPol", function(kernel, x, y=NULL, z, k = NULL) standardGeneric("kernelPol")) kernelPol.rbfkernel <- function(kernel, x, y=NULL, z, k=NULL) { if(!is(y,"matrix")&&!is.null(y)&&!is(y,"vector")) stop("y must be a matrix a vector or NULL") if(!is(z,"matrix")&&!is(z,"vector")) stop("z must be a matrix or a vector") if(!is(k,"matrix")&&!is(k,"vector")&&!is.null(k)) stop("k must be a matrix or a vector") if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) sigma <- kpar(kernel)$sigma n <- 
dim(x)[1] dota <- rowSums(x*x)/2 z <- as.matrix(z) if(!dim(z)[1]==n) stop("z must have the length equal to x colums") if (is.null(y)) { if(is(z,"matrix")&&!dim(z)[1]==n) stop("z must have size equal to x colums") res <- crossprod(t(x)) for (i in 1:n) res[i,] <- z[i,]*(exp(2*sigma*(res[i,] - dota - rep(dota[i],n)))*z) return(res) } if (is(y,"matrix")) { if(is.null(k)) stop("k not specified!") m <- dim(y)[1] k <- as.matrix(k) if(!dim(k)[1]==m) stop("k must have equal rows to y") if(!dim(x)[2]==dim(y)[2]) stop("matrixes must have the same number of columns") dotb <- rowSums(y*y)/2 res <- x%*%t(y) for( i in 1:m)#2*sigma or sigma res[,i]<- k[i,]*(exp(2*sigma*(res[,i] - dota - rep(dotb[i],n)))*z) return(res) } } setMethod("kernelPol",signature(kernel="rbfkernel"),kernelPol.rbfkernel) kernelPol.laplacekernel <- function(kernel, x, y=NULL, z, k=NULL) { if(!is(y,"matrix")&&!is.null(y)&&!is(y,"vector")) stop("y must be a matrix, vector or NULL") if(!is(z,"matrix")&&!is(z,"vector")) stop("z must be a matrix or a vector") if(!is(k,"matrix")&&!is(k,"vector")&&!is.null(k)) stop("k must be a matrix or a vector") sigma <- kpar(kernel)$sigma if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) n <- dim(x)[1] dota <- rowSums(x*x)/2 z <- as.matrix(z) if(!dim(z)[1]==n) stop("z must have the length equal to x colums") if (is.null(y)) { if(is(z,"matrix")&&!dim(z)[1]==n) stop("z must have size equal to x colums") res <- crossprod(t(x)) for (i in 1:n) res[i,] <- z[i,]*(exp(-sigma*sqrt(-round(2*(res[i,] - dota - rep(dota[i],n)),9)))*z) return(res) } if (is(y,"matrix")) { if(is.null(k)) stop("k not specified!") m <- dim(y)[1] k <- as.matrix(k) if(!dim(k)[1]==m) stop("k must have equal rows to y") if(!dim(x)[2]==dim(y)[2]) stop("matrixes must have the same number of columns") dotb <- rowSums(y*y)/2 res <- x%*%t(y) for( i in 1:m)#2*sigma or sigma res[,i]<- k[i,]*(exp(-sigma*sqrt(-round(2*(res[,i] - dota - rep(dotb[i],n)),9)))*z) return(res) } } setMethod("kernelPol",signature(kernel="laplacekernel"),kernelPol.laplacekernel) kernelPol.besselkernel <- function(kernel, x, y=NULL, z, k=NULL) { if(!is(y,"matrix")&&!is.null(y)&&!is(y,"vector")) stop("y must be a matrix or NULL") if(!is(z,"matrix")&&!is(z,"vector")) stop("z must be a matrix or a vector") if(!is(k,"matrix")&&!is(k,"vector")&&!is.null(k)) stop("k must be a matrix or a vector") sigma <- kpar(kernel)$sigma if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) nu <- kpar(kernel)$order ni <- kpar(kernel)$degree n <- dim(x)[1] lim <- 1/(gamma(nu + 1)*2^nu) dota <- rowSums(x*x)/2 z <- as.matrix(z) if(!dim(z)[1]==n) stop("z must have the length equal to x colums") if (is.null(y)) { if(is(z,"matrix")&&!dim(z)[1]==n) stop("z must have size equal to x colums") res <- crossprod(t(x)) for (i in 1:n) { xx <- sigma*sqrt(-round(2*(res[i,] - dota - rep(dota[i],n)),9)) res[i,] <- besselJ(xx,nu)*(xx^(-nu)) res[i,which(xx < 10e-5)] <- lim res[i,] <- z[i,]*(((res[i,]/lim)^ni)*z) } return(res) } if (is(y,"matrix")) { if(is.null(k)) stop("k not specified!") m <- dim(y)[1] if(!dim(k)[1]==m) stop("k must have equal rows to y") k <- as.matrix(k) if(!dim(x)[2]==dim(y)[2]) stop("matrixes must have the same number of columns") dotb <- rowSums(y*y)/2 res <- x%*%t(y) for( i in 1:m){#2*sigma or sigma xx <- sigma*sqrt(-round(2*(res[,i] - dota - rep(dotb[i],n)),9)) res[,i] <- besselJ(xx,nu)*(xx^(-nu)) res[which(xx<10e-5),i] <- lim res[,i]<- k[i,]*(((res[,i]/lim)^ni)*z) } return(res) } } 
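## ---------------------------------------------------------------------
## Illustrative sketch (added note, not part of the original sources):
## kernelPol() computes the polarity-weighted kernel matrix
## H[i,j] = z[i] * k(x[i,], x[j,]) * z[j], i.e. the quadratic form that
## appears in SVM-type dual problems. On a small dense problem it agrees
## with forming the full kernel matrix explicitly:
if (FALSE) {
  x <- matrix(rnorm(20), 10, 2)
  z <- sign(rnorm(10))                       # +/-1 polarities
  rbf <- rbfdot(sigma = 0.1)
  H1 <- kernelPol(rbf, x, z = z)
  H2 <- kernelMatrix(rbf, x) * (z %*% t(z))  # dense reference
  max(abs(H1 - H2))                          # ~0 up to rounding
}
## ---------------------------------------------------------------------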
setMethod("kernelPol",signature(kernel="besselkernel"),kernelPol.besselkernel) kernelPol.anovakernel <- function(kernel, x, y=NULL, z, k=NULL) { if(!is(y,"matrix")&&!is.null(y)&&!is(y,"vector")) stop("y must be a matrix or NULL") if(!is(z,"matrix")&&!is(z,"vector")) stop("z must be a matrix or a vector") if(!is(k,"matrix")&&!is(k,"vector")&&!is.null(k)) stop("k must be a matrix or a vector") sigma <- kpar(kernel)$sigma degree <- kpar(kernel)$degree if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) n <- dim(x)[1] z <- as.matrix(z) if(!dim(z)[1]==n) stop("z must have the length equal to x colums") if (is.null(y)) { if(is(z,"matrix")&&!dim(z)[1]==n) stop("z must have size equal to x colums") a <- matrix(0, dim(x)[2], n) res <- matrix(0,n,n) for (i in 1:n) { a[rep(TRUE,dim(x)[2]), rep(TRUE,n)] <- x[i,] res[i,]<- z[i,]*((colSums(exp( - sigma*(a - t(x))^2))^degree)*z) } return(res) } if (is(y,"matrix")) { if(is.null(k)) stop("k not specified!") m <- dim(y)[1] k <- as.matrix(k) if(!dim(k)[1]==m) stop("k must have equal rows to y") if(!dim(x)[2]==dim(y)[2]) stop("matrixes must have the same number of columns") b <- matrix(0, dim(x)[2],m) res <- matrix(0, dim(x)[1],m) for( i in 1:n) { b[rep(TRUE,dim(x)[2]), rep(TRUE,m)] <- x[i,] res[i,] <- z[i,]*((colSums(exp( - sigma*(b - t(y))^2))^degree)*k) } return(res) } } setMethod("kernelPol",signature(kernel="anovakernel"),kernelPol.anovakernel) kernelPol.splinekernel <- function(kernel, x, y=NULL, z, k=NULL) { if(!is(y,"matrix")&&!is.null(y)&&!is(y,"vector")) stop("y must be a matrix or NULL") if(!is(z,"matrix")&&!is(z,"vector")) stop("z must be a matrix or a vector") if(!is(k,"matrix")&&!is(k,"vector")&&!is.null(k)) stop("k must be a matrix or a vector") if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) sigma <- kpar(kernel)$sigma degree <- kpar(kernel)$degree n <- dim(x)[1] z <- as.vector(z) if(!(length(z)==n)) stop("z must have the length equal to x colums") if (is.null(y)) { res <- kernelMatrix(kernel,x) return(unclass(z*t(res*z))) } if (is(y,"matrix")) { if(is.null(k)) stop("k not specified!") m <- dim(y)[1] k <- as.vector(k) if(!(length(k)==m)) stop("k must have length equal to rows of y") res <- kernelMatrix(kernel,x,y) return(unclass(k*t(res*z))) } } setMethod("kernelPol",signature(kernel="splinekernel"),kernelPol.splinekernel) kernelPol.polykernel <- function(kernel, x, y=NULL, z, k=NULL) { if(!is(y,"matrix")&&!is.null(y)&&!is(y,"vector")) stop("y must be a matrix or NULL") if(!is(z,"matrix")&&!is(z,"vector")) stop("z must be a matrix or a vector") if(!is(k,"matrix")&&!is(k,"vector")&&!is.null(k)) stop("k must be a matrix or a vector") if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) degree <- kpar(kernel)$degree scale <- kpar(kernel)$scale offset <- kpar(kernel)$offset n <- dim(x)[1] if(is(z,"matrix")) { z <- as.vector(z) } m <- length(z) if(!(m==n)) stop("z must have the length equal to x colums") if (is.null(y)) { res <- z*t(((scale*crossprod(t(x))+offset)^degree)*z) return(res) } if (is(y,"matrix")) { if(is.null(k)) stop("k not specified!") m <- dim(y)[1] k <- as.vector(k) if(!(length(k)==m)) stop("k must have length equal to rows of y") if(!dim(x)[2]==dim(y)[2]) stop("matrixes must have the same number of columns") res<- k*t(((scale*x%*%t(y) + offset)^degree)*z) return(res) } } setMethod("kernelPol",signature(kernel="polykernel"),kernelPol.polykernel) kernelPol.tanhkernel <- function(kernel, x, y=NULL, z, k=NULL) { if(!is(y,"matrix")&&!is.null(y)&&!is(y,"vector")) 
stop("y must be a matrix, vector or NULL") if(!is(z,"matrix")&&!is(z,"vector")) stop("z must be a matrix or a vector") if(!is(k,"matrix")&&!is(k,"vector")&&!is.null(k)) stop("k must be a matrix or a vector") if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) scale <- kpar(kernel)$scale offset <- kpar(kernel)$offset n <- dim(x)[1] if(is(z,"matrix")) { z <- as.vector(z) } m <- length(z) if(!(m==n)) stop("z must have the length equal to x colums") if (is.null(y)) { res <- z*t(tanh(scale*crossprod(t(x))+offset)*z) return(res) } if (is(y,"matrix")) { if(is.null(k)) stop("k not specified!") m <- dim(y)[1] k <- as.vector(k) if(!(length(k)==m)) stop("k must have length equal rows to y") if(!dim(x)[2]==dim(y)[2]) stop("matrixes x, y must have the same number of columns") res<- k*t(tanh(scale*x%*%t(y) + offset)*z) return(res) } } setMethod("kernelPol",signature(kernel="tanhkernel"),kernelPol.tanhkernel) kernelPol.vanillakernel <- function(kernel, x, y=NULL, z, k=NULL) { if(!is(y,"matrix")&&!is.null(y)&&!is(y,"vector")) stop("y must be a matrix, vector or NULL") if(!is(z,"matrix")&&!is(z,"vector")) stop("z must be a matrix or a vector") if(!is(k,"matrix")&&!is(k,"vector")&&!is.null(k)) stop("k must be a matrix or a vector") n <- dim(x)[1] if(is(z,"matrix")) { z <- as.vector(z) } m <- length(z) if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) if(!(m==n)) stop("z must have the length equal to x colums") if (is.null(y)) { res <- z*t(crossprod(t(x))*z) return(res) } if (is(y,"matrix")) { if(is.null(k)) stop("k not specified!") m <- dim(y)[1] k <- as.vector(k) if(!length(k)==m) stop("k must have length equal rows to y") if(!dim(x)[2]==dim(y)[2]) stop("matrixes x, y must have the same number of columns") for( i in 1:m) res<- k*t(x%*%t(y)*z) return(res) } } setMethod("kernelPol",signature(kernel="vanillakernel"),kernelPol.vanillakernel) kernelPol.stringkernel <- function(kernel, x, y=NULL ,z ,k=NULL) { n <- length(x) res1 <- matrix(rep(0,n*n), ncol = n) resdiag <- rep(0,n) if(is(x,"list")) x <- sapply(x,paste,collapse="") if(is(y,"list")) y <- sapply(y,paste,collapse="") normalized = kpar(kernel)$normalized if(normalized == TRUE) kernel <- stringdot(length = kpar(kernel)$length, type = kpar(kernel)$type, lambda = kpar(kernel)$lambda, normalized = FALSE) z <- as.matrix(z) ## y is null if (kpar(kernel)$type == "sequence" |kpar(kernel)$type == "string"|kpar(kernel)$type == "fullstring") { if(is.null(y)){ if(normalized == TRUE){ ## calculate diagonal elements first, and use them to normalize for (i in 1:n) resdiag[i] <- kernel(x[[i]],x[[i]]) for(i in 1:n) { for(j in (i:n)[-1]) { res1[i,j] <- (z[i,]*kernel(x[[i]],x[[j]])*z[j,])/sqrt(resdiag[i]*resdiag[j]) } } res1 <- res1 + t(res1) diag(res1) <- z^2 } else { for (i in 1:n) resdiag[i] <- kernel(x[[i]],x[[i]]) for(i in 1:n) { for(j in (i:n)[-1]) { res1[i,j] <- (z[i,]*kernel(x[[i]],x[[j]])*z[j,]) } } res1 <- res1 + t(res1) diag(res1) <- resdiag * z^2 } } if (!is.null(y)){ if(normalized == TRUE){ m <- length(y) res1 <- matrix(0,n,m) resdiag1 <- rep(0,m) k <- as.matrix(k) for(i in 1:n) resdiag[i] <- kernel(x[[i]],x[[i]]) for(i in 1:m) resdiag1[i] <- kernel(y[[i]],y[[i]]) for(i in 1:n) { for(j in 1:m) { res1[i,j] <- (z[i,]*kernel(x[[i]],y[[j]])*k[j,])/sqrt(resdiag[i]*resdiag1[j]) } } } } else{ m <- length(y) res1 <- matrix(0,n,m) k <- as.matrix(k) for(i in 1:n) { for(j in 1:m) { res1[i,j] <- (z[i,]*kernel(x[[i]],y[[j]])*k[j,]) } } } } else { switch(kpar(kernel)$type, "exponential" = sktype <- 2, "constant" = sktype <- 1, 
"spectrum" = sktype <- 3, "boundrange" = sktype <- 4) if(is(x,"list")) x <- unlist(x) if(is(y,"list")) y <- unlist(y) x <- paste(x,"\n",seq="") if(!is.null(y)) y <- paste(y,"\n",seq="") if(is.null(y)) ret <- matrix(0, length(x),length(x)) else ret <- matrix(0,length(x),length(y)) if(is.null(y)){ for( i in 1:length(x)) ret[i,] <- .Call(stringtv, as.character(x[i]), as.character(x), as.integer(length(x)), as.integer(nchar(x[i])), as.integer(nchar(x)), as.integer(sktype), as.double(kpar(kernel)$lambda)) res1 <- k*ret*k } else{ for( i in 1:length(x)) ret[i,] <- .Call(stringtv, as.character(x[i]), as.character(y), as.integer(length(x)), as.integer(nchar(x[i])), as.integer(nchar(y)), as.integer(sktype), as.double(kpar(kernel)$lambda)) res1 <- k*ret*z } if(normalized == TRUE){ if(is.null(y)){ ret <- t((1/sqrt(diag(ret)))*t(ret*(1/sqrt(diag(ret))))) res1 <- k*ret*k } else{ norm1 <- rep(0,length(x)) norm2 <- rep(0,length(y)) for( i in 1:length(x)) norm1[i] <- .Call(stringtv, as.character(x[i]), as.character(x[i]), as.integer(1), as.integer(nchar(x[i])), as.integer(nchar(x[i])), as.integer(sktype), as.double(kpar(kernel)$lambda)) for( i in 1:length(y)) norm2[i] <- .Call(stringtv, as.character(y[i]), as.character(y[i]), as.integer(1), as.integer(nchar(y[i])), as.integer(nchar(y[i])), as.integer(sktype), as.double(kpar(kernel)$lambda)) ret <- t((1/sqrt(norm2))*t(ret*(1/sqrt(norm1)))) res1 <- k*ret*z } } } return(res1) } setMethod("kernelPol",signature(kernel="stringkernel"),kernelPol.stringkernel) ## kernelFast returns the kernel matrix, its usefull in algorithms ## which require iterative kernel matrix computations kernelFast <- function(kernel, x, y, a) { return(kernelMatrix(kernel,x,y)) } setGeneric("kernelFast",function(kernel, x, y, a) standardGeneric("kernelFast")) kernelFast.rbfkernel <- function(kernel, x, y, a) { if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) if(!is(y,"matrix")) stop("y must be a matrix or a vector") sigma = kpar(kernel)$sigma n <- dim(x)[1] dota <- a/2 if (is(x,"matrix") && is(y,"matrix")){ if (!(dim(x)[2]==dim(y)[2])) stop("matrixes must have the same number of columns") m <- dim(y)[1] dotb <- rowSums(y*y)/2 res <- x%*%t(y) for( i in 1:m) res[,i]<- exp(2*sigma*(res[,i] - dota - rep(dotb[i],n))) return(res) } } setMethod("kernelFast",signature(kernel="rbfkernel"),kernelFast.rbfkernel) kernelFast.laplacekernel <- function(kernel, x, y, a) { if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) if(!is(y,"matrix")) stop("y must be a matrix or a vector") sigma = kpar(kernel)$sigma n <- dim(x)[1] dota <- a/2 if (is(x,"matrix") && is(y,"matrix")){ if (!(dim(x)[2]==dim(y)[2])) stop("matrixes must have the same number of columns") m <- dim(y)[1] dotb <- rowSums(y*y)/2 res <- x%*%t(y) for( i in 1:m) res[,i]<- exp(-sigma*sqrt(round(-2*(res[,i] - dota - rep(dotb[i],n)),9))) return(res) } } setMethod("kernelFast",signature(kernel="laplacekernel"),kernelFast.laplacekernel) kernelFast.besselkernel <- function(kernel, x, y, a) { if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) if(!is(y,"matrix")) stop("y must be a matrix or a vector") sigma = kpar(kernel)$sigma nu = kpar(kernel)$order ni = kpar(kernel)$degree n <- dim(x)[1] lim <- 1/(gamma(nu+1)*2^(nu)) dota <- a/2 if (is(x,"matrix") && is(y,"matrix")){ if (!(dim(x)[2]==dim(y)[2])) stop("matrixes must have the same number of columns") m <- dim(y)[1] dotb <- rowSums(y*y)/2 res <- x%*%t(y) for( i in 1:m){ xx <- sigma*sqrt(round(-2*(res[,i] - dota - rep(dotb[i],n)),9)) 
res[,i] <- besselJ(xx,nu)*(xx^(-nu)) res[which(xx<10e-5),i] <- lim } return((res/lim)^ni) } } setMethod("kernelFast",signature(kernel="besselkernel"),kernelFast.besselkernel) kernelFast.anovakernel <- function(kernel, x, y, a) { return(kernelMatrix(kernel,x,y)) } setMethod("kernelFast",signature(kernel="anovakernel"),kernelFast.anovakernel) kernelFast.polykernel <- function(kernel, x, y, a) { return(kernelMatrix(kernel,x,y)) } setMethod("kernelFast",signature(kernel="polykernel"),kernelFast.polykernel) kernelFast.vanilla <- function(kernel, x, y, a) { return(kernelMatrix(kernel,x,y)) } setMethod("kernelFast",signature(kernel="vanillakernel"),kernelFast.vanilla) kernelFast.tanhkernel <- function(kernel, x, y, a) { return(kernelMatrix(kernel,x,y)) } setMethod("kernelFast",signature(kernel="tanhkernel"),kernelFast.tanhkernel) kernelFast.stringkernel <- function(kernel, x, y, a) { return(kernelMatrix(kernel,x,y)) } setMethod("kernelFast",signature(kernel="stringkernel"),kernelFast.stringkernel) kernelFast.splinekernel <- function(kernel, x, y, a) { return(kernelMatrix(kernel,x,y)) } setMethod("kernelFast",signature(kernel="splinekernel"),kernelFast.splinekernel) kernlab/R/csi.R0000644000175100001440000003653711304023134013015 0ustar hornikusers## 15.09.2005 alexandros setGeneric("csi", function(x, y, kernel="rbfdot",kpar=list(sigma=0.1), rank, centering = TRUE, kappa =0.99 ,delta = 40 ,tol = 1e-4) standardGeneric("csi")) setMethod("csi",signature(x="matrix"), function(x, y, kernel="rbfdot",kpar=list(sigma=0.1), rank, centering = TRUE, kappa =0.99 ,delta = 40 ,tol = 1e-5) { ## G,P,Q,R,error1,error2,error,predicted.gain,true.gain ## INPUT ## x : data ## y : target vector n x d ## m : maximal rank ## kappa : trade-off between approximation of K and prediction of y (suggested: .99) ## centering : 1 if centering, 0 otherwise (suggested: 1) ## delta : number of columns of cholesky performed in advance (suggested: 40) ## tol : minimum gain at iteration (suggested: 1e-4) ## OUTPUT ## G : Cholesky decomposition -> K(P,P) is approximated by G*G' ## P : permutation matrix ## Q,R : QR decomposition of G (or center(G) if centering) ## error1 : tr(K-G*G')/tr(K) at each step of the decomposition ## error2 : ||y-Q*Q'*y||.F^2 / ||y||.F^2 at each step of the decomposition ## predicted.gain : predicted gain before adding each column ## true.gain : actual gain after adding each column n <- dim(x)[1] d <- dim(y)[2] if(n != dim(y)[1]) stop("Labels y and data x dont match") if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") m <- rank ## make sure rank is smaller than n m <- min(n-2,m) G <- matrix(0,n,min(m+delta,n)) ## Cholesky factor diagK <- rep(drop(kernel(x[1,],x[1,])),n) P <- 1:n ## pivots Q <- matrix(0,n,min(m+delta,n)) ## Q part of the QR decomposition R <- matrix(0,min(m+delta,n),min(m+delta,n)) ## R part of the QR decomposition traceK <- sum(diagK) lambda <- (1-kappa)/traceK if (centering) y <- y - (1/n) * t(matrix(colSums(y),d,n)) sumy2 <- sum(y^2) mu <- kappa/sumy2 error1 <- traceK error2 <- sumy2 predictedgain <- truegain <- rep(0,min(m+delta,n)) k <- 0 # current index of the Cholesky decomposition kadv <- 0 # current index of the look ahead steps Dadv <- diagK D <- diagK ## makes sure that delta is smaller than n - 2 delta <- min(delta,n - 2) ## approximation cost cached quantities A1 <- matrix(0,n,1) A2 <- matrix(0,n,1) A3 <- matrix(0,n,1) GTG <- 
matrix(0,m+delta,m+delta) QTy <- matrix(0,m+delta,d) QTyyTQ <- matrix(0,m+delta,m+delta) ## first performs delta steps of Cholesky and QR decomposition if(delta > 0) for (i in 1:delta) { kadv <- kadv + 1 ## select best index diagmax <- Dadv[kadv] jast <- 1 for (j in 1:(n-kadv+1)) { if (Dadv[j+kadv-1] > diagmax/0.99){ diagmax <- Dadv[j+kadv-1] jast <- j } } if (diagmax < 1e-12){ kadv <- kadv - 1 ## all pivots are too close to zero, stops break ## this can only happen if the matrix has rank less than delta } else{ jast <- jast + kadv-1 ## permute indices P[c(kadv,jast)] <- P[c(jast,kadv)] Dadv[c(kadv, jast)] <- Dadv[c(jast, kadv)] D[c(kadv, jast)] <- D[c(jast, kadv)] A1[c(kadv, jast)] <- A1[c(jast, kadv)] G[c(kadv, jast),1:kadv-1] <- G[c(jast,kadv),1:kadv-1] Q[c(kadv, jast),1:kadv-1] <- Q[c(jast, kadv),1:kadv-1] ## compute new Cholesky column G[kadv,kadv] <- Dadv[kadv] G[kadv,kadv] <- sqrt(G[kadv,kadv]) newKcol <- kernelMatrix(kernel, x[P[(kadv+1):n],,drop = FALSE],x[P[kadv],,drop=FALSE]) G[(kadv+1):n,kadv]<- (1/G[kadv,kadv])*(newKcol - G[(kadv+1):n,1:kadv-1,drop=FALSE] %*% t(G[kadv,1:kadv-1,drop=FALSE])) ## update diagonal Dadv[(kadv+1):n] <- Dadv[(kadv+1):n] - G[(kadv+1):n,kadv]^2 Dadv[kadv] <- 0 ## performs QR if (centering) Gcol <- G[,kadv,drop=FALSE] - (1/n) * matrix(sum(G[,kadv]),n,1) else Gcol <- G[,kadv, drop=FALSE] R[1:kadv-1,kadv] <- crossprod(Q[,1:kadv-1, drop=FALSE], Gcol) Q[,kadv] <- Gcol - Q[,1:kadv-1,drop=FALSE] %*% R[1:kadv-1,kadv,drop=FALSE] R[kadv,kadv] <- sqrt(sum(Q[,kadv]^2)) Q[,kadv] <- Q[,kadv]/drop(R[kadv,kadv]) ## update cached quantities if (centering) GTG[1:kadv,kadv] <- crossprod(G[,1:kadv], G[,kadv]) else GTG[1:kadv,kadv] <- crossprod(R[1:kadv,1:kadv], R[1:kadv,kadv]) GTG[kadv,1:kadv] <- t(GTG[1:kadv,kadv]) QTy[kadv,] <- crossprod(Q[,kadv], y[P,,drop = FALSE]) QTyyTQ[kadv,1:kadv] <- QTy[kadv,,drop=FALSE] %*% t(QTy[1:kadv,,drop=FALSE]) QTyyTQ[1:kadv,kadv] <- t(QTyyTQ[kadv,1:kadv]) ## update costs A1[kadv:n] <- A1[kadv:n] + GTG[kadv,kadv] * G[kadv:n,kadv]^2 A1[kadv:n] <- A1[kadv:n] + 2 * G[kadv:n,kadv] *(G[kadv:n,1:kadv-1] %*% GTG[1:kadv-1,kadv,drop=FALSE]) } } ## compute remaining costs for all indices A2 <- rowSums(( G[,1:kadv,drop=FALSE] %*% crossprod(R[1:kadv,1:kadv], QTy[1:kadv,,drop=FALSE]))^2) A3 <- rowSums((G[,1:kadv,drop=FALSE] %*% t(R[1:kadv,1:kadv]))^2) ## start main loop while (k < m){ k <- k +1 ## compute the gains in approximation for all remaining indices dJK <- matrix(0,(n-k+1),1) for (i in 1:(n-k+1)) { kast <- k+i-1 if (D[kast] < 1e-12) dJK[i] <- -1e100 ## this column is already generated by already ## selected columns -> cannot be selected else { dJK[i] <- A1[kast] if (kast > kadv) ## add eta dJK[i] <- dJK[i] + D[kast]^2 - (D[kast] - Dadv[kast])^2 dJK[i] <- dJK[i] / D[kast] } } dJy <- matrix(0,n-k+1,1) if (kadv > k){ for (i in 1:(n-k+1)) { kast <- k+i-1 if (A3[kast] < 1e-12) dJy[i] <- 0 else dJy[i] <- A2[kast] / A3[kast] } } ## select the best column dJ <- lambda * dJK + mu * dJy diagmax <- -1 jast <- 0 for (j in 1:(n-k+1)) { if (D[j+k-1] > 1e-12) if (dJ[j] > diagmax/0.9){ diagmax <- dJ[j] jast <- j } } if (jast==0) { ## no more good indices, exit k <- k-1 break } jast <- jast + k - 1 predictedgain[k] <- diagmax ## performs one cholesky + QR step: ## if new pivot not already selected, use pivot ## otherwise, select new look ahead index that maximize Dadv if (jast > kadv){ newpivot <- jast jast <- kadv + 1 } else{ a <- 1e-12 b <- 0 for (j in 1:(n-kadv)) { if (Dadv[j+kadv] > a/0.99){ a <- Dadv[j+kadv] b <- j+kadv } } if (b==0) newpivot <- 0 else 
newpivot <- b } if (newpivot > 0){ ## performs steps kadv <- kadv + 1 ## permute P[c(kadv, newpivot)] <- P[c(newpivot, kadv)] Dadv[c(kadv, newpivot)] <- Dadv[c(newpivot, kadv)] D[c(kadv, newpivot)] <- D[c(newpivot, kadv)] A1[c(kadv, newpivot)] <- A1[c(newpivot, kadv)] A2[c(kadv, newpivot)] <- A2[c(newpivot, kadv)] A3[c(kadv, newpivot)] <- A3[c(newpivot, kadv)] G[c(kadv, newpivot),1:kadv-1] <- G[c(newpivot, kadv),1:kadv-1] Q[c(kadv, newpivot),1:kadv-1] <- Q[ c(newpivot, kadv),1:kadv-1] ## compute new Cholesky column G[kadv,kadv] <- Dadv[kadv] G[kadv,kadv] <- sqrt(G[kadv,kadv]) newKcol <- kernelMatrix(kernel,x[P[(kadv+1):n],,drop=FALSE],x[P[kadv],,drop=FALSE]) G[(kadv+1):n,kadv] <- 1/G[kadv,kadv]*( newKcol - G[(kadv+1):n,1:kadv-1,drop=FALSE]%*%t(G[kadv,1:kadv-1,drop=FALSE])) ## update diagonal Dadv[(kadv+1):n] <- Dadv[(kadv+1):n] - G[(kadv+1):n,kadv]^2 Dadv[kadv] <- 0 ## performs QR if (centering) Gcol <- G[,kadv,drop=FALSE] - 1/n * matrix(sum(G[,kadv]),n,1 ) else Gcol <- G[,kadv,drop=FALSE] R[1:kadv-1,kadv] <- crossprod(Q[,1:kadv-1], Gcol) Q[,kadv] <- Gcol - Q[,1:kadv-1, drop=FALSE] %*% R[1:kadv-1,kadv, drop=FALSE] R[kadv,kadv] <- sum(abs(Q[,kadv])^2)^(1/2) Q[,kadv] <- Q[,kadv] / drop(R[kadv,kadv]) ## update the cached quantities if (centering) GTG[k:kadv,kadv] <- crossprod(G[,k:kadv], G[,kadv]) else GTG[k:kadv,kadv] <- crossprod(R[1:kadv,k:kadv], R[1:kadv,kadv]) GTG[kadv,k:kadv] <- t(GTG[k:kadv,kadv]) QTy[kadv,] <- crossprod(Q[,kadv], y[P,,drop =FALSE]) QTyyTQ[kadv,k:kadv] <- QTy[kadv,,drop = FALSE] %*% t(QTy[k:kadv,,drop = FALSE]) QTyyTQ[k:kadv,kadv] <- t(QTyyTQ[kadv,k:kadv]) ## update costs A1[kadv:n] <- A1[kadv:n] + GTG[kadv,kadv] * G[kadv:n,kadv]^2 A1[kadv:n] <- A1[kadv:n] + 2 * G[kadv:n,kadv] * (G[kadv:n,k:kadv-1,drop = FALSE] %*% GTG[k:kadv-1,kadv,drop=FALSE]) A3[kadv:n] <- A3[kadv:n] + G[kadv:n,kadv]^2 * sum(R[k:kadv,kadv]^2) temp <- crossprod(R[k:kadv,kadv,drop = FALSE], R[k:kadv,k:kadv-1,drop = FALSE]) A3[kadv:n] <- A3[kadv:n] + 2 * G[kadv:n,kadv] * (G[kadv:n,k:kadv-1] %*% t(temp)) temp <- crossprod(R[k:kadv,kadv,drop = FALSE], QTyyTQ[k:kadv,k:kadv,drop = FALSE]) temp1 <- temp %*% R[k:kadv,kadv,drop = FALSE] A2[kadv:n] <- A2[kadv:n] + G[kadv:n,kadv,drop = FALSE]^2 %*% temp1 temp2 <- temp %*% R[k:kadv,k:kadv-1] A2[kadv:n] <- A2[kadv:n] + 2 * G[kadv:n,kadv] * (G[kadv:n,k:kadv-1,drop=FALSE] %*% t(temp2)) } ## permute pivots in the Cholesky and QR decomposition between p,q p <- k q <- jast if (p < q){ ## store some quantities Gbef <- G[,p:q] Gbeftotal <- G[,k:kadv] GTGbef <- GTG[p:q,p:q] QTyyTQbef <- QTyyTQ[p:q,k:kadv] Rbef <- R[p:q,p:q] Rbeftotal <- R[k:kadv,k:kadv] tempG <- diag(1,q-p+1,q-p+1) tempQ <- diag(1,q-p+1,q-p+1) for (s in seq(q-1,p,-1)) { ## permute indices P[c(s, s+1)] <- P[c(s+1, s)] Dadv[c(s, s+1)] <- Dadv[c(s+1, s)] D[c(s, s+1)] <- D[c(s+1, s)] A1[c(s, s+1)] <- A1[c(s+1, s)] A2[c(s, s+1)] <- A2[c(s+1, s)] A3[c(s, s+1)] <- A3[c(s+1, s)] G[c(s, s+1),1:kadv] <- G[c(s+1,s), 1:kadv] Gbef[c(s, s+1),] <- Gbef[c(s+1, s),] Gbeftotal[c(s, s+1),] <- Gbeftotal[c(s+1, s),] Q[c(s, s+1),1:kadv] <- Q[c(s+1, s) ,1:kadv] ## update decompositions res <- .qr2(t(G[s:(s+1),s:(s+1)])) Q1 <- res$Q R1 <- res$R G[,s:(s+1)] <- G[,s:(s+1)] %*% Q1 G[s,(s+1)] <- 0 R[1:kadv,s:(s+1)] <- R[1:kadv,s:(s+1)] %*% Q1 res <- .qr2(R[s:(s+1),s:(s+1)]) Q2 <- res$Q R2 <- res$R R[s:(s+1),1:kadv] <- crossprod(Q2, R[s:(s+1),1:kadv]) Q[,s:(s+1)] <- Q[,s:(s+1)] %*% Q2 R[s+1,s] <- 0 ## update relevant quantities if( k <= (s-1) && s+2 <= kadv) nonchanged <- c(k:(s-1), (s+2):kadv) if( k <= (s-1) && s+2 > kadv) 
nonchanged <- k:(s-1) if( k > (s-1) && s+2 <= kadv) nonchanged <- (s+2):kadv GTG[nonchanged,s:(s+1)] <- GTG[nonchanged,s:(s+1)] %*% Q1 GTG[s:(s+1),nonchanged] <- t(GTG[nonchanged,s:(s+1)]) GTG[s:(s+1),s:(s+1)] <- crossprod(Q1, GTG[s:(s+1),s:(s+1)] %*% Q1) QTy[s:(s+1),] <- crossprod(Q2, QTy[s:(s+1),]) QTyyTQ[nonchanged,s:(s+1)] <- QTyyTQ[nonchanged,s:(s+1)] %*% Q2 QTyyTQ[s:(s+1),nonchanged] <- t(QTyyTQ[nonchanged,s:(s+1)]) QTyyTQ[s:(s+1),s:(s+1)] <- crossprod(Q2, QTyyTQ[s:(s+1),s:(s+1)] %*% Q2) tempG[,(s-p+1):(s-p+2)] <- tempG[,(s-p+1):(s-p+2)] %*% Q1 tempQ[,(s-p+1):(s-p+2)] <- tempQ[,(s-p+1):(s-p+2)] %*% Q2 } ## update costs tempG <- tempG[,1] tempGG <- GTGbef %*% tempG A1[k:n] <- A1[k:n] - 2 * G[k:n,k] * (Gbef[k:n,] %*% tempGG) # between p and q -> different if(k > (p-1) ) kmin <- 0 else kmin <- k:(p-1) if((q+1) > kadv) qmin <- 0 else qmin <- (q+1):kadv A1[k:n] <- A1[k:n] - 2 * G[k:n,k] * (G[k:n,kmin,drop=FALSE] %*% GTG[kmin,k,drop=FALSE]) # below p A1[k:n] <- A1[k:n] - 2 * G[k:n,k] * (G[k:n,qmin,drop=FALSE] %*% GTG[qmin,k,drop=FALSE]) # above q tempQ <- tempQ[,1] temp <- G[k:n,qmin,drop=FALSE] %*% t(R[k,qmin,drop=FALSE]) temp <- temp + G[k:n,kmin,drop=FALSE] %*% t(R[k,kmin,drop=FALSE]) temp <- temp + Gbef[k:n,] %*% crossprod(Rbef, tempQ) A3[k:n] <- A3[k:n] - temp^2 A2[k:n] <- A2[k:n] + temp^2 * QTyyTQ[k,k] temp2 <- crossprod(tempQ,QTyyTQbef) %*% Rbeftotal A2[k:n] <- A2[k:n] - 2 * temp * (Gbeftotal[k:n,,drop=FALSE] %*% t(temp2)) } else { ## update costs A1[k:n] <- A1[k:n] - 2 * G[k:n,k] * (G[k:n,k:kadv,drop=FALSE] %*% GTG[k:kadv,k,drop=FALSE]) A3[k:n]<- A3[k:n] - (G[k:n,k:kadv,drop=FALSE] %*% t(R[k,k:kadv,drop=FALSE]))^2 temp <- G[k:n,k:kadv,drop=FALSE] %*% t(R[k,k:kadv,drop=FALSE]) A2[k:n] <- A2[k:n] + (temp^2) * QTyyTQ[k,k] temp2 <- QTyyTQ[k,k:kadv,drop=FALSE] %*% R[k:kadv,k:kadv,drop=FALSE] A2[k:n] <- A2[k:n] - 2 * temp * (G[k:n,k:kadv,drop=FALSE] %*% t(temp2)) } ## update diagonal and other quantities (A1,B1) D[(k+1):n] <- D[(k+1):n] - G[(k+1):n,k]^2 D[k] <- 0 A1[k:n] <- A1[k:n] + GTG[k,k] * (G[k:n,k]^2) ## compute errors and true gains temp2 <- crossprod(Q[,k], y[P,]) temp2 <- sum(temp2^2) temp1 <- sum(G[,k]^2) truegain[k] <- temp1 * lambda + temp2 * mu error1[k+1] <- error1[k] - temp1 error2[k+1] <- error2[k] - temp2 if (truegain[k] < tol) break } ## reduce dimensions of decomposition G <- G[,1:k,drop=FALSE] Q <- Q[,1:k,drop=FALSE] R <- R[1:k,1:k,drop=FALSE] ## compute and normalize errors error <- lambda * error1 + mu * error2 error1 <- error1 / traceK error2 <- error2 / sumy2 repivot <- sort(P, index.return = TRUE)$ix return(new("csi",.Data=G[repivot, ,drop=FALSE],Q= Q[repivot,,drop = FALSE], R = R, pivots=repivot, diagresidues = error1, maxresiduals = error2, truegain = truegain, predgain = predictedgain)) }) ## I guess we can replace this with qr() .qr2 <- function(M) { ## QR decomposition for 2x2 matrices Q <- matrix(0,2,2) R <- matrix(0,2,2) x <- sqrt(M[1,1]^2 + M[2,1]^2) R[1,1] <- x Q[,1] <- M[,1]/x R[1,2] <- crossprod(Q[,1], M[,2]) Q[,2] <- M[,2] - R[1,2] * Q[,1] R[2,2] <- sum(abs(Q[,2])^2)^(1/2) Q[,2] <- Q[,2] / R[2,2] return(list(Q=Q,R=R)) } kernlab/R/kernelmatrix.R0000644000175100001440000000050311304023134014724 0ustar hornikusers setGeneric("as.kernelMatrix",function(x, center = FALSE) standardGeneric("as.kernelMatrix")) setMethod("as.kernelMatrix", signature(x = "matrix"), function(x, center = FALSE) { if(center){ m <- dim(x)[1] x <- t(t(x - colSums(x)/m) - rowSums(x)/m) + sum(x)/m^2 } return(new("kernelMatrix",.Data = x)) }) 
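## ---------------------------------------------------------------------
## Illustrative sketch (added note, not part of the original sources):
## for a symmetric kernel matrix K, as.kernelMatrix(K, center = TRUE)
## above performs the usual double centering K -> H K H with
## H = I - 11'/m, i.e. it centers the data in feature space before
## methods such as kpca() are applied to a user-supplied kernel matrix:
if (FALSE) {
  X  <- matrix(rnorm(30), 10, 3)
  K  <- kernelMatrix(rbfdot(sigma = 0.5), X)
  Kc <- as.kernelMatrix(K, center = TRUE)
  H  <- diag(10) - matrix(1/10, 10, 10)      # centering matrix
  max(abs(Kc - H %*% K %*% H))               # ~0 up to rounding
}
## ---------------------------------------------------------------------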
kernlab/R/kfa.R0000644000175100001440000001020212676464656013014 0ustar hornikusers ## This code takes the set x of vectors from the input space ## and does projection pursuit to find a good basis for x. ## ## The algorithm is described in Section 14.5 of ## Learning with Kernels by B. Schoelkopf and A. Smola, entitled ## Kernel Feature Analysis. ## ## created : 17.09.04 alexandros ## updated : setGeneric("kfa",function(x, ...) standardGeneric("kfa")) setMethod("kfa", signature(x = "formula"), function(x, data = NULL, na.action = na.omit, ...) { mt <- terms(x, data = data) if(attr(mt, "response") > 0) stop("response not allowed in formula") attr(mt, "intercept") <- 0 cl <- match.call() mf <- match.call(expand.dots = FALSE) mf$formula <- mf$x mf$... <- NULL mf[[1L]] <- quote(stats::model.frame) mf <- eval(mf, parent.frame()) Terms <- attr(mf, "terms") na.act <- attr(mf, "na.action") x <- model.matrix(mt, mf) res <- kfa(x, ...) ## fix up call to refer to the generic, but leave arg name as `formula' cl[[1]] <- as.name("kfa") kcall(res) <- cl attr(Terms,"intercept") <- 0 terms(res) <- Terms if(!is.null(na.act)) n.action(res) <- na.act return(res) }) setMethod("kfa",signature(x="matrix"), function(x, kernel="rbfdot", kpar=list(sigma=0.1), features = 0, subset = 59, normalize = TRUE, na.action = na.omit) { if(!is.matrix(x)) stop("x must be a matrix") x <- na.action(x) if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") ## initialize variables m <- dim(x)[1] if(subset > m) subset <- m if (features==0) features <- subset alpha <- matrix(0,subset,features) alphazero <- rep(1,subset) alphafeat <- matrix(0,features,features) idx <- -(1:subset) randomindex <- sample(1:m, subset) K <- kernelMatrix(kernel,x[randomindex,,drop=FALSE],x) ## main loop for (i in 1:features) { K.cols <- K[-idx, , drop = FALSE] if(i > 1) projections <- K.cols * (alphazero[-idx]%*%t(rep(1,m))) + crossprod(t(alpha[-idx,1:(i-1),drop=FALSE]),K[idx, ,drop = FALSE]) else projections <- K.cols * (alphazero%*%t(rep(1,m))) Q <- apply(projections, 1, sd) Q.tmp <- rep(0,subset) Q.tmp[-idx] <- Q Qidx <- which.max(Q.tmp) Qmax <- Q.tmp[Qidx] if(i > 1) alphafeat[i,1:(i-1)] <- alpha[Qidx,1:(i-1)] alphafeat[i,i] <- alphazero[Qidx] if (i > 1) idx <- c(idx,Qidx) else idx <- Qidx if (i > 1) Qfeat <- c(Qfeat, Qmax) else Qfeat <- Qmax Ksub <- K[idx, idx, drop = FALSE] alphasub <- alphafeat[i,1:i] phisquare <- alphasub %*% Ksub %*% t(t(alphasub)) dotprod <- (alphazero * (K[,idx, drop = FALSE] %*% t(t(alphasub))) + alpha[,1:i]%*%(Ksub%*%t(t(alphasub))))/drop(phisquare) alpha[,1:i] <- alpha[,1:i] - dotprod %*%alphasub if(normalize){ sumalpha <- alphazero + rowSums(abs(alpha)) alphazero <- alphazero / sumalpha alpha <- alpha/ (sumalpha %*% t(rep(1,features))) } } obj <- new("kfa") alpha(obj) <- alphafeat alphaindex(obj) <- randomindex[idx] xmatrix(obj) <- x[alphaindex(obj),] kernelf(obj) <- kernel kcall(obj) <- match.call() return(obj) }) ## project a new matrix into the feature space setMethod("predict",signature(object="kfa"), function(object , x) { if (!is.null(terms(object))) { if(!is.matrix(x)) x <- model.matrix(delete.response(terms(object)), as.data.frame(x), na.action = n.action(object)) } else x <- if (is.vector(x)) t(t(x)) else as.matrix(x) if (!is.matrix(x)) stop("x must be a matrix a vector or a data frame") tmpres <- kernelMult(kernelf(object), x, xmatrix(object), alpha(object)) return(tmpres - 
matrix(colSums(tmpres)/dim(tmpres)[1],dim(tmpres)[1],dim(tmpres)[2],byrow=TRUE)) }) setMethod("show",signature(object="kfa"), function(object) { cat(paste("Number of features :",dim(alpha(object))[2],"\n")) show(kernelf(object)) }) kernlab/R/kpca.R0000644000175100001440000001214412676464735013176 0ustar hornikusers## kpca function ## author : alexandros setGeneric("kpca",function(x, ...) standardGeneric("kpca")) setMethod("kpca", signature(x = "formula"), function(x, data = NULL, na.action = na.omit, ...) { mt <- terms(x, data = data) if(attr(mt, "response") > 0) stop("response not allowed in formula") attr(mt, "intercept") <- 0 cl <- match.call() mf <- match.call(expand.dots = FALSE) mf$formula <- mf$x mf$... <- NULL mf[[1L]] <- quote(stats::model.frame) mf <- eval(mf, parent.frame()) na.act <- attr(mf, "na.action") Terms <- attr(mf, "terms") x <- model.matrix(mt, mf) res <- kpca(x, ...) ## fix up call to refer to the generic, but leave arg name as `formula' cl[[1]] <- as.name("kpca") kcall(res) <- cl attr(Terms,"intercept") <- 0 terms(res) <- Terms if(!is.null(na.act)) n.action(res) <- na.act return(res) }) ## Matrix Interface setMethod("kpca",signature(x="matrix"), function(x, kernel = "rbfdot", kpar = list(sigma = 0.1), features = 0, th = 1e-4, na.action = na.omit, ...) { x <- na.action(x) x <- as.matrix(x) m <- nrow(x) ret <- new("kpca") if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") km <- kernelMatrix(kernel,x) ## center kernel matrix kc <- t(t(km - colSums(km)/m) - rowSums(km)/m) + sum(km)/m^2 ## compute eigenvectors res <- eigen(kc/m,symmetric=TRUE) if(features == 0) features <- sum(res$values > th) else if(res$values[features] < th) warning(paste("eigenvalues of the kernel matrix are below threshold!")) pcv(ret) <- t(t(res$vectors[,1:features])/sqrt(res$values[1:features])) eig(ret) <- res$values[1:features] names(eig(ret)) <- paste("Comp.", 1:features, sep = "") rotated(ret) <- kc %*% pcv(ret) kcall(ret) <- match.call() kernelf(ret) <- kernel xmatrix(ret) <- x return(ret) }) ## List Interface setMethod("kpca",signature(x="list"), function(x, kernel = "stringdot", kpar = list(length = 4, lambda = 0.5), features = 0, th = 1e-4, na.action = na.omit, ...) { x <- na.action(x) m <- length(x) ret <- new("kpca") if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") km <- kernelMatrix(kernel,x) ## center kernel matrix kc <- t(t(km - colSums(km)/m) - rowSums(km)/m) + sum(km)/m^2 ## compute eigenvectors res <- eigen(kc/m,symmetric=TRUE) if(features == 0) features <- sum(res$values > th) else if(res$values[features] < th) warning(paste("eigenvalues of the kernel matrix are below threshold!")) pcv(ret) <- t(t(res$vectors[,1:features])/sqrt(res$values[1:features])) eig(ret) <- res$values[1:features] names(eig(ret)) <- paste("Comp.", 1:features, sep = "") rotated(ret) <- kc %*% pcv(ret) kcall(ret) <- match.call() kernelf(ret) <- kernel xmatrix(ret) <- x return(ret) }) ## Kernel Matrix Interface setMethod("kpca",signature(x= "kernelMatrix"), function(x, features = 0, th = 1e-4, ...) 
{ ret <- new("kpca") m <- dim(x)[1] if(m!= dim(x)[2]) stop("Kernel matrix has to be symetric, and positive semidefinite") ## center kernel matrix kc <- t(t(x - colSums(x)/m) - rowSums(x)/m) + sum(x)/m^2 ## compute eigenvectors res <- eigen(kc/m,symmetric=TRUE) if(features == 0) features <- sum(res$values > th) else if(res$values[features] < th) warning(paste("eigenvalues of the kernel matrix are below threshold!")) pcv(ret) <- t(t(res$vectors[,1:features])/sqrt(res$values[1:features])) eig(ret) <- res$values[1:features] names(eig(ret)) <- paste("Comp.", 1:features, sep = "") rotated(ret) <- kc %*% pcv(ret) kcall(ret) <- match.call() xmatrix(ret) <- x kernelf(ret) <- " Kernel matrix used." return(ret) }) ## project a new matrix into the feature space setMethod("predict",signature(object="kpca"), function(object , x) { if (!is.null(terms(object))) { if(!is.matrix(x) || !is(x,"list")) x <- model.matrix(delete.response(terms(object)), as.data.frame(x), na.action = n.action(object)) } else x <- if (is.vector(x)) t(t(x)) else if (!is(x,"list")) x <- as.matrix(x) if (is.vector(x) || is.data.frame(x)) x <- as.matrix(x) if (!is.matrix(x) && !is(x,"list")) stop("x must be a matrix a vector, a data frame, or a list") if(is(x,"matrix")) { n <- nrow(x) m <- nrow(xmatrix(object))} else { n <- length(x) m <- length(xmatrix(object)) } if(is.character(kernelf(object))) { knc <- x ka <- xmatrix(object) } else { knc <- kernelMatrix(kernelf(object),x,xmatrix(object)) ka <- kernelMatrix(kernelf(object),xmatrix(object)) } ## center ret <- t(t(knc - rowSums(knc)/m) - rowSums(ka)/m) + sum(ka)/(m*n) return(ret %*% pcv(object)) }) kernlab/R/ranking.R0000644000175100001440000002172113561524074013675 0ustar hornikusers## manifold ranking ## author: alexandros setGeneric("ranking",function(x, ...) standardGeneric("ranking")) setMethod("ranking",signature(x="matrix"), function (x, y, kernel = "rbfdot", kpar = list(sigma = 1), scale = FALSE, alpha = 0.99, iterations = 600, edgegraph = FALSE, convergence = FALSE, ...) { m <- dim(x)[1] d <- dim(x)[2] if(length(y) != m) { ym <- matrix(0,m,1) ym[y] <- 1 y <- ym } if (is.null(y)) y <- matrix(1, m, 1) labelled <- y != 0 if (!any(labelled)) stop("no labels sublied") if(is.character(kernel)) kernel <- match.arg(kernel,c("rbfdot","polydot","tanhdot","vanilladot","besseldot","laplacedot")) if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") if(scale) x <- scale(x) ## scaling from ksvm ## normalize ? 
if (is(kernel)[1]=='rbfkernel' && edgegraph){ sigma = kpar(kernel)$sigma n <- dim(x)[1] dota <- rowSums(x*x)/2 sed <- crossprod(t(x)) for (i in 1:n) sed[i,] <- - 2*(sed[i,] - dota - rep(dota[i],n)) diag(sed) <- 0 K <- exp(- sigma * sed) mst <- minimum.spanning.tree(sed) algo.mst <- mst$E max.squared.edge.length <- mst$max.sed.in.tree edgegraph <- (sed <= max.squared.edge.length) K[!edgegraph] <- 0 ##algo.edge.graph <- sparse(algo.edge.graph) rm(sed) gc() } else { if(edgegraph && is(kernel)[1]!="rbfkernel") warning('edge graph is only implemented for use with the RBF kernel') edgegraph <- matrix() K <- kernelMatrix(kernel,x) } diag(K) <- 0 ##K <- sparse(K) cs <- colSums(K) ##cs[cs <= 10e-6] <- 1 D <- 1/sqrt(cs) K <- D * K %*% diag(D) if(sum(labelled)==1) y <- K[, labelled,drop = FALSE] else y <- as.matrix(colSums(K[, labelled])) K <- alpha * K[, !labelled] ym <- matrix(0,m,iterations) ym[,1] <- y for (iteration in 2:iterations) ym[, iteration] <- ym[, 1] + K %*% ym[!labelled, iteration-1] ym[labelled,] <- NA r <- ym r[!labelled,] <- compute.ranks(-r[!labelled, ]) if(convergence) convergence <- (r - rep(r[,dim(r)[2]],iterations))/(m-sum(labelled)) else convergence <- matrix() res <- cbind(t(t(1:m)), ym[,iterations], r[,iterations]) return(new("ranking", .Data=res, convergence = convergence, edgegraph = edgegraph)) }) ## kernelMatrix interface setMethod("ranking",signature(x="kernelMatrix"), function (x, y, alpha = 0.99, iterations = 600, convergence = FALSE, ...) { m <- dim(x)[1] if(length(y) != m) { ym <- matrix(0,m,1) ym[y] <- 1 y <- ym } if (is.null(y)) y <- matrix(1, m, 1) labelled <- y != 0 if (!any(labelled)) stop("no labels sublied") diag(x) <- 0 ##K <- sparse(K) cs <- colSums(x) ##cs[cs <= 10e-6] <- 1 D <- 1/sqrt(cs) x <- D * x %*% diag(D) if(sum(labelled)==1) y <- x[, labelled,drop = FALSE] else y <- as.matrix(colSums(x[, labelled])) x <- alpha * x[, !labelled] ym <- matrix(0,m,iterations) ym[,1] <- y for (iteration in 2:iterations) ym[, iteration] <- ym[, 1] + x %*% ym[!labelled, iteration-1] ym[labelled,] <- NA r <- ym r[!labelled,] <- compute.ranks(-r[!labelled, ]) if(convergence) convergence <- (r - rep(r[,dim(r)[2]],iterations))/(m-sum(labelled)) else convergence <- matrix() res <- cbind(t(t(1:m)), ym[,iterations], r[,iterations]) return(new("ranking", .Data=res, convergence = convergence)) }) ## list interface setMethod("ranking",signature(x="list"), function (x, y, kernel = "stringdot", kpar = list(length = 4, lambda = 0.5), alpha = 0.99, iterations = 600, convergence = FALSE, ...) 
{ m <- length(x) if(length(y) != m) { ym <- matrix(0,m,1) ym[y] <- 1 y <- ym } if (is.null(y)) y <- matrix(1, m, 1) labelled <- y != 0 if (!any(labelled)) stop("no labels sublied") if(is.character(kernel)) kernel <- match.arg(kernel,c("rbfdot","polydot","tanhdot","vanilladot","besseldot","laplacedot")) if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") edgegraph <- matrix() K <- kernelMatrix(kernel,x) diag(K) <- 0 ##K <- sparse(K) cs <- colSums(K) ##cs[cs <= 10e-6] <- 1 D <- 1/sqrt(cs) K <- D * K %*% diag(D) if(sum(labelled)==1) y <- K[, labelled,drop = FALSE] else y <- as.matrix(colSums(K[, labelled])) K <- alpha * K[, !labelled] ym <- matrix(0,m,iterations) ym[,1] <- y for (iteration in 2:iterations) ym[, iteration] <- ym[, 1] + K %*% ym[!labelled, iteration-1] ym[labelled,] <- NA r <- ym r[!labelled,] <- compute.ranks(-r[!labelled, ]) if(convergence) convergence <- (r - rep(r[,dim(r)[2]],iterations))/(m-sum(labelled)) else convergence <- matrix() res <- cbind(t(t(1:m)), ym[,iterations], r[,iterations]) return(new("ranking", .Data=res, convergence = convergence, edgegraph = NULL)) }) minimum.spanning.tree <- function(sed) { max.sed.in.tree <- 0 E <- matrix(0,dim(sed)[1],dim(sed)[2]) n <- dim(E)[1] C <- logical(n) cmp <- sed diag(cmp) <- NA ans <- min(cmp, na.rm = TRUE) i <- which.min(cmp) j <- i%/%n + 1 i <- i%%n +1 for (nC in 1:n) { cmp <- sed cmp[C,] <- NA cmp[,!C] <- NA if(nC == 1) { ans <- 1 i <- 1 } else{ ans <- min(cmp, na.rm=TRUE) i <- which.min(cmp)} j <- i%/%n + 1 i <- i%%n + 1 E[i, j] <- nC E[j, i] <- nC C[i] <- TRUE max.sed.in.tree <- max(max.sed.in.tree, sed[i, j]) } ## E <- sparse(E) res <- list(E=E, max.sed.in.tree=max.sed.in.tree) } compute.ranks <- function(am) { rm <- matrix(0,dim(am)[1],dim(am)[2]) for (j in 1:dim(am)[2]) { a <- am[, j] sort <- sort(a, index.return = TRUE) sorted <- sort$x r <- sort$ix r[r] <- 1:length(r) while(1) { if(sum(na.omit(diff(sorted) == 0)) == 0) break tied <- sorted[min(which(diff(sorted) == 0))] sorted[sorted==tied] <- NA r[a==tied] <- mean(r[a==tied]) } rm[, j] <- r } return(rm) } setMethod("show","ranking", function(object) { cat("Ranking object of class \"ranking\"","\n") cat("\n") show(object@.Data) cat("\n") if(!any(is.na(convergence(object)))) cat("convergence matrix included.","\n") if(!any(is.na(edgegraph(object)))) cat("edgegraph matrix included.","\n") }) kernlab/R/kha.R0000644000175100001440000001042612676464711013016 0ustar hornikusers #Kernel Hebbian Algorithm function setGeneric("kha",function(x, ...) standardGeneric("kha")) setMethod("kha", signature(x = "formula"), function(x, data = NULL, na.action = na.omit, ...) { mt <- terms(x, data = data) if(attr(mt, "response") > 0) stop("response not allowed in formula") attr(mt, "intercept") <- 0 cl <- match.call() mf <- match.call(expand.dots = FALSE) mf$formula <- mf$x mf$... <- NULL mf[[1L]] <- quote(stats::model.frame) mf <- eval(mf, parent.frame()) na.act <- attr(mf, "na.action") Terms <- attr(mf, "terms") x <- model.matrix(mt, mf) res <- kha(x, ...) 
## fix up call to refer to the generic, but leave arg name as `formula' cl[[1]] <- as.name("kha") kcall(res) <- cl attr(Terms,"intercept") <- 0 terms(res) <- Terms if(!is.null(na.act)) n.action(res) <- na.act return(res) }) setMethod("kha",signature(x="matrix"), function(x, kernel = "rbfdot", kpar = list(sigma = 0.1), features = 5, eta = 0.005, th = 1e-4, maxiter = 10000, verbose = FALSE, na.action = na.omit, ...) { x <- na.action(x) x <- as.matrix(x) m <- nrow(x) ret <- new("kha") if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") ## Initialize A dual variables A <- matrix(runif(features*m),m,features)*2 - 1 AOld <- A ## compute square norm of data a <- rowSums(x^2) ## initialize the empirical sum kernel map eskm <- rep(0,m) for (i in 1:m) eskm[i] <- sum(kernelFast(kernel,x,x[i,,drop=FALSE], a)) eks <- sum(eskm) counter <- 0 step <- th + 1 Aold <- A while(step > th && counter < maxiter) { y <- rep(0, features) ot <- rep(0,m) ## Hebbian Iteration for (i in 1:m) { ## compute y output etkm <- as.vector(kernelFast(kernel,x,x[i,,drop=FALSE], a)) sum1 <- as.vector(etkm %*% A) sum2 <- as.vector(eskm%*%A)/m asum <- colSums(A) sum3 <- as.vector(eskm[i]*asum)/m sum4 <- as.vector(eks * asum)/m^2 y <- sum1 - sum2 - sum3 + sum4 ## update A yy <- y%*%t(y) yy[upper.tri(yy)] <- 0 tA <- t(A) A <- t(tA - eta * yy%*%tA) A[i,] <- A[i,] + eta * y } if (counter %% 100 == 0 ) { step = mean(abs(Aold - A)) Aold <- A if(verbose) cat("Iteration :", counter, "Converged :", step,"\n") } counter <- counter + 1 } ## Normalize in Feature space cA <- t(A) - colSums(A) Fnorm <- rep(0,features) for (j in 1:m) Fnorm <- Fnorm + colSums(t(cA[,j] * cA) * as.vector(kernelFast(kernel,x,x[j,,drop=FALSE],a))) if(any(Fnorm==0)) { warning("Normalization vector contains zeros, replacing them with ones") Fnorm[which(Fnorm==0)] <- 1 } A <- t(t(A)/sqrt(Fnorm)) pcv(ret) <- A eig(ret) <- Fnorm names(eig(ret)) <- paste("Comp.", 1:features, sep = "") eskm(ret) <- eskm kcall(ret) <- match.call() kernelf(ret) <- kernel xmatrix(ret) <- x return(ret) }) ## Project a new matrix into the feature space setMethod("predict",signature(object="kha"), function(object , x) { if (!is.null(terms(object))) { if(!is.matrix(x)) x <- model.matrix(delete.response(terms(object)), as.data.frame(x), na.action = n.action(object)) } else x <- if (is.vector(x)) t(t(x)) else as.matrix(x) if (is.vector(x)||is.data.frame(x)) x<-as.matrix(x) if (!is.matrix(x)) stop("x must be a matrix a vector or a data frame") n <- nrow(x) m <- nrow(xmatrix(object)) A <- pcv(object) y <- matrix(0,n,dim(A)[2]) eks <- sum(eskm(object)) a <- rowSums(xmatrix(object)^2) ## Project data sum2 <- as.vector(eskm(object)%*%A)/m asum <- colSums(A) sum4 <- as.vector(eks * asum)/m^2 for (i in 1:n) { ## compute y output etkm <- as.vector(kernelFast(kernelf(object),xmatrix(object),x[i,,drop=FALSE], a)) sum1 <- as.vector(etkm %*% A) sum3 <- sum(etkm)*asum/m y[i,] <- sum1 - sum2 - sum3 + sum4 } return(y) }) kernlab/R/onlearn.R0000644000175100001440000001667712560371302013710 0ustar hornikusers## kernel based on-line learning algorithms for classification, novelty detection and regression. 
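## Illustrative usage sketch (added note, not part of the original
## sources): an on-line learner is first initialized with inlearn()
## and then updated one observation at a time with onlearn():
if (FALSE) {
  x <- rbind(matrix(rnorm(50, mean =  1), , 2),
             matrix(rnorm(50, mean = -1), , 2))
  y <- rep(c(1, -1), each = 25)
  on <- inlearn(2, kernel = "rbfdot", kpar = list(sigma = 0.2),
                type = "classification", buffersize = 100)
  for (i in sample(1:50))
    on <- onlearn(on, x[i, ], y[i], nu = 0.03, lambda = 0.1)
  sign(predict(on, x))                       # fitted labels
}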
## ## created 15.09.04 alexandros ## updated setGeneric("onlearn",function(obj, x, y = NULL, nu = 0.2, lambda = 1e-4) standardGeneric("onlearn")) setMethod("onlearn", signature(obj = "onlearn"), function(obj , x, y = NULL, nu = 0.2, lambda = 1e-4) { if(onstart(obj) == 1 && onstop(obj) < buffer(obj)) buffernotfull <- TRUE else buffernotfull <- FALSE if(is.vector(x)) x <- matrix(x,,length(x)) d <- dim(x)[2] for (i in 1:dim(x)[1]) { xt <- x[i,,drop=FALSE] yt <- y[i] if(type(obj)=="novelty") { phi <- fit(obj) if(phi < 0) { alpha(obj) <- (1-lambda) * alpha(obj) if(buffernotfull) onstop(obj) <- onstop(obj) + 1 else{ onstop(obj) <- onstop(obj)%%buffer(obj) + 1 onstart(obj) <- onstart(obj)%%buffer(obj) +1 } alpha(obj)[onstop(obj)] <- lambda xmatrix(obj)[onstop(obj),] <- xt rho(obj) <- rho(obj) + lambda*(nu-1) } else rho(obj) <- rho(obj) + lambda*nu rho(obj) <- max(rho(obj), 0) if(onstart(obj) == 1 && onstop(obj) < buffer(obj)) fit(obj) <- drop(kernelMult(kernelf(obj), xt, matrix(xmatrix(obj)[1:onstop(obj),],ncol=d), matrix(alpha(obj)[1:onstop(obj)],ncol=1)) - rho(obj)) else fit(obj) <- drop(kernelMult(kernelf(obj), xt, xmatrix(obj), matrix(alpha(obj),ncol=1)) - rho(obj)) } if(type(obj)=="classification") { if(is.null(pattern(obj)) && is.factor(y)) pattern(obj) <- yt if(!is.null(pattern(obj))) if(pattern(obj) == yt) yt <- 1 else yt <- -1 phi <- fit(obj) alpha(obj) <- (1-lambda) * alpha(obj) if(yt*phi < rho(obj)) { if(buffernotfull) onstop(obj) <- onstop(obj) + 1 else{ onstop(obj) <- onstop(obj)%%buffer(obj) + 1 onstart(obj) <- onstart(obj)%%buffer(obj) +1 } alpha(obj)[onstop(obj)] <- lambda*yt b(obj) <- b(obj) + lambda*yt xmatrix(obj)[onstop(obj),] <- xt rho(obj) <- rho(obj) + lambda*(nu-1) ## (1-nu) ?? } else rho(obj) <- rho(obj) + lambda*nu rho(obj) <- max(rho(obj), 0) if(onstart(obj) == 1 && onstop(obj) < buffer(obj)) fit(obj) <- drop(kernelMult(kernelf(obj), xt, xmatrix(obj)[1:onstop(obj),,drop=FALSE], matrix(alpha(obj)[1:onstop(obj)],ncol=1)) + b(obj)) else fit(obj) <-drop(kernelMult(kernelf(obj), xt, xmatrix(obj), matrix(alpha(obj),ncol=1)) + b(obj)) } if(type(obj)=="regression") { alpha(obj) <- (1-lambda) * alpha(obj) phi <- fit(obj) if(abs(-phi) < rho(obj)) { if(buffernotfull) onstop(obj) <- onstop(obj) + 1 else{ onstop(obj) <- onstop(obj)%%buffer(obj) + 1 onstart(obj) <- onstart(obj)%% buffer(obj) +1 } alpha(obj)[onstop(obj)] <- sign(yt-phi)*lambda xmatrix(obj)[onstop(obj),] <- xt rho(obj) <- rho(obj) + lambda*(1-nu) ## (1-nu) ?? 
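## (Added explanatory note:) this regression branch follows the
## NORMA-style stochastic updates of Kivinen, Smola and Williamson:
## all coefficients decay by (1-lambda), the new point enters the ring
## buffer with coefficient sign(yt - phi)*lambda, and the tube width
## rho is adapted. Note that the test above uses abs(-phi) where
## abs(yt - phi) may have been intended; the trailing "??" comments
## suggest the rho bookkeeping was never fully settled.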
} else{ rho(obj) <- rho(obj) - lambda*nu alpha(obj)[onstop(obj)] <- sign(yt-phi)/rho(obj) } if(onstart(obj) == 1 && onstop(obj) < buffer(obj)) fit(obj) <- drop(kernelMult(kernelf(obj), xt, matrix(xmatrix(obj)[1:onstop(obj),],ncol=d), matrix(alpha(obj)[1:onstop(obj)],ncol=1)) + b(obj)) else fit(obj) <- drop(kernelMult(kernelf(obj), xt, xmatrix(obj), matrix(alpha(obj),ncol=1)) + b(obj)) } } return(obj) }) setGeneric("inlearn",function(d, kernel = "rbfdot", kpar = list(sigma=0.1), type = "novelty", buffersize = 1000) standardGeneric("inlearn")) setMethod("inlearn", signature(d = "numeric"), function(d ,kernel = "rbfdot", kpar = list(sigma=0.1), type = "novelty", buffersize = 1000) { obj <- new("onlearn") if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") type(obj) <- match.arg(type,c("novelty","classification","regression")) xmatrix(obj) <- matrix(0,buffersize,d) kernelf(obj) <- kernel onstart(obj) <- 1 onstop(obj) <- 1 fit(obj) <- 0 b(obj) <- 0 alpha(obj) <- rep(0, buffersize) rho(obj) <- 0 buffer(obj) <- buffersize return(obj) }) setMethod("show","onlearn", function(object){ cat("On-line learning object of class \"onlearn\"","\n") cat("\n") cat(paste("Learning problem :", type(object), "\n")) cat(paste("Data dimensions :", dim(xmatrix(object))[2], "\n")) cat(paste("Buffersize :", buffer(object), "\n")) cat("\n") show(kernelf(object)) }) setMethod("predict",signature(object="onlearn"), function(object, x) { if(is.vector(x)) x <- matrix(x,1) d <- dim(xmatrix(object))[2] if(type(object)=="novelty") { if(onstart(object) == 1 && onstop(object) < buffer(object)) res <- drop(kernelMult(kernelf(object), x, matrix(xmatrix(object)[1:onstop(object),],ncol= d), matrix(alpha(object)[1:onstop(object)],ncol=1)) - rho(object)) else res <- drop(kernelMult(kernelf(object), x, matrix(xmatrix(object),ncol=d), matrix(alpha(object),ncol=1)) - rho(object)) } if(type(object)=="classification") { if(onstart(object) == 1 && onstop(object) < buffer(object)) res <- drop(kernelMult(kernelf(object), x, matrix(xmatrix(object)[1:onstop(object),],ncol=d), matrix(alpha(object)[1:onstop(object)],ncol=1)) + b(object)) else res <- drop(kernelMult(kernelf(object), x, matrix(xmatrix(object),ncol=d), matrix(alpha(object),ncol=1)) + b(object)) } if(type(object)=="regression") { if(onstart(object) == 1 && onstop(object) < buffer(object)) res <- drop(kernelMult(kernelf(object), x, matrix(xmatrix(object)[1:onstop(object),],ncol=d), matrix(alpha(object)[1:onstop(object)],ncol=1)) + b(object)) else res <- drop(kernelMult(kernelf(object), x, matrix(xmatrix(object),ncol=d), matrix(alpha(object),ncol=1)) + b(object)) } return(res) }) kernlab/R/kqr.R0000644000175100001440000002444614221633042013036 0ustar hornikuserssetGeneric("kqr", function(x, ...) standardGeneric("kqr")) setMethod("kqr",signature(x="formula"), function (x, data=NULL, ..., subset, na.action = na.omit, scaled = TRUE){ cl <- match.call() m <- match.call(expand.dots = FALSE) if (is.matrix(eval(m$data, parent.frame()))) m$data <- as.data.frame(data) m$...
<- NULL m$formula <- m$x m$x <- NULL m[[1L]] <- quote(stats::model.frame) m <- eval(m, parent.frame()) Terms <- attr(m, "terms") attr(Terms, "intercept") <- 0 x <- model.matrix(Terms, m) y <- model.extract(m, "response") if (length(scaled) == 1) scaled <- rep(scaled, ncol(x)) if (any(scaled)) { remove <- unique(c(which(labels(Terms) %in% names(attr(x, "contrasts"))), which(!scaled) ) ) scaled <- !attr(x, "assign") %in% remove } ret <- kqr(x, y, scaled = scaled, ...) kcall(ret) <- cl terms(ret) <- Terms if (!is.null(attr(m, "na.action"))) n.action(ret) <- attr(m, "na.action") return (ret) }) setMethod("kqr",signature(x="vector"), function(x,...) { x <- t(t(x)) ret <- kqr(x, ...) ret }) setMethod("kqr",signature(x="matrix"), function (x, y, scaled = TRUE, tau = 0.5, C = 0.1, kernel = "rbfdot", kpar = "automatic", reduced = FALSE, rank = dim(x)[1]/6, fit = TRUE, cross = 0, na.action = na.omit) { if((tau > 1)||(tau < 0 )) stop("tau has to be strictly between 0 and 1") ret <- new("kqr") param(ret) <- list(C = C, tau = tau) if (is.null(y)) x <- na.action(x) else { df <- na.action(data.frame(y, x)) y <- df[,1] x <- as.matrix(df[,-1]) } ncols <- ncol(x) m <- nrows <- nrow(x) tmpsc <- NULL x.scale <- y.scale <- NULL ## scaling if (length(scaled) == 1) scaled <- rep(scaled, ncol(x)) if (any(scaled)) { co <- !apply(x[,scaled, drop = FALSE], 2, var) if (any(co)) { scaled <- rep(FALSE, ncol(x)) warning(paste("Variable(s)", paste("`",colnames(x[,scaled, drop = FALSE])[co], "'", sep="", collapse=" and "), "constant. Cannot scale data.") ) } else { xtmp <- scale(x[,scaled]) x[,scaled] <- xtmp x.scale <- attributes(xtmp)[c("scaled:center","scaled:scale")] y <- scale(y) y.scale <- attributes(y)[c("scaled:center","scaled:scale")] y <- as.vector(y) tmpsc <- list(scaled = scaled, x.scale = x.scale,y.scale = y.scale) } } ## Arrange all the kernel mumbo jumbo if(is.character(kernel)){ kernel <- match.arg(kernel,c("rbfdot","polydot","tanhdot","vanilladot","laplacedot","besseldot","anovadot","splinedot")) if(is.character(kpar)) if((kernel == "tanhdot" || kernel == "vanilladot" || kernel == "polydot"|| kernel == "besseldot" || kernel== "anovadot"|| kernel=="splinedot") && kpar=="automatic" ) { cat (" Setting default kernel parameters ","\n") kpar <- list() } } if (!is.function(kernel)) if (!is.list(kpar)&&is.character(kpar)&&(is(kernel, "rbfkernel") || is(kernel, "laplacedot") || kernel == "laplacedot"|| kernel=="rbfdot")){ kp <- match.arg(kpar,"automatic") if(kp=="automatic") kpar <- list(sigma=mean(sigest(x,scaled=FALSE,frac=1)[c(1,3)])) cat("Using automatic sigma estimation (sigest) for RBF or laplace kernel","\n") } if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") ## Setup QP problem and call ipop if(!reduced) H = kernelMatrix(kernel,x) else H = csi(x, kernel = kernel, rank = rank) c = -y A = rep(1,m) b = 0 r = 0 l = matrix(C * (tau-1),m,1) u = matrix(C * tau ,m,1) qpsol = ipop(c, H, A, b, l, u, r) alpha(ret)= coef(ret) = primal(qpsol) b(ret) = dual(qpsol)[1] ## Compute training error/loss xmatrix(ret) <- x ymatrix(ret) <- y kernelf(ret) <- kernel kpar(ret) <- kpar type(ret) <- ("Quantile Regression") if (fit){ fitted(ret) <- predict(ret, x) if (!is.null(scaling(ret)$y.scale)) fitted(ret) <- fitted(ret) * tmpsc$y.scale$"scaled:scale" + tmpsc$y.scale$"scaled:center" error(ret) <- c(pinloss(y, fitted(ret), tau), ramploss(y,fitted(ret),tau)) } else fitted(ret) <- NULL
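## (Added explanatory note:) the training error reported above is the
## pinball loss L_tau(u) = tau*u for u >= 0 and (tau-1)*u for u < 0,
## whose population minimizer is the tau-th conditional quantile; the
## box constraints l = C*(tau-1) and u = C*tau in the QP above are the
## dual image of this loss (cf. Takeuchi et al., JMLR 2006).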
if(any(scaled)) scaling(ret) <- tmpsc ## Crossvalidation cross(ret) <- -1 if(cross == 1) cat("\n","cross should be >1; no cross-validation done!","\n","\n") else if (cross > 1) { pinloss <- 0 ramloss <- 0 crescs <- NULL suppressWarnings(vgr<-split(sample(1:m,m),1:cross)) for(i in 1:cross) { cind <- unsplit(vgr[-i],factor(rep((1:cross)[-i],unlist(lapply(vgr[-i],length))))) cret <- kqr(x[cind,],y[cind], tau = tau, C = C, scale = FALSE, kernel = kernel, cross = 0, fit = FALSE) cres <- predict(cret, x[vgr[[i]],]) crescs <- c(crescs,cres) } if (!is.null(scaling(ret)$y.scale)){ crescs <- crescs * tmpsc$y.scale$"scaled:scale" + tmpsc$y.scale$"scaled:center" ysvgr <- y[unlist(vgr)] * tmpsc$y.scale$"scaled:scale" + tmpsc$y.scale$"scaled:center" } else ysvgr <- y[unlist(vgr)] pinloss <- drop(pinloss(ysvgr, crescs, tau)) ramloss <- drop(ramloss(ysvgr, crescs, tau)) cross(ret) <- c(pinloss, ramloss) } return(ret) }) setMethod("kqr",signature(x="list"), function (x, y, tau = 0.5, C = 0.1, kernel = "stringdot", kpar = list(length=4, lambda=0.5), fit = TRUE, cross = 0) { if((tau > 1)||(tau < 0 )) stop("tau has to be strictly between 0 and 1") if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") K <- kernelMatrix(kernel,x) ret <- kqr(K,y = y,tau = tau, C = C, fit = fit, cross = cross) kernelf(ret) <- kernel kpar(ret) <- kpar return(ret) }) setMethod("kqr",signature(x="kernelMatrix"), function (x, y, tau = 0.5, C = 0.1, fit = TRUE, cross = 0) { if((tau > 1)||(tau < 0 )) stop("tau has to be strictly between 0 and 1") ret <- new("kqr") param(ret) <- list(C = C, tau = tau) ncols <- ncol(x) m <- nrows <- nrow(x) y <- as.vector(y) ## Setup QP problem and call ipop H = x c = -y A = rep(1,m) b = 0 r = 0 l = matrix(C * (tau-1),m,1) u = matrix(C * tau ,m,1) qpsol = ipop(c, H, A, b, l, u, r) alpha(ret)= coef(ret) = primal(qpsol) b(ret) = dual(qpsol)[1] ## Compute training error/loss ymatrix(ret) <- y kernelf(ret) <- "Kernel Matrix used."
    type(ret) <- ("Quantile Regression")

    if (fit){
      fitted(ret) <- predict(ret, x)
      error(ret) <- c(pinloss(y, fitted(ret), tau), ramploss(y, fitted(ret), tau))
    }
    else fitted(ret) <- NULL

    ## Crossvalidation
    cross(ret) <- -1
    if(cross == 1)
      cat("\n","cross should be >1; no cross-validation done!","\n","\n")
    else if (cross > 1)
      {
        pinloss <- 0
        ramloss <- 0
        crescs <- NULL
        suppressWarnings(vgr<-split(sample(1:m,m),1:cross))
        for(i in 1:cross)
          {
            cind <- unsplit(vgr[-i],factor(rep((1:cross)[-i],unlist(lapply(vgr[-i],length)))))
            cret <- kqr(as.kernelMatrix(x[cind,cind]), y[cind], tau = tau, C = C, cross = 0, fit = FALSE)
            cres <- predict(cret, as.kernelMatrix(x[vgr[[i]],cind]))
            crescs <- c(crescs,cres)
          }
        ysvgr <- y[unlist(vgr)]

        pinloss <- drop(pinloss(ysvgr, crescs, tau))
        ramloss <- drop(ramploss(ysvgr, crescs, tau))
        cross(ret) <- c(pinloss, ramloss)
      }

    return(ret)
  })

pinloss <- function(y,f,tau)
  {
    if(is.vector(y)) m <- length(y)
    else m <- dim(y)[1]
    tmp <- y - f
    return((tau * sum(tmp*(tmp>=0)) + (tau-1) * sum(tmp * (tmp<0)))/m)
  }

ramploss <- function(y,f,tau)
  {
    if(is.vector(y)) m <- length(y)
    else m <- dim(y)[1]
    ## fraction of observations lying on or below the fitted quantile;
    ## for a well calibrated tau-quantile fit this should be close to tau
    return(sum(y<=f)/m)
  }

setMethod("predict", signature(object = "kqr"),
function (object, newdata)
  {
    sc <- 0
    if (missing(newdata))
      if(!is.null(fitted(object)))
        return(fitted(object))
      else
        stop("newdata is missing and no fitted values found.")

    if(!is(newdata,"kernelMatrix")){
      ncols <- ncol(xmatrix(object))
      nrows <- nrow(xmatrix(object))
      oldco <- ncols

      if (!is.null(terms(object)))
        {
          newdata <- model.matrix(delete.response(terms(object)), as.data.frame(newdata), na.action = na.action)
        }
      else
        newdata <- if (is.vector (newdata)) t(t(newdata)) else as.matrix(newdata)

      newcols <- 0
      newnrows <- nrow(newdata)
      newncols <- ncol(newdata)
      newco <- newncols

      if (oldco != newco) stop ("test vector does not match model !")

      if (is.list(scaling(object)) && sc != 1)
        newdata[,scaling(object)$scaled] <-
          scale(newdata[,scaling(object)$scaled, drop = FALSE],
                center = scaling(object)$x.scale$"scaled:center",
                scale = scaling(object)$x.scale$"scaled:scale")

      predres <- kernelMult(kernelf(object),newdata,xmatrix(object),as.matrix(alpha(object))) - b(object)

      if (!is.null(scaling(object)$y.scale))
        return(predres * scaling(object)$y.scale$"scaled:scale" + scaling(object)$y.scale$"scaled:center")
      else
        return(predres)
    }
    else
      {
        return(newdata%*%alpha(object) - b(object))
      }
  })

setMethod("show","kqr",
function(object){
  cat("Kernel Quantile Regression object of class \"kqr\"","\n")
  cat("\n")
  show(kernelf(object))
  cat("\n")
  cat("Regularization Cost Parameter C: ",round(param(object)[[1]],9))
  cat(paste("\nNumber of training instances learned :", dim(xmatrix(object))[1],"\n"))
  if(!is.null(fitted(object)))
    cat(paste("Train error :","pinball loss :", round(error(object)[1],9),"ramp loss :", round(error(object)[2],9),"\n"))
  ##train error & loss
  if(cross(object)!=-1)
    cat("Cross validation error :","pinball loss :", round(cross(object)[1],9),"ramp loss :", round(cross(object)[2],9),"\n")
})
kernlab/R/gausspr.R0000644000175100001440000003566614221632627013731 0ustar hornikusers## Gaussian Processes implementation. Laplace approximation for classification.
## author : alexandros karatzoglou

setGeneric("gausspr", function(x, ...) standardGeneric("gausspr"))

setMethod("gausspr",signature(x="formula"),
function (x, data=NULL, ..., subset, na.action = na.omit, scaled = TRUE){
  cl <- match.call()
  m <- match.call(expand.dots = FALSE)
  if (is.matrix(eval(m$data, parent.frame())))
    m$data <- as.data.frame(data)
  m$...
<- NULL m$formula <- m$x m$x <- NULL m[[1L]] <- quote(stats::model.frame) m <- eval(m, parent.frame()) Terms <- attr(m, "terms") attr(Terms, "intercept") <- 0 x <- model.matrix(Terms, m) y <- model.extract(m, "response") if (length(scaled) == 1) scaled <- rep(scaled, ncol(x)) if (any(scaled)) { remove <- unique(c(which(labels(Terms) %in% names(attr(x, "contrasts"))), which(!scaled) ) ) scaled <- !attr(x, "assign") %in% remove } ret <- gausspr(x, y, scaled = scaled, ...) kcall(ret) <- cl terms(ret) <- Terms if (!is.null(attr(m, "na.action"))) n.action(ret) <- attr(m, "na.action") return (ret) }) setMethod("gausspr",signature(x="vector"), function(x,...) { x <- t(t(x)) ret <- gausspr(x, ...) ret }) setMethod("gausspr",signature(x="matrix"), function (x, y, scaled = TRUE, type = NULL, kernel = "rbfdot", kpar = "automatic", var = 1, variance.model = FALSE, tol = 0.0005, cross = 0, fit = TRUE, ... ,subset ,na.action = na.omit) { ## should become an option reduced <- FALSE ## subsetting and na-handling for matrices ret <- new("gausspr") if (!missing(subset)) x <- x[subset,] if (is.null(y)) x <- na.action(x) else { df <- na.action(data.frame(y, x)) y <- df[,1] x <- as.matrix(df[,-1]) } ncols <- ncol(x) m <- nrows <- nrow(x) if (is.null (type)) type(ret) <- if (is.factor(y)) "classification" else "regression" else type(ret) <- type x.scale <- y.scale <- NULL ## scaling if (length(scaled) == 1) scaled <- rep(scaled, ncol(x)) if (any(scaled)) { co <- !apply(x[,scaled, drop = FALSE], 2, var) if (any(co)) { scaled <- rep(FALSE, ncol(x)) warning(paste("Variable(s)", paste("`",colnames(x[,scaled, drop = FALSE])[co], "'", sep="", collapse=" and "), "constant. Cannot scale data.") ) } else { xtmp <- scale(x[,scaled]) x[,scaled] <- xtmp x.scale <- attributes(xtmp)[c("scaled:center","scaled:scale")] if (is.numeric(y)&&(type(ret)!="classification")) { y <- scale(y) y.scale <- attributes(y)[c("scaled:center","scaled:scale")] y <- as.vector(y) } tmpsc <- list(scaled = scaled, x.scale = x.scale, y.scale = y.scale) } } if (var < 10^-3) stop("Noise variance parameter var has to be greater than 10^-3") # in case of classification: transform factors into integers if (is.factor(y)) { lev(ret) <- levels (y) y <- as.integer (y) } else { if (type(ret) == "classification" && any(as.integer (y) != y)) stop ("dependent variable has to be of factor or integer type for classification mode.") if(type(ret) == "classification") lev(ret) <- unique (y) } # initialize nclass(ret) <- length (lev(ret)) if(!is.null(type)) type(ret) <- match.arg(type,c("classification", "regression")) if(is.character(kernel)){ kernel <- match.arg(kernel,c("rbfdot","polydot","tanhdot","vanilladot","laplacedot","besseldot","anovadot","splinedot")) if(is.character(kpar)) if((kernel == "tanhdot" || kernel == "vanilladot" || kernel == "polydot"|| kernel == "besseldot" || kernel== "anovadot"|| kernel=="splinedot") && kpar=="automatic" ) { cat (" Setting default kernel parameters ","\n") kpar <- list() } } if (!is.function(kernel)) if (!is.list(kpar)&&is.character(kpar)&&(is(kernel, "rbfkernel") || is(kernel, "laplacedot") || kernel == "laplacedot"|| kernel=="rbfdot")){ kp <- match.arg(kpar,"automatic") if(kp=="automatic") kpar <- list(sigma=mean(sigest(x,scaled=FALSE)[c(1,3)])) cat("Using automatic sigma estimation (sigest) for RBF or laplace kernel","\n") } if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") p 
<- 0 if (type(ret) == "classification") { indexes <- lapply(1:nclass(ret), function(kk) which(y == kk)) for (i in 1:(nclass(ret)-1)) { jj <- i+1 for(j in jj:nclass(ret)) { p <- p+1 ##prepare data li <- length(indexes[[i]]) lj <- length(indexes[[j]]) xd <- matrix(0,(li+lj),dim(x)[2]) xdi <- 1:(li+lj) <= li xd[xdi,rep(TRUE,dim(x)[2])] <- x[indexes[[i]],] xd[xdi == FALSE,rep(TRUE,dim(x)[2])] <- x[indexes[[j]],] if(y[indexes[[i]][1]] < y[indexes[[j]]][1]) yd <- c(rep(1,li),rep(-1,lj)) else yd <- c(rep(-1,li),rep(1,lj)) if(reduced == FALSE){ K <- kernelMatrix(kernel,xd) gradnorm <- 1 alphag <- solut <- rep(0,li+lj) while (gradnorm > tol) { f <- crossprod(K,alphag) grad <- -yd/(1 + exp(yd*f)) hess <- exp(yd*f) hess <- hess / ((1 + hess)^2) ## We use solveiter instead of solve to speed up things ## A <- t(t(K)*as.vector(hess)) ## diag(A) <- diag(A) + 1 ## alphag <- alphag - solve(A,(grad + alphag)) solut <- solveiter(K, hess, (grad + alphag), solut) alphag <- alphag - solut gradnorm <- sqrt(sum((grad + alphag)^2)) } } else if (reduced ==TRUE) { yind <- t(matrix(unique(yd),2,length(yd))) ymat <- matrix(0, length(yd), 2) ymat[yind==yd] <- 1 ##Z <- csi(xd, ymat, kernel = kernel, rank = dim(yd)[1]) ##Z <- Z[sort(pivots(Z),index.return = TRUE)$ix, ,drop=FALSE] Z <- inchol(xd, kernel = kernel) gradnorm <- 1 alphag <- rep(0,li+lj) m1 <- dim(Z)[1] n1 <- dim(Z)[2] Ksub <- diag(rep(1,n1)) while (gradnorm > tol) { f <- drop(Z%*%crossprod(Z,alphag)) f[which(f>20)] <- 20 grad <- -yd/(1 + exp(yd*f)) hess <- exp(yd*f) hess <- as.vector(hess / ((1 + hess)^2)) alphag <- alphag - (- Z %*%solve(Ksub + (t(Z)*hess)%*%Z) %*% (t(Z)*hess))%*%(grad + alphag) + (grad + alphag) gradnorm <- sqrt(sum((grad + alphag)^2)) } } alpha(ret)[[p]] <- alphag alphaindex(ret)[[p]] <- c(indexes[[i]],indexes[[j]]) } } } if (type(ret) == "regression") { K <- kernelMatrix(kernel,x) if(variance.model) { sol <- solve(K + diag(rep(var, length = m))) rm(K) alpha(ret) <- sol%*%y } else alpha(ret) <- solve(K + diag(rep(var, length = m))) %*% y } kcall(ret) <- match.call() kernelf(ret) <- kernel xmatrix(ret) <- x if(variance.model) sol(ret) <- sol fitted(ret) <- if (fit) predict(ret, x) else NA if (fit){ if(type(ret)=="classification") error(ret) <- 1 - .classAgreement(table(y,as.integer(fitted(ret)))) if(type(ret)=="regression"){ if (!is.null(scaling(ret)$y.scale)) fitted(ret) <- fitted(ret) * tmpsc$y.scale$"scaled:scale" + tmpsc$y.scale$"scaled:center" error(ret) <- drop(crossprod(fitted(ret) - y)/m) } } if(any(scaled)) scaling(ret) <- tmpsc cross(ret) <- -1 if(cross == 1) cat("\n","cross should be >1 no cross-validation done!","\n","\n") else if (cross > 1) { cerror <- 0 suppressWarnings(vgr<-split(sample(1:m,m),1:cross)) for(i in 1:cross) { cind <- unsplit(vgr[-i],factor(rep((1:cross)[-i],unlist(lapply(vgr[-i],length))))) if(type(ret)=="classification") { cret <- gausspr(x[cind,], y[cind], scaled = FALSE, type=type(ret),kernel=kernel,var = var, cross = 0, fit = FALSE) cres <- predict(cret, x[vgr[[i]],]) cerror <- (1 - .classAgreement(table(y[vgr[[i]]],as.integer(cres))))/cross + cerror } if(type(ret)=="regression") { cret <- gausspr(x[cind,],y[cind],type=type(ret),scaled = FALSE, kernel=kernel,var = var,tol=tol, cross = 0, fit = FALSE) cres <- predict(cret, x[vgr[[i]],]) if (!is.null(scaling(ret)$y.scale)) scal <- scaling(ret)$y.scale$"scaled:scale" cerror <- drop((scal^2)*crossprod(cres - y[vgr[[i]]])/m) + cerror } } cross(ret) <- cerror } return(ret) }) setMethod("predict", signature(object = "gausspr"), function (object, newdata, type = 
"response", coupler = "minpair") { sc <- 0 type <- match.arg(type,c("response","probabilities","votes", "variance", "sdeviation")) if (missing(newdata) && type!="response") return(fitted(object)) else if(missing(newdata)) { newdata <- xmatrix(object) sc <- 1 } ncols <- ncol(xmatrix(object)) nrows <- nrow(xmatrix(object)) oldco <- ncols if (!is.null(terms(object))) { newdata <- model.matrix(delete.response(terms(object)), as.data.frame(newdata), na.action = na.action) } else newdata <- if (is.vector (newdata)) t(t(newdata)) else as.matrix(newdata) newcols <- 0 newnrows <- nrow(newdata) newncols <- ncol(newdata) newco <- newncols if (oldco != newco) stop ("test vector does not match model !") if (is.list(scaling(object)) && sc != 1) newdata[,scaling(object)$scaled] <- scale(newdata[,scaling(object)$scaled, drop = FALSE], center = scaling(object)$x.scale$"scaled:center", scale = scaling(object)$x.scale$"scaled:scale" ) p <- 0 if(type == "response") { if(type(object)=="classification") { predres <- 1:newnrows votematrix <- matrix(0,nclass(object),nrows) for(i in 1:(nclass(object)-1)) { jj <- i+1 for(j in jj:nclass(object)) { p <- p+1 ret <- kernelMult(kernelf(object),newdata,xmatrix(object)[alphaindex(object)[[p]],],alpha(object)[[p]]) votematrix[i,ret>0] <- votematrix[i,ret>0] + 1 votematrix[j,ret<0] <- votematrix[j,ret<0] + 1 } } predres <- sapply(predres, function(x) which.max(votematrix[,x])) } } if(type == "probabilities") { if(type(object)=="classification") { binprob <- matrix(0, newnrows, nclass(object)*(nclass(object) - 1)/2) for(i in 1:(nclass(object)-1)) { jj <- i+1 for(j in jj:nclass(object)) { p <- p+1 binprob[,p] <- 1/(1+exp(-kernelMult(kernelf(object),newdata,xmatrix(object)[alphaindex(object)[[p]],],alpha(object)[[p]]))) } } ## multiprob <- sapply(1:newnrows, function(x) couple(binprob[x ,],coupler = coupler)) multiprob <- couple(binprob, coupler = coupler) } } if(type(object) == "regression") { if (type == "variance"||type == "sdeviation") { Ktest <- kernelMatrix(kernelf(object),xmatrix(object), newdata) predres <- diag(kernelMatrix(kernelf(object),newdata) - t(Ktest) %*% sol(object) %*% Ktest) if (type== "sdeviation") predres <- sqrt(predres) if (!is.null(scaling(object)$y.scale)) predres <- predres * scaling(object)$y.scale$"scaled:scale" + scaling(object)$y.scale$"scaled:center" } else { predres <- kernelMult(kernelf(object),newdata,xmatrix(object),as.matrix(alpha(object))) if (!is.null(scaling(object)$y.scale)) predres <- predres * scaling(object)$y.scale$"scaled:scale" + scaling(object)$y.scale$"scaled:center" } } if (is.character(lev(object))) { ##classification & probabilities : return probabilitie matrix if(type == "probabilities") { colnames(multiprob) <- lev(object) return(multiprob) } ##classification & type response: return factors if(type == "response") return(factor (lev(object)[predres], levels = lev(object))) ##classification & votes : return votematrix if(type == "votes") return(votematrix) } else ##else: return raw values return(predres) }) setMethod("show","gausspr", function(object){ cat("Gaussian Processes object of class \"gausspr\"","\n") cat(paste("Problem type:", type(object),"\n")) cat("\n") show(kernelf(object)) cat(paste("\nNumber of training instances learned :", dim(xmatrix(object))[1],"\n")) if(!is.null(fitted(object))) cat(paste("Train error :", round(error(object),9),"\n")) ##train error & loss if(cross(object)!=-1) cat("Cross validation error :",round(cross(object),9),"\n") }) solveiter <- function(B,noiseproc,b,x,itmax = 50,tol = 10e-4 
,verbose = FALSE)
  {
    ## ------------------------------------------
    ## Preconditioned Biconjugate Gradient method
    ## solves the linear system Ax = b for general A
    ## ------------------------------------------
    ## x : initial guess
    ## itmax : max # iterations
    ## iterates while mean(abs(Ax-b)) > tol
    ##
    ## Simplified form of Numerical Recipes: linbcg
    ##
    ## The preconditioning matrix is set to inv(diag(A))
    ## A is defined through A = I + N*B

    diagA <- matrix(1,dim(B)[1],1) + colSums(B) + diag(B)*(noiseproc-1) ## diags of A
    cont <- 0
    iter <- 0
    r <- .Amul2(x,B,noiseproc)
    r <- b - r
    rr <- r
    znrm <- 1
    bnrm <- sqrt(sum((b)^2))
    z <- r/diagA
    err <- sqrt(sum((.Amul2(x,B,noiseproc) - b)^2))/bnrm

    while (iter <= itmax){
      iter <- iter + 1
      zm1nrm <- znrm
      zz <- rr/diagA
      bknum <- drop(crossprod(z,rr))
      if (iter == 1)
        {
          p <- z
          pp <- zz
        }
      else
        {
          bk <- bknum/bkden
          p <- bk*p + z
          pp <- bk*pp + zz
        }
      bkden <- bknum
      z <- .Amul2(p,B,noiseproc)
      akden <- drop(crossprod(z,pp))
      ak <- bknum/akden
      zz <- .Amul2T(pp,B,noiseproc)
      x <- x + ak*p
      r <- r - ak*z
      rr <- rr - ak*zz
      z <- r/diagA
      znrm <- 1
      err <- mean(abs(r))
      if (err < tol)
        break
    }
    return(x)
  }

## [gap: the inchol() generic, method signature and initialization that
##  originally preceded this loop are missing from this dump]

            while (residue > tol && counter < maxiter ) {

              ## Aggressively allocate memory
              if(counter %% BLOCKSIZE == 0)
                {
                  Tktmp <- matrix(0, m, dim(Tk)[2] + BLOCKSIZE)
                  Tktmp[1:m > 0, 1:(dim(Tk)[2] + BLOCKSIZE) <= dim(Tk)[2]] <- Tk
                  Tk <- Tktmp

                  Ttmp <- matrix(0, dim(T)[1]+BLOCKSIZE, BLOCKSIZE+counter)
                  ind <- 1:(dim(T)[1]+BLOCKSIZE) <= dim(T)[1]
                  ind2 <- 1:(BLOCKSIZE + counter) <= counter
                  Ttmp[ind , ind2] <- T
                  Ttmp[ind == FALSE, ind2 == FALSE] <- diag(1, BLOCKSIZE)
                  T <- Ttmp

                  padded.veck.tmp <- matrix(0,dim(padded.veck)[1]+BLOCKSIZE)
                  padded.veck.tmp[1:(dim(padded.veck)[1]+BLOCKSIZE) <= dim(padded.veck)[1]] <- padded.veck
                  padded.veck <- padded.veck.tmp

                  pivots.tmp <- matrix(0, dim(pivots)[1]+BLOCKSIZE)
                  pivots.tmp[1:(dim(pivots)[1] + BLOCKSIZE) <= dim(pivots)[1]] <- pivots
                  pivots <- pivots.tmp

                  maxresiduals.tmp <- matrix(0,dim(maxresiduals)[1]+BLOCKSIZE)
                  maxresiduals.tmp[1:(dim(maxresiduals)[1]+BLOCKSIZE) <= dim(maxresiduals)[1]] <- maxresiduals
                  maxresiduals <- maxresiduals.tmp

                  if(counter == 0)
                    t <- rep(0,BLOCKSIZE)
                  else
                    t <- rep(0,length(t)+BLOCKSIZE)
                }

              veck <- kernelFast(kernel, x, x[index, ,drop=FALSE],dota)

              if (counter == 0)
                {
                  ## No need to compute t here
                  tau <- sqrt(veck[index])
                  ## Update T
                  T[1, 1] <- tau
                  ## Compute the update for Tk
                  update <- veck/tau
                }
              else
                {
                  padded.veck[1:counter] <- veck[pivots[1:counter]]
                  ## First compute t
                  ## t <- t(crossprod(padded.veck,backsolve(T,diag(1,nrow=dim(T)[1]))))
                  ## cat("T: ",dim(T), " p:",length(padded.veck),",\n")
                  t[1:counter] <- backsolve(T, k=counter, padded.veck, transpose = TRUE)
                  ## Now compute tau
                  tau <- as.vector(sqrt(veck[index] - crossprod(t)))
                  ## Update T
                  T[1:counter, counter+1] <- t[1:counter]
                  T[counter + 1, counter + 1] <- tau
                  ## Compute the update for Tk
                  update <- (1/tau) * (veck - Tk %*% t)
                }

              ## Update Tk
              Tk[,counter + 1] <- update
              ## Update diagonal residuals
              diag.residues <- diag.residues - update^2
              ## Update pivots
              pivots[counter + 1] <- index
              ## Monitor residuals
              maxresiduals[counter + 1] <- residue
              ## Choose next candidate
              residue <- max( diag.residues )
              index <- which.max(diag.residues)
              ## Update counter
              counter <- counter + 1
              ## Report progress to the user
              if(counter%%blocksize == 0 && (verbose == TRUE))
                cat("counter = ",counter," ", "residue = ", residue, "\n")
            }
            ## Throw away extra columns which we might have added
            Tk <- Tk[, 1:counter]
            pivots <- pivots[1:counter]
            maxresiduals <- maxresiduals[1:counter]

            return(new("inchol",.Data=Tk,pivots=pivots,diagresidues = diag.residues, maxresiduals = maxresiduals))
          })
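## Usage sketch (hypothetical data, not run): the returned factor Z
## approximates the kernel matrix, K ~ Z %*% t(Z), with pivot bookkeeping
## kept in the extra slots, e.g.
##   x <- matrix(rnorm(200), 100, 2)
##   Z <- inchol(x, kernel = rbfdot(sigma = 0.5), tol = 1e-4)
##   K <- kernelMatrix(rbfdot(sigma = 0.5), x)
##   max(abs(K - Z %*% t(Z)))   # small, on the order of tol
##   length(pivots(Z))          # rank of the low-rank approximation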
kernlab/R/aobjects.R0000644000175100001440000010724112055335057014036 0ustar hornikusers## S4 object definitions and assigment/accessor functions for the slots. ## ## created 10.09.03 alexandros karatzoglou ## updated 23.08.05 setClass("kernel",representation("function",kpar="list")) setClass("kernelMatrix",representation("matrix"),prototype=structure(.Data=matrix())) setClassUnion("listI", c("list","numeric","vector","integer","matrix")) setClassUnion("output", c("matrix","factor","vector","logical","numeric","list","integer","NULL")) setClassUnion("input", c("matrix","list")) setClassUnion("kfunction", c("function","character")) setClassUnion("mpinput", c("matrix","data.frame","missing")) setClassUnion("lpinput", c("list","missing")) setClassUnion("kpinput", c("kernelMatrix","missing")) setClass("vm", representation(alpha = "listI", ## since setClassUnion is not working type = "character", kernelf = "kfunction", kpar = "list", xmatrix = "input", ymatrix = "output", fitted = "output", lev = "vector", nclass = "numeric", error = "vector", cross = "vector", n.action= "ANY", terms = "ANY", kcall = "call"), contains= "VIRTUAL") #Generic Vector Machine object if(!isGeneric("type")){ if (is.function("type")) fun <- type else fun <- function(object) standardGeneric("type") setGeneric("type", fun) } setMethod("type", "vm", function(object) object@type) setGeneric("type<-", function(x, value) standardGeneric("type<-")) setReplaceMethod("type", "vm", function(x, value) { x@type <- value x }) if(!isGeneric("kernelf")){ if (is.function("kernelf")) fun <- kernelf else fun <- function(object) standardGeneric("kernelf") setGeneric("kernelf", fun) } setMethod("kernelf", "vm", function(object) object@kernelf) setGeneric("kernelf<-", function(x, value) standardGeneric("kernelf<-")) setReplaceMethod("kernelf", "vm", function(x, value) { x@kernelf <- value x }) if(!isGeneric("kpar")){ if (is.function("kpar")) fun <- kpar else fun <- function(object) standardGeneric("kpar") setGeneric("kpar", fun) } setMethod("kpar", "vm", function(object) object@kpar) setGeneric("kpar<-", function(x, value) standardGeneric("kpar<-")) setReplaceMethod("kpar", "vm", function(x, value) { x@kpar <- value x }) if(!isGeneric("kcall")){ if (is.function("kcall")) fun <- kcall else fun <- function(object) standardGeneric("kcall") setGeneric("kcall", fun) } setMethod("kcall", "vm", function(object) object@kcall) setGeneric("kcall<-", function(x, value) standardGeneric("kcall<-")) setReplaceMethod("kcall", "vm", function(x, value) { x@kcall <- value x }) setMethod("terms", "vm", function(x, ...) x@terms) setGeneric("terms<-", function(x, value) standardGeneric("terms<-")) setReplaceMethod("terms", "vm", function(x, value) { x@terms <- value x }) if(!isGeneric("xmatrix")){ if (is.function("xmatrix")) fun <- xmatrix else fun <- function(object) standardGeneric("xmatrix") setGeneric("xmatrix", fun) } setMethod("xmatrix", "vm", function(object) object@xmatrix) setGeneric("xmatrix<-", function(x, value) standardGeneric("xmatrix<-")) setReplaceMethod("xmatrix", "vm", function(x, value) { x@xmatrix <- value x }) if(!isGeneric("ymatrix")){ if (is.function("ymatrix")) fun <- ymatrix else fun <- function(object) standardGeneric("ymatrix") setGeneric("ymatrix", fun) } setMethod("ymatrix", "vm", function(object) object@ymatrix) setGeneric("ymatrix<-", function(x, value) standardGeneric("ymatrix<-")) setReplaceMethod("ymatrix", "vm", function(x, value) { x@ymatrix <- value x }) setMethod("fitted", "vm", function(object, ...) 
object@fitted) setGeneric("fitted<-", function(x, value) standardGeneric("fitted<-")) setReplaceMethod("fitted", "vm", function(x, value) { x@fitted <- value x }) if(!isGeneric("lev")){ if (is.function("lev")) fun <- lev else fun <- function(object) standardGeneric("lev") setGeneric("lev", fun) } setMethod("lev", "vm", function(object) object@lev) setGeneric("lev<-", function(x, value) standardGeneric("lev<-")) setReplaceMethod("lev", "vm", function(x, value) { x@lev <- value x }) if(!isGeneric("nclass")){ if (is.function("nclass")) fun <- nclass else fun <- function(object) standardGeneric("nclass") setGeneric("nclass", fun) } setMethod("nclass", "vm", function(object) object@nclass) setGeneric("nclass<-", function(x, value) standardGeneric("nclass<-")) setReplaceMethod("nclass", "vm", function(x, value) { x@nclass <- value x }) if(!isGeneric("alpha")){ if (is.function("alpha")) fun <- alpha else fun <- function(object) standardGeneric("alpha") setGeneric("alpha", fun) } setMethod("alpha", "vm", function(object) object@alpha) setGeneric("alpha<-", function(x, value) standardGeneric("alpha<-")) setReplaceMethod("alpha", "vm", function(x, value) { x@alpha <- value x }) if(!isGeneric("error")){ if (is.function("error")) fun <- error else fun <- function(object) standardGeneric("error") setGeneric("error", fun) } setMethod("error", "vm", function(object) object@error) setGeneric("error<-", function(x, value) standardGeneric("error<-")) setReplaceMethod("error", "vm", function(x, value) { x@error <- value x }) if(!isGeneric("cross")){ if (is.function("cross")) fun <- cross else fun <- function(object) standardGeneric("cross") setGeneric("cross", fun) } setMethod("cross", "vm", function(object) object@cross) setGeneric("cross<-", function(x, value) standardGeneric("cross<-")) setReplaceMethod("cross", "vm", function(x, value) { x@cross <- value x }) if(!isGeneric("n.action")){ if (is.function("n.action")) fun <- n.action else fun <- function(object) standardGeneric("n.action") setGeneric("n.action", fun) } setMethod("n.action", "vm", function(object) object@n.action) setGeneric("n.action<-", function(x, value) standardGeneric("n.action<-")) setReplaceMethod("n.action", "vm", function(x, value) { x@n.action <- value x }) setClass("ksvm", representation(param = "list", scaling = "ANY", coef = "ANY", alphaindex = "ANY", b = "numeric", obj = "vector", SVindex = "vector", nSV = "numeric", prior = "list", prob.model = "list" ), contains="vm") if(!isGeneric("param")){ if (is.function("param")) fun <- param else fun <- function(object) standardGeneric("param") setGeneric("param", fun) } setMethod("param", "ksvm", function(object) object@param) setGeneric("param<-", function(x, value) standardGeneric("param<-")) setReplaceMethod("param", "ksvm", function(x, value) { x@param <- value x }) if(!isGeneric("scaling")){ if (is.function("scaling")) fun <- scaling else fun <- function(object) standardGeneric("scaling") setGeneric("scaling", fun) } setMethod("scaling", "ksvm", function(object) object@scaling) setGeneric("scaling<-", function(x, value) standardGeneric("scaling<-")) setReplaceMethod("scaling", "ksvm", function(x, value) { x@scaling<- value x }) if(!isGeneric("obj")){ if (is.function("obj")) fun <- obj else fun <- function(object) standardGeneric("obj") setGeneric("obj", fun) } setMethod("obj", "ksvm", function(object) object@obj) setGeneric("obj<-", function(x, value) standardGeneric("obj<-")) setReplaceMethod("obj", "ksvm", function(x, value) { x@obj<- value x }) setMethod("coef", "ksvm", 
function(object, ...) object@coef) setGeneric("coef<-", function(x, value) standardGeneric("coef<-")) setReplaceMethod("coef", "ksvm", function(x, value) { x@coef <- value x }) if(!isGeneric("alphaindex")){ if (is.function("alphaindex")) fun <- alphaindex else fun <- function(object) standardGeneric("alphaindex") setGeneric("alphaindex", fun) } setMethod("alphaindex", "ksvm", function(object) object@alphaindex) setGeneric("alphaindex<-", function(x, value) standardGeneric("alphaindex<-")) setReplaceMethod("alphaindex", "ksvm", function(x, value) { x@alphaindex <- value x }) if(!isGeneric("b")){ if (is.function("b")) fun <- b else fun <- function(object) standardGeneric("b") setGeneric("b", fun) } setMethod("b", "ksvm", function(object) object@b) setGeneric("b<-", function(x, value) standardGeneric("b<-")) setReplaceMethod("b", "ksvm", function(x, value) { x@b <- value x }) if(!isGeneric("SVindex")){ if (is.function("SVindex")) fun <- SVindex else fun <- function(object) standardGeneric("SVindex") setGeneric("SVindex", fun) } setMethod("SVindex", "ksvm", function(object) object@SVindex) setGeneric("SVindex<-", function(x, value) standardGeneric("SVindex<-")) setReplaceMethod("SVindex", "ksvm", function(x, value) { x@SVindex <- value x }) if(!isGeneric("nSV")){ if (is.function("nSV")) fun <- nSV else fun <- function(object) standardGeneric("nSV") setGeneric("nSV", fun) } setMethod("nSV", "ksvm", function(object) object@nSV) setGeneric("nSV<-", function(x, value) standardGeneric("nSV<-")) setReplaceMethod("nSV", "ksvm", function(x, value) { x@nSV <- value x }) if(!isGeneric("prior")){ if (is.function("prior")) fun <- prior else fun <- function(object) standardGeneric("prior") setGeneric("prior", fun) } setMethod("prior", "ksvm", function(object) object@prior) setGeneric("prior<-", function(x, value) standardGeneric("prior<-")) setReplaceMethod("prior", "ksvm", function(x, value) { x@prior <- value x }) if(!isGeneric("prob.model")){ if (is.function("prob.model")) fun <- prob.model else fun <- function(object) standardGeneric("prob.model") setGeneric("prob.model", fun) } setMethod("prob.model", "ksvm", function(object) object@prob.model) setGeneric("prob.model<-", function(x, value) standardGeneric("prob.model<-")) setReplaceMethod("prob.model", "ksvm", function(x, value) { x@prob.model <- value x }) setClass("lssvm", representation(param = "list", scaling = "ANY", coef = "ANY", alphaindex = "ANY", ## prob.model = "list", b = "numeric", nSV = "numeric" ), contains="vm") ##setMethod("prob.model", "lssvm", function(object) object@prob.model) ##setGeneric("prob.model<-", function(x, value) standardGeneric("prob.model<-")) ##setReplaceMethod("prob.model", "lssvm", function(x, value) { ## x@prob.model <- value ## x ##}) setMethod("param", "lssvm", function(object) object@param) setReplaceMethod("param", "lssvm", function(x, value) { x@param <- value x }) setMethod("scaling", "lssvm", function(object) object@scaling) setReplaceMethod("scaling", "lssvm", function(x, value) { x@scaling<- value x }) setMethod("coef", "lssvm", function(object, ...) 
object@coef) setReplaceMethod("coef", "lssvm", function(x, value) { x@coef <- value x }) setMethod("alphaindex", "lssvm", function(object) object@alphaindex) setReplaceMethod("alphaindex", "lssvm", function(x, value) { x@alphaindex <- value x }) setMethod("b", "lssvm", function(object) object@b) setReplaceMethod("b", "lssvm", function(x, value) { x@b <- value x }) setMethod("nSV", "lssvm", function(object) object@nSV) setReplaceMethod("nSV", "lssvm", function(x, value) { x@nSV <- value x }) setClass("kqr", representation(param = "list", scaling = "ANY", coef = "ANY", b = "numeric" ), contains="vm") setMethod("b", "kqr", function(object) object@b) setReplaceMethod("b", "kqr", function(x, value) { x@b <- value x }) setMethod("scaling", "kqr", function(object) object@scaling) setReplaceMethod("scaling", "kqr", function(x, value) { x@scaling <- value x }) setMethod("coef", "kqr", function(object) object@coef) setReplaceMethod("coef", "kqr", function(x, value) { x@coef <- value x }) setMethod("param", "kqr", function(object) object@param) setReplaceMethod("param", "kqr", function(x, value) { x@param <- value x }) ## failed attempt to get rid of all this above ## mkaccesfun <- function(cls) #{ # snames <- slotNames(cls) ## # # for(i in 1:length(snames)) # { resF <- paste("\"",snames[i],"\"",sep="") # if(!isGeneric(snames[i])) # eval(parse(file="",text=paste("setGeneric(",resF,",function(object)","standardGeneric(",resF,")",")",sep=" "))) # setGeneric(snames[i], function(object) standardGeneric(snames[i])) # # setMethod(snames[i], cls, function(object) eval(parse(file="",text=paste("object@",snames[i],sep="")))) # resG <- paste("\"",snames[i],"<-","\"",sep="") #eval(parse(file="",text=paste("setGeneric(",resG,",function(x, value)","standardGeneric(",resG,")",")",sep=" "))) # setReplaceMethod(snames[i], cls, function(x, value) { # eval(parse(file="",text=paste("x@",snames[i],"<-value",sep=""))) # x # }) # } #} setClass("prc", representation(pcv = "matrix", eig = "vector", kernelf = "kfunction", kpar = "list", xmatrix = "input", kcall = "ANY", terms = "ANY", n.action = "ANY"),contains="VIRTUAL") #accessor functions if(!isGeneric("pcv")){ if (is.function("pcv")) fun <- pcv else fun <- function(object) standardGeneric("pcv") setGeneric("pcv", fun) } setMethod("pcv", "prc", function(object) object@pcv) setGeneric("pcv<-", function(x, value) standardGeneric("pcv<-")) setReplaceMethod("pcv", "prc", function(x, value) { x@pcv <- value x }) if(!isGeneric("eig")){ if (is.function("eig")) fun <- eig else fun <- function(object) standardGeneric("eig") setGeneric("eig", fun) } setMethod("eig", "prc", function(object) object@eig) setGeneric("eig<-", function(x, value) standardGeneric("eig<-")) setReplaceMethod("eig", "prc", function(x, value) { x@eig <- value x }) setMethod("kernelf","prc", function(object) object@kernelf) setReplaceMethod("kernelf","prc", function(x, value){ x@kernelf <- value x }) setMethod("xmatrix","prc", function(object) object@xmatrix) setReplaceMethod("xmatrix","prc", function(x, value){ x@xmatrix <- value x }) setMethod("kcall","prc", function(object) object@kcall) setReplaceMethod("kcall","prc", function(x, value){ x@kcall <- value x }) setMethod("terms","prc", function(x, ...) 
x@terms) setReplaceMethod("terms","prc", function(x, value){ x@terms <- value x }) setMethod("n.action","prc", function(object) object@n.action) setReplaceMethod("n.action","prc", function(x, value){ x@n.action <- value x }) ##kernel principal components object setClass("kpca", representation(rotated = "matrix"),contains="prc") #accessor functions if(!isGeneric("rotated")){ if (is.function("rotated")) fun <- rotated else fun <- function(object) standardGeneric("rotated") setGeneric("rotated", fun) } setMethod("rotated", "kpca", function(object) object@rotated) setGeneric("rotated<-", function(x, value) standardGeneric("rotated<-")) setReplaceMethod("rotated", "kpca", function(x, value) { x@rotated <- value x }) ## kernel maximum mean discrepancy setClass("kmmd", representation(H0="logical", AsympH0 ="logical", kernelf = "kfunction", Asymbound="numeric", Radbound="numeric", xmatrix="input", mmdstats="vector")) if(!isGeneric("mmdstats")){ if (is.function("mmdstats")) fun <- mmdstats else fun <- function(object) standardGeneric("mmdstats") setGeneric("mmdstats", fun) } setMethod("mmdstats","kmmd", function(object) object@mmdstats) setGeneric("mmdstats<-", function(x, value) standardGeneric("mmdstats<-")) setReplaceMethod("mmdstats","kmmd", function(x, value){ x@mmdstats <- value x }) if(!isGeneric("Radbound")){ if (is.function("Radbound")) fun <- Radbound else fun <- function(object) standardGeneric("Radbound") setGeneric("Radbound", fun) } setMethod("Radbound","kmmd", function(object) object@Radbound) setGeneric("Radbound<-", function(x, value) standardGeneric("Radbound<-")) setReplaceMethod("Radbound","kmmd", function(x, value){ x@Radbound <- value x }) if(!isGeneric("Asymbound")){ if (is.function("Asymbound")) fun <- Asymbound else fun <- function(object) standardGeneric("Asymbound") setGeneric("Asymbound", fun) } setMethod("Asymbound","kmmd", function(object) object@Asymbound) setGeneric("Asymbound<-", function(x, value) standardGeneric("Asymbound<-")) setReplaceMethod("Asymbound","kmmd", function(x, value){ x@Asymbound <- value x }) if(!isGeneric("H0")){ if (is.function("H0")) fun <- H0 else fun <- function(object) standardGeneric("H0") setGeneric("H0", fun) } setMethod("H0","kmmd", function(object) object@H0) setGeneric("H0<-", function(x, value) standardGeneric("H0<-")) setReplaceMethod("H0","kmmd", function(x, value){ x@H0 <- value x }) if(!isGeneric("AsympH0")){ if (is.function("AsympH0")) fun <- AsympH0 else fun <- function(object) standardGeneric("AsympH0") setGeneric("AsympH0", fun) } setMethod("AsympH0","kmmd", function(object) object@AsympH0) setGeneric("AsympH0<-", function(x, value) standardGeneric("AsympH0<-")) setReplaceMethod("AsympH0","kmmd", function(x, value){ x@AsympH0 <- value x }) setMethod("kernelf","kmmd", function(object) object@kernelf) setReplaceMethod("kernelf","kmmd", function(x, value){ x@kernelf <- value x }) setClass("ipop", representation(primal = "vector", dual = "numeric", how = "character" )) if(!isGeneric("primal")){ if (is.function("primal")) fun <- primal else fun <- function(object) standardGeneric("primal") setGeneric("primal", fun) } setMethod("primal", "ipop", function(object) object@primal) setGeneric("primal<-", function(x, value) standardGeneric("primal<-")) setReplaceMethod("primal", "ipop", function(x, value) { x@primal <- value x }) if(!isGeneric("dual")){ if (is.function("dual")) fun <- dual else fun <- function(object) standardGeneric("dual") setGeneric("dual", fun) } setMethod("dual", "ipop", function(object) object@dual) 
setGeneric("dual<-", function(x, value) standardGeneric("dual<-")) setReplaceMethod("dual", "ipop", function(x, value) { x@dual <- value x }) if(!isGeneric("how")){ if (is.function("how")) fun <- how else fun <- function(object) standardGeneric("how") setGeneric("how", fun) } setMethod("how", "ipop", function(object) object@how) setGeneric("how<-", function(x, value) standardGeneric("how<-")) setReplaceMethod("how", "ipop", function(x, value) { x@how <- value x }) # Kernel Canonical Correlation Analysis setClass("kcca", representation(kcor = "vector", xcoef = "matrix", ycoef = "matrix" ##xvar = "matrix", ##yvar = "matrix" )) if(!isGeneric("kcor")){ if (is.function("kcor")) fun <- kcor else fun <- function(object) standardGeneric("kcor") setGeneric("kcor", fun) } setMethod("kcor", "kcca", function(object) object@kcor) setGeneric("kcor<-", function(x, value) standardGeneric("kcor<-")) setReplaceMethod("kcor", "kcca", function(x, value) { x@kcor <- value x }) if(!isGeneric("xcoef")){ if (is.function("xcoef")) fun <- xcoef else fun <- function(object) standardGeneric("xcoef") setGeneric("xcoef", fun) } setMethod("xcoef", "kcca", function(object) object@xcoef) setGeneric("xcoef<-", function(x, value) standardGeneric("xcoef<-")) setReplaceMethod("xcoef", "kcca", function(x, value) { x@xcoef <- value x }) if(!isGeneric("ycoef")){ if (is.function("ycoef")) fun <- ycoef else fun <- function(object) standardGeneric("ycoef") setGeneric("ycoef", fun) } setMethod("ycoef", "kcca", function(object) object@ycoef) setGeneric("ycoef<-", function(x, value) standardGeneric("ycoef<-")) setReplaceMethod("ycoef", "kcca", function(x, value) { x@ycoef <- value x }) ##if(!isGeneric("xvar")){ ## if (is.function("xvar")) ## fun <- xvar ## else fun <- function(object) standardGeneric("xvar") ## setGeneric("xvar", fun) ##} ##setMethod("xvar", "kcca", function(object) object@xvar) ##setGeneric("xvar<-", function(x, value) standardGeneric("xvar<-")) ##setReplaceMethod("xvar", "kcca", function(x, value) { ## x@xvar <- value ## x ##}) ##if(!isGeneric("yvar")){ ## if (is.function("yvar")) ## fun <- yvar ## else fun <- function(object) standardGeneric("yvar") ## setGeneric("yvar", fun) ##} ##setMethod("yvar", "kcca", function(object) object@yvar) ##setGeneric("yvar<-", function(x, value) standardGeneric("yvar<-")) ##setReplaceMethod("yvar", "kcca", function(x, value) { ## x@yvar <- value ## x ##}) ## Gaussian Processes object setClass("gausspr",representation(tol = "numeric", scaling = "ANY", sol = "matrix", alphaindex="list", nvar = "numeric" ),contains="vm") setMethod("alphaindex","gausspr", function(object) object@alphaindex) setReplaceMethod("alphaindex","gausspr", function(x, value){ x@alphaindex <- value x }) if(!isGeneric("sol")){ if (is.function("sol")) fun <- sol else fun <- function(object) standardGeneric("sol") setGeneric("sol", fun) } setMethod("sol","gausspr", function(object) object@sol) setGeneric("sol<-", function(x, value) standardGeneric("sol<-")) setReplaceMethod("sol","gausspr", function(x, value){ x@sol <- value x }) setMethod("scaling","gausspr", function(object) object@scaling) setReplaceMethod("scaling","gausspr", function(x, value){ x@scaling <- value x }) setMethod("coef", "gausspr", function(object, ...) 
object@alpha) # Relevance Vector Machine object setClass("rvm", representation(tol = "numeric", nvar = "numeric", mlike = "numeric", RVindex = "vector", coef = "ANY", nRV = "numeric"),contains ="vm") if(!isGeneric("tol")){ if (is.function("tol")) fun <- tol else fun <- function(object) standardGeneric("tol") setGeneric("tol", fun) } setMethod("tol", "rvm", function(object) object@tol) setGeneric("tol<-", function(x, value) standardGeneric("tol<-")) setReplaceMethod("tol", "rvm", function(x, value) { x@tol <- value x }) setMethod("coef", "rvm", function(object, ...) object@coef) setReplaceMethod("coef", "rvm", function(x, value) { x@coef <- value x }) if(!isGeneric("RVindex")){ if (is.function("RVindex")) fun <- RVindex else fun <- function(object) standardGeneric("RVindex") setGeneric("RVindex", fun) } setMethod("RVindex", "rvm", function(object) object@RVindex) setGeneric("RVindex<-", function(x, value) standardGeneric("RVindex<-")) setReplaceMethod("RVindex", "rvm", function(x, value) { x@RVindex <- value x }) if(!isGeneric("nvar")){ if (is.function("nvar")) fun <- nvar else fun <- function(object) standardGeneric("nvar") setGeneric("nvar", fun) } setMethod("nvar", "rvm", function(object) object@nvar) setGeneric("nvar<-", function(x, value) standardGeneric("nvar<-")) setReplaceMethod("nvar", "rvm", function(x, value) { x@nvar <- value x }) if(!isGeneric("nRV")){ if (is.function("nRV")) fun <- nRV else fun <- function(object) standardGeneric("nRV") setGeneric("nRV", fun) } setMethod("nRV", "rvm", function(object) object@nRV) setGeneric("nRV<-", function(x, value) standardGeneric("nRV<-")) setReplaceMethod("nRV", "rvm", function(x, value) { x@nRV <- value x }) setMethod("coef", "rvm", function(object, ...) object@alpha) if(!isGeneric("mlike")){ if (is.function("mlike")) fun <- mlike else fun <- function(object) standardGeneric("mlike") setGeneric("mlike", fun) } setMethod("mlike", "rvm", function(object) object@mlike) setGeneric("mlike<-", function(x, value) standardGeneric("mlike<-")) setReplaceMethod("mlike", "rvm", function(x, value) { x@mlike <- value x }) setClass("inchol",representation("matrix", pivots="vector", diagresidues="vector", maxresiduals="vector"), prototype=structure(.Data=matrix(), pivots=vector(), diagresidues=vector(), maxresiduals=vector())) if(!isGeneric("pivots")){ if (is.function("pivots")) fun <- pivots else fun <- function(object) standardGeneric("pivots") setGeneric("pivots", fun) } setMethod("pivots", "inchol", function(object) object@pivots) setGeneric("pivots<-", function(x, value) standardGeneric("pivots<-")) setReplaceMethod("pivots", "inchol", function(x, value) { x@pivots <- value x }) if(!isGeneric("diagresidues")){ if (is.function("diagresidues")) fun <- diagresidues else fun <- function(object) standardGeneric("diagresidues") setGeneric("diagresidues", fun) } setMethod("diagresidues", "inchol", function(object) object@diagresidues) setGeneric("diagresidues<-", function(x,value) standardGeneric("diagresidues<-")) setReplaceMethod("diagresidues", "inchol", function(x, value) { x@diagresidues <- value x }) if(!isGeneric("maxresiduals")){ if (is.function("maxresiduals")) fun <- maxresiduals else fun <- function(object) standardGeneric("maxresiduals") setGeneric("maxresiduals", fun) } setMethod("maxresiduals", "inchol", function(object) object@maxresiduals) setGeneric("maxresiduals<-", function(x,value) standardGeneric("maxresiduals<-")) setReplaceMethod("maxresiduals", "inchol", function(x, value) { x@maxresiduals <- value x }) ## csi object 
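## A hedged usage sketch for this class (hypothetical data; csi() itself is
## defined elsewhere in this package):
##   G <- csi(x, ymat, kernel = rbfdot(sigma = 0.5), rank = 20)
##   dim(G)         # pivoted low-rank factor of the kernel matrix
##   pivots(G)      # pivot order, via the inherited inchol accessor
##   predgain(G)    # per-column predicted gain, accessor defined below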
setClass("csi",representation(Q = "matrix", R = "matrix", truegain = "vector", predgain = "vector"),contains="inchol") if(!isGeneric("Q")){ if (is.function("Q")) fun <- Q else fun <- function(object) standardGeneric("Q") setGeneric("Q", fun) } setMethod("Q", "csi", function(object) object@Q) setGeneric("Q<-", function(x, value) standardGeneric("Q<-")) setReplaceMethod("Q", "csi", function(x, value) { x@Q <- value x }) if(!isGeneric("R")){ if (is.function("R")) fun <- R else fun <- function(object) standardGeneric("R") setGeneric("R", fun) } setMethod("R", "csi", function(object) object@R) setGeneric("R<-", function(x, value) standardGeneric("R<-")) setReplaceMethod("R", "csi", function(x, value) { x@R <- value x }) if(!isGeneric("truegain")){ if (is.function("truegain")) fun <- truegain else fun <- function(object) standardGeneric("truegain") setGeneric("truegain", fun) } setMethod("truegain", "csi", function(object) object@truegain) setGeneric("truegain<-", function(x, value) standardGeneric("truegain<-")) setReplaceMethod("truegain", "csi", function(x, value) { x@truegain <- value x }) if(!isGeneric("predgain")){ if (is.function("predgain")) fun <- predgain else fun <- function(object) standardGeneric("predgain") setGeneric("predgain", fun) } setMethod("predgain", "csi", function(object) object@predgain) setGeneric("predgain<-", function(x, value) standardGeneric("predgain<-")) setReplaceMethod("predgain", "csi", function(x, value) { x@predgain <- value x }) setClass("specc",representation("vector", centers="matrix", size="vector", kernelf="kfunction", withinss = "vector" ),prototype=structure(.Data=vector(), centers = matrix(), size=matrix(), kernelf = ls, withinss=vector())) if(!isGeneric("centers")){ if (is.function("centers")) fun <- centers else fun <- function(object) standardGeneric("centers") setGeneric("centers", fun) } setMethod("centers", "specc", function(object) object@centers) setGeneric("centers<-", function(x,value) standardGeneric("centers<-")) setReplaceMethod("centers", "specc", function(x, value) { x@centers <- value x }) if(!isGeneric("size")){ if (is.function("size")) fun <- size else fun <- function(object) standardGeneric("size") setGeneric("size", fun) } setMethod("size", "specc", function(object) object@size) setGeneric("size<-", function(x,value) standardGeneric("size<-")) setReplaceMethod("size", "specc", function(x, value) { x@size <- value x }) if(!isGeneric("withinss")){ if (is.function("withinss")) fun <- withinss else fun <- function(object) standardGeneric("withinss") setGeneric("withinss", fun) } setMethod("withinss", "specc", function(object) object@withinss) setGeneric("withinss<-", function(x,value) standardGeneric("withinss<-")) setReplaceMethod("withinss", "specc", function(x, value) { x@withinss <- value x }) setMethod("kernelf","specc", function(object) object@kernelf) setReplaceMethod("kernelf","specc", function(x, value){ x@kernelf <- value x }) setClass("ranking",representation("matrix", convergence="matrix", edgegraph="matrix"), prototype=structure(.Data=matrix(), convergence=matrix(), edgegraph=matrix())) if(!isGeneric("convergence")){ if (is.function("convergence")) fun <- convergence else fun <- function(object) standardGeneric("convergence") setGeneric("convergence", fun) } setMethod("convergence", "ranking", function(object) object@convergence) setGeneric("convergence<-", function(x,value) standardGeneric("convergence<-")) setReplaceMethod("convergence", "ranking", function(x, value) { x@convergence <- value x }) 
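## The guarded definitions above follow the one accessor pattern used
## throughout this file; sketched usage (hypothetical fitted object):
##   rk <- ranking(x, 2)            # a fitted ranking object
##   convergence(rk)                # getter above reads rk@convergence
##   convergence(rk) <- matrix(0)   # replace method above writes the slot
## i.e. each slot gets a generic, a getter method and a `<-` replace method.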
if(!isGeneric("edgegraph")){ if (is.function("edgegraph")) fun <- edgegraph else fun <- function(object) standardGeneric("edgegraph") setGeneric("edgegraph", fun) } setMethod("edgegraph", "ranking", function(object) object@edgegraph) setGeneric("edgegraph<-", function(x,value) standardGeneric("edgegraph<-")) setReplaceMethod("edgegraph", "ranking", function(x, value) { x@edgegraph <- value x }) ## online learning algorithms class setClass("onlearn", representation( kernelf = "kfunction", buffer = "numeric", kpar = "list", xmatrix = "matrix", fit = "numeric", onstart = "numeric", onstop = "numeric", alpha = "ANY", rho = "numeric", b = "numeric", pattern ="ANY", type="character" )) if(!isGeneric("fit")){ if (is.function("fit")) fun <- fit else fun <- function(object) standardGeneric("fit") setGeneric("fit", fun) } setMethod("fit","onlearn", function(object) object@fit) setGeneric("fit<-", function(x, value) standardGeneric("fit<-")) setReplaceMethod("fit","onlearn", function(x, value){ x@fit <- value x }) if(!isGeneric("onstart")){ if (is.function("onstart")) fun <- onstart else fun <- function(object) standardGeneric("onstart") setGeneric("onstart", fun) } setMethod("onstart", "onlearn", function(object) object@onstart) setGeneric("onstart<-", function(x, value) standardGeneric("onstart<-")) setReplaceMethod("onstart", "onlearn", function(x, value) { x@onstart <- value x }) if(!isGeneric("onstop")){ if (is.function("onstop")) fun <- onstop else fun <- function(object) standardGeneric("onstop") setGeneric("onstop", fun) } setMethod("onstop", "onlearn", function(object) object@onstop) setGeneric("onstop<-", function(x, value) standardGeneric("onstop<-")) setReplaceMethod("onstop", "onlearn", function(x, value) { x@onstop <- value x }) if(!isGeneric("buffer")){ if (is.function("buffer")) fun <- buffer else fun <- function(object) standardGeneric("buffer") setGeneric("buffer", fun) } setMethod("buffer", "onlearn", function(object) object@buffer) setGeneric("buffer<-", function(x, value) standardGeneric("buffer<-")) setReplaceMethod("buffer", "onlearn", function(x, value) { x@buffer <- value x }) setMethod("kernelf","onlearn", function(object) object@kernelf) setReplaceMethod("kernelf","onlearn", function(x, value){ x@kernelf <- value x }) setMethod("kpar","onlearn", function(object) object@kpar) setReplaceMethod("kpar","onlearn", function(x, value){ x@kpar <- value x }) setMethod("xmatrix","onlearn", function(object) object@xmatrix) setReplaceMethod("xmatrix","onlearn", function(x, value){ x@xmatrix <- value x }) setMethod("alpha","onlearn", function(object) object@alpha) setReplaceMethod("alpha","onlearn", function(x, value){ x@alpha <- value x }) setMethod("b","onlearn", function(object) object@b) setReplaceMethod("b","onlearn", function(x, value){ x@b <- value x }) setMethod("type","onlearn", function(object) object@type) setReplaceMethod("type","onlearn", function(x, value){ x@type <- value x }) if(!isGeneric("rho")){ if (is.function("rho")) fun <- rho else fun <- function(object) standardGeneric("rho") setGeneric("rho", fun) } setMethod("rho", "onlearn", function(object) object@rho) setGeneric("rho<-", function(x, value) standardGeneric("rho<-")) setReplaceMethod("rho", "onlearn", function(x, value) { x@rho <- value x }) if(!isGeneric("pattern")){ if (is.function("pattern")) fun <- pattern else fun <- function(object) standardGeneric("pattern") setGeneric("pattern", fun) } setMethod("pattern", "onlearn", function(object) object@pattern) setGeneric("pattern<-", function(x, value) 
standardGeneric("pattern<-")) setReplaceMethod("pattern", "onlearn", function(x, value) { x@pattern <- value x }) setClass("kfa",representation(alpha = "matrix", alphaindex = "vector", kernelf = "kfunction", xmatrix = "matrix", kcall = "call", terms = "ANY" )) setMethod("coef", "kfa", function(object, ...) object@alpha) setMethod("kernelf","kfa", function(object) object@kernelf) setReplaceMethod("kernelf","kfa", function(x, value){ x@kernelf <- value x }) setMethod("alphaindex","kfa", function(object) object@alphaindex) setReplaceMethod("alphaindex","kfa", function(x, value){ x@alphaindex <- value x }) setMethod("alpha","kfa", function(object) object@alpha) setReplaceMethod("alpha","kfa", function(x, value){ x@alpha <- value x }) setMethod("xmatrix","kfa", function(object) object@xmatrix) setReplaceMethod("xmatrix","kfa", function(x, value){ x@xmatrix <- value x }) setMethod("kcall","kfa", function(object) object@kcall) setReplaceMethod("kcall","kfa", function(x, value){ x@kcall <- value x }) setMethod("terms","kfa", function(x, ...) x@terms) setReplaceMethod("terms","kfa", function(x, value){ x@terms <- value x }) ## kernel hebbian algorithm object setClass("kha", representation(eskm ="vector"),contains="prc") ## accessor functions if(!isGeneric("eskm")){ if (is.function("eskm")) fun <- eskm else fun <- function(object) standardGeneric("eskm") setGeneric("eskm", fun) } setMethod("eskm", "kha", function(object) object@eskm) setGeneric("eskm<-", function(x, value) standardGeneric("eskm<-")) setReplaceMethod("eskm", "kha", function(x, value) { x@eskm <- value x }) kernlab/R/kmmd.R0000644000175100001440000002030214221632765013167 0ustar hornikusers## calculates the kernel maximum mean discrepancy for samples from two distributions ## author: alexandros karatzoglou setGeneric("kmmd",function(x,...) standardGeneric("kmmd")) setMethod("kmmd", signature(x = "matrix"), function(x, y, kernel="rbfdot",kpar="automatic", alpha = 0.05, asymptotic = FALSE, replace = TRUE, ntimes = 150, frac = 1, ...) 
{ x <- as.matrix(x) y <- as.matrix(y) res <- new("kmmd") if(is.character(kernel)){ kernel <- match.arg(kernel,c("rbfdot","polydot","tanhdot","vanilladot","laplacedot","besseldot","anovadot","splinedot","matrix")) if(kernel == "matrix") if(dim(x)[1]==dim(x)[2]) return(kmmd(x= as.kernelMatrix(x), y = y, Kxy = as.kernelMatrix(x)%*%y, alpha = 0.05, asymptotic = FALSE, replace = TRUE, ntimes = 100, frac = 1, ...)) else stop(" kernel matrix not square!") if(is.character(kpar)) if((kernel == "tanhdot" || kernel == "vanilladot" || kernel == "polydot"|| kernel == "besseldot" || kernel== "anovadot"|| kernel=="splinedot") && kpar=="automatic" ) { cat (" Setting default kernel parameters ","\n") kpar <- list() } } if (!is.function(kernel)) if (!is.list(kpar)&&is.character(kpar)&&(is(kernel, "laplacedot")|| kernel=="rbfdot")){ kp <- match.arg(kpar,"automatic") if(kp=="automatic") kpar <- list(sigma=sigest(rbind(x,y),scaled=FALSE)[2]) cat("Using automatic sigma estimation (sigest) for RBF or laplace kernel","\n") } if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") m <- dim(x)[1] n <- dim(y)[1] N <- max(m,n) M <- min(m,n) Kxx <- kernelMatrix(kernel,x) Kyy <- kernelMatrix(kernel,y) Kxy <- kernelMatrix(kernel,x,y) resmmd <- .submmd(Kxx, Kyy, Kxy, alpha) H0(res) <- (resmmd$mmd1 > resmmd$D1) Radbound(res) <- resmmd$D1 Asymbound(res) <- 0 mmdstats(res)[1] <- resmmd$mmd1 mmdstats(res)[2] <- resmmd$mmd3 if(asymptotic){ boundA <- .submmd3bound(Kxx, Kyy, Kxy, alpha, frac, ntimes, replace) AsympH0(res) <- (resmmd$mmd3 > boundA) Asymbound(res) <- boundA } kernelf(res) <- kernel return(res) }) setMethod("kmmd",signature(x="list"), function(x, y, kernel="stringdot",kpar=list(type="spectrum",length=4), alpha = 0.05, asymptotic = FALSE, replace = TRUE, ntimes = 150, frac = 1, ...) { if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") Kxx <- kernelMatrix(kernel,x) Kyy <- kernelMatrix(kernel,y) Kxy <- kernelMatrix(kernel,x,y) ret <- kmmd(x=Kxx,y = Kyy,Kxy=Kxy, alpha=alpha, asymptotic= asymptotic, replace = replace, ntimes = ntimes, frac= frac) kernelf(ret) <- kernel return(ret) }) setMethod("kmmd",signature(x="kernelMatrix"), function (x, y, Kxy, alpha = 0.05, asymptotic = FALSE, replace = TRUE, ntimes = 100, frac = 1, ...) { res <- new("kmmd") resmmd <- .submmd(x, y, Kxy, alpha) H0(res) <- (resmmd$mmd1 > resmmd$D1) Radbound(res) <- resmmd$D1 Asymbound(res) <- 0 mmdstats(res)[1] <- resmmd$mmd1 mmdstats(res)[2] <- resmmd$mmd3 if(asymptotic){ boundA <- .submmd3bound(x, y, Kxy, alpha, frac, ntimes, replace) AsympH0(res) <- (resmmd$mmd1 > boundA) Asymbound(res) <- boundA } kernelf(res) <- " Kernel matrix used as input." 
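            ## Sketch of the first-order statistic computed in .submmd()
            ## below (comments only; sample data is hypothetical):
            ##   MMD_b = sqrt( mean(Kxx) + mean(Kyy) - 2*mean(Kxy) )
            ## e.g. for two samples from different distributions,
            ##   x <- matrix(rnorm(100), 50, 2); y <- matrix(rnorm(100, mean = 1), 50, 2)
            ##   k <- rbfdot(sigma = 0.5)
            ##   sqrt(max(0, mean(kernelMatrix(k, x)) + mean(kernelMatrix(k, y)) -
            ##               2 * mean(kernelMatrix(k, x, y))))
            ## matches mmdstats(kmmd(x, y, kernel = "rbfdot", kpar = list(sigma = 0.5)))[1].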
return(res) }) .submmd <- function(Kxx,Kyy, Kxy, alpha) { m <- dim(Kxx)[1] n <- dim(Kyy)[1] N <- max(m,n) M <- min(m,n) sumKxx <- sum(Kxx) if(m!=n) sumKxxM <- sum(Kxx[1:M,1:M]) else sumKxxM <- sumKxx dgxx <- diag(Kxx) sumKxxnd <- sumKxx - sum(dgxx) R <- max(dgxx) RM <- max(dgxx[1:M]) hu <- colSums(Kxx[1:M,1:M]) - dgxx[1:M] sumKyy <- sum(Kyy) if(m!=n) sumKyyM <- sum(Kyy[1:M,1:M]) else sumKyyM <- sumKyy dgyy <- diag(Kyy) sumKyynd <- sum(Kyy) - sum(dgyy) R <- max(R,dgyy) RM <- max(RM,dgyy[1:M]) # RM instead of R in original hu <- hu + colSums(Kyy[1:M,1:M]) - dgyy[1:M] sumKxy <- sum(Kxy) if (m!=n) sumKxyM <- sum(Kxy[1:M,1:M]) else sumKxyM <- sumKxy dg <- diag(Kxy) # up to M only hu <- hu - colSums(Kxy[1:M,1:M]) - colSums(t(Kxy[1:M,1:M])) + 2*dg # one sided sum mmd1 <- sqrt(max(0,sumKxx/(m*m) + sumKyy/(n*n) - 2/m/n* sumKxy)) mmd3 <- sum(hu)/M/(M-1) D1 <- 2*sqrt(RM/M)+sqrt(log(1/alpha)*4*RM/M) return(list(mmd1=mmd1,mmd3=mmd3,D1=D1)) } .submmd3bound <- function(Kxx,Kyy, Kxy, alpha, frac, ntimes, replace) { ## implements the bootstrapping approach to the MMD3 bound by shuffling ## the kernel matrix ## frac : fraction of data used for bootstrap ## ntimes : how many times MMD is to be evaluated m <- dim(Kxx)[1] n <- dim(Kyy)[1] M <- min(m,n) N <- max(m,n) poslabels <- 1:m neglabels <- (m+1):(m+n) ## bootstrap bootmmd3 <- rep(0,ntimes) for (i in 1:ntimes) { nsamples <- ceiling(frac*min(m,n)) xinds <- sample(1:m,nsamples,replace=replace) yinds <- sample(1:n,nsamples,replace=replace) newlab <- c(poslabels[xinds],neglabels[yinds]) samplenew <- sample(newlab, length(newlab), replace=FALSE) xinds <- samplenew[1:nsamples] yinds <- samplenew[(nsamples+1):length(samplenew)] newm <- length(xinds) newn <- length(yinds) newM <- min(newm,newn) ##get new kernel matrices (without concat to big matrix to save memory) xind1 <- xinds[xinds<=m] xind2 <- xinds[xinds>m]- m yind1 <- yinds[yinds<=m] yind2 <- yinds[yinds>m]-m ##Kxx (this should be implemented with kernelMult for memory efficiency) nKxx <- rbind(cbind(Kxx[xind1,xind1],Kxy[xind1,xind2]), cbind(t(Kxy[xind1,xind2]),Kyy[xind2,xind2])) dgxx <- diag(nKxx) hu <- colSums(nKxx[1:newM,1:newM]) - dgxx[1:newM] # one sided sum rm(nKxx) #Kyy nKyy <- rbind(cbind(Kxx[yind1,yind1],Kxy[yind1,yind2]), cbind(t(Kxy[yind1,yind2]), Kyy[yind2,yind2])) dgyy <- diag(nKyy) hu <- hu + colSums(nKyy[1:newM,1:newM]) - dgyy[1:newM] rm(nKyy) ## Kxy nKxy <- rbind(cbind(Kxx[yind1,xind1],Kxy[yind1,xind2]), cbind(t(Kxy[xind1,yind2]),Kyy[yind2,xind2])) dg <- diag(nKxy) hu <- hu - colSums(nKxy[1:newM,1:newM]) - colSums(t(nKxy[1:newM,1:newM])) + 2*dg rm(nKxy) ## now calculate mmd3 bootmmd3[i] <- sum(hu)/newM/(newM-1) } bootmmd3 <- sort(bootmmd3, decreasing=TRUE); aind <- floor(alpha*ntimes) ## better less than too much (-> floor); ## take threshold in between aind and the next smaller value: bound <- sum(bootmmd3[c(aind,aind+1)])/2; return(bound) } setMethod("show","kmmd", function(object){ cat("Kernel Maximum Mean Discrepancy object of class \"kmmd\"","\n","\n") show(kernelf(object)) if(is.logical(object@H0)){ cat("\n") cat("\n","H0 Hypothesis rejected : ", paste(H0(object))) cat("\n","Rademacher bound : ", paste(Radbound(object))) } cat("\n") if(Asymbound(object)!=0){ cat("\n","H0 Hypothesis rejected (based on Asymptotic bound): ", paste(AsympH0(object))) cat("\n","Asymptotic bound : ", paste(Asymbound(object))) } cat("\n","1st and 3rd order MMD Statistics : ", paste( mmdstats(object))) cat("\n") }) kernlab/R/lssvm.R0000644000175100001440000005505514221633137013412 0ustar hornikusers## reduced 
least squares support vector machines ## author : alexandros setGeneric("lssvm", function(x, ...) standardGeneric("lssvm")) setMethod("lssvm",signature(x="formula"), function (x, data=NULL, ..., subset, na.action = na.omit, scaled = TRUE){ cl <- match.call() m <- match.call(expand.dots = FALSE) if (is.matrix(eval(m$data, parent.frame()))) m$data <- as.data.frame(data) m$... <- NULL m$formula <- m$x m$x <- NULL m$scaled <- NULL m[[1L]] <- quote(stats::model.frame) m <- eval(m, parent.frame()) Terms <- attr(m, "terms") attr(Terms, "intercept") <- 0 ## no intercept x <- model.matrix(Terms, m) y <- model.extract(m, "response") if (length(scaled) == 1) scaled <- rep(scaled, ncol(x)) if (any(scaled)) { remove <- unique(c(which(labels(Terms) %in% names(attr(x, "contrasts"))), which(!scaled) ) ) scaled <- !attr(x, "assign") %in% remove } ret <- lssvm(x, y, scaled = scaled, ...) kcall(ret) <- cl attr(Terms,"intercept") <- 0 ## no intercept terms(ret) <- Terms if (!is.null(attr(m, "na.action"))) n.action(ret) <- attr(m, "na.action") return (ret) }) setMethod("lssvm",signature(x="vector"), function(x,...) { x <- t(t(x)) ret <- lssvm(x, ...) return(ret) }) setMethod("lssvm",signature(x="matrix"), function (x, y, scaled = TRUE, kernel = "rbfdot", kpar = "automatic", type = NULL, tau = 0.01, reduced = TRUE, tol = 0.0001, rank = floor(dim(x)[1]/3), delta = 40, ## prob.model = FALSE, cross = 0, fit = TRUE, ..., subset, na.action = na.omit) { ## subsetting and na-handling for matrices ret <- new("lssvm") if (!missing(subset)) x <- x[subset,] df <- unique(na.action(data.frame(y, x))) y <- df[,1] x <- as.matrix(df[,-1]) n.action(ret) <- na.action if(!is.null(type)) type(ret) <- match.arg(type,c("classification","regression")) if (is.null(type)) type(ret) <- if (is.factor(y)) "classification" else "regression" else type(ret) <- type ## scaling, subsetting, and NA handling x.scale <- y.scale <- NULL ## scaling if (length(scaled) == 1) scaled <- rep(scaled, ncol(x)) if (any(scaled)) { co <- !apply(x[,scaled, drop = FALSE], 2, var) if (any(co)) { scaled <- rep(FALSE, ncol(x)) warning(paste("Variable(s)", paste("`",colnames(x[,scaled, drop = FALSE])[co], "'", sep="", collapse=" and "), "constant. 
Cannot scale data.") ) } else { xtmp <- scale(x[,scaled]) x[,scaled] <- xtmp x.scale <- attributes(xtmp)[c("scaled:center","scaled:scale")] } } ncols <- ncol(x) m <- nrows <- nrow(x) if(is.character(kernel)){ kernel <- match.arg(kernel,c("rbfdot","polydot","tanhdot","vanilladot","laplacedot","besseldot","anovadot","splinedot","matrix")) if(kernel == "matrix") if(dim(x)[1]==dim(x)[2]) return(lssvm(as.kernelMatrix(x), y = y,type = NULL, tau = 0.01, tol = 0.0001, rank = floor(dim(x)[1]/3), delta = 40, cross = 0, fit = TRUE, ...)) else stop(" kernel matrix not square!") if(is.character(kpar)) if((kernel == "tanhdot" || kernel == "vanilladot" || kernel == "polydot"|| kernel == "besseldot" || kernel== "anovadot"|| kernel=="splinedot") && kpar=="automatic" ) { cat (" Setting default kernel parameters ","\n") kpar <- list() } } if (!is.function(kernel)) if (!is.list(kpar)&&is.character(kpar)&&(is(kernel, "rbfkernel") || is(kernel, "laplacedot") || kernel == "laplacedot"|| kernel=="rbfdot")){ kp <- match.arg(kpar,"automatic") if(kp=="automatic") kpar <- list(sigma=mean(sigest(x,scaled=FALSE)[c(1,3)])) cat("Using automatic sigma estimation (sigest) for RBF or laplace kernel","\n") } if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") if(type(ret)=="classification") { if (!is.vector(y) && !is.factor (y)) stop("y must be a vector or a factor.") if(is(y,"vector")) { y <- as.matrix(y) if (nrows != nrow(y)) stop("x and y don't match.") } if (is.factor(y)) { lev(ret) <- levels (y) y <- as.integer (y) if (nrows != length(y)) stop("x and y don't match.") } else if (is.numeric(y)) { y <- as.integer(y) lev(ret) <- unique (y) } else stop ("dependent variable has to be of factor or integer type for classification mode.") ## initialize nclass(ret) <- length (unique(y)) p <- 0 svindex <- NULL ## create multidimensional y matrix yind <- t(matrix(1:nclass(ret),nclass(ret),m)) ymat <- matrix(0, m, nclass(ret)) ymat[yind==y] <- 1 if(reduced == FALSE) { K <- kernelMatrix(kernel,x) KP <- K - (1/m)*colSums(K) beta <- solve((KP%*%K + m * tau * K), KP%*%ymat) b <- colMeans(ymat) - colMeans(K%*%beta) alphaindex(ret) <- 1:m } else { G <- csi(x, ymat, rank = rank ,kernel= kernel, delta = delta , tol = tol) rep <- sort(pivots(G),index.return=TRUE)$ix G <- G[rep,] GtP <- t(G) - matrix(rowSums(t(G))/dim(G)[1],dim(G)[2],dim(G)[1]) Gtalpha <- (GtP)%*%G diag(Gtalpha) <- diag(Gtalpha) + tau Gtalpha <- solve(Gtalpha) %*% GtP %*% ymat[rep,,drop=FALSE] beta <- solve(t(G[1:dim(G)[2],]), Gtalpha) b <- colMeans(ymat) - colMeans(G%*%Gtalpha) alphaindex(ret) <- rep[1:dim(G)[2]] } alpha(ret) <- beta ## nonzero alpha*y coef(ret) <- alpha(ret) ## store SV indexes from current problem for later use in predict ## save the indexes from all the SV in a vector (use unique?) svindex <- alphaindex(ret) ## store betas in a vector b(ret) <- b ##store C in return object param(ret)$tau <- tau ## calculate class prob. ## if (prob.model& reduced== TRUE) # warning("Class Probapilities not supported for reduced model.) 
## if(prob.model & reduced == FALSE) ## { ## pos <- as.vector(ymat)==1 ## neg <- as.vector(ymat)==-1 ## ones <- rep(1,dim(x)[1]) ## onesneg <- ones[pos] <- 0 ## ones <- rep(1,dim(x)[1]) ## onespos <- ones[neg] <- 0 ##Kpos <- kernelMult(kernel,x,x[pos,],rep(1,sum(pos))) ##Kneg <- kernelMult(kernel,x,x[neg,],rep(1,sum(neg))) ## Kpos <- K[,pos]%*%rep(1,sum(pos)) ## Kneg <- K[,neg]%*%rep(1,sum(neg)) ## classmeans <- c(sum( Kpos * coef(ret)[pos] * as.vector(ymat)[pos]),sum( Kneg * coef(ret)[pos] * as.vector(ymat)[pos])) ## kneg <- K%*%onesneg ## kpos <- K%*%onespos ## M <- (diag(dim(x)[1])- (1/dim(x)[1])*rep(1,dim(x)[1])%*%t(rep(1,dim(x)[1]))) ## kcentered <- M%*%solve(diag(dim(x)[1]) - tau*M%*%K%*%M)%*%M ## prob.model(ret) <- list(Kpos=Kpos, Kneg=Kneg, kcentered=kcentered, classmeans=classmeans) ## } } if(type(ret)=="regression") { if (nrows != nrow(x)) stop("x and y don't match.") ## initialize p <- 0 svindex <- NULL ymat <- y G <- csi(x, ymat, rank = rank ,kernel= kernel, delta = delta , tol = tol) GtP <- t(G) - matrix(rowSums(t(G))/dim(G)[1],dim(G)[2],dim(G)[1]) Gtalpha <- (GtP)%*%G diag(Gtalpha) <- diag(Gtalpha) + tau Gtalpha <- solve(Gtalpha) %*% GtP %*% ymat beta <- solve(t(G[1:dim(G)[2],]), Gtalpha) b <- colMeans(ymat) - colMeans(G%*%Gtalpha) alpha(ret) <- beta ## nonzero alpha*y coef(ret) <- alpha(ret) ## store SV indexes from current problem for later use in predict alphaindex(ret) <- pivots(G)[1:dim(G)[2]] ## save the indexes from all the SV in a vector (use unique?) svindex <- alphaindex(ret) ## store betas in a vector b(ret) <- b ##store C in return object param(ret)$tau <- tau } kcall(ret) <- match.call() kernelf(ret) <- kernel ## param(ret) <- list(C=C, nu = nu, epsilon = epsilon) xmatrix(ret) <- x[alphaindex(ret),,drop = FALSE] ymatrix(ret) <- y nSV(ret) <- length(svindex) if(nSV(ret)==0) stop("No Support Vectors found. You may want to change your parameters") fitted(ret) <- if (fit) predict(ret, x) else NA scaling(ret) <- list(scaled = scaled, x.scale = x.scale) if (fit){ if(type(ret)=="classification") error(ret) <- 1 - .classAgreement(table(y,as.integer(fitted(ret)))) if(type(ret)=="regression") error(ret) <- drop(crossprod(fitted(ret) - y)/m) } cross(ret) <- -1 if(cross == 1) cat("\n","cross should be >1 no cross-validation done!","\n","\n") else if (cross > 1) { cerror <- 0 suppressWarnings(vgr<-split(sample(1:m,m),1:cross)) for(i in 1:cross) { cind <- unsplit(vgr[-i],factor(rep((1:cross)[-i],unlist(lapply(vgr[-i],length))))) cret <- lssvm(x[cind,],y[cind],type = type(ret),kernel=kernel,kpar = NULL,reduced = reduced, tau=tau, tol=tol, rank = floor(rank/cross), delta = floor(delta/cross), scaled=FALSE, cross = 0, fit = FALSE) cres <- predict(cret, x[vgr[[i]],]) cerror <- (1 - .classAgreement(table(y[vgr[[i]]],as.integer(cres))))/cross + cerror } cross(ret) <- cerror } return(ret) }) ## kernelMatrix interface setMethod("lssvm",signature(x="kernelMatrix"), function (x, y, type = NULL, tau = 0.01, tol = 0.0001, rank = floor(dim(x)[1]/3), delta = 40, cross = 0, fit = TRUE, ...) 
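## A minimal usage sketch for the kernelMatrix method below (hypothetical
## data: assumes a precomputed RBF kernel on iris, which is not part of this
## file). For prediction, pass the kernel columns of the training points
## kept in alphaindex():
##   K <- kernelMatrix(rbfdot(sigma = 0.1), as.matrix(iris[, -5]))
##   fit <- lssvm(K, iris[, 5])
##   predict(fit, as.kernelMatrix(K[, alphaindex(fit), drop = FALSE]))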
{ ## subsetting and na-handling for matrices ret <- new("lssvm") if(!is.null(type)) type(ret) <- match.arg(type,c("classification","regression")) if (is.null(type)) type(ret) <- if (is.factor(y)) "classification" else "regression" else type(ret) <- type ncols <- ncol(x) m <- nrows <- nrow(x) if(type(ret)=="classification") { if (!is.vector(y) && !is.factor (y)) stop("y must be a vector or a factor.") if (is(y,"vector")) { y <- as.matrix(y) if (nrows != nrow(y)) stop("x and y don't match.")} if (is.factor(y)) { lev(ret) <- levels (y) y <- as.integer (y) if (nrows != length(y)) stop("x and y don't match.") } else if (is.numeric(y)) { y <- as.integer(y) lev(ret) <- unique (y) } else stop ("dependent variable has to be of factor or integer type for classification mode.") ## initialize nclass(ret) <- length (unique(y)) p <- 0 svindex <- NULL ## create multidimensional y matrix yind <- t(matrix(1:nclass(ret),nclass(ret),m)) ymat <- matrix(0, m, nclass(ret)) ymat[yind==y] <- 1 KP <- x - (1/m)*colSums(x) beta <- solve((KP%*%x + m * tau * x), KP%*%ymat) b <- colMeans(ymat) - colMeans(x%*%beta) alphaindex(ret) <- 1:m alpha(ret) <- beta ## nonzero alpha*y coef(ret) <- alpha(ret) ## store SV indexes from current problem for later use in predict ## save the indexes from all the SV in a vector (use unique?) svindex <- alphaindex(ret) ## store betas in a vector b(ret) <- b ##store C in return object param(ret)$tau <- tau } if(type(ret)=="regression") { if (nrows != nrow(x)) stop("x and y don't match.") ## initialize p <- 0 svindex <- NULL ymat <- y G <- csi(x, ymat, rank = rank , delta = delta , tol = tol) GtP <- t(G) - matrix(rowSums(t(G))/dim(G)[1],dim(G)[2],dim(G)[1]) Gtalpha <- (GtP)%*%G diag(Gtalpha) <- diag(Gtalpha) + tau Gtalpha <- solve(Gtalpha) %*% GtP %*% ymat[pivots(G),,drop=FALSE] beta <- solve(t(G[1:dim(G)[2],]), Gtalpha) b <- colMeans(ymat) - colMeans(G%*%Gtalpha) alpha(ret) <- beta ## nonzero alpha*y coef(ret) <- alpha(ret) ## store SV indexes from current problem for later use in predict alphaindex(ret) <- pivots(G)[1:dim(G)[2]] ## save the indexes from all the SV in a vector (use unique?) svindex <- alphaindex(ret) ## store betas in a vector b(ret) <- b ##store C in return object param(ret)$tau <- tau } kcall(ret) <- match.call() ## param(ret) <- list(C=C, nu = nu, epsilon = epsilon) xmatrix(ret) <- x ymatrix(ret) <- y kernelf(ret) <- "Kernel matrix used for training." nSV(ret) <- length(svindex) if(nSV(ret)==0) stop("No Support Vectors found. 
You may want to change your parameters") fitted(ret) <- if (fit) predict(ret, x) else NA if (fit){ if(type(ret)=="classification") error(ret) <- 1 - .classAgreement(table(y,as.integer(fitted(ret)))) if(type(ret)=="regression") error(ret) <- drop(crossprod(fitted(ret) - y)/m) } cross(ret) <- -1 if(cross == 1) cat("\n","cross should be >1 no cross-validation done!","\n","\n") else if (cross > 1) { cerror <- 0 suppressWarnings(vgr<-split(sample(1:m,m),1:cross)) for(i in 1:cross) { cind <- unsplit(vgr[-i],factor(rep((1:cross)[-i],unlist(lapply(vgr[-i],length))))) cret <- lssvm(x[cind,cind],y[cind],type = type(ret), tau=tau, rank = floor(rank/cross), delta = floor(delta/cross), cross = 0, fit = FALSE) cres <- predict(cret, as.kernelMatrix(x[vgr[[i]], cind,drop = FALSE][,svindex,drop=FALSE])) cerror <- (1 - .classAgreement(table(y[vgr[[i]]],as.integer(cres))))/cross + cerror } cross(ret) <- cerror } return(ret) }) ## list interface setMethod("lssvm",signature(x="list"), function (x, y, scaled = TRUE, kernel = "stringdot", kpar = list(length=4, lambda = 0.5), type = NULL, tau = 0.01, reduced = TRUE, tol = 0.0001, rank = floor(dim(x)[1]/3), delta = 40, cross = 0, fit = TRUE, ..., subset) { ## subsetting and na-handling for matrices ret <- new("lssvm") if (!missing(subset)) x <- x[subset] if(!is.null(type)) type(ret) <- match.arg(type,c("classification","regression")) if (is.null(type)) type(ret) <- if (is.factor(y)) "classification" else "regression" else type(ret) <- type m <- nrows <- length(x) if(is.character(kernel)){ kernel <- match.arg(kernel,c("rbfdot","polydot","tanhdot","vanilladot","laplacedot","besseldot","anovadot","splinedot","stringdot")) if(is.character(kpar)) if(kernel == "tanhdot" || kernel == "vanilladot" || kernel == "polydot"|| kernel == "besseldot" || kernel== "anovadot"|| kernel=="splinedot" || kernel == "rbfdot" || kernel == "laplacedot" ) { stop("List interface supports only the stringdot kernel.") } } if(is(kernel,"kernel")) if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") if(type(ret)=="classification") { if (!is.vector(y) && !is.factor (y)) stop("y must be a vector or a factor.") if (nrows != nrow(x)) stop("x and y don't match.") if (is.factor(y)) { lev(ret) <- levels (y) y <- as.integer (y) } else if (is.numeric(y)) { y <- as.integer(y) lev(ret) <- unique (y) } else stop ("dependent variable has to be of factor or integer type for classification mode.") ## initialize nclass(ret) <- length (unique(y)) p <- 0 svindex <- NULL ## create multidimensional y matrix yind <- t(matrix(1:nclass(ret),nclass(ret),m)) ymat <- matrix(0, m, nclass(ret)) ymat[yind==y] <- 1 if(reduced == FALSE) { K <- kernelMatrix(kernel,x) KP <- K - (1/m)*colSums(K) beta <- solve((KP%*%K + m * tau * K), KP%*%ymat) b <- colMeans(ymat) - colMeans(K%*%beta) alphaindex(ret) <- 1:m } else { G <- csi(x, ymat, rank = rank ,kernel= kernel, delta = delta , tol = tol) GtP <- t(G) - matrix(rowSums(t(G))/dim(G)[1],dim(G)[2],dim(G)[1]) Gtalpha <- (GtP)%*%G diag(Gtalpha) <- diag(Gtalpha) + tau Gtalpha <- solve(Gtalpha) %*% GtP %*% ymat[pivots(G),,drop=FALSE] beta <- solve(t(G[1:dim(G)[2],]), Gtalpha) b <- colMeans(ymat) - colMeans(G%*%Gtalpha) alphaindex(ret) <- pivots(G)[1:dim(G)[2]] } alpha(ret) <- beta ## nonzero alpha*y coef(ret) <- alpha(ret) ## store SV indexes from current problem for later use in predict ## save the indexes from all the SV in a vector (use 
unique?) svindex <- alphaindex(ret) ## store betas in a vector b(ret) <- b ##store C in return object param(ret)$tau <- tau } if(type(ret)=="regression") { if (nrows != nrow(x)) stop("x and y don't match.") ## initialize p <- 0 svindex <- NULL ymat <- y G <- csi(x, ymat, rank = rank ,kernel= kernel, delta = delta , tol = tol) GtP <- t(G) - matrix(rowSums(t(G))/dim(G)[1],dim(G)[2],dim(G)[1]) Gtalpha <- (GtP)%*%G diag(Gtalpha) <- diag(Gtalpha) + tau Gtalpha <- solve(Gtalpha) %*% GtP %*% ymat[pivots(G),,drop=FALSE] beta <- solve(t(G[1:dim(G)[2],]), Gtalpha) b <- colMeans(ymat) - colMeans(G%*%Gtalpha) alpha(ret) <- beta ## nonzero alpha*y coef(ret) <- alpha(ret) ## store SV indexes from current problem for later use in predict alphaindex(ret) <- pivots(G)[1:dim(G)[2]] ## save the indexes from all the SV in a vector (use unique?) svindex <- alphaindex(ret) ## store betas in a vector b(ret) <- b ##store C in return object param(ret)$tau <- tau } kcall(ret) <- match.call() kernelf(ret) <- kernel ## param(ret) <- list(C=C, nu = nu, epsilon = epsilon) xmatrix(ret) <- x[alphaindex(ret)] ymatrix(ret) <- y SVindex(ret) <- svindex nSV(ret) <- length(svindex) if(nSV(ret)==0) stop("No Support Vectors found. You may want to change your parameters") fitted(ret) <- if (fit) predict(ret, x) else NA if (fit){ if(type(ret)=="classification") error(ret) <- 1 - .classAgreement(table(y,as.integer(fitted(ret)))) if(type(ret)=="regression") error(ret) <- drop(crossprod(fitted(ret) - y)/m) } cross(ret) <- -1 if(cross == 1) cat("\n","cross should be >1 no cross-validation done!","\n","\n") else if (cross > 1) { cerror <- 0 suppressWarnings(vgr<-split(sample(1:m,m),1:cross)) for(i in 1:cross) { cind <- unsplit(vgr[-i],factor(rep((1:cross)[-i],unlist(lapply(vgr[-i],length))))) cret <- lssvm(x[cind,],y[cind],type = type(ret),kernel=kernel,kpar = NULL,reduced = reduced, tau=tau, tol=tol, rank = floor(rank/cross), delta = floor(delta/cross), scaled=FALSE, cross = 0, fit = FALSE ) cres <- predict(cret, x[vgr[[i]],]) cerror <- (1 - .classAgreement(table(y[vgr[[i]]],as.integer(cres))))/cross + cerror } cross(ret) <- cerror } return(ret) }) #**************************************************************# setMethod("predict", signature(object = "lssvm"), function (object, newdata, type = "response", coupler = "minpair") { sc <- 0 type <- match.arg(type,c("response","probabilities","decision")) if (missing(newdata) && type!="response") return(fitted(object)) else if(missing(newdata)) { newdata <- xmatrix(object) sc <- 1 } ncols <- ncol(xmatrix(object)) nrows <- nrow(xmatrix(object)) oldco <- ncols if (!is.null(terms(object))) { if(!is.matrix(newdata)) newdata <- model.matrix(delete.response(terms(object)), as.data.frame(newdata), na.action = n.action(object)) } else newdata <- if (is.vector(newdata)) t(t(newdata)) else as.matrix(newdata) newcols <- 0 newnrows <- nrow(newdata) newncols <- ncol(newdata) newco <- newncols if (oldco != newco) stop ("test vector does not match model !") p<-0 if (!is.null(scaling(object)$x.scale) && sc != 1) newdata[,scaling(object)$scaled] <- scale(newdata[,scaling(object)$scaled, drop = FALSE], center = scaling(object)$x.scale$"scaled:center", scale = scaling(object)$x.scale$"scaled:scale" ) if(is(newdata,"kernelMatrix")) res <- newdata %*% coef(object) - b(object) else res <- t(t(kernelMult(kernelf(object), newdata,xmatrix(object), alpha(object))) + b(object)) if(type == "response" && type(object)=="classification"){ predres <- max.col(res) return(factor (lev(object)[predres], levels = 
lev(object))) } if (type == "decision" || type(object)=="regression") return(res) if (type =="probabilities" && type(object)=="classification") { res <- res - prob.model(object)$classmeans return(res) } }) #****************************************************************************************# setMethod("show","lssvm", function(object){ cat("Least Squares Support Vector Machine object of class \"lssvm\"","\n") cat("\n") cat(paste("problem type :",type(object), "\n")) cat(paste(" parameter : tau =",param(object)$tau, "\n")) cat("\n") show(kernelf(object)) cat(paste("\nNumber of data points used for training :", nSV(object),"\n")) if(!is.null(fitted(object))) cat(paste("Training error :", round(error(object),6),"\n")) if(cross(object)!= -1) cat("Cross validation error :",round(cross(object),6),"\n") }) ##.partopro <- function(z,s,m){ ##return(2*pi*(1/sqrt((1/z)+s^2))*exp(-(m^2)/(2*((1/z)+s^2)))) ##} kernlab/R/kcca.R0000644000175100001440000000451012105726255013140 0ustar hornikusers## Simple kernel canonical correlation analysis ## author: alexandros karatzoglou setGeneric("kcca",function(x, y, kernel="rbfdot", kpar=list(sigma = 0.1), gamma=0.1, ncomps = 10, ...) standardGeneric("kcca")) setMethod("kcca", signature(x = "matrix"), function(x,y,kernel="rbfdot",kpar=list(sigma=0.1), gamma=0.1, ncomps =10, ...) { x <- as.matrix(x) y <- as.matrix(y) if(!(nrow(x)==nrow(y))) stop("Number of rows in the x and y matrices is not equal") if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") Kx <- kernelMatrix(kernel,x) Ky <- kernelMatrix(kernel,y) n <- dim(Kx)[1] m <- 2 ## Generate LH VK <- matrix(0,n*2,n); VK[1:n,] <- Kx VK[(n+1):(2*n),] <- Ky LH <- tcrossprod(VK, VK) for (i in 1:m) LH[((i-1)*n+1):(i*n),((i-1)*n+1):(i*n)] <- 0 ## Generate RH RH <- matrix(0,n*m,n*m) RH[1:n,1:n] <- (Kx + diag(rep(gamma,n)))%*%Kx + diag(rep(1e-6,n)) RH[(n+1):(2*n),(n+1):(2*n)] <- (Ky + diag(rep(gamma,n)))%*%Ky + diag(rep(1e-6,n)) RH <- (RH+t(RH))/2 ei <- .gevd(LH,RH) ret <- new("kcca") kcor(ret) <- as.double(ei$gvalues[1:ncomps]) xcoef(ret) <- matrix(as.double(ei$gvectors[1:n,1:ncomps]),n) ycoef(ret) <- matrix(as.double(ei$gvectors[(n+1):(2*n),1:ncomps]),n) ## xvar(ret) <- rotated(xpca) %*% cca$xcoef ## yvar(ret) <- rotated(ypca) %*% cca$ycoef return(ret) }) ## gevd computes the generalized eigenvalue ## decomposition for (a,b) .gevd<-function(a,b=diag(nrow(a))) { bs<-.mfunc(b,function(x) .ginvx(sqrt(x))) ev<-eigen(bs%*%a%*%bs) return(list(gvalues=ev$values,gvectors=bs%*%ev$vectors)) } ## mfunc is a helper to compute matrix functions .mfunc<-function(a,fn=sqrt) { e<-eigen(a); y<-e$vectors; v<-e$values return(tcrossprod(y%*%diag(fn(v)),y)) } ## ginvx is a helper to compute reciprocals .ginvx<-function(x) {ifelse(x==0,0,1/x)} kernlab/R/rvm.R0000644000175100001440000004145014221633213013037 0ustar hornikusers## relevance vector machine ## author : alexandros setGeneric("rvm", function(x, ...) standardGeneric("rvm")) setMethod("rvm",signature(x="formula"), function (x, data=NULL, ..., subset, na.action = na.omit){ cl <- match.call() m <- match.call(expand.dots = FALSE) if (is.matrix(eval(m$data, parent.frame()))) m$data <- as.data.frame(data) m$... <- NULL m$formula <- m$x m$x <- NULL m[[1L]] <- quote(stats::model.frame) m <- eval(m, parent.frame()) Terms <- attr(m, "terms") attr(Terms, "intercept") <- 0 x <- model.matrix(Terms, m) y <- model.extract(m, "response") ret <- rvm(x, y, ...)
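## A usage sketch for the formula interface (simulated sinc data, a
## hypothetical example; rvm currently supports regression only, and
## na.omit drops the NaN produced at x = 0):
##   xs <- seq(-20, 20, 0.1)
##   df <- data.frame(x = xs, y = sin(xs)/xs + rnorm(length(xs), sd = 0.05))
##   foo <- rvm(y ~ x, data = df, kernel = "rbfdot", kpar = list(sigma = 0.1))
##   ytest <- predict(foo, df)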
kcall(ret) <- cl terms(ret) <- Terms if (!is.null(attr(m, "na.action"))) n.action(ret) <- attr(m, "na.action") return (ret) }) setMethod("rvm",signature(x="vector"), function(x,...) { x <- t(t(x)) ret <- rvm(x, ...) ret }) setMethod("rvm",signature(x="list"), function (x, y, type = "regression", kernel = "stringdot", kpar = list(length = 4, lambda = 0.5), alpha = 5, var = 0.1, # variance var.fix = FALSE, # fixed variance? iterations = 100, # no. of iterations verbosity = 0, tol = .Machine$double.eps, minmaxdiff = 1e-3, cross = 0, fit = TRUE, ... ,subset ,na.action = na.omit) { if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") K <- kernelMatrix(kernel,x) ret <- rvm(x=K, y=y, kernel=kernel, alpha = alpha, var= var, var.fix = var.fix, iterations = iterations, verbosity = verbosity, tol = tol, minmaxdiff=minmaxdiff,cross=cross,fit=fit, na.action=na.action) kernelf(ret) <- kernel xmatrix(ret) <- x return(ret) }) setMethod("rvm",signature(x="matrix"), function (x, y, type = "regression", kernel = "rbfdot", kpar = "automatic", alpha = ncol(as.matrix(x)), var = 0.1, # variance var.fix = FALSE, # fixed variance? iterations = 100, # no. of iterations verbosity = 0, tol = .Machine$double.eps, minmaxdiff = 1e-3, cross = 0, fit = TRUE, ... ,subset ,na.action = na.omit) { ## subsetting and na-handling for matrices ret <- new("rvm") if (!missing(subset)) x <- x[subset,] if (is.null(y)) x <- na.action(x) else { df <- na.action(data.frame(y, x)) y <- df[,1] x <- as.matrix(df[,-1]) } ncols <- ncol(x) m <- nrows <- nrow(x) if (is.null (type)) type(ret) <- if (is.factor(y)) "classification" else "regression" else type(ret) <- "regression" # in case of classification: transform factors into integers if (is.factor(y)) { stop("classification not supported with rvm, you can use ksvm(), lssvm() or gausspr()") } else { if (type(ret) == "classification" && any(as.integer (y) != y)) stop ("classification not supported with rvm, you can use ksvm(), lssvm() or gausspr()") if(type(ret) == "classification") lev(ret) <- unique (y) } # initialize nclass(ret) <- length (lev(ret)) if(!is.null(type)) type(ret) <- match.arg(type,c("classification", "regression")) if(is.character(kernel)){ kernel <- match.arg(kernel,c("rbfdot","polydot","tanhdot","vanilladot","laplacedot","besseldot","anovadot","splinedot","matrix")) if(kernel == "matrix") if(dim(x)[1]==dim(x)[2]) return(rvm(as.kernelMatrix(x), y = y,type = type, alpha = alpha, var = var, # variance var.fix = var.fix, # fixed variance? iterations = iterations, # no. 
of iterations verbosity = verbosity, tol = tol, minmaxdiff = minmaxdiff, cross = cross, fit = fit ,subset ,na.action = na.omit, ...)) else stop(" kernel matrix not square!") if(is.character(kpar)) if((kernel == "tanhdot" || kernel == "vanilladot" || kernel == "polydot"|| kernel == "besseldot" || kernel== "anovadot"|| kernel=="splinedot") && kpar=="automatic" ) { cat (" Setting default kernel parameters ","\n") kpar <- list() } } if (!is.function(kernel)) if (!is.list(kpar)&&is.character(kpar)&&(is(kernel, "rbfkernel") || is(kernel, "laplacedot") || kernel == "laplacedot"|| kernel=="rbfdot")){ kp <- match.arg(kpar,"automatic") if(kp=="automatic") kpar <- list(sigma=mean(sigest(x,scaled=FALSE)[c(1,3)])) cat("Using automatic sigma estimation (sigest) for RBF or laplace kernel","\n") } if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") if(length(alpha) == m) thetavec <- 1/alpha else if (length(alpha) == 1) thetavec <- rep(1/alpha, m) else stop("length of initial alpha vector is wrong (has to be one or equal with number of train data") wvec <- rep(1, m) piter <- iterations*0.4 if (type(ret) == "regression") { K <- kernelMatrix(kernel, x) diag(K) <- diag(K)+ 10e-7 Kml <- crossprod(K, y) for (i in 1:iterations) { nzindex <- thetavec > tol thetavec [!nzindex] <- wvec [!nzindex] <- 0 Kr <- K [ ,nzindex, drop = FALSE] thetatmp <- thetavec[nzindex] n <- sum (nzindex) Rinv <- backsolve(chol(crossprod(Kr)/var + diag(1/thetatmp)),diag(1,n)) ## compute the new wvec coefficients wvec [nzindex] <- (Rinv %*% (crossprod(Rinv, Kml [nzindex])))/var diagSigma <- rowSums(Rinv^2) ## error err <- sum ((y - Kr %*% wvec [nzindex])^2) if(var < 2e-9) { warning("Model might be overfitted") break } ## log some information if (verbosity > 0) { log.det.Sigma.inv <- - 2 * sum (log (diag (Rinv))) ## compute the marginal likelihood to monitor convergence mlike <- -1/2 * (log.det.Sigma.inv + sum (log (thetatmp)) + m * log (var) + 1/var * err + (wvec [nzindex]^2) %*% (1/thetatmp)) cat ("Marg. 
Likelihood =", formatC (mlike), "\tnRV=", n, "\tvar=", var, "\n") } ## compute zeta zeta <- 1 - diagSigma / thetatmp ## compute logtheta for convergence checking logtheta <- - log(thetavec[nzindex]) ## update thetavec if(i < piter){ thetavec [nzindex] <- wvec [nzindex]^2 / zeta thetavec [thetavec <= 0] <- 0 } else{ thetavec [nzindex] <- (wvec [nzindex]^2/zeta - diagSigma)/zeta thetavec [thetavec <= 0] <- 0 } ## Stop if largest alpha change is too small maxdiff <- max(abs(logtheta[thetavec[which(nzindex)]!=0] + log(thetavec[thetavec!=0]))) if(maxdiff < minmaxdiff) break; ## update variance if (!var.fix) { var <- err / (m - sum (zeta)) } } if(verbosity == 0) mlike(ret) <- drop(-1/2 * (-2*sum(log(diag(Rinv))) + sum (log (thetatmp)) + m * log (var) + 1/var * err + (wvec [nzindex]^2) %*% (1/thetatmp))) nvar(ret) <- var error(ret) <- sqrt(err/m) if(fit) fitted(ret) <- Kr %*% wvec [nzindex] } if(type(ret)=="classification") { stop("classification with the relevance vector machine not implemented yet") } kcall(ret) <- match.call() kernelf(ret) <- kernel alpha(ret) <- wvec[nzindex] tol(ret) <- tol xmatrix(ret) <- x ymatrix(ret) <- y RVindex(ret) <- which(nzindex) nRV(ret) <- length(RVindex(ret)) if (fit){ if(type(ret)=="classification") error(ret) <- 1 - .classAgreement(table(y,as.integer(fitted(ret)))) if(type(ret)=="regression") error(ret) <- drop(crossprod(fitted(ret) - y)/m) } cross(ret) <- -1 if(cross!=0) { cerror <- 0 suppressWarnings(vgr<-split(sample(1:m,m),1:cross)) for(i in 1:cross) { cind <- unsplit(vgr[-i],factor(rep((1:cross)[-i],unlist(lapply(vgr[-i],length))))) if(type(ret)=="classification") { cret <- rvm(x[cind,],factor (lev(ret)[y[cind]], levels = lev(ret)),type=type(ret),kernel=kernel,alpha = alpha,var = var, var.fix=var.fix, tol=tol, cross = 0, fit = FALSE) cres <- predict(cret, x[vgr[[i]],]) cerror <- (1 - .classAgreement(table(y[vgr[[i]]],as.integer(cres))))/cross + cerror } if(type(ret)=="regression") { cret <- rvm(x[cind,],y[cind],type=type(ret),kernel=kernel,tol=tol,alpha = alpha, var = var, var.fix=var.fix, cross = 0, fit = FALSE) cres <- predict(cret, x[vgr[[i]],]) cerror <- drop(crossprod(cres - y[vgr[[i]]])/m) + cerror } } cross(ret) <- cerror } return(ret) }) setMethod("rvm",signature(x="kernelMatrix"), function (x, y, type = "regression", alpha = ncol(as.matrix(x)), var = 0.1, # variance var.fix = FALSE, # fixed variance? iterations = 100, # no. of iterations verbosity = 0, tol = .Machine$double.eps, minmaxdiff = 1e-3, cross = 0, fit = TRUE, ... 
,subset ) { ## subsetting and na-handling for matrices ret <- new("rvm") if (!missing(subset)) x <- as.kernelMatrix(x[subset,subset]) if (is.null(y)) stop("response y missing") ncols <- ncol(x) m <- nrows <- nrow(x) if (is.null (type)) type(ret) <- if (is.factor(y)) "classification" else "regression" else type(ret) <- "regression" # in case of classification: transform factors into integers if (is.factor(y)) { stop("Classification is not implemented, you can use ksvm(), gausspr() or lssvm()") } else { if (type(ret) == "classification" && any(as.integer (y) != y)) stop ("dependent variable has to be of factor or integer type for classification mode.") if(type(ret) == "classification") lev(ret) <- unique (y) } # initialize nclass(ret) <- length (lev(ret)) if(!is.null(type)) type(ret) <- match.arg(type,c("classification", "regression")) if(length(alpha) == m) thetavec <- 1/alpha else if (length(alpha) == 1) thetavec <- rep(1/alpha, m) else stop("length of initial alpha vector is wrong (has to be one or equal to the number of training data)") wvec <- rep(1, m) piter <- iterations*0.4 if (type(ret) == "regression") { Kml <- crossprod(x, y) for (i in 1:iterations) { nzindex <- thetavec > tol thetavec [!nzindex] <- wvec [!nzindex] <- 0 Kr <- x [ ,nzindex, drop = FALSE] thetatmp <- thetavec[nzindex] n <- sum (nzindex) Rinv <- backsolve(chol(crossprod(Kr)/var + diag(1/thetatmp)),diag(1,n)) ## compute the new wvec coefficients wvec [nzindex] <- (Rinv %*% (crossprod(Rinv, Kml [nzindex])))/var diagSigma <- rowSums(Rinv^2) ## error err <- sum ((y - Kr %*% wvec [nzindex])^2) if(var < 2e-9) { warning("Model might be overfitted") break } ## log some information if (verbosity > 0) { log.det.Sigma.inv <- - 2 * sum (log (diag (Rinv))) ## compute the marginal likelihood to monitor convergence mlike <- -1/2 * (log.det.Sigma.inv + sum (log (thetatmp)) + m * log (var) + 1/var * err + (wvec [nzindex]^2) %*% (1/thetatmp)) cat ("Marg. Likelihood =", formatC (mlike), "\tnRV=", n, "\tvar=", var, "\n") } ## compute zeta zeta <- 1 - diagSigma / thetatmp ## compute logtheta for convergence checking logtheta <- - log(thetavec[nzindex]) ## update thetavec if(i < piter){ thetavec [nzindex] <- wvec [nzindex]^2 / zeta thetavec [thetavec <= 0] <- 0 } else{ thetavec [nzindex] <- (wvec [nzindex]^2/zeta - diagSigma)/zeta thetavec [thetavec <= 0] <- 0 } ## Stop if largest alpha change is too small maxdiff <- max(abs(logtheta[thetavec[which(nzindex)]!=0] + log(thetavec[thetavec!=0]))) if(maxdiff < minmaxdiff) break; ## update variance if (!var.fix) { var <- err / (m - sum (zeta)) } } if(verbosity == 0) mlike(ret) <- drop(-1/2 * (-2*sum(log(diag(Rinv))) + sum (log (thetatmp)) + m * log (var) + 1/var * err + (wvec [nzindex]^2) %*% (1/thetatmp))) nvar(ret) <- var error(ret) <- sqrt(err/m) if(fit) fitted(ret) <- Kr %*% wvec [nzindex] } if(type(ret)=="classification") { stop("classification with the relevance vector machine not implemented yet") } kcall(ret) <- match.call() kernelf(ret) <- " Kernel Matrix used.
\n" coef(ret) <- alpha(ret) <- wvec[nzindex] tol(ret) <- tol xmatrix(ret) <- x ymatrix(ret) <- y RVindex(ret) <- which(nzindex) nRV(ret) <- length(RVindex(ret)) if (fit){ if(type(ret)=="classification") error(ret) <- 1 - .classAgreement(table(y,as.integer(fitted(ret)))) if(type(ret)=="regression") error(ret) <- drop(crossprod(fitted(ret) - y)/m) } cross(ret) <- -1 if(cross!=0) { cerror <- 0 suppressWarnings(vgr<-split(sample(1:m,m),1:cross)) for(i in 1:cross) { cind <- unsplit(vgr[-i],factor(rep((1:cross)[-i],unlist(lapply(vgr[-i],length))))) if(type(ret)=="classification") { cret <- rvm(as.kernelMatrix(x[cind,cind]),factor (lev(ret)[y[cind]], levels = lev(ret)),type=type(ret),alpha = alpha,var = var, var.fix=var.fix, tol=tol, cross = 0, fit = FALSE) cres <- predict(cret, as.kernelMatrix(x[vgr[[i]], cind][,RVindex(cret),drop=FALSE])) cerror <- (1 - .classAgreement(table(y[vgr[[i]]],as.integer(cres))))/cross + cerror } if(type(ret)=="regression") { cret <- rvm(as.kernelMatrix(x[cind,cind]),y[cind],type=type(ret),tol=tol,alpha = alpha, var = var, var.fix=var.fix, cross = 0, fit = FALSE) cres <- predict(cret, as.kernelMatrix(x[vgr[[i]], cind][,RVindex(cret),drop=FALSE])) cerror <- drop(crossprod(cres - y[vgr[[i]]])/m)/cross + cerror } } cross(ret) <- cerror } return(ret) }) setMethod("predict", signature(object = "rvm"), function (object, newdata, ...) { if (missing(newdata)) return(fitted(object)) if(!is(newdata,"kernelMatrix") && !is(newdata,"list")){ ncols <- ncol(xmatrix(object)) nrows <- nrow(xmatrix(object)) oldco <- ncols if (!is.null(terms(object))) { newdata <- model.matrix(delete.response(terms(object)), as.data.frame(newdata), na.action = na.action) } else newdata <- if (is.vector (newdata)) t(t(newdata)) else as.matrix(newdata) newcols <- 0 newnrows <- nrow(newdata) newncols <- ncol(newdata) newco <- newncols if (oldco != newco) stop ("test vector does not match model !") p<-0 } if(type(object) == "regression") { if(is(newdata,"kernelMatrix")) ret <- newdata %*% coef(object) - b(object) if(is(newdata,"list")) ret <- kernelMult(kernelf(object),newdata,xmatrix(object)[RVindex(object)],alpha(object)) else ret <- kernelMult(kernelf(object),newdata,as.matrix(xmatrix(object)[RVindex(object),,drop=FALSE]),alpha(object)) } ret }) setMethod("show","rvm", function(object){ cat("Relevance Vector Machine object of class \"rvm\"","\n") cat("Problem type: regression","\n","\n") show(kernelf(object)) cat(paste("\nNumber of Relevance Vectors :", nRV(object),"\n")) cat("Variance : ",round(nvar(object),9)) cat("\n") if(!is.null(fitted(object))) cat(paste("Training error :", round(error(object),9),"\n")) if(cross(object)!= -1) cat("Cross validation error :",round(cross(object),9),"\n") ##train error & loss }) kernlab/R/specc.R0000644000175100001440000002543412676465043013354 0ustar hornikusers## Spectral clustering ## author : alexandros setGeneric("specc",function(x, ...) standardGeneric("specc")) setMethod("specc", signature(x = "formula"), function(x, data = NULL, na.action = na.omit, ...) { mt <- terms(x, data = data) if(attr(mt, "response") > 0) stop("response not allowed in formula") attr(mt, "intercept") <- 0 cl <- match.call() mf <- match.call(expand.dots = FALSE) mf$formula <- mf$x mf$... <- NULL mf[[1L]] <- quote(stats::model.frame) mf <- eval(mf, parent.frame()) na.act <- attr(mf, "na.action") x <- model.matrix(mt, mf) res <- specc(x, ...) 
cl[[1]] <- as.name("specc") if(!is.null(na.act)) n.action(res) <- na.action return(res) }) setMethod("specc",signature(x="matrix"),function(x, centers, kernel = "rbfdot", kpar = "automatic", nystrom.red = FALSE, nystrom.sample = dim(x)[1]/6, iterations = 200, mod.sample = 0.75, na.action = na.omit, ...) { x <- na.action(x) rown <- rownames(x) x <- as.matrix(x) m <- nrow(x) if (missing(centers)) stop("centers must be a number or a matrix") if (length(centers) == 1) { nc <- centers if (m < centers) stop("more cluster centers than data points.") } else nc <- dim(centers)[2] if(is.character(kpar)) { kpar <- match.arg(kpar,c("automatic","local")) if(kpar == "automatic") { if (nystrom.red == TRUE) sam <- sample(1:m, floor(mod.sample*nystrom.sample)) else sam <- sample(1:m, floor(mod.sample*m)) sx <- unique(x[sam,]) ns <- dim(sx)[1] dota <- rowSums(sx*sx)/2 ktmp <- crossprod(t(sx)) for (i in 1:ns) ktmp[i,]<- 2*(-ktmp[i,] + dota + rep(dota[i], ns)) ## fix numerical prob. ktmp[ktmp<0] <- 0 ktmp <- sqrt(ktmp) kmax <- max(ktmp) kmin <- min(ktmp + diag(rep(Inf,dim(ktmp)[1]))) kmea <- mean(ktmp) lsmin <- log2(kmin) lsmax <- log2(kmax) midmax <- min(c(2*kmea, kmax)) midmin <- max(c(kmea/2,kmin)) rtmp <- c(seq(midmin,0.9*kmea,0.05*kmea), seq(kmea,midmax,0.08*kmea)) if ((lsmax - (Re(log2(midmax))+0.5)) < 0.5) step <- (lsmax - (Re(log2(midmax))+0.5)) else step <- 0.5 if (((Re(log2(midmin))-0.5)-lsmin) < 0.5 ) stepm <- ((Re(log2(midmin))-0.5) - lsmin) else stepm <- 0.5 tmpsig <- c(2^(seq(lsmin,(Re(log2(midmin))-0.5), stepm)), rtmp, 2^(seq(Re(log2(midmax))+0.5, lsmax,step))) diss <- matrix(rep(Inf,length(tmpsig)*nc),ncol=nc) for (i in 1:length(tmpsig)){ ka <- exp((-(ktmp^2))/(2*(tmpsig[i]^2))) diag(ka) <- 0 d <- 1/sqrt(rowSums(ka)) if(!any(d==Inf) && !any(is.na(d))&& (max(d)[1]-min(d)[1] < 10^4)) { l <- d * ka %*% diag(d) xi <- eigen(l,symmetric=TRUE)$vectors[,1:nc] yi <- xi/sqrt(rowSums(xi^2)) res <- kmeans(yi, centers, iterations) diss[i,] <- res$withinss } } ms <- which.min(rowSums(diss)) kernel <- rbfdot((tmpsig[ms]^(-2))/2) ## Compute Affinity Matrix if (nystrom.red == FALSE) km <- kernelMatrix(kernel, x) } if (kpar=="local") { if (nystrom.red == TRUE) stop ("Local Scaling not supported for nystrom reduction.") s <- rep(0,m) dota <- rowSums(x*x)/2 dis <- crossprod(t(x)) for (i in 1:m) dis[i,]<- 2*(-dis[i,] + dota + rep(dota[i],m)) ## fix numerical prob. 
dis[dis < 0] <- 0 for (i in 1:m) s[i] <- median(sort(sqrt(dis[i,]))[1:5]) ## Compute Affinity Matrix km <- exp(-dis / s%*%t(s)) kernel <- "Localy scaled RBF kernel" } } else { if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") ## Compute Affinity Matrix if (nystrom.red == FALSE) km <- kernelMatrix(kernel, x) } if (nystrom.red == TRUE){ n <- floor(nystrom.sample) ind <- sample(1:m, m) x <- x[ind,] tmps <- sort(ind, index.return = TRUE) reind <- tmps$ix A <- kernelMatrix(kernel, x[1:n,]) B <- kernelMatrix(kernel, x[-(1:n),], x[1:n,]) d1 <- colSums(rbind(A,B)) d2 <- rowSums(B) + drop(matrix(colSums(B),1) %*% .ginv(A)%*%t(B)) dhat <- sqrt(1/c(d1,d2)) A <- A * (dhat[1:n] %*% t(dhat[1:n])) B <- B * (dhat[(n+1):m] %*% t(dhat[1:n])) Asi <- .sqrtm(.ginv(A)) Q <- A + Asi %*% crossprod(B) %*% Asi tmpres <- svd(Q) U <- tmpres$u L <- tmpres$d V <- rbind(A,B) %*% Asi %*% U %*% .ginv(sqrt(diag(L))) yi <- matrix(0,m,nc) ## for(i in 2:(nc +1)) ## yi[,i-1] <- V[,i]/V[,1] for(i in 1:nc) ## specc yi[,i] <- V[,i]/sqrt(sum(V[,i]^2)) res <- kmeans(yi[reind,], centers, iterations) } else{ if(is(kernel)[1] == "rbfkernel") diag(km) <- 0 d <- 1/sqrt(rowSums(km)) l <- d * km %*% diag(d) xi <- eigen(l)$vectors[,1:nc] yi <- xi/sqrt(rowSums(xi^2)) res <- kmeans(yi, centers, iterations) } cent <- matrix(unlist(lapply(1:nc,ll<- function(l){colMeans(x[which(res$cluster==l), ,drop=FALSE])})),ncol=dim(x)[2], byrow=TRUE) withss <- unlist(lapply(1:nc,ll<- function(l){sum((x[which(res$cluster==l),, drop=FALSE] - cent[l,])^2)})) names(res$cluster) <- rown return(new("specc", .Data=res$cluster, size = res$size, centers=cent, withinss=withss, kernelf= kernel)) }) setMethod("specc",signature(x="list"),function(x, centers, kernel = "stringdot", kpar = list(length=4, lambda=0.5), nystrom.red = FALSE, nystrom.sample = length(x)/6, iterations = 200, mod.sample = 0.75, na.action = na.omit, ...) 
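## A usage sketch for the list interface below (toy strings, hypothetical
## data; documents are clustered with the default string kernel):
##   docs <- list("aaatgcc", "aaatgcg", "ttacctt", "ttaccta")
##   sc <- specc(docs, centers = 2, kernel = "stringdot",
##               kpar = list(length = 4, lambda = 0.5))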
{ x <- na.action(x) m <- length(x) if (missing(centers)) stop("centers must be a number or a matrix") if (length(centers) == 1) { nc <- centers if (m < centers) stop("more cluster centers than data points.") } else nc <- dim(centers)[2] if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") if (nystrom.red == TRUE){ n <- nystrom.sample ind <- sample(1:m, m) x <- x[ind,] tmps <- sort(ind, index.return = TRUE) reind <- tmps$ix A <- kernelMatrix(kernel, x[1:n,]) B <- kernelMatrix(kernel, x[-(1:n),], x[1:n,]) d1 <- colSums(rbind(A,B)) d2 <- rowSums(B) + drop(matrix(colSums(B),1) %*% .ginv(A)%*%t(B)) dhat <- sqrt(1/c(d1,d2)) A <- A * (dhat[1:n] %*% t(dhat[1:n])) B <- B * (dhat[(n+1):m] %*% t(dhat[1:n])) Asi <- .sqrtm(.ginv(A)) Q <- A + Asi %*% crossprod(B) %*% Asi tmpres <- svd(Q) U <- tmpres$u L <- tmpres$d V <- rbind(A,B) %*% Asi %*% U %*% .ginv(sqrt(diag(L))) yi <- matrix(0,m,nc) ## for(i in 2:(nc +1)) ## yi[,i-1] <- V[,i]/V[,1] for(i in 1:nc) ## specc yi[,i] <- V[,i]/sqrt(sum(V[,i]^2)) res <- kmeans(yi[reind,], centers, iterations) } else{ ## Compute Affinity Matrix / in our case just the kernel matrix km <- kernelMatrix(kernel, x) if(is(kernel)[1] == "rbfkernel") diag(km) <- 0 d <- 1/sqrt(rowSums(km)) l <- d * km %*% diag(d) xi <- eigen(l)$vectors[,1:nc] sqxi <- rowSums(xi^2) if(any(sqxi==0)) stop("Zero eigenvector elements, try using a lower value for the length hyper-parameter") yi <- xi/sqrt(sqxi) res <- kmeans(yi, centers, iterations) } return(new("specc", .Data=res$cluster, size = res$size, kernelf= kernel)) }) setMethod("specc",signature(x="kernelMatrix"),function(x, centers, nystrom.red = FALSE, iterations = 200, ...) 
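## A usage sketch for the kernelMatrix method below (assumes a positive
## affinity matrix, e.g. from an RBF kernel on the spirals data):
##   K <- kernelMatrix(rbfdot(sigma = 100), spirals)
##   sc <- specc(K, centers = 2)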
{ m <- nrow(x) if (missing(centers)) stop("centers must be a number or a matrix") if (length(centers) == 1) { nc <- centers if (m < centers) stop("more cluster centers than data points.") } else nc <- dim(centers)[2] if(dim(x)[1]!=dim(x)[2]) { nystrom.red <- TRUE if(dim(x)[1] < dim(x)[2]) x <- t(x) m <- nrow(x) n <- ncol(x) } if (nystrom.red == TRUE){ A <- x[1:n,] B <- x[-(1:n),] d1 <- colSums(rbind(A,B)) d2 <- rowSums(B) + drop(matrix(colSums(B),1) %*% .ginv(A)%*%t(B)) dhat <- sqrt(1/c(d1,d2)) A <- A * (dhat[1:n] %*% t(dhat[1:n])) B <- B * (dhat[(n+1):m] %*% t(dhat[1:n])) Asi <- .sqrtm(.ginv(A)) Q <- A + Asi %*% crossprod(B) %*% Asi tmpres <- svd(Q) U <- tmpres$u L <- tmpres$d V <- rbind(A,B) %*% Asi %*% U %*% .ginv(sqrt(diag(L))) yi <- matrix(0,m,nc) ## for(i in 2:(nc +1)) ## yi[,i-1] <- V[,i]/V[,1] for(i in 1:nc) ## specc yi[,i] <- V[,i]/sqrt(sum(V[,i]^2)) res <- kmeans(yi, centers, iterations) } else{ d <- 1/sqrt(rowSums(x)) l <- d * x %*% diag(d) xi <- eigen(l)$vectors[,1:nc] yi <- xi/sqrt(rowSums(xi^2)) res <- kmeans(yi, centers, iterations) } ## cent <- matrix(unlist(lapply(1:nc,ll<- function(l){colMeans(x[which(res$cluster==l),])})),ncol=dim(x)[2], byrow=TRUE) ## withss <- unlist(lapply(1:nc,ll<- function(l){sum((x[which(res$cluster==l),] - cent[l,])^2)})) return(new("specc", .Data=res$cluster, size = res$size, centers = matrix(0), withinss = c(0), kernelf= "Kernel Matrix used as input.")) }) setMethod("show","specc", function(object){ cat("Spectral Clustering object of class \"specc\"","\n") cat("\n","Cluster memberships:","\n","\n") cat(object@.Data,"\n","\n") show(kernelf(object)) cat("\n") if(!any(is.na(centers(object)))){ cat(paste("Centers: ","\n")) show(centers(object)) cat("\n")} cat(paste("Cluster size: ","\n")) show(size(object)) cat("\n") if(!is.logical(withinss(object))){ cat(paste("Within-cluster sum of squares: ", "\n")) show(withinss(object)) cat("\n")} }) .ginv <- function (X, tol = sqrt(.Machine$double.eps)) { if (length(dim(X)) > 2 || !(is.numeric(X) || is.complex(X))) stop("'X' must be a numeric or complex matrix") if (!is.matrix(X)) X <- as.matrix(X) Xsvd <- svd(X) if (is.complex(X)) Xsvd$u <- Conj(Xsvd$u) Positive <- Xsvd$d > max(tol * Xsvd$d[1], 0) if (all(Positive)) Xsvd$v %*% (1/Xsvd$d * t(Xsvd$u)) else if (!any(Positive)) array(0, dim(X)[2:1]) else Xsvd$v[, Positive, drop = FALSE] %*% ((1/Xsvd$d[Positive]) * t(Xsvd$u[, Positive, drop = FALSE])) } .sqrtm <- function(x) { tmpres <- eigen(x) V <- t(tmpres$vectors) D <- tmpres$values if(is.complex(D)) D <- Re(D) D <- pmax(D,0) return(crossprod(V*sqrt(D),V)) } kernlab/R/ipop.R0000644000175100001440000002544511304023134013202 0ustar hornikusers##ipop solves the quadratic programming problem ##minimize c' * primal + 1/2 primal' * H * primal ##subject to b <= A*primal <= b + r ## l <= x <= u ## d is the optimizer itself ##returns primal and dual variables (i.e. x and the Lagrange ##multipliers for b <= A * primal <= b + r) ##for additional documentation see ## R. Vanderbei ## LOQO: an Interior Point Code for Quadratic Programming, 1992 ## Author: R version Alexandros Karatzoglou, orig. matlab Alex J. 
Smola ## Created: 12/12/97 ## R Version: 12/08/03 ## Updated: 13/10/05 ## This code is released under the GNU Public License setGeneric("ipop",function(c, H, A, b, l, u, r, sigf=7, maxiter=40, margin=0.05, bound=10, verb=0) standardGeneric("ipop")) setMethod("ipop",signature(H="matrix"), function(c, H, A, b, l, u, r, sigf=7, maxiter=40, margin=0.05, bound=10, verb=0) { if(!is.matrix(H)) stop("H must be a matrix") if(!is.matrix(A)&&!is.vector(A)) stop("A must be a matrix or a vector") if(!is.matrix(c)&&!is.vector(c)) stop("c must be a matrix or a vector") if(!is.matrix(l)&&!is.vector(l)) stop("l must be a matrix or a vector") if(!is.matrix(u)&&!is.vector(u)) stop("u must be a matrix or a vector") n <- dim(H)[1] ## check for a decomposed H matrix if(n == dim(H)[2]) smw <- 0 if(n > dim(H)[2]) smw <- 1 if(n < dim(H)[2]) { smw <- 1 n <- dim(H)[2] H <- t(H) } if (is.vector(A)) A <- matrix(A,1) m <- dim(A)[1] primal <- rep(0,n) if (missing(b)) b <- rep(0, m) ## if(n !=nrow(H)) ## stop("H matrix is not symmetric") if (n != length(c)) stop("H and c are incompatible!") if (n != ncol(A)) stop("A and c are incompatible!") if (m != length(b)) stop("A and b are incompatible!") if(n !=length(u)) stop("u is incompatible with H") if(n !=length(l)) stop("l is incompatible with H") c <- matrix(c) l <- matrix(l) u <- matrix(u) m <- nrow(A) n <- ncol(A) H.diag <- diag(H) if(smw == 0) H.x <- H else if (smw == 1) H.x <- t(H) b.plus.1 <- max(svd(b)$d) + 1 c.plus.1 <- max(svd(c)$d) + 1 one.x <- -matrix(1,n,1) one.y <- -matrix(1,m,1) ## starting point if(smw == 0) diag(H.x) <- H.diag + 1 else smwn <- dim(H)[2] H.y <- diag(1,m) c.x <- c c.y <- b ## solve the system [-H.x A' A H.y] [x, y] = [c.x c.y] if(smw == 0) { AP <- matrix(0,m+n,m+n) xp <- 1:(m+n) <= n AP[xp,xp] <- -H.x AP[xp == FALSE,xp] <- A AP[xp,xp == FALSE] <- t(A) AP[xp == FALSE, xp== FALSE] <- H.y s.tmp <- solve(AP,c(c.x,c.y)) x <- s.tmp[1:n] y <- s.tmp[-(1:n)] } else { V <- diag(smwn) smwinner <- chol(V + crossprod(H)) smwa1 <- t(A) smwc1 <- c.x smwa2 <- smwa1 - (H %*% solve(smwinner,solve(t(smwinner),crossprod(H,smwa1)))) smwc2 <- smwc1 - (H %*% solve(smwinner,solve(t(smwinner),crossprod(H,smwc1)))) y <- solve(A %*% smwa2 + H.y , c.y + A %*% smwc2) x <- smwa2 %*% y - smwc2 } g <- pmax(abs(x - l), bound) z <- pmax(abs(x), bound) t <- pmax(abs(u - x), bound) s <- pmax(abs(x), bound) v <- pmax(abs(y), bound) w <- pmax(abs(y), bound) p <- pmax(abs(r - w), bound) q <- pmax(abs(y), bound) mu <- as.vector(crossprod(z,g) + crossprod(v,w) + crossprod(s,t) + crossprod(p,q))/(2 * (m + n)) sigfig <- 0 counter <- 0 alfa <- 1 if (verb > 0) # print at least one status report cat("Iter PrimalInf DualInf SigFigs Rescale PrimalObj DualObj","\n") while (counter < maxiter) { ## update the iteration counter counter <- counter + 1 ## central path (predictor) if(smw == 0) H.dot.x <- H %*% x else if (smw == 1) H.dot.x <- H %*% crossprod(H,x) rho <- b - A %*% x + w nu <- l - x + g tau <- u - x - t alpha <- r - w - p sigma <- c - crossprod(A, y) - z + s + H.dot.x beta <- y + q - v gamma.z <- - z gamma.w <- - w gamma.s <- - s gamma.q <- - q ## instrumentation x.dot.H.dot.x <- crossprod(x, H.dot.x) primal.infeasibility <- max(svd(rbind(rho, tau, matrix(alpha), nu))$d)/ b.plus.1 dual.infeasibility <- max(svd(rbind(sigma,t(t(beta))))$d) / c.plus.1 primal.obj <- crossprod(c,x) + 0.5 * x.dot.H.dot.x dual.obj <- crossprod(b,y) - 0.5 * x.dot.H.dot.x + crossprod(l, z) - crossprod(u,s) - crossprod(r,q) old.sigfig <- sigfig sigfig <- max(-log10(abs(primal.obj - dual.obj)/(abs(primal.obj)
+ 1)), 0) if (sigfig >= sigf) break if (verb > 0) # final report cat( counter, "\t", signif(primal.infeasibility,6), signif(dual.infeasibility,6), sigfig, alfa, primal.obj, dual.obj,"\n") ## some more intermediate variables (the hat section) hat.beta <- beta - v * gamma.w / w hat.alpha <- alpha - p * gamma.q / q hat.nu <- nu + g * gamma.z / z hat.tau <- tau - t * gamma.s / s ## the diagonal terms d <- z / g + s / t e <- 1 / (v / w + q / p) ## initialization before the big cholesky if (smw == 0) diag(H.x) <- H.diag + d diag(H.y) <- e c.x <- sigma - z * hat.nu / g - s * hat.tau / t c.y <- rho - e * (hat.beta - q * hat.alpha / p) ## and solve the system [-H.x A' A H.y] [delta.x, delta.y] <- [c.x c.y] if(smw == 0){ AP[xp,xp] <- -H.x AP[xp == FALSE, xp== FALSE] <- H.y s1.tmp <- solve(AP,c(c.x,c.y)) delta.x<-s1.tmp[1:n] ; delta.y <- s1.tmp[-(1:n)] } else { V <- diag(smwn) smwinner <- chol(V + chunkmult(t(H),2000,d)) smwa1 <- t(A) smwa1 <- smwa1 / d smwc1 <- c.x / d smwa2 <- t(A) - (H %*% solve(smwinner,solve(t(smwinner),crossprod(H,smwa1)))) smwa2 <- smwa2 / d smwc2 <- (c.x - (H %*% solve(smwinner,solve(t(smwinner),crossprod(H,smwc1)))))/d delta.y <- solve(A %*% smwa2 + H.y , c.y + A %*% smwc2) delta.x <- smwa2 %*% delta.y - smwc2 } ## backsubstitution delta.w <- - e * (hat.beta - q * hat.alpha / p + delta.y) delta.s <- s * (delta.x - hat.tau) / t delta.z <- z * (hat.nu - delta.x) / g delta.q <- q * (delta.w - hat.alpha) / p delta.v <- v * (gamma.w - delta.w) / w delta.p <- p * (gamma.q - delta.q) / q delta.g <- g * (gamma.z - delta.z) / z delta.t <- t * (gamma.s - delta.s) / s ## compute update step now (sebastian's trick) alfa <- - (1 - margin) / min(c(delta.g / g, delta.w / w, delta.t / t, delta.p / p, delta.z / z, delta.v / v, delta.s / s, delta.q / q, -1)) newmu <- (crossprod(z,g) + crossprod(v,w) + crossprod(s,t) + crossprod(p,q))/(2 * (m + n)) newmu <- mu * ((alfa - 1) / (alfa + 10))^2 gamma.z <- mu / g - z - delta.z * delta.g / g gamma.w <- mu / v - w - delta.w * delta.v / v gamma.s <- mu / t - s - delta.s * delta.t / t gamma.q <- mu / p - q - delta.q * delta.p / p ## some more intermediate variables (the hat section) hat.beta <- beta - v * gamma.w / w hat.alpha <- alpha - p * gamma.q / q hat.nu <- nu + g * gamma.z / z hat.tau <- tau - t * gamma.s / s ## initialization before the big cholesky ##for ( i in 1 : n H.x(i,i) <- H.diag(i) + d(i) ) { ##H.y <- diag(e) c.x <- sigma - z * hat.nu / g - s * hat.tau / t c.y <- rho - e * (hat.beta - q * hat.alpha / p) ## and solve the system [-H.x A' A H.y] [delta.x, delta.y] <- [c.x c.y] if (smw == 0) { AP[xp,xp] <- -H.x AP[xp == FALSE, xp== FALSE] <- H.y s1.tmp <- solve(AP,c(c.x,c.y)) delta.x<-s1.tmp[1:n] ; delta.y<-s1.tmp[-(1:n)] } else if (smw == 1) { smwc1 <- c.x / d smwc2 <- (c.x - (H %*% solve(smwinner,solve(t(smwinner),crossprod(H,smwc1))))) / d delta.y <- solve(A %*% smwa2 + H.y , c.y + A %*% smwc2) delta.x <- smwa2 %*% delta.y - smwc2 } ## backsubstitution delta.w <- - e * (hat.beta - q * hat.alpha / p + delta.y) delta.s <- s * (delta.x - hat.tau) / t delta.z <- z * (hat.nu - delta.x) / g delta.q <- q * (delta.w - hat.alpha) / p delta.v <- v * (gamma.w - delta.w) / w delta.p <- p * (gamma.q - delta.q) / q delta.g <- g * (gamma.z - delta.z) / z delta.t <- t * (gamma.s - delta.s) / s ## compute the updates alfa <- - (1 - margin) / min(c(delta.g / g, delta.w / w, delta.t / t, delta.p / p, delta.z / z, delta.v / v, delta.s / s, delta.q / q, -1)) x <- x + delta.x * alfa g <- g + delta.g * alfa w <- w + delta.w * alfa t <- t + delta.t * alfa p <- 
p + delta.p * alfa y <- y + delta.y * alfa z <- z + delta.z * alfa v <- v + delta.v * alfa s <- s + delta.s * alfa q <- q + delta.q * alfa ## these two lines put back in ? ## mu <- (crossprod(z,g) + crossprod(v,w) + crossprod(s,t) + crossprod(p,q))/(2 * (m + n)) ## mu <- mu * ((alfa - 1) / (alfa + 10))^2 mu <- newmu } if (verb > 0) ## final report cat( counter, primal.infeasibility, dual.infeasibility, sigfig, alfa, primal.obj, dual.obj) ret <- new("ipop") ## repackage the results primal(ret) <- x dual(ret) <- drop(y) if ((sigfig > sigf) & (counter < maxiter)) how(ret) <- 'converged' else { ## must have run out of counts if ((primal.infeasibility > 10e5) & (dual.infeasibility > 10e5)) how(ret) <- 'primal and dual infeasible' if (primal.infeasibility > 10e5) how(ret) <- 'primal infeasible' if (dual.infeasibility > 10e5) how(ret) <- 'dual infeasible' else ## don't really know how(ret) <- 'slow convergence, change bound?' } ret }) setGeneric("chunkmult",function(Z, csize, colscale) standardGeneric("chunkmult")) setMethod("chunkmult",signature(Z="matrix"), function(Z, csize, colscale) { n <- dim(Z)[1] m <- dim(Z)[2] d <- sqrt(colscale) nchunks <- ceiling(m/csize) res <- matrix(0,n,n) for( i in 1:nchunks) { lowerb <- (i - 1) * csize + 1 upperb <- min(i * csize, m) buffer <- t(Z[,lowerb:upperb,drop = FALSE]) bufferd <- d[lowerb:upperb] buffer <- buffer / bufferd res <- res + crossprod(buffer) } return(res) }) kernlab/R/kkmeans.R0000644000175100001440000004650414221632720013673 0ustar hornikusers## kernel kmeans function ## author: alexandros setGeneric("kkmeans",function(x, ...) standardGeneric("kkmeans")) setMethod("kkmeans", signature(x = "formula"), function(x, data = NULL, na.action = na.omit, ...) { mt <- terms(x, data = data) if(attr(mt, "response") > 0) stop("response not allowed in formula") attr(mt, "intercept") <- 0 cl <- match.call() mf <- match.call(expand.dots = FALSE) mf$formula <- mf$x mf$... <- NULL mf[[1L]] <- quote(stats::model.frame) mf <- eval(mf, parent.frame()) na.act <- attr(mf, "na.action") x <- model.matrix(mt, mf) res <- kkmeans(x, ...) cl[[1]] <- as.name("kkmeans") if(!is.null(na.act)) n.action(res) <- na.action return(res) }) setMethod("kkmeans",signature(x="matrix"),function(x, centers, kernel = "rbfdot", kpar = "automatic", alg ="kkmeans", p = 1, na.action = na.omit, ...) 
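## A usage sketch for the matrix method below (spirals ships with kernlab;
## the kernel width sigma is estimated automatically with the default kpar):
##   data(spirals)
##   kc <- kkmeans(spirals, centers = 2)
##   size(kc); centers(kc)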
{ x <- na.action(x) rown <- rownames(x) x <- as.matrix(x) m <- nrow(x) if (missing(centers)) stop("centers must be a number or a matrix") if (length(centers) == 1) { nc <- centers if (m < centers) stop("more cluster centers than data points.") } else nc <- dim(centers)[2] if(is.character(kernel)){ kernel <- match.arg(kernel,c("rbfdot","polydot","tanhdot","vanilladot","laplacedot","besseldot","anovadot","splinedot","stringdot")) if(kernel == "matrix") if(dim(x)[1]==dim(x)[2]) return(kkmeans(as.kernelMatrix(x), centers= centers)) else stop(" kernel matrix not square!") if(is.character(kpar)) if((kernel == "tanhdot" || kernel == "vanilladot" || kernel == "polydot"|| kernel == "besseldot" || kernel== "anovadot"|| kernel=="splinedot"||kernel=="stringdot") && kpar=="automatic" ) { cat (" Setting default kernel parameters ","\n") kpar <- list() } } if (!is.function(kernel)) if (!is.list(kpar)&&is.character(kpar)&&(is(kernel, "rbfkernel") || is(kernel, "laplacedot") || kernel == "laplacedot"|| kernel=="rbfdot")){ kp <- match.arg(kpar,"automatic") if(kp=="automatic") kpar <- list(sigma=mean(sigest(x,scaled=FALSE)[c(1,3)])) cat("Using automatic sigma estimation (sigest) for RBF or laplace kernel","\n") } if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") if(length(centers) == 1){ suppressWarnings(vgr<- vgr2 <- split(sample(1:m,m),1:centers)) ncenters <- centers } else { ncenters <- ns <- dim(centers)[1] dota <- rowSums(x*x)/2 dotb <- rowSums(centers*centers)/2 ktmp <- x%*%t(centers) for(i in 1:ns) ktmp[,i]<- ktmp[,i] - dota - rep(dotb[i],m) prts <- max.col(ktmp) vgr <- vgr2 <- lapply(1:ns, function(x) which(x==prts)) } if(is.character(alg)) alg <- match.arg(alg,c("kkmeans","kerninghan", "normcut")) if(alg == "kkmeans") { p <- NULL D <- NULL D1 <- NULL w <- rep(1,m) } if(alg=="kerninghan") { p <- p D <- kernelMult(kernel,x, , rep(1,m)) w <- rep(1,m) D1 <- NULL } if(alg=="normcut") { p <- p D1 <- 1 w <- kernelMult(kernel,x, , rep(1,m)) } ## initialize lower bound and distance matrix dismat <- lower <- matrix(0,m,ncenters) ## calculate diagonal kdiag <- rep(1,m) for (i in 1:m) kdiag[i] <- drop(kernel(x[i,],x[i,])) ## initialize center-newcenter distance vector second sum vector secsum <- dc <- rep(1,ncenters) mindis <- rep(0,m) cind <- 1:ncenters for ( i in 1:ncenters) { ## compute second sum eq. 
1 secsum[i] <- sum(affinMult(kernel, x[vgr[[i]],,drop=FALSE],,w[vgr[[i]]], p , D, D1) * w[vgr[[i]]])/sum(w[vgr[[i]]])^2 ## calculate initial distance matrix and lower bounds lower[,i] <- dismat[,i] <- - 2 * affinMult(kernel,x,x[vgr[[i]],,drop=FALSE], w[vgr[[i]]], p ,D, D1)/sum(w[vgr[[i]]]) + secsum[i] + kdiag } cluserm <- max.col(-dismat) for(i in 1:ncenters) vgr2[[i]] <- which(cluserm==i) while(1){ for (z in 1:ncenters) dc[z] <- -2*sum(affinMult(kernel, x[vgr2[[z]],,drop=FALSE], x[vgr[[z]],,drop=FALSE], w[vgr[[z]]], p, D, D1)*w[vgr2[[z]]])/(sum(w[vgr[[z]]])*sum(w[vgr2[[z]]])) + sum(affinMult(kernel, x[vgr[[z]],,drop=FALSE], ,w[vgr[[z]]], p, D, D1) * w[vgr[[z]]]) / sum(w[vgr[[z]]])^2 + sum(affinMult(kernel, x[vgr2[[z]],,drop=FALSE], ,w[vgr2[[z]]], p, D, D1) * w[vgr2[[z]]]) / sum(w[vgr2[[z]]])^2 ## assign new cluster indexes vgr <- vgr2 if(sum(abs(dc)) < 1e-15) break for (u in 1:ncenters){ ## compare already calulated distances of every poit to intra - center distance to determine if ## it is necesary to compute the distance at this point, we create an index of points to compute distance if(u > 1) compin <- apply(t(t(dismat[,1:(u-1)]) < dismat[,u] - dc[u]),1,sum)==0 else compin <- rep(TRUE,m) ## compute second sum eq. 1 secsum[u] <- sum(affinMult(kernel, x[vgr[[u]],,drop=FALSE], ,w[vgr[[u]]], p, D, D1) * w[vgr[[u]]])/sum(w[vgr[[u]]])^2 ## compute distance matrix and lower bounds lower[compin,u] <- dismat[compin,u] <- - 2 * affinMult(kernel,x[compin,],x[vgr[[u]],,drop=FALSE], w[vgr[[u]]], p , D, D1)/sum(w[vgr[[u]]]) + secsum[u] + kdiag[compin] } ## calculate new cluster indexes cluserm <- max.col(-dismat) for(i in 1:ncenters) vgr2[[i]] <- which(cluserm==i) } cluster <- max.col(-dismat) size <- unlist(lapply(1:ncenters, ll <- function(l){length(which(cluster==l))})) cent <- matrix(unlist(lapply(1:ncenters,ll<- function(l){colMeans(x[which(cluster==l),])})),ncol=dim(x)[2], byrow=TRUE) withss <- unlist(lapply(1:ncenters,ll<- function(l){sum((x[which(cluster==l),] - cent[l,])^2)})) names(cluster) <- rown return(new("specc", .Data=cluster, size = size, centers=cent, withinss=withss, kernelf= kernel)) }) ## kernel Matrix interface setMethod("kkmeans",signature(x="kernelMatrix"),function(x, centers, ...) { m <- nrow(x) if (missing(centers)) stop("centers must be a number or a matrix") if (length(centers) == 1) { nc <- centers if (m < centers) stop("more cluster centers than data points.") } else nc <- dim(centers)[2] if(length(centers) == 1){ suppressWarnings(vgr<- vgr2 <- split(sample(1:m,m),1:centers)) ncenters <- centers } else ncenters <- dim(centers)[1] ## initialize lower bound and distance matrix dismat <- lower <- matrix(0,m,ncenters) ## diagonal kdiag <- diag(x) ## weigths (should be adapted for future versions !!) w <- rep(1,m) ## initialize center-newcenter distance vector second sum vector secsum <- dc <- rep(1,ncenters) mindis <- rep(0,m) cind <- 1:ncenters for ( i in 1:ncenters) { ## compute second sum eq. 
## List interface
setMethod("kkmeans",signature(x="list"),
          function(x, centers, kernel = "stringdot", kpar = list(length=4, lambda=0.5), alg = "kkmeans", p = 1, na.action = na.omit, ...)
{
  x <- na.action(x)
  m <- length(x)
  if (missing(centers))
    stop("centers must be a number or a matrix")
  if (length(centers) == 1) {
    nc <- centers
    if (m < centers)
      stop("more cluster centers than data points.")
  }
  else
    nc <- dim(centers)[2]

  if(!is(kernel,"kernel"))
  {
    if(is(kernel,"function")) kernel <- deparse(substitute(kernel))
    kernel <- do.call(kernel, kpar)
  }
  if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'")

  if(length(centers) == 1){
    suppressWarnings(vgr <- vgr2 <- split(sample(1:m,m),1:centers))
    ncenters <- centers
  }
  else
    ncenters <- dim(centers)[1]

  if(is.character(alg))
    alg <- match.arg(alg,c("kkmeans","kerninghan","normcut"))

  if(alg == "kkmeans")
  {
    p <- NULL
    D <- NULL
    D1 <- NULL
    w <- rep(1,m)
  }
  if(alg == "kerninghan")
  {
    p <- p
    D <- kernelMult(kernel, x, , rep(1,m))
    w <- rep(1,m)
    D1 <- NULL
  }
  if(alg == "normcut")
  {
    p <- p
    D1 <- 1
    w <- kernelMult(kernel, x, , rep(1,m))
  }

  ## initialize lower bound and distance matrix
  dismat <- lower <- matrix(0,m,ncenters)

  ## calculate diagonal
  kdiag <- rep(1,m)
  for (i in 1:m)
    kdiag[i] <- drop(kernel(x[[i]],x[[i]]))

  ## initialize the center/new-center distance vector and the second-sum vector
  secsum <- dc <- rep(1,ncenters)
  mindis <- rep(0,m)
  cind <- 1:ncenters

  for (i in 1:ncenters)
  {
    ## compute second sum, eq. 1
    secsum[i] <- sum(affinMult(kernel, x[vgr[[i]]], , w[vgr[[i]]], p, D, D1) * w[vgr[[i]]])/sum(w[vgr[[i]]])^2
    ## calculate initial distance matrix and lower bounds
    lower[,i] <- dismat[,i] <- -2 * affinMult(kernel, x, x[vgr[[i]]], w[vgr[[i]]], p, D, D1)/sum(w[vgr[[i]]]) + secsum[i] + kdiag
  }

  cluserm <- max.col(-dismat)
  for(i in 1:ncenters)
    vgr2[[i]] <- which(cluserm==i)

  while(1){
    for (z in 1:ncenters)
      dc[z] <- -2*sum(affinMult(kernel, x[vgr2[[z]]], x[vgr[[z]]], w[vgr[[z]]], p, D, D1)*w[vgr2[[z]]])/(sum(w[vgr[[z]]])*sum(w[vgr2[[z]]])) +
        sum(affinMult(kernel, x[vgr[[z]]], , w[vgr[[z]]], p, D, D1) * w[vgr[[z]]]) / sum(w[vgr[[z]]])^2 +
        sum(affinMult(kernel, x[vgr2[[z]]], , w[vgr2[[z]]], p, D, D1) * w[vgr2[[z]]]) / sum(w[vgr2[[z]]])^2

    ## assign new cluster indexes
    vgr <- vgr2

    if(sum(abs(dc)) < 1e-15)
      break

    for (u in 1:ncenters){
      ## compare the already calculated distance of every point with the
      ## intra-center distance to determine whether the distance has to be
      ## recomputed; compin indexes the points that need the computation
      if(u > 1)
        compin <- apply(t(t(dismat[,1:(u-1)]) < dismat[,u] - dc[u]),1,sum)==0
      else
        compin <- rep(TRUE,m)

      ## compute second sum, eq. 1
      secsum[u] <- sum(affinMult(kernel, x[vgr[[u]]], , w[vgr[[u]]], p, D, D1) * w[vgr[[u]]])/sum(w[vgr[[u]]])^2

      ## compute distance matrix and lower bounds
      lower[compin,u] <- dismat[compin,u] <- -2 * affinMult(kernel, x[compin], x[vgr[[u]]], w[vgr[[u]]], p, D, D1)/sum(w[vgr[[u]]]) + secsum[u] + kdiag[compin]
    }

    ## calculate new cluster indexes
    cluserm <- max.col(-dismat)
    for(i in 1:ncenters)
      vgr2[[i]] <- which(cluserm==i)
  }

  cluster <- max.col(-dismat)
  size <- unlist(lapply(1:ncenters, function(l) length(which(cluster==l))))
  ## input-space cluster means and within-cluster sums of squares are not
  ## defined for general list elements (e.g. character strings), so these
  ## slots are filled with placeholders
  cent <- matrix(0, ncenters, 1)
  withss <- rep(0, ncenters)

  return(new("specc", .Data=cluster, size = size, centers=cent, withinss=withss, kernelf= kernel))
})
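## Usage sketch (hypothetical): the list interface above hands list data to
## affinMult(), for which only a matrix method is registered below, so a
## practical route for string data is to precompute the string kernel matrix
## and go through the kernelMatrix interface instead. The short strings are
## made up; stringdot() and kernelMatrix() are defined in this package.
if (FALSE) {
  strs <- list("AACGT", "AAGCT", "AACCT", "TTGCA", "TTGGA", "TTCGA")
  K <- kernelMatrix(stringdot(length = 3, lambda = 0.5), strs)
  cl <- kkmeans(K, centers = 2)
  cl   # cluster labels for the six strings
}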
setGeneric("affinMult", function(kernel, x, y = NULL, z, p, D, D1, blocksize = 256) standardGeneric("affinMult"))

affinMult.rbfkernel <- function(kernel, x, y=NULL, z, p, D, D1, blocksize = 256)
{
  if(is.null(p) & is.null(D) & is.null(D1))
    res <- kernelMult(kernel,x,y,z)
  else{
    if(!is.matrix(y)&&!is.null(y)) stop("y must be a matrix")
    if(!is.matrix(z)&&!is.vector(z)) stop("z must be a matrix or a vector")
    sigma <- kpar(kernel)$sigma
    n <- dim(x)[1]
    m <- dim(x)[2]
    nblocks <- floor(n/blocksize)
    lowerl <- 1
    upperl <- 0
    dota <- as.matrix(rowSums(x^2))

    if (is.null(y) & is.null(D1))
    {
      if(is.vector(z))
      {
        if(!length(z) == n) stop("vector z length must be equal to x rows")
        z <- matrix(z,n,1)
      }
      if(!dim(z)[1]==n) stop("z rows must equal x rows")
      res <- matrix(rep(0,dim(z)[2]*n), ncol = dim(z)[2])
      if(nblocks > 0)
      {
        dotab <- rep(1,blocksize)%*%t(dota)
        for(i in 1:nblocks)
        {
          upperl <- upperl + blocksize
          res[lowerl:upperl,] <- exp(sigma*(2*x[lowerl:upperl,]%*%t(x) - dotab - dota[lowerl:upperl]%*%t(rep.int(1,n))))%*%z - z[lowerl:upperl,]*(1-p)
          lowerl <- upperl + 1
        }
      }
      if(lowerl <= n)
        res[lowerl:n,] <- exp(sigma*(2*x[lowerl:n,]%*%t(x) - rep.int(1,n+1-lowerl)%*%t(dota) - dota[lowerl:n]%*%t(rep.int(1,n))))%*%z - z[lowerl:n,]*(1-p)
    }

    if(is.matrix(y) & is.null(D1))
    {
      n2 <- dim(y)[1]
      if(is.vector(z))
      {
        if(!length(z) == n2) stop("vector z length must be equal to y rows")
        z <- matrix(z,n2,1)
      }
      if(!dim(z)[1]==n2) stop("z length must equal y rows")
      res <- matrix(rep(0,dim(z)[2]*n), ncol = dim(z)[2])
      dotb <- as.matrix(rowSums(y*y))
      if(nblocks > 0)
      {
        dotbb <- rep(1,blocksize)%*%t(dotb)
        for(i in 1:nblocks)
        {
          upperl <- upperl + blocksize
          if(upperl < n2)
            res[lowerl:upperl,] <- exp(sigma*(2*x[lowerl:upperl,]%*%t(y) - dotbb - dota[lowerl:upperl]%*%t(rep.int(1,n2))))%*%z - z[lowerl:upperl,]*(1-p) - z[lowerl:upperl,]*D[lowerl:upperl]
          if(upperl > n2 & lowerl <= n2)
          {
            res[lowerl:upperl,] <- exp(sigma*(2*x[lowerl:upperl,]%*%t(y) - dotbb - dota[lowerl:upperl]%*%t(rep.int(1,n2))))%*%z
            res[lowerl:n2,] <- res[lowerl:n2,] - z[lowerl:n2,]*(1-p) - z[lowerl:n2,]*D[lowerl:n2]
          }
          if(lowerl > n2)
            res[lowerl:upperl,] <- exp(sigma*(2*x[lowerl:upperl,]%*%t(y) - dotbb - dota[lowerl:upperl]%*%t(rep.int(1,n2))))%*%z
          lowerl <- upperl + 1
        }
      }
      if(lowerl <= n)
      {
        if(lowerl < n2 & n >= n2){
          res[lowerl:n,] <- exp(sigma*(2*x[lowerl:n,]%*%t(y) - rep.int(1,n+1-lowerl)%*%t(dotb) - dota[lowerl:n]%*%t(rep.int(1,n2))))%*%z
          res[lowerl:n2,] <- res[lowerl:n2,] - z[lowerl:n2,]*(1-p) - z[lowerl:n2,]*D[lowerl:n2]
        }
        else
          res[lowerl:n,] <- exp(sigma*(2*x[lowerl:n,]%*%t(y) - rep.int(1,n+1-lowerl)%*%t(dotb) - dota[lowerl:n]%*%t(rep.int(1,n2))))%*%z
      }
    }

    if (is.null(y) & !is.null(D1))
    {
      if(is.vector(z))
      {
        if(!length(z) == n) stop("vector z length must be equal to x rows")
        z <- matrix(z,n,1)
      }
      if(!dim(z)[1]==n) stop("z rows must equal x rows")
      res <- matrix(rep(0,dim(z)[2]*n), ncol = dim(z)[2])
      if(nblocks > 0)
      {
        dotab <- rep(1,blocksize)%*%t(dota)
        for(i in 1:nblocks)
        {
          upperl <- upperl + blocksize
          tmp <- exp(sigma*(2*x[lowerl:upperl,]%*%t(x) - dotab - dota[lowerl:upperl]%*%t(rep.int(1,n))))
          D1 <- 1/colSums(tmp)
          res[lowerl:upperl,] <- D1*tmp%*%diag(D1)%*%z - z[lowerl:upperl,]*(1-D1)
          lowerl <- upperl + 1
        }
      }
      if(lowerl <= n){
        tmp <- exp(sigma*(2*x[lowerl:n,]%*%t(x) - rep.int(1,n+1-lowerl)%*%t(dota) - dota[lowerl:n]%*%t(rep.int(1,n))))
        D1 <- 1/colSums(tmp)
        res[lowerl:n,] <- D1*tmp%*%diag(D1)%*%z - z[lowerl:n,]*(1-D1)
      }
    }

    if(is.matrix(y) & !is.null(D1))
    {
      n2 <- dim(y)[1]
      if(is.vector(z))
      {
        if(!length(z) == n2) stop("vector z length must be equal to y rows")
        z <- matrix(z,n2,1)
      }
      if(!dim(z)[1]==n2) stop("z length must equal y rows")
      res <- matrix(rep(0,dim(z)[2]*n), ncol = dim(z)[2])
      dotb <- as.matrix(rowSums(y*y))
      ones <- rep(1,blocksize)
      if(nblocks > 0)
      {
        dotbb <- rep(1,blocksize)%*%t(dotb)
        for(i in 1:nblocks)
        {
          upperl <- upperl + blocksize
          if(upperl < n2)
          {
            tmp <- exp(sigma*(2*x[lowerl:upperl,]%*%t(y) - dotbb - dota[lowerl:upperl]%*%t(rep.int(1,n2))))
            D1 <- 1/colSums(tmp)
            res[lowerl:upperl,] <- D1*tmp%*%diag(D1)%*%z - z[lowerl:upperl,]*(1-D1)
          }
          if(upperl > n2 & lowerl <= n2)
          {
            tmp <- exp(sigma*(2*x[lowerl:upperl,]%*%t(y) - dotbb - dota[lowerl:upperl]%*%t(rep.int(1,n2))))
            D1 <- 1/colSums(tmp)
            res[lowerl:upperl,] <- D1*tmp%*%diag(D1)%*%z
            res[lowerl:n2,] <- res[lowerl:n2,] - z[lowerl:n2,]*(1-D1)
          }
          if(lowerl > n2)
          {
            tmp <- exp(sigma*(2*x[lowerl:upperl,]%*%t(y) - dotbb - dota[lowerl:upperl]%*%t(rep.int(1,n2))))
            D1 <- 1/colSums(tmp)
            res[lowerl:upperl,] <- D1*tmp%*%diag(D1)%*%z
          }
          lowerl <- upperl + 1
        }
      }
      if(lowerl <= n)
      {
        if(lowerl < n2 & n >= n2){
          tmp <- exp(sigma*(2*x[lowerl:n,]%*%t(y) - rep.int(1,n+1-lowerl)%*%t(dotb) - dota[lowerl:n]%*%t(rep.int(1,n2))))
          D1 <- 1/colSums(tmp)
          res[lowerl:n,] <- D1*tmp%*%diag(D1)%*%z
          res[lowerl:n2,] <- res[lowerl:n2,] - z[lowerl:n2,]*(1-D1)
        }
        else{
          tmp <- exp(sigma*(2*x[lowerl:n,]%*%t(y) - rep.int(1,n+1-lowerl)%*%t(dotb) - dota[lowerl:n]%*%t(rep.int(1,n2))))
          D1 <- 1/colSums(tmp)
          res[lowerl:n,] <- D1*tmp%*%diag(D1)%*%z
        }
      }
    }
  }
  return(res)
}

setMethod("affinMult",signature(kernel="kernel", x="matrix"), affinMult.rbfkernel)
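## Consistency sketch (hypothetical): with p, D and D1 all NULL, affinMult()
## above reduces to a plain kernelMult() call, so the two results must agree.
## Handy as a quick sanity check of the blockwise code paths; the data are
## made up.
if (FALSE) {
  dat <- matrix(rnorm(40), ncol = 2)
  z <- rnorm(20)
  rbf <- rbfdot(sigma = 0.5)
  all.equal(affinMult(rbf, dat, , z, NULL, NULL, NULL),
            kernelMult(rbf, dat, , z))
}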
kernlab/R/couplers.R0000644000175100001440000000770211304023134014063 0ustar hornikusers## wrapper function for couplers
## author : alexandros karatzoglou

couple <- function(probin, coupler = "minpair")
{
  if(is.vector(probin))
    probin <- matrix(probin,1)
  m <- dim(probin)[1]

  coupler <- match.arg(coupler, c("minpair", "pkpd", "vote", "ht"))

#  if(coupler == "ht")
#    multiprob <- sapply(1:m, function(x) do.call(coupler, list(probin[x ,], clscnt)))
#  else
  multiprob <- sapply(1:m, function(x) do.call(coupler, list(probin[x ,])))

  return(t(multiprob))
}


ht <- function(probin, clscnt, iter=1000)
{
  nclass <- length(clscnt)
  probim <- matrix(0, nclass, nclass)
  for(i in 1:nclass)
    for(j in 1:nclass)
      if(j>i)
      {
        probim[i,j] <- probin[i]
        probim[j,i] <- 1 - probin[i]
      }

  p <- rep(1/nclass,nclass)
  u <- matrix((1/nclass)/((1/nclass)+(1/nclass)), nclass, nclass)
  iter <- 0

  while(TRUE)
  {
    iter <- iter + 1
    stoperror <- 0

    for(i in 1:nclass){
      num <- den <- 0
      for(j in 1:nclass)
      {
        if (j!=i)
        {
          num <- num + (clscnt[i] + clscnt[j]) * probim[i,j]
          den <- den + (clscnt[i] + clscnt[j]) * u[i,j]
        }
      }
      alpha <- num/(den + 1e-308)
      p[i] <- p[i]*alpha
      stoperror <- stoperror + (alpha - 1)^2

      if(0)   ## disabled normalization variant
      {
        sum <- 0
        sum <- sum(p) + sum
        p <- p/sum
        for(ui in 1:nclass)
          for(uj in 1:nclass)
            u[ui, uj] <- p[ui]/(p[ui] + p[uj])
      }
      else
      {
        for(j in 1:nclass)
          if (i!=j)
          {
            u[i,j] <- p[i]/(p[i] + p[j])
            u[j,i] <- 1 - u[i,j]
          }
      }
    }

    if(stoperror < 1e-3)
      break

    if(iter > 400)
    {
      cat("Too many iterations: aborting", probin, iter, stoperror, p)
      break
    }
  }
  ## normalize prob.
  p <- p/sum(p)
  return(p)
}
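## Usage sketch (hypothetical numbers): coupling the three pairwise
## probabilities of a 3-class one-against-one problem into per-class
## probabilities with the couplers defined in this file.
if (FALSE) {
  pw <- c(0.9, 0.8, 0.3)            # P(1 vs 2), P(1 vs 3), P(2 vs 3)
  couple(pw)                        # default "minpair" coupler
  couple(pw, coupler = "pkpd")
  couple(pw, coupler = "vote")
}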
minpair <- function(probin)
{
  ## Count number of classes and construct prob. matrix
  nclass <- (1+sqrt(1 + 8*length(probin)))/2
  if(nclass%%1 != 0) stop("Vector has wrong length; only one-against-one problems are supported")
  probim <- matrix(0, nclass, nclass)
  probim[upper.tri(probim)] <- probin
  probim[lower.tri(probim)] <- 1 - probin

  sum <- colSums(probim^2)
  Q <- diag(sum)
  Q[upper.tri(Q)] <- - probin*(1 - probin)
  Q[lower.tri(Q)] <- - probin*(1 - probin)
  SQ <- matrix(0, nclass + 1, nclass + 1)
  SQ[1:(nclass+1) <= nclass, 1:(nclass+1) <= nclass] <- Q
  SQ[1:(nclass+1) > nclass, 1:(nclass+1) <= nclass] <- rep(1,nclass)
  SQ[1:(nclass+1) <= nclass, 1:(nclass+1) > nclass] <- rep(1,nclass)

  rhs <- rep(0,nclass+1)
  rhs[nclass + 1] <- 1

  p <- solve(SQ,rhs)
  p <- p[-(nclass+1)]/sum(p[-(nclass+1)])
  return(p)
}


pkpd <- function(probin)
{
  ## Count number of classes and construct prob. matrix
  nclass <- k <- (1+sqrt(1 + 8*length(probin)))/2
  if(nclass%%1 != 0) stop("Vector has wrong length; only one-against-one problems are supported")
  probim <- matrix(0, nclass, nclass)
  probim[upper.tri(probim)] <- probin
  probim[lower.tri(probim)] <- 1 - probin

  probim[probim==0] <- 1e-300
  R <- 1/probim
  diag(R) <- 0
  p <- 1/(rowSums(R) - (k-2))
  p <- p/sum(p)
  return(p)
}


vote <- function(probin)
{
  nclass <- (1+sqrt(1 + 8*length(probin)))/2
  if(nclass%%1 != 0) stop("Vector has wrong length; only one-against-one problems are supported")

  votev <- rep(0,nclass)
  p <- 0
  for(i in 1:(nclass-1))
  {
    jj <- i + 1
    for(j in jj:nclass)
    {
      ## p counts the pairwise decisions in the same (column-major) order in
      ## which probin fills the upper triangle above
      p <- p + 1
      votev[i][probin[p] >= 0.5] <- votev[i][probin[p] >= 0.5] + 1
      votev[j][probin[p] < 0.5] <- votev[j][probin[p] < 0.5] + 1
    }
  }

  p <- votev/sum(votev)
  return(p)
}
kernlab/MD50000644000175100001440000001563114366231470012230 0ustar hornikusers162739e9e56685441163d227719e68ac *DESCRIPTION 3114a4e24b015242a1b61147f7cffe25 *NAMESPACE 7db9a58cb6e5aeae749727781fe388f5 *R/aobjects.R 0750c9216dfd490ac36814b8b1ae24f2 *R/couplers.R f8e0ac1a792745090fa9a8da65847804 *R/csi.R f08fe0d980d102c2df4158217bbbcff2 *R/gausspr.R ab289bc31386f29fa9b2bc9a667504f4 *R/inchol.R bfa34b64d293a380c5c4d045105d4496 *R/ipop.R 5f574afe5df7904fb80bb214f01fcc6c *R/kcca.R 67aed700531a0ce066bb9300e7f0169c *R/kernelmatrix.R c2688c1b636fb4fb3cf51870ddaafee6 *R/kernels.R 4df2eb88a79a9ba527515d471042c5ef *R/kfa.R 894f285bbb8e123968cdfcf88c2363c4 *R/kha.R 8ddbcd5e7218d96c96446caec78e04fc *R/kkmeans.R 0053416d76fbd2b79749f02cd0d78b92 *R/kmmd.R 03fc2d9d2bc5e3d2719397c9e1bf137f *R/kpca.R ef1e9cfc8727afee41e93643fa582c5c *R/kqr.R 2eeaa24800294a773283f481e2f75f2c *R/ksvm.R 6f8c85ed6f159df002605fa81313c0b4 *R/lssvm.R 9a6305a7f6f48b3d5b9897aee24c7a88 *R/onlearn.R e011e88368b20e857e139cea577cc056 *R/ranking.R ab9bb9489e4eb5e68e70d944e0833c38 *R/rvm.R 42578bea93efc1ad1488b72c8acec274 *R/sigest.R 159df23cf242faa6b7c1a0feb40bdf6d *R/specc.R ea124a494ad7922985532ad3b308e4d5 *build/partial.rdb 6832fa94fb75dd65ae1036b751f3dd81 *build/vignette.rds 4a2e5f676b743dbc552be3075d81fbcb *data/income.rda
e04ee4e7a62d2b55d4ebd3f74b2dfde5 *data/musk.rda ca3702ff3a836f47f233201037015694 *data/promotergene.rda 2da919cf2cf2e698c7aa29b9d593d3ec *data/reuters.rda 95036049c37adf985595a538bc62ecb2 *data/spam.rda e8719f89caae80c901df4004b48ae2ed *data/spirals.rda 312518a26b01f7fc4c5c86e34bd377f9 *data/ticdata.rda 2eecd4c2eb5c8860aeba6751bd9bb13d *inst/CITATION 68fe0d0d842fbc1b217f45934a8edf7a *inst/COPYRIGHTS 0d1b1a09dbb52e3b0e58676170c3ce3d *inst/doc/kernlab.R c4c223d07206b59e2d43a585d07164b1 *inst/doc/kernlab.Rnw 3a7f6ec39c320cd612eae2a9c300f6c3 *inst/doc/kernlab.pdf ca7923a78d389602d891a3cf6a5193d9 *man/as.kernelMatrix.Rd 76f275d529c9836865d1001120dcb49f *man/couple.Rd e36dc0b16ba570c99ead7a48394dc66d *man/csi-class.Rd f87d54c4c4bf47f760cc6a779c7e525d *man/csi.Rd 704bfeedf89329461a20e4cb51a237f0 *man/dots.Rd 285c27b5d9a389dfd7e2f8e392de215c *man/gausspr-class.Rd 7a7928c7baab5fcafe7eebd0eca2b29d *man/gausspr.Rd b61d371ba2f8d8b137ec3c32a115c3ab *man/inchol-class.Rd 07ae4d31f3ec7b47b0d954178edf2f94 *man/inchol.Rd 452553ee15225244a50b73aa08cca861 *man/income.Rd 9599ae27d6ebe41302c6236aa381b313 *man/inlearn.Rd bbcfe86bcb66e4b222b9ba13869fa2b0 *man/ipop-class.Rd 8a8a11772b987cdb2b25c76a38c719c4 *man/ipop.Rd 62c2b5318bb86222cb8d9cd361998d36 *man/kcca-class.Rd a5309043c8dccb762f1de6138b713b05 *man/kcca.Rd ef26a19723ffb7f6eb6dd3539905d6c4 *man/kernel-class.Rd 7357130456764a2b77cbf39d05d8dc98 *man/kernelMatrix.Rd 7a1e2bc5f883b6e7339bd717f0569eaf *man/kfa-class.Rd 22c7587c02310941aa5c484a3551ff70 *man/kfa.Rd 54afaeff97629d4a1353cdd98b5dde37 *man/kha-class.Rd bb2daaf182abb852cf5d23b6acae968b *man/kha.Rd d2c0e45b17490751137f9c3d9737b92a *man/kkmeans.Rd c3458139340043b2d63e9a642386582e *man/kmmd-class.Rd a5ec024bf428181db56378ef64213ffb *man/kmmd.Rd b39a018897562f1cf907c7d0920186ce *man/kpca-class.Rd 02c0ae81032f682604b9777087e0f52c *man/kpca.Rd 5a3b2344811fded04018d0b56d9bca23 *man/kqr-class.Rd d52f9afb916eac019e41a487d8ddad62 *man/kqr.Rd 3bdce4dc10887da4bacdac6830e66db8 *man/ksvm-class.Rd 464ccb70df6aabdf9d714f329a73d929 *man/ksvm.Rd dd6a605572b276158f753cf3e3dce63e *man/lssvm-class.Rd bab982b9b6cdbdfa1d9c50cacd72408d *man/lssvm.Rd 95f670451348298d1c5daa00498f9f65 *man/musk.Rd 6d1c014b9f6bb8b59d032fd444bf5a04 *man/onlearn-class.Rd 46d67ee15c0ca6feeaf2859a0a9dd375 *man/onlearn.Rd 75f80214439e10c8d1b0104f5bcb44ba *man/plot.Rd f67747838e34ee3400ad4ffe299eba71 *man/prc-class.Rd 04ae737bbae10c747929f4367c9a4236 *man/predict.gausspr.Rd 69e21e71600ccf8a8df4a1adb84213fe *man/predict.kqr.Rd 4171d85f12929bb2f37bafb47ef3c477 *man/predict.ksvm.Rd 0589fce657a7c30f20cf904b152fd526 *man/promotergene.Rd f3a2c50017ea501680b53c9e221bf6b5 *man/ranking-class.Rd 7ff29d050c5127aaf2a91f9cc654d899 *man/ranking.Rd 8bee0b6c367f1c5f749b296ff48dcc23 *man/reuters.Rd 2b1f6b6093d9d0a915995b59caf1561d *man/rvm-class.Rd ce6beae5455a6c2545058e3797345ac8 *man/rvm.Rd 86c5fd418857bae9a5c736e8c57a5c5e *man/sigest.Rd 04f98f98f24c146c6ca74a11aa2ef92c *man/spam.Rd b176c7c0f1edb61818e9ecfde276f349 *man/specc-class.Rd 2e5911ac1b9795811e67385276ae77c4 *man/specc.Rd c707c7af1229bdfca87272866bb3199a *man/spirals.Rd 149b3590c24913c3718c9f1d6c265b9a *man/stringdot.Rd 5a3d623ac56f129716429ba87481eaeb *man/ticdata.Rd fa4feb7dd29492877886e4d86d0cb8f4 *man/vm-class.Rd 2a6f9e9e044a78154d3cfda5936d6f48 *src/Makevars 2a6f9e9e044a78154d3cfda5936d6f48 *src/Makevars.win 3b77d80677bb88fb39cab4a7d2351056 *src/brweight.cpp 048d635dbf0db99a0b707bf0a9c06984 *src/brweight.h 50cd06527f816675b128669d222bee56 *src/ctable.cpp cb1e056dfcc52d5319e71981f9c90611 *src/ctable.h 
342cbb0568a2fa8f27b1f0c42542737e *src/cweight.cpp 0ede046d861731d10f965e2ff8f50e4e *src/cweight.h 5c02223129df9d548c614acd0593645d *src/datatype.h f085fe8cca3cb634567600216eb4aad2 *src/dbreakpt.c ca367ba012c1f6b88f1c385d682374b2 *src/dcauchy.c 455ccdeed46ccda0958453306fe9a951 *src/dgpnrm.c c9ae627ea63dec6d72867c2026121648 *src/dgpstep.c da04ccbb77940ddb1043ff8c53da74ce *src/dprecond.c 2d8d64d9eb60161b1efc8af9f58a50ce *src/dprsrch.c 320bbdd591c4f3521c0d891d87b149f3 *src/dspcg.c b06f93e7f1a64c0b3861550eb40868e5 *src/dtron.c 7d35d1a550d9c3e072f87d42b6693af0 *src/dtrpcg.c 616fbd8165eddace388ffc7ffd90c753 *src/dtrqsol.c beb2c099ff3dd87e3474a30a49a8437e *src/errorcode.h a0f99b7568a3b1c4f0e47437b022e4dc *src/esa.cpp ab96f4b2f43cc0306c88547ab6abe1ad *src/esa.h 5a7166f36e34cc037b9c2006f8bc00c9 *src/expdecayweight.cpp 7f04e95fcd76ee21dcea4d7138d96326 *src/expdecayweight.h d16372bf79ce22a92dfcf3c0d0b769e7 *src/ilcpfactory.h f103b80f529451ab71a425a31ed1eabf *src/inductionsort.cpp fd4a5ad4b79ca119885410bb45c7d12f *src/inductionsort.h a73c84f3f5fff2b34dfc76999c312068 *src/init.c 76adf49038c3585cf216cd033a9b4183 *src/introsort.h 0073f847ac8606d19e03cb0eeb27e0a2 *src/isafactory.h 94245de3f9b29eee07fd1f7d8d8929cd *src/iweightfactory.h d2d7af10799002c2392f038e7d767c3f *src/kspectrumweight.cpp b5d07bb286e3767cda7a371c50d0122e *src/kspectrumweight.h b1a983bdf87a406584defc0fa332c455 *src/lcp.cpp 6de81523902a1d4dce2b38ce3d57ce98 *src/lcp.h f47f3118ea197009f6f0e12edeb5fc17 *src/misc.c d5d113bf04eb7759c8fd0f915dd24c64 *src/msufsort.cpp 82af93b02f090a83152b52239e0e3711 *src/msufsort.h e5346edb7625a7d8d192443b23f90323 *src/solvebqp.c 01a09c0f7f2fb72637644b3830b56c26 *src/stack.h 079a2f29ea98ab6f5ca4e814bb2917ba *src/stringk.c 801972af49fa57499fc3e519d202a8ad *src/stringkernel.cpp 1c19c2215be7a2b25f7439fc061f2daa *src/stringkernel.h ae74f6ea199b5d5b9b4b045afac5fa40 *src/svm.cpp 670301bb88ff2b0f28ece190a96635c7 *src/svm.h 5f5910aab31dc2ebacb4b15caba8e873 *src/wkasailcp.cpp fd6807b3526c7d5442f66a2660bd9e4c *src/wkasailcp.h f48a5df5ecbf1ac1831e5582798eb57d *src/wmsufsort.cpp 2694af88ced7e4391e92120d0c90587c *src/wmsufsort.h a324922cf3b84ae82f364be31135168f *vignettes/A.cls 7deb7278b233e702bdf87d4cc1ec3897 *vignettes/jss.bib c4c223d07206b59e2d43a585d07164b1 *vignettes/kernlab.Rnw kernlab/inst/0000755000175100001440000000000014366206771012677 5ustar hornikuserskernlab/inst/doc/0000755000175100001440000000000014366221257013440 5ustar hornikuserskernlab/inst/doc/kernlab.pdf0000644000175100001440000211070614366221257015560 0ustar hornikusers%PDF-1.5 [binary stream of the compiled package vignette PDF omitted]
7޺t,=i#]O&iؘqMi,Lc#nc ϑȡ xx8?E6aR0Ҵ5B蚩׬ Ť'$8 A11y} M!s4Y1 A11˳}l+ /STyUdǨ&tc+vYIQLjpLb̍~ <Ŭ_$>;rţ2V2}@QO%y)ƕ2=\5F=5f$I7̔ypƼ+EH?P3f}}~g=5xGF1LO]W314 g%ukV\)sUo浮URWo,I͘i2/VQi<:ŕ22+߲(JD:zĚ W˜RCw0~kRr&=ϕwݠL`;8Fj Nqe̽L zUQO0cJj>~k&9=t3Ж\2}?8 %ViX ¨yQ-d1}f7a <%W\!s7H2 RW1`~gmxAt4׬2S皫5*o$V5H<$5c]~<F_5ݍ3df#I3M2FUZxǜj 55&}$UZhn@.s5X?]RLZC<窜m cNˑI{xoPUyn@P-2kV|[XX䡿Ƥb jzٺF.j Yk`lHMM#\b^#-&ZCj<\2wG#\b:@ccցwN^!MS<9lm99IY`l 6t1UcἊY/IQz h,pLbbK@cccWEܑUc~wXnXō}/¤u3q=ɄhZSi  i<z?$!$(ok?-٘YaC1k%11Y+}IQ }E#,5f*VUq+\#d15!WfŬ$(&8&1A+}IQZ2;ev8&1F18&1F1żzAcc}_P>0R2E:50 "n C&YQ2`[iwJV#0%5*̔pƵ}ڭ$֨2S皫5/=nf$Fŕ2=\?[}ڝҩ1IQqeL5WkTLktckLOOzhU))j;SxqJoLe=> 7G2CnS`,Ӧ+ <+(`j8F`ԍ7h!s9]qȲ䮲U\oM{銻F_ZgTM{銻F~p?W 4TcLn@.s⮑^ujLǜߑEjLǜk䴗fi5׵zě_'%L_(Ԭ>&2 [gV¨nQu-d1JFs JcS;n݀=T)Ȳ>RWVG*RMǜ*y޳}}Y)XՎ*& cN ƦvPL7n@.sX1Rk#& cNW졊d w2Uw<_J VㅊuيF_uvJȟFjd2Ef&ԬƦ~<(yĔ[Mu "T!sy5+fMul\kMO㻰Gb3_PLDĪ{:VבT݂8hIQ̚1k]^W*LLji: "S^^Mj9?}dsZW%\B7'3ܩEJR㍷տvTccpա4uޝyCWuԪWް!Y[F!UnCnd,dHpq=hҡ# wWV(}~ "I- IQ IQԤ,u} NרKKi*)^5tW#7ZXb֩_NkkFg רu*(>H:$(& -8&1Am IQ:h[pLbb֩@ۂccNcx]ռE w&ejLx=:--ɴ߈LjR̺T.#C1H IQ\~/X<֑ejL;6m+ɴ.#nt" ϑȡuM$>W.{<îdVejL:k:oQ7=OiԨ1c皛5{uX=Oy͞Ib3fzii\~ b)3IQqcL57j4CUŬ3K2=\Q|Dtz)l e4%4%Ť)lسx݀=TSO1M)鍿^7 s9c]LŬ1Ho4̙>1Z;gz)n eVh5sMqӔ +~/ƣL)SȤfŬ^16yLMqԌL>o_MZ.LQ 8##?'C1ewRGu/QeʃfbfJaX 2EfLi>2Ns%|14f|fN)FM[ʍ b5"FkTj`T+I5ӃkZMY _pLbb`n󎃔gzdM(Ǘ-ƒ"$!UE+oT՞?sԙk4IÚ c&ì$f}v1 1Y/1 1Yì_$f 4/8&!0Wy1 1~c+мd,kL$/SwR5XV|pQ/ո5Q皛שq{5Q)&ì1 Y/71 1l]zN/55bs~w{E>~{`tq)2j*[}46Mϙ?w_bG"ԯn9-2Ogw˵խlҾe}"x~0 Lc5˽2Vqՠ-hz貾Ljflݳ2w LRd5wg?ߍn(U b_1Sd6Ug`4(U\uFH}Lj~d6232eշ=nV>*)~t)2[͌M| W%432eY^~Bi}kY~xžbl53>0@qnk)ru8q-[*~t)2@jZvSo;}L'O{OKat)2[7;]gdJ_N }LjXug)y)U>}MYeLoUλKWܔ׵4HuKrF\&1ߝ~ &}{skLjflKS;v1qܯe.i{yljLv>1갏PaR#wfb;sZ ;Wᘄì8ᘄì8ᘄì $f5X8&!pI|>]U'*}/XBV&G8$!e}4> 'zEg3b F"11&8>+W)1`-10^ccQ<&1F1^ccQ<&1F1^cc (snzEIQ<22G+Ua`j*1?^Md &tcV3#Cq*?GPib STYhRe?2s'fYM luaĚgs$r(furccQ<&1F1ϻ-r~'Gϻ0Ě Sd ,Df5ߘMƦ>e Lozfi8L1S~z3lܰe/35$Lj T4xR)j7k5~tY_}l/s]Ĕ>\j;\Gw߁2Ef/|.sgdJصeЦ_ztҾLjXgdNL5 7G}3Ef9ů 5sgdJXcύ%G}3Ef9ſq0>#SjE%wM7贯ѡ>Sd]#A͜RVknn+G}3EfGRFUjM7+~tI_!Sd~0hJx*6zpQWXNm?)2Z@wL~??x_{MϫhPGʶ]Wm}S>Ӡ~bS`WO:9#U?^9(15e㾱Wh@qH:Nc.Lʹ<;W~gůb "ؔjnws4Uq}d܊Y xmXov>nb]I~aK}C4*8Z-3e5Ȭ7U;IjD&;!Rb7v*^_x$#%֨Y՛X$5"Y>RbJ۱h$|fk޼5*nqX$5"Y>Rb `/+,/ь5HR#2̜_-Rb͙p}nT<\#2/<wMռpμpj>SC, DJ4\ /  izñŷcyO MtOCݰ8*Wk?*qccWp 7DĔvF\A舩brjIQN pw11٩8&1F1;5$(&;Ę[QI6&b~.S4?n%Ff5ݘLxDwcx0:725&d~v47Iտ#Wj}dRbvj0٩11p_"39%"vxNqln <&1F1;s<&1F1;$(f';k$v ~eY;1Ě Sd7n Ԭ C#SjGHæϟk-8Ҿe}" ^3fL5~D>~tY_}l5>(>#SjI q?)2[͌™S}y){m*(OH!Bl536dz39H:}>~/QkW'8RG]WM{#\}Μ /F?}iyο߁>GA6gI=8HQbʔ_iy<]A)2[1vgqs6ӛw8$'2H2i+O oC G "|iN7)톯)ƍa/ _1gss Z<Ljxw |%y7)F)F9q?1qw$|fjx(w+w FbsVW\=^;5Y>Ro\%5H|fywP㍫ssw@ k$&;Wļe>1Sd3s5#C1YUYE#o@>`j2?S?gԤ2LxD xny͇Cl.VgkL1S^jqseq$|?5WL WNAh!Sd//:h `JͯS^z7}3E2w@$+2F)Ft~_$=2Y͌7ܝtz:ڟ~y1$2Ef9m30]:%y oGgVsC32}F| r14?N ;G)sd*5?13͘Mk X^/x5s:ĔGW SxOwff/XK,QG3HL "{D? 
~yzz36$32Eҝy RǕ3晬͟ l~ SџOW?ߝNZֲ~t|߲"|B(Ɍ!u9ߞ֙A`J:m{:mdg-1)2/xR$3L֍?Sd.֐)>J:cɺ0,~gpvi=yKg-1)21,rǠ3dN7|Ԭ|iy['aZ[W0uI:7ؼnJ>{g-ixk{|S:7oL[0vLh~ghs6v :m5z'}WJ}w~;HL֍?a9®m M`aA?I:3Ef9 #oh͜wnnܸ7pgCV.x>8YcthpPHcq߶CIas{ad vMoDN o\^_J86b2jNpLbL[_ N>:{ސ)b 5*ދvhtd=oF'8&1Fqkotcc;|Adcfcό$ك58&1Fqyc z PsFC1{$(>._˞}l=ZD̏Αd>QM>2Ac399 ~c IQ411 U=ao)ΰ#{K_PCR#d 2͙5HLGK`)I7nNoY ܠf5"O'r7lӮ9;Ah!Sd;05eݸqrw}$=2<3Xȉ[~$2Ef9`u>wy n S~z׸eٮM-3Ef9_iz8kȔqײ1Κ>~tY_}\&՛Ϡo|dΟtWX1Gge .{ '?ZIꙬ{b(џwwa/%W2t" 1GR̯?#ٵ3GfeJ Iz-x&3cK>;ZsmSlG~ќg'/vykfN22\dr?O c~$6JsVv f tSl{ cgeZ&BG_Ȕ^+~$<9 )̽t dJ5Oٍiac>coٌLy^?spV3(Ɍ!u#V[33LBG_JHV₳21߷l|\>J:cn<;Fޚ)AdJ :%i;ك2 a|\ư~CtF?Lf3Y7=#Co͔ 1 iӿIwes9#Gv2;1I3ٸ^z9yk|)sn~~VOۉlL-)2OiL1Icl܌qг[3sEL%tq;uv:d5K28jlxi;~}i8$z _8]k洣̔tӢaWy}0]W)2iJd3s _S֍W7]Db5I=1Y?!V@ԫ=6?-wa_yatg׳ջo|dλkʺq+)!3/Ijfl~4ew$oܼKF'Gfyjbnlޅ8rq$2ERs*n4fg1nn\?$f|tv{ꆯI_k+g3[$5޸坳 FbSPsϟMMj"=xLb!巙?{"NKd@L\\?W^_j8Zb9x\ע_ܘFb͍j)LnG&G%hk1Lxs?$!ՆY~`W ;&:pq>c9>QODŽ_ccWqcpBdILSQn#;&1~L.E8&1Fq_n7wc.nQ;;77jT}u#&$)=pNhwS:7jwCleavIQ $a;ׁI8sܮpLby%_ l$ 19 bvo11I S8&1F1;I} $(n~̣܎GSGnT9pzS$&Q19;6O m3z`9fGkqpvG7[<ܕԼ\{FE)&-3ZOWⶕMGkF3Ef9`>weλL7]2gA*nƹ 3Er}G[3q_GqWq,p>ݥ}[C_}9j;}^$3o< og-1)2_ oq}%-6㽟s3L~MquAl>coٌL^)St(y&c>>8ong)uEuU<Z~_v.uv92]7\ᗓ{wkg}?q^gHkόo$ĝkR/7ڰy\g(|\OȔ^+~$6ɺ`x~$6Ψ׶smS꽹~$ t6dׂm*_M?C\ d 5vk ԑm.r3͘Kw)QGIg3Y7+xF3$Db}]I ًΘ[6c>Sd.kMgQC&csܨ~?4Ú]sA1,es9#Շ5nӹ|nC J8~L\SȱHZw rl6߷ln||]>S$s16@\5S>WĔXtNݻ5Y^tn|"sL>J27dxo\^G޸-i$ 2Y[m亮S"nީ+kii_$/nG隉Lv>2j]P6vkc6]W)27Qh%Czw޼>Sg?ڬ7Nh%+2Y7^(>#Sіqyy_$/]7uD}%&;?CF)ϻQU7VI#ؼin S _$qo#sc2S>RoRo\ww~v5Hjx~ dTcc៻ `m"x>jUʁ0Tiv9upLoensGr}%}Ljfl<>Jދh˦k]_/bm}q3GRo6cзd sdn<k "Sz56w~{pyۻ{l}Lf 2Eyن3w 2Y7;fg^>wó}kM5rsd[q'*?#ɵ#|>SN?C\ d}fB^32Owd)ʲxtϐ$2Y7^YfLk܌ ̽ }-jOf oMH6/sd*w!WurA\>2Δ^~%ʐCG͔_u4s"_6Gh._-s:pFط{ӫBgޕ_~.$udҎF.U,2ou\UWu laFYL}>0΅cJON6 \H::؜ڃfYW1e_Nbr-gOf2Eҭ%fJ ?C̟r 8κ26઀YULF;x>5||}:u}2)2n%oVN ?:ܥEOwzt=ڠ\.3WбdWm1;ߡ#2gs;& uss!T>wܻ|Ow6cog2E3e3(Ɍ!u#C23LiaIWIoVNG+I_z X"_d#s꠰ƹuw1@o\0:}LzDwWbn06sߛ]M|ޯirz}Ȥ5GQ}gbJb:iVa ~t"sލ<`dg )>?e;&IFLVs=QwZw$q#y~H3wzzc1s>Rb#rsM$HLv~zq-p+b}咆$2EK1^F'޺L݀n(q~3$qd536gswǼӍun$^q:;?̽[oܼʏkX#1ܫOak"UxLb=At9|Cdub"bWy}MGKLv~=ZW/9Q͕k$&;?c}̿Wpk"&y}M^x!R|imW3<0O͌}L.O3_8}\&hbq7q5>A!f^ Vp|ځ1MZ:Pq7qz(0*⩘<f8&1FqP˲RTp|Cd󡎙.hx= y qtd8x{뢎; ،=3p(nK js|u>)wרߞj5H1ULnpp11g(+m "[G>0S7 j8:b2ޱ$x୬W"arf<'Psp11G(3nW"ER61q\&1f;8v5_wzޏUɏ]\dOҚ}dV'~)K1sd#svtknk%/j8Zg3Ñ>Io^i3&܌7#k14YwkvVk~k2E 3}Bϐ$2Y7^p'Q͔_Ȕtۿ>@hC4i92"_3\ 92[7ߊ^;~$6ɰ&y\irL_IGl;Չ^mWv#}nsK}rA\L?\ud}flTW]UL<д4wQagE63rj[#UBʸoFszUd֑:v ?ҿ )sY_͓U@;]~FYLzY0΅$167xpgIIT"M~Iz Sn;XxW\k1{o[ILJ>~V 2Gk2כ{I'd>G;RܪL/(w2W'zpκl}ȬB!S?υWg*Ȝ]~.$ud}fl{OFWe^ȔϠc67n>" 721{}t]Kb#y-Rb>_yz~jFd9EJ_;ov@ k$&=?0wRPbuE?蘨&b2%xa‹])-r0q۾,U 5hGf)Fm_µf Id#s`s;F5OSbM0S{'uA;. lW)WfNe]/:{6b ɷ-5($Փ61J~Έ7t")רx^W 7g<Syn n{ ut̔B?ɀ$ K0r֍t 5*&0_QkSŴ{Q\^CF;x[νG̏Æ3Nhb ?j8:b2`=G1{/_bDYy`J{IL\濾xAZI"fXb0Í65#௾,/ֹݺo~SF#q)2[<]A}WniBoՐ2Ef9kn]y7);:;q. 蒾Bluh|gdt[֍.(O蒾Bl`l{F(>g?O3\]Gw? 
}gQ?1M7gqw!}Kf 2Gf q0vMtfgjqYկdؿOc[ B̑|\QC{?\ud}x\{s~CԻ/{S'W"mΛ#Ƿ?דz#sy^ctφeI/d2C?^_o# Rg͠ ;йZeJЯe,Kr}a;`݃1`%^+| 5LI;X̰y+|~6˛:92+#uYM'72>Kr=!9`xH1l?]3fE6>Sd.cHT>}?\TߥH ?:2Y:WS:s_w vp_ )2Z~*.kAXW$׆gr3z׊{])U0SK~!wIKN&V Iz-x&3c۟1VKȘHsnD\[; r7[2c)2$L3>J2ct]aWh/Nc3l'ُ63[ӽ w?ZIL~ˡb{B_ɺvQ߸y}T }-uW#/k8zGu.[$葩 ̣n48ߑ_xn$^qs?ꆯI#v$Rb|.m ~ͭ5"Ove=ڨ^} G "x-tԝt:x?zҍ{= FOLv>15HIjq~+: FbSs_˻6&*F:8ĘG׷s11z.v~I}bkT\חƿ 5I2Zd9nkkT\E_5v_OSbMTsy/~IHkT\}YDŏw}P#|df!_V+4 xq?|v?!eHJ4~Ɵh`PDͯ{Xr'~dqqctN֌<TLsx>c/ApCd>0]@&kg GLy{>cxy]ߌ=39 '=S:nd5*>P12_ʅ\.}d߿Lk4t˯5~yﱇcb fuJ;m11?>`όDuqT:.yg㊫WWVO؃~žbl5 Cu}Fz+?7žbl^ճMy)?S9/'gvod*57>y߷8c92[;3L3]?ӇD]B̑i]vlfN k3GfF=o$׊gr3ԛ \^J\6e{-}}ݞ}̑imϯ2?U3GfF_ʟ ULuF t:`i6uo-s>p1|BL[pȧדz#;`\0\oT{K?)'CVד<牧2v3G粳}v=̑ٺKraa?~{}!Sqw~M:w{c3_\~[52G~! ,gדg$S,Lg;}Dޝヾ4ggx~-ʮ)2O9qg/@6ɺ0Vc@:;@s؞\W͹[4:FTq^^)3\ǫ 3GAjd~%ʐ~h{1Su+sUOHWDk&|\]~$6.|-ɮ)2]@k3$ɵL'Ԝ3$鵰fr]+~{m S> ޸ }_?;|%1d*O93QC&cfϐgoy{ſuM LM a\gQ#ٍ@1Lv>1wq~ϫsP3w}wts_ܛC_ɺ|};>#Sg?ZIzcfO=@g?Z }er_h>}+2Ĝv$ol^Ygz _3ӵV$=2FI\si7|MFL}$G&;;chj4Ko|P$5"O̝?nh{wヌ$3vH 5f<_Hk~kPa{7=cݎ$o|3Soר9r=P$E&=;-RBS{cΧ5"O̝{y޹F'/˱C jd&;'^)I7^&ףLsݻ9?ipCdߓLYk4l>GLv>1g4~}ZTw'uhƱ'wYCnx%x㳭A&YCHw?{1'1BL\ioo?\M4:b2=xLb>?#NqbLƊ\Xޕv|dߙLkT}7q_kS'wYccws<11gs}~#NqdMLFsyy c_˅~vI1Lgt 5޸= &1ULNxcc{y3殿[d L>ר[<W#|dkkTG1\s5"y>Rb?-O/.5HR#2!ӚlEqO;G j8Zfƶ{;mD4voǟ7貾LjXE<~y){FV9}3Ef9ć;WjȔߣ5W`74s7+<[2c92pq1a! 2׳ _7暟[A2vLi}et=}M9 eׂόmoϘ$ 2=׎{])ubGZwkc̼"_G!T4ȯ2?U3GfFWsW!2kEw<&&R'~~xuzO::7?uG)՝ #,gדmҟ]0\oTKuGB̉?w\'c s[_92]'0דz#u#݇KraLźdp# R7s\sOׇm؅8A!Tj>O`ή'?3Gfc2`%ސFF;ꮏBm3f{ixeLe3w*s]e>sdn<_F&Z*Dz& s%Lџw?. ןEt=gWuN&ީ^όm઄JW07]/G+|-ʮ)21LaHϐ$2Y7^BogJ Tgtos/kR 7?H?#ٵ3E9Rܰ $CgHk޵^z3]w.-poO1ιˣ53F;(ŒLR~!S}dƐɺxiCg>f}qu߫P!u4J+1d`wMA}E&;kκkd7n_ x=qd5A P+nrcghG3G&;}7q Id3s^sX.ws_g/EJR㍫||5Y>R/ܜӵʽkX#19/<0&b2=)zwsSg 5*n뵼{kddswP湾,./X$5"Oi>Rbz/Aq5"Oi>Rb_ޠgz]g_qEv$$@] `V7b"?EJ"% ~Œmhx_'u FdyYS[Q$ɶ'u׽u=hT{޲>;Y#2Ĝj5V{ʟd7QDĤ3ྥGJ'!5:&DL S{11Ž{/^wc9SN^{s{:fKL=1* C+޷Ҙ;;i"&ýýC"[@^ak?M7}\[9Mh 5n4~RH76=yg[#H!q㱔K4L=-8&1Fq1J5+與`VF;|}hvd89oo<11+:N{{f&Pz#ѡ}ϙySoE$Ύ _1CϹu|d_S:bbo_v7M4;b2z[qLb߳vlٙ;`ό$oCuz ߔX֙}%r3^mOhx>uGcg"M0uS_ɧ;Njw-9%jL)h}x\q>ʛx4Hl{dN:Sż]mz q)2f;Ԭf;Ld󆭋"9oo/=_W?l]}lI9_gd\ᔏM}w9_F)gMs0-sȔ_VU"ʅm?wO#y[d:&{D憡#i C 2Ou="unݵ| Ŵ92:oZK PL\_XWO5pg(+O6m0Lcm>;6ˎ $2uneye+Ȕuԩ&]oYߦEL3;0ώ4Sd=Lh 9w [v(;дgI-^ϫpx#F}>9(xCyD)Ϗ7dJts[y5t ?;3 3UT:(#cIp,Hr"Sڃ>9*ᨒQ1Q^g-nuZHO}x,TTNtGQ3Gf[ ֓>9*X(D}rTQ%R/PgyVֿzwμ c??UT;gGQ3Gf[ Ԏz왉c>8)ٗF{j~ɱ3E2O̔~Irl F<7{1ez׹tnuzgvI=-c>SdV3eN_G3v,Z; 3gsŽw7?[IL3c瞭_G31]W+㏺펕s{l]av2E2Gҟ=ټds^Snx/}E$qĤ jl=2Ĝ$+n}'.9] I쉩l7OVw~ɶ'_J$ <)^Sí~F6N#mÕGJԨ>o^ Q#1Ĝ}$+nU:՟5HHL=1~z$&;i"&űu~Ž np}+WX֋\[NK&%&۞+:)Ɗ[/N4hl{fz/ 4a8&1F13h}2IQL9cc*]$(]oV蚽 [bJ bb[Wn9+wU6F-1ĜHI4VUZcg"M0wU5;yqLbY[!g HZ0}ֳ{zv+C*<XjqzJ zV"WIb@9TEܥ%&֨\Ʈ4y gGLC3IQIOm NUbցŻAֿz\s?uN: rKLSX=ӎD#c?k3IQ3 }Uw-Dvf'eqCq'}] 7_!rܩJL鼉)hT<{͎ o(~m;$Xb@Le[xinO#%Ѩ 0"L s?11{t~ݶ9\%t5*NUi25pȔ]E[WY.r{f#q)2[{y2M;8e3EfFɽXЖRT7ʛ08=ʎ92KuϮXA&G?7&þٱn0]g{Y~_8 =)bLQbe}GYm2r?$9 ͿOzr캘G0:  fVk7V92]G1^Ύ'#rؼ~Ah%9ސڿ^LYOξ<{./CrhОW`L1#0;^ώ/$27͓I7dr :rWTi{9aqgJ20G&"?^ώ/$27̓'oUU穮^Ǜ1;V92]Mi_f[ lyru$27̓2Ȕ:%_Vz!K.?zt,= =Sd6k؁=(ɱLm=cA 0.s~|]?Hܑdsd2m-Pvlx%*e|l S~6innbN_lL4^v0%/j|?- %wgUsĻV=-st(w7u)x ݬe3Ere{L疉l{b߷wޥuSk8{b2͌v:vȔ$+nqSNɶ'TDcchLL)ccū "tIq%1@LQq],7J{^mÕs|T_gMd8tIMSq<,%y"̔SШW.D#&oiحh;)qLbE:qLb5X*կEx"̔#4VܺOzz+h8[b9s葒jd0@1~Kф11{4aLbLšz4aLbb˜Eo?11_k6:bG#Sz\kTܺ˫[(MgKL=1)Ɗ+DpѬӬ]W|K-uR*K]qF![BPao>IޜY.nnI4ԑU|&gE< S$$(n77!riL*0SШ$Ύ '0&1F{+Vp5!3s"cό$[^KWM GbJMLAgApY菴YwZ"S>bb_< mhvd8G7˜ŭ}_N&D;)[f Cgä1IqWt=8&1FqO|?JKMWM Gd 1sv뽡IlɶG)Ɗ{Wqw{CA$ɶGUMVqWzʭexi~z g "ifl=ݛj<}}}c3ug̦9հ2\gd\-IVw?}wfXW)2怵Fm˜32׌Uhk}.0Pױ:_) 1G&|b1ۃ^Idn;m=Lxha1+ 
?BwfvϤ]}KwcJ"us.7\XAoIWnC4Vaqp;0.?w(F:w(usg¹!~bJw1c1pm11[My;vq?2FoON<(D#&íg?z&D)]Wb +޷}QfGL s׏oIQzeq蝹ѓ~bJ 1iĉhvdu?)-vIq2JLAc|9 qv0v(W☊{^NOD)74VzqM_0{o&%&۞S>R^2$n y 5mO=RsJW M4wfHL=2miqmoewIf3Ef ̃L\5~Z~R'R~vٺL4:#SK_5[ڻKi5u"gMs~f^:#S~]]5_NNzRg+'K!d=1ۃ^IdnJ7Z7j*BOw!7y[~GYgw6?页Kb2 9 nˎ $2Xҍ~)?V)cx^/EN޹6c֟̑f;Eomw|k2 d TӶ [g~]$NV=&Lc[~dGWۺQ{]G0j:~[~}Ҟ[C+Lc{,;6 ٜõLcN~Ov)=igu>A˿)?6ˎ 92u?.ipSZUM~=bPW2E&û^؀C7BqR!wdn1mxVd[΍>0=J̙i w΢}ΐ92.Ǫw 3ebX_nt>Y7 3d2g~uV#smy<ȱ֬;[pLbb;'$(. xIQ|UzNDvf16 o{4`LbKx1>idg=3npT-G$(~>Nz6Xٙ"`nj$z4`Lbb -G$^|Q=-_x 0L}6'dzBf|dbx V a߁=!0&1=!0&1F1w@OIQccs=!0&1Fq xwKlMsBjoYC*lm ΗnZQsds%MmI!X11[y9N/2贀qW1XWYV;ώ ^mmםٙgmŭ*E[p"] Ĕ͛X}z}u&15L~QϧbJ 1FÇ.xgGL{/\|sov)S>bbw19krFIlI'X5*níAFbyt>}kTߏ2ܘY#1Ȝk5*n>Yw(AFb!6M~;JqsL{z]>{Al!Sd6͌?b:-h5iYz>\?ΛwHϚ~vɺBluK7&>Ov`L1gzlK Pۺ4:V)U Wݽ.g'Ԉ92ƀ߾ǒcDGXѱLi-\5}Z 7lxaQ2]#K˨̑42·ɱ{,96@IdnnXBL{aNuM d oPotL=L7]sO/$.mw}eLP^ZAP2dμ|}Й[@~wd M2Efz*]0-hݑ)[ ДFšӁpXLLލsgtcY0&1F1wV@7IQ̝Ѝcc6ú3gIcmšwcĘ;cyFL{f&xsypwgLהI:6&b^Az@uL4AdI1wn269s'tUjտ_ ӫ:e1;gZI+Ys3%wf̴ܻ[e)#%jR̝#}&F"bo,9&0&19&0&1F1w@ IQ̝#mccst9&0&1Fq6otmr\oHn&Rt! r'ˆPqݎ'M š+¿$(n]ǣ=9 t)hTkhvdw]|ޱ ~D970kTz%؁7S,VF-1Ȝ >Rr_4{dwyʮ ]7zR r?BL5*nQZ⿑3uF-1ĜiHI46|,Y)Q1]۩-t}遁a3s &X\܍~$lIgTQqP/$шL=1}D{ǧNhD#2;׹oDI~vɺBlΕuoQ&Khc>F(])2怟kޢ32޸j<#>wf >"Zd.a=J"s[|dֵz^{&Əޥ++#r|/bڟ2NQ^:SIg1oŮj| E 958~5jۺZLvh ރyĜyޭn~5uX,&.dLmu~wXA/ʳ[kgLyݑQ &τH0;:#٘CAI8sa:8FҞ{u;WS3u&d^wN\uB̮<|dI1w0p ްcc_A11N\11:`LbbvS$(f:`LbbvS$(f:`Lb)SFtɵNu:*<6(_ qᠱN~HB@N~HB@Y! M^#zͷ11G)7bYq!r~3S+vܴqA$E&yp<=w2;ݠAd@sQqvI4"mO̩f)Q,__* Fd9?{>G)\k8[M3c׫jS T5~;#9*yuL43;:#Sz>k09U])2怟˷R:p)vWρـۚ_& #sÏ`eĔy砾ߙygKyʢۼ g1pauϧY 7[>ގbVAdflnG++P{0jv't\]߳1;R?(B̑4}cɱJ"s;&=ݱ+,|k}ViE4أM JzdLm5~ syBsZoTc;ve1oB׳ syB!sVP&+ם͡uC̵1G?~j@!sYiV܀ɓt4wW23;/1yc7<<̔j%2iOPhN:<ʴudcWW48m35&b^knn~дJg޻#)XqgLkͽ{ ׸xI1;{'ȡO)L=SgBuD5Y,#M wLDyt_Iɯ2L ]{` [DS' pϻ=6M0JS3!7u+䚬[avE#M ` ȡ;0&1F1w@IQ̝=ccs't 0&1 0&1F1w@IQ̝=ccs't 0&1FqotHߏW!0_aOǍ& 4$Ny&K] Lh bt*!Zݫ_,q1 vgb_ gL St*W9ݫ,q1kLpyI{k0;fj{I1|4~/fDot@ u@&h{R5mxkdd#x4*nzsAd2g1R~l$hd&s>R.O4wfL=2}$+|!b3Fd 95MG4l)|!bL43NjSk?t>_+gMsu&4~zհυy~t]u"ixg8g"-s4Y]=˼q za; -sĔѦsn40+#sÏw+&1;yԧz w-V_ChV9h~>+핬3f1gNEMkր騋^?3j4މv;W댙GYųϵk7 f oArރdȜT-r{ݓ`̫VksJ{44v*lg$wI03}_+_ t6f<4P̤1yGqu0i]ԥMı6gg3֙_9c2~Ϟ5fGτH04<=cb>K3ge:2wsMb}dd]&F᯽٪h'<>Q3!@|,?OȨ09L3j#83IǍ W5+>Qavvp q8K cb .5I1̎30&!0;RØ8K cb\jcgpaLBav1 1q$fٻsgܽaNuzsYfvŕmc; I[o 9kF3ѤsEW<00+iDM)2[ΐyȜV:h꤂l}L>Sdv:cΠdȜ:51kᎷu쥵+g*Gy?Pv[ ҤLU G)7=]-Zw^ 4ݶCJdU}aT(6uǔN@7fʴ\],gҳԙf`Dcō2578h<$2N5{&I4Vܙ1Zswѱ#%jR0c ȡ<ӣGp L=Sc"\ꁦ/'*#3Mgm$r(m/UŎpxJ{{L=Sc"\\Ssg|dem$r(f?Kcc]c=oƏCgLȼ .utqoħp)Ѩ3Lko!D'Gmw'owf̴]Ҩ ч.cmO̩DcIL)Q1f_;8&1Fqpccc*vn~};9D}nfJגXNV7hdd}DOXMн\31 en o!u"γLoxi耟N3嚉)3M>㩸{yY'9<[-2M4L<ْyg0u;_KwF̫Tdžf8镏m929z WΔ n~'y7 #ԳUOc0ܜ(u=hm*sd`l^{ΠdĜzٺ gX3o>Ygjy_cнẇ/gk)l}q^gn#C1;uM119IQ M#/wٹ=[5̓cmccsnIQm0&1F1;gdp4r=]wהyd}6TsUV!g! 
ynPayXMrKD,bJWXoش4lɶ'L3DJQgT,䔨 SIQ< cc=1Mr_D,bJ1Fz=KSIlɶ'L3DJ݅*6޹<ܙI#1Y{$+nӫr@DdsGJJ}g o< $шL=1dzQ1[_Pŝ͡WDsĔ]Ý] uv~v@6g "SϦwr!*'tz2fK 16wn.2E5~?˴ Ct3]9]A?l}lV7"ϢjC Tŧg+[=Sd6o02g+JL?*Sgju}gju"" 732gLeD;tlu#s[m.R?f+]~Gܟ572ϙu2?݈a=j@s:[yr7f߲"s[ןg.jsxAɈ9!iG0Y͓:jd)G3Mj&oc错vMO몸1CaT?kܽkfϚf_%@0(uOVgj&e=#u'|͎Zi^eեG>dqU}aT('|j:Z ֬1S怲fߠgDcō2575h+kCqմ9{ @I Tp\ { +IQn&<ccO?cc7<-o'Z;9v)4V<7#k8[b9#%X;i"=$(f7 11]ħaLbL˜쾑CUN]nukS&-2}dl}KAoAdd Vt 5&q^9LcsнἏyV[ K1Cw~S"5w ̙\|re_ƫ̉WVٺ=SūG̩bX6iv?W2SG4]qpT+ī{뉝97ܶ/j@sFhQ.:sF6f5W2Q[L\pQ֙|Vgΰ:!kAmN҂bϰZir[[&719*h 2;q vP7f<4tT3zɽDz=J怙š[Oc0&1F1W1/ߧ#t{sٙ36 b#qLbLšyP11[ ]cM*3DĔCb +>Wߛ gGLIQ|p`LbbxF~7!DM)2[ "3S5?ҁM2LL3Xqpq11=$(fE;0&1F1Wo8㙴; |>kCqM9ɸwǡB2qC*$?$!cc{IQ80&1F1WI%kIQj yĚ=D4Vjy$qdsGJV?n=$j$&۞އ/=Y.7<$j$&sGJ1\LhD#2w)Qbv+?ЛI4W5˿U$CĔUQN3/s8xy̜5/bUk/q1W2ӚiՈ5}K,pj@sZ7?!>a%~Os8x5 jzݏa36_b4W2ʻF_+?g~Os8x5 9kdyRPi'l |^ f3&`4߁W2;:e%܂Y5. fމif&-c3# sLBkg|]ŝ23 ]33is<~$; iu0Zs=Xb011$(f|ҧbO-xͯl=SԘygh2e~LLS0p3A$T<Y`LbbLg11y/eiJ(_5WaM3dfWךR?glY 6I0x&와ccg> IQ̞ ,0&1F1{&a 0&1F1{ |IQL5~|>$Tj|IQ5p_Mmoch/<<g $:sM:O8:V漊fLʪy݁vf =8Z]5B!ne֬+#wkxUY#d3'U޷X_Ej7jVmՀ#派jz8W ?j36Y }5$(RNj˼or&hygH:˩t!j5m!!aS2`A,BuI0TZ: ccs0&1F1WZ: ccs0&1F1WZ: ccs0&1F1WZ: ccS0&1Pin<[;3Db2jzN< LIeU5bu6ͱUUVgCsZY5VqseUU^gՀ#洲ۦa ̫ͪTY*rBe5MVgՀ#-?WʪUoT٭̳Wܙ}Q,fYij*{=iRSk^{&d9_-Zw^|V4PådEΫA&Nص^cLuj523͠)Ѭ8ٮީ?וVN4:2k3CaL3hȤ}3bRWsz9`V fM ]0&1F1W>$($t5Ø\\%IQLH^˜ĘC%0&1F1W"z ccs%0&1F1W"z ccs%0&1F1W"z ccs%0&1F1U"z cc*H^˜\%IQ̕H^˜\%IQ̕H^˜ŭzI=ml~es 6G?$!^  Bmo1*&(a{^_ !A#Epmbm$Zssߋy(03 ccs=j0&1F13 ccs=j0&1F13 ccs=5Pݦ~qx$5R;~t\7?':΋1\3~ysM4͝! ud1nysM:eaLbbYިZ V5?n{]5B!2͊pHje}CWhnO̓ F\o&U8<~U8ĭj2*j|p * mIj@s^<ֲ guE y;<5{_Wrx"fOL\&dV֝W2ߨж MT;eTt:F+h5H-d1Uq|U8f%Wܙ}aAMr{V9V&Ӽ:LjѺP{Z[ ФL5 cՂuՠLkͭɚ*o>@qU9Xcf&ŝgfJ5{,G+IhuxiUWYbBu$(J)TWaLbbBu$(.JaQ$(J)VWaLbLšR U+P]11R U+P]11R U+P]11R U*X]11J)TWaLbbBu$(J)TWaLbbBu$(J)TWaLbV]oTW}9U?^fUT,PHNC Ck~LB^c@ofZmuY 5딿Xۄ& U]'%Wܘ!hٹGJԤkLD\{q\T|$>?^PC*3ԫ C0&1F1מ^ ccs0&1F1מ^ ccs0&1F1מ^ ccs0&1F1՞^ cc*gWØ\{z5IQ̵gWØšW[Go2*լ2'5Vf͊68&1Fq.%kǦ0WynbykJ4WUL{Y[䷾05c-vWuY#d1qՋWuN}v7f<4LffL/;c5Rl9`IҿtTm*[g1Cy&}KqY i^kcgJuOVgDM}]as&ZiRJX{;Uqyv3de^D&{_ws~yī怙&5.\{̾Uqyvwf<Δ1FL׷uN͎BZ047>:'^'oU\33kN31i[d Sݓv9(ZsŲ&\׼Q}~n5?0DM;<̔j%2qĺŎ@7fʴjјO vLu]̒hLkͭ̚*uc5Ø\7Z3IQuc5Ø\7Z3IQuc5Ø\7Z3IQuc5ØÿrTPGxL=Sc"kAӧ\:3)xanj]-}|DM.gm$r(:mů!ԑ;hu0ӤZuܪz?NqcLcϢif&g 8#; iu0Ӥ{cU~6<;ō22]D&~=Xon࣐V'3M/㵮Cg| ϳSܘ!<tr8-X;96}5Mk{&doZ:y~5)+PϺyhy5(ӬZ;Gpf<;ŝge5{jw9?< 8vys9P]د^pʏg<;ŝggޑ;^*՝uYqcLzyZҩ*?}lMg!SW2Μdt5|euV4[O`u`y3eZknNkTWk̆OY͘i9 IqүSa*='swfy)5Jdz](; 9j;3eZjNk0̗k2Z1QI)ρ257O5)f{j8&1Fq;GD#C88Qy7O5B1sxMfա7%yܿk/4&ō23e^(2}} Od Nf ۥli)n̐y)wDFLeJ_=yyV4Xpy_ 3^qcLk͉blUy-ځY͘irʮXz/Mfߜ& Ǚ4FbؙJī怙f-WijSܙ}aT(IW]r9jfō257g5e&)5Vܘ)ZssX1K33K3-5w'5)fց11uu`LbbvAi] xZ$(fց11{|=bU{zezsC DMWiah|=5ܘ)Zss2_] |'$xanjs2xϽ>R&B0po; ?IzDk9ѕxGFϔi1S$2͞I{_DwjR̯0Iy%tXxy,s~{D&RoDQ< I, IQ~x@0&1F19ccIQ~x@0&1F19cc*~x@0&1F19cc= 8'u d|F kRܘ!0S4JdL(f{ī怙֚ĚroOiNZc|XSŝ23 },O+'aO(;iu0ZsX,FR2Y 465)n̐y)s9FL_}n\~);iu0ӤoL\tffoNqc̣L5JdLDxu0Ӥ^Nϵ7'óSܘ!"9^D&{; ;7t9`@it9fo~ Ϯ ;4S}($ :air_.l/)n̐yiR蚙i(ˏ:ZLfUkg(1}Kx.W2ʜ$j`emԭi*#zP:<{ō257?5#zdI7fʴhoUs3K3MΕd͉U}aT(<|3Ch5XL֚3ߘ]zf( Lh1SDSp虡TgDcō2575E 4S +n̔i9m|$=3486uL֚7hLDS269_11{)׉=lZDspwXS .?}iLƊ3eZknN@9m˫kĚ*f*` ȡ_.L{gjLļvŚ˔.:_Fϔi< Lk h4'l~WQMw,S^uVיM9_ΗP!Y|!  /],p`Lbbv11 $(f /],p`Lbbv11B $T\,p`Lbbv11 $(f /]&|ա/'F~w^aru_4>Ku5B!sxMfcc*. K/b=,]kkRܘ!8SPx4Ӷi02%s9P&M9D7~Lcs~X Ǚ2kȤʅX9PvAf67f<ʔ{^D^=.?h5XL&>yY\x#_Ͼ9?<[{S-(eWgyKd֙g_qgL{Ht] ̾{J<[|Õ,2LW2/XjTsxuhy3eZknkT7^%G%q3KL&齭]tfoNqc̣LVL1. 
s`͊3eZknkTNQ>,Q3eTΟs麰4}xv3dg*^DϻׅRh5Xsŝ2-5wg5KOt]s4{&7fʴܜ!#.9e=D3eZknk9ua)$jTܘ)ZssXZot]s5w&7fʴܜ!x-09FgQqcLk)b/8Sktfzl/c/kVܘ)ZsXcgaZf2`T4qO d鏡g7}sxv;<Δi%2:0cX6Z ֬1Ssb˿`D3f2OyhZ6}xv;<ʔk%2LG(kB-ρ2^ݦ_컓ƳSܙ}Eif4xKFs`͊;3fW`΁ׄlIٚ/jEfZIjPe= V5]j7V֙g3cw*XgQqcL*XXf<:j<뙼yfZ$j`jK"l:<{ŝ35w5~y#l3IXcPXQk;wf̴=Iw3u&d^{bMR(y^ΔhTܙ1Ӥ[#{JgP!a! ' 30p`Lbbv=11  $(f' 30p`Lbbv䄡{cc*Ng0&1F1;a섁{ccIQNg0&1F1;U=11,멦3*֤3Lwktfzљ'9j;3eZjk58WesfI5LkMcMٙʼn3sEscϳUIqgǙ2^D&ʹ˧{;fuDsLksbMf9?c4<%֤3LfQ"f:1 %| *ρ2ͮ,]tfoϮb32Mff:ʃuɾs`͊;3fZki4pF)%jL{w9ʖ^HoNqg:L?|rh5XΌ֚CWH"b͔iR8WK]?óSܙ}y&5J27/V5+̘i,W.73IXcLKe!gF߰f$QL֚O7:CI7fʴ\ZՂ73IL֚ڿ3IL֚v02&+h1SuT73IΌ֚rgG#o$jTܙ1Zsw]X $e3IXcޒN4;WgQ] cȚ?^^%bDŽqdgnLi<5W'h~pPR4zLcŝ35w5|>ѡɞ+D$QΌ֚k4iGJԤ]&F"} C z΄j޶7/RHE~H8@!?!$f"c`LBa~/1 1$f"c`LBa~/1 1~I1E0&!0{?_cb ~I1E0&!0{3/1 1w˜ř7ͽ Xzrϰ/aM33e5y(e+#л 99`foXaJcb 7qT>3&d3S@t )p5XacLkpc>%xtPw%3ͮkU291<;Ýgg^7:y\/T^? W5+nLi)iF8dD3fݵ˟ҫ@d6{sbxv;ϼpja7`͆;3fZk~ FyL^+<͜iVzzҫ| wfy)EFIܐw1k6ܙ1ZsS0?ȝ$h4lLi;zA?og5+<h3c^5ZGJdd_/_Q.q9x?7U_{䯿*~?H  3(t A۠`1(tOРopnO]d_=?*.D7KRۿ?/y {B~𶽮W߈=Oo-ߖc9FA-vyH5\Gm,5S6xh(te 6n<j\~ߧ\b͝Rc?_1JbsŸ)F񯿶s{ݝz|wO;)v0kjS:E[n#[9[xBukͭ4?y.̫F~]ܗ)|;;kk'K!VT:r=loO޹+[領Ƃn˧ܠ_W#Z~˶up?Knzb ]ܦy^(7B֖[]uX]粽/kJ7r3SGCqP}kWw3nܚ[]ui׭s?cVʍ>-6ݥuw^_??k[^^bv}:sͭj,ֹs}ďͣ=^z^V-]֑LbkŖ骱[r ?y:9E+b `.-+jk~J*:*-v+tPZs뿫.(i#fy2o_׻:rIcl܊˕tXmSն_t}ڞ?FG<.^QWF^kZc:^i^6 Uu|ڔPVer)[ѵvKݷ~[7ݹ+[Kcn˫tN뚫5͍\:2.W5TW~[n#[9uac[knwե^cVSבLq]ϖ˼:OC:[>ߏz؊*wZGp+ȮxJnwU^Υ؜][~u;i EYk[fb/q_5ꪱ[rBQ{ȳxy׭P4z|Q¬*u{y{]Qݺ,u!ď^NI\>ˮ"=z1o.|KcϫG箨n]sۿ.ywT#[Bٗ!){Z셔ۮJL׸ٽX/TcnsxYׯ^'Vֶl\%zk8 տLґ6Ԯ|m_=,ZsۿW]5u~>._ԣuW==[ѥu-Vfwm묧խknwե^g/6WtYWݳ2TT>׭{գ*r|%ŞWo]KnwSZcAwKt*_k׮cmD nvG]`e@Qž ;p4S[d5,֓j'wu9$HZh(r֯Yfٴr#!Wrl/>P7gS=0+(p}96 ΔtMKwKҗc|wC.~ G8ܻW (zVP*`BE,.bl/>=f:b^_4$]*xMڄRR$;nLSiX/0:(yCxU3js&% L%h 軤/v2]PƷ4?K`nJ =E>~j7oDHrf)ve'ߡ_\J ݃%@$ۯh$;><Cv@ʾGho9ڃ/Ѿ6|Tze'.ƪ[,KJ qGE5zxVh-ݓ7!c|ɷϗ%rҖ;tM:aFT98q7w60g7!_,B%-5vS>|)C7utBJRVIN=شDdդE [Py;͗hd+~]~Kq)"P-s0Kf̱|h ǝ bR[oԠfGSig#~m >Bg5}ve'![5KaQR (K2=N(iW1S$9ʾ ;ͩi|~(,_OA ^>. (K9]>h]`A+a}RnZz:}F.\~c&_Oʳ[\J>mQsP1'ZxwMHve'/:%0t2#2@7qMcn aIwK-H.M3;Tv}O}1u "(p}푴 s^d]rF.Ds_{`n(vcoyr ??OOK{ ZC󡃯aI~~/>F uo}`^TD=Qx. h e"1(IvSPoA5ve'ߡOM! 
[binary data omitted: FlateDecode-compressed PDF stream objects from the kernlab vignette documentation — embedded figure files (/tmp/RtmpoWpZyv/Rbuildaf72a4a49dcea/kernlab/vignettes/kernlab-ranking.pdf, kernlab-011.pdf, kernlab-kfa.pdf), sRGB /DeviceRGB color profiles, and embedded font streams]
3?!;E6@أ2MPWUsm_ _멬XtU4[DU ;7NwQbE |)Z+/{0 ן@Oܽ0yݣ4FBA伛8磣QCQ%0u_ "zY<lu&gG:pk5Q?:FQQanTxu+Jb⤑DIFtewhay- kHRCN9?x;9ڏ(g ~%~ׂ+H{.evb?( :zyLWl]@:csUY ?]r o/pp 4O6Ȳ/V|g97"{mF^}}9!D S:X76ODI3FSY)g)UIL<ߙ$ZWSw8˼oTУ?=~7dp|zv6U_o\Kg쮭9"/!xxZ2%:R 4VME=Smi-Kdc`0C̑R5|JONdr}s/)߀4cFqLMB `roҡ[ T k5!wFNxVfy8ZUIpN5b[%|W54 C:λ O\%Fમ0b}'޹]c;+[?=)yjio[/n!]7n=b;I ,wiYޘvzDajrW19Òi=v>P>D{y;z;SY 9.X=zܢ2 _h) ˸H=a$>N3+a e#QX1w_4XZƹFjD?{tyRvnk#Am#+bcu'^gM(iTUHipT* 7^E@]rSrݵ7CYe*0nK;%d?]yS2G彚'4Y>ء2!QGbɼ .HDi쯡>e8K=)sXW2\-70bԾuWMҲY 1OEȊ̘P b i7,[in2Il3(=vaP@`Rܕ4VUz{Ma_V<[IBx]e#h:@f̞y6VI%ݡپ5\:qB>^ބSh<:Me*/hH&75uGd#v|T(lŋIQbiLQrLڟ<՗Գ:{Qx9yn }_=A'i~sHX=#yUľ / Ԧ7ꫝ~E%9,ܻA Ӊ޿`X#I/e#qF\_:y]X)Q$9I|jX/J}0+?3(9k0 "~'+e2-O~cSS4)ג,Md'V ?,*F->W٢~Qt;*0te W.p֟.\V *h<XDEF\PʏrsTZkq#n)޲fI ǻzм3 4e5߁i mm| .UAzƖ{2r>)D{S5Z8&h"G̉էBd3|lIϞO-Ѽ['R ?5AX&4MZ<5tpʺlD4ʂލoq2V?̐.joXZ5mدN(8eu~)C/p BtvsPpEKbf>fb0DU7g ?e1BDywa˟l_ kĦUM+Ip_D!%\PqVOqT{to]S{sQ^,0x=Vezsw= E CMr :a5d8Ě;luΜpRoN]qKjrגt|R%Cul8cڹ~m8i"dQݧRG2xM٤nfx~_ltw{G}t=9\S8m.V597n?59w rvfN̠,w+]][̫*(G cwiM =2۾L\ʢk]:ɋ  InZx~iG rʔd˵?edPjPNWyL1C65q?RY噵"K!"jLd ,6TیPȲ4:Vd?50>dN CXzZD!{횣a䷧|jپf]q1]јE!ZKxLef(Dc's X-|#e f%-4273fka>i|Κ{¼%k(J8Z[#$:g} AK}UKNSKS^UTUc'q.fH~Řcؚ-rS ^RmI5ޭ 0F)~mLW!=8Uom>r+ZI2'i<̅ܙf&iVZHd^.l┼~6Vk})s.$pz/%y[#KIQ6JTo bb| endstream endobj 565 0 obj << /Length1 1511 /Length2 7479 /Length3 0 /Length 8495 /Filter /FlateDecode >> stream xڍT6LH(0H %% 030 )-]4HHt"4ߨsoZ3~v;5@z-]Nik%DCrpD: %HGhAB0" `$ #Qfp@  Qj PaW, Cnb v V`@8n;tVP_!XHgQnn.+a+":Wb U.@]gum`V+ f APwtc?Z;_޿AaVVp'g0 @!M5.'Y2;Q`w0l28 :W+ uU!0&ìeNNҕW~rP u/cu=`>6PͯݜaP7_(@ !a\[ F wؠJAm W;DA|[o` B,!P?Q0揌< 0}2CqspTuJF 8y@>(Z`_YU"Eu? 5}Xpc!n Yxiݿ_(9:ֲR`'_z_ݐ(Q_SCȟuUXCݜWv@fw POie*p_ A_ jP+UJyv ؋5b$A57\08@#~$p@Q$Ep7?o_ /u7+fyH;|@(OLa7WVnjSՏȿ_bE0? m;~Qbn3hw$Ma ^EJ ["rdgyk۟W :o (F ?{{kقީuq&'=U_!|f]{ZPl3V?4d g=MɀcI2*f}^">MS|rJqd^9G|1pTL9:fU)ѩ>?`IeSP+f"VQb~ÒųՍ񆏜[D|%H1Eu1CwE)?iJi]v\s?4F0!sONbm3j7 X}c8I'ݟz|=ب GVfQf%&lO ܽz%mjf5 GW >K?Kz+Æт2nuCQBw&T("x;l+e_AANZb!ORY_'FϧIK\h0Ns$+:xj|-Z/>eTBf۔ R]gzO.\Wsuhxi^)W(w#k= ̻L P4&J4=2*oTbVIn H-`aEZ6t7CRZx:=7$SL7hf_t>7?C4?+ Uy&7b1'TAw2lDWj9HzuJ*}r0"!8xzhPG[8Xw5Ғ» X|elz/_z'kkg˭f:^r{vjӬ[ނHo[457SoPeOa7yW#>6ǐi;A: ,ew?B^og \*EE{|}{3[VM[-@Og"9$x;jKXȔ8b$'Žcq6$c05!p7TzTވ|[_#=v-/H)@ݗRe{k؏` l%KXꨕq֎&UvId^ORGr,߇0&E]!Ta:υ>\[(-we5 λ2?&_@?$$_-ULNNLǓ&0l҃6oHqJi  -k2 %h+n.evkHjZ)`mz1;>mGco`K-@ [+KLuO63]4.=Yk=5'mh,y38\}C)sj+ifʯWMup6܉r\q]pFn|o1} VVe@Gg+ YmAaͷi߈l3ܙVkXv~J`P|(m~2bLMAjg o@JAPj:qtZڝT~#ծ*1VōS^Z!$$ܽ|_RhGccSpwl"a3UP löpPg)$OX]riS8k~欵}8.-ʅ?={gmpH=@pgΑ!3ځ4z{4II9!@ˋܣd{IGOTEs` FzS:5Aw mYLc;X|[2|3^( SNkqpy{l6~͜#(tK+H`@ut{5\W%?XQBE;WOWf>46z7uxZ Ao}W~oZУH*>PwH8v|#cМ -&3%_:2Ώ,Ed^sP* t,%F>ۃqn%KamT ƿLrǥVkQ\v~:II-1Il$2eb\b3V_f"cN>jK.6A.gO&fʳJ/ѩ1EfS.3lm@2ۑl>F%|6#V:ľ~%T:5NPaVO^m n>vA`+ $m|Nx2RQW-L4K*~+I6A/DsG=X!|'cۮMY~x80C\*1Yb3:0_$FM'܊[eRpȺazH)#or )=!QZ&hܱuyMUk M9DCaJlk3`u{M+?%Yal_Yr9ldެ 2/~bW@)z 8F>oяKhN;nL(s-ZeQO*/Eo[A5~Ǯ 77%DyK6>PԷ\O+~;!/t/17l%"lSF Z,J ;o/PhniV'$S.k гQ?}D}CQgqEWWE҄@Sbh`A}]vCZ]j-'>-'$ɺc%z;y^'tęG.=8m (=ݯP#cYg>蓓%.o:#-du女fI]'hf)rtD;׽+׏Y3Nq*_z>5`M,bt[]5˙~t{BĂV;|!h?pq\F8UIMl-Wt=]:ks_9tM̲Sslͼ$>g&Ubz_+C}1:NU~/GD˾,:و,/ A#"-I.a\ *gUSPZOphe#k%VR=噆}Q\674bvvKByŪe:.aϤK\eq2'84€MIUUtXװ{`22>}B47'D|/} "bq 2zlnH{plptM ՘㦞I*y L*sY~QC@E 9/5B{u"[;!7~!O'BwTx׊.i`zqyR͡oāԃČqDɒ+<[ߺ#ͿTTZ7#))1mm:y"^o@YcH8T+xfͭ0άpl >p6/] F_zFVȐFg,Q1P5ť][rW^K,El]?%;+WDܥjlyӣӛYm#X6o:)Î_rbGBYGK:L*iʵuD-Z3 W%Xr|V^=pM,Rcl$ h@qg& %&,_7Xj IїtulFbSw›U:Jy10f^w7M#;N)y&\'p132jT"}.Q'UF7xn)SM%b+t(ĊZ3Ah˭1ǭڳդv?sMi uЅX=֏ A[sx |f9)ERo#Оt ZUM-5X>κ D%m,Fedz4Q/nYQ HDOӎn?# l:݅;t }V)l hBp,$>xYS 
YWCz-HHz4j0]'ϟ;-]Bd+bFiٸYb'#sI:f^ڏw)Lm}YIk$Yb>s+Hݕn6t˞q*/tYN7W+$w7!4M*AaW2_2*_@˾^8:5B0rđv K'~)W*,W K# "3啕ؤ%S9C/,+qmkv,]dpʑJ;d:(ntV3:)+k\aS_sEy@zwnN|m0QKp 犂9ka S }sYgϊT=[1rєsU 9Ơzw1+JD·au$@]^g mD8|*2&m3Aj{ў\a֜\IKֲZ#maJk;UϼT^d7RYZi!|e => WV yW\ol=}Sabzщ;i/8z $o;j WyPsN%Y~;*؜LlO(&رyEϯD|2ajc̛7DY%YT`#@nLc;W4²[%?V{mߩ/y3pn7z:xtg}ph&}`POyP >ˈ/yR'ʶz~FsP--5i}uSsG ǷW Vo3 >`E!JS WY} B?L)O '2M`պOiBT=1uEOG> _%J0r=?DfWe2 =\}A:Y/gQOFb_/unJ-Mdyk\\пstJ>"~F|jOHLQ7q׺anu؁ZDV ^ TU $Þg_px`Evn5g/b~5 EZ! m$;2^ؖ[O rЇ8~)qsf+hdQnCԀ'cB?U:İ[ ."AsB3߁ rTSw>Ȳ?lI7x yD.H[m2]9wfo ۻ[4nG&9O _P1t~`-J#6?ra endstream endobj 567 0 obj << /Length1 1651 /Length2 8628 /Length3 0 /Length 9714 /Filter /FlateDecode >> stream xڍT6LI CJ R 0 1t %JJw HKt|9soZ3~]Ŧg(( (AXm E`apH^0( )AQh5m$!bqI0( KG% TB@ $ RDzxQ(9yy q?́0/=ԆaP7!C3 !  Aݽ^N2@?8hy@a&9ý (? 07c յ0ğZ* D򷻿;#0#=8 wuUP( [DC}p7Zġ@y} ]_y{=PBp~A_2ACS{ѷ"ϳ#1SWK `(( a@3s$7?$tD ;? o/ 7o (  ѝу>Ygp Gt (oJA  0P}=(,ت#@?E<-/߾t艅ypK(?Lˀo>*>nn< uţ}m$zj s]apeQP#D G;9*ƿ !  UG=P0;2{EŀP//hb$ AB$ mDtDz~wLo/.AD7:"@#뿜UŁ {@etJ s'=_׿O4= {y|Fo21 _@!q y }Xto#`0{)'.UO+ׇǹM_ }j$K-u&I6s*7z/1Y*&`l 0=Ji|u3FgcW6 O$zy~ݪ=%Sb%_,NpڽcE 2QNSdܲj$BvEY, ? o$NIoAnj}J18A5BOA\;j T<~l6ۆ6jtLۍGeXGdAS%Umo?fPńip1HKMa=z6*3êzBV "f'V!rga˅?bgwd4D)ez 4Kk2X[7ii1G:cmybr $oȆe ꏶgaOFȞ OF'f%M>clgf. wJۈM݋P:I0 &LM}dnlMOd\Ve >o'_>oǫtA[Uztb5`pͶ=CԘEAYl{}^Q4nc 6~%ig >; t.~eA-ӎg6p1^o?Lߝ]7ʸ-T\zQ-M3jNk3W|zz~WTZ˴gV/l1-^夌bgIێND_v2ti쓱{M tJ՞0ώN7LՂ9E{@ӹ4y|"岗 ~˛9z5VQpAȥKֽ SP>Ǎ0Ohns}̉đ(axp#^ xtȜu^ܔbR&֚nجÁ& ?޲+OJvѳ]ݪBe6Қ;,Nh]=EUi\%DobK1oL8vQR}ǯ]R#.hizU*ДT>2w׹G躸CG&־ +n˞XTƈna}\8B0,"He.A6ug%JBi1ut-1V{.7MM&.@mǧA{{/gLv}hƛ7ÚQiJ-i-ZYEUP1x#3ɔz]";r3BB t7Bq{GÞ%D3{sJ;V(&yZ][>%YU$OTu v,^ӧˣ GwFg+JL)pMsFDr<3MZ7k=41:H*@|n h0H8S2:EqzTU{I?tB҈ډG'3"h7.۫EҬKQI 31K,AgզWE7[Y1;U.+}SAC)R7KRF[Hu fa/ȏcd>?Y(㽁w{\a.WtW$zYEV|Kr0fxrTnt…~~sA4yɦ_ ꧧr0I7{51 jM51s[5 Nr 94=$ڹW``,;j8cfj4+>i`L .a<[L:AXϗud!r.-b0m(h{~3|$qZ*em%~릋cvxQ(|l[4xg&1#tp%‹99Nla;jVS֓*CWֻ VŅ9"?ES*4VfME~$&3$:kg,V-"g)3jMgвo3Re;F Q*iS̋tÞTMΆջ2 n |5( pCkUt + 0:TQ4B8Ӵ6׺檶u߫"(i*%Q˓HS-ݏ;>iW+&HReHc$Kd t>`[Kx[|Ryd=!,!1ϤY|?9OM_El=Ih#&9۱eԶW_Z{Ko߳oHB*-ÏgǨhA$śó[5]Z%ObfWe*f_v pP֘.l~j7JI ~I Ǟǿlx{1Sv:2RgO \^ǰ'k p(10uS_6TnTj]ɫ6tvIkcpTmofϐXfu"~](yM ZE_ @5 } n@J'+7tFiV^W@Mz뚂(}\h" <7(b 'Q{rզ?]cՖRU9w4uN$h?&fW="Ҝ˻j:CJ2`jq(ǞI,j9UA[HV[́VYf^lQթwoZ%zwi̢FͧVCEM:~u7@#-G)?XMen9mByߨ@@n~7N=RS A%qop$'N)Y$ ~ 9]{J! 
cɑ,TI6˽vQV̆tC/*yj$1Ŭv+bM/Q$rFT$mrqtOǔߙhwO`st}baQjoCkB[ nܷÇ{K"q"vB3v?/ ѩ:-1+ XV"Fapr{[<ت=s9upy㣼l-kO/fF]}}cS;NMCc[(18AI/QMH|7 f"6$A8Zsu1.GQv zD D҆+U7&](Q1 @: e25Q_2{@VQЮ.6(.l\B_k3TOVU4^HϤ* = Fm7=.ɓ~g_p VeQ i!DUqr]p)8 [ #?E?mțYʽĭHʢGo'Iʘ'lw:fj\*gհQ~{tѸgߞzQ^qe\Um`<((=̥gtpVesjFz|Ƒ㉣ TTIXa)&(%*ci/N"N plcM .&aΰ4Zk|t/vЪ|/-87r5bFƛ@u4ۡE GH\UquܠVd /Kkc] 7b\l_ l Sٕ4;opT1]6"[BAu" Ġ%HAk3:MAq|SW9w+ Ư.g vN˞JGsto(y|H7b2q5F`Ov݃r)9 ~=cI K!I0?'5 Z߾md/ CsܺᲈgC.}gzM o%St%l[Ul Z >|ʉAVتbwp즭/8{>[QqїTL; F&/phՊgi=4B U"EOoyDٝ85|Z z6Lq(7LEwb'pݭr>~RVx||x-稓D+lSAAWދpe|۫(}`1U?+[AĠa)"wްG|JͮWۊ:P pŠ)C"bQnEXcMb|<掍7 jgX&*h ) k{[G_f rR]{=\7p Hb[MM.r^p w@9_rnߗʋ\,ɯ|ڊRv~!7uggG_kVG IcdpEQ҃|@ !IዓC[π*e~*e-uS/f(T^efteeZtFj?a_ * x4g-Մ]OyPcLufVi3/93ILbvp&G!C:kU瓤PIaͣ@zhdo9z?3ưa|l ~*Q ɪ<8}[(Uf]g CqcK}'_δ=?KFj[4+PĀֺf=Iu=?6WCk<(Sq>d>\/ 3m Yt~5($LUGƾfjgJ6a[7}ßLrսP2ҋN)Һt`u9|)Wz:)]; Ma|Y Ũ\|X[-dhO=xǿZ.Gh4pISaAhH00OE{'M^ NImOm>`,qq`܏Sgy *k?Dԭ]Ek-5>$ھ2>͒; \mT IXdį' E?/f1c{a>AqIV_2f293}N[k0m18AI3=s b'UKDvi״{iNs.i7zF~2PPq?Qw׫= w+yΛ,g&uUJGl*=A|sw' qUB0֬~\g׏)RIHD-ŋqT~%}OvDw0};_E vbЭ@Fk[^bݚ=g>e'X4V o$mpu^jAP%C^ f݌f~:+ntL|ev (3:?Qp 8DX+f`Ksk1qGҐ|nۓA]w ƺ[ Ub9e.C3$A|/ XJk*C~MUOy<d53>׌̋~N|5I[/x 0ƇFspޚ8%nG jKcMG6sQJ3@5oIO"0nf~8PT/=S`01/M;_UO3q+xRR.gL2W&gR},+sxDaAE Y&oB-9ɚشG[¦ G*.nυ4sH}!ݡriejv.L27 nmK]_lmjijDY`IYd_Z꽃f(~;;JM~Ll>BJXvR4B*ֹw U?\<`if}",A"0Q7$GA֚Acbu!qTwna2`&jQQe'&ߞw.a9 s8{B"lUJe"ݸ*+Be.T@WoGz.S]"O/l^̓ k]Sh^}g2SX 6z=ħf"<܂z|w DcMGξ.2 5[>-$Lsgwh%ۣλQڟ3+0(#+ֆB=h%;;xd#η;R7Sh <\o< |OsKR$1?O~`V˭N~fG?I;KqjdG2:}gMEGr/9@Ol-<4yӺzNr:5۴֞>\-] hL axkKD\NJȴ endstream endobj 569 0 obj << /Length1 1935 /Length2 13742 /Length3 0 /Length 14933 /Filter /FlateDecode >> stream xڍP !h]w'Xpw܂wwww9gfUVWu뱽ϳjR"eZA;#3-#@XVLKJbl Ktt_ha1tHXl\\ &9rD ]-Lt);[,*}PS99iL- m@ v@g)AclEOFghDghGIpp6(@rr6?T-2+ۙ::k cG 6@YR o+XHwdQdCcc;{C[ [35 /&CL05###O1AEᇾs2vwvsC!e>6YDh? GǮ{yVvn=Zؚ!Ş^()?633 tݍ(a=Cd 8;=_00v,laa?Nx?>?~?."v ٹ&WYJ:~̀ߛh$f4Qp66U1`@;'?.-#}LǥяCKۙ1]LlCGGC؏#@OƏ14z:[; ~0Ok8~q?@;viΘ;в&J׍vowtO=sɱ :2N0iyuGV`:%AE?Nijvq"D׫x3hi B.ڃ[{m_h~%4K4mj_,9lb(gZ|*KwۻRq԰ާQZLяs_U:?~::E)t,Y\ڿS@&e)B)¶utnN m7]ߑXU?͹ ThN2os0NP6 {XY\4ֳgWIS:+WQu5"u;U]"ibŰ)GSقH~eG,,0>+!)SDr_qFqx?bfA7[moRIly魍Lm}v&p }~G442N⳺A! $fwFBUwUlEX%t:0,<܎%6xy@ۓ4_~ں6w |7"mBzbS+j/q3"%_SbxAi>`f,&RJ/~kv,q!Ƴkɷ>!= !p[UsiN zvIt:Ws8rR/ elM9؂0Q lrGm .sry,fzi%W2鉓;aX"?ۓ^XA_]SS90Rf"v{TM'$0p |#tQbJ Z.wV*瞇 ȗ&0a™"4K=`YWH"5DFo%t mb)I~Pڣ XqRE:64VlsmH1]>Cg@ *hlU!f/}#pv-Ej}AlSIO\n4}o*FᔒˈV&8,iZ^1Cxf/bt]SX~D0ifuXYdHkmoyA = hNItCKL[R L+ )'"$ 5tmE큝ҼccCKHzz߈8{A#KUϮ:uR[6|t_UF\NIjwxSM BdǷL7n%a=1A[NZZyH=DKou'2*_tƼo!`$]@|D^>9S_ 8)?TH5FgRmᕚ|gm~ͭ:v<  5;u_NL@8 -A-ymH< SI3H,,:qqԂ`H2G?%v 7;Rcq2 IbE~g~} Ow*X}3>FsŒPV^@{/N<z_fngrEoox@{^h a0)qH]H[J v8a]y1(R_?ѕSύIL"*20 J(rb'DY=(^9xpm"@,ʊSV䧇"x*]$`4R^>[đs=5vffgP, @8$2;6l{NFR_MqV9Sň5Dl^nȷޟ7H{PKD5R(`n&2gNCCܴU>p[ 8!"h.߁o7t=\Xi+؞@Q\Zbޜy,1I?` l\L͠bk+3nTO[>smrU Qzݵ=H)G~^,Gy\d.[SՍpՄcӥ܅6QDIן6=gӂ!D O6l?wS>mWn$m0Ɵn.8#`64b\,T )WHF]` dj>£ȫ5ט|Diڙykd;o:$Ef/_PYǿCLp|Su/JS*7D)6$鄈CKH𫏗& ד=D@#Ku` cp;Os>(cSN snTFQ ߾g%ː3>woUq{؋7D vkPۇYO gX#~~6YWr;6vqU9! 
$1'_n, ![[ٵط,`&<uz/@sgDw,f7UÛ>(wbBÌTJ&4nI9jE69u1>mׂSog^XQ @hf\]RD^|R-oJ!={c)-c'7Eu5QmB%|,7pTY|'1 .8,Ph{"k6{ X%)2颽"@أ$&y@2Y3q>$MÐlL6q=F-jhL̂; #eƚ2ޙFQ7,\K_x/_M$Xs(3ۢi觗Ybԗ#$&^}d`vಀnH1)ӞA 5 #!jYD+HHV#ku&L#>=+ɔLUɳV6c^Z(E#oJOhuGh- k`a(uN=[^61B%'XͶnh'°ҺVL۬BO<"@ <q/ E Хh񾢑 Ş JjN:c1ЙG3DTV:7yT2CMIJD^ 5ڹx)0zc&SA%Z3<~2qE\No&@bڬ|+@MGouN”Pc_v}Ngme#"(E+ I%i\]X@H SQ->2-|/R8iy.V98{F(2QJl2) V{}$~< @/6?#Ў(YI|go "FS['?(c6~gd W8dIh)R'[]O!a]IoBˎ'g-~a\+GBu 8psrtf< W{ȷ(SoHQSW;-p1ִ"iA3ɋQvbT(5A( ޥa @ [~F= ʪu֋-t* > H%;"i'|>n,%/K"A5{t8di$GWvY)JQ2僙Z{ ONt4 R׹=E8)j*jzn0^QNo |%'A&jmW1OtH?gEpwͱ1"Q/X#X)0fYZ=1!'ε5 o?D&%]z*:}2bu_V3ꌂ΀7sgՠoh]]-9 P)k.|R0[ qxVڻZlgDGP U s7t~PԃXdHq:SYF?r 0 {Mcڬcl[g`EEQfO!xvAѷGq19< lUpw+µ6JQQ,4Yy0Xv{O"~|Qj%]Jl!?%ר,-" dѷ袹6@NX6=jNEw} TŽNlHӬMׂ9^"^ o^UL ,{#CI)ֻ Q+YQ qR YrO]l]رػOmm5OZ8Ɋ@񿯙v(KH]X: #$77>A_fI}휀:jx{ʓqo&MGW,(tAU2;B)}~ D鎘S+X5k 􂓤p[!Cc']g6OclvFՠVvH"[OFG2VX=ҲxA<6͒78+GkOb]]cSW"hAnaŒg[9ux,IJ#y_a¡oetzCk 3boy noU٬.ß˖8Vwx"KJ^5jɁcG)~HC>'n.QYX/ &nӄ=oAtg)N.9?u#|NB1^]kW,̼DB/qۤ1`O) 4I9_h[҂EV6 ǃ :̒dAĔk%vJ~8uKv(ނo-;*!m.>\8N켴≄Ȉ-;HǹST:“mÂ]uATz'?-1[R3e/sO(<'e[C]ZYghjTs !NIYP-y;R'Tڞ9 ^;ꕌTnAjya#g{~*`p=< nH ϽX^iaoU:?勢l Df.wGLl8 :I 3\[ߣ,FXvAq ӵ!ivuV8[YmjZ#?M'̮G _EʊlӇKcmJTML]w*[6kuAJ8E'7S68TE ez%(%^,{I76Na[O5 bH:4$XƯ1 oZY`^nu(/ V zQD@,/c"rJ SDڱl8Ui7Q7dr^q1b_L3S5j%jf% V!}TMaXnIP)‹U UCo-BΙta @+x} /O=a,p4En86"vKZRoQG6gWU*"]02р3ig\Hv@,CwhU%ox`jJzv +m MC>}p'!:樐Z.&ySI*?C/w<ڵ_ymc\1 &|0T/!;>QWID|cXF d6:{-2-0t=*k|Ԣ!7)Ca#<݉o]{ 1d=t 0N\6[sV0ȯ"*#Ef<2GȮ5t(7mg&KX+PS^OB^f E!Uwlvl7lHFIjZyLSmmhjNꪯ08Ye$uv4eu-HH3+NX!=}ۙ*Un KQ!VnSMuCCL; .>->ghfu`ՃSbPZ<8*E+d;!:U4!T{%[?q&xg;!wkorfDM mn U0V#(!+RDDqG2zp/z+CD4>zU?y t(on07q)AF[fvZW`/jMP5 =&TmAG~ǁ-T+Y sODf#RGo^WJLjFQJvqktM_9Wl=jVl|s%Yc]@RD+%WE. 籜GlToB7v|^ i9ost}}~ ioel5./BZ:!2UVAXMH ]OIE9ײGgf|,TWDzfϕ֗NH`u0ź!aP&%95%zy8W8 ݼ4 |~%_(aۨOU/'iCEºOGgl.h"RvjdڬǃRHsT>a)j%wXT4?곏^za؋R445M:^Fǿ;`lTQ9 4Q{0|>!ztO f}{5An#l%$cg|%V"uZrǸW)‹6}fϰhXG4Br$8+[؜^(*m#758B>xvdDMK(V1ҷ/6Q['}%w̸AZ#L&͡7wf)VW.L3`cӯ52[[ʕe@,+8c5ƭWYfMV>~r#bd3Ɖ!ۡϚXR@zH]\đ Kn)Hf❀byLy06P492#gZ@Lj n2>^[\"sZ o7*J•/߲]4H?yTZS!(D&0A*p!,-g5d0NWy_"FW Dp:1@!p|==z ZХtͻpfBAsֺ~yZKQd4ekq|kPA$ґj9W$=Rh 3+ Zu`_>Z/X/|!޼gG]%P&J;C֌BS :{n cIKL 5 lf<" S+x61/2Tfbrf3x$$uD~P\Y~^;llGz?Db9\_Z]^zj;xt(QvA&W)#CZ ȿsWJ\GlOji]rdnJɲ|;"휳-U,m5 Hʼ\D/=~xCW|f6+^:SsSkGP>FaB|ۜ'4KI2-">xӓ\ ^{fz x8), ̊orh# F GV5bITҢ9gn,`W㯰%hўOfqselUb, عH}LJtyZCAsȟUI/qhf!M4=)x9& #44^J7BZ[I͟itnC**+Iegw۩;Zɳc3|VdJɱޑ5ԻPnһpgMaYEȠ$$Ïqi\l%Vcf *NY.CDmמTLj@ܧְix̹)DL dUiYHjZLQ e l:3M<+-8=wև-?x%o!qZNQ!k 0@JdJyMo rT7&NiO3 q%S=Jy)x;vnUw%xDK/LP*%d\*42l=ɓηUe)Ѝ^<խȵ(eϋGbi^(3OmªtyKp_,Zկ 7h.w(n7!uzkd`'PQ9426sd$S>id:_$D>"^#+l .E3AYP,GvjGZKp*i65[6^Ko)q:Q>>9 D/@8'Χ#/Z,6{X.z_Yrx#QdZy!lzUEyB(YZX/q8ٌ2b6p,T57aᲺqQ"(MI깎N'M svEH)[$|uA;|k\ ua޹mtC݂K"ߨNb9ٔf$` )` bMvMx KOE_)˜z;"&bkdYa^C“}QJтo'oLڵ [ F_02 Fx,Nji|6WJ:"Mmvov f?Iv/3vI ߑUw}JϤM-F]}V.ȏ5Kslg>*Ɗ)JIq b߸V|NdkFaX gQN|E^*=ESS8h۴3 yY*:Hp}&-C f3FlˍM*) +->NMfC$XKTy\6CI>__tɫou,Z2xꕧ6IJ~OFy_hLX%{ƧD]G*F23ahY;ÈR]\Y ~~-^|stu5BKqaԩUm̱e{MB&舞| tcQ'".@2Z٭qd6r,,TP`΋9r4j4b,vlBvUYHB.F#J#@<Ng:aL_/ĉK/ Acu;J^8HTUO[i7K>.ocb|i: [om`ӷ'~~&4r_?V>Y Wg2+s7FBZv7y94M]:v ']QSTI܇5eC.S9VƆj>ysIIh]:lx;vwotuר}Ս!fǻxiyսCMM%yۋB ZO et[FT_:NzFt{Y83)&#́Z ~iJޞߺ>)Cݚ"cf#B];HQȔ;[؉]H*\U榓sa}7ŷv_KeK~_՛KNg j~\ A/pT\P+kCyCyJ"I(y ON|:c-˛boKf/6K~G[IZb͆G~q\~Q-I#j5-qBq&yp5I}sU~TuN(vCzT,ɲRǁYsH5k+5 i^cN@fw?70Jpks2 Y'ExXy2s\ C'D8 9vϐH ^j1EBݕmH(S$TU08G[KqNn0m8'"jZ5TpCHJTnčXZxҲvxY2l:'7 B,RXew_y^I4cR+ ׼Ls -C Jl TR MG槌%4aP.9E!FȊon\s`l1vlnd(-z i"ڟI̚vER=BF ߀81{p>7e㍟W9 +Ӻ -EY7R–n8Hih6> stream xڍP n% =`%Hp-|T<{^{w阮XAN(ncg*01YU@Հv +0"@}7Q r0ع8̌\!ZrDAFYzh@.beb 21G!!K-P շ7Ze4Է([.ޚɉ^Ҏք7(퀶@#r#TLAv9m7{[6޲%e@_d7Dp#b}CC+Kk} l0Y2}D} ; }7Ÿąo ggh Y0o,6Od 
4|wlv2a`͠ 8%E漙l\ lhGkN?o<ܬo2 c#`opdh0D3oo r|e|k?&>iuG *EUou [9ؙtl&&fۃQ]ǿJ\O){@Ks#vs.#kߊ,,SE[,\fuZRՁ,`o 6yh:.zV ;q3HdohWeWc,@` @|o3fhvؽu.oZ1зwAx;7pczJ# `[ٿ-I["ql?L"fA"NF?_q XYC5ѿ |Y7vi/F1|Ko п[qb-o|+_-["Av[^z/O?k`kv9o : f yͪ[*>:2"^SsS὿bj _G?b.x7f4YsO ^i66آuG{8q`ޕKKld}֠t-8ECks3r s1=hX,.'' PJkpt$ݥI }b3fb-h#!u!~';8`|S 1߇nXY'Ur{R4UDD :f yivmٞ(Y[> FK%p7M+fAWP,gM8Z~L}&JF'GdK @?f𡐿3ͧh~;jYEݸZ+8 t ^;,༐Au9ȉruh,dT߉;lUg\ꪵ!ѭ|N|+i"! rFFlխ`ec+ 2hZ_2SzD=,|qtS7qw;]R69 e wdqpHRRi`9 Q,6blFf5fU6L}<ç$(,5JJЗ~5>r7P}81/*waYfVT.Jza¸\WNJ_ ]zC $.8_Enޏha}]-'O@LfV2=k+hQ|w>yz\!aUaQ }ߴ1$b jڂ#Ն퓶j鉛*(X#R((+DĪ۪n&'+R<:^.h<*Ù9>n1'g^z"g^##lˡDQNYޟ s' h%N!hmgTRfbSOr ėNuiڢa-pHl/b$ADCjXsy VIQEYW'։y}_ ϒy &V.96'7է-0,j.)]:ە;_-X࣍Jwa1&DzK3$?/#d/>j-h gY`۱^? ~ [S>gv bPo6#)I z_j l=*d"ŀ!G-ϐ99bl& T5ƅaFS W)Cd~苗%c_vMKzL0G֖s5tFr4;viG;PQGGs/BΔV0gM *ճJuj1ہב /.-ki]Qcw.u'ڞ>OxUŸr&=N|{c:)F$GSGy۳ᲺD٢Cro^Kqu"44s괐bzQF2G7q~[US|h~ʳ8HRhn|&:3&φ?ND#5oh.zxa&&D֟R~!) d+A䇢, ~1M(ں{/DLg"O7L~φ#g֍1|!2&B#Vto 2,̏M5ca3I[vsT/KqL<*}(1?GzDф(&[5S'b緽wdt>"FZ)i\fSK+TqHO5PW3*t,m80\* j# 6 | p~aSgs6zm|0p"XZWqk2/E? i]- "&ǣXRT<\ecWƪ 4\z/wcقbX[֛72QH *i<TRFUr&Zt5ghԼ?븑4+_*g%-^$םu*m`>5%R6 s̞XK"_EmGJh  'g1v[5W~5ēs!/N47~"u+}r|v,NcR5O<'Z87rwY%4)\kZ!#liIT+h_>JbYwW_v~~]Nl}BŜo,(RٵSlcۉm)y5*R`4c?]>yR f`.c/"՞r=~qv+p ;(ģ9o8Dͱ]Ӱߤ_'= '!?B.5ݥ1l\pCZ4aMeϩML*RG L_⣉.ĸh9he$w,">diG;i˽1k{{'B#i>!.UhۈѠ*k [6K=H5Í\/DMl]"xMF*Z) CK^}ycQUXN i\-97́u1 !Q5hoٟfq_>oSʬVtp;8fa{\Y/m\8]8v|K,D>'2OyWYWeξc^ %ּ"ej'I K6*Cɽo=3Nіg-y">K@,"w!̠ʆ\x1B}SRsߡ8#C<|^g-%5FR2;҈j!SeNR/ggPDnHM^.]/`EyU\3E]GsL vXQc $h! ," `]8,V^XK.s.X[HcC])`6TIjΘb˅&MMh,1R6iAʹ?Ivar -[l˘3<%:{G+Gp:[bfs$+tR0Ly=ٯ" Κ)}?.vϞRB#~]Bs~" H /" Ԉ'lZ~[p 'g֚wNr4rG4(jkE,$ul"n|FO .%pbsW!3U~+{N(8$qՉ_ƵN;!9V&ܾu. Dp7Xbc9rmzpX#fiwOψc^g"VHQa{dWػoE6&3;4*HYll_c-5j@&a}X kex&wy3N_}ba{riPGyggx :] ڊNwnRBF4( &^ܑ%,cwh;nN#oμA8_> iEf$6c+.Lڑ|޿1|/fp읪= !k4nuwOejAaD|dlr/GmZX x\h12g?s]qWQ۵5NB}08?CZHǎ j] }Z= ؼWp Аed[9 ;OC؟/gMOt\P^2QtդkeLw9T@q=SQS@_nYm?% !VώtkĘ:Y.(~ yڽB4"aDZsrL {Y^1e4SoDXcB'OK}SZnJ>"=v8 ;]5X,čO #/D6&X{ζ x"<94.MDf% Be@73Fl*/Ց,/I-D^w(5 `\8fbntI}Y,Mzoqp,P/=^;1ȌCC!o>&x;Xp@Ǭ;]܃MZ<C PW ;LFluS쿳isbh3@\GtA׸3iSM/Id8) ;Ẏÿfjbd?"Xwc"ۂbSx]UTH\qU?s5MjCpL NXx~CnJZpRtcUH~#nc@lJt,!1׬ƽ;h 'q%P5DRF.&Y0D5[T6>*߄._1) gVJhۿ=FB#d{;XsW)g6IC:#=6:V*Y40s.LlBj>^I;sx7m~_] ^qrr*{0A%eͭBT| Du9/ҙ9p7p] 3k}Ѣh15B?^|fs3]AU C7Pu8SM SitGKSB޲ AQ䐙kyDPOE <|o ֙%{{8:-[΢NȪpr]^hYA `ȱQ}C0~-^ |2kG|I:^5\fw&B *BuuyS n&1h"&K5I#4icВ {<ǗqF@k}2AgXαKS@ēk',H$3#p?E9u@(V*`dq[¾cu;Ne#<#E>BF GE. /_\tB[M)'U i0н'|dmcQھ3:$g6 /a#,G?- 4݂R4 ?A6tg1h;$ħw(lo~AIpM=X,29y伺d[Ÿ: Wj;76ÍhRs%]{y %4RTGR y o!409.v\Ld Y|/T I7g޳5E7{P &Sh0i̱ʇw9b-(!rЇ  ~U# q 9ay\c 9C/(t'p㍖y['4g977})G NucI\6w D̎9Hb^3{4!_(jw[eRoRC{7Z>շ-Te?qzowİySc=7s<.f1\$¶옲){IC}]Q}i:Hb y=Si=OT|C BΪ+Qx!,SVW>xkL*ãc|Im4{-7!Ћ&yJ>墣,1zu~/fchw͜TKXԨL·p3x|nmqD# DZ`TԇC5.ҦEJ)w >cx=]H%i}ϫ0, W䕤CL|ӮYmzO(䰤`dIAC"C^LPckf֌RRSJ_TTO/ `Εf*D sf+`x=@q[=h B+d,NJ_ڇ>}&+(8;dIqOcd{LÚŜe>YI =~g۱ :bN(P-5sM"}udKFK e=`Ǹ?ZQ2hp]Ќ%18"jQB-)BVe0P(s8t]GR@vo>b<)1"ҍ$R*9hţ>1zSX))GO? ڰ;bQc0@_&|4A{7d`}tLEPG"{ԁa*Z51/55?<2͙.NX| x57AᳺDO1z1uYLMj1Ҩ~_ٽP~:?%j[g+ʧ O ZHll&xƖ} XF':3jo]q(xġxu;\ Z,u0D*lKǷ@+ּ M\t9bݯG6lDzqb%h_:V۩I>ۭ"etmG-MuWzz t=EnæܪL rfrw,;*tT0ƒi0XFM%G4>c+u8'ͮYlS<&s|Xښ_XFPgJEՒoWv#?+–][bgcq,_>e@K%u׌]tO+.RT01cwESS _8M(يFh~85"o8N+=Vgmk=(ScH_Ԉ <)DJhwj=Bn/( X\ [UG U]k͈Z== 7}ZqL=@ԹeC"O:ťFaiJ( '*?LRTq@+3b>cߴE]X~cv @EM$\t-uTP1e%*LB1-JԠx̾9ew_xpNSZngYN:`JBer7-+l99aZrc ehqZI ~P? $ci(j]N?ш!J2]%tUca[#'K}bFC}f,4ٳPՓE'=y$ ip*c{t?HARލė剉a|Tzs@|p@x.$g .n t;,X2IF碍w}e6sG _ϙ)4qiWB5rB /a.zٗ O]xTY>?љfioNM4wӆ/TĠ;TaéM6}fJt[eEEIOC /׎ æ+% 6"ֺQJ%ia5{Ost5΁G"_~VE ?K8ERZ2ZcNUBhWI D&'}{mBgό]5ܹцA:I͚%N)Y']HĵJeY7Qy0nVt(m8KO{Rg4S,c ^. 
:_t4)JG23mțq!f b7wxmy..'N=(M,ҹ2D#&YrpZخ6:+4,O'Ncq>K0ЧnWoq$vv $ ݡ)ܝB5N@f);#Y2PD)gLQCchPt򴪂]  4tZOU*ê#tyr4/,cHYT0rg6ø>QO޶5P9s4,˜@1p&V?'znu7&|\}"_G8qVJ[fR31bߎgĻ׺ЩgȊQD-E\b+ir N.2e&[] xiFa&b syGO^?^ѺԂ$ ҰӿDO6M7ۍyU%kטЪ4QCQϔ/>1Ԏ|hԦ ewQ|?H(X% bL(&"d׎9j0U]U&ldpyۃhf`uGE^Z8eMѢS)ŝRh ^MO#8~te$pI+;3_v-J?2Zh#8$Pz'"mǍ{Y"[}8^#2[glh@B&&]z&9_F]%`u+ Z0Gć\1] +܃;"IFgM\/^3qTCǝ߱+:'dbbބN- P_AS* 7$lA.yuS&6f]67G Qn!ՈbS̀l;w5sS.F=Ya>̺?+# ׸CkxX|@IӜTQMqM̦׻:(nA0ނߗB mcҙ^YYwc+0,9.ʉy~+&4mKr=[3Kjkv d3 kQuKPgrjsE9nͦ$͵>[5tYJ0c ?]б@^{H23L0 yGL UO61IL2ajyXyʸt*޿i"\q9R`s!;soh@ُ -~<6ՌK(dRz rO/%iH’ wpiPįw,.}fڑJ?h۝:+3p-0ݯ43fay FE+>6G]80'o;-'cJ>:/X[]ljh7!!:W:=psJ6HF.1Kv nvOEsܫ0)'D[nfׯ3E;A%ˎ{dB&bcnWO)zzs6D%\S&N|]MCAWV52ASV~;]`5l&V*²@*:R?b endstream endobj 573 0 obj << /Length1 2386 /Length2 14991 /Length3 0 /Length 16405 /Filter /FlateDecode >> stream xڍweT.Hw Hww ݍ4 "% ҥ߫;Y35fv6_, x۫$qpL]\LC e/_ `e9U .H[`M7AV/UK*F.v@tC 6fhn?>~S+V `|gwQ'bG_6~]RX[~j'lpy$4 8Rۿ vAp%DN0W`[ C;>#V>tK6؇r} xRt \D'{?IpK09`_ueGt_l3u7uKOf4<Rp +k{ loWu;]sיzx^@sEGs0ۦQbO)AͻWwr.x{]OJv\E|y*uЊАIĒvb,XXP2D,oP.g.2 ĽGd)ti'#qI:+U"*.CY>@Un~O6$2YmNNdaqH ;p>BɭŒXWou>ȏL1t߯JS\xUc$&9ne <Ql-=OJe~$VП`}:aBS/H:R|I;fH&\>y_'ȸQi!mm sVw3 2 ~ ə }"N>_{W.rɂWC:iYvJٖ@2iNZ—nj QÙ%d?/k)0pmZw5OQd ; ^D'{`3kEXњ/Y?GafLMW-`g/{:oP%BJCM |F>h2i! ef(h#Yk@CS@"`߈ɉz"kN,(qoj,c5̉y|AQvhm[պUA{5\pbFXo&}3 ︸?UoidsEީ$*[)d5#` 3CeVuq-AI_? ٨-ƬyX"X*mg㲻,/l&𗴭j52雔EUG=q! Ad>,Ųdaf+Z{9\{vB^kSOXk-PKI^@ڃWPރf=Kq8a԰(G ([5B6Ww9. DO\#_e(dHCļ!!lK/uOgJs' >4\uȈi|B-k\*rQHR17:|aD%s`fף[VU*'mӋE;娎vT1%|CL~Y9O*F 3s=N4 fM_F=9Qp`51@A(WM޳N[^ܔef+BG  W0} LCXL;0QoSǜù ACWx:p~ؑ3\7e-YP63]] |fՀeiqGu~f%ʢV)ZUG+$c&cc@m Gf[h +C.:>>EK3Y }y ”i2BufLK.HS{|. p.g|b`pj^>nC/'+N\nʍ9Zy9gT_t8d M1STaM pA]om<4"fB.&0ÜwڵwƘ"c5Y =dD@<%iomdlLgV?S*f8iol+ S^uťrZ%۬R6D$z#<(fa]=ZX뒪rf!H1/'/6g2T姨WsBb\isSKf;E SEfRxZlpSJkJ4wk8 U3I&YN?\&]c\'cqKо뚤u.S'0υSѾyæ9e羟& LmU&!TڂE-y8{Wv9/sPYP2:ug6 o^ȗBZݵ=BS9U _'%0'&⴬^/Q `Fz1o9?Fjk.iP+0`뗑t\:m =ZSˀy" nQ6B vTbYO_2£AjG+Ӽ*;mz\:%yeZ qd;{4NJZ[?]+Ed^q"a 9a f>&|jK-Y.eFcXL~ifCO*~߶:!AvĚ\n/ImwzpY>o|[ r?H;AVBe_:8ޱ6x krSla}9S =y%P_k1~W lsʦRqPIE%*Q뗷/-/S╇TgGR%Df5m>3x-.2[* Ko֛҄1wg{V]S6h}JC`0\;_G0zS9~#5]ueX4,:bev s0K4Q #@67Y:/(•L2QqK9Zk|1?"άwvHx! 
r-j]RML'/b$lnRJLM >պ`k=·,f9e lM*)3, ķ&Qcu^x3!>o= e%ɅiRp  PqrΟ t3}6ܪVS|zd`XQ^#rfʗ!Ry &HW}<y64g3o]g'j}A}=wcg'2!g9DuffؽWy :(GT{L]mJ iU-/!%NiCtNtԐ`+~آ߉`FoQʖ̛]bP f2 Uz-eyscX*v^H>1nȵ:F -3[(0NZ Տ *\3 _ʑzɴ4nBj/b=5qC\%kG,Z( slE#/"D.nMҾ9v:Δ:`.QtTnf^H/>@5K%/;2R7\SeL{t¶ah-EROHxׯ۷ӂĿ(6|:Z94A8S|E* !Qړ!,2L: gQ2K2ڊQqWMH:r4x-v$X)2 UjqCIpL5͡vxX=0ٟv j2bNaAZFZqi.N9G Ӵp41̗ϞIP2c@nZa|0(s<6y8uqP8 tJxk#yp5A87I?yD-aasm|y7Т5 -|^^9X֑Hdv6'H֞Tlq;ۗ%|($ȵe78#ӤZ p:):}+|R?yiPCƢ=FwL5'oաBy$Au Lx[<ӞRqE[tRWRzn2A6ެh푰BKYe>H*!;=["?>ʱFMp6q[8SOfcЅQR9US& }/D\Gf~j!YVz\8l9z/FRtS~R=qNVf!{BQ>84M Vfto96-!cbnRE_m*;l-n=}daE^gVP 9`m%4rar5:;j\W6M-*o.dyM˘oa\Z=jWj^Lev)V(o|l)Qͅ.(WS="wO D4|{ Eo`+ȭb&p Je)|CMбXSrej_U^8I-M Psl1!JUڼS:&kOck@~tHlQ:կ PnG?IE⃡huoK2 s3BZb(z]:18zȝ+ET,j>򸃂j"*eJwc~&0 M`0+F3-~yND7ՁmEXU|9rjMJZE؁^jw4>jr8V'QE..,c`\~nP7-_h 7 jLĉ'z4-ZC|Xq |M?7iMrt;4_,!X'gY>=~ҧ̓* ۱V3 Nly`ȇ)@Bp"jEbsPIf};Kۨ/X4\Ck0ZX#7^R߿ڊ#\I2ǜ`mug[<BUe8b'=Po$acr7+l`K|JS LNKRpJ[Kܾ܈ ʂ@2w~،BqAf=GH/͹chJV$fzױXBVbX_kk3;$U'ɟoFzV0$W`ph51%|C~^D5\77 R}y}sžjg\:Y {%Jx@p !bk9rXWd' ~<"X-7eXay)AkOfg x Z`sKv…c@jڒ @C<3Gu@ݎ{?zAhMVdۊ?2*aĪ jUc>"hM @&0Ci/JIŝ-gPiH+'^ $^l(L5Mzyۋ<=m5z+ ^b#b[b_sve&*on|᰻,$ݶdܹin,9r}=uA;[y"Oc) $t*!l^T)yTN~I~a?p -E()A&Z"чUxEQmւcɧd55G{X_Z)Lf2g+qEoޒq|B-;BvY<5z_:۬Ý9 tVy6&DD#=5X xqj!9Сiۯޑ ]*+/O(Xq_hSM:rQ^NؽwנL{߱I( H\>{If .,|4?HSGe;9.MPn?ggwW"^F|ln$+w!D89i_S6gZ^[W:03p>ubk?X!"ܲ֘v]jVLѾljp({z|y :ɶB,meGMYQˑ .kUZT Ӝz USVEaJ++ &nQb :m|ӻBl̰x-*z R6Ik^^Ь[/_ 3]E"%dT"HB?,4߭uF<_QimC1Y`dn~0]׋բ[<;* q؟ʶj>:̎e[TniHC=${&l'f&a.ؤ tp'!u :LЛ5ݏ,3DbiHY\fA{dLo1r4:aPxQ=}87>nLj}n^X"݃&Qj~y WÎ>~I\~R+}^) ;-m_#_{Q|a{ѫe^.MQ@}DTR/ 욖 zk `q1>󏂴l$uT(YW >"o&, =VYe2!zY)'U(Uscpe1-R Πhfh&Z }8 Io35*e_W\nvP@v3]F,l͚N=:˝:#3'жSfw;QMgI@j"l,(w< -;HR*<;,rY|}Nt3()O$PǠ62L2Ϻʌ~ii~[}*bz| rSG!xJŋ.2Do* <{ǘ2ʸ]{! ndI4Έq85"\頹ؒ"f88D7[ T +%[.a U# ,=)xn7"ŒB7[ͣ}=M$=3,oYB#f1 Zߨ{tޮs4sq՝> YLMf?`5a#ll!u0) P0YQUQ$xv<ƫ/c?g3C7L±F.KW`T,F|7N1 ~wޘN2s1%3[Mp| s5q~6Oybh/|CH+hxo(=))Ḟ֡Q#wKyx:"UD0Un+mQsPRdO湦N"RJ0n[~ |%HcޝVy!#\R#QwaRaxQWo&346[,6\NUla5B"+p_Ir咽e>c3Jj 4ռLD ^TsԷQ%Q aʛ1@häUau |n0nYEJċ]X1h_ - i|/m opCbrn?nHx Mv, `]4v8#

G)}D-,~\ od11l$țRyqwŽ)LY͚͉rG+ em"V|=4=uޑ<ޗ"o|( 9PEw@t@oPP=v 2]ңgUex֦yM0Gm.z7འ8%Ԑ]"\?,_R~C4*1ݞ{Ox4MKGTF8gyڴ{?67nx+=7SpR?qzJݠa)JøP$D-Yz? YfUP±L>m&wBk>w- ai+[ג =PDɶQs`ia қt=QGTp5<qfc's cjR[.Q4րðgUZ #dza+0Ѹj#+RgrE'oKyJq+=l 'z Q6{)^$LfΌ=LD)^rHRh`^~9 Kќ߃N+.TVU^˭1)3Vݳ*-j`T:VGcPN=Cԇ9֑+.9+"Ή-[$0,ofs;P")]ć`>%qjmWWgtt0I]R=ߜZx=3/|`ʊiPlh we4HWusH_jW3a}0w[?Khh&5, [g>FI` `K!X0aguUGo?hPXJsc?ApMw}C&8oF 8B2l ;MWlVJiHA߈`v@[ˏUW0e'Hq~(!1k.x%"Kh$0BW&M+OOEЯl}Q&n/3s"$M5u t^Z:<,b qᖑQ` y:c (#Tt#+=G\yn?VIbU9s.PPT^b򻰫9Ϸi wl /Sd nFrH7̦{׼Aw[QSXlej׿&Q;N0 %m~ ?p>kE[ߨFV` jDe,)EU{ͩxЉz"ډUF^|2,0u85K2  i (m'f;ي"3+[F l? 8U, j;8LHPr`*#.c& K(򙍇Bmʙ m ъ֑Ƈi A_:a"ܣV? fG3_rCbڙAo&u0?Lrn0&oDpD+~2$A,gv]4᠍SBo FY}å:!5-e@/xy)aa­WVV Pٟ~.@7-4ME@m%.|pgKwhOHD; dOK89xBe!""lFD&P4xRޝZpX'15Qմca%;&Bs,FH\>G fh)ԑS3|q#yڮ]E*tB05%WsGgi#W܌:#p>6{޼Y)^s0X0Oh]5]:uUq,`|!7|na[tyMDAM`9W$?8E1nx4>/FIAmD^" &h]h-_Q- +/%'#+0H@#NM!p[ɡ9s|w`ϋa Â3M)2p8F)umgv¦LSE/"85IߞEr,zFNTZ)/i]:s:(־ʅIh2l>^bXi tƃk iv"Fvp+ҍX%0 Z]ѽu;Ni^rTj1Rh$ɣneam:ATtU5q̴N.2"f-%mu-n*J[/["t/ej/K=?)CbQ3/ Hk'ڗxkP $Pt瓐DW*;!Na#}x3b6B^3~.el8k(Ry =m  dJ)nf_p]Y._/*97URA\%$?a(y?)Vs/ $|+zlJ)< +?NY Q0R*5SݺTĒʫI m5<-#SƝBkS Jega3e4]%uh-~sUZ Rp"va\2z1 nk.Fskll&΍PV9?%wcKJꁱRñm쁌HT0: -6wA+ q4d.^AmE6(f">[j^Sjtb!6X5^fLd-KhgDBZ3dqIrud>9dfJsu & &-YX},:R RXws=(sGgnQɜxi kH GӋc#(dSZ~xk69]x;;@Hf U PHGFJm|hpbT9ItOx y ni{H2QcJ7!ʷ'BEqޮq N]iL g5=[Om[ռ5գv-c)}=3rUT749FBRdtE”u-PAiTIVvOcz4O{O*K-y޵**d_tqWQ^Xo 3^<ږfk=YI "L~4$oZ- ܴo޸cI|Kl?͋Q b#[aE3"I:+׀ V';=u/75se[6zIA2.wgcӨ[ua_yhm3PD*i10=3=kaM9fKW iw2J?  쩡{5(̈YޝpA`Dm?8/Tga_V $bR-S.Q·OEC$W#nܐa]+]31Sd 'E|ĥ"ed5j+J>{1;ҼEBJ+(l`ӏWw~6n/F:Q_>XqX.9Ҋ#ڂ5 1DĖ_@ؤq=5ٕG~x=+.s("mnE2+W,[Vm!A6^$ *ΧRA;+nFj_fĒܨfu邞YƶѤ]+~#օكϿmY/v'.},o&YyJ7|1|a*{9'fI6KBᝦ N30jYbopU4C[X0q&w5y' !^HlNbVK؃Ü<wrL'O(3Q-L3mOR;4r[{D &<\7_P:xnןD_j3S;jMt C԰ endstream endobj 575 0 obj << /Length1 1520 /Length2 7029 /Length3 0 /Length 8041 /Filter /FlateDecode >> stream xڍw4Ӷ]pމXk]-J!zF(A-JKy%;}Ϟ{_3of~,.-xAb./A >\&&= t!bCB<uCTGw'>H/")ȃ=`:7Cp.Hus_ +**;q a6`8FCoN;('7ٍJq0=q =  3ָq={]7' v ]e5@s8/7 A8089AME5nmNnx!. 
(h sAq~+͘rgg>ybs3wo?/`p[_mغae?97&lP @" ~ @ly~eݴAn~p}t٠k'b$ 0ȏd~0[+QSVzg;ee^//'DEEaAQi?Peڛ1Uǟ`s?؀@s3 !?+Rtwrg0'?7uG,:fM5[{QeC$MՂlv_Cn_/z8޼?nDٞAZ3>A!DqA7Z|yo[7 nH_w*$20#7n|#>G  pCvqrw o˿qG"ov o&@ 6SPw2Ԟ\+X-q(/9jwZVP'kMLQyg%)edrC!*'ȥ{{4V, WO;*W ~n@)}5LVz1="꣱Q,14]OYhXK~N?SOIB%i|74fYSNXڝڙ#oykctjˠ!qN1JD )i}Mjo(jI7 .9IҢ[PbM3v7hO ]`_d$ $קp;8C+p6\Aܠs[>-cQhpIe"9;5uuQ !rFC\-+[nx 2#m,pfwAiXP MK[Ev7KsDrQ}p64D P%Xd\(=ዴ"d(l.x|̉?b}kN,C.”mt`i>#2`2+ZHxU=.(efn䛠<(8@dž2oޫ]xXˮL"GP/Dדqi.}ͬ}ڎ&B%u >yLNC.A{;`w_*V%޾NߘpT&ˀ$^m{ݧR~6d[wѣ8Z'7A{L(Y Рk4d>mCPWS!:l;ii;B>; iR a)+j[v,JĴǨSw5tCcѣjGͲp爀Ci8fqՆF~( "Q+4<ԘO^.>4R7Н?8[\PmKpEΕ(iJW"E`_!.,%\\6x,lv>jw*޾9V(\ɢE|dvHb2M6/`::'e/yDND1뵺eO តPbeJA`F9q1= 0Nݒxmע/&_Uu$FֻS𜊈hj.g2ӏ H5x Ӝ4%:$J$]Aj+ݥ/ސQй*[&\/JQuMH9Ė<JEޕ^Iٰ1Vi9vIzD ic>K0{ a)< :QlYԦ6)?ϴJi=Hɲm,WqL " ?|$9`fSMe|K٦CAˑ)JP"T5f@܏, }3s F fB;W+uLR)} өN8'[^#roz4m{f [+!PπfedW ;9967{4 iBda܇% g2xBmo xQhKrDG˛_S"N~Cq9Ɓ(zOû@:PG8; nL|"+k0rBJP<9~P6`k #(&n;Q$v;NMWVO`3zH꼭o;F0֞EĹQzcdݶ7"${9WX[MV;BM"o cŝz!SV'5h*2D^'G.#_m>2~^U+;}xp;*Q 4NƀXl&"YȜg\fSw8f-RBBx\oS2 .CS#';*)!AQ۸yp̷鱵5Leh>!ω)U4Q]Mgɟ3?T .V̙;!źm}G΂ޕZgYCU/.'zr&8]HcFdr6SCP)vݾ4݋`Hct΍q >']ϐCޱUcĩ%#Wo7\*qeIsex’ުVWh|J:wthi^p_I*COiPExu-F/_)OgZe84he;* ZNX ~j}W 6J78-ˑ tAR@wyX;ߵnwG=le(u4jJ?Ūlc8Om8F}]|=>tO%>⍽[$zx(=S VMXm+w0t vnza?}.kx!e=Y<02m5G@t:XPU}o &:.qޮH;<ɯ}XڻTٝiP.6_OԥSMo 93.F`1̆;^a~D_]GU$] $@1iw+n.yFV{Xd]׽qwWD=>2m|c|xY?Mq6/Y״ MlsQKe3&Ϸ,+ &BfA׌<]T1Z3 m>eV.pV15&/ıހ+닆w hRۢLJC4XN$&>2`֒о} tALFZa2!Q:em#K |;tϴk^#gCbj v0&DuW.*I!!KLN5t;ܵsS?TR+mE9xtnl&P&XZn8)3Zrc72< 4%⦗?sTM5{'((1bAcr{jk9ќTp^+/E$&N!\|&+)} R}[|sjdXwXD( HE>s5Sʴ~HŇeokHv&NJT4YzۜG|z-y$O4p/F#rC4Rbk <Otӻ=CR$^mIM:LGjMUbeTZMuyAz}}ft[8CP8N6i:CMLckQ(I) PIKA8WL*;E{Rz\͵rpLκoK\i9%Jr)e n~DOȏ8?WJg$O`eC|~y/<`C qѵ@Eo)48x55 k}{.6:37N3UgWw,*B\xҟq?hzN@3|fLV>mT<+j+Xჽa/5y\hzI&zSI^3C}p-#U.m˙R^p&)yd o*J#4֋nuarP+VŸJ,j*⪸di| _WmZe ̖X",Jo".Ckvaی$%O`3aj:"su{3pery"ӂ7p z6ƁWa?|Gq|',YzODs%_ 8Nh 3Zb`?8{UEKea׭a_SE:݇0`^Nк*{_j?,,^âwDMaȐ&Q.FQp+ozn}^srzh.١Y;l[Սɓ}rN|Fa)mJ9H!)h!LXu¾Hx8 WG_cNZ~öBjU|f{qѸZ>qɦFke/f\?j|qg:\5*c㫏Rf坝5=VLxiPN9=jO*@%0zS[+Z/ atBNEr4^B%mڜ=]D}ke%{]y{PŤhMO[IT48j4z .?[j4\l Hw92ܟG@ JO}팸u_Y*]Ǟ1oh r{F oBKԾAV! #uRfIL9%}9Mhdy]*҉ehLO)"℄"~z59שeM#枕M?:!Vy3z0X{?$ iHRFd)?K+~:u 'e*؊j_] RD_fG<"1t`7-IȲh<*YsB@8^9v)|)M* Q ,ˠ^IQ9⫦*=r.$uԈ k5(O#Hӓ ,&porBK-y/z"ʖ!nI5p IcB }v5`A `pH=^m@u[.{}cA4`!pե7$^1h1I%x?awVSU8Q91~0&.\ҿOшYroر?H? *:+<,0'gb-JvvJ4'CbVt0 SY'*.n(4qy~9I.pKs.ENиI(w Syª?{GNx2Oo&=O0+ܧsқsͲꚴw 07mpy-Dz e;#iO./ؼ(hG{rBXpi[ɬ ߡEMM"t>aՎUs#1s` N'؞\n"`4KlQx,va= 6%I f/dU]A,&?؎;9&ϭgc{.MI>~y|{yY"J4}DU T3KHoV$Y-ue?B9^<)8 endstream endobj 577 0 obj << /Length1 1394 /Length2 6195 /Length3 0 /Length 7142 /Filter /FlateDecode >> stream xڍvTk.t7 HJ 4H0 00Ctw"HA4*R"!9k֚yzzr(pu4 #*r$!  
0H_nR^3'_8B0X.B` ,-qH?@< F]Q$UAy 06yXNNFw: @@!(@qb;B!H E1~*!ɋB\=E 0{=0€g3QR^ ov@<ցD@(Ol c-@ :, _ٿ !P!P( Cb|1 =|7c'J_yB=nOQO׊b`OY SAQO_"LSMCP]ܳd4@UpԈ]R!x$N}J}#qɳG.id1*/BLY* Jj~o^56Y£F7\=S.4@b Io$ыY@++|ŝf3cU*d'7~m>)T̬4&fFcG=K :2~aI =rqYDc(ʕdfg霩v@uӭ`bȧ܋Xabimi$A8Jޭ[z~x_g~uzqfkiN)S*ⅰ;*r5U]n\žttj]&G^ͧik#K~Ojn+9C{}Q6Fo+%_U-?8՜lW &rIHounn\ݾ{3 wrk/T\W?6G[֨DusʭVU:۪O$Mcrוǐ0=Kmch> +y`fCnӪO}@ާگ\N˘Í29IB@f`Ƌ8LI 7kɢNHYsKN:We(Lł t-t\?qMe!鹝O]'{^é>$r!Z‡u`}qDr0TLb7]x 瞪RE~Oe7u7uJI~_+:s8ICv5/iwC2k%fo@zbAfi#4VU.i ~;s16v&K }͠wt V }) E!*VuE} .r  uBLDV ̕a-u9-Ond:zHjWc":gJ\`ܼC_5욘≡=3*tC !(] iIF}cu R%5/~oaxZv,&?0pD\;P_/ik_{R*PZRε[;i{'֕o?~ ʎMMh.p_:( Lx/WH+=o{$SO+Rr5:ppJ>0 5 v1m!ZX1p0k{|ԝ؉]BP뺪R(U~?vw&ơ9]iC/Xd5%nݻ= [^O' fb\D o:~΄]hVgf l9]hHsi'}cł$l>_@ޒذQ(屿Ms@~<4ئ=+t[gsa7\6dyƟ)jpXY*̊8L.m꼵,-^xA-ʣNݐCx\_teRU u3Ov"8A&`SPIBn^!cvO=P&7RPE":+gYG7WJ?ɻY6'[qL9-RYVÿdIlNhBB>+;fG/wL/tt[*6w&;֥"*&{Vr0/ yw$)îJ[w\oY tZ}̄U?H=\dMGJ[60 =qT0u"*F(6\m\o"?!uǵkoSOmOLjQ+HrV]vG(5oUр@!f7 LO}-VrMϨi;:d:=ph  z;ooǏo #Djp$|%;zc4Z*pvaPuD$əs} xI@x6DR-1Y*SMޱݐ$pT9A` ^EU lI>)u)d)ЦAG_X k^@pD͙ro(@H1dGS$KhP=OB`sw+$SvF" y# ]<[cOܰ#MYQ6 md;>c RWY9o:mOs~Ɛ,pGӼ_a4:y卋YCL7ARnEN8.un㼡t,Zd?cY5KIW4Ҝ?Y9=G'7=:W=߲ncKH]nHؔzi@i~5+|8P*;?D#O73}CZo0:m +c+L ƺ 6-O'/ۃyu@mGzeWlkp?QywL{/&)8zp_sHL>5Vcn*9h<17h[Va--\7sǒ5m}ƤJ2%J-/v7p TCgsCj~PO]w >TָH!W&C>I4 $٬2BUޖ~sinlӯy X@)&WF BdWFP;Zk,/4Ͼ Y?o{RV Sn$Z}dhea=r+_;k7TތǼ 6aޒq):?w6]!2tPdv&g*:r+`.,.=RK1 Dp?lV%RY-jtoڎЀ5oaj˥H q ؇II9mL#Cf9-3~סA6IzC<:Wv'W9t7;7v.鸈k\ĥ-N"bIL>ᅙM|iV,㋞8taFB!mzR ٳ<8՘j򆏝w lZT $rqOZ`Zö́r_{6k2VJҽnbt R358Iև# o>iS"ZzʸW (4џ0* vґL Z=([Wxw[Jz@eX؞QF2t 2z1NQt-)3e;\ؖ%E;Rf&ƽb\ ٫s{z[+6E1ayƱ~gY3/DWOQir&S{QrTDؙmt\=v5_ P.5ALa5' ~{ˏlCj9ԷiHO~BT<~Fw]r>QfhZkUВ:~p&2KsR3w [/ nΕ!d Z3(mUr)#䬷.wH'!YLX5 DL-<יl1+~BH )*HOLNqLK[&JN16 Md)'t6;29D@Y GA1Ŝlwf Esq}yUf; `DH6k~Wgظ8Lج(y`꺏VRv2oaB$\ٷ J$6 P|3#MG3Y7e崙C6:\rl1ZPqdW +5XqɭK?0|'IPmVE"J3=~F_?20RLQySԂI!D?j!6,hOd9{ynVB8aSg:]#cyƐ, r@ckPjf>qio | fs3'Ose1 1Tg;#"AiSFnu3#?;!6}I*cWrEc}fD;jѕY;o¨E+!׬f2j`CnWP$cs8Ylb՛ endstream endobj 579 0 obj << /Length1 1416 /Length2 6295 /Length3 0 /Length 7245 /Filter /FlateDecode >> stream xڍTuX%t)=p#61`"HwA#" H*%Hw#"H|[}|ǎs]y}Ƭg+o ^`>PQP @|  '`30{௵:7؀FNp?BH/4^;\P:aC յ0`?3@^0_kA]?%]-P/ !(p}ז0=7|u zub@~_?,A ߖuWW7 uܸ/a0_09J9׆5TӡyW?I ~*եˑcɢ)| HDW,t̐v2Ek~g"UW¯2)${泝IS%۶*{-Y+ehV/C}\2IL30ҶZ"s%yNRxOvԄ|) 6pndHn苩2^*x-TqkeUh ctgP1}k EK$5}7o 8-VH=R ;)MXsDՠ37 ,\?s+G&haQ=yڈH7xVqG\bUn׽-}c2$T@~E.ʱ_d([籪Av& #xx=+mEafsU^OFh%(0|\j覬)eE]hLGjsQ'K2L9Ca2kުW*E41mx=bdNlE4(` ؛J^8[P9D(y_{zR6ʐJН•vdt L< /-ܽ45 vEdȹz"h޷'+LfA n?WHx i5\$=TtMnC.w?gT}i^.~:s3G`޳߹c]k{Fe+=br~ܬ)R"Rn\2 {Vp_  ;J[%!W+%|i V)ۡ j}ɞzaý>1*}:)aۄ]͖'GiHWrmr[׈9Ծ7_K3 IȰi&@X /Q/FdBn䘌eH7ݝr}5[!l|iP$D2ֱau?9 !gkTVwEgHv#0 =2;ՀEt^Jys%iE֣\N1 QBh^iÕZ39 qzQ,Vy溩lUC/#*hQHUțe94]Y 3` ug@ȴB>tȗ*[L4N8Ci{OXw'3c!Q 8!u߬OxZįr|T!w $$W2*sr{9$/$6*BwqKOCrׇt2;e5z7 CKAvb{6p1י"?ڨ??fC*UcR6SABRkMB+! >wm(1te_D„нwph~qe10%8"VTӆ D;9=uP6 +~$>\b.mō!6J8P!yTII&X$3g ;č n\c0}Hr :y&=?lnQ{Nl9R3~.ܙL%ZǦNdHe54U.mj9p1c}i|c 2.,ћumqc5iZ؆!tNvZfRG$ }-}/uK+ص\䬡v7~ʀ/~#|ܹVe%"5<)Ӝg 8gӃ:i2#8jfQxeם6}yi!}خ|BÞnQG/f:W-d&};E3\>"Ni ^jqS.N-BqIG%ڒ}@Z/Dz~N)KI^n|%)aelG7D8@a.~!3֚:8s qwyV0fX_Y5:D@~Bjf@!ǧfa\khGlJiO١#/^bz;2uzG~Z~YZj''."~NΎADRR9 C\ʕ%UWT:n~O >Q r,[0#uLCR?_~Z鸯rڻ?w|c? xd`|Z骴] Wo/lĻ}VOxckΕBaAj3}- ҃&Q{K峽)dsk Aˢ3>.m(wЩ?F#R }L<FUd6KȊS^CQ\r@`#(E-u:wvVqƣ.vM< r+ . 
.o K s,/!-zG,"6+=XH34{k1F~XݴbИd5Ƚ|叙C36,pN:Wd4'+Rh2Yn,6^iX} `SKl+-+"YtuSk^ٸpФ+sg%wO]xl"b*$=xr p8*rO,❙UgS*Q[8g,VMը_Hs:|ovȂ^~|}1;^=mr]y8)fJ^ 2^C^:[v}R}upX߁WX fLRl+x$SarqrڡşI34I^pz<͗iZ"0š٘0 UJ wq^ff(% Yk|f[3IZGP @_Ph*jˣ:L T%'+> M6E(~ X9>%0?Pr>v8P<,z٫L4=۩,<1.|+ t/v\}vvXqƱ@ ORDOPtR,[М&==~O΃h-{qecO0=?lqȷv-Zelqcadק)i\R+L:]/Myin TnZ7b>IU0\Z`:$kT$KɤF6y߆DOu!-yft.L ɲe[f%@>}ވ^v14[OdZ`ɜ҇{Mi?œlݤ{31a6q)NC kKRFE4ī&zdyA4eapSh7*Cgu,6WG=A`p'&n^ܘyyKF2g'˼ HL 3z 7.9~`]gRJM D$nqMb;]>{|SOH)ȋэ IaW[Wo+2:lm*!R_e:z~*z_-<ǭy=\ܔ[78 'lo8: n^>|P:s}@{0@XR'ͱ 2^r;h.9KBܫ O!^6b/8L7s]{Zm S8,+Q#KocC9(b:;sJ jSP0вa 6:rAlQ3m/Tnf"оXģu-3}n#^Rh!ÏЬxwG"p>Ͻ] 2z)S3tcߝqt_,t%W]tkni5S\zNQ֗S[Z!B? ZedP][_#Ҧ:ČӉ쟁,|0"/^[/ouU`닾ŋjjA3ɭsޞƐٱު `rb< -ˊ+ ̅(By>2~1L~GXGaa0Y67®J{3H\JLڙWD-Pj|U,rp n|˵DI6s*T Cӽ S>Ns~*WPcLr@i4 :'oX3&zTKҠ|⼀Ø!4D5Egct|wV.J j5[bS5źGh;d]-:\}HŚVCQԜ2Tב}%4MLݼ!MߥzDJiJx#6c `|R(v5:aVh . @탚€͟dc+ bM;^ < y{i^yz6@~~.$QQj*F'vܱ4kyP*a";o XDx5?.^pO}9QฐϭO7a;i`dUc=f/EVo0MS6-|u &fd[`B00oo@vhwऩ9[/3R;&`eVf]fϘYKހDyB,>NX[( G[NJ5l:rΪa)9'Ȣ&~{,amDZďhƥI~!q\JgcuEUuӇ =21o`R{f +P/k ȶ(5da茗CSͤ0[ "Vƞ$Om%J1χ&_nh0%U!i{heg/tO'aaG yA5^X(j5fe:*ύ8DaI;,SںOd*NUbe*<>% endstream endobj 581 0 obj << /Length1 1412 /Length2 6197 /Length3 0 /Length 7161 /Filter /FlateDecode >> stream xڍw4k׶ 3ZtetQ0-F!BAE.zAD&9ky}]ɦPC($F@X$ Tֆ@ԅA@HTpr"0Nc8BJ/2`}*P B5<¢@ PBKU; P8Q>h#߷@nPXJJw:PFH6w:!(W nYG UZHK.B;G ԁM 4tD@P/(:08ぴu-+kw‚+W!w2CB> ꂵ1~ i uvGaP3 ;VB#5; pŸ # ) vUv(8՟  a7G}Dy!}1H;_y !npu@X?>8($Yp7 (k CW/7v _W+;a~|ݡp ߁[aaHcp?6h7e0ϝ%dv(OYHPSؔ*)R@qP$!W#+WiJQ  ߵtPX!8Sw鿪-=B /QXA j#`m忣(VH,E8`7N9a/9#p=;CW +3CK!8VE^T C] GāX]($Da)@@{\%EB\,QKGy;*Ɗ7;-mV< &FQ0PO'L^+}ROLE0m VaSIw>buVѫcGkprLX m8zQM vmNr?;4E` 'tʋ2nIo/`jm; Wore-<^z 2E']S' ޫbKW4e+ssTg0 L-eӯγ =ҩ|mըmٵ=?/V*4>XSgj;3th CKA= !RgwOWr_ c޹õˏ>4x2k+M4M"5t^,F<]7wb2|.ɑjloU?Qj}+1uÒ ldGSD𯶻 >qʡ.`-/Չ7ږ}`/4Y $57y[fј0ib?8YR[TMpD!ILYCL [34ש&⽼ 3ǾQe|Ksٷ# ^^_oak R, Fr7-V=5V9cpe f psX,2'ӵ?i }T3|6 oh=F_6#\O#;M^L6>bu?-hv{tC .O~DrPePW43*XdS 5`-KpVa3X\孅p:w س%ђ9yUƟ}?橞|BNqh[VTdv|ZVtUݡ(Qp,ɈCÓeҮ랭 l-$'ዀk8㇓A`llceϭ(Nu mrg XWYz|\xE -{;PıIu6 #6~O? <g@J.M=Otf;r`)iMoX5߮]zsZ\%$**N'ǩD:í_%}ȩTҫ"#EpUgifՇ-UGG MǬ!xWr[#Rb&R=+Ƹ HUXq:y}@ޗ@jGaj;a+h^Ň djP*^ջ څw5L Q^ށɥ\3vRRݒ9hX3KjhY%EQE-%aN& &Uve ],;ܲɤ]4 ')o[sN%@#'CYg$kk/}g"?|MWM[֩kW5H u8A gWåy3BqLë!Vx|s֔d jsZOU4Vvަ_PGL,MT2pq+ e:qpWY=fR\js۱9iFG-inaQNG9x-;&I^ǛW}k.TL0_1:*X$PmW\ka|,R8[b|eed| 4@6ҩA箂B(L5l]JA׺pA[ znFwiI?Vⷂ QuxAkt~F25&C5'bxd_B/k,۰L.l41-vU[3!CbG&wDgOj0lSQջTn7/j5*NkU.IARtˢ~~xy󅸋ߢb6GpSJnhX_]Rv[Ox /pHLc|݇p8V:ĩ @!O9d=WXͥĔw l宍0l\c-)},5XHݵB8.WSFE/6u)1Ieފg"frxOyH(wB{[?!K}0rtfZpl^v<jp_F t\|U=/чh⼫Ȣ. 
"3Gh(}dfp6+v[cFX ~<JR[%k2$Nу1ǔW)$L#jä?ҙTog*D5CЈH:*5 Kɢ-Mʡ[?c9te&x(JZD|g|nyza0fN$jԒ{x*F&GS vJ.mZ/zkZk:;gȈe&P|;y qU&D%)Zt>_.ZzgۉFxE>$?&[+K7OzL2EnĞ:]7E.0]?R_ŘjڹdG`wZfgr`VHuxFdP#FN)ӻD!)4Nox={)O\w5J314g D9Rf646LST@94bP"" Ք@ɧBQ}Y% IpJge7)$s]pDŽx`E&m7J>xI?UeJrMVVC_{-jAУɩM#WQسӆQ)8{E)ojL=jacLv 9n.,<;.AEDZXAʖl&Nt7tZ9oOdxrW:..3Ǿ4GştA YV]63RM#m6Z#\w$kߦ wu=S3}Lck$K dկ!^ʷ+WҦ)CU]kS6pWMC(dAo^yXq[y^GVnK|v-qDi _WfSy<όL3V@(?`X::{ Mxʊ$ S1Y6 d~V#O5AwXǠb5)sC|?=Qnf%Pvr1ճ ^);\s.ß2,({eS2ϣcv* SǶ}lRDt@=YXmCvm7;ce+'Y"_fIw lGr֙ ;о;g~eY&I: _o+޿d*]k n [%P5Xwo,eoȡx˥oZ?z(kz+#Ɉu:?6 E$Y󙂮>˴-?$74\|psEHE3!RqL˷oK\f7_nN: K~1Ԩ]of_zYAU "bʡ8Z ~-o~AtYgCŗ)|ө"corn12:J?nZ۩2b<MFiXy=OLeGFqJD5#/L h &+i ߥ x;)^G%j(ͰNpNrPi6κ[@S:tGbyȄՓQ3ZDHT D w9ͯIiQ޽/zvCÍz~Ş3+>*sݑ1>`-Pf؛1a2%\E N^C$"~c1wݼ#&r` RF(.,(^1]⍶1&1&0?~arF ydzNMȣt BC|*dyo*رnMCnL+FYi#4i;=g<#z4^J--Ucꯗ^8|5޼4O9ȿ pTJT,:3ț{{-׏+hc:}ۢeKVU($$cq qbD9 `[HᒿȒtISt&;xL _]NˢƖ.nXg]zS`Vۭ[z<&>S?ihyWz|M\ng%Zr-{s}"Kਣ.>4Iscʣ(|ipZ͂Ke7haW(ؗ?TM8+,[ qƞUS\vTZ => stream xڍT[6HKw3 )- 00C !%H7 HHt(H -^}k}ߚf<ϳo0kqX,0(W g bbҷCXL`W;Tr.`  Pu|Oxy a.by@ ]`N^.v6p>|œ2`; mA@@ýUw:r\lq<]+l2@7@/Bf bC]&nP+ ;@OE%VK s8>nP/; h)s= q!@;!:("2+ j#/7cVZP+֯\ Ĺ{){em "Gـ!^'O`gdk}/'o7 F#>|\` ĿWX||+;` b;[F`ʋh?>?fB#]bm9yu?)C<>\.'>>'aa!߿h_*Pk@pwzπK\0?n+ B?o.gK၎v? DSCFFWle  y\ c݀͏C'?_$H !n1Dnf~oWY@n.. j{AX 0xЖj*啶8!8[[u IgVC%aT/G=:RsH kG\?O:mV0J15x(,F7/*7o\)T=W&+A3ׄ.in*cȯ^_ȶDSZ3}>맬T<'aDުC $Y\eJ( Ø^/Vo)H&4+ݵtL|=S'ycB-gu*,S`a$V۶x1+*0QG"`9N~"&#X|d,-na8WFt4 IDžMSȧIki:{Rud][JZlc~Ks@ab+F( ,z:[" =D'Paw92Em>W5%D|!#HZ0NaxH[hN9$KP%]wD'';>_N%|Wevg:]YNVWNUJhEk,!"n$ʤg _ _a 6q1~Gۿ0:=el19z؜@94'nuՂ-^'sR}av4U!WiFv`I '6%<f$^+½yphfgE˓Ng)}^I)|YbG_3-}_ /cGQcl~ F3IEh"tq9=I &hTӐ_>Qg-hk~bPz]='ۼ/W,#:wuprRq9D8X͚ʈ{%KP;&›KmÂzPߦH}\x?Wx(]5cD~ZoYOjlCjFosM<]G5h^jG׽Kb ې<>^* c1Čw3ՕuJٵFzl<ψ 7tL]p.??hx qC?Bn)ߛZa?HsQy 28P;'mD䫞}[%"~@E6vr"1nҺčfWzj#:\)CELowf)Ti.렃 رHl,v> "`٨7]{5ꆑo.q!Qs&}nqKW|\w8}M;,mzr&u~RZNCYp׀< *(7YR% s=`wy|ʼ.oO;nǑiB+L'89U),hMYB &ZWQm=ocYRF!M`&؋s| d,`]ƾdօhdX3&أ$Ee\ս y[#1W7_/_f"t]5V']]thWf2@hUd]ԟHT!4^땹ljs}kSIOUdKWtZӓ=cj-F@if^XxjldVqL1'dAQ)=äixT ,M7`nAqk/wa!2\'AFd&ODط_ 8{ \V.O"׺pb=V.[H($ tZXI'f$1\ٲg{;0:80 Us?ʡH\ 9OuTMLD\)&q:yPZ?ѹMqB݌ AB;tHg>QL{6Z񚩝i^ֻ,줷Ɓp hc ~G 1ӦռKǬkU/>Rѷf2.{6Zoɖqϸ:TX>4O|Ryx^[\Ԯ \2wwz]O2} [\JꀋS+E5m"%~{+ <@bU,ى .9]A[ kUfA# fP>w憪 Mgn>-c՗ٯ8TJQ<3f6Q2i&-cM>ǰPq@$d5Qኋ<.bdkSfCY튉2%_bG~F~] \N) džRN^MKLdsmk5LQTFaz/MZcnaJL$E=x!x*+ͪܮ|e|᧋98`U:xsSx[w쾲~h䓥1$ 5SqlS̈B.%~8j,]fd1Q,T~~W! 
Qwn:]5O$ׂ,[{d\jr!, 7'N/sՃBrpM@kԇk/hZLKiѕCA^I+qK4Yx$]E4EaVsOSQH'[r(ia[EG;6 x?k+JwW&ah&)wt0'ӝtt7ŠR0{m< ND٨7ѣ _Zj$ lg}$PòJ}s-4݋i,4Ǝ~\$:<=lͣ -1ow>=嗘I@߇SwVeDPޕhI3iī/ Bf(x,&5 IyG9T6DZ|_4 sK }6ObLG 6!$e ;,%>f @WN0C,=ur<(PΖPT#X[G9-z8K$GΥ [=.gGUY>T_ JxS E2^j -kzt?TW[bؘi|'[c* y7h tPr= Zc FSBOr2[<o˾ M3V\Nȩ7zy`~k+yظʌt$̷A ^/5%*},a09A)Հ>f -+9#fO9{}+w[Za*o,O<מoWB d0.4L2 ߋSCk'#^ٿ>YEjCqjw+^c 3(`ET+]/}$QE?o2XBx?'HKRB|*]>}e궾k5ɐp}З5o] Yw;$rt.ZMZW]ux0"tz1N=dmW[nn?fg2̕B2-νśjTfP;v#\4?HIw; $A#z=L4Alb8?4aR XmT?GPg>i>>Z ?HVZ#و,X>}my$[ ԑǴ+Z+W,QS1p%ϭzH2 A!d=B T`QLfa+mze7{ՔM%x[С7.%I$ɱ1g9c4> AIGh(~3B΢Ã54mĽ%cc"E%TИ5( ۀ]wO`+o?b0 _i-T0MD%=TmtE)ރο&Z패.FmUZY9?y=]_I_ÓbePyEO-f20`8d-Z'RMs`n %5WZ U`?v'HߴJpkf,A` Sg=y \ɎqVpκ4n"XHBKRi`0:]KR ƑYIeQ==Z$*͝FI%ܮŬ-f-5 [!g4BG]ܶ\MDE-Ser]ǞP٭uMLt ɍGLNVuvRSLPI]s_P{.}`h /..)5i?2c!JeZW6}gyڏ}$ZxkX{$^Pwl;yZ.o֨jl߰ GQC6qMrG5yx:kSTbk\C7v{rt&/KF|O4ݙ&n7i H2 ޕ"xR}]r^@!gGF ,kv6& ߎ&R[*U8%_yrBʲ"drTƏH> ތk׫{\bfwVܵ8oc{TJ9N~!/S,))N5Sz;~-z CgeFh:ƙ6g98&70:ai7~UJ$lɛ,KCd[A³a;Hn-ߛTEoz>98骩(׼NP郘߯+6hwnL|ę \QаWj2I$1%ڙ>e<7%][+r)4.!D>7/jK o aRZ:g/9~ʉ5$%jNdHR0p0΢frLJ m$5}4rVutma,z:;x/1|žGꊭ)q#Ír2vzc>#"yH]Ц-wYc<_+Hԫ)‹>m=+R jTޑ޲/yGqVZmE i//ɧ@w];>2oQeE͡j/(GsR2AVcɦ9.#4miX LׯU:3@)E~cVbu({* 4HY%Bo#.Eٞ^W:S1$;#Fgqjw@rZ'ZjzI^UnaϐFE>$߮1'upy$X8Fy~`M2*m JAjӨVR1C;jXmy7E C`saϠWKh;8׭̵ӈ@Cΰn÷ۊp#F҅[]6_-HpZ60qŕ586ϥCi1  r۳U{y> stream xڍvTݶ-H ɇc&*$@( {ST"(J)tT@(7s{c72F=\k_ehrCq@j`и gb!@(Tk¹! H _U,cj0An( ,!#,)"P A!=A#@,__~ ,-- (#(8 pHwp`8s7Eqk8VUivs?0w_^80xjY=$Q- e^7 P?8KDpp?47iBzೠw?>B}p D% Aj~D ~ 18| 1p`AU*9P?*ýX~ ֿ݌D!SlK]TQ2!҅dqt~.Y昧= wwF Yw?#PYVٹN'iD2#vkյ4h.ѵ'?ڼ,wrnօx .%܀5m"=*Tȏ CceĬޞ ѽj; ,\oRXRr_z'_e)r7Od/9:^ۜ']勞Ni29+ q2hfLIM8J/4^Ҏ{mQ^ ,؟VBX,RF](k< K9zN2*m<;ﯵzN=C'{\_}+\7}"|1ar%o{KC&~U|p&62PDj;6t+t*-aʜIi=J*G[V?aE~m7s=E lc~&;XjU6}*2Ma䵼hpurɴeQYJdc^y͒ 45<Ė 9wu&"ĖF VEGДtY#>W Z\Ǝ RD8Iaa1퟽70{ }#7f\t[sw;FHuZj0ːhI3D#͕$!}F&o:=S(#.՗Oԑ7CG8C!)9K2 n{A!U>wbw 4nXi^-o$ehch6~){JJ{xp1.ESUn8խM4s;;dR7YEx)]xEݡa/* 3XWi@N=J5#!6ù=;pXklrRky?XI\GBMgsYַ~* 0rԷCTFr wv6^{`0̵OjYuFK_JT 9+UO-cq= \&A~ƨ^樓۷ږ^h T~IwoiY^jl+5C=ř/&L^1A!JYDnL\ZKAϥXsa7eqߖSC|;w}*UU?^m,.5zi'xcA;®ZkyZ]d4BO[]2l"~vV ]k[KgQ U Y͐KK)M]krLUQu䀴mqGܭI6_QzSA&5.Vt.֏澨yRqd G8O]%(KSVRhhV_N}^0n{!ybYSqElijW*CkŸHآԧߊy/6. )*$.,AVYYOkE[5ɜIܛnqмKA_<ѣO<\nr3Fn tiPV\ҾN#ZM1Rp\ &:lUӅU'i {OOT㗍s.l0<] OU~jE3GR5aEx8Y3)r) rL(Ut=1pA%eb7a*TUga3Bw!u* s f;hhhˍߓDW]V9 ;6 ?>o]ațzEuqK}؞ T,~~ :QdCO9td.9xQ !Wy:/ǯWέ*ˈ<,KQi>\' uOLw;41Xzoc$M.il78~.4PCGƑuQ3 oۆkD< E rޞm?=M2a}_jWzEuX;%{qnKUUV߼5Jmtk9ʼep"Rt`cqmrpI=<[E؃ѱS")L[ ɞ[1wӉ#SýKvc_(8ph%bD:WS۠,o||PQU'ʃ}9 2i/] +[P0mp=J|~gJVRj#k^?k< gI;>1Еog۴cDؐ rϚij2/r0C:Hu25cwC=.VHbX^#gcH8&A7NƘѸ=`ڏy)W,o䋅$Imb9֙i&`bG˷+L^);CZvV%Zt˫ܛA- Σjjej?"vLHHѝ!?|vVj;5t*Iq= x=j)Dq3g)Erd\ e-5ԫ8ƽ>+pȱM&E@DO/ߧ2S-D f!KZ:i7[@}Þ԰ڥ\QR"vCzDrZ)dM6 ]u>t+茨~J%w~IʦNƧd kU5T?l>4kCэ<VE.h0<2kft_`׳_dd=ׁXp%{^Iy*M4i ,!k;)-ywǓx{oվ[E&J4ݬv'# 1,i9]4GNwDRCY@^{CR}'B/HÃ,"'Eȉa݆-SzR KC l#=ЇA=[ 2~:}NF4CJ`Q^5,a;.̺ԉ~yG{Aq3%q+?يKP{Mib-utd>X2 «.V!b9m}Aq(N<;#=YQ3d|L] o;Y3(#+-F_8={줣h_.8>ke9*7KquAfBDSɫS>91z0U\A}Zç=D\st᳓ń^Fs2 u\x8r,cJvu^_DKPE|;#n]mQrbvl=a ~P*Lv:;[p@n*F\ggމ~s~j7M/ NV&4F#fcF/׬CMw:G?f:RLxC7? ky4`W*z.E\Vм^Q4;5ݠq;g7hOa͒no_jBf%iv|iW)2 t,B/y%z#]$F߀E!Mc9hk.|Ȼ:p 4le!*$롋f'iy/.)1wv.gOJ1J>۶6k+Y3v8z9Z8n4#s/8,kC#8һ?P g--ޅs8\o:#f?EZV E/DN4_54J'dCB $Gi%a :5L8;hbJ?垓ꭊMUpH$Yh$Ft>m'oq9[%7ʲCx GazH'$'Ąi/'110D&jɃh ŭ Yl9l.ƘmZHq8/` dt(عNԕ4%]HңuIXZ/- b]~? 
D78K6bD?U&25^Y}`|O{3gHޗrςMGy- !Z.h|cBr ٣A.}|U愇/Yvw1.b>&XX6?VO`IbAXJݨ,U mjܬ_ bͯ%"36ps*&;/h(Ԭɿg۸N /SK e͇ {kqKд[I3 Us/eGWk0 L Z'+l峔31l݊Kek0$9kEelIJ̔QŜ7˯߈^tʻ$2~QrtW'fS :*s{wKdۢkBFgsi#A''O>vg)+P ,x$ÞD;U,3_(]:q:[wZԑ ۤtۅ_&ܧ/77ּS P߷dSS[' 3.R ~9D ΕfVc~DΘnQ[++ǯ;Z$[)$t-Rc8@DsAMXd2׃ _Ŕۓ}'kB7NmPވ%ozYړT:?VHbU[D ٷ8"S8=:DC6f3J#7b\fI#z;_%tڞk- }0:->uiF3ȼ)Āꋏ޹UU}0&cO;F?V5[+-LuX9nv"\1dVp'S/EWstxVN嗢7:/ԓz^i. X6&B+]*21ULW]m NcVWv=iEϏV5fS\HtƼ`W-W(M]Zb*fNE}H-w9GPQO)#S鱯l70TdwNA¼Y"}oq2/`4<':mmACm~ߊxD&34 Evv|kr6 endstream endobj 587 0 obj << /Length1 1432 /Length2 6214 /Length3 0 /Length 7197 /Filter /FlateDecode >> stream xڍtTk6(ࠀ0 -H C0t7Rҥ H "HJ]"(z{Zfg}];5 xA|9 =cQ? ?;>  &b7"]a"!`<  @" Qq~~?ÿ8@hTp+ C >}; E `8@:wz W N ;Y;!R\0@ ECh&#b\z G wEgHzs:@ CVCxl > ; p/`sP0@0Gs0@QFFx$ s5"W)+NNP8ʕW0$>v/u#<>-l`pk_CX9a.nP(h?@]POWy}/g  A@D>`w(tWD A08?0}H'=?oOF_}?'&+A!*&wm06U t>;vK ZjDn/A?@RS*/wCnÜGs m BVj ss 6 -f^ZkP??e5Gp+pEwϿUCֿ|& ,#`/"~> ! (t =$u@#~~c"#"Qh N0MH~!j DKV׿P'B45< m^!6,޸+R1N?CiEҺ?U _FCfmYA)ʹ( CW.Nl,ag+ :̷-MqL'N mxap(Fƚˆi6fe[BQ MU _aEec/VLzR_uSSy'S'dֹbp {)c۩jI(bEb&"n44 pJ{.9x8![^E} Gϔj&݇wlQz=t{rF.ʷ4.7$aCO1֛Q";Y7NAA'wh}2Chg3hB@i0>†钆ťqdմs΂Ugl܃~sĬjPK EYkscEFY ~#FGHQ=YfJ UzT|5iq(怱#F$nM҅6{7y,7RKVt*Q[r]r!{ŝ!d!?Y^«i~m`,,}+z D2#ἵU{Zxs3CӧlBʍy`ўyahxLe$ܼX,d)jExpw̆D@+5736},2Z 28c$m"/gC=4g+{rvR Ga|w.!8lo2'e/Ί|c)R=Hn1,M{~ 8}0>Rc$d4UE\Z^ĬlZ?f'Rɒy>HO@k"i\/o |Y)6/p/W>~E3ެݙ!le0z(p_b%y݉k?NlPσFUpfwti#SbNpuXxAjʚ˃'e&s_t crSʗêVRP_Zs8H|"=0qXv/K-O.DŽ%r1U#X|e[v/E W܋8 {9gh ) '( lqBx18?&2TF>$6)[P$iP>z0ZA7{kaڻḎ7/61h(\sq!\ `d=lɴDP{sB%zkwK)=4 Nx"'o`ˏ0܌Hl}h~ҳ@Ѝ.Vn|[Q0.!)Ϭ;FO} Zz$oo;$|f(y|:A.ThlpGx]0#27ȚnuDta?sM |dt|襈,V*NWj&Õ?ۙT)t7Z ~mBԽ,SRR(9+q&EN3f0o>cHe3YA Ʃp|li<^51K8"lxLޯ/*IiDl *z9a"ML79 wEΐA*Qڷ',&f։gIk`|[Th)xdK4|7. ݅gyX5Ÿ#Dv,v mK#zʠBM`9d&@k}TܩIRD4B0.6RzTELn˴9um&PݘMOl#)b[Hh9KSk A l"ibQb.}Nz %,έG-?눙v]FF 8?w@)QxGR hl%ѿq/AA+4<8b>F{7$8Z}a/g(Ie4u,|*g׾WX pnpᕇ3=Z \>V K`K#+*-Wbp=6a:~2"H-Z^ j3 La3kMK3WB_}?]pQ";X2,:W9zRʉ;7AwӡW2ʼ;mo~_$ 0GRu@QW)KcG] sy攖GA)=^3۠÷IcB#G;$}vsj>c'(4MLpǎO|u⥑dRuWW[q[˳= wФWWl?,Z=2Y,ͺNݯ@B%.F/Opߜ\sKNw֪tOfɄq@t.e} K.XqoLA)(*F8alP^Ϋgƽ0˭)obr[ |[xѩqުr.TCQäd)/Zҙ b8|rI>Oogוg1Z lq2FBƙor10|sA7MCr0,* Q)el:kx~ œ?)zǫ-`ӧ.(uGT &w|dp,xKIxHhTlsOۅ,\4at?ڤz%ö*oyIw> Rц-Mţ߬ b R/!>p*E*__S[1JU.}⢢ێ߲n jYsyʉ-\b8;l3F#j HrYd J.'ܺL 2f Dʮv&Hx4lE$SH%_ Վ7|UWkۯ7Z2˝zXPў~V8 B*Y2o?q)esAI <Ʋ}DYNp_qgj,%y}lRA##>뮎w)@碀!Q/MNfNfzL)ܰNɭ:YE]WTeNX᭦Eb)zJ/ʀu$$JAjZEAlZI:\ҽ "]%᯾k|1zׄ_ w6CsfxKמ:Vu|:.{u#AMu-1ëT4؛q<y&79BnmAK>qQ4d2D[f(vm+{_$[8dV!/ aJzc2삔{VƂBqq!̽WfRqR],έ#} 8ߑ 2% pϫXʆ/將,nrX);oRk;W ,.nei>5kԻ0'SvηmՄ;SNGM"MWino = 5]הcN _8?$Q+ȗUiU&f&;՚nKSH=\sBt]JG0*+El[}Sa|'|C!u/r0ABܭzcwHR&%۴3f.9ďϵy|"KIFzPu`zJ<2}/voMZ-5S.xZ8=fz!X6'x=0Q N'vVL&N'+s  Ed5#{ijidz@]BqX !AC|(!ٸݑi8& ke2A=7[k_\ mqݤX8I$P  PVaQjix\Ҁ2]s RSvVK.~\ȰT gu乚h&! ?gT08 6QTݥ!k5'JyYX/up!dU esQ7mI*Fu^l2Dt͍ )_}; iU/1}7_,٤2WX5ISnLn2EU]H?C6S-}2'~E҃TF,/jNjoxnO0ߑ0{ѩ} A0.Dxufx <RZMGK'A-Tcm1S/zOD3KD" ˳e#-m;w 9k_ n",kuvի(yn p] endstream endobj 589 0 obj << /Length1 2210 /Length2 18676 /Length3 0 /Length 19997 /Filter /FlateDecode >> stream xڌP[ .Nܝ z9sf&g~Z{6Ċ*L쌀vN"rRLFFzFFf8rrU 'kp@G ;[X8 >dNrvigk C;n @ mg t#ww03wG15_!-@hbgltrTNN 6vftW 's24U2@+TL\ 1P(m6ۀPwrsښehhhobhamha/q!%Gɑ f1[;#_D-}wgZڹzZؚU=Wgm>Dpdf@'###'3't36g+=_J5x{L?z[?< ]'g?&N#-ܟb,_?Ə Ofbgk_G̠ )Asx~fa|ffc0118>7ſyW7ݏ>˿g B Xv PtF6F?L_.(;[[KOrv`M5j ?Ac?33-pp(Z8=57k [_7̇#},-1R?vۙllCCw@lO4k vN.vp,;A/߈ qD N`/`0AL?  VE"}p>A\8?(Aٕ*Gv?#O㿈AٍEl:c;/Ιdnbt:~\>G?L>3/h?" 
|sw{s?,>d<?Jq1#`'هGaqEILlQ 4O\`?<4vvpxK@711Oe]PCIfV=1CkȟB=[%VVz^iܘ Ft^Ө|O2,&f-î7sXt_/2%x)Z+ϡ)~#MFGBcsr u etV wI9^ NN$?~Lcf\)N;9A8vǎ+c0 #~ 2rW ɔڛڋR6kHي#؂v3-iXNV-`a,+/@ +>+>EKvd&AO19s.n,IQsiv,2!ƂN`W9$+rZ=`ʩV$<91r3)izٸE.F3C%C0j)HE~H}EZCY{Cp%yfMNWۂ8ɥZsRp^y(3scA`a/,S3/4R߃ jM#˽rF V${Տo DHtו׻ܒ%Xԓ])D}'V_V8Y?a*`a/s.-4Fsɯ!OQK:!WmJoz57\ێ(A8|Zk?wD(u), `(#_Out_:*6 .Fc"۲mn!!,\ǡ |6reQV3w(=Yʪg)'LO]tC:!Ŧdۋ_?.#OJn$ƼQ+>+[1_.Wlx.4sJ2@@\Mj,X#Hhɨ蓔b ~J|)p/H4āo<5#!_E\T0*'fhzք\7?ynW9|3vJ*Y sbv$_ Té\7Q넅4 R qFOᶴ"|KQB~9~p"d1Fʜ4'/ Zuj*" BV,sMY%o[۠*%?MZ|Awѭ4;~z1{qΤtZ}[4_8*u2l܀ O{!V0I+i!(%PYGꎻN.K@쓧;I\ LE.bj|&vv6Oͷ (1׺F]0+B6O"!aqOmGVXCI y1[y,J>l$<,+vOE9~3V< _{*@"${GY}0qCcI#!z^/sO((^}r, q vb%dÇxj\W^{c_quLMY|mԖ,ۢ;$eA-RJ7L݊”ubs94 K;w"<XgQc=ITnO\ r˺ۡ4M??Oz 1]a( �qT0﷗0Ι #|1A)oLbuVi^b24fC-;֩ 0GJr$Nnup.805.wNv YܩtD:8Z؜gދjL,>">Q(QK=>p?e\y0XTk12f צDF˖9 D6_aĔ>!N84ϿW^5T)``<kNƜr˅)AM&3/oW¬Ѿ aT?jPJ]Kg Ӵ~i@8BG.AQ*`^؏v*5W%+W5+= qc%p0RKS\fJO'JK_gtL4t #-Z.lG[^fzTq& d!\- C7$+ߎIE?bAWSé&։I>bӥzcn%~-γez;9͹O- ,`3DC9άW!{Y{#3?}LbVݶyŝAy1=c ;&ZDDy cYIE*,O~gcK%j+s䏥!6 B72lOu;PH~ ےfnDˈ_5/M3N7<9 \`:D65U֒=:CLn4q W QBQ^Xinp+x~T֐+lym'].5}q@-#BGߋVIp&ﯯ̘^놜 G|ƭXOC?GN+tIt]>:Uwg344ɨ Q{L |y6Oy-G.̾BE`܃+0^;"Q)SZ=f 配4Y[c[N[qcGgҵ讝Eвo}K_6Y0l T&Lkhęk$y\S3K##B_Zt妦(J1Az !Hle"B(]a*h4 J 3.# Ϙq(hO?_߃N(M|wmwZʐ^xvˡ%yFq#r;8d(>DydscTR&(JٌԴX9!zЍl&\LY|.j@PrH %OJ ohU".v="]aNSo Q"9z ٪fM'w_4b1Uf%'< 2a^eA?a ꢻ "Sss qrڝ 3= Ք 窌* #D&l/ƫ[9YmL僂qx '-秠ȸAxRTyP>Wϧ]K N@踕}9z!hSo}3k.k>^rI@;83E騧ulav,yk:a= ReX\n|)\]UT q-/>)+œ:.À[%y ]=J,aܱHL 98oV /_ւvƍ[Y &i+5ڰ^u#1&gsh1)V¦Xqݶq\LT%_VD}M\r 4Y<27\nυ&A ~^٣NjSu7qC SǔOg~/9ؚX`; @3îڷ̀Nۮ D›ŃJ|dw1է yw4aH0325OB*͠'kj 963CgKP֊6(MPet}Šn鍙Xǁ2̤Op01NTM/?ihu GK;H+!htzf蘬f&XvR۾RZ@0|emu֥-0|Viqg^wZG!8^«Gj萾% 9\;!O{W'Ƥ 'T3Y}^8DE Wp&οI䤕bsҕtA[Q#Ft%ݻzz2N穝K=֎V|bE^2]ձ&NiFZ@K0ab w?K(mT K6.VE@9Dm3{pxV ~0XuFH~OvfU{AkFvUᲨ]B)GWsÜRVb)S LwO|,* +i*dX90ye [@@TEB0͎t}]aC(x8w'U@(jݓR!TqP1tC,h ϼWq.a҇rcTŴ.١#nKuʁC~y2xe]OJ .g({bNM-D4P,dh9M N'tPL3/7!t6S}s(}+y56̑Zd 4e#2 Cy-j { ej?6Ȍh՚w@SvK(3|A,7,@--].aHV |@|HYRAjtfbHHP{5삘ʧs\I;\'p2Ъ| 7~3=DwMN"EXw` P5꽅O+u]$2{\F5*=z#0AI+˗ѕ [Z73H~qIJRrjhk s4F<>w)gyQZl,_~$ɹ.yQ E0 cęݠd0jmTN!$m9s{ {ͬ}v=g,'}n*݀ԜL.72W<#]~/#⎇M+{9 xlX3vzQDkw;F;,=X7ob9HFH#:Ԟ퍪wTYYSNg'Cݳ/VOOqrińǴ'(ݖd)&amL|` U:qdC-.#x?.Re0;&e> q7K&1堂C9hE~ކ̾bR;J3ѭZ瘖K%o3 Bϑ.{Z~-K \c(j'1b2{ qdrOZww3ϵ\8o&-G5_꺙8a6VD^޵bk3[pbd+{r<h>%F'w9Jl?2A4Y_7ONYGz(K܂1Y쑕aGnjp#dPJubk$jIoU\)_MU|Nj9B{1*tQg 06VizGtOzot-A覱T&imΤ6&Ąf27Cxm2Z$1(bPXժ-xd$P2Ƴ4<6'~'23c%-F+*6w䝁8^yNA+ophn%6O9E \3t*̶1T &Aع1 tnqɞIaz.`P$nUj( oGu`,ZlD6{0+IA#݇J6pK}l]f鳳t'm{=/ ڮ;iƉJ;9p2ˌW{FqЙ /ڄo$ke +)'CVZ-|ا'Ǩ|I瑈Y]L`UJ[ b+i V]3(\Cip#[T /, Xm!pD-ɳRơ;~պ.>ц `[Rڌ#F00^;>=31y n3Jt :ɪ}3k L‘L!dd'mKN 1(]yfwf}rOUL6:-=bi \z3U&d=zjD^t%WZ^ Y7EőPq~,t>8!F2Em*x6T{?P nuR*Vm+!I :[g(A۔M=" 7[кG O{9g8lb4:cͻ,-4W,d2N#_ړ8{e>  Ji 7nJ@O[7diߺ| GX#EK<>Yg8MO:|_[]n_B2Yg4F$`LRfz_zaUj]5C6шp)y]9r' yt%Da0d_:Y>\z1)/6fM7h]+C P(TQAA{^ ԩ,\6V1/=z/(ܭ?뛃\U[MPrеNp4ӮvFu^uiЈ9Y}q2ΫND2M%ArdyjF||7`Ur 5Sg0(RE9$:& )EtMoչ>@^TG/\"k[aH +FgHT%a@¨N`CսISDl,\ ȥa gH o6xEr:a%Y2(֠4Ь|x|0/f%bl^BUra֤5p"e DML*ͬ${|0\]߸7E _IqyHezb+L7~88(|x1$y/fŎV\6}LI<]Z}xoXnqӛAożHř0HQl ;e 0Pk_,IVssl8of#O{F\MC04B'?G%&-[=WZyT]T_`EN{ݚ,w1 lvČeux,tS#n-U_3<~Ƶ>1Gsޱ7v VRB% L5\tۯAbU5H\|+zZr)#7;t}9!<ዤ f$;zBd3aB@ `,di> 7f-Ѻzbl.D oBDmmZ.,}JK 3AK(o7hu°ɯ%IY#80f=q>:Oܰt_.'po7~>= |π"T}{  0ʒpHng}2;Auu )9^2L"\Yl;1G>Т7إbrtf~$KKw&pj]ü,,%4f'F hL_h bʳKpxIh.>W)Mؖ%WЃcJRϊ;Awʱ0dd=E6>/'0q^tS0#uxk8ͪYqj$y921+C6BEER%OKaW d mYY?t,DiF.LzҊRVWS̛D)N:;Z,yIPY`8ik|Tmu54 6.nIYjU݇®`X7TdG[h#ϥ ׆((8g.ɘ%+D]S%ѿu`!+c~A**+%ws'r[v;e)p[YWc[crAl-{/&ʭL#%].r2Ⱳ%q{e/$냅Ψ*owA 1e@y`!l+=1 ww)D1-'K\51=EeR no#JL=R^ -~{ݦ df݄U5+h A̳xIU4M5ł\0yvD#8;Ar9u^.lp"+E %e F-Ҥ1 mɒl"zlڅ+g_ᤔ!H* [G|*Py\7ĻgnUgs 
g3Jl$8k@adMJKMm(2XếmWuz/j5OIa9&io6vv_#a`P-0%oDHO_AYDAE }"[h J1a&)":'|W'qr׉>}w5Oe!#Jo#p+wJ" =A2shlb@Fh9{$7$,n-'dAG6w-Q1ݬw` v2gfw]Q%LuJ=DP̅`ٝxz *E +qp-g#ށ6Ԍ$J2ֲ|-;*;<|(Q' VsѦˍ0|~r)j9b?xgg^t"20ZXPp0p&8~^ #v\r$e"4S~ ?zm~9Yj[ pҩ |eH7ntR`g4e19%4=g"rI|1~Uć \Qو>hClDE/b& ,&N?6ṳ.>mK '%i+!kZƍ*:\jΔ.6R/ſ>9q!tZK;T&pwj*{p𘷊\oTۙ2DGMEϥ,ۉڹ ",n -kvLg{˰3%~yMx] #3͢$#jcLM2ÎՋ'~q'|_$ΞoKVx^>0?-PTT r鳛nk/N:zsڰ`^Y#M|Hx֢P֥)_AK y$ҙ r pRpS;D3"}(oe^|zlRle0ߺk-!)cLn]MMя8"q|y@'Zæe*3|v * 2kV\嗀<):з.,XNjD =OӝK$QYf*CSABT)͉}t9_ 8"[qHmA_ܕDA 0C@N*5Yڐ?m`G[,*u)c+idR/ 5) d#?w9=K93%t%qCT"0N]U+קZw ! h2ž/!I.ٟ)}ⴂT"L Fvp[*\Y A.yN{͞@A;Ty!tw*|W `![;oOMVr'VI@blB3<rgZS\/2̎(Vժڜ0Bܲ}P<'dKFB 4?&NyꩡS#Z%zͽ  ?q/'4*H7ƇԀݻoo_p}^/y's.wST[ v t@uhd}]2ĹzAD м/{/>po! FsRs$m6 Az^&DIc}]wJI.uVzEgR{8a 8Ggk#2" MtjJJdK~AGEǁ;mp7YS}ظK]nي 9O{է[I*]EDd6z%r%V!nޡ߼d`0|x&]{)oM9=dvH "%Q1_b\1j qOu n󁻺U2(y$Gܸ oFũHz0زLpplZ펒#v>]Xo}C]:йqc7[% zzk{B"I([̾E<&!@KZ$HaC^d#2됹 O'eE޷J jkŭk8T%S\~ @m32*8\?Nd$.ײrI 6D.:?׏tJRKf1eQlж|K%`Rj_s{ 7(?6.2m Q'Lǫ8 ޺]6 l?E[24ћ5$9bN '/\ IO9dx"{Y郗6Xy}iT*9v x&xYp_5 !=]UtC:@%C, -5(,%&)ǡ7U"m F_d~^d7k5*k=%d1an_2BAtħ |}o49A>"ԉB)V|9q F嶔G˥85[ Jg7#5RYZeeG X~,>B׭'s kT]@( RDAdNeYhʱph7qO[8G ;8]aRF YBkCe(*-2jG+넘 Zmf=LT⣆C`CL4n2R8ī1ؾS"D詄n n~ p Ʋ:|[; Ф@PD=9»<V&K1yNPMMPV*z R_JoL\c|>C_N]QFܮ0!Ph;Pqf*V끔hH1e 6nn2ZF2KBJ5-kwݴU$yؕRpz|lv3`f#`y]&_q zK۪_ӒgV2y:]m_*|vIk]i&m$e,j2{IO"W\ ᑥ^ip=k A+m|L`r:]yL Jv/5v` 37A-ޏuJo$(Oe+e]-We={ ca?aă#f~TCٝk40Ev q Q,:jmϟ{ReTs$W5  Jל iK Ъ] S=hwZ9eV^IIIiDU9}ʤ F5?؜iY(3@BCc3ғ!S]#W;tE50܌ktTcO(dOّ cϡl}7A*xՋ)M5GvEqC'_[8!Xgۨ?;Y=5c 3, y0 c[)К|A+yP𐷰Q,jS /랢+@B`Y՘`l d;6}JHSO:}|'.{PbI$;#lI,9߰ux bO]x"1x%0˧Uė2>o|O9jpxBE]b|;q 8,` 8B? ܦ˞k4xӠpBױ(l*vkteU.V` 4ZAn;^5Xdi;zNˍ(#`)s}!E(c/j Wz߷WfD{]ߙY\ʛZ*]XP؃0|3YbM H ѹpc4ZX+ H~C<{ $1쭄FѨ+a5ajلy9h/y`Ξ+uqk6hwUicsrvG)=֞^_ˮXI΅L,.Υ͇ZV./L8nB= xb@7޳.dP)L1keTb3e|kEГ@$fw_? xׄi1W47od 攺$6!iC3mCB}XÌ|ES zrt]r¸[έ6loY33(Upη IDKnJ mGWx?s5O{!pȀc s4.o'aFY<+.ezܘTδ;STqURGU~F"@VhUSJ[m?}M"U/^U Z97sʖdR*9'wl]Sjٿbq q A[q-2l#w ?!J߿z-7E_,M<6%I)plz%X5:j\SBFf` M99,m]>s|E_o&Lp$1m\Q_{a>"V-xIzq3ۣbL{Ykֻ&}FJ I'/U$q u;qi+Os$>F极UÊoƝ35sbߩьK(? endstream endobj 591 0 obj << /Length1 2569 /Length2 18352 /Length3 0 /Length 19841 /Filter /FlateDecode >> stream xڌP ܝg]!{p@p79ܓszY{w=P*1ٛ%\Xyb ,ff6FffV u+@'g+{;Y9]@2qc@`eef23މ nfeP`(<,,]@y@mJ` b t25(XmAMmjV@ Aohlhd!HCprNn@3ƶKcD[Z9Gfon6V@;g P(9c,z0o߁r665u0󴲳[J..c;߆6 c7c+c_ԍ"*cPldleFa@m3ڹ8#'n4ݓhgn_dnegf 3W& ;+GW6  :L{:RjvwZA݀'WXXfV.Ÿ 1?tNV=fI4afv6:b&-Y)%U%KG)*jf`08ظ<ll7yؙxCԥv{^R-@g?0s0~\(cIo ܺv@ vT UY_1hD,liLXX흭~,f-G ɿT@oF ;S{K 0vr2D`M+f@hgr;!>RNo'I0A&?$b0IA,&?$d Pv?]eW@ Pv7(ʧA&?]e@ٵA< dbq1umRVPcgS+iژt O(jG 2?l? 
<<>>=
library(kernlab)
options(width = 70)
@

\title{\pkg{kernlab} -- An \proglang{S4} Package for Kernel Methods in \proglang{R}}
\Plaintitle{kernlab - An S4 Package for Kernel Methods in R}

\author{Alexandros Karatzoglou\\Technische Universit\"at Wien
  \And Alex Smola\\Australian National University, NICTA
  \And Kurt Hornik\\Wirtschaftsuniversit\"at Wien}
\Plainauthor{Alexandros Karatzoglou, Alex Smola, Kurt Hornik}

\Abstract{
  \pkg{kernlab} is an extensible package for kernel-based machine
  learning methods in \proglang{R}. It takes advantage of
  \proglang{R}'s new \proglang{S4} object model and provides a
  framework for creating and using kernel-based algorithms.
  The package contains dot product primitives (kernels),
  implementations of support vector machines and the relevance vector
  machine, Gaussian processes, a ranking algorithm, kernel PCA, kernel
  CCA, kernel feature analysis, online kernel methods and a spectral
  clustering algorithm. Moreover it provides a general purpose
  quadratic programming solver and an incomplete Cholesky
  decomposition method.
}
\Keywords{kernel methods, support vector machines, quadratic programming, ranking, clustering, \proglang{S4}, \proglang{R}}
\Plainkeywords{kernel methods, support vector machines, quadratic programming, ranking, clustering, S4, R}

\begin{document}

\section{Introduction}

Machine learning is all about extracting structure from data, but it is often difficult to solve problems like classification, regression and clustering in the space in which the underlying observations have been made.

Kernel-based learning methods use an implicit mapping of the input data into a high dimensional feature space defined by a kernel function, i.e., a function returning the inner product $\langle \Phi(x),\Phi(y) \rangle$ between the images of two data points $x, y$ in the feature space. The learning then takes place in the feature space, provided the learning algorithm can be entirely rewritten so that the data points only appear inside dot products with other points. This is often referred to as the ``kernel trick'' \citep{kernlab:Schoelkopf+Smola:2002}. More precisely, if a projection $\Phi: X \rightarrow H$ is used, the dot product $\langle\Phi(x),\Phi(y)\rangle$ can be represented by a kernel function~$k$
\begin{equation}
\label{eq:kernel}
k(x,y)= \langle \Phi(x),\Phi(y) \rangle,
\end{equation}
which is computationally simpler than explicitly projecting $x$ and $y$ into the feature space~$H$.

One interesting property of kernel-based systems is that, once a valid kernel function has been selected, one can practically work in spaces of any dimension without paying any computational cost, since the feature mapping is never effectively performed. In fact, one does not even need to know which features are being used.

Another advantage is that one can design and use a kernel for a particular problem that can be applied directly to the data, without the need for a feature extraction process. This is particularly important in problems where a lot of the structure of the data is lost by the feature extraction process (e.g., text processing). The inherent modularity of kernel-based learning methods allows one to use any valid kernel on a kernel-based algorithm.

\subsection{Software review}

The most prominent kernel-based learning algorithm is without doubt the support vector machine (SVM), so the existence of many support vector machine packages comes as little surprise.
Most of the existing SVM software is written in \proglang{C} or \proglang{C++}, e.g.\ the award winning \pkg{libsvm}\footnote{\url{http://www.csie.ntu.edu.tw/~cjlin/libsvm/}} \citep{kernlab:Chang+Lin:2001}, \pkg{SVMlight}\footnote{\url{http://svmlight.joachims.org}} \citep{kernlab:joachim:1999}, \pkg{SVMTorch}\footnote{\url{http://www.torch.ch}}, Royal Holloway Support Vector Machines\footnote{\url{http://svm.dcs.rhbnc.ac.uk}}, \pkg{mySVM}\footnote{\url{http://www-ai.cs.uni-dortmund.de/SOFTWARE/MYSVM/index.eng.html}}, and \pkg{M-SVM}\footnote{\url{http://www.loria.fr/~guermeur/}}, with many packages providing interfaces to \proglang{MATLAB} (such as \pkg{libsvm}), and even some native \proglang{MATLAB} toolboxes\footnote{\url{http://www.isis.ecs.soton.ac.uk/resources/svminfo/}}\,\footnote{\url{http://asi.insa-rouen.fr/~arakotom/toolbox/index}}\,\footnote{\url{http://www.cis.tugraz.at/igi/aschwaig/software.html}}.

Putting SVM-specific software aside and considering the abundance of other kernel-based algorithms published nowadays, there is little software available implementing a wider range of kernel methods. Exceptions are the \pkg{Spider}\footnote{\url{http://www.kyb.tuebingen.mpg.de/bs/people/spider/}} software, which provides a \proglang{MATLAB} interface to various \proglang{C}/\proglang{C++} SVM libraries and \proglang{MATLAB} implementations of various kernel-based algorithms, \pkg{Torch}\footnote{\url{http://www.torch.ch}}, which also includes more traditional machine learning algorithms, and the occasional \proglang{MATLAB} or \proglang{C} program found on a personal web page where an author includes code from a published paper.

\subsection[R software]{\proglang{R} software}

The \proglang{R} package \pkg{e1071} offers an interface to the award winning \pkg{libsvm} \citep{kernlab:Chang+Lin:2001}, a very efficient SVM implementation. \pkg{libsvm} provides a robust and fast SVM implementation and produces state of the art results on most classification and regression problems \citep{kernlab:Meyer+Leisch+Hornik:2003}. The \proglang{R} interface provided in \pkg{e1071} adds all standard \proglang{R} functionality like object orientation and formula interfaces to \pkg{libsvm}. Another SVM-related \proglang{R} package which was made recently available is \pkg{klaR} \citep{kernlab:Roever:2004}, which includes an interface to \pkg{SVMlight}, a popular SVM implementation, along with other classification tools like Regularized Discriminant Analysis.

However, most of the \pkg{libsvm} and \pkg{klaR} SVM code is in \proglang{C++}. Therefore, if one would like to extend or enhance the code with e.g.\ new kernels or different optimizers, one would have to modify the core \proglang{C++} code.

\section[kernlab]{\pkg{kernlab}}

\pkg{kernlab} aims to provide the \proglang{R} user with basic kernel functionality (e.g., like computing a kernel matrix using a particular kernel), along with some utility functions commonly used in kernel-based methods like a quadratic programming solver, and modern kernel-based algorithms based on the functionality that the package provides. Taking advantage of the inherent modularity of kernel-based methods, \pkg{kernlab} aims to allow the user to switch between kernels on an existing algorithm and even create and use their own kernel functions for the kernel methods provided in the package.
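As a brief illustration of this modularity, a user-defined kernel can be sketched along the lines of the package documentation; the particular function, the toy data, and the assumption that setting the class attribute to \code{"kernel"} suffices for dispatch are ours:

\begin{verbatim}
## a user-defined kernel is just an R function taking two
## vectors and returning a scalar dot product
mykernel <- function(x, y) exp(-0.5 * sum((x - y)^2))
class(mykernel) <- "kernel"   # assumed sufficient for dispatch

## it can then be plugged into the kernel utility methods
x <- matrix(rnorm(40), 10, 4)
K <- kernelMatrix(mykernel, x)
\end{verbatim}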
\subsection[S4 objects]{\proglang{S4} objects}

\pkg{kernlab} uses \proglang{R}'s new object model described in ``Programming with Data'' \citep{kernlab:Chambers:1998} which is known as the \proglang{S4} class system and is implemented in the \pkg{methods} package.

In contrast with the older \proglang{S3} model for objects in \proglang{R}, classes, slots, and methods relationships must be declared explicitly when using the \proglang{S4} system. The number and types of slots in an instance of a class have to be established at the time the class is defined. The objects from the class are validated against this definition and have to comply with it at any time. \proglang{S4} also requires formal declarations of methods, unlike the informal system of using function names to identify a certain method in \proglang{S3}.

An \proglang{S4} method is declared by a call to \code{setMethod} along with the name and a ``signature'' of the arguments. The signature is used to identify the classes of one or more arguments of the method. Generic functions can be declared using the \code{setGeneric} function. Although such formal declarations require package authors to be more disciplined than when using the informal \proglang{S3} classes, they provide assurance that each object in a class has the required slots and that the names and classes of data in the slots are consistent.

An example of a class used in \pkg{kernlab} is shown below. Typically, in a return object we want to include information on the result of the method along with additional information and parameters. Usually \pkg{kernlab}'s classes include slots for the kernel function used and the results and additional useful information.

\begin{smallexample}
setClass("specc",
         representation("vector",  # the vector containing the cluster
           centers="matrix",       # the cluster centers
           size="vector",          # size of each cluster
           kernelf="function",     # kernel function used
           withinss = "vector"),   # within cluster sum of squares
         prototype = structure(.Data = vector(),
                               centers = matrix(),
                               size = matrix(),
                               kernelf = ls,
                               withinss = vector()))
\end{smallexample}

Accessor and assignment functions are defined and used to access the content of each slot, which can also be accessed with the \verb|@| operator.

\subsection{Namespace}

Namespaces were introduced in \proglang{R} 1.7.0 and provide a means for packages to control the way global variables and methods are being made available. Due to the number of assignment and accessor functions involved, a namespace is used to control the methods which are being made visible outside the package. Since \proglang{S4} methods are being used, the \pkg{kernlab} namespace also imports methods and variables from the \pkg{methods} package.

\subsection{Data}

The \pkg{kernlab} package also includes data sets which will be used to illustrate the methods included in the package. The \code{spam} data set \citep{kernlab:Hastie:2001}, collected at Hewlett-Packard Labs, contains data on 2788 and 1813 e-mails classified as non-spam and spam, respectively. The 57 variables of each data vector indicate the frequency of certain words and characters in the e-mail.

Another data set included in \pkg{kernlab}, the \code{income} data set \citep{kernlab:Hastie:2001}, is taken from a marketing survey in the San Francisco Bay Area concerning the income of shopping mall customers. It consists of 8993 observations on 14 demographic attributes (nominal and ordinal variables), including income.
The \code{ticdata} data set \citep{kernlab:Putten:2000} was used in the 2000 CoIL Challenge and contains information on customers of an insurance company. The data consists of 86 variables and includes product usage data and socio-demographic data derived from zip area codes. The data was collected to answer the following question: Can you predict who would be interested in buying a caravan insurance policy and give an explanation why?

The \code{promotergene} data set contains E. coli promoter gene sequences (DNA) with 106 observations and 58 variables, and is available at the UCI Machine Learning Repository. Promoters have a region where a protein (RNA polymerase) must make contact, and the helical DNA sequence must have a valid conformation so that the two pieces of the contact region spatially align. The data contains DNA sequences of promoters and non-promoters.

The \code{spirals} data set was created by the \code{mlbench.spirals} function in the \pkg{mlbench} package \citep{kernlab:Leisch+Dimitriadou}. This two-dimensional data set with 300 data points consists of two spirals where Gaussian noise is added to each data point.

\subsection{Kernels}

A kernel function~$k$ calculates the inner product of two vectors $x$, $x'$ under a given feature mapping $\Phi: X \rightarrow H$. The notion of a kernel is obviously central in the making of any kernel-based algorithm and consequently also in any software package containing kernel-based methods.

Kernels in \pkg{kernlab} are \proglang{S4} objects of class \code{kernel} extending the \code{function} class with one additional slot containing a list with the kernel hyper-parameters. Package \pkg{kernlab} includes 7 different kernel classes which all contain the class \code{kernel} and are used to implement the existing kernels. These classes are used in the function dispatch mechanism of the kernel utility functions described below. Existing kernel functions are initialized by ``creator'' functions. All kernel functions take two feature vectors as parameters and return the scalar dot product of the vectors. An example of the functionality of a kernel in \pkg{kernlab}:

<<>>=
## create a RBF kernel function with sigma hyper-parameter 0.05
rbf <- rbfdot(sigma = 0.05)
rbf
## create two random feature vectors
x <- rnorm(10)
y <- rnorm(10)
## compute dot product between x,y
rbf(x, y)
@

The package includes implementations of the following kernels:

\begin{itemize}
\item the linear \code{vanilladot} kernel implements the simplest of all kernel functions
  \begin{equation}
    k(x,x') = \langle x, x' \rangle
  \end{equation}
  which is useful especially when dealing with large sparse data vectors~$x$ as is usually the case in text categorization.

\item the Gaussian radial basis function \code{rbfdot}
  \begin{equation}
    k(x,x') = \exp(-\sigma \|x - x'\|^2)
  \end{equation}
  which is a general purpose kernel and is typically used when no further prior knowledge is available about the data.

\item the polynomial kernel \code{polydot}
  \begin{equation}
    k(x, x') = \left( \mathrm{scale} \cdot \langle x, x' \rangle + \mathrm{offset} \right)^\mathrm{degree},
  \end{equation}
  which is used in classification of images.

\item the hyperbolic tangent kernel \code{tanhdot}
  \begin{equation}
    k(x, x') = \tanh \left( \mathrm{scale} \cdot \langle x, x' \rangle + \mathrm{offset} \right)
  \end{equation}
  which is mainly used as a proxy for neural networks.
\item the Bessel function of the first kind kernel \code{besseldot}
  \begin{equation}
    k(x, x') = \frac{\mathrm{Bessel}_{(\nu+1)}^n(\sigma \|x - x'\|)}
    {(\|x-x'\|)^{-n(\nu+1)}},
  \end{equation}
  which is a general purpose kernel, typically used when no further prior knowledge is available, and mainly popular in the Gaussian process community.

\item the Laplace radial basis kernel \code{laplacedot}
  \begin{equation}
    k(x, x') = \exp(-\sigma \|x - x'\|)
  \end{equation}
  which is a general purpose kernel and is typically used when no further prior knowledge is available.

\item the ANOVA radial basis kernel \code{anovadot} performs well in multidimensional regression problems
  \begin{equation}
    k(x, x') = \left(\sum_{k=1}^{n}\exp(-\sigma(x^k-{x'}^k)^2)\right)^{d}
  \end{equation}
  where $x^k$ is the $k$th component of $x$.
\end{itemize}

\subsection{Kernel utility methods}

The package also includes methods for computing commonly used kernel expressions (e.g., the Gram matrix). These methods are written in such a way that they take functions (i.e., kernels) and matrices (i.e., vectors of patterns) as arguments. These can be either the kernel functions already included in \pkg{kernlab} or any other function implementing a valid dot product (taking two vector arguments and returning a scalar). In case one of the already implemented kernels is used, the function calls a vectorized implementation of the corresponding function. Moreover, in the case of symmetric matrices (e.g., the dot product matrix of a Support Vector Machine) they only require one argument rather than having to pass the same matrix twice (for rows and columns).

The computations for the kernels already available in the package are vectorized whenever possible, which guarantees good performance and acceptable memory requirements. Users can define their own kernel by creating a function which takes two vectors as arguments (the data points) and returns a scalar (the dot product). This function can then be passed as an argument to the kernel utility methods. For a user defined kernel the dispatch mechanism calls a generic method implementation which calculates the expression by passing the kernel function through a pair of \code{for} loops. The kernel methods included are:

\begin{description}
\item[\code{kernelMatrix}] This is the most commonly used function. It computes $k(x, x')$, i.e., it computes the matrix $K$ where $K_{ij} = k(x_i, x_j)$ and each $x_i$ is a \emph{row} vector of $x$. In particular,
\begin{verbatim}
K <- kernelMatrix(kernel, x)
\end{verbatim}
computes the matrix $K_{ij} = k(x_i, x_j)$ where the $x_i$ are the rows of $x$, and
\begin{verbatim}
K <- kernelMatrix(kernel, x1, x2)
\end{verbatim}
computes the matrix $K_{ij} = k(x1_i, x2_j)$.

\item[\code{kernelFast}] This method differs from \code{kernelMatrix} for \code{rbfdot}, \code{besseldot}, and the \code{laplacedot} kernel, which are all RBF kernels. It is identical to \code{kernelMatrix}, except that it also requires the squared norm of the first argument as additional input. It is mainly used in kernel algorithms where columns of the kernel matrix are computed per invocation. In these cases, evaluating the norm of each entry over and over again, as is done in a \code{kernelMatrix} invocation on an RBF kernel, would cause significant computational overhead. Its invocation is via
\begin{verbatim}
K = kernelFast(kernel, x1, x2, a)
\end{verbatim}
Here $a$ is a vector containing the squared norms of $x1$.

\item[\code{kernelMult}] is a convenient way of computing kernel expansions.
It returns the vector $f = (f(x_1), \dots, f(x_m))$ where
\begin{equation}
  f(x_i) = \sum_{j=1}^{m} k(x_i, x_j) \alpha_j, \mbox{~hence~} f = K \alpha.
\end{equation}
The need for such a function arises from the fact that $K$ may sometimes be larger than the memory available. Therefore, it is convenient to compute $K$ only in stripes and discard each stripe after the corresponding part of $K \alpha$ has been computed. The parameter \code{blocksize} determines the number of rows in the stripes. In particular,
\begin{verbatim}
f <- kernelMult(kernel, x, alpha)
\end{verbatim}
computes $f_i = \sum_{j=1}^m k(x_i, x_j) \alpha_j$ and
\begin{verbatim}
f <- kernelMult(kernel, x1, x2, alpha)
\end{verbatim}
computes $f_i = \sum_{j=1}^m k(x1_i, x2_j) \alpha_j$.

\item[\code{kernelPol}] is a method very similar to \code{kernelMatrix} with the only difference that rather than computing $K_{ij} = k(x_i, x_j)$ it computes $K_{ij} = y_i y_j k(x_i, x_j)$. This means that
\begin{verbatim}
K <- kernelPol(kernel, x, y)
\end{verbatim}
computes the matrix $K_{ij} = y_i y_j k(x_i, x_j)$ where the $x_i$ are the rows of $x$ and the $y_i$ are elements of the vector~$y$. Moreover,
\begin{verbatim}
K <- kernelPol(kernel, x1, x2, y1, y2)
\end{verbatim}
computes the matrix $K_{ij} = y1_i y2_j k(x1_i, x2_j)$. Both \code{x1} and \code{x2} may be matrices and \code{y1} and \code{y2} vectors.
\end{description}

An example using these functions:
<<>>=
## create a polynomial kernel function of degree 2
poly <- polydot(degree = 2)
## create artificial data set
x <- matrix(rnorm(60), 6, 10)
y <- matrix(rnorm(40), 4, 10)
## compute kernel matrix
kx <- kernelMatrix(poly, x)
kxy <- kernelMatrix(poly, x, y)
@

\section{Kernel methods}

Providing a solid base for creating kernel-based methods is part of what we are trying to achieve with this package, the other part being to provide a wider range of kernel-based methods in \proglang{R}. In the rest of the paper we present the kernel-based methods available in \pkg{kernlab}. All the methods in \pkg{kernlab} can be used with any of the kernels included in the package as well as with any valid user-defined kernel. User defined kernel functions can be passed to existing kernel methods in the \code{kernel} argument.

\subsection{Support vector machine}

Support vector machines \citep{kernlab:Vapnik:1998} have gained prominence in the field of machine learning and pattern classification and regression. The solutions to classification and regression problems sought by kernel-based algorithms such as the SVM are linear functions in the feature space:
\begin{equation}
  f(x) = w^\top \Phi(x)
\end{equation}
for some weight vector $w \in F$. The kernel trick can be exploited in this setting whenever the weight vector~$w$ can be expressed as a linear combination of the training points, $w = \sum_{i=1}^{n} \alpha_i \Phi(x_i)$, implying that $f$ can be written as
\begin{equation}
  f(x) = \sum_{i=1}^{n}\alpha_i k(x_i, x)
\end{equation}

A very important issue that arises is that of choosing a kernel~$k$ for a given learning task. Intuitively, we wish to choose a kernel that induces the ``right'' metric in the space. Support Vector Machines choose a function $f$ that is linear in the feature space by optimizing some criterion over the sample.
In the case of the 2-norm Soft Margin classification, the optimization problem takes the form:
\begin{eqnarray}
  \nonumber
  \mathrm{minimize} && t(w,\xi) = \frac{1}{2}{\|w\|}^2+\frac{C}{m}\sum_{i=1}^{m}\xi_i \\
  \mbox{subject to~} && y_i ( \langle x_i , w \rangle +b ) \geq 1- \xi_i \qquad (i=1,\dots,m)\\
  \nonumber
  && \xi_i \ge 0 \qquad (i=1,\dots, m)
\end{eqnarray}
Based on similar methodology, SVMs deal with the problem of novelty detection (or one-class classification) and regression.

\pkg{kernlab}'s implementation of support vector machines, \code{ksvm}, is based on the optimizers found in \pkg{bsvm}\footnote{\url{http://www.csie.ntu.edu.tw/~cjlin/bsvm}} \citep{kernlab:Hsu:2002} and \pkg{libsvm} \citep{kernlab:Chang+Lin:2001} which includes a very efficient version of the Sequential Minimization Optimization (SMO). SMO decomposes the SVM Quadratic Problem (QP) without using any numerical QP optimization steps. Instead, it chooses to solve the smallest possible optimization problem involving two elements of $\alpha_i$ because they must obey one linear equality constraint. At every step, SMO chooses two $\alpha_i$ to jointly optimize and finds the optimal values for these $\alpha_i$ analytically, thus avoiding numerical QP optimization, and updates the SVM to reflect the new optimal values.

The SVM implementations available in \code{ksvm} include the C-SVM classification algorithm along with the $\nu$-SVM classification formulation which is equivalent to the former but has a more natural ($\nu$) model parameter taking values in $[0,1]$ and is proportional to the fraction of support vectors found in the data set and the training error.

For classification problems which include more than two classes (multi-class) a one-against-one or pairwise classification method \citep{kernlab:Knerr:1990, kernlab:Kressel:1999} is used. This method constructs ${k \choose 2}$ classifiers where each one is trained on data from two classes. Prediction is done by voting where each classifier gives a prediction and the class which is predicted most often wins (``Max Wins''). This method has been shown to produce robust results when used with SVMs \citep{kernlab:Hsu2:2002}.

Furthermore the \code{ksvm} implementation provides the ability to produce class probabilities as output instead of class labels. This is done by an improved implementation \citep{kernlab:Lin:2001} of Platt's a posteriori probabilities \citep{kernlab:Platt:2000} where a sigmoid function
\begin{equation}
  P(y=1\mid f) = \frac{1}{1+ e^{Af+B}}
\end{equation}
is fitted to the decision values~$f$ of the binary SVM classifiers; $A$ and $B$ are estimated by minimizing the negative log-likelihood function. To extend the class probabilities to the multi-class case, the class probability outputs of the binary classifiers are combined by the \code{couple} method, which implements methods for combining class probabilities proposed in \citep{kernlab:Wu:2003}.

In order to create a similar probability output for regression, following \cite{kernlab:Weng:2004}, we suppose that the SVM is trained on data from the model
\begin{equation}
  y_i = f(x_i) + \delta_i
\end{equation}
where $f(x_i)$ is the underlying function and $\delta_i$ is independent and identically distributed random noise. Given a test datum $x$, the distribution of $y$ given $x$ allows one to draw probabilistic inferences about $y$, e.g.\ one can construct a predictive interval $\Phi = \Phi(x)$ such that $y \in \Phi$ with a certain probability. If $\hat{f}$ is the estimated (predicted) function of the SVM on new data then $\eta = \eta(x) = y - \hat{f}(x)$ is the prediction error and $y \in \Phi$ is equivalent to $\eta \in \Phi$. Empirical observation shows that the distribution of the residuals $\eta$ can be modeled both by a Gaussian and a Laplacian distribution with zero mean. In this implementation the Laplacian with zero mean is used:
\begin{equation}
  p(z) = \frac{1}{2\sigma}e^{-\frac{|z|}{\sigma}}
\end{equation}
Assuming that the $\eta$ are independent, the scale parameter $\sigma$ is estimated by maximizing the likelihood. The data for the estimation are produced by a three-fold cross-validation. For the Laplace distribution the maximum likelihood estimate is:
\begin{equation}
  \sigma = \frac{\sum_{i=1}^m|\eta_i|}{m}
\end{equation}

Another approach for multi-class classification supported by the \code{ksvm} function is the one proposed in \cite{kernlab:Crammer:2000}. This algorithm works by solving a single optimization problem including the data from all classes:
\begin{eqnarray}
  \nonumber
  \mathrm{minimize} && t(w_n,\xi) = \frac{1}{2}\sum_{n=1}^k{\|w_n\|}^2+\frac{C}{m}\sum_{i=1}^{m}\xi_i \\
  \mbox{subject to~} && \langle x_i , w_{y_i} \rangle - \langle x_i , w_{n} \rangle \geq b_i^n - \xi_i \qquad (i=1,\dots,m) \\
  \mbox{where} && b_i^n = 1 - \delta_{y_i,n}
\end{eqnarray}
where the decision function is
\begin{equation}
  \mathrm{argmax}_{n=1,\dots,k} \langle x_i , w_{n} \rangle
\end{equation}
This optimization problem is solved by a decomposition method proposed in \cite{kernlab:Hsu:2002} where optimal working sets are found (that is, sets of $\alpha_i$ values which have a high probability of being non-zero). The QP sub-problems are then solved by a modified version of the \pkg{TRON}\footnote{\url{http://www-unix.mcs.anl.gov/~more/tron/}} \citep{kernlab:more:1999} optimization software.

One-class classification or novelty detection \citep{kernlab:Williamson:1999, kernlab:Tax:1999}, where essentially an SVM detects outliers in a data set, is another algorithm supported by \code{ksvm}. SVM novelty detection works by creating a spherical decision boundary around a set of data points by a set of support vectors describing the sphere's boundary. The $\nu$ parameter is used to control the volume of the sphere and consequently the number of outliers found. Again, the value of $\nu$ represents the fraction of outliers found. Furthermore, $\epsilon$-SVM \citep{kernlab:Vapnik2:1995} and $\nu$-SVM \citep{kernlab:Smola1:2000} regression are also available.

The problem of model selection is partially addressed by an empirical observation for the popular Gaussian RBF kernel \citep{kernlab:Caputo:2002}, where the optimal values of the hyper-parameter $\sigma$ are shown to lie between the 0.1 and 0.9 quantiles of the $\|x- x'\|$ statistics. The \code{sigest} function uses a sample of the training set to estimate the quantiles and returns a vector containing the values of the quantiles. Pretty much any value within this interval leads to good performance. An example for the \code{ksvm} function is shown below.
<<>>=
## simple example using the promotergene data set
data(promotergene)
## create test and training set
tindex <- sample(1:dim(promotergene)[1], 5)
genetrain <- promotergene[-tindex, ]
genetest <- promotergene[tindex, ]
## train a support vector machine
gene <- ksvm(Class~., data = genetrain, kernel = "rbfdot",
             kpar = "automatic", C = 60, cross = 3, prob.model = TRUE)
gene
predict(gene, genetest)
predict(gene, genetest, type = "probabilities")
@

\begin{figure}
\centering
<<>>=
set.seed(123)
x <- rbind(matrix(rnorm(120), , 2), matrix(rnorm(120, mean = 3), , 2))
y <- matrix(c(rep(1, 60), rep(-1, 60)))
svp <- ksvm(x, y, type = "C-svc")
plot(svp, data = x)
@
\caption{A contour plot of the SVM decision values for a toy binary classification problem using the \code{plot} function}
\label{fig:ksvm Plot}
\end{figure}

\subsection{Relevance vector machine}

The relevance vector machine \citep{kernlab:Tipping:2001} is a probabilistic sparse kernel model identical in functional form to the SVM, making predictions based on a function of the form
\begin{equation}
  y(x) = \sum_{n=1}^{N} \alpha_n K(\mathbf{x},\mathbf{x}_n) + a_0
\end{equation}
where $\alpha_n$ are the model ``weights'' and $K(\cdotp,\cdotp)$ is a kernel function. It adopts a Bayesian approach to learning, by introducing a prior over the weights $\alpha$
\begin{equation}
  p(\alpha, \beta) = \prod_{i=1}^m N(\alpha_i \mid 0 , \beta_i^{-1})
  \mathrm{Gamma}(\beta_i \mid \beta_\beta , \alpha_\beta)
\end{equation}
governed by a set of hyper-parameters $\beta$, one associated with each weight, whose most probable values are iteratively estimated from the data. Sparsity is achieved because in practice the posterior distributions of many of the weights are sharply peaked around zero. Furthermore, unlike the SVM classifier, the non-zero weights in the RVM are not associated with examples close to the decision boundary, but rather appear to represent ``prototypical'' examples. These examples are termed \emph{relevance vectors}.

\pkg{kernlab} currently has an implementation of the RVM based on a type~II maximum likelihood method which can be used for regression. The function returns an \proglang{S4} object containing the model parameters along with indexes for the relevance vectors and the kernel function and hyper-parameters used.

<<>>=
x <- seq(-20, 20, 0.5)
y <- sin(x)/x + rnorm(81, sd = 0.03)
y[41] <- 1
@
<<>>=
rvmm <- rvm(x, y, kernel = "rbfdot", kpar = list(sigma = 0.1))
rvmm
ytest <- predict(rvmm, x)
@

\begin{figure}
\centering
<<>>=
plot(x, y, cex = 0.5)
lines(x, ytest, col = "red")
points(x[RVindex(rvmm)], y[RVindex(rvmm)], pch = 21)
@
\caption{Relevance vector regression on data points created by the $\mathrm{sinc}(x)$ function, relevance vectors are shown circled.}
\label{fig:RVM sigmoid}
\end{figure}

\subsection{Gaussian processes}

Gaussian processes \citep{kernlab:Williams:1995} are based on the ``prior'' assumption that adjacent observations should convey information about each other. In particular, it is assumed that the observed variables are normal, and that the coupling between them takes place by means of the covariance matrix of a normal distribution. Using the kernel matrix as the covariance matrix is a convenient way of extending Bayesian modeling of linear estimators to nonlinear situations. Furthermore it represents the counterpart of the ``kernel trick'' in methods minimizing the regularized risk.

For regression estimation we assume that rather than observing $t(x_i)$ we observe $y_i = t(x_i) + \xi_i$ where $\xi_i$ is assumed to be independent Gaussian distributed noise with zero mean.
The posterior distribution is given by
\begin{equation}
  p(\mathbf{y}\mid \mathbf{t}) =
  \left[ \prod_ip(y_i - t(x_i)) \right]
  \frac{1}{\sqrt{(2\pi)^m \det(K)}}
  \exp \left(-\frac{1}{2}\mathbf{t}^T K^{-1} \mathbf{t} \right)
\end{equation}
and after substituting $\mathbf{t} = K\mathbf{\alpha}$ and taking logarithms
\begin{equation}
  \ln{p(\mathbf{\alpha} \mid \mathbf{y})} = - \frac{1}{2\sigma^2}\|
  \mathbf{y} - K \mathbf{\alpha} \|^2
  -\frac{1}{2}\mathbf{\alpha}^T K \mathbf{\alpha} +c
\end{equation}
and maximizing $\ln{p(\mathbf{\alpha} \mid \mathbf{y})}$ for $\mathbf{\alpha}$ to obtain the maximum a posteriori approximation yields
\begin{equation}
  \mathbf{\alpha} = (K + \sigma^2\mathbf{1})^{-1} \mathbf{y}
\end{equation}
Knowing $\mathbf{\alpha}$ allows for prediction of $y$ at a new location $x$ through $y = K(x,x_i){\mathbf{\alpha}}$. In similar fashion Gaussian processes can be used for classification.

\code{gausspr} is the function in \pkg{kernlab} implementing Gaussian processes for classification and regression.

\subsection{Ranking}

The success of Google has vividly demonstrated the value of a good ranking algorithm in real world problems. \pkg{kernlab} includes a ranking algorithm based on work published in \citep{kernlab:Zhou:2003}. This algorithm exploits the geometric structure of the data, in contrast to the more naive approach which uses the Euclidean distances or inner products of the data. Since real world data are usually highly structured, this algorithm should perform better than a simpler approach based on a Euclidean distance measure.

First, a weighted network is defined on the data and an authoritative score is assigned to every point. The query points act as source nodes that continually pump their scores to the remaining points via the weighted network, and the remaining points further spread the score to their neighbors. The spreading process is repeated until convergence and the points are ranked according to the scores they received.

Suppose we are given a set of data points $X = \{x_1, \dots, x_{s}, x_{s+1}, \dots, x_{m}\}$ in $\mathbf{R}^n$ where the first $s$ points are the query points and the rest are the points to be ranked. The algorithm works by connecting the two nearest points iteratively until a connected graph $G = (X, E)$ is obtained, where $E$ is the set of edges. The affinity matrix $K$ is defined e.g.\ by $K_{ij} = \exp(-\sigma\|x_i - x_j \|^2)$ if there is an edge $e(i,j) \in E$, and $0$ otherwise as well as on the diagonal. The matrix is normalized as $L = D^{-1/2}KD^{-1/2}$ where $D_{ii} = \sum_{j=1}^m K_{ij}$, and
\begin{equation}
  f(t+1) = \alpha Lf(t) + (1 - \alpha)y
\end{equation}
is iterated until convergence, where $\alpha$ is a parameter in $[0,1)$. The points are then ranked according to their final scores $f_{i}(t_f)$.

\pkg{kernlab} includes an \proglang{S4} method implementing the ranking algorithm. The algorithm can be used both with an edge-graph, where the structure of the data is taken into account, and without one, which is equivalent to ranking the data by their distance in the projected space.

\begin{figure}
\centering
<<>>=
data(spirals)
ran <- spirals[rowSums(abs(spirals) < 0.55) == 2, ]
ranked <- ranking(ran, 54, kernel = "rbfdot", kpar = list(sigma = 100), edgegraph = TRUE)
ranked[54, 2] <- max(ranked[-54, 2])
c <- 1:86
op <- par(mfrow = c(1, 2), pty = "s")
plot(ran)
plot(ran, cex = c[ranked[, 3]]/40)
@
\caption{The points on the left are ranked according to their similarity to the upper most left point. Points with a higher rank appear bigger.
Instead of ranking the points on simple Euclidean distance, the structure of the data is recognized and all points on the upper structure are given a higher rank, although they are further away in distance than points in the lower structure.}
\label{fig:Ranking}
\end{figure}

\subsection{Online learning with kernels}

The \code{onlearn} function in \pkg{kernlab} implements the online kernel algorithms for classification, novelty detection and regression described in \citep{kernlab:Kivinen:2004}. In batch learning, it is typically assumed that all the examples are immediately available and are drawn independently from some distribution $P$. One natural measure of quality for some $f$ in that case is the expected risk
\begin{equation}
  R[f,P] := E_{(x,y)\sim P}[l(f(x),y)]
\end{equation}
Since usually $P$ is unknown, a standard approach is to instead minimize the empirical risk
\begin{equation}
  R_{emp}[f,S] := \frac{1}{m}\sum_{t=1}^m l(f(x_t),y_t)
\end{equation}
Minimizing $R_{emp}[f]$ may lead to overfitting (complex functions that fit well on the training data but do not generalize to unseen data). One way to avoid this is to penalize complex functions by instead minimizing the regularized risk
\begin{equation}
  R_{reg}[f,S] := R_{reg,\lambda}[f,S] := R_{emp}[f] + \frac{\lambda}{2}\|f\|_{H}^2
\end{equation}
where $\lambda > 0$ and $\|f\|_{H} = {\langle f,f \rangle}_{H}^{\frac{1}{2}}$ does indeed measure the complexity of $f$ in a sensible way. The constant $\lambda$ needs to be chosen appropriately for each problem.

Since in online learning one is interested in dealing with one example at a time, the definition of an instantaneous regularized risk on a single example is needed
\begin{equation}
  R_{inst}[f,x,y] := R_{inst,\lambda}[f,x,y] := R_{reg,\lambda}[f,((x,y))]
\end{equation}

The implemented algorithms are classical stochastic gradient descent algorithms performing gradient descent on the instantaneous risk. The general form of the update rule is:
\begin{equation}
  f_{t+1} = f_t - \eta_t \partial_f R_{inst,\lambda}[f,x_t,y_t]|_{f=f_t}
\end{equation}
where $f_t \in H$, $\partial_f$ is shorthand for $\partial / \partial f$ (the gradient with respect to $f$), and $\eta_t > 0$ is the learning rate. Due to the learning taking place in a \textit{reproducing kernel Hilbert space} $H$, the kernel $k$ used has the property $\langle f,k(x,\cdotp)\rangle_H = f(x)$ and therefore
\begin{equation}
  \partial_f l(f(x_t),y_t) = l'(f(x_t),y_t)k(x_t,\cdotp)
\end{equation}
where $l'(z,y) := \partial_z l(z,y)$. Since $\partial_f\|f\|_H^2 = 2f$ the update becomes
\begin{equation}
  f_{t+1} := (1 - \eta_t\lambda)f_t - \eta_t l'( f_t(x_t),y_t)k(x_t,\cdotp)
\end{equation}

The \code{onlearn} function implements the online learning algorithm for regression, classification and novelty detection. The online nature of the algorithm requires a different approach to the use of the function. An object is used to store the state of the algorithm at each iteration $t$; this object is passed to the function as an argument and is returned at each iteration $t+1$, containing the model parameter state at this step. An empty object of class \code{onlearn} is initialized using the \code{inlearn} function.
<<onlearn>>=
## create toy data set
x <- rbind(matrix(rnorm(90), , 2), matrix(rnorm(90) + 3, , 2))
y <- matrix(c(rep(1, 45), rep(-1, 45)), , 1)

## initialize onlearn object
on <- inlearn(2, kernel = "rbfdot", kpar = list(sigma = 0.2),
              type = "classification")
ind <- sample(1:90, 90)
## learn one data point at a time
for (i in ind)
  on <- onlearn(on, x[i, ], y[i], nu = 0.03, lambda = 0.1)
sign(predict(on, x))
@

\subsection{Spectral clustering}

Spectral clustering \citep{kernlab:Ng:2001} has recently emerged as a promising alternative to common clustering algorithms. In this method one uses the top eigenvectors of a matrix created by some similarity measure to cluster the data. Similarly to the ranking algorithm, an affinity matrix is created from the data as
\begin{equation}
K_{ij}=\exp(-\sigma\|x_i - x_j \|^2)
\end{equation}
and normalized as $L = D^{-1/2}KD^{-1/2}$ where $D_{ii} = \sum_{j=1}^m K_{ij}$. Then the top $k$ eigenvectors (where $k$ is the number of clusters to be found) of the affinity matrix are used to form an $m \times k$ matrix $Y$ whose rows are normalized to unit length. Treating each row of this matrix as a data point, \code{kmeans} is finally used to cluster the points.

\pkg{kernlab} includes an \proglang{S4} method called \code{specc} implementing this algorithm which can be used through a formula interface or a matrix interface. The \proglang{S4} object returned by the method extends the class ``vector'' and contains the assigned cluster for each point along with information on the centers, the size, and the within-cluster sum of squares of each cluster. In case a Gaussian RBF kernel is being used, a model selection process can be used to determine the optimal value of the $\sigma$ hyper-parameter. For a good value of $\sigma$ the values of $Y$ tend to cluster tightly, and it turns out that the within-cluster sum of squares is a good indicator for the ``quality'' of the sigma parameter found. We then iterate through the sigma values to find an optimal value for $\sigma$.

\begin{figure}
\centering
<<specc, fig=TRUE>>=
data(spirals)
sc <- specc(spirals, centers = 2)
plot(spirals, pch = (23 - 2 * sc))
@
\caption{Clustering the two spirals data set with \code{specc}}
\label{fig:Spectral Clustering}
\end{figure}

\subsection{Kernel principal components analysis}

Principal component analysis (PCA) is a powerful technique for extracting structure from possibly high-dimensional data sets. PCA is an orthogonal transformation of the coordinate system in which we describe the data. The new coordinates by which we represent the data are called principal components. Kernel PCA \citep{kernlab:Schoelkopf:1998} performs a nonlinear transformation of the coordinate system by finding principal components which are nonlinearly related to the input variables. Given a set of centered observations $x_k$, $k=1,\dots,M$, $x_k \in \mathbf{R}^N$, PCA diagonalizes the covariance matrix $C = \frac{1}{M}\sum_{j=1}^Mx_jx_{j}^T$ by solving the eigenvalue problem $\lambda\mathbf{v}=C\mathbf{v}$. The same computation can be done in a dot product space $F$ which is related to the input space by a possibly nonlinear map $\Phi:\mathbf{R}^N \rightarrow F$, $x \mapsto \mathbf{X}$. Assuming that we deal with centered data and use the covariance matrix in $F$,
\begin{equation}
\hat{C}=\frac{1}{M}\sum_{j=1}^M \Phi(x_j)\Phi(x_j)^T
\end{equation}
the kernel principal components are then computed by taking the eigenvectors of the centered kernel matrix $K_{ij} = \langle \Phi(x_i),\Phi(x_j) \rangle$.
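To make the eigenproblem concrete, the computation can be sketched directly in \proglang{R}. This is a hypothetical illustration in which the data matrix \code{X}, the kernel parameter, and the scaling of the coefficients are our own choices; \code{kpca} (described next) performs these steps internally:
<<kpcabyhand, eval=FALSE>>=
## hypothetical sketch: kernel PCA "by hand" for a data matrix X
K  <- kernelMatrix(rbfdot(sigma = 0.1), X)
m  <- nrow(K)
H  <- diag(m) - matrix(1/m, m, m)   # centering matrix
Kc <- H %*% K %*% H                 # kernel matrix centered in feature space
e  <- eigen(Kc, symmetric = TRUE)
## scale the expansion coefficients by the square roots of the eigenvalues
alpha <- sweep(e$vectors[, 1:2], 2, sqrt(e$values[1:2]), "/")
proj  <- Kc %*% alpha               # projections on the first two components
@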
\code{kpca}, the function implementing KPCA in \pkg{kernlab}, can be used both with a formula and a matrix interface, and returns an \proglang{S4} object of class \code{kpca} containing the principal components and the corresponding eigenvalues, along with the projection of the training data on the new coordinate system. Furthermore, the \code{predict} function can be used to embed new data points into the new coordinate system.

\begin{figure}
\centering
<<kpca, fig=TRUE>>=
data(spam)
train <- sample(1:dim(spam)[1], 400)
kpc <- kpca(~., data = spam[train, -58], kernel = "rbfdot",
            kpar = list(sigma = 0.001), features = 2)
kpcv <- pcv(kpc)
plot(rotated(kpc), col = as.integer(spam[train, 58]),
     xlab = "1st Principal Component", ylab = "2nd Principal Component")
@
\caption{Projection of the spam data on two kernel principal components using an RBF kernel}
\label{fig:KPCA}
\end{figure}

\subsection{Kernel feature analysis}

Whilst KPCA leads to very good results, there are nevertheless some issues to be addressed. First, the standard version of KPCA is computationally expensive, as the algorithm scales as $O(m^3)$; second, the resulting feature extractors are given as a dense expansion in terms of the training patterns. Sparse solutions are often achieved in supervised learning settings by using an $l_1$ penalty on the expansion coefficients. An algorithm can be derived using the same approach in feature extraction, requiring only $n$ basis functions to compute the first $n$ features. Kernel feature analysis \citep{kernlab:Olvi:2000} is computationally simple and scales approximately one order of magnitude better on large data sets than standard KPCA. Choosing $\Omega [f] = \sum_{i=1}^m |\alpha_i |$ this yields
\begin{equation}
F_{LP} = \{ \mathbf{w} \;\vert\; \mathbf{w} = \sum_{i=1}^m \alpha_i \Phi(x_i) \;\mathrm{with}\; \sum_{i=1}^m |\alpha_i | \leq 1 \}
\end{equation}
This setting leads to the first ``principal vector'' in the $l_1$ context
\begin{equation}
\mathbf{\nu}^1 = \mathrm{argmax}_{\mathbf{\nu} \in F_{LP}} \frac{1}{m} \sum_{i=1}^m \langle \mathbf{\nu},\mathbf{\Phi}(x_i) - \frac{1}{m}\sum_{j=1}^m\mathbf{\Phi}(x_j) \rangle^2
\end{equation}
Subsequent ``principal vectors'' can be defined by enforcing optimality with respect to the remaining orthogonal subspaces. Due to the $l_1$ constraint the solution has the favorable property of being sparse in terms of the coefficients $\alpha_i$.

The function \code{kfa} in \pkg{kernlab} implements kernel feature analysis by using a projection pursuit technique on a sample of the data. Results are then returned in an \proglang{S4} object.

\begin{figure}
\centering
<<kfa, fig=TRUE>>=
data(promotergene)
f <- kfa(~., data = promotergene, features = 2, kernel = "rbfdot",
         kpar = list(sigma = 0.013))
plot(predict(f, promotergene), col = as.numeric(promotergene[, 1]),
     xlab = "1st Feature", ylab = "2nd Feature")
@
\caption{Projection of the promotergene data on two features using an RBF kernel}
\label{fig:KFA}
\end{figure}

\subsection{Kernel canonical correlation analysis}

Canonical correlation analysis (CCA) is concerned with describing the linear relations between variables. If we have two data sets $x_1$ and $x_2$, then classical CCA attempts to find linear combinations of the variables which give the maximum correlation between the combinations. I.e., if
\begin{eqnarray*}
&& y_1 = \mathbf{w_1}\mathbf{x_1} = \sum_j w_{1j} x_{1j} \\
&& y_2 = \mathbf{w_2}\mathbf{x_2} = \sum_j w_{2j} x_{2j}
\end{eqnarray*}
one wishes to find those values of $\mathbf{w_1}$ and $\mathbf{w_2}$ which maximize the correlation between $y_1$ and $y_2$.

Similar to the KPCA algorithm, CCA can be extended and used in a dot product space~$F$ which is related to the input space by a possibly nonlinear map $\Phi:\mathbf{R}^N \rightarrow F$, $x \mapsto \mathbf{X}$, as
\begin{eqnarray*}
&& y_1 = \mathbf{w_1}\mathbf{\Phi(x_1)} = \sum_j w_{1j} \Phi(x_{1j}) \\
&& y_2 = \mathbf{w_2}\mathbf{\Phi(x_2)} = \sum_j w_{2j} \Phi(x_{2j})
\end{eqnarray*}
Following \citep{kernlab:kuss:2003}, the \pkg{kernlab} implementation of KCCA projects the data vectors on a new coordinate system using KPCA and uses linear CCA to retrieve the correlation coefficients. The \code{kcca} method in \pkg{kernlab} returns an \proglang{S4} object containing the correlation coefficients for each data set and the corresponding correlation along with the kernel used.
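A minimal usage sketch is given below, assuming two numeric matrices \code{x1} and \code{x2} holding the two sets of variables; the kernel parameter and the number of components are illustrative choices, and the chunk is not evaluated here:
<<kccasketch, eval=FALSE>>=
## hypothetical sketch: kernel CCA between two views x1 and x2
kcc <- kcca(x1, x2, kernel = "rbfdot", kpar = list(sigma = 0.1), ncomps = 2)
kcor(kcc)    # canonical correlations
xcoef(kcc)   # projection coefficients for the first data set
ycoef(kcc)   # projection coefficients for the second data set
@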
\subsection{Interior point code quadratic optimizer}

In many kernel-based algorithms, learning implies the minimization of some risk function. Typically we have to deal with quadratic or general convex problems for support vector machines of the type
\begin{equation}
\begin{array}{ll}
\mathrm{minimize} & f(x) \\
\mbox{subject to~} & c_i(x) \leq 0 \mbox{~for all~} i \in [n].
\end{array}
\end{equation}
where $f$ and $c_i$ are convex functions and $n \in \mathbf{N}$. \pkg{kernlab} provides the \proglang{S4} method \code{ipop} implementing an optimizer of the interior point family \citep{kernlab:Vanderbei:1999} which solves the quadratic programming problem
\begin{equation}
\begin{array}{ll}
\mathrm{minimize} & c^\top x+\frac{1}{2}x^\top H x \\
\mbox{subject to~} & b \leq Ax \leq b + r\\
& l \leq x \leq u \\
\end{array}
\end{equation}
This optimizer can be used in regression, classification, and novelty detection in SVMs.

\subsection{Incomplete Cholesky decomposition}

When dealing with kernel-based algorithms, calculating a full kernel matrix should be avoided since it is already an $O(N^2)$ operation. Fortunately, the fact that kernel matrices are positive semidefinite is a strong constraint and good approximations can be found with small computational cost. The Cholesky decomposition factorizes a positive semidefinite $N \times N$ matrix $K$ as $K=ZZ^T$, where $Z$ is a lower triangular $N \times N$ matrix. Exploiting the fact that kernel matrices are usually of low rank, an \emph{incomplete Cholesky decomposition} \citep{kernlab:Wright:1999} finds a matrix $\tilde{Z}$ of size $N \times M$ where $M\ll N$ such that the norm of $K-\tilde{Z}\tilde{Z}^T$ is smaller than a given tolerance $\theta$. The main difference between the incomplete Cholesky decomposition and the standard one is that pivots which are below a certain threshold are simply skipped. If $L$ is the number of skipped pivots, we obtain a $\tilde{Z}$ with only $M = N - L$ columns. The algorithm works by picking the column of $K$ to be added next by maximizing a lower bound on the reduction of the error of the approximation. \pkg{kernlab} has an implementation of the incomplete Cholesky factorization called \code{inchol} which computes the decomposed matrix $\tilde{Z}$ from the original data for any given kernel, without the need to compute a full kernel matrix beforehand. This has the advantage that no full kernel matrix has to be stored in memory.
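As a brief sketch of the interface (the data set, kernel parameter, and tolerance below are illustrative assumptions), the quality of the approximation can be checked against a full kernel matrix computed for comparison:
<<incholsketch, eval=FALSE>>=
## hypothetical sketch: low-rank approximation of an RBF kernel matrix
data(iris)
x <- as.matrix(iris[, -5])
Z <- inchol(x, kernel = "rbfdot", kpar = list(sigma = 0.1), tol = 0.001)
dim(Z)    # N x M, with M typically much smaller than N
K <- kernelMatrix(rbfdot(sigma = 0.1), x)
max(abs(K - Z %*% t(Z)))    # size of the approximation error
@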
\section{Conclusions}

In this paper we described \pkg{kernlab}, a flexible and extensible kernel methods package for \proglang{R} with existing modern kernel algorithms along with tools for constructing new kernel-based algorithms. It provides a unified framework for using and creating kernel-based algorithms in \proglang{R} while using all of \proglang{R}'s modern facilities, like \proglang{S4} classes and namespaces. Our aim for the future is to extend the package and add more kernel-based methods as well as kernel-relevant tools. Sources and binaries for the latest version of \pkg{kernlab} are available at CRAN\footnote{\url{http://CRAN.R-project.org}} under the GNU General Public License.

A shorter version of this introduction to the \proglang{R} package \pkg{kernlab} is published as \cite{kernlab:Karatzoglou+Smola+Hornik:2004} in the \emph{Journal of Statistical Software}.

\bibliography{jss}

\end{document}
kernlab/inst/COPYRIGHTS0000644000175100001440000000056312376021447014313 0ustar hornikusersCOPYRIGHT STATUS
----------------
The R code in this package is Copyright (C) 2002 Alexandros Karatzoglou
the C++ code in src/ is Copyright (C) 2002 Alexandros Karatzoglou and Chi-Jen Lin
the fast string kernel code is Copyright (C) Choon Hui Theo, SVN Vishwanathan and Alexandros Karatzoglou
MSufSort Version 2.2 is Copyright (C) 2005 Michael A Maniscalo
kernlab/inst/CITATION0000644000175100001440000000147214366206771014040 0ustar hornikusersbibentry("Manual", other = unlist(citation(auto = meta), recursive = FALSE))

bibentry("Article",
         title   = "kernlab -- An {S4} Package for Kernel Methods in {R}",
         author  = c(person("Alexandros", "Karatzoglou"),
                     person("Alex", "Smola"),
                     person("Kurt", "Hornik",
                            email = "Kurt.Hornik@R-project.org",
                            comment = c(ORCID = "0000-0003-4198-9911")),
                     person("Achim", "Zeileis",
                            email = "Achim.Zeileis@R-project.org",
                            comment = c(ORCID = "0000-0003-0918-3766"))),
         journal = "Journal of Statistical Software",
         year    = "2004",
         volume  = "11",
         number  = "9",
         pages   = "1--20",
         doi     = "10.18637/jss.v011.i09")