a324922cf3b84ae82f364be31135168f *vignettes/A.cls 7deb7278b233e702bdf87d4cc1ec3897 *vignettes/jss.bib c4c223d07206b59e2d43a585d07164b1 *vignettes/kernlab.Rnw kernlab/R/0000755000175100001440000000000014221633213012104 5ustar hornikuserskernlab/R/rvm.R0000644000175100001440000004145014221633213013037 0ustar hornikusers## relevance vector machine ## author : alexandros setGeneric("rvm", function(x, ...) standardGeneric("rvm")) setMethod("rvm",signature(x="formula"), function (x, data=NULL, ..., subset, na.action = na.omit){ cl <- match.call() m <- match.call(expand.dots = FALSE) if (is.matrix(eval(m$data, parent.frame()))) m$data <- as.data.frame(data) m$... <- NULL m$formula <- m$x m$x <- NULL m[[1L]] <- quote(stats::model.frame) m <- eval(m, parent.frame()) Terms <- attr(m, "terms") attr(Terms, "intercept") <- 0 x <- model.matrix(Terms, m) y <- model.extract(m, "response") ret <- rvm(x, y, ...) kcall(ret) <- cl terms(ret) <- Terms if (!is.null(attr(m, "na.action"))) n.action(ret) <- attr(m, "na.action") return (ret) }) setMethod("rvm",signature(x="vector"), function(x,...) { x <- t(t(x)) ret <- rvm(x, ...) ret }) setMethod("rvm",signature(x="list"), function (x, y, type = "regression", kernel = "stringdot", kpar = list(length = 4, lambda = 0.5), alpha = 5, var = 0.1, # variance var.fix = FALSE, # fixed variance? iterations = 100, # no. of iterations verbosity = 0, tol = .Machine$double.eps, minmaxdiff = 1e-3, cross = 0, fit = TRUE, ... ,subset ,na.action = na.omit) { if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") K <- kernelMatrix(kernel,x) ret <- rvm(x=K, y=y, kernel=kernel, alpha = alpha, var= var, var.fix = var.fix, iterations = iterations, verbosity = verbosity, tol = tol, minmaxdiff=minmaxdiff,cross=cross,fit=fit, na.action=na.action) kernelf(ret) <- kernel xmatrix(ret) <- x return(ret) }) setMethod("rvm",signature(x="matrix"), function (x, y, type = "regression", kernel = "rbfdot", kpar = "automatic", alpha = ncol(as.matrix(x)), var = 0.1, # variance var.fix = FALSE, # fixed variance? iterations = 100, # no. of iterations verbosity = 0, tol = .Machine$double.eps, minmaxdiff = 1e-3, cross = 0, fit = TRUE, ... ,subset ,na.action = na.omit) { ## subsetting and na-handling for matrices ret <- new("rvm") if (!missing(subset)) x <- x[subset,] if (is.null(y)) x <- na.action(x) else { df <- na.action(data.frame(y, x)) y <- df[,1] x <- as.matrix(df[,-1]) } ncols <- ncol(x) m <- nrows <- nrow(x) if (is.null (type)) type(ret) <- if (is.factor(y)) "classification" else "regression" else type(ret) <- "regression" # in case of classification: transform factors into integers if (is.factor(y)) { stop("classification not supported with rvm, you can use ksvm(), lssvm() or gausspr()") } else { if (type(ret) == "classification" && any(as.integer (y) != y)) stop ("classification not supported with rvm, you can use ksvm(), lssvm() or gausspr()") if(type(ret) == "classification") lev(ret) <- unique (y) } # initialize nclass(ret) <- length (lev(ret)) if(!is.null(type)) type(ret) <- match.arg(type,c("classification", "regression")) if(is.character(kernel)){ kernel <- match.arg(kernel,c("rbfdot","polydot","tanhdot","vanilladot","laplacedot","besseldot","anovadot","splinedot","matrix")) if(kernel == "matrix") if(dim(x)[1]==dim(x)[2]) return(rvm(as.kernelMatrix(x), y = y,type = type, alpha = alpha, var = var, # variance var.fix = var.fix, # fixed variance? 
iterations = iterations, # no. of iterations verbosity = verbosity, tol = tol, minmaxdiff = minmaxdiff, cross = cross, fit = fit ,subset ,na.action = na.omit, ...)) else stop(" kernel matrix not square!") if(is.character(kpar)) if((kernel == "tanhdot" || kernel == "vanilladot" || kernel == "polydot"|| kernel == "besseldot" || kernel== "anovadot"|| kernel=="splinedot") && kpar=="automatic" ) { cat (" Setting default kernel parameters ","\n") kpar <- list() } } if (!is.function(kernel)) if (!is.list(kpar)&&is.character(kpar)&&(is(kernel, "rbfkernel") || is(kernel, "laplacedot") || kernel == "laplacedot"|| kernel=="rbfdot")){ kp <- match.arg(kpar,"automatic") if(kp=="automatic") kpar <- list(sigma=mean(sigest(x,scaled=FALSE)[c(1,3)])) cat("Using automatic sigma estimation (sigest) for RBF or laplace kernel","\n") } if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") if(length(alpha) == m) thetavec <- 1/alpha else if (length(alpha) == 1) thetavec <- rep(1/alpha, m) else stop("length of initial alpha vector is wrong (has to be one or equal with number of train data") wvec <- rep(1, m) piter <- iterations*0.4 if (type(ret) == "regression") { K <- kernelMatrix(kernel, x) diag(K) <- diag(K)+ 10e-7 Kml <- crossprod(K, y) for (i in 1:iterations) { nzindex <- thetavec > tol thetavec [!nzindex] <- wvec [!nzindex] <- 0 Kr <- K [ ,nzindex, drop = FALSE] thetatmp <- thetavec[nzindex] n <- sum (nzindex) Rinv <- backsolve(chol(crossprod(Kr)/var + diag(1/thetatmp)),diag(1,n)) ## compute the new wvec coefficients wvec [nzindex] <- (Rinv %*% (crossprod(Rinv, Kml [nzindex])))/var diagSigma <- rowSums(Rinv^2) ## error err <- sum ((y - Kr %*% wvec [nzindex])^2) if(var < 2e-9) { warning("Model might be overfitted") break } ## log some information if (verbosity > 0) { log.det.Sigma.inv <- - 2 * sum (log (diag (Rinv))) ## compute the marginal likelihood to monitor convergence mlike <- -1/2 * (log.det.Sigma.inv + sum (log (thetatmp)) + m * log (var) + 1/var * err + (wvec [nzindex]^2) %*% (1/thetatmp)) cat ("Marg. 
Likelihood =", formatC (mlike), "\tnRV=", n, "\tvar=", var, "\n") } ## compute zeta zeta <- 1 - diagSigma / thetatmp ## compute logtheta for convergence checking logtheta <- - log(thetavec[nzindex]) ## update thetavec if(i < piter){ thetavec [nzindex] <- wvec [nzindex]^2 / zeta thetavec [thetavec <= 0] <- 0 } else{ thetavec [nzindex] <- (wvec [nzindex]^2/zeta - diagSigma)/zeta thetavec [thetavec <= 0] <- 0 } ## Stop if largest alpha change is too small maxdiff <- max(abs(logtheta[thetavec[which(nzindex)]!=0] + log(thetavec[thetavec!=0]))) if(maxdiff < minmaxdiff) break; ## update variance if (!var.fix) { var <- err / (m - sum (zeta)) } } if(verbosity == 0) mlike(ret) <- drop(-1/2 * (-2*sum(log(diag(Rinv))) + sum (log (thetatmp)) + m * log (var) + 1/var * err + (wvec [nzindex]^2) %*% (1/thetatmp))) nvar(ret) <- var error(ret) <- sqrt(err/m) if(fit) fitted(ret) <- Kr %*% wvec [nzindex] } if(type(ret)=="classification") { stop("classification with the relevance vector machine not implemented yet") } kcall(ret) <- match.call() kernelf(ret) <- kernel alpha(ret) <- wvec[nzindex] tol(ret) <- tol xmatrix(ret) <- x ymatrix(ret) <- y RVindex(ret) <- which(nzindex) nRV(ret) <- length(RVindex(ret)) if (fit){ if(type(ret)=="classification") error(ret) <- 1 - .classAgreement(table(y,as.integer(fitted(ret)))) if(type(ret)=="regression") error(ret) <- drop(crossprod(fitted(ret) - y)/m) } cross(ret) <- -1 if(cross!=0) { cerror <- 0 suppressWarnings(vgr<-split(sample(1:m,m),1:cross)) for(i in 1:cross) { cind <- unsplit(vgr[-i],factor(rep((1:cross)[-i],unlist(lapply(vgr[-i],length))))) if(type(ret)=="classification") { cret <- rvm(x[cind,],factor (lev(ret)[y[cind]], levels = lev(ret)),type=type(ret),kernel=kernel,alpha = alpha,var = var, var.fix=var.fix, tol=tol, cross = 0, fit = FALSE) cres <- predict(cret, x[vgr[[i]],]) cerror <- (1 - .classAgreement(table(y[vgr[[i]]],as.integer(cres))))/cross + cerror } if(type(ret)=="regression") { cret <- rvm(x[cind,],y[cind],type=type(ret),kernel=kernel,tol=tol,alpha = alpha, var = var, var.fix=var.fix, cross = 0, fit = FALSE) cres <- predict(cret, x[vgr[[i]],]) cerror <- drop(crossprod(cres - y[vgr[[i]]])/m) + cerror } } cross(ret) <- cerror } return(ret) }) setMethod("rvm",signature(x="kernelMatrix"), function (x, y, type = "regression", alpha = ncol(as.matrix(x)), var = 0.1, # variance var.fix = FALSE, # fixed variance? iterations = 100, # no. of iterations verbosity = 0, tol = .Machine$double.eps, minmaxdiff = 1e-3, cross = 0, fit = TRUE, ... 
,subset ) { ## subsetting and na-handling for matrices ret <- new("rvm") if (!missing(subset)) x <- as.kernelMatrix(x[subset,subset]) if (is.null(y)) stop("response y missing") ncols <- ncol(x) m <- nrows <- nrow(x) if (is.null (type)) type(ret) <- if (is.factor(y)) "classification" else "regression" else type(ret) <- "regression" # in case of classification: transform factors into integers if (is.factor(y)) { stop("Claasification is not implemented, you can use ksvm(), gausspr() or lssvm()") } else { if (type(ret) == "classification" && any(as.integer (y) != y)) stop ("dependent variable has to be of factor or integer type for classification mode.") if(type(ret) == "classification") lev(ret) <- unique (y) } # initialize nclass(ret) <- length (lev(ret)) if(!is.null(type)) type(ret) <- match.arg(type,c("classification", "regression")) if(length(alpha) == m) thetavec <- 1/alpha else if (length(alpha) == 1) thetavec <- rep(1/alpha, m) else stop("length of initial alpha vector is wrong (has to be one or equal with number of train data") wvec <- rep(1, m) piter <- iterations*0.4 if (type(ret) == "regression") { Kml <- crossprod(x, y) for (i in 1:iterations) { nzindex <- thetavec > tol thetavec [!nzindex] <- wvec [!nzindex] <- 0 Kr <- x [ ,nzindex, drop = FALSE] thetatmp <- thetavec[nzindex] n <- sum (nzindex) Rinv <- backsolve(chol(crossprod(Kr)/var + diag(1/thetatmp)),diag(1,n)) ## compute the new wvec coefficients wvec [nzindex] <- (Rinv %*% (crossprod(Rinv, Kml [nzindex])))/var diagSigma <- rowSums(Rinv^2) ## error err <- sum ((y - Kr %*% wvec [nzindex])^2) if(var < 2e-9) { warning("Model might be overfitted") break } ## log some information if (verbosity > 0) { log.det.Sigma.inv <- - 2 * sum (log (diag (Rinv))) ## compute the marginal likelihood to monitor convergence mlike <- -1/2 * (log.det.Sigma.inv + sum (log (thetatmp)) + m * log (var) + 1/var * err + (wvec [nzindex]^2) %*% (1/thetatmp)) cat ("Marg. Likelihood =", formatC (mlike), "\tnRV=", n, "\tvar=", var, "\n") } ## compute zeta zeta <- 1 - diagSigma / thetatmp ## compute logtheta for convergence checking logtheta <- - log(thetavec[nzindex]) ## update thetavec if(i < piter){ thetavec [nzindex] <- wvec [nzindex]^2 / zeta thetavec [thetavec <= 0] <- 0 } else{ thetavec [nzindex] <- (wvec [nzindex]^2/zeta - diagSigma)/zeta thetavec [thetavec <= 0] <- 0 } ## Stop if largest alpha change is too small maxdiff <- max(abs(logtheta[thetavec[which(nzindex)]!=0] + log(thetavec[thetavec!=0]))) if(maxdiff < minmaxdiff) break; ## update variance if (!var.fix) { var <- err / (m - sum (zeta)) } } if(verbosity == 0) mlike(ret) <- drop(-1/2 * (-2*sum(log(diag(Rinv))) + sum (log (thetatmp)) + m * log (var) + 1/var * err + (wvec [nzindex]^2) %*% (1/thetatmp))) nvar(ret) <- var error(ret) <- sqrt(err/m) if(fit) fitted(ret) <- Kr %*% wvec [nzindex] } if(type(ret)=="classification") { stop("classification with the relevance vector machine not implemented yet") } kcall(ret) <- match.call() kernelf(ret) <- " Kernel Matrix used. 
\n" coef(ret) <- alpha(ret) <- wvec[nzindex] tol(ret) <- tol xmatrix(ret) <- x ymatrix(ret) <- y RVindex(ret) <- which(nzindex) nRV(ret) <- length(RVindex(ret)) if (fit){ if(type(ret)=="classification") error(ret) <- 1 - .classAgreement(table(y,as.integer(fitted(ret)))) if(type(ret)=="regression") error(ret) <- drop(crossprod(fitted(ret) - y)/m) } cross(ret) <- -1 if(cross!=0) { cerror <- 0 suppressWarnings(vgr<-split(sample(1:m,m),1:cross)) for(i in 1:cross) { cind <- unsplit(vgr[-i],factor(rep((1:cross)[-i],unlist(lapply(vgr[-i],length))))) if(type(ret)=="classification") { cret <- rvm(as.kernelMatrix(x[cind,cind]),factor (lev(ret)[y[cind]], levels = lev(ret)),type=type(ret),alpha = alpha,var = var, var.fix=var.fix, tol=tol, cross = 0, fit = FALSE) cres <- predict(cret, as.kernelMatrix(x[vgr[[i]], cind][,RVindex(cret),drop=FALSE])) cerror <- (1 - .classAgreement(table(y[vgr[[i]]],as.integer(cres))))/cross + cerror } if(type(ret)=="regression") { cret <- rvm(as.kernelMatrix(x[cind,cind]),y[cind],type=type(ret),tol=tol,alpha = alpha, var = var, var.fix=var.fix, cross = 0, fit = FALSE) cres <- predict(cret, as.kernelMatrix(x[vgr[[i]], cind][,RVindex(cret),drop=FALSE])) cerror <- drop(crossprod(cres - y[vgr[[i]]])/m)/cross + cerror } } cross(ret) <- cerror } return(ret) }) setMethod("predict", signature(object = "rvm"), function (object, newdata, ...) { if (missing(newdata)) return(fitted(object)) if(!is(newdata,"kernelMatrix") && !is(newdata,"list")){ ncols <- ncol(xmatrix(object)) nrows <- nrow(xmatrix(object)) oldco <- ncols if (!is.null(terms(object))) { newdata <- model.matrix(delete.response(terms(object)), as.data.frame(newdata), na.action = na.action) } else newdata <- if (is.vector (newdata)) t(t(newdata)) else as.matrix(newdata) newcols <- 0 newnrows <- nrow(newdata) newncols <- ncol(newdata) newco <- newncols if (oldco != newco) stop ("test vector does not match model !") p<-0 } if(type(object) == "regression") { if(is(newdata,"kernelMatrix")) ret <- newdata %*% coef(object) - b(object) if(is(newdata,"list")) ret <- kernelMult(kernelf(object),newdata,xmatrix(object)[RVindex(object)],alpha(object)) else ret <- kernelMult(kernelf(object),newdata,as.matrix(xmatrix(object)[RVindex(object),,drop=FALSE]),alpha(object)) } ret }) setMethod("show","rvm", function(object){ cat("Relevance Vector Machine object of class \"rvm\"","\n") cat("Problem type: regression","\n","\n") show(kernelf(object)) cat(paste("\nNumber of Relevance Vectors :", nRV(object),"\n")) cat("Variance : ",round(nvar(object),9)) cat("\n") if(!is.null(fitted(object))) cat(paste("Training error :", round(error(object),9),"\n")) if(cross(object)!= -1) cat("Cross validation error :",round(cross(object),9),"\n") ##train error & loss }) kernlab/R/ipop.R0000644000175100001440000002544511304023134013202 0ustar hornikusers##ipop solves the quadratic programming problem ##minimize c' * primal + 1/2 primal' * H * primal ##subject to b <= A*primal <= b + r ## l <= x <= u ## d is the optimizer itself ##returns primal and dual variables (i.e. x and the Lagrange ##multipliers for b <= A * primal <= b + r) ##for additional documentation see ## R. Vanderbei ## LOQO: an Interior Point Code for Quadratic Programming, 1992 ## Author: R version Alexandros Karatzoglou, orig. matlab Alex J. 
Smola ## Created: 12/12/97 ## R Version: 12/08/03 ## Updated: 13/10/05 ## This code is released under the GNU Public License setGeneric("ipop",function(c, H, A, b, l, u, r, sigf=7, maxiter=40, margin=0.05, bound=10, verb=0) standardGeneric("ipop")) setMethod("ipop",signature(H="matrix"), function(c, H, A, b, l, u, r, sigf=7, maxiter=40, margin=0.05, bound=10, verb=0) { if(!is.matrix(H)) stop("H must be a matrix") if(!is.matrix(A)&&!is.vector(A)) stop("A must be a matrix or a vector") if(!is.matrix(c)&&!is.vector(c)) stop("c must be a matrix or a vector") if(!is.matrix(l)&&!is.vector(l)) stop("l must be a matrix or a vector") if(!is.matrix(u)&&!is.vector(u)) stop("u must be a matrix or a vector") n <- dim(H)[1] ## check for a decomposed H matrix if(n == dim(H)[2]) smw <- 0 if(n > dim(H)[2]) smw <- 1 if(n < dim(H)[2]) { smw <- 1 n <- dim(H)[2] H <- t(H) } if (is.vector(A)) A <- matrix(A,1) m <- dim(A)[1] primal <- rep(0,n) if (missing(b)) bvec <- rep(0, m) ## if(n !=nrow(H)) ## stop("H matrix is not symmetric") if (n != length(c)) stop("H and c are incompatible!") if (n != ncol(A)) stop("A and c are incompatible!") if (m != length(b)) stop("A and b are incompatible!") if(n !=length(u)) stop("u is incopatible with H") if(n !=length(l)) stop("l is incopatible with H") c <- matrix(c) l <- matrix(l) u <- matrix(u) m <- nrow(A) n <- ncol(A) H.diag <- diag(H) if(smw == 0) H.x <- H else if (smw == 1) H.x <- t(H) b.plus.1 <- max(svd(b)$d) + 1 c.plus.1 <- max(svd(c)$d) + 1 one.x <- -matrix(1,n,1) one.y <- -matrix(1,m,1) ## starting point if(smw == 0) diag(H.x) <- H.diag + 1 else smwn <- dim(H)[2] H.y <- diag(1,m) c.x <- c c.y <- b ## solve the system [-H.x A' A H.y] [x, y] = [c.x c.y] if(smw == 0) { AP <- matrix(0,m+n,m+n) xp <- 1:(m+n) <= n AP[xp,xp] <- -H.x AP[xp == FALSE,xp] <- A AP[xp,xp == FALSE] <- t(A) AP[xp == FALSE, xp== FALSE] <- H.y s.tmp <- solve(AP,c(c.x,c.y)) x <- s.tmp[1:n] y <- s.tmp[-(1:n)] } else { V <- diag(smwn) smwinner <- chol(V + crossprod(H)) smwa1 <- t(A) smwc1 <- c.x smwa2 <- smwa1 - (H %*% solve(smwinner,solve(t(smwinner),crossprod(H,smwa1)))) smwc2 <- smwc1 - (H %*% solve(smwinner,solve(t(smwinner),crossprod(H,smwc1)))) y <- solve(A %*% smwa2 + H.y , c.y + A %*% smwc2) x <- smwa2 %*% y - smwc2 } g <- pmax(abs(x - l), bound) z <- pmax(abs(x), bound) t <- pmax(abs(u - x), bound) s <- pmax(abs(x), bound) v <- pmax(abs(y), bound) w <- pmax(abs(y), bound) p <- pmax(abs(r - w), bound) q <- pmax(abs(y), bound) mu <- as.vector(crossprod(z,g) + crossprod(v,w) + crossprod(s,t) + crossprod(p,q))/(2 * (m + n)) sigfig <- 0 counter <- 0 alfa <- 1 if (verb > 0) # print at least one status report cat("Iter PrimalInf DualInf SigFigs Rescale PrimalObj DualObj","\n") while (counter < maxiter) { ## update the iteration counter counter <- counter + 1 ## central path (predictor) if(smw == 0) H.dot.x <- H %*% x else if (smw == 1) H.dot.x <- H %*% crossprod(H,x) rho <- b - A %*% x + w nu <- l - x + g tau <- u - x - t alpha <- r - w - p sigma <- c - crossprod(A, y) - z + s + H.dot.x beta <- y + q - v gamma.z <- - z gamma.w <- - w gamma.s <- - s gamma.q <- - q ## instrumentation x.dot.H.dot.x <- crossprod(x, H.dot.x) primal.infeasibility <- max(svd(rbind(rho, tau, matrix(alpha), nu))$d)/ b.plus.1 dual.infeasibility <- max(svd(rbind(sigma,t(t(beta))))$d) / c.plus.1 primal.obj <- crossprod(c,x) + 0.5 * x.dot.H.dot.x dual.obj <- crossprod(b,y) - 0.5 * x.dot.H.dot.x + crossprod(l, z) - crossprod(u,s) - crossprod(r,q) old.sigfig <- sigfig sigfig <- max(-log10(abs(primal.obj - dual.obj)/(abs(primal.obj) 
+ 1)), 0) if (sigfig >= sigf) break if (verb > 0) # final report cat( counter, "\t", signif(primal.infeasibility,6), signif(dual.infeasibility,6), sigfig, alfa, primal.obj, dual.obj,"\n") ## some more intermediate variables (the hat section) hat.beta <- beta - v * gamma.w / w hat.alpha <- alpha - p * gamma.q / q hat.nu <- nu + g * gamma.z / z hat.tau <- tau - t * gamma.s / s ## the diagonal terms d <- z / g + s / t e <- 1 / (v / w + q / p) ## initialization before the big cholesky if (smw == 0) diag(H.x) <- H.diag + d diag(H.y) <- e c.x <- sigma - z * hat.nu / g - s * hat.tau / t c.y <- rho - e * (hat.beta - q * hat.alpha / p) ## and solve the system [-H.x A' A H.y] [delta.x, delta.y] <- [c.x c.y] if(smw == 0){ AP[xp,xp] <- -H.x AP[xp == FALSE, xp== FALSE] <- H.y s1.tmp <- solve(AP,c(c.x,c.y)) delta.x<-s1.tmp[1:n] ; delta.y <- s1.tmp[-(1:n)] } else { V <- diag(smwn) smwinner <- chol(V + chunkmult(t(H),2000,d)) smwa1 <- t(A) smwa1 <- smwa1 / d smwc1 <- c.x / d smwa2 <- t(A) - (H %*% solve(smwinner,solve(t(smwinner),crossprod(H,smwa1)))) smwa2 <- smwa2 / d smwc2 <- (c.x - (H %*% solve(smwinner,solve(t(smwinner),crossprod(H,smwc1)))))/d delta.y <- solve(A %*% smwa2 + H.y , c.y + A %*% smwc2) delta.x <- smwa2 %*% delta.y - smwc2 } ## backsubstitution delta.w <- - e * (hat.beta - q * hat.alpha / p + delta.y) delta.s <- s * (delta.x - hat.tau) / t delta.z <- z * (hat.nu - delta.x) / g delta.q <- q * (delta.w - hat.alpha) / p delta.v <- v * (gamma.w - delta.w) / w delta.p <- p * (gamma.q - delta.q) / q delta.g <- g * (gamma.z - delta.z) / z delta.t <- t * (gamma.s - delta.s) / s ## compute update step now (sebastian's trick) alfa <- - (1 - margin) / min(c(delta.g / g, delta.w / w, delta.t / t, delta.p / p, delta.z / z, delta.v / v, delta.s / s, delta.q / q, -1)) newmu <- (crossprod(z,g) + crossprod(v,w) + crossprod(s,t) + crossprod(p,q))/(2 * (m + n)) newmu <- mu * ((alfa - 1) / (alfa + 10))^2 gamma.z <- mu / g - z - delta.z * delta.g / g gamma.w <- mu / v - w - delta.w * delta.v / v gamma.s <- mu / t - s - delta.s * delta.t / t gamma.q <- mu / p - q - delta.q * delta.p / p ## some more intermediate variables (the hat section) hat.beta <- beta - v * gamma.w / w hat.alpha <- alpha - p * gamma.q / q hat.nu <- nu + g * gamma.z / z hat.tau <- tau - t * gamma.s / s ## initialization before the big cholesky ##for ( i in 1 : n H.x(i,i) <- H.diag(i) + d(i) ) { ##H.y <- diag(e) c.x <- sigma - z * hat.nu / g - s * hat.tau / t c.y <- rho - e * (hat.beta - q * hat.alpha / p) ## and solve the system [-H.x A' A H.y] [delta.x, delta.y] <- [c.x c.y] if (smw == 0) { AP[xp,xp] <- -H.x AP[xp == FALSE, xp== FALSE] <- H.y s1.tmp <- solve(AP,c(c.x,c.y)) delta.x<-s1.tmp[1:n] ; delta.y<-s1.tmp[-(1:n)] } else if (smw == 1) { smwc1 <- c.x / d smwc2 <- (c.x - (H %*% solve(smwinner,solve(t(smwinner),crossprod(H,smwc1))))) / d delta.y <- solve(A %*% smwa2 + H.y , c.y + A %*% smwc2) delta.x <- smwa2 %*% delta.y - smwc2 } ## backsubstitution delta.w <- - e * (hat.beta - q * hat.alpha / p + delta.y) delta.s <- s * (delta.x - hat.tau) / t delta.z <- z * (hat.nu - delta.x) / g delta.q <- q * (delta.w - hat.alpha) / p delta.v <- v * (gamma.w - delta.w) / w delta.p <- p * (gamma.q - delta.q) / q delta.g <- g * (gamma.z - delta.z) / z delta.t <- t * (gamma.s - delta.s) / s ## compute the updates alfa <- - (1 - margin) / min(c(delta.g / g, delta.w / w, delta.t / t, delta.p / p, delta.z / z, delta.v / v, delta.s / s, delta.q / q, -1)) x <- x + delta.x * alfa g <- g + delta.g * alfa w <- w + delta.w * alfa t <- t + delta.t * alfa p <- 
p + delta.p * alfa y <- y + delta.y * alfa z <- z + delta.z * alfa v <- v + delta.v * alfa s <- s + delta.s * alfa q <- q + delta.q * alfa ## these two lines put back in ? ## mu <- (crossprod(z,g) + crossprod(v,w) + crossprod(s,t) + crossprod(p,q))/(2 * (m + n)) ## mu <- mu * ((alfa - 1) / (alfa + 10))^2 mu <- newmu } if (verb > 0) ## final report cat( counter, primal.infeasibility, dual.infeasibility, sigfig, alfa, primal.obj, dual.obj) ret <- new("ipop") ## repackage the results primal(ret) <- x dual(ret) <- drop(y) if ((sigfig > sigf) & (counter < maxiter)) how(ret) <- 'converged' else { ## must have run out of counts if ((primal.infeasibility > 10e5) & (dual.infeasibility > 10e5)) how(ret) <- 'primal and dual infeasible' if (primal.infeasibility > 10e5) how(ret) <- 'primal infeasible' if (dual.infeasibility > 10e5) how(ret) <- 'dual infeasible' else ## don't really know how(ret) <- 'slow convergence, change bound?' } ret }) setGeneric("chunkmult",function(Z, csize, colscale) standardGeneric("chunkmult")) setMethod("chunkmult",signature(Z="matrix"), function(Z, csize, colscale) { n <- dim(Z)[1] m <- dim(Z)[2] d <- sqrt(colscale) nchunks <- ceiling(m/csize) res <- matrix(0,n,n) for( i in 1:nchunks) { lowerb <- (i - 1) * csize + 1 upperb <- min(i * csize, m) buffer <- t(Z[,lowerb:upperb,drop = FALSE]) bufferd <- d[lowerb:upperb] buffer <- buffer / bufferd res <- res + crossprod(buffer) } return(res) }) kernlab/R/specc.R0000644000175100001440000002543412676465043013354 0ustar hornikusers## Spectral clustering ## author : alexandros setGeneric("specc",function(x, ...) standardGeneric("specc")) setMethod("specc", signature(x = "formula"), function(x, data = NULL, na.action = na.omit, ...) { mt <- terms(x, data = data) if(attr(mt, "response") > 0) stop("response not allowed in formula") attr(mt, "intercept") <- 0 cl <- match.call() mf <- match.call(expand.dots = FALSE) mf$formula <- mf$x mf$... <- NULL mf[[1L]] <- quote(stats::model.frame) mf <- eval(mf, parent.frame()) na.act <- attr(mf, "na.action") x <- model.matrix(mt, mf) res <- specc(x, ...) cl[[1]] <- as.name("specc") if(!is.null(na.act)) n.action(res) <- na.action return(res) }) setMethod("specc",signature(x="matrix"),function(x, centers, kernel = "rbfdot", kpar = "automatic", nystrom.red = FALSE, nystrom.sample = dim(x)[1]/6, iterations = 200, mod.sample = 0.75, na.action = na.omit, ...) { x <- na.action(x) rown <- rownames(x) x <- as.matrix(x) m <- nrow(x) if (missing(centers)) stop("centers must be a number or a matrix") if (length(centers) == 1) { nc <- centers if (m < centers) stop("more cluster centers than data points.") } else nc <- dim(centers)[2] if(is.character(kpar)) { kpar <- match.arg(kpar,c("automatic","local")) if(kpar == "automatic") { if (nystrom.red == TRUE) sam <- sample(1:m, floor(mod.sample*nystrom.sample)) else sam <- sample(1:m, floor(mod.sample*m)) sx <- unique(x[sam,]) ns <- dim(sx)[1] dota <- rowSums(sx*sx)/2 ktmp <- crossprod(t(sx)) for (i in 1:ns) ktmp[i,]<- 2*(-ktmp[i,] + dota + rep(dota[i], ns)) ## fix numerical prob. 
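## (round-off can make a few of the squared distances in `ktmp` come out
## slightly negative, so the next line clamps them at zero before the
## square root is taken; the candidate kernel widths `tmpsig` built
## further below span the range of these pairwise distances, and the
## width whose spectral embedding yields the smallest total
## within-cluster sum of squares is the one kept for the final RBF
## affinity matrix)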
ktmp[ktmp<0] <- 0 ktmp <- sqrt(ktmp) kmax <- max(ktmp) kmin <- min(ktmp + diag(rep(Inf,dim(ktmp)[1]))) kmea <- mean(ktmp) lsmin <- log2(kmin) lsmax <- log2(kmax) midmax <- min(c(2*kmea, kmax)) midmin <- max(c(kmea/2,kmin)) rtmp <- c(seq(midmin,0.9*kmea,0.05*kmea), seq(kmea,midmax,0.08*kmea)) if ((lsmax - (Re(log2(midmax))+0.5)) < 0.5) step <- (lsmax - (Re(log2(midmax))+0.5)) else step <- 0.5 if (((Re(log2(midmin))-0.5)-lsmin) < 0.5 ) stepm <- ((Re(log2(midmin))-0.5) - lsmin) else stepm <- 0.5 tmpsig <- c(2^(seq(lsmin,(Re(log2(midmin))-0.5), stepm)), rtmp, 2^(seq(Re(log2(midmax))+0.5, lsmax,step))) diss <- matrix(rep(Inf,length(tmpsig)*nc),ncol=nc) for (i in 1:length(tmpsig)){ ka <- exp((-(ktmp^2))/(2*(tmpsig[i]^2))) diag(ka) <- 0 d <- 1/sqrt(rowSums(ka)) if(!any(d==Inf) && !any(is.na(d))&& (max(d)[1]-min(d)[1] < 10^4)) { l <- d * ka %*% diag(d) xi <- eigen(l,symmetric=TRUE)$vectors[,1:nc] yi <- xi/sqrt(rowSums(xi^2)) res <- kmeans(yi, centers, iterations) diss[i,] <- res$withinss } } ms <- which.min(rowSums(diss)) kernel <- rbfdot((tmpsig[ms]^(-2))/2) ## Compute Affinity Matrix if (nystrom.red == FALSE) km <- kernelMatrix(kernel, x) } if (kpar=="local") { if (nystrom.red == TRUE) stop ("Local Scaling not supported for nystrom reduction.") s <- rep(0,m) dota <- rowSums(x*x)/2 dis <- crossprod(t(x)) for (i in 1:m) dis[i,]<- 2*(-dis[i,] + dota + rep(dota[i],m)) ## fix numerical prob. dis[dis < 0] <- 0 for (i in 1:m) s[i] <- median(sort(sqrt(dis[i,]))[1:5]) ## Compute Affinity Matrix km <- exp(-dis / s%*%t(s)) kernel <- "Localy scaled RBF kernel" } } else { if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") ## Compute Affinity Matrix if (nystrom.red == FALSE) km <- kernelMatrix(kernel, x) } if (nystrom.red == TRUE){ n <- floor(nystrom.sample) ind <- sample(1:m, m) x <- x[ind,] tmps <- sort(ind, index.return = TRUE) reind <- tmps$ix A <- kernelMatrix(kernel, x[1:n,]) B <- kernelMatrix(kernel, x[-(1:n),], x[1:n,]) d1 <- colSums(rbind(A,B)) d2 <- rowSums(B) + drop(matrix(colSums(B),1) %*% .ginv(A)%*%t(B)) dhat <- sqrt(1/c(d1,d2)) A <- A * (dhat[1:n] %*% t(dhat[1:n])) B <- B * (dhat[(n+1):m] %*% t(dhat[1:n])) Asi <- .sqrtm(.ginv(A)) Q <- A + Asi %*% crossprod(B) %*% Asi tmpres <- svd(Q) U <- tmpres$u L <- tmpres$d V <- rbind(A,B) %*% Asi %*% U %*% .ginv(sqrt(diag(L))) yi <- matrix(0,m,nc) ## for(i in 2:(nc +1)) ## yi[,i-1] <- V[,i]/V[,1] for(i in 1:nc) ## specc yi[,i] <- V[,i]/sqrt(sum(V[,i]^2)) res <- kmeans(yi[reind,], centers, iterations) } else{ if(is(kernel)[1] == "rbfkernel") diag(km) <- 0 d <- 1/sqrt(rowSums(km)) l <- d * km %*% diag(d) xi <- eigen(l)$vectors[,1:nc] yi <- xi/sqrt(rowSums(xi^2)) res <- kmeans(yi, centers, iterations) } cent <- matrix(unlist(lapply(1:nc,ll<- function(l){colMeans(x[which(res$cluster==l), ,drop=FALSE])})),ncol=dim(x)[2], byrow=TRUE) withss <- unlist(lapply(1:nc,ll<- function(l){sum((x[which(res$cluster==l),, drop=FALSE] - cent[l,])^2)})) names(res$cluster) <- rown return(new("specc", .Data=res$cluster, size = res$size, centers=cent, withinss=withss, kernelf= kernel)) }) setMethod("specc",signature(x="list"),function(x, centers, kernel = "stringdot", kpar = list(length=4, lambda=0.5), nystrom.red = FALSE, nystrom.sample = length(x)/6, iterations = 200, mod.sample = 0.75, na.action = na.omit, ...) 
{ x <- na.action(x) m <- length(x) if (missing(centers)) stop("centers must be a number or a matrix") if (length(centers) == 1) { nc <- centers if (m < centers) stop("more cluster centers than data points.") } else nc <- dim(centers)[2] if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") if (nystrom.red == TRUE){ n <- nystrom.sample ind <- sample(1:m, m) x <- x[ind,] tmps <- sort(ind, index.return = TRUE) reind <- tmps$ix A <- kernelMatrix(kernel, x[1:n,]) B <- kernelMatrix(kernel, x[-(1:n),], x[1:n,]) d1 <- colSums(rbind(A,B)) d2 <- rowSums(B) + drop(matrix(colSums(B),1) %*% .ginv(A)%*%t(B)) dhat <- sqrt(1/c(d1,d2)) A <- A * (dhat[1:n] %*% t(dhat[1:n])) B <- B * (dhat[(n+1):m] %*% t(dhat[1:n])) Asi <- .sqrtm(.ginv(A)) Q <- A + Asi %*% crossprod(B) %*% Asi tmpres <- svd(Q) U <- tmpres$u L <- tmpres$d V <- rbind(A,B) %*% Asi %*% U %*% .ginv(sqrt(diag(L))) yi <- matrix(0,m,nc) ## for(i in 2:(nc +1)) ## yi[,i-1] <- V[,i]/V[,1] for(i in 1:nc) ## specc yi[,i] <- V[,i]/sqrt(sum(V[,i]^2)) res <- kmeans(yi[reind,], centers, iterations) } else{ ## Compute Affinity Matrix / in our case just the kernel matrix km <- kernelMatrix(kernel, x) if(is(kernel)[1] == "rbfkernel") diag(km) <- 0 d <- 1/sqrt(rowSums(km)) l <- d * km %*% diag(d) xi <- eigen(l)$vectors[,1:nc] sqxi <- rowSums(xi^2) if(any(sqxi==0)) stop("Zero eigenvector elements, try using a lower value for the length hyper-parameter") yi <- xi/sqrt(sqxi) res <- kmeans(yi, centers, iterations) } return(new("specc", .Data=res$cluster, size = res$size, kernelf= kernel)) }) setMethod("specc",signature(x="kernelMatrix"),function(x, centers, nystrom.red = FALSE, iterations = 200, ...) 
{ m <- nrow(x) if (missing(centers)) stop("centers must be a number or a matrix") if (length(centers) == 1) { nc <- centers if (m < centers) stop("more cluster centers than data points.") } else nc <- dim(centers)[2] if(dim(x)[1]!=dim(x)[2]) { nystrom.red <- TRUE if(dim(x)[1] < dim(x)[2]) x <- t(x) m <- nrow(x) n <- ncol(x) } if (nystrom.red == TRUE){ A <- x[1:n,] B <- x[-(1:n),] d1 <- colSums(rbind(A,B)) d2 <- rowSums(B) + drop(matrix(colSums(B),1) %*% .ginv(A)%*%t(B)) dhat <- sqrt(1/c(d1,d2)) A <- A * (dhat[1:n] %*% t(dhat[1:n])) B <- B * (dhat[(n+1):m] %*% t(dhat[1:n])) Asi <- .sqrtm(.ginv(A)) Q <- A + Asi %*% crossprod(B) %*% Asi tmpres <- svd(Q) U <- tmpres$u L <- tmpres$d V <- rbind(A,B) %*% Asi %*% U %*% .ginv(sqrt(diag(L))) yi <- matrix(0,m,nc) ## for(i in 2:(nc +1)) ## yi[,i-1] <- V[,i]/V[,1] for(i in 1:nc) ## specc yi[,i] <- V[,i]/sqrt(sum(V[,i]^2)) res <- kmeans(yi, centers, iterations) } else{ d <- 1/sqrt(rowSums(x)) l <- d * x %*% diag(d) xi <- eigen(l)$vectors[,1:nc] yi <- xi/sqrt(rowSums(xi^2)) res <- kmeans(yi, centers, iterations) } ## cent <- matrix(unlist(lapply(1:nc,ll<- function(l){colMeans(x[which(res$cluster==l),])})),ncol=dim(x)[2], byrow=TRUE) ## withss <- unlist(lapply(1:nc,ll<- function(l){sum((x[which(res$cluster==l),] - cent[l,])^2)})) return(new("specc", .Data=res$cluster, size = res$size, centers = matrix(0), withinss = c(0), kernelf= "Kernel Matrix used as input.")) }) setMethod("show","specc", function(object){ cat("Spectral Clustering object of class \"specc\"","\n") cat("\n","Cluster memberships:","\n","\n") cat(object@.Data,"\n","\n") show(kernelf(object)) cat("\n") if(!any(is.na(centers(object)))){ cat(paste("Centers: ","\n")) show(centers(object)) cat("\n")} cat(paste("Cluster size: ","\n")) show(size(object)) cat("\n") if(!is.logical(withinss(object))){ cat(paste("Within-cluster sum of squares: ", "\n")) show(withinss(object)) cat("\n")} }) .ginv <- function (X, tol = sqrt(.Machine$double.eps)) { if (length(dim(X)) > 2 || !(is.numeric(X) || is.complex(X))) stop("'X' must be a numeric or complex matrix") if (!is.matrix(X)) X <- as.matrix(X) Xsvd <- svd(X) if (is.complex(X)) Xsvd$u <- Conj(Xsvd$u) Positive <- Xsvd$d > max(tol * Xsvd$d[1], 0) if (all(Positive)) Xsvd$v %*% (1/Xsvd$d * t(Xsvd$u)) else if (!any(Positive)) array(0, dim(X)[2:1]) else Xsvd$v[, Positive, drop = FALSE] %*% ((1/Xsvd$d[Positive]) * t(Xsvd$u[, Positive, drop = FALSE])) } .sqrtm <- function(x) { tmpres <- eigen(x) V <- t(tmpres$vectors) D <- tmpres$values if(is.complex(D)) D <- Re(D) D <- pmax(D,0) return(crossprod(V*sqrt(D),V)) } kernlab/R/kkmeans.R0000644000175100001440000004650414221632720013673 0ustar hornikusers## kernel kmeans function ## author: alexandros setGeneric("kkmeans",function(x, ...) standardGeneric("kkmeans")) setMethod("kkmeans", signature(x = "formula"), function(x, data = NULL, na.action = na.omit, ...) { mt <- terms(x, data = data) if(attr(mt, "response") > 0) stop("response not allowed in formula") attr(mt, "intercept") <- 0 cl <- match.call() mf <- match.call(expand.dots = FALSE) mf$formula <- mf$x mf$... <- NULL mf[[1L]] <- quote(stats::model.frame) mf <- eval(mf, parent.frame()) na.act <- attr(mf, "na.action") x <- model.matrix(mt, mf) res <- kkmeans(x, ...) cl[[1]] <- as.name("kkmeans") if(!is.null(na.act)) n.action(res) <- na.action return(res) }) setMethod("kkmeans",signature(x="matrix"),function(x, centers, kernel = "rbfdot", kpar = "automatic", alg ="kkmeans", p = 1, na.action = na.omit, ...) 
{ x <- na.action(x) rown <- rownames(x) x <- as.matrix(x) m <- nrow(x) if (missing(centers)) stop("centers must be a number or a matrix") if (length(centers) == 1) { nc <- centers if (m < centers) stop("more cluster centers than data points.") } else nc <- dim(centers)[2] if(is.character(kernel)){ kernel <- match.arg(kernel,c("rbfdot","polydot","tanhdot","vanilladot","laplacedot","besseldot","anovadot","splinedot","stringdot")) if(kernel == "matrix") if(dim(x)[1]==dim(x)[2]) return(kkmeans(as.kernelMatrix(x), centers= centers)) else stop(" kernel matrix not square!") if(is.character(kpar)) if((kernel == "tanhdot" || kernel == "vanilladot" || kernel == "polydot"|| kernel == "besseldot" || kernel== "anovadot"|| kernel=="splinedot"||kernel=="stringdot") && kpar=="automatic" ) { cat (" Setting default kernel parameters ","\n") kpar <- list() } } if (!is.function(kernel)) if (!is.list(kpar)&&is.character(kpar)&&(is(kernel, "rbfkernel") || is(kernel, "laplacedot") || kernel == "laplacedot"|| kernel=="rbfdot")){ kp <- match.arg(kpar,"automatic") if(kp=="automatic") kpar <- list(sigma=mean(sigest(x,scaled=FALSE)[c(1,3)])) cat("Using automatic sigma estimation (sigest) for RBF or laplace kernel","\n") } if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") if(length(centers) == 1){ suppressWarnings(vgr<- vgr2 <- split(sample(1:m,m),1:centers)) ncenters <- centers } else { ncenters <- ns <- dim(centers)[1] dota <- rowSums(x*x)/2 dotb <- rowSums(centers*centers)/2 ktmp <- x%*%t(centers) for(i in 1:ns) ktmp[,i]<- ktmp[,i] - dota - rep(dotb[i],m) prts <- max.col(ktmp) vgr <- vgr2 <- lapply(1:ns, function(x) which(x==prts)) } if(is.character(alg)) alg <- match.arg(alg,c("kkmeans","kerninghan", "normcut")) if(alg == "kkmeans") { p <- NULL D <- NULL D1 <- NULL w <- rep(1,m) } if(alg=="kerninghan") { p <- p D <- kernelMult(kernel,x, , rep(1,m)) w <- rep(1,m) D1 <- NULL } if(alg=="normcut") { p <- p D1 <- 1 w <- kernelMult(kernel,x, , rep(1,m)) } ## initialize lower bound and distance matrix dismat <- lower <- matrix(0,m,ncenters) ## calculate diagonal kdiag <- rep(1,m) for (i in 1:m) kdiag[i] <- drop(kernel(x[i,],x[i,])) ## initialize center-newcenter distance vector second sum vector secsum <- dc <- rep(1,ncenters) mindis <- rep(0,m) cind <- 1:ncenters for ( i in 1:ncenters) { ## compute second sum eq. 
1 secsum[i] <- sum(affinMult(kernel, x[vgr[[i]],,drop=FALSE],,w[vgr[[i]]], p , D, D1) * w[vgr[[i]]])/sum(w[vgr[[i]]])^2 ## calculate initial distance matrix and lower bounds lower[,i] <- dismat[,i] <- - 2 * affinMult(kernel,x,x[vgr[[i]],,drop=FALSE], w[vgr[[i]]], p ,D, D1)/sum(w[vgr[[i]]]) + secsum[i] + kdiag } cluserm <- max.col(-dismat) for(i in 1:ncenters) vgr2[[i]] <- which(cluserm==i) while(1){ for (z in 1:ncenters) dc[z] <- -2*sum(affinMult(kernel, x[vgr2[[z]],,drop=FALSE], x[vgr[[z]],,drop=FALSE], w[vgr[[z]]], p, D, D1)*w[vgr2[[z]]])/(sum(w[vgr[[z]]])*sum(w[vgr2[[z]]])) + sum(affinMult(kernel, x[vgr[[z]],,drop=FALSE], ,w[vgr[[z]]], p, D, D1) * w[vgr[[z]]]) / sum(w[vgr[[z]]])^2 + sum(affinMult(kernel, x[vgr2[[z]],,drop=FALSE], ,w[vgr2[[z]]], p, D, D1) * w[vgr2[[z]]]) / sum(w[vgr2[[z]]])^2 ## assign new cluster indexes vgr <- vgr2 if(sum(abs(dc)) < 1e-15) break for (u in 1:ncenters){ ## compare already calulated distances of every poit to intra - center distance to determine if ## it is necesary to compute the distance at this point, we create an index of points to compute distance if(u > 1) compin <- apply(t(t(dismat[,1:(u-1)]) < dismat[,u] - dc[u]),1,sum)==0 else compin <- rep(TRUE,m) ## compute second sum eq. 1 secsum[u] <- sum(affinMult(kernel, x[vgr[[u]],,drop=FALSE], ,w[vgr[[u]]], p, D, D1) * w[vgr[[u]]])/sum(w[vgr[[u]]])^2 ## compute distance matrix and lower bounds lower[compin,u] <- dismat[compin,u] <- - 2 * affinMult(kernel,x[compin,],x[vgr[[u]],,drop=FALSE], w[vgr[[u]]], p , D, D1)/sum(w[vgr[[u]]]) + secsum[u] + kdiag[compin] } ## calculate new cluster indexes cluserm <- max.col(-dismat) for(i in 1:ncenters) vgr2[[i]] <- which(cluserm==i) } cluster <- max.col(-dismat) size <- unlist(lapply(1:ncenters, ll <- function(l){length(which(cluster==l))})) cent <- matrix(unlist(lapply(1:ncenters,ll<- function(l){colMeans(x[which(cluster==l),])})),ncol=dim(x)[2], byrow=TRUE) withss <- unlist(lapply(1:ncenters,ll<- function(l){sum((x[which(cluster==l),] - cent[l,])^2)})) names(cluster) <- rown return(new("specc", .Data=cluster, size = size, centers=cent, withinss=withss, kernelf= kernel)) }) ## kernel Matrix interface setMethod("kkmeans",signature(x="kernelMatrix"),function(x, centers, ...) { m <- nrow(x) if (missing(centers)) stop("centers must be a number or a matrix") if (length(centers) == 1) { nc <- centers if (m < centers) stop("more cluster centers than data points.") } else nc <- dim(centers)[2] if(length(centers) == 1){ suppressWarnings(vgr<- vgr2 <- split(sample(1:m,m),1:centers)) ncenters <- centers } else ncenters <- dim(centers)[1] ## initialize lower bound and distance matrix dismat <- lower <- matrix(0,m,ncenters) ## diagonal kdiag <- diag(x) ## weigths (should be adapted for future versions !!) w <- rep(1,m) ## initialize center-newcenter distance vector second sum vector secsum <- dc <- rep(1,ncenters) mindis <- rep(0,m) cind <- 1:ncenters for ( i in 1:ncenters) { ## compute second sum eq. 
1 secsum[i] <- sum(drop(crossprod(x[vgr[[i]],vgr[[i]],drop=FALSE],w[vgr[[i]]])) * w[vgr[[i]]])/sum(w[vgr[[i]]])^2 ## calculate initial distance matrix and lower bounds lower[,i] <- dismat[,i] <- - 2 * x[,vgr[[i]],drop=FALSE]%*%w[vgr[[i]]]/sum(w[vgr[[i]]]) + secsum[i] + kdiag } cluserm <- max.col(-dismat) for(i in 1:ncenters) vgr2[[i]] <- which(cluserm==i) while(1){ for (z in 1:ncenters) dc[z] <- -2*sum((x[vgr2[[z]],vgr[[z]],drop=FALSE] %*% w[vgr[[z]]])*w[vgr2[[z]]])/(sum(w[vgr[[z]]])*sum(w[vgr2[[z]]])) + sum(drop(crossprod(x[vgr[[z]],vgr[[z]],drop=FALSE],w[vgr[[z]]])) * w[vgr[[z]]]) / sum(w[vgr[[z]]])^2 + sum(drop(crossprod(x[vgr2[[z]],vgr2[[z]],drop=FALSE],w[vgr2[[z]]])) * w[vgr2[[z]]]) / sum(w[vgr2[[z]]])^2 ## assign new cluster indexes vgr <- vgr2 if(sum(abs(dc))<1e-15) break for (u in 1:ncenters){ ## compare already calulated distances of every point to intra - center distance to determine if ## it is necesary to compute the distance at this point, we create an index of points to compute distance if(u > 1) compin <- apply(t(t(dismat[,1:(u-1)]) < dismat[,u] - dc[u]),1,sum)==0 else compin <- rep(TRUE,m) ## compute second sum eq. 1 secsum[u] <- sum(drop(crossprod(x[vgr[[u]],vgr[[u]],drop=FALSE],w[vgr[[u]]])) * w[vgr[[u]]])/sum(w[vgr[[u]]])^2 ## compute distance matrix and lower bounds lower[compin,u] <- dismat[compin,u] <- - 2 * (x[which(compin),vgr[[u]],drop=FALSE] %*% w[vgr[[u]]])/sum(w[vgr[[u]]]) + secsum[u] + kdiag[compin] } ## calculate new cluster indexes cluserm <- max.col(-dismat) for(i in 1:ncenters) vgr2[[i]] <- which(cluserm==i) } cluster <- max.col(-dismat) size <- unlist(lapply(1:ncenters, ll <- function(l){length(which(cluster==l))})) cent <- matrix(unlist(lapply(1:ncenters,ll<- function(l){colMeans(x[which(cluster==l),])})),ncol=dim(x)[2], byrow=TRUE) withss <- unlist(lapply(1:ncenters,ll<- function(l){sum((x[which(cluster==l),] - cent[l,])^2)})) return(new("specc", .Data=cluster, size = size, centers=cent, withinss=withss, kernelf= "Kernel matrix used")) }) ## List interface setMethod("kkmeans",signature(x="list"),function(x, centers, kernel = "stringdot", kpar = list(length=4, lambda=0.5), alg ="kkmeans", p = 1, na.action = na.omit, ...) { x <- na.action(x) m <- length(x) if (missing(centers)) stop("centers must be a number or a matrix") if (length(centers) == 1) { nc <- centers if (m < centers) stop("more cluster centers than data points.") } else nc <- dim(centers)[2] if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") if(length(centers) == 1){ suppressWarnings(vgr<- vgr2 <- split(sample(1:m,m),1:centers)) ncenters <- centers } else ncenters <- dim(centers)[1] if(is.character(alg)) alg <- match.arg(alg,c("kkmeans","kerninghan", "normcut")) if(alg == "kkmeans") { p <- NULL D <- NULL D1 <- NULL w <- rep(1,m) } if(alg=="kerninghan") { p <- p D <- kernelMult(kernel,x, , rep(1,m)) w <- rep(1,m) D1 <- NULL } if(alg=="normcut") { p <- p D1 <- 1 w <- kernelMult(kernel,x, , rep(1,m)) } ## initialize lower bound and distance matrix dismat <- lower <- matrix(0,m,ncenters) ## calculate diagonal kdiag <- rep(1,m) for (i in 1:m) kdiag[i] <- drop(kernel(x[[i]],x[[i]])) ## initialize center-newcenter distance vector second sum vector secsum <- dc <- rep(1,ncenters) mindis <- rep(0,m) cind <- 1:ncenters for ( i in 1:ncenters) { ## compute second sum eq. 
1 secsum[i] <- sum(affinMult(kernel, x[vgr[[i]]],,w[vgr[[i]]], p , D, D1) * w[vgr[[i]]])/sum(w[vgr[[i]]])^2 ## calculate initial distance matrix and lower bounds lower[,i] <- dismat[,i] <- - 2 * affinMult(kernel,x,x[vgr[[i]]], w[vgr[[i]]], p ,D, D1)/sum(w[vgr[[i]]]) + secsum[i] + kdiag } cluserm <- max.col(-dismat) for(i in 1:ncenters) vgr2[[i]] <- which(cluserm==i) while(1){ for (z in 1:ncenters) dc[z] <- -2*sum(affinMult(kernel, x[vgr2[[z]]], x[vgr[[z]]], w[vgr[[z]]], p, D, D1)*w[vgr2[[z]]])/(sum(w[vgr[[z]]])*sum(w[vgr2[[z]]])) + sum(affinMult(kernel, x[vgr[[z]]], ,w[vgr[[z]]], p, D, D1) * w[vgr[[z]]]) / sum(w[vgr[[z]]])^2 + sum(affinMult(kernel, x[vgr2[[z]]], ,w[vgr2[[z]]], p, D, D1) * w[vgr2[[z]]]) / sum(w[vgr2[[z]]])^2 ## assign new cluster indexes vgr <- vgr2 if(sum(abs(dc))<1e-15) break for (u in 1:ncenters){ ## compare already calulated distances of every poit to intra - center distance to determine if ## it is necesary to compute the distance at this point, we create an index of points to compute distance if(u > 1) compin <- apply(t(t(dismat[,1:(u-1)]) < dismat[,u] - dc[u]),1,sum)==0 else compin <- rep(TRUE,m) ## compute second sum eq. 1 secsum[u] <- sum(affinMult(kernel, x[vgr[[u]]], ,w[vgr[[u]]], p, D, D1) * w[vgr[[u]]])/sum(w[vgr[[u]]])^2 ## compute distance matrix and lower bounds lower[compin,u] <- dismat[compin,u] <- - 2 * affinMult(kernel,x[compin,],x[vgr[[u]]], w[vgr[[u]]], p , D, D1)/sum(w[vgr[[u]]]) + secsum[u] + kdiag[compin] } ## calculate new cluster indexes cluserm <- max.col(-dismat) for(i in 1:ncenters) vgr2[[i]] <- which(cluserm==i) } cluster <- max.col(-dismat) size <- unlist(lapply(1:ncenters, ll <- function(l){length(which(cluster==l))})) cent <- matrix(unlist(lapply(1:ncenters,ll<- function(l){colMeans(x[which(cluster==l),])})),ncol=dim(x)[2], byrow=TRUE) withss <- unlist(lapply(1:ncenters,ll<- function(l){sum((x[which(cluster==l),] - cent[l,])^2)})) return(new("specc", .Data=cluster, size = size, centers=cent, withinss=withss, kernelf= kernel)) }) setGeneric("affinMult",function(kernel, x, y = NULL, z, p, D, D1, blocksize = 256) standardGeneric("affinMult")) affinMult.rbfkernel <- function(kernel, x, y=NULL, z, p, D, D1,blocksize = 256) { if(is.null(p)&is.null(D)&is.null(D1)) res <- kernelMult(kernel,x,y,z) else{ if(!is.matrix(y)&&!is.null(y)) stop("y must be a matrix") if(!is.matrix(z)&&!is.vector(z)) stop("z must be a matrix or a vector") sigma <- kpar(kernel)$sigma n <- dim(x)[1] m <- dim(x)[2] nblocks <- floor(n/blocksize) lowerl <- 1 upperl <- 0 dota <- as.matrix(rowSums(x^2)) if (is.null(y) & is.null(D1)) { if(is.vector(z)) { if(!length(z) == n) stop("vector z length must be equal to x rows") z <- matrix(z,n,1) } if(!dim(z)[1]==n) stop("z rows must equal x rows") res <- matrix(rep(0,dim(z)[2]*n), ncol = dim(z)[2]) if(nblocks > 0) { dotab <- rep(1,blocksize)%*%t(dota) for(i in 1:nblocks) { upperl = upperl + blocksize res[lowerl:upperl,] <- exp(sigma*(2*x[lowerl:upperl,]%*%t(x) - dotab - dota[lowerl:upperl]%*%t(rep.int(1,n))))%*%z - z[lowerl:upperl,]*(1-p) lowerl <- upperl + 1 } } if(lowerl <= n) res[lowerl:n,] <- exp(sigma*(2*x[lowerl:n,]%*%t(x) - rep.int(1,n+1-lowerl)%*%t(dota) - dota[lowerl:n]%*%t(rep.int(1,n))))%*%z- z[lowerl:upperl,]*(1-p) } if(is.matrix(y) & is.null(D1)) { n2 <- dim(y)[1] if(is.vector(z)) { if(!length(z) == n2) stop("vector z length must be equal to y rows") z <- matrix(z,n2,1) } if(!dim(z)[1]==n2) stop("z length must equal y rows") res <- matrix(rep(0,dim(z)[2]*n), ncol = dim(z)[2]) dotb <- as.matrix(rowSums(y*y)) if(nblocks > 0) { 
dotbb <- rep(1,blocksize)%*%t(dotb) for(i in 1:nblocks) { upperl = upperl + blocksize if(upperl < n2) res[lowerl:upperl,] <- exp(sigma*(2*x[lowerl:upperl,]%*%t(y) - dotbb - dota[lowerl:upperl]%*%t(rep.int(1,n2))))%*%z-z[lowerl:upperl,]*(1-p) - z[lowerl:upperl,]*D[lowerl:upperl] if(upperl >n2 & lowerl n2 & n>=n2){ res[lowerl:n,] <- exp(sigma*(2*x[lowerl:n,]%*%t(y) - rep.int(1,n+1-lowerl)%*%t(dotb) -dota[lowerl:n]%*%t(rep.int(1,n2))))%*%z res[lowerl:n2,] <- res[lowerl:n2,] - z[lowerl:n2,]*(1-p) - z[lowerl:n2,]*D[lowerl:n2] } else res[lowerl:n,] <- exp(sigma*(2*x[lowerl:n,]%*%t(y) - rep.int(1,n+1-lowerl)%*%t(dotb) - dota[lowerl:n]%*%t(rep.int(1,n2))))%*%z } } if (is.null(y) & !is.null(D1)) { if(is.vector(z)) { if(!length(z) == n) stop("vector z length must be equal to x rows") z <- matrix(z,n,1) } if(!dim(z)[1]==n) stop("z rows must equal x rows") res <- matrix(rep(0,dim(z)[2]*n), ncol = dim(z)[2]) if(nblocks > 0) { dotab <- rep(1,blocksize)%*%t(dota) for(i in 1:nblocks) { upperl = upperl + blocksize tmp <- exp(sigma*(2*x[lowerl:upperl,]%*%t(x) - dotab - dota[lowerl:upperl]%*%t(rep.int(1,n)))) D1 <- 1/colSums(tmp) res[lowerl:upperl,] <- D1*tmp%*%diag(D1)%*%z - z[lowerl:upperl,]*(1-D1) lowerl <- upperl + 1 } } if(lowerl <= n){ tmp <- exp(sigma*(2*x[lowerl:n,]%*%t(x) - rep.int(1,n+1-lowerl)%*%t(dota) - dota[lowerl:n]%*%t(rep.int(1,n)))) res[lowerl:n,] <- D1*tmp%*%diag(D1)%*%z- z[lowerl:upperl,]*(1-D1) } } if(is.matrix(y) &!is.null(D1)) { n2 <- dim(y)[1] if(is.vector(z)) { if(!length(z) == n2) stop("vector z length must be equal to y rows") z <- matrix(z,n2,1) } if(!dim(z)[1]==n2) stop("z length must equal y rows") res <- matrix(rep(0,dim(z)[2]*n), ncol = dim(z)[2]) dotb <- as.matrix(rowSums(y*y)) ones <- rep(1,blocksize) if(nblocks > 0) { dotbb <- rep(1,blocksize)%*%t(dotb) for(i in 1:nblocks) { upperl = upperl + blocksize if(upperl < n2) tmp <- exp(sigma*(2*x[lowerl:upperl,]%*%t(y) - dotbb - dota[lowerl:upperl]%*%t(rep.int(1,n2)))) D1 <- 1/colSums(tmp) res[lowerl:upperl,] <- D1*tmp%*%diag(D1)%*%z-z[lowerl:upperl,]*(1-D1) if(upperl >n2 & lowerl n2 & n>=n2){ tmp <- exp(sigma*(2*x[lowerl:n,]%*%t(y) -rep.int(1,n+1-lowerl)%*%t(dotb) -dota[lowerl:n]%*%t(rep.int(1,n2)))) D1 <- 1/colSums(tmp) res[lowerl:n,] <- D1*tmp%*%diag(D1)%*%z res[lowerl:n2,] <- res[lowerl:n2,] - z[lowerl:n2,]*(1-D1) } else{ tmp <- exp(sigma*(2*x[lowerl:n,]%*%t(y) -rep.int(1,n+1-lowerl)%*%t(dotb) -dota[lowerl:n]%*%t(rep.int(1,n2)))) D1 <- 1/colSums(tmp) res[lowerl:n,] <- D1*tmp%*%diag(D1)%*%z } } } } return(res) } setMethod("affinMult",signature(kernel="kernel", x="matrix"),affinMult.rbfkernel) kernlab/R/sigest.R0000644000175100001440000000465612676465031013555 0ustar hornikusers## sigma estimation for RBF kernels ## author: alexandros setGeneric("sigest", function(x, ...) standardGeneric("sigest")) setMethod("sigest",signature(x="formula"), function (x, data=NULL, frac = 0.5, na.action = na.omit, scaled = TRUE){ call <- match.call() m <- match.call(expand.dots = FALSE) if (is.matrix(eval(m$data, parent.frame()))) m$data <- as.data.frame(data) ## m$... 
<- NULL m$formula <- m$x m$x <- NULL m$scaled <- NULL m$frac <- NULL m[[1L]] <- quote(stats::model.frame) m <- eval(m, parent.frame()) Terms <- attr(m, "terms") attr(Terms, "intercept") <- 0 x <- model.matrix(Terms, m) if (length(scaled) == 1) scaled <- rep(scaled, ncol(x)) if (any(scaled)) { remove <- unique(c(which(labels(Terms) %in% names(attr(x, "contrasts"))), which(!scaled) ) ) scaled <- !attr(x, "assign") %in% remove } ret <- sigest(x, scaled = scaled, frac = frac, na.action = na.action) return (ret) }) setMethod("sigest",signature(x="matrix"), function (x, frac = 0.5, scaled = TRUE, na.action = na.omit) { x <- na.action(x) if (length(scaled) == 1) scaled <- rep(scaled, ncol(x)) if (any(scaled)) { co <- !apply(x[,scaled, drop = FALSE], 2, var) if (any(co)) { scaled <- rep(FALSE, ncol(x)) warning(paste("Variable(s)", paste("`",colnames(x[,scaled, drop = FALSE])[co], "'", sep="", collapse=" and "), "constant. Cannot scale data.") ) } else { xtmp <- scale(x[,scaled]) x[,scaled] <- xtmp } } m <- dim(x)[1] n <- floor(frac*m) index <- sample(1:m, n, replace = TRUE) index2 <- sample(1:m, n, replace = TRUE) temp <- x[index,, drop=FALSE] - x[index2,,drop=FALSE] dist <- rowSums(temp^2) srange <- 1/quantile(dist[dist!=0],probs=c(0.9,0.5,0.1)) ## ds <- sort(dist[dist!=0]) ## sl <- ds[ceiling(0.2*length(ds))] ## su <- ds[ceiling(0.8*length(ds))] ## srange <- c(1/su,1/median(ds), 1/sl) ## names(srange) <- NULL return(srange) }) kernlab/R/kernelmatrix.R0000644000175100001440000000050311304023134014724 0ustar hornikusers setGeneric("as.kernelMatrix",function(x, center = FALSE) standardGeneric("as.kernelMatrix")) setMethod("as.kernelMatrix", signature(x = "matrix"), function(x, center = FALSE) { if(center){ m <- dim(x)[1] x <- t(t(x - colSums(x)/m) - rowSums(x)/m) + sum(x)/m^2 } return(new("kernelMatrix",.Data = x)) }) kernlab/R/kpca.R0000644000175100001440000001214412676464735013176 0ustar hornikusers## kpca function ## author : alexandros setGeneric("kpca",function(x, ...) standardGeneric("kpca")) setMethod("kpca", signature(x = "formula"), function(x, data = NULL, na.action = na.omit, ...) { mt <- terms(x, data = data) if(attr(mt, "response") > 0) stop("response not allowed in formula") attr(mt, "intercept") <- 0 cl <- match.call() mf <- match.call(expand.dots = FALSE) mf$formula <- mf$x mf$... <- NULL mf[[1L]] <- quote(stats::model.frame) mf <- eval(mf, parent.frame()) na.act <- attr(mf, "na.action") Terms <- attr(mf, "terms") x <- model.matrix(mt, mf) res <- kpca(x, ...) ## fix up call to refer to the generic, but leave arg name as `formula' cl[[1]] <- as.name("kpca") kcall(res) <- cl attr(Terms,"intercept") <- 0 terms(res) <- Terms if(!is.null(na.act)) n.action(res) <- na.act return(res) }) ## Matrix Interface setMethod("kpca",signature(x="matrix"), function(x, kernel = "rbfdot", kpar = list(sigma = 0.1), features = 0, th = 1e-4, na.action = na.omit, ...) 
{ x <- na.action(x) x <- as.matrix(x) m <- nrow(x) ret <- new("kpca") if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") km <- kernelMatrix(kernel,x) ## center kernel matrix kc <- t(t(km - colSums(km)/m) - rowSums(km)/m) + sum(km)/m^2 ## compute eigenvectors res <- eigen(kc/m,symmetric=TRUE) if(features == 0) features <- sum(res$values > th) else if(res$values[features] < th) warning(paste("eigenvalues of the kernel matrix are below threshold!")) pcv(ret) <- t(t(res$vectors[,1:features])/sqrt(res$values[1:features])) eig(ret) <- res$values[1:features] names(eig(ret)) <- paste("Comp.", 1:features, sep = "") rotated(ret) <- kc %*% pcv(ret) kcall(ret) <- match.call() kernelf(ret) <- kernel xmatrix(ret) <- x return(ret) }) ## List Interface setMethod("kpca",signature(x="list"), function(x, kernel = "stringdot", kpar = list(length = 4, lambda = 0.5), features = 0, th = 1e-4, na.action = na.omit, ...) { x <- na.action(x) m <- length(x) ret <- new("kpca") if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") km <- kernelMatrix(kernel,x) ## center kernel matrix kc <- t(t(km - colSums(km)/m) - rowSums(km)/m) + sum(km)/m^2 ## compute eigenvectors res <- eigen(kc/m,symmetric=TRUE) if(features == 0) features <- sum(res$values > th) else if(res$values[features] < th) warning(paste("eigenvalues of the kernel matrix are below threshold!")) pcv(ret) <- t(t(res$vectors[,1:features])/sqrt(res$values[1:features])) eig(ret) <- res$values[1:features] names(eig(ret)) <- paste("Comp.", 1:features, sep = "") rotated(ret) <- kc %*% pcv(ret) kcall(ret) <- match.call() kernelf(ret) <- kernel xmatrix(ret) <- x return(ret) }) ## Kernel Matrix Interface setMethod("kpca",signature(x= "kernelMatrix"), function(x, features = 0, th = 1e-4, ...) { ret <- new("kpca") m <- dim(x)[1] if(m!= dim(x)[2]) stop("Kernel matrix has to be symetric, and positive semidefinite") ## center kernel matrix kc <- t(t(x - colSums(x)/m) - rowSums(x)/m) + sum(x)/m^2 ## compute eigenvectors res <- eigen(kc/m,symmetric=TRUE) if(features == 0) features <- sum(res$values > th) else if(res$values[features] < th) warning(paste("eigenvalues of the kernel matrix are below threshold!")) pcv(ret) <- t(t(res$vectors[,1:features])/sqrt(res$values[1:features])) eig(ret) <- res$values[1:features] names(eig(ret)) <- paste("Comp.", 1:features, sep = "") rotated(ret) <- kc %*% pcv(ret) kcall(ret) <- match.call() xmatrix(ret) <- x kernelf(ret) <- " Kernel matrix used." 
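## With a precomputed kernelMatrix there is no kernel function to store, so a
## character placeholder is kept in the kernelf slot; predict() below checks
## is.character(kernelf(object)) and then treats its input as an already
## evaluated cross-kernel matrix against the training data.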
return(ret) }) ## project a new matrix into the feature space setMethod("predict",signature(object="kpca"), function(object , x) { if (!is.null(terms(object))) { if(!is.matrix(x) || !is(x,"list")) x <- model.matrix(delete.response(terms(object)), as.data.frame(x), na.action = n.action(object)) } else x <- if (is.vector(x)) t(t(x)) else if (!is(x,"list")) x <- as.matrix(x) if (is.vector(x) || is.data.frame(x)) x <- as.matrix(x) if (!is.matrix(x) && !is(x,"list")) stop("x must be a matrix a vector, a data frame, or a list") if(is(x,"matrix")) { n <- nrow(x) m <- nrow(xmatrix(object))} else { n <- length(x) m <- length(xmatrix(object)) } if(is.character(kernelf(object))) { knc <- x ka <- xmatrix(object) } else { knc <- kernelMatrix(kernelf(object),x,xmatrix(object)) ka <- kernelMatrix(kernelf(object),xmatrix(object)) } ## center ret <- t(t(knc - rowSums(knc)/m) - rowSums(ka)/m) + sum(ka)/(m*n) return(ret %*% pcv(object)) }) kernlab/R/kha.R0000644000175100001440000001042612676464711013016 0ustar hornikusers #Kernel Hebbian Algorithm function setGeneric("kha",function(x, ...) standardGeneric("kha")) setMethod("kha", signature(x = "formula"), function(x, data = NULL, na.action = na.omit, ...) { mt <- terms(x, data = data) if(attr(mt, "response") > 0) stop("response not allowed in formula") attr(mt, "intercept") <- 0 cl <- match.call() mf <- match.call(expand.dots = FALSE) mf$formula <- mf$x mf$... <- NULL mf[[1L]] <- quote(stats::model.frame) mf <- eval(mf, parent.frame()) na.act <- attr(mf, "na.action") Terms <- attr(mf, "terms") x <- model.matrix(mt, mf) res <- kha(x, ...) ## fix up call to refer to the generic, but leave arg name as `formula' cl[[1]] <- as.name("kha") kcall(res) <- cl attr(Terms,"intercept") <- 0 terms(res) <- Terms if(!is.null(na.act)) n.action(res) <- na.act return(res) }) setMethod("kha",signature(x="matrix"), function(x, kernel = "rbfdot", kpar = list(sigma = 0.1), features = 5, eta = 0.005, th = 1e-4, maxiter = 10000, verbose = FALSE, na.action = na.omit, ...) 
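## Illustrative usage sketch, assuming the kernlab package is attached and
## using `iris` as an arbitrary example data set (mirrors the kha() help page):
##   kp <- kha(~ ., data = iris[, -5], kernel = "rbfdot",
##             kpar = list(sigma = 0.2), features = 2, eta = 0.001, maxiter = 65)
##   head(predict(kp, iris[, -5]))   # iteratively estimated kernel principal components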
{ x <- na.action(x) x <- as.matrix(x) m <- nrow(x) ret <- new("kha") if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") ## Initialize A dual variables A <- matrix(runif(features*m),m,features)*2 - 1 AOld <- A ## compute square norm of data a <- rowSums(x^2) ## initialize the empirical sum kernel map eskm <- rep(0,m) for (i in 1:m) eskm[i] <- sum(kernelFast(kernel,x,x[i,,drop=FALSE], a)) eks <- sum(eskm) counter <- 0 step <- th + 1 Aold <- A while(step > th && counter < maxiter) { y <- rep(0, features) ot <- rep(0,m) ## Hebbian Iteration for (i in 1:m) { ## compute y output etkm <- as.vector(kernelFast(kernel,x,x[i,,drop=FALSE], a)) sum1 <- as.vector(etkm %*% A) sum2 <- as.vector(eskm%*%A)/m asum <- colSums(A) sum3 <- as.vector(eskm[i]*asum)/m sum4 <- as.vector(eks * asum)/m^2 y <- sum1 - sum2 - sum3 + sum4 ## update A yy <- y%*%t(y) yy[upper.tri(yy)] <- 0 tA <- t(A) A <- t(tA - eta * yy%*%tA) A[i,] <- A[i,] + eta * y } if (counter %% 100 == 0 ) { step = mean(abs(Aold - A)) Aold <- A if(verbose) cat("Iteration :", counter, "Converged :", step,"\n") } counter <- counter + 1 } ## Normalize in Feature space cA <- t(A) - colSums(A) Fnorm <- rep(0,features) for (j in 1:m) Fnorm <- Fnorm + colSums(t(cA[,j] * cA) * as.vector(kernelFast(kernel,x,x[j,,drop=FALSE],a))) if(any(Fnorm==0)) { warning("Normalization vector contains zeros, replacing them with ones") Fnorm[which(Fnorm==0)] <- 1 } A <- t(t(A)/sqrt(Fnorm)) pcv(ret) <- A eig(ret) <- Fnorm names(eig(ret)) <- paste("Comp.", 1:features, sep = "") eskm(ret) <- eskm kcall(ret) <- match.call() kernelf(ret) <- kernel xmatrix(ret) <- x return(ret) }) ## Project a new matrix into the feature space setMethod("predict",signature(object="kha"), function(object , x) { if (!is.null(terms(object))) { if(!is.matrix(x)) x <- model.matrix(delete.response(terms(object)), as.data.frame(x), na.action = n.action(object)) } else x <- if (is.vector(x)) t(t(x)) else as.matrix(x) if (is.vector(x)||is.data.frame(x)) x<-as.matrix(x) if (!is.matrix(x)) stop("x must be a matrix a vector or a data frame") n <- nrow(x) m <- nrow(xmatrix(object)) A <- pcv(object) y <- matrix(0,n,dim(A)[2]) eks <- sum(eskm(object)) a <- rowSums(xmatrix(object)^2) ## Project data sum2 <- as.vector(eskm(object)%*%A)/m asum <- colSums(A) sum4 <- as.vector(eks * asum)/m^2 for (i in 1:n) { ## compute y output etkm <- as.vector(kernelFast(kernelf(object),xmatrix(object),x[i,,drop=FALSE], a)) sum1 <- as.vector(etkm %*% A) sum3 <- sum(etkm)*asum/m y[i,] <- sum1 - sum2 - sum3 + sum4 } return(y) }) kernlab/R/kernels.R0000644000175100001440000026556713271622147013727 0ustar hornikusers## kernel functions ## Functions for computing a kernel value, matrix, matrix-vector ## product and quadratic form ## ## author : alexandros karatzoglou ## Define the kernel objects, ## functions with an additional slot for the kernel parameter list. ## kernel functions take two vector arguments and return a scalar (dot product) rbfdot<- function(sigma=1) { rval <- function(x,y=NULL) { if(!is(x,"vector")) stop("x must be a vector") if(!is(y,"vector")&&!is.null(y)) stop("y must a vector") if (is(x,"vector") && is.null(y)){ return(1) } if (is(x,"vector") && is(y,"vector")){ if (!length(x)==length(y)) stop("number of dimension must be the same on both data points") return(exp(sigma*(2*crossprod(x,y) - crossprod(x) - crossprod(y)))) # sigma/2 or sigma ?? 
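## note: 2*crossprod(x,y) - crossprod(x) - crossprod(y) = -||x - y||^2, so the
## value returned above is exp(-sigma * ||x - y||^2); the vectorized
## kernelMatrix()/kernelMult() methods for this class use the same convention.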
} } return(new("rbfkernel",.Data=rval,kpar=list(sigma=sigma))) } setClass("rbfkernel",prototype=structure(.Data=function(){},kpar=list()),contains=c("kernel")) laplacedot<- function(sigma=1) { rval <- function(x,y=NULL) { if(!is(x,"vector")) stop("x must be a vector") if(!is(y,"vector")&&!is.null(y)) stop("y must a vector") if (is(x,"vector") && is.null(y)){ return(1) } if (is(x,"vector") && is(y,"vector")){ if (!length(x)==length(y)) stop("number of dimension must be the same on both data points") return(exp(-sigma*sqrt(-(round(2*crossprod(x,y) - crossprod(x) - crossprod(y),9))))) } } return(new("laplacekernel",.Data=rval,kpar=list(sigma=sigma))) } setClass("laplacekernel",prototype=structure(.Data=function(){},kpar=list()),contains=c("kernel")) besseldot<- function(sigma = 1, order = 1, degree = 1) { rval <- function(x,y=NULL) { if(!is(x,"vector")) stop("x must be a vector") if(!is(y,"vector")&&!is.null(y)) stop("y must a vector") if (is(x,"vector") && is.null(y)){ return(1) } if (is(x,"vector") && is(y,"vector")){ if (!length(x)==length(y)) stop("number of dimension must be the same on both data points") lim <- 1/(gamma(order+1)*2^(order)) bkt <- sigma*sqrt(-(2*crossprod(x,y) - crossprod(x) - crossprod(y))) if(bkt < 10e-5) res <- lim else res <- besselJ(bkt,order)*(bkt^(-order)) return((res/lim)^degree) } } return(new("besselkernel",.Data=rval,kpar=list(sigma=sigma ,order = order ,degree = degree))) } setClass("besselkernel",prototype=structure(.Data=function(){},kpar=list()),contains=c("kernel")) anovadot<- function(sigma = 1, degree = 1) { rval <- function(x,y=NULL) { if(!is(x,"vector")) stop("x must be a vector") if(!is(y,"vector")&&!is.null(y)) stop("y must a vector") if (is(x,"vector") && is.null(y)){ return(1) } if (is(x,"vector") && is(y,"vector")){ if (!length(x)==length(y)) stop("number of dimension must be the same on both data points") res <- sum(exp(- sigma * (x - y)^2)) return((res)^degree) } } return(new("anovakernel",.Data=rval,kpar=list(sigma=sigma ,degree = degree))) } setClass("anovakernel",prototype=structure(.Data=function(){},kpar=list()),contains=c("kernel")) splinedot<- function() { rval <- function(x,y=NULL) { if(!is(x,"vector")) stop("x must be a vector") if(!is(y,"vector")&&!is.null(y)) stop("y must a vector") if (is(x,"vector") && is.null(y)){ return(1) } if (is(x,"vector") && is(y,"vector")){ if (!length(x)==length(y)) stop("number of dimension must be the same on both data points") minv <- pmin(x,y) res <- 1 + x*y*(1+minv) - ((x+y)/2)*minv^2 + (minv^3)/3 fres <- prod(res) return(fres) } } return(new("splinekernel",.Data=rval,kpar=list())) } setClass("splinekernel",prototype=structure(.Data=function(){},kpar=list()),contains=c("kernel")) fourierdot <- function(sigma = 1) { rval <- function(x,y=NULL) { if(!is(x,"vector")) stop("x must be a vector") if(!is(y,"vector")&&!is.null(y)) stop("y must a vector") if (is(x,"vector") && is.null(y)){ return(1) } if (is(x,"vector") && is(y,"vector")){ if (!length(x)==length(y)) stop("number of dimension must be the same on both data points") res <- (1 - sigma^2)/2*(1 - 2*sigma*cos(x - y) + sigma^2) fres <- prod(res) return(fres) } } return(new("fourierkernel",.Data=rval,kpar=list())) } setClass("fourierkernel",prototype=structure(.Data=function(){},kpar=list(sigma = 1)),contains=c("kernel")) tanhdot <- function(scale = 1, offset = 1) { rval<- function(x, y = NULL) { if(!is(x,"vector")) stop("x must be a vector") if(!is(y,"vector")&&!is.null(y)) stop("y must be a vector") if (is(x,"vector") && is.null(y)){ 
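## self-similarity case: k(x, x) = tanh(scale * <x, x> + offset); unlike
## rbfdot()/laplacedot() above, the branch value is not wrapped in return().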
tanh(scale*crossprod(x)+offset) } if (is(x,"vector") && is(y,"vector")){ if (!length(x)==length(y)) stop("number of dimension must be the same on both data points") tanh(scale*crossprod(x,y)+offset) } } return(new("tanhkernel",.Data=rval,kpar=list(scale=scale,offset=offset))) } setClass("tanhkernel",prototype=structure(.Data=function(){},kpar=list()),contains=c("kernel")) setClass("polykernel",prototype=structure(.Data=function(){},kpar=list()),contains=c("kernel")) polydot <- function(degree = 1, scale = 1, offset = 1) { rval<- function(x, y = NULL) { if(!is(x,"vector")) stop("x must be a vector") if(!is(y,"vector")&&!is.null(y)) stop("y must be a vector") if (is(x,"vector") && is.null(y)){ (scale*crossprod(x)+offset)^degree } if (is(x,"vector") && is(y,"vector")){ if (!length(x)==length(y)) stop("number of dimension must be the same on both data points") (scale*crossprod(x,y)+offset)^degree } } return(new("polykernel",.Data=rval,kpar=list(degree=degree,scale=scale,offset=offset))) } setClass("vanillakernel",prototype=structure(.Data=function(){},kpar=list()),contains=c("kernel")) vanilladot <- function( ) { rval<- function(x, y = NULL) { if(!is(x,"vector")) stop("x must be a vector") if(!is(y,"vector")&&!is.null(y)) stop("y must be a vector") if (is(x,"vector") && is.null(y)){ crossprod(x) } if (is(x,"vector") && is(y,"vector")){ if (!length(x)==length(y)) stop("number of dimension must be the same on both data points") crossprod(x,y) } } return(new("vanillakernel",.Data=rval,kpar=list())) } setClass("stringkernel",prototype=structure(.Data=function(){},kpar=list(length = 4, lambda = 1.1, type = "spectrum", normalized = TRUE)),contains=c("kernel")) stringdot <- function(length = 4, lambda = 1.1, type = "spectrum", normalized = TRUE) { type <- match.arg(type,c("sequence","string","fullstring","exponential","constant","spectrum", "boundrange")) ## need to do this to set the length parameters if(type == "spectrum" | type == "boundrange") lambda <- length switch(type, "sequence" = { rval<- function(x, y = NULL) { if(!is(x,"vector")) stop("x must be a vector") if(!is(y,"vector")&&!is.null(y)) stop("y must be a vector") if (is(x,"vector") && is.null(y) && normalized == FALSE) return(.Call(subsequencek, as.character(x), as.character(x), as.integer(nchar(x)), as.integer(nchar(x)), as.integer(length), as.double(lambda))) if (is(x,"vector") && is(y,"vector") && normalized == FALSE) return(.Call(subsequencek, as.character(x), as.character(y), as.integer(nchar(x)), as.integer(nchar(y)), as.integer(length), as.double(lambda))) if (is(x,"vector") && is.null(y) && normalized == TRUE) return(1) if (is(x,"vector") && is(y,"vector") && normalized == TRUE) return(.Call(subsequencek, as.character(x), as.character(y), as.integer(nchar(x)), as.integer(nchar(y)), as.integer(length), as.double(lambda)) / sqrt(.Call(subsequencek, as.character(x), as.character(x), as.integer(nchar(x)), as.integer(nchar(x)), as.integer(length), as.double(lambda)) * .Call(subsequencek, as.character(y), as.character(y), as.integer(nchar(y)), as.integer(nchar(y)), as.integer(length), as.double(lambda)))) } }, "exponential" = { rval <- function(x,y=NULL) { if(!is(x,"vector")) stop("x must be a vector") if(!is(y,"vector")&&!is.null(y)) stop("y must be a vector") x <- paste(x,"\n",sep="") if(!is.null(y)) y <- paste(y,"\n",sep="") if (normalized == FALSE){ if(is.null(y)) y <- x return(.Call(stringtv, as.character(x), as.character(y), as.integer(1), as.integer(nchar(x)), as.integer(nchar(y)), as.integer(2), as.double(lambda))) } if 
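## the normalized variants below divide by sqrt(k(x, x) * k(y, y)); the integer
## passed to stringtv selects the weighting scheme: 1 = constant, 2 = exponential
## decay, 3 = spectrum, 4 = boundrange (cf. the sktype switch used further down
## in kernelMatrix.stringkernel and kernelMult.stringkernel).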
(is(x,"vector") && is.null(y) && normalized == TRUE) return(1) if (is(x,"vector") && is(y,"vector") && normalized == TRUE) return(.Call(stringtv, as.character(x), as.character(y), as.integer(1), as.integer(nchar(x)), as.integer(nchar(y)), as.integer(2), as.double(lambda)) / sqrt(.Call(stringtv, as.character(x), as.character(x), as.integer(1), as.integer(nchar(x)), as.integer(nchar(x)), as.integer(2), as.double(lambda)) * .Call(stringtv, as.character(y), as.character(y), as.integer(1), as.integer(nchar(y)), as.integer(nchar(y)), as.integer(2), as.double(lambda)))) } }, "constant" = { rval <- function(x,y=NULL) { if(!is(x,"vector")) stop("x must be a vector") if(!is(y,"vector")&&!is.null(y)) stop("y must be a vector") x <- paste(x,"\n",sep="") if(!is.null(y)) y <- paste(y,"\n",sep="") if (normalized == FALSE){ if(is.null(y)) y <- x return(.Call(stringtv, as.character(x), as.character(y), as.integer(1), as.integer(nchar(x)), as.integer(nchar(y)), as.integer(1), as.double(lambda))) } if (is(x,"vector") && is.null(y) && normalized == TRUE) return(1) if (is(x,"vector") && is(y,"vector") && normalized == TRUE) return(.Call(stringtv, as.character(x), as.character(y), as.integer(1), as.integer(nchar(x)), as.integer(nchar(y)), as.integer(1), as.double(lambda)) / sqrt(.Call(stringtv, as.character(x), as.character(x), as.integer(1), as.integer(nchar(x)), as.integer(nchar(x)), as.integer(1), as.double(lambda)) * .Call(stringtv, as.character(y), as.character(y), as.integer(1), as.integer(nchar(y)), as.integer(nchar(y)), as.integer(1), as.double(lambda)))) } }, "spectrum" = { rval <- function(x,y=NULL) { if(!is(x,"vector")) stop("x must be a vector") if(!is(y,"vector")&&!is.null(y)) stop("y must be a vector") x <- paste(x,"\n",sep="") if(!is.null(y)) y <- paste(y,"\n",sep="") n <- nchar(x) m <- nchar(y) if(n < length | m < length){ warning("String length smaller than length parameter value") return(0)} if (normalized == FALSE){ if(is.null(y)) y <- x return(.Call(stringtv, as.character(x), as.character(y), as.integer(1), as.integer(n), as.integer(m), as.integer(3), as.double(length))) } if (is(x,"vector") && is.null(y) && normalized == TRUE) return(1) if (is(x,"vector") && is(y,"vector") && normalized == TRUE) return(.Call(stringtv, as.character(x), as.character(y), as.integer(1), as.integer(n), as.integer(m), as.integer(3), as.double(length)) / sqrt(.Call(stringtv, as.character(x), as.character(x), as.integer(1), as.integer(n), as.integer(n), as.integer(3), as.double(lambda)) * .Call(stringtv, as.character(y), as.character(y), as.integer(1), as.integer(m), as.integer(m), as.integer(3), as.double(length)))) } }, "boundrange" = { rval <- function(x,y=NULL) { if(!is(x,"vector")) stop("x must be a vector") if(!is(y,"vector")&&!is.null(y)) stop("y must be a vector") x <- paste(x,"\n",sep="") if(!is.null(y)) y <- paste(y,"\n",sep="") if (normalized == FALSE){ if(is.null(y)) y <- x return(.Call(stringtv, as.character(x), as.character(y), as.integer(1), as.integer(nchar(x)), as.integer(nchar(y)), as.integer(4), as.double(lambda))) } if (is(x,"vector") && is.null(y) && normalized == TRUE) return(1) if (is(x,"vector") && is(y,"vector") && normalized == TRUE) return(.Call(stringtv, as.character(x), as.character(y), as.integer(1), as.integer(nchar(x)), as.integer(nchar(y)), as.integer(4), as.double(lambda)) / sqrt(.Call(stringtv, as.character(x), as.character(x), as.integer(1), as.integer(nchar(x)), as.integer(nchar(x)), as.integer(4), as.double(lambda)) * .Call(stringtv, as.character(y), as.character(y), 
as.integer(1), as.integer(nchar(y)), as.integer(nchar(y)), as.integer(4), as.double(lambda)))) } }, "string" = { rval<- function(x, y = NULL) { if(!is(x,"vector")) stop("x must be a vector") if(!is(y,"vector")&&!is.null(y)) stop("y must be a vector") if (is(x,"vector") && is.null(y) && normalized == FALSE) return(.Call(substringk, as.character(x), as.character(x), as.integer(nchar(x)), as.integer(nchar(x)), as.integer(length), as.double(lambda))) if (is(x,"vector") && is(y,"vector") && normalized == FALSE) return(.Call(substringk, as.character(x), as.character(y), as.integer(nchar(x)), as.integer(nchar(y)), as.integer(length), as.double(lambda))) if (is(x,"vector") && is.null(y) && normalized == TRUE) return(1) if (is(x,"vector") && is(y,"vector") && normalized == TRUE) return(.Call(substringk, as.character(x), as.character(y), as.integer(nchar(x)), as.integer(nchar(y)), as.integer(length), as.double(lambda)) / sqrt(.Call(substringk, as.character(x), as.character(x), as.integer(nchar(x)), as.integer(nchar(x)), as.integer(length), as.double(lambda)) * .Call(substringk, as.character(y), as.character(y), as.integer(nchar(y)), as.integer(nchar(y)), as.integer(length), as.double(lambda)))) } }, "fullstring" = { rval<- function(x, y = NULL) { if(!is(x,"vector")) stop("x must be a vector") if(!is(y,"vector")&&!is.null(y)) stop("y must be a vector") if (is(x,"vector") && is.null(y) && normalized == FALSE) return(.Call(fullsubstringk, as.character(x), as.character(x), as.integer(nchar(x)), as.integer(nchar(x)), as.integer(length), as.double(lambda))) if (is(x,"vector") && is(y,"vector") && normalized == FALSE) return(.Call(fullsubstringk, as.character(x), as.character(y), as.integer(nchar(x)), as.integer(nchar(y)), as.integer(length), as.double(lambda))) if (is(x,"vector") && is.null(y) && normalized == TRUE) return(1) if (is(x,"vector") && is(y,"vector") && normalized == TRUE) return(.Call(fullsubstringk, as.character(x), as.character(y), as.integer(nchar(x)), as.integer(nchar(y)), as.integer(length), as.double(lambda)) / sqrt(.Call(fullsubstringk, as.character(x), as.character(x), as.integer(nchar(x)), as.integer(nchar(x)), as.integer(length), as.double(lambda)) * .Call(fullsubstringk, as.character(y), as.character(y), as.integer(nchar(y)), as.integer(nchar(y)), as.integer(length), as.double(lambda)))) } }) return(new("stringkernel",.Data=rval,kpar=list(length=length, lambda =lambda, type = type, normalized = normalized))) } ## show method for kernel functions setMethod("show",signature(object="kernel"), function(object) { switch(class(object), "rbfkernel" = cat(paste("Gaussian Radial Basis kernel function.", "\n","Hyperparameter :" ,"sigma = ", kpar(object)$sigma,"\n")), "laplacekernel" = cat(paste("Laplace kernel function.", "\n","Hyperparameter :" ,"sigma = ", kpar(object)$sigma,"\n")), "besselkernel" = cat(paste("Bessel kernel function.", "\n","Hyperparameter :" ,"sigma = ", kpar(object)$sigma,"order = ",kpar(object)$order, "degree = ", kpar(object)$degree,"\n")), "anovakernel" = cat(paste("Anova RBF kernel function.", "\n","Hyperparameter :" ,"sigma = ", kpar(object)$sigma, "degree = ", kpar(object)$degree,"\n")), "tanhkernel" = cat(paste("Hyperbolic Tangent kernel function.", "\n","Hyperparameters :","scale = ", kpar(object)$scale," offset = ", kpar(object)$offset,"\n")), "polykernel" = cat(paste("Polynomial kernel function.", "\n","Hyperparameters :","degree = ",kpar(object)$degree," scale = ", kpar(object)$scale," offset = ", kpar(object)$offset,"\n")), "vanillakernel" = cat(paste("Linear 
(vanilla) kernel function.", "\n")), "splinekernel" = cat(paste("Spline kernel function.", "\n")), "stringkernel" = { if(kpar(object)$type =="spectrum" | kpar(object)$type =="boundrange") cat(paste("String kernel function.", " Type = ", kpar(object)$type, "\n","Hyperparameters :","sub-sequence/string length = ",kpar(object)$length, "\n")) else if(kpar(object)$type =="exponential" | kpar(object)$type =="constant") cat(paste("String kernel function.", " Type = ", kpar(object)$type, "\n","Hyperparameters :"," lambda = ", kpar(object)$lambda, "\n")) else cat(paste("String kernel function.", " Type = ", kpar(object)$type, "\n","Hyperparameters :","sub-sequence/string length = ",kpar(object)$length," lambda = ", kpar(object)$lambda, "\n")) if(kpar(object)$normalized == TRUE) cat(" Normalized","\n") if(kpar(object)$normalized == FALSE) cat(" Not Normalized","\n")} ) }) ## create accesor function as in "S4 Classses in 15 pages more or less", well.. if (!isGeneric("kpar")){ if (is.function(kpar)) fun <- kpar else fun <- function(object) standardGeneric("kpar") setGeneric("kpar",fun) } setMethod("kpar","kernel", function(object) object@kpar) ## Functions that return usefull kernel calculations (kernel matrix etc.) ## kernelMatrix function takes two or three arguments kernelMatrix <- function(kernel, x, y=NULL) { if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) if(!is(x,"matrix")) stop("x must be a matrix") if(!is(y,"matrix")&&!is.null(y)) stop("y must be a matrix") n <- nrow(x) res1 <- matrix(rep(0,n*n), ncol = n) if(is.null(y)){ for(i in 1:n) { for(j in i:n) { res1[i,j] <- kernel(x[i,],x[j,]) } } res1 <- res1 + t(res1) diag(res1) <- diag(res1)/2 } if (is(y,"matrix")){ m<-dim(y)[1] res1 <- matrix(0,dim(x)[1],dim(y)[1]) for(i in 1:n) { for(j in 1:m) { res1[i,j] <- kernel(x[i,],y[j,]) } } } return(as.kernelMatrix(res1)) } setGeneric("kernelMatrix",function(kernel, x, y = NULL) standardGeneric("kernelMatrix")) kernelMatrix.rbfkernel <- function(kernel, x, y = NULL) { if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) if(!is(y,"matrix")&&!is.null(y)) stop("y must be a matrix or a vector") sigma = kpar(kernel)$sigma n <- dim(x)[1] dota <- rowSums(x*x)/2 if (is(x,"matrix") && is.null(y)){ res <- crossprod(t(x)) for (i in 1:n) res[i,]<- exp(2*sigma*(res[i,] - dota - rep(dota[i],n))) return(as.kernelMatrix(res)) } if (is(x,"matrix") && is(y,"matrix")){ if (!(dim(x)[2]==dim(y)[2])) stop("matrixes must have the same number of columns") m <- dim(y)[1] dotb <- rowSums(y*y)/2 res <- x%*%t(y) for( i in 1:m) res[,i]<- exp(2*sigma*(res[,i] - dota - rep(dotb[i],n))) return(as.kernelMatrix(res)) } } setMethod("kernelMatrix",signature(kernel="rbfkernel"),kernelMatrix.rbfkernel) kernelMatrix.laplacekernel <- function(kernel, x, y = NULL) { if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) if(!is(y,"matrix")&&!is.null(y)) stop("y must be a matrix or a vector") sigma = kpar(kernel)$sigma n <- dim(x)[1] dota <- rowSums(x*x)/2 if (is(x,"matrix") && is.null(y)){ res <- crossprod(t(x)) for (i in 1:n) res[i,]<- exp(-sigma*sqrt(round(-2*(res[i,] - dota - rep(dota[i],n)),9))) return(as.kernelMatrix(res)) } if (is(x,"matrix") && is(y,"matrix")){ if (!(dim(x)[2]==dim(y)[2])) stop("matrixes must have the same number of columns") m <- dim(y)[1] dotb <- rowSums(y*y)/2 res <- x%*%t(y) for( i in 1:m) res[,i]<- exp(-sigma*sqrt(round(-2*(res[,i] - dota - rep(dotb[i],n)),9))) return(as.kernelMatrix(res)) } } 
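## Illustrative usage sketch, assuming the kernlab package is attached; the
## class-specific methods registered here avoid the element-wise double loop of
## the plain kernelMatrix() definition above:
##   rbf <- rbfdot(sigma = 0.05)
##   K   <- kernelMatrix(rbf, as.matrix(iris[, -5]))                            # 150 x 150
##   K12 <- kernelMatrix(rbf, as.matrix(iris[1:10, -5]), as.matrix(iris[, -5])) # 10 x 150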
setMethod("kernelMatrix",signature(kernel="laplacekernel"),kernelMatrix.laplacekernel) kernelMatrix.besselkernel <- function(kernel, x, y = NULL) { if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) if(!is(y,"matrix")&&!is.null(y)) stop("y must be a matrix or a vector") sigma = kpar(kernel)$sigma nu = kpar(kernel)$order ni = kpar(kernel)$degree n <- dim(x)[1] lim <- 1/(gamma(nu+1)*2^(nu)) dota <- rowSums(x*x)/2 if (is(x,"matrix") && is.null(y)){ res <- crossprod(t(x)) for (i in 1:n){ xx <- sigma*sqrt(round(-2*(res[i,] - dota - rep(dota[i],n)),9)) res[i,] <- besselJ(xx,nu)*(xx^(-nu)) res[i,which(xx<10e-5)] <- lim } return(as.kernelMatrix((res/lim)^ni)) } if (is(x,"matrix") && is(y,"matrix")){ if (!(dim(x)[2]==dim(y)[2])) stop("matrixes must have the same number of columns") m <- dim(y)[1] dotb <- rowSums(y*y)/2 res <- x%*%t(y) for( i in 1:m){ xx <- sigma*sqrt(round(-2*(res[,i] - dota - rep(dotb[i],n)),9)) res[,i] <- besselJ(xx,nu)*(xx^(-nu)) res[which(xx<10e-5),i] <- lim } return(as.kernelMatrix((res/lim)^ni)) } } setMethod("kernelMatrix",signature(kernel="besselkernel"),kernelMatrix.besselkernel) kernelMatrix.anovakernel <- function(kernel, x, y = NULL) { if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) if(!is(y,"matrix")&&!is.null(y)) stop("y must be a matrix or a vector") sigma = kpar(kernel)$sigma degree = kpar(kernel)$degree n <- dim(x)[1] if (is(x,"matrix") && is.null(y)){ a <- matrix(0, dim(x)[2], n) res <- matrix(0, n ,n) for (i in 1:n) { a[rep(TRUE,dim(x)[2]), rep(TRUE,n)] <- x[i,] res[i,]<- colSums(exp( - sigma*(a - t(x))^2))^degree } return(as.kernelMatrix(res)) } if (is(x,"matrix") && is(y,"matrix")){ if (!(dim(x)[2]==dim(y)[2])) stop("matrixes must have the same number of columns") m <- dim(y)[1] b <- matrix(0, dim(x)[2],m) res <- matrix(0, dim(x)[1],m) for( i in 1:n) { b[rep(TRUE,dim(x)[2]), rep(TRUE,m)] <- x[i,] res[i,]<- colSums(exp( - sigma*(b - t(y))^2))^degree } return(as.kernelMatrix(res)) } } setMethod("kernelMatrix",signature(kernel="anovakernel"),kernelMatrix.anovakernel) kernelMatrix.polykernel <- function(kernel, x, y = NULL) { if(!is(y,"matrix")&&!is.null(y)) stop("y must be a matrix") scale = kpar(kernel)$scale offset = kpar(kernel)$offset degree = kpar(kernel)$degree if (is(x,"matrix") && is.null(y)) { res <- (scale*crossprod(t(x))+offset)^degree return(as.kernelMatrix(res)) } if (is(x,"matrix") && is(y,"matrix")){ if (!(dim(x)[2]==dim(y)[2])) stop("matrixes must have the same number of columns") res <- (scale*crossprod(t(x),t(y)) + offset)^degree return(as.kernelMatrix(res)) } } setMethod("kernelMatrix",signature(kernel="polykernel"),kernelMatrix.polykernel) kernelMatrix.vanilla <- function(kernel, x, y = NULL) { if(!is(y,"matrix")&&!is.null(y)) stop("y must be a matrix") if (is(x,"matrix") && is.null(y)){ res <- crossprod(t(x)) return(as.kernelMatrix(res)) } if (is(x,"matrix") && is(y,"matrix")){ if (!(dim(x)[2]==dim(y)[2])) stop("matrixes must have the same number of columns") res <- crossprod(t(x),t(y)) return(as.kernelMatrix(res)) } } setMethod("kernelMatrix",signature(kernel="vanillakernel"),kernelMatrix.vanilla) kernelMatrix.tanhkernel <- function(kernel, x, y = NULL) { if(!is(y,"matrix")&&!is.null(y)) stop("y must be a matrix") if (is(x,"matrix") && is.null(y)){ scale = kpar(kernel)$scale offset = kpar(kernel)$offset res <- tanh(scale*crossprod(t(x)) + offset) return(as.kernelMatrix(res)) } if (is(x,"matrix") && is(y,"matrix")){ if (!(dim(x)[2]==dim(y)[2])) stop("matrixes must have the same number of 
columns") res <- tanh(scale*crossprod(t(x),t(y)) + offset) return(as.kernelMatrix(res)) } } setMethod("kernelMatrix",signature(kernel="tanhkernel"),kernelMatrix.tanhkernel) kernelMatrix.splinekernel <- function(kernel, x, y = NULL) { if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) if(!is(y,"matrix")&&!is.null(y)) stop("y must be a matrix or a vector") sigma = kpar(kernel)$sigma degree = kpar(kernel)$degree n <- dim(x)[1] if (is(x,"matrix") && is.null(y)){ a <- matrix(0, dim(x)[2], n) res <- matrix(0, n ,n) x <- t(x) for (i in 1:n) { dr <- x + x[,i] dp <- x * x[,i] dm <- pmin(x,x[,i]) res[i,] <- apply((1 + dp + dp*dm - (dr/2)*dm^2 + (dm^3)/3),2, prod) } return(as.kernelMatrix(res)) } if (is(x,"matrix") && is(y,"matrix")){ if (!(dim(x)[2]==dim(y)[2])) stop("matrixes must have the same number of columns") m <- dim(y)[1] b <- matrix(0, dim(x)[2],m) res <- matrix(0, dim(x)[1],m) x <- t(x) y <- t(y) for( i in 1:n) { dr <- y + x[,i] dp <- y * x[,i] dm <- pmin(y,x[,i]) res[i,] <- apply((1 + dp + dp*dm - (dr/2)*dm^2 + (dm^3)/3),2, prod) } return(as.kernelMatrix(res)) } } setMethod("kernelMatrix",signature(kernel="splinekernel"),kernelMatrix.splinekernel) kernelMatrix.stringkernel <- function(kernel, x, y=NULL) { n <- length(x) res1 <- matrix(rep(0,n*n), ncol = n) normalized = kpar(kernel)$normalized if(is(x,"list")) x <- sapply(x,paste,collapse="") if(is(y,"list")) y <- sapply(y,paste,collapse="") if (kpar(kernel)$type == "sequence" |kpar(kernel)$type == "string"|kpar(kernel)$type == "fullstring") { resdiag <- rep(0,n) if(normalized == TRUE) kernel <- stringdot(length = kpar(kernel)$length, type = kpar(kernel)$type, lambda = kpar(kernel)$lambda, normalized = FALSE) ## y is null if(is.null(y)){ if(normalized == TRUE){ ## calculate diagonal elements first, and use them to normalize for (i in 1:n) resdiag[i] <- kernel(x[[i]],x[[i]]) for(i in 1:n) { for(j in (i:n)[-1]) { res1[i,j] <- kernel(x[[i]],x[[j]])/sqrt(resdiag[i]*resdiag[j]) } } res1 <- res1 + t(res1) diag(res1) <- rep(1,n) } else{ for (i in 1:n) resdiag[i] <- kernel(x[[i]],x[[i]]) for(i in 1:n) { for(j in (i:n)[-1]) { res1[i,j] <- kernel(x[[i]],x[[j]]) } } res1 <- res1 + t(res1) diag(res1) <- resdiag } } if (!is.null(y)){ m <- length(y) res1 <- matrix(0,n,m) resdiag1 <- rep(0,m) if(normalized == TRUE){ for(i in 1:n) resdiag[i] <- kernel(x[[i]],x[[i]]) for(i in 1:m) resdiag1[i] <- kernel(y[[i]],y[[i]]) for(i in 1:n) { for(j in 1:m) { res1[i,j] <- kernel(x[[i]],y[[j]])/sqrt(resdiag[i]*resdiag1[j]) } } } else{ for(i in 1:n) { for(j in 1:m) { res1[i,j] <- kernel(x[[i]],y[[j]]) } } } } return(as.kernelMatrix(res1)) } else { switch(kpar(kernel)$type, "exponential" = sktype <- 2, "constant" = sktype <- 1, "spectrum" = sktype <- 3, "boundrange" = sktype <- 4) if(sktype==3 &(any(nchar(x) < kpar(kernel)$length)|any(nchar(x) < kpar(kernel)$length))) stop("spectral kernel does not accept strings shorter than the length parameter") if(is(x,"list")) x <- unlist(x) if(is(y,"list")) y <- unlist(y) x <- paste(x,"\n",sep="") if(!is.null(y)) y <- paste(y,"\n",sep="") if(is.null(y)) ret <- matrix(0, length(x),length(x)) else ret <- matrix(0,length(x),length(y)) if(is.null(y)){ for(i in 1:length(x)) ret[i,i:length(x)] <- .Call(stringtv, as.character(x[i]), as.character(x[i:length(x)]), as.integer(length(x) - i + 1), as.integer(nchar(x[i])), as.integer(nchar(x[i:length(x)])), as.integer(sktype), as.double(kpar(kernel)$lambda)) ret <- ret + t(ret) diag(ret) <- diag(ret)/2 } else for(i in 1:length(x)) ret[i,] <- .Call(stringtv, 
as.character(x[i]), as.character(y), as.integer(length(y)), as.integer(nchar(x[i])), as.integer(nchar(y)), as.integer(sktype), as.double(kpar(kernel)$lambda)) if(normalized == TRUE){ if(is.null(y)) ret <- t((1/sqrt(diag(ret)))*t(ret*(1/sqrt(diag(ret))))) else{ norm1 <- rep(0,length(x)) norm2 <- rep(0,length(y)) for( i in 1:length(x)) norm1[i] <- .Call(stringtv, as.character(x[i]), as.character(x[i]), as.integer(1), as.integer(nchar(x[i])), as.integer(nchar(x[i])), as.integer(sktype), as.double(kpar(kernel)$lambda)) for( i in 1:length(y)) norm2[i] <- .Call(stringtv, as.character(y[i]), as.character(y[i]), as.integer(1), as.integer(nchar(y[i])), as.integer(nchar(y[i])), as.integer(sktype), as.double(kpar(kernel)$lambda)) ret <- t((1/sqrt(norm2))*t(ret*(1/sqrt(norm1)))) } } } return(as.kernelMatrix(ret)) } setMethod("kernelMatrix",signature(kernel="stringkernel"),kernelMatrix.stringkernel) ## kernelMult computes kernel matrix - vector product ## function computing * z ( %*% z) kernelMult <- function(kernel, x, y=NULL, z, blocksize = 128) { # if(is.function(kernel)) ker <- deparse(substitute(kernel)) # kernel <- do.call(kernel, kpar) if(!is(x,"matrix")) stop("x must be a matrix") if(!is(y,"matrix")&&!is.null(y)) stop("y must be a matrix") if(!is(z,"matrix")&&!is(z,"vector")) stop("z must ba a matrix or a vector") n <- nrow(x) if(is.null(y)) { ## check if z,x match z <- as.matrix(z) if(is.null(y)&&!dim(z)[1]==n) stop("z columns/length do not match x columns") res1 <- matrix(rep(0,n*n), ncol = n) for(i in 1:n) { for(j in i:n) { res1[j,i] <- kernel(x[i,],x[j,]) } } res1 <- res1 + t(res1) diag(res1) <- diag(res1)/2 } if (is(y,"matrix")) { m <- dim(y)[1] z <- as.matrix(z) if(!dim(z)[1] == m) stop("z has wrong dimension") res1 <- matrix(rep.int(0,m*n),ncol=m) for(i in 1:n) { for(j in 1:m) { res1[i,j] <- kernel(x[i,],y[j,]) } } } return(res1%*%z) } setGeneric("kernelMult", function(kernel, x, y=NULL, z, blocksize = 256) standardGeneric("kernelMult")) kernelMult.character <- function(kernel, x, y=NULL, z, blocksize = 256) { return(x%*%z) } setMethod("kernelMult",signature(kernel="character", x="kernelMatrix"),kernelMult.character) kernelMult.rbfkernel <- function(kernel, x, y=NULL, z, blocksize = 256) { if(!is(y,"matrix")&&!is.null(y)&&!is(y,"vector")) stop("y must be a matrix or a vector") if(!is(z,"matrix")&&!is(z,"vector")) stop("z must be a matrix or a vector") if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) sigma <- kpar(kernel)$sigma n <- dim(x)[1] m <- dim(x)[2] nblocks <- floor(n/blocksize) lowerl <- 1 upperl <- 0 dota <- as.matrix(rowSums(x^2)) if (is.null(y)) { z <- as.matrix(z) if(!dim(z)[1]==n) stop("z rows must equal x rows") res <- matrix(rep(0,dim(z)[2]*n), ncol = dim(z)[2]) if(nblocks > 0) { dotab <- rep(1,blocksize)%*%t(dota) for(i in 1:nblocks) { upperl = upperl + blocksize res[lowerl:upperl,] <- exp(sigma*(2*x[lowerl:upperl,]%*%t(x) - dotab - dota[lowerl:upperl]%*%t(rep.int(1,n))))%*%z lowerl <- upperl + 1 } } if(lowerl <= n) res[lowerl:n,] <- exp(sigma*(2*x[lowerl:n,]%*%t(x) - rep.int(1,n+1-lowerl)%*%t(dota) - dota[lowerl:n]%*%t(rep.int(1,n))))%*%z } if(is(y,"matrix")) { n2 <- dim(y)[1] z <- as.matrix(z) if(!dim(z)[1]==n2) stop("z length must equal y rows") res <- matrix(rep(0,dim(z)[2]*n), ncol = dim(z)[2]) dotb <- as.matrix(rowSums(y*y)) if(nblocks > 0) { dotbb <- rep(1,blocksize)%*%t(dotb) for(i in 1:nblocks) { upperl = upperl + blocksize res[lowerl:upperl,] <- exp(sigma*(2*x[lowerl:upperl,]%*%t(y) - dotbb - dota[lowerl:upperl]%*%t(rep.int(1,n2))))%*%z 
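## each pass computes only a blocksize x n2 slice of the kernel matrix
## exp(-sigma * ||x_i - y_j||^2) and multiplies it by z right away, so the full
## n x n2 matrix is never held in memory.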
lowerl <- upperl + 1 } } if(lowerl <= n) res[lowerl:n,] <- exp(sigma*(2*x[lowerl:n,]%*%t(y) - rep.int(1,n+1-lowerl)%*%t(dotb) - dota[lowerl:n]%*%t(rep.int(1,n2))))%*%z } return(res) } setMethod("kernelMult",signature(kernel="rbfkernel"),kernelMult.rbfkernel) kernelMult.laplacekernel <- function(kernel, x, y=NULL, z, blocksize = 256) { if(!is(y,"matrix")&&!is.null(y)&&!is(y,"vector")) stop("y must be a matrix or a vector") if(!is(z,"matrix")&&!is(z,"vector")) stop("z must be a matrix or a vector") if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) sigma <- kpar(kernel)$sigma n <- dim(x)[1] m <- dim(x)[2] nblocks <- floor(n/blocksize) lowerl <- 1 upperl <- 0 dota <- as.matrix(rowSums(x^2)) if (is.null(y)) { z <- as.matrix(z) if(!dim(z)[1]==n) stop("z rows must equal x rows") res <- matrix(rep(0,dim(z)[2]*n), ncol = dim(z)[2]) if(nblocks > 0) { dotab <- rep(1,blocksize)%*%t(dota) for(i in 1:nblocks) { upperl = upperl + blocksize res[lowerl:upperl,] <- exp(-sigma*sqrt(-round(2*x[lowerl:upperl,]%*%t(x) - dotab - dota[lowerl:upperl]%*%t(rep.int(1,n)),9)))%*%z lowerl <- upperl + 1 } } if(lowerl <= n) res[lowerl:n,] <- exp(-sigma*sqrt(-round(2*x[lowerl:n,]%*%t(x) - rep.int(1,n+1-lowerl)%*%t(dota) - dota[lowerl:n]%*%t(rep.int(1,n)),9)))%*%z } if(is(y,"matrix")) { n2 <- dim(y)[1] z <- as.matrix(z) if(!dim(z)[1]==n2) stop("z length must equal y rows") res <- matrix(rep(0,dim(z)[2]*n), ncol = dim(z)[2]) dotb <- as.matrix(rowSums(y*y)) if(nblocks > 0) { dotbb <- rep(1,blocksize)%*%t(dotb) for(i in 1:nblocks) { upperl = upperl + blocksize res[lowerl:upperl,] <- exp(-sigma*sqrt(-round(2*x[lowerl:upperl,]%*%t(y) - dotbb - dota[lowerl:upperl]%*%t(rep.int(1,n2)),9)))%*%z lowerl <- upperl + 1 } } if(lowerl <= n) res[lowerl:n,] <- exp(-sigma*sqrt(-round(2*x[lowerl:n,]%*%t(y) - rep.int(1,n+1-lowerl)%*%t(dotb) - dota[lowerl:n]%*%t(rep.int(1,n2)),9)))%*%z } return(res) } setMethod("kernelMult",signature(kernel="laplacekernel"),kernelMult.laplacekernel) kernelMult.besselkernel <- function(kernel, x, y=NULL, z, blocksize = 256) { if(!is(y,"matrix")&&!is.null(y)&&!is(y,"vector")) stop("y must be a matrix") if(!is(z,"matrix")&&!is(z,"vector")) stop("z must be a matrix or a vector") if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) sigma <- kpar(kernel)$sigma nu <- kpar(kernel)$order ni <- kpar(kernel)$degree n <- dim(x)[1] m <- dim(x)[2] nblocks <- floor(n/blocksize) lowerl <- 1 upperl <- 0 lim <- 1/(gamma(nu+1)*2^(nu)) dota <- as.matrix(rowSums(x^2)) if (is.null(y)) { z <- as.matrix(z) if(!dim(z)[1]==n) stop("z rows must equal x rows") res <- matrix(rep(0,dim(z)[2]*n), ncol = dim(z)[2]) if(nblocks > 0) { dotab <- rep(1,blocksize)%*%t(dota) for(i in 1:nblocks) { upperl = upperl + blocksize xx <- sigma*sqrt(-round(2*x[lowerl:upperl,]%*%t(x) - dotab - dota[lowerl:upperl]%*%t(rep.int(1,n)),9)) res1 <- besselJ(xx,nu)*(xx^(-nu)) res1[which(xx<10e-5)] <- lim res[lowerl:upperl,] <- ((res1/lim)^ni)%*%z lowerl <- upperl + 1 } } if(lowerl <= n) { xx <- sigma*sqrt(-round(2*x[lowerl:n,]%*%t(x) - rep.int(1,n+1-lowerl)%*%t(dota) - dota[lowerl:n]%*%t(rep.int(1,n)),9)) res1 <- besselJ(xx,nu)*(xx^(-nu)) res1[which(xx<10e-5)] <- lim res[lowerl:n,] <- ((res1/lim)^ni)%*%z } } if(is(y,"matrix")) { n2 <- dim(y)[1] z <- as.matrix(z) if(!dim(z)[1]==n2) stop("z length must equal y rows") res <- matrix(rep(0,dim(z)[2]*n), ncol = dim(z)[2]) dotb <- as.matrix(rowSums(y*y)) if(nblocks > 0) { dotbb <- rep(1,blocksize)%*%t(dotb) for(i in 1:nblocks) { upperl = upperl + blocksize xx <- 
sigma*sqrt(-round(2*x[lowerl:upperl,]%*%t(y) - dotbb - dota[lowerl:upperl]%*%t(rep.int(1,n2)),9)) res1 <- besselJ(xx,nu)*(xx^(-nu)) res1[which(xx < 10e-5)] <- lim res[lowerl:upperl,] <- ((res1/lim)^ni)%*%z lowerl <- upperl + 1 } } if(lowerl <= n) { xx <- sigma*sqrt(-round(2*x[lowerl:n,]%*%t(y) - rep.int(1,n+1-lowerl)%*%t(dotb) - dota[lowerl:n]%*%t(rep.int(1,n2)),9)) res1 <- besselJ(xx,nu)*(xx^(-nu)) res1[which(xx < 10e-5)] <- lim res[lowerl:n,] <- ((res1/lim)^ni)%*%z } } return(res) } setMethod("kernelMult",signature(kernel="besselkernel"),kernelMult.besselkernel) kernelMult.anovakernel <- function(kernel, x, y=NULL, z, blocksize = 256) { if(!is(y,"matrix")&&!is.null(y)&&!is(y,"vector")) stop("y must be a matrix or a vector") if(!is(z,"matrix")&&!is(z,"vector")) stop("z must be a matrix or a vector") if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) sigma <- kpar(kernel)$sigma degree <- kpar(kernel)$degree n <- dim(x)[1] m <- dim(x)[2] nblocks <- floor(n/blocksize) lowerl <- 1 upperl <- 0 if (is.null(y)) { z <- as.matrix(z) if(!dim(z)[1]==n) stop("z rows must equal x rows") res <- matrix(rep(0,dim(z)[2]*n), ncol = dim(z)[2]) if(nblocks > 0) { a <- matrix(0,m,blocksize) re <- matrix(0, n, blocksize) for(i in 1:nblocks) { upperl = upperl + blocksize for(j in 1:n) { a[rep(TRUE,m),rep(TRUE,blocksize)] <- x[j,] re[j,] <- colSums(exp( - sigma*(a - t(x[lowerl:upperl,]))^2))^degree } res[lowerl:upperl,] <- t(re)%*%z lowerl <- upperl + 1 } } if(lowerl <= n){ a <- matrix(0,m,n-lowerl+1) re <- matrix(0,n,n-lowerl+1) for(j in 1:n) { a[rep(TRUE,m),rep(TRUE,n-lowerl+1)] <- x[j,] re[j,] <- colSums(exp( - sigma*(a - t(x[lowerl:n,,drop=FALSE]))^2))^degree } res[lowerl:n,] <- t(re)%*%z } } if(is(y,"matrix")) { n2 <- dim(y)[1] nblocks <- floor(n2/blocksize) z <- as.matrix(z) if(!dim(z)[1]==n2) stop("z length must equal y rows") res <- matrix(rep(0,dim(z)[2]*n), ncol = dim(z)[2]) if(nblocks > 0) { b <- matrix(0, m, blocksize) re <- matrix(0, n, blocksize) for(i in 1:nblocks) { upperl = upperl + blocksize for(j in 1:n) { b[rep(TRUE,dim(x)[2]), rep(TRUE,blocksize)] <- x[j,] re[j,]<- colSums(exp( - sigma*(b - t(y[lowerl:upperl,]))^2)^degree) } res[,1] <- res[,1] + re %*%z[lowerl:upperl,] lowerl <- upperl + 1 } } if(lowerl <= n) { b <- matrix(0, dim(x)[2], n2-lowerl+1) re <- matrix(0, n, n2-lowerl+1) for( i in 1:n) { b[rep(TRUE,dim(x)[2]),rep(TRUE,n2-lowerl+1)] <- x[i,] re[i,]<- colSums(exp( - sigma*(b - t(y[lowerl:n2,,drop=FALSE]))^2)^degree) } res[,1] <- res[,1] + re%*%z[lowerl:n2] } } return(res) } setMethod("kernelMult",signature(kernel="anovakernel"),kernelMult.anovakernel) kernelMult.splinekernel <- function(kernel, x, y=NULL, z, blocksize = 256) { if(!is(y,"matrix")&&!is.null(y)&&!is(y,"vector")) stop("y must be a matrix or a vector") if(!is(z,"matrix")&&!is(z,"vector")) stop("z must be a matrix or a vector") n <- dim(x)[1] m <- dim(x)[2] if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) nblocks <- floor(n/blocksize) lowerl <- 1 upperl <- 0 if (is.null(y)) { z <- as.matrix(z) if(!dim(z)[1]==n) stop("z rows must equal x rows") res <- matrix(rep(0,dim(z)[2]*n), ncol = dim(z)[2]) x <- t(x) if(nblocks > 0) { re <- matrix(0, dim(z)[1], blocksize) for(i in 1:nblocks) { upperl = upperl + blocksize for (j in lowerl:upperl) { dr <- x + x[ , j] dp <- x * x[ , j] dm <- pmin(x,x[,j]) re[,j-(i-1)*blocksize] <- apply((1 + dp + dp*dm - (dr/2)*dm^2 + (dm^3)/3),2, prod) } res[lowerl:upperl,] <- crossprod(re,z) lowerl <- upperl + 1 } } if(lowerl <= n){ a <- 
matrix(0,m,n-lowerl+1) re <- matrix(0,dim(z)[1],n-lowerl+1) for(j in lowerl:(n-lowerl+1)) { dr <- x + x[ , j] dp <- x * x[ , j] dm <- pmin(x,x[,j]) re[,j-nblocks*blocksize] <- apply((1 + dp + dp*dm - (dr/2)*dm^2 + (dm^3)/3),2, prod) } res[lowerl:n,] <- crossprod(re,z) } } if(is(y,"matrix")) { n2 <- dim(y)[1] nblocks <- floor(n2/blocksize) z <- as.matrix(z) if(!dim(z)[1]==n2) stop("z length must equal y rows") res <- matrix(rep(0,dim(z)[2]*n), ncol = dim(z)[2]) x <- t(x) y <- t(y) if(nblocks > 0) { re <- matrix(0, dim(z)[1], blocksize) for(i in 1:nblocks) { upperl = upperl + blocksize for(j in lowerl:upperl) { dr <- y + x[ , j] dp <- y * x[ , j] dm <- pmin(y,x[,j]) re[,j-(i-1)*blocksize] <- apply((1 + dp + dp*dm - (dr/2)*dm^2 + (dm^3)/3),2, prod) } res[lowerl:upperl] <- crossprod(re, z) lowerl <- upperl + 1 } } if(lowerl <= n) { b <- matrix(0, dim(x)[2], n-lowerl+1) re <- matrix(0, dim(z)[1], n-lowerl+1) for(j in lowerl:(n-lowerl+1)) { dr <- y + x[, j] dp <- y * x[, j] dm <- pmin(y,x[,j]) re[,j-nblocks*blocksize] <- apply((1 + dp + dp*dm - (dr/2)*dm^2 + (dm^3)/3),2, prod) } res[lowerl:n] <- crossprod(re, z) } } return(res) } setMethod("kernelMult",signature(kernel="splinekernel"),kernelMult.splinekernel) kernelMult.polykernel <- function(kernel, x, y=NULL, z, blocksize = 256) { if(!is(y,"matrix")&&!is.null(y)&&!is(y,"vector")) stop("y must be a matrix") if(!is(z,"matrix")&&!is(z,"vector")) stop("z must be a matrix or a vector") if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) degree <- kpar(kernel)$degree scale <- kpar(kernel)$scale offset <- kpar(kernel)$offset n <- dim(x)[1] m <- dim(x)[2] nblocks <- floor(n/blocksize) lowerl <- 1 upperl <- 0 if (is.null(y)) { z <- as.matrix(z) if(!dim(z)[1]==n) stop("z rows must equal x rows") res <- matrix(rep(0,dim(z)[2]*n), ncol = dim(z)[2]) if(nblocks > 0) for(i in 1:nblocks) { upperl = upperl + blocksize res[lowerl:upperl,] <- ((scale*x[lowerl:upperl,]%*%t(x) + offset)^degree) %*% z lowerl <- upperl + 1 } if(lowerl <= n) res[lowerl:n,] <- ((scale*x[lowerl:n,]%*%t(x) +offset)^degree)%*%z } if(is(y,"matrix")) { n2 <- dim(y)[1] z <- as.matrix(z) if(!dim(z)[1]==n2) stop("z length must equal y rows") res <- matrix(rep(0,dim(z)[2]*n), ncol = dim(z)[2]) if(nblocks > 0) for(i in 1:nblocks) { upperl = upperl + blocksize res[lowerl:upperl,] <- ((scale*x[lowerl:upperl,]%*%t(y) + offset)^degree)%*%z lowerl <- upperl + 1 } if(lowerl <= n) res[lowerl:n,] <- ((scale*x[lowerl:n,]%*%t(y) + offset)^degree)%*%z } return(res) } setMethod("kernelMult",signature(kernel="polykernel"),kernelMult.polykernel) kernelMult.tanhkernel <- function(kernel, x, y=NULL, z, blocksize = 256) { if(!is(y,"matrix")&&!is.null(y)&&!is(y,"vector")) stop("y must be a matrix or a vector") if(!is(z,"matrix")&&!is(z,"vector")) stop("z must be a matrix or a vector") if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) scale <- kpar(kernel)$scale offset <- kpar(kernel)$offset n <- dim(x)[1] m <- dim(x)[2] nblocks <- floor(n/blocksize) lowerl <- 1 upperl <- 0 if (is.null(y)) { z <- as.matrix(z) if(!dim(z)[1]==n) stop("z rows must equal x rows") res <- matrix(rep(0,dim(z)[2]*n), ncol = dim(z)[2]) if(nblocks > 0) for(i in 1:nblocks) { upperl = upperl + blocksize res[lowerl:upperl,] <- tanh(scale*x[lowerl:upperl,]%*%t(x) + offset) %*% z lowerl <- upperl + 1 } if(lowerl <= n) res[lowerl:n,] <- tanh(scale*x[lowerl:n,]%*%t(x) +offset)%*%z } if(is(y,"matrix")) { n2 <- dim(y)[1] z <- as.matrix(z) if(!dim(z)[1]==n2) stop("z length must equal y rows") res <- 
matrix(rep(0,dim(z)[2]*n), ncol = dim(z)[2]) if(nblocks > 0) for(i in 1:nblocks) { upperl = upperl + blocksize res[lowerl:upperl,] <- tanh(scale*x[lowerl:upperl,]%*%t(y) + offset)%*%z lowerl <- upperl + 1 } if(lowerl <= n) res[lowerl:n,] <- tanh(scale*x[lowerl:n,]%*%t(y) + offset)%*%z } return(res) } setMethod("kernelMult",signature(kernel="tanhkernel"),kernelMult.tanhkernel) kernelMult.vanillakernel <- function(kernel, x, y=NULL, z, blocksize = 256) { if(!is(y,"matrix")&&!is.null(y)&&!is(y,"vector")) stop("y must be a matrix or vector") if(!is(z,"matrix")&&!is(z,"vector")) stop("z must be a matrix or a vector") n <- dim(x)[1] m <- dim(x)[2] if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) if (is.null(y)) { z <- as.matrix(z) if(!dim(z)[1]==n) stop("z rows must equal x rows") res <- t(crossprod(crossprod(x,z),t(x))) } if(is(y,"matrix")) { n2 <- dim(y)[1] z <- as.matrix(z) if(!dim(z)[1]==n2) stop("z length must equal y rows") res <- t(crossprod(crossprod(y,z),t(x))) } return(res) } setMethod("kernelMult",signature(kernel="vanillakernel"),kernelMult.vanillakernel) kernelMult.stringkernel <- function(kernel, x, y=NULL, z, blocksize = 256) { if(!is(z,"matrix")&&!is(z,"vector")) stop("z must be a matrix or a vector") normalized = kpar(kernel)$normalized n <- length(x) res1 <- matrix(rep(0,n*n), ncol = n) resdiag <- rep(0,n) if(is(x,"list")) x <- sapply(x,paste,collapse="") if(is(y,"list")) y <- sapply(y,paste,collapse="") if (kpar(kernel)$type == "sequence" |kpar(kernel)$type == "string"|kpar(kernel)$type == "fullstring") { if(normalized == TRUE) kernel <- stringdot(length = kpar(kernel)$length, type = kpar(kernel)$type, lambda = kpar(kernel)$lambda, normalized = FALSE) ## y is null if(is.null(y)){ if(normalized == TRUE){ z <- as.matrix(z) if(dim(z)[1]!= n) stop("z rows must be equal to x length") dz <- dim(z)[2] vres <- matrix(0,n,dz) ## calculate diagonal elements first, and use them to normalize for (i in 1:n) resdiag[i] <- kernel(x[[i]],x[[i]]) for(i in 1:n) { for(j in (i:n)[-1]) { res1[i,j] <- kernel(x[[i]],x[[j]])/sqrt(resdiag[i]*resdiag[j]) } } res1 <- res1 + t(res1) diag(res1) <- rep(1,n) vres <- res1 %*% z } else{ z <- as.matrix(z) if(dim(z)[1]!= n) stop("z rows must be equal to x length") dz <- dim(z)[2] vres <- matrix(0,n,dz) ## calculate diagonal elements first, and use them to normalize for (i in 1:n) resdiag[i] <- kernel(x[[i]],x[[i]]) for(i in 1:n) { for(j in (i:n)[-1]) { res1[i,j] <- kernel(x[[i]],x[[j]]) } } res1 <- res1 + t(res1) diag(res1) <- resdiag vres <- res1 %*% z } } if (!is.null(y)){ if(normalized == TRUE){ nblocks <- floor(n/blocksize) lowerl <- 1 upperl <- 0 m <- length(y) z <- as.matrix(z) if(dim(z)[1]!= m) stop("z rows must be equal to y length") resdiag1 <- rep(0,m) dz <- dim(z)[2] vres <- matrix(0,n,dz) for(i in 1:n) resdiag[i] <- kernel(x[[i]],x[[i]]) for(i in 1:m) resdiag1[i] <- kernel(y[[i]],y[[i]]) if (nblocks > 0){ res1 <- matrix(0,blocksize,m) for(k in 1:nblocks){ upperl <- upperl + blocksize for(i in lowerl:(upperl)) { for(j in 1:m) { res1[i - (k-1)*blocksize,j] <- kernel(x[[i]],y[[j]])/sqrt(resdiag[i]*resdiag1[j]) } } vres[lowerl:upperl,] <- res1 %*% z lowerl <- upperl +1 } } if(lowerl <= n) { res1 <- matrix(0,n-lowerl+1,m) for(i in lowerl:n) { for(j in 1:m) { res1[i - nblocks*blocksize,j] <- kernel(x[[i]],y[[j]])/sqrt(resdiag[i]*resdiag1[j]) } } vres[lowerl:n,] <- res1 %*% z } } else { nblocks <- floor(n/blocksize) lowerl <- 1 upperl <- 0 m <- length(y) z <- as.matrix(z) if(dim(z)[1]!= m) stop("z rows must be equal to y length") 
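## unnormalized case with an explicit y: evaluate the string kernel block-wise
## (blocksize rows of x against all of y) and accumulate res1 %*% z, mirroring
## the normalized branch above but without the sqrt(k(x,x) * k(y,y)) scaling.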
dz <- dim(z)[2] vres <- matrix(0,n,dz) if (nblocks > 0){ res1 <- matrix(0,blocksize,m) for(k in 1:nblocks){ upperl <- upperl + blocksize for(i in lowerl:(upperl)) { for(j in 1:m) { res1[i - (k-1)*blocksize, j] <- kernel(x[[i]],y[[j]]) } } vres[lowerl:upperl,] <- res1 %*% z lowerl <- upperl +1 } } if(lowerl <= n) { res1 <- matrix(0,n-lowerl+1,m) for(i in lowerl:n) { for(j in 1:m) { res1[i - nblocks*blocksize,j] <- kernel(x[[i]],y[[j]]) } } vres[lowerl:n,] <- res1 %*% z } } } } else { switch(kpar(kernel)$type, "exponential" = sktype <- 2, "constant" = sktype <- 1, "spectrum" = sktype <- 3, "boundrange" = sktype <- 4) if(sktype==3 &(any(nchar(x) < kpar(kernel)$length)|any(nchar(x) < kpar(kernel)$length))) stop("spectral kernel does not accept strings shorter than the length parameter") x <- paste(x,"\n",sep="") if(!is.null(y)) y <- paste(y,"\n",sep="") ## y is null if(is.null(y)){ if(normalized == TRUE){ nblocks <- floor(n/blocksize) lowerl <- 1 upperl <- 0 z <- as.matrix(z) if(dim(z)[1]!= n) stop("z rows must be equal to y length") dz <- dim(z)[2] vres <- matrix(0,n,dz) for (i in 1:n) resdiag[i] <- .Call(stringtv, as.character(x[i]), as.character(x[i]), as.integer(1), as.integer(nchar(x[i])), as.integer(nchar(x[i])), as.integer(sktype), as.double(kpar(kernel)$lambda)) if (nblocks > 0){ res1 <- matrix(0,blocksize,n) for(k in 1:nblocks){ upperl <- upperl + blocksize for(i in lowerl:(upperl)) { res1[i - (k-1)*blocksize, ] <- .Call(stringtv, as.character(x[i]), as.character(x), as.integer(length(x)), as.integer(nchar(x[i])), as.integer(nchar(x)), as.integer(sktype), as.double(kpar(kernel)$lambda)) / sqrt(resdiag[i]*resdiag) } vres[lowerl:upperl,] <- res1 %*% z lowerl <- upperl +1 } } if(lowerl <= n) { res1 <- matrix(0,n-lowerl+1,n) for(i in lowerl:n) { res1[i - nblocks*blocksize,] <- .Call(stringtv, as.character(x[i]), as.character(x), as.integer(length(x)), as.integer(nchar(x[i])), as.integer(nchar(x)), as.integer(sktype), as.double(kpar(kernel)$lambda)) / sqrt(resdiag[i]*resdiag) } vres[lowerl:n,] <- res1 %*% z } } else { nblocks <- floor(n/blocksize) lowerl <- 1 upperl <- 0 z <- as.matrix(z) if(dim(z)[1]!= n) stop("z rows must be equal to y length") dz <- dim(z)[2] vres <- matrix(0,n,dz) if (nblocks > 0){ res1 <- matrix(0,blocksize,n) for(k in 1:nblocks){ upperl <- upperl + blocksize for(i in lowerl:(upperl)) { res1[i - (k-1)*blocksize, ] <- .Call(stringtv, as.character(x[i]), as.character(x), as.integer(length(x)), as.integer(nchar(x[i])), as.integer(nchar(x)), as.integer(sktype), as.double(kpar(kernel)$lambda)) } vres[lowerl:upperl,] <- res1 %*% z lowerl <- upperl +1 } } if(lowerl <= n) { res1 <- matrix(0,n-lowerl+1,n) for(i in lowerl:n) { res1[i - nblocks*blocksize,] <- .Call(stringtv, as.character(x[i]), as.character(x), as.integer(length(x)), as.integer(nchar(x[i])), as.integer(nchar(x)), as.integer(sktype), as.double(kpar(kernel)$lambda)) } vres[lowerl:n,] <- res1 %*% z } } } if (!is.null(y)){ if(normalized == TRUE){ nblocks <- floor(n/blocksize) lowerl <- 1 upperl <- 0 m <- length(y) z <- as.matrix(z) if(dim(z)[1]!= m) stop("z rows must be equal to y length") resdiag1 <- rep(0,m) dz <- dim(z)[2] vres <- matrix(0,n,dz) for(i in 1:n) resdiag[i] <- .Call(stringtv, as.character(x[i]), as.character(x[i]), as.integer(1), as.integer(nchar(x[i])), as.integer(nchar(x[i])), as.integer(sktype), as.double(kpar(kernel)$lambda)) for(i in 1:m) resdiag1[i] <- .Call(stringtv, as.character(y[i]), as.character(y[i]), as.integer(1), as.integer(nchar(y[i])), as.integer(nchar(y[i])), as.integer(sktype), 
as.double(kpar(kernel)$lambda)) if (nblocks > 0){ res1 <- matrix(0,blocksize,m) for(k in 1:nblocks){ upperl <- upperl + blocksize for(i in lowerl:(upperl)) { res1[i - (k-1)*blocksize, ] <- .Call(stringtv, as.character(x[i]), as.character(y), as.integer(length(y)), as.integer(nchar(x[i])), as.integer(nchar(y)), as.integer(sktype), as.double(kpar(kernel)$lambda)) / sqrt(resdiag[i]*resdiag1) } vres[lowerl:upperl,] <- res1 %*% z lowerl <- upperl +1 } } if(lowerl <= n) { res1 <- matrix(0,n-lowerl+1,m) for(i in lowerl:n) { res1[i - nblocks*blocksize,] <- .Call(stringtv, as.character(x[i]), as.character(y), as.integer(length(y)), as.integer(nchar(x[i])), as.integer(nchar(y)), as.integer(sktype), as.double(kpar(kernel)$lambda)) / sqrt(resdiag[i]*resdiag1) } vres[lowerl:n,] <- res1 %*% z } } else { nblocks <- floor(n/blocksize) lowerl <- 1 upperl <- 0 m <- length(y) z <- as.matrix(z) if(dim(z)[1]!= m) stop("z rows must be equal to y length") dz <- dim(z)[2] vres <- matrix(0,n,dz) if (nblocks > 0){ res1 <- matrix(0,blocksize,m) for(k in 1:nblocks){ upperl <- upperl + blocksize for(i in lowerl:(upperl)) { res1[i - (k-1)*blocksize, ] <- .Call(stringtv, as.character(x[i]), as.character(y), as.integer(length(y)), as.integer(nchar(x[i])), as.integer(nchar(y)), as.integer(sktype), as.double(kpar(kernel)$lambda)) } vres[lowerl:upperl,] <- res1 %*% z lowerl <- upperl +1 } } if(lowerl <= n) { res1 <- matrix(0,n-lowerl+1,m) for(i in lowerl:n) { res1[i - nblocks*blocksize,] <- .Call(stringtv, as.character(x[i]), as.character(y), as.integer(length(y)), as.integer(nchar(x[i])), as.integer(nchar(y)), as.integer(sktype), as.double(kpar(kernel)$lambda)) } vres[lowerl:n,] <- res1 %*% z } } } } return(vres) } setMethod("kernelMult",signature(kernel="stringkernel"),kernelMult.stringkernel) ## kernelPol return the quadratic form of a kernel matrix ## kernelPol returns the scalar product of x y componentwise with polarities ## of z and k kernelPol <- function(kernel, x, y=NULL, z, k=NULL) { if(!is(x,"matrix")) stop("x must be a matrix") if(!is(y,"matrix")&&!is.null(y)) stop("y must be a matrix") if(!is(z,"matrix")&&!is(z,"vector")) stop("z must ba a matrix or a vector") n <- nrow(x) z <- as.matrix(z) if(!dim(z)[1]==n) stop("z must have the length equal to x colums") res1 <- matrix(rep(0,n*n), ncol = n) if (is.null(y)) { for(i in 1:n) { for(j in i:n) { res1[i,j] <- kernel(x[i,],x[j,])*z[j]*z[i] } } res1 <- res1 + t(res1) diag(res1) <- diag(res1)/2 } if (is(x,"matrix") && is(y,"matrix")){ m <- dim(y)[1] if(is.null(k)) stop("k not specified!") k <- as.matrix(k) if(!dim(x)[2]==dim(y)[2]) stop("matrixes must have the same number of columns") if(!dim(z)[2]==dim(k)[2]) stop("z and k vectors must have the same number of columns") if(!dim(x)[1]==dim(z)[1]) stop("z and x must have the same number of rows") if(!dim(y)[1]==dim(k)[1]) stop("y and k must have the same number of rows") res1 <- matrix(0,dim(x)[1],dim(y)[1]) for(i in 1:n) { for(j in 1:m) { res1[i,j] <- kernel(x[i,],y[j,])*z[i]*k[j] } } } return(res1) } setGeneric("kernelPol", function(kernel, x, y=NULL, z, k = NULL) standardGeneric("kernelPol")) kernelPol.rbfkernel <- function(kernel, x, y=NULL, z, k=NULL) { if(!is(y,"matrix")&&!is.null(y)&&!is(y,"vector")) stop("y must be a matrix a vector or NULL") if(!is(z,"matrix")&&!is(z,"vector")) stop("z must be a matrix or a vector") if(!is(k,"matrix")&&!is(k,"vector")&&!is.null(k)) stop("k must be a matrix or a vector") if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) sigma <- kpar(kernel)$sigma n <- 
dim(x)[1] dota <- rowSums(x*x)/2 z <- as.matrix(z) if(!dim(z)[1]==n) stop("z must have the length equal to x colums") if (is.null(y)) { if(is(z,"matrix")&&!dim(z)[1]==n) stop("z must have size equal to x colums") res <- crossprod(t(x)) for (i in 1:n) res[i,] <- z[i,]*(exp(2*sigma*(res[i,] - dota - rep(dota[i],n)))*z) return(res) } if (is(y,"matrix")) { if(is.null(k)) stop("k not specified!") m <- dim(y)[1] k <- as.matrix(k) if(!dim(k)[1]==m) stop("k must have equal rows to y") if(!dim(x)[2]==dim(y)[2]) stop("matrixes must have the same number of columns") dotb <- rowSums(y*y)/2 res <- x%*%t(y) for( i in 1:m)#2*sigma or sigma res[,i]<- k[i,]*(exp(2*sigma*(res[,i] - dota - rep(dotb[i],n)))*z) return(res) } } setMethod("kernelPol",signature(kernel="rbfkernel"),kernelPol.rbfkernel) kernelPol.laplacekernel <- function(kernel, x, y=NULL, z, k=NULL) { if(!is(y,"matrix")&&!is.null(y)&&!is(y,"vector")) stop("y must be a matrix, vector or NULL") if(!is(z,"matrix")&&!is(z,"vector")) stop("z must be a matrix or a vector") if(!is(k,"matrix")&&!is(k,"vector")&&!is.null(k)) stop("k must be a matrix or a vector") sigma <- kpar(kernel)$sigma if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) n <- dim(x)[1] dota <- rowSums(x*x)/2 z <- as.matrix(z) if(!dim(z)[1]==n) stop("z must have the length equal to x colums") if (is.null(y)) { if(is(z,"matrix")&&!dim(z)[1]==n) stop("z must have size equal to x colums") res <- crossprod(t(x)) for (i in 1:n) res[i,] <- z[i,]*(exp(-sigma*sqrt(-round(2*(res[i,] - dota - rep(dota[i],n)),9)))*z) return(res) } if (is(y,"matrix")) { if(is.null(k)) stop("k not specified!") m <- dim(y)[1] k <- as.matrix(k) if(!dim(k)[1]==m) stop("k must have equal rows to y") if(!dim(x)[2]==dim(y)[2]) stop("matrixes must have the same number of columns") dotb <- rowSums(y*y)/2 res <- x%*%t(y) for( i in 1:m)#2*sigma or sigma res[,i]<- k[i,]*(exp(-sigma*sqrt(-round(2*(res[,i] - dota - rep(dotb[i],n)),9)))*z) return(res) } } setMethod("kernelPol",signature(kernel="laplacekernel"),kernelPol.laplacekernel) kernelPol.besselkernel <- function(kernel, x, y=NULL, z, k=NULL) { if(!is(y,"matrix")&&!is.null(y)&&!is(y,"vector")) stop("y must be a matrix or NULL") if(!is(z,"matrix")&&!is(z,"vector")) stop("z must be a matrix or a vector") if(!is(k,"matrix")&&!is(k,"vector")&&!is.null(k)) stop("k must be a matrix or a vector") sigma <- kpar(kernel)$sigma if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) nu <- kpar(kernel)$order ni <- kpar(kernel)$degree n <- dim(x)[1] lim <- 1/(gamma(nu + 1)*2^nu) dota <- rowSums(x*x)/2 z <- as.matrix(z) if(!dim(z)[1]==n) stop("z must have the length equal to x colums") if (is.null(y)) { if(is(z,"matrix")&&!dim(z)[1]==n) stop("z must have size equal to x colums") res <- crossprod(t(x)) for (i in 1:n) { xx <- sigma*sqrt(-round(2*(res[i,] - dota - rep(dota[i],n)),9)) res[i,] <- besselJ(xx,nu)*(xx^(-nu)) res[i,which(xx < 10e-5)] <- lim res[i,] <- z[i,]*(((res[i,]/lim)^ni)*z) } return(res) } if (is(y,"matrix")) { if(is.null(k)) stop("k not specified!") m <- dim(y)[1] if(!dim(k)[1]==m) stop("k must have equal rows to y") k <- as.matrix(k) if(!dim(x)[2]==dim(y)[2]) stop("matrixes must have the same number of columns") dotb <- rowSums(y*y)/2 res <- x%*%t(y) for( i in 1:m){#2*sigma or sigma xx <- sigma*sqrt(-round(2*(res[,i] - dota - rep(dotb[i],n)),9)) res[,i] <- besselJ(xx,nu)*(xx^(-nu)) res[which(xx<10e-5),i] <- lim res[,i]<- k[i,]*(((res[,i]/lim)^ni)*z) } return(res) } } 
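## ------------------------------------------------------------------
## hedged usage sketch for kernelPol() -- illustration only, wrapped in
## if(FALSE) so it is never executed when the package is loaded; the
## data and object names below are made up.  kernelPol() returns the
## kernel matrix weighted componentwise by the coefficient/label
## vectors, i.e. z_i * k(x_i, x_j) * z_j (or z_i * k(x_i, y_j) * k_j
## when y and k are supplied), the quadratic form used by the solvers.
if(FALSE) {
  rbf <- rbfdot(sigma = 0.5)
  x   <- matrix(rnorm(20), 10, 2)
  z   <- sign(rnorm(10))             ## +/- 1 "labels"
  Q   <- kernelPol(rbf, x, z = z)    ## equals outer(z, z) * kernelMatrix(rbf, x)
}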
setMethod("kernelPol",signature(kernel="besselkernel"),kernelPol.besselkernel) kernelPol.anovakernel <- function(kernel, x, y=NULL, z, k=NULL) { if(!is(y,"matrix")&&!is.null(y)&&!is(y,"vector")) stop("y must be a matrix or NULL") if(!is(z,"matrix")&&!is(z,"vector")) stop("z must be a matrix or a vector") if(!is(k,"matrix")&&!is(k,"vector")&&!is.null(k)) stop("k must be a matrix or a vector") sigma <- kpar(kernel)$sigma degree <- kpar(kernel)$degree if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) n <- dim(x)[1] z <- as.matrix(z) if(!dim(z)[1]==n) stop("z must have the length equal to x colums") if (is.null(y)) { if(is(z,"matrix")&&!dim(z)[1]==n) stop("z must have size equal to x colums") a <- matrix(0, dim(x)[2], n) res <- matrix(0,n,n) for (i in 1:n) { a[rep(TRUE,dim(x)[2]), rep(TRUE,n)] <- x[i,] res[i,]<- z[i,]*((colSums(exp( - sigma*(a - t(x))^2))^degree)*z) } return(res) } if (is(y,"matrix")) { if(is.null(k)) stop("k not specified!") m <- dim(y)[1] k <- as.matrix(k) if(!dim(k)[1]==m) stop("k must have equal rows to y") if(!dim(x)[2]==dim(y)[2]) stop("matrixes must have the same number of columns") b <- matrix(0, dim(x)[2],m) res <- matrix(0, dim(x)[1],m) for( i in 1:n) { b[rep(TRUE,dim(x)[2]), rep(TRUE,m)] <- x[i,] res[i,] <- z[i,]*((colSums(exp( - sigma*(b - t(y))^2))^degree)*k) } return(res) } } setMethod("kernelPol",signature(kernel="anovakernel"),kernelPol.anovakernel) kernelPol.splinekernel <- function(kernel, x, y=NULL, z, k=NULL) { if(!is(y,"matrix")&&!is.null(y)&&!is(y,"vector")) stop("y must be a matrix or NULL") if(!is(z,"matrix")&&!is(z,"vector")) stop("z must be a matrix or a vector") if(!is(k,"matrix")&&!is(k,"vector")&&!is.null(k)) stop("k must be a matrix or a vector") if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) sigma <- kpar(kernel)$sigma degree <- kpar(kernel)$degree n <- dim(x)[1] z <- as.vector(z) if(!(length(z)==n)) stop("z must have the length equal to x colums") if (is.null(y)) { res <- kernelMatrix(kernel,x) return(unclass(z*t(res*z))) } if (is(y,"matrix")) { if(is.null(k)) stop("k not specified!") m <- dim(y)[1] k <- as.vector(k) if(!(length(k)==m)) stop("k must have length equal to rows of y") res <- kernelMatrix(kernel,x,y) return(unclass(k*t(res*z))) } } setMethod("kernelPol",signature(kernel="splinekernel"),kernelPol.splinekernel) kernelPol.polykernel <- function(kernel, x, y=NULL, z, k=NULL) { if(!is(y,"matrix")&&!is.null(y)&&!is(y,"vector")) stop("y must be a matrix or NULL") if(!is(z,"matrix")&&!is(z,"vector")) stop("z must be a matrix or a vector") if(!is(k,"matrix")&&!is(k,"vector")&&!is.null(k)) stop("k must be a matrix or a vector") if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) degree <- kpar(kernel)$degree scale <- kpar(kernel)$scale offset <- kpar(kernel)$offset n <- dim(x)[1] if(is(z,"matrix")) { z <- as.vector(z) } m <- length(z) if(!(m==n)) stop("z must have the length equal to x colums") if (is.null(y)) { res <- z*t(((scale*crossprod(t(x))+offset)^degree)*z) return(res) } if (is(y,"matrix")) { if(is.null(k)) stop("k not specified!") m <- dim(y)[1] k <- as.vector(k) if(!(length(k)==m)) stop("k must have length equal to rows of y") if(!dim(x)[2]==dim(y)[2]) stop("matrixes must have the same number of columns") res<- k*t(((scale*x%*%t(y) + offset)^degree)*z) return(res) } } setMethod("kernelPol",signature(kernel="polykernel"),kernelPol.polykernel) kernelPol.tanhkernel <- function(kernel, x, y=NULL, z, k=NULL) { if(!is(y,"matrix")&&!is.null(y)&&!is(y,"vector")) 
stop("y must be a matrix, vector or NULL") if(!is(z,"matrix")&&!is(z,"vector")) stop("z must be a matrix or a vector") if(!is(k,"matrix")&&!is(k,"vector")&&!is.null(k)) stop("k must be a matrix or a vector") if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) scale <- kpar(kernel)$scale offset <- kpar(kernel)$offset n <- dim(x)[1] if(is(z,"matrix")) { z <- as.vector(z) } m <- length(z) if(!(m==n)) stop("z must have the length equal to x colums") if (is.null(y)) { res <- z*t(tanh(scale*crossprod(t(x))+offset)*z) return(res) } if (is(y,"matrix")) { if(is.null(k)) stop("k not specified!") m <- dim(y)[1] k <- as.vector(k) if(!(length(k)==m)) stop("k must have length equal rows to y") if(!dim(x)[2]==dim(y)[2]) stop("matrixes x, y must have the same number of columns") res<- k*t(tanh(scale*x%*%t(y) + offset)*z) return(res) } } setMethod("kernelPol",signature(kernel="tanhkernel"),kernelPol.tanhkernel) kernelPol.vanillakernel <- function(kernel, x, y=NULL, z, k=NULL) { if(!is(y,"matrix")&&!is.null(y)&&!is(y,"vector")) stop("y must be a matrix, vector or NULL") if(!is(z,"matrix")&&!is(z,"vector")) stop("z must be a matrix or a vector") if(!is(k,"matrix")&&!is(k,"vector")&&!is.null(k)) stop("k must be a matrix or a vector") n <- dim(x)[1] if(is(z,"matrix")) { z <- as.vector(z) } m <- length(z) if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) if(!(m==n)) stop("z must have the length equal to x colums") if (is.null(y)) { res <- z*t(crossprod(t(x))*z) return(res) } if (is(y,"matrix")) { if(is.null(k)) stop("k not specified!") m <- dim(y)[1] k <- as.vector(k) if(!length(k)==m) stop("k must have length equal rows to y") if(!dim(x)[2]==dim(y)[2]) stop("matrixes x, y must have the same number of columns") for( i in 1:m) res<- k*t(x%*%t(y)*z) return(res) } } setMethod("kernelPol",signature(kernel="vanillakernel"),kernelPol.vanillakernel) kernelPol.stringkernel <- function(kernel, x, y=NULL ,z ,k=NULL) { n <- length(x) res1 <- matrix(rep(0,n*n), ncol = n) resdiag <- rep(0,n) if(is(x,"list")) x <- sapply(x,paste,collapse="") if(is(y,"list")) y <- sapply(y,paste,collapse="") normalized = kpar(kernel)$normalized if(normalized == TRUE) kernel <- stringdot(length = kpar(kernel)$length, type = kpar(kernel)$type, lambda = kpar(kernel)$lambda, normalized = FALSE) z <- as.matrix(z) ## y is null if (kpar(kernel)$type == "sequence" |kpar(kernel)$type == "string"|kpar(kernel)$type == "fullstring") { if(is.null(y)){ if(normalized == TRUE){ ## calculate diagonal elements first, and use them to normalize for (i in 1:n) resdiag[i] <- kernel(x[[i]],x[[i]]) for(i in 1:n) { for(j in (i:n)[-1]) { res1[i,j] <- (z[i,]*kernel(x[[i]],x[[j]])*z[j,])/sqrt(resdiag[i]*resdiag[j]) } } res1 <- res1 + t(res1) diag(res1) <- z^2 } else { for (i in 1:n) resdiag[i] <- kernel(x[[i]],x[[i]]) for(i in 1:n) { for(j in (i:n)[-1]) { res1[i,j] <- (z[i,]*kernel(x[[i]],x[[j]])*z[j,]) } } res1 <- res1 + t(res1) diag(res1) <- resdiag * z^2 } } if (!is.null(y)){ if(normalized == TRUE){ m <- length(y) res1 <- matrix(0,n,m) resdiag1 <- rep(0,m) k <- as.matrix(k) for(i in 1:n) resdiag[i] <- kernel(x[[i]],x[[i]]) for(i in 1:m) resdiag1[i] <- kernel(y[[i]],y[[i]]) for(i in 1:n) { for(j in 1:m) { res1[i,j] <- (z[i,]*kernel(x[[i]],y[[j]])*k[j,])/sqrt(resdiag[i]*resdiag1[j]) } } } } else{ m <- length(y) res1 <- matrix(0,n,m) k <- as.matrix(k) for(i in 1:n) { for(j in 1:m) { res1[i,j] <- (z[i,]*kernel(x[[i]],y[[j]])*k[j,]) } } } } else { switch(kpar(kernel)$type, "exponential" = sktype <- 2, "constant" = sktype <- 1, 
"spectrum" = sktype <- 3, "boundrange" = sktype <- 4) if(is(x,"list")) x <- unlist(x) if(is(y,"list")) y <- unlist(y) x <- paste(x,"\n",seq="") if(!is.null(y)) y <- paste(y,"\n",seq="") if(is.null(y)) ret <- matrix(0, length(x),length(x)) else ret <- matrix(0,length(x),length(y)) if(is.null(y)){ for( i in 1:length(x)) ret[i,] <- .Call(stringtv, as.character(x[i]), as.character(x), as.integer(length(x)), as.integer(nchar(x[i])), as.integer(nchar(x)), as.integer(sktype), as.double(kpar(kernel)$lambda)) res1 <- k*ret*k } else{ for( i in 1:length(x)) ret[i,] <- .Call(stringtv, as.character(x[i]), as.character(y), as.integer(length(x)), as.integer(nchar(x[i])), as.integer(nchar(y)), as.integer(sktype), as.double(kpar(kernel)$lambda)) res1 <- k*ret*z } if(normalized == TRUE){ if(is.null(y)){ ret <- t((1/sqrt(diag(ret)))*t(ret*(1/sqrt(diag(ret))))) res1 <- k*ret*k } else{ norm1 <- rep(0,length(x)) norm2 <- rep(0,length(y)) for( i in 1:length(x)) norm1[i] <- .Call(stringtv, as.character(x[i]), as.character(x[i]), as.integer(1), as.integer(nchar(x[i])), as.integer(nchar(x[i])), as.integer(sktype), as.double(kpar(kernel)$lambda)) for( i in 1:length(y)) norm2[i] <- .Call(stringtv, as.character(y[i]), as.character(y[i]), as.integer(1), as.integer(nchar(y[i])), as.integer(nchar(y[i])), as.integer(sktype), as.double(kpar(kernel)$lambda)) ret <- t((1/sqrt(norm2))*t(ret*(1/sqrt(norm1)))) res1 <- k*ret*z } } } return(res1) } setMethod("kernelPol",signature(kernel="stringkernel"),kernelPol.stringkernel) ## kernelFast returns the kernel matrix, its usefull in algorithms ## which require iterative kernel matrix computations kernelFast <- function(kernel, x, y, a) { return(kernelMatrix(kernel,x,y)) } setGeneric("kernelFast",function(kernel, x, y, a) standardGeneric("kernelFast")) kernelFast.rbfkernel <- function(kernel, x, y, a) { if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) if(!is(y,"matrix")) stop("y must be a matrix or a vector") sigma = kpar(kernel)$sigma n <- dim(x)[1] dota <- a/2 if (is(x,"matrix") && is(y,"matrix")){ if (!(dim(x)[2]==dim(y)[2])) stop("matrixes must have the same number of columns") m <- dim(y)[1] dotb <- rowSums(y*y)/2 res <- x%*%t(y) for( i in 1:m) res[,i]<- exp(2*sigma*(res[,i] - dota - rep(dotb[i],n))) return(res) } } setMethod("kernelFast",signature(kernel="rbfkernel"),kernelFast.rbfkernel) kernelFast.laplacekernel <- function(kernel, x, y, a) { if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) if(!is(y,"matrix")) stop("y must be a matrix or a vector") sigma = kpar(kernel)$sigma n <- dim(x)[1] dota <- a/2 if (is(x,"matrix") && is(y,"matrix")){ if (!(dim(x)[2]==dim(y)[2])) stop("matrixes must have the same number of columns") m <- dim(y)[1] dotb <- rowSums(y*y)/2 res <- x%*%t(y) for( i in 1:m) res[,i]<- exp(-sigma*sqrt(round(-2*(res[,i] - dota - rep(dotb[i],n)),9))) return(res) } } setMethod("kernelFast",signature(kernel="laplacekernel"),kernelFast.laplacekernel) kernelFast.besselkernel <- function(kernel, x, y, a) { if(is(x,"vector")) x <- as.matrix(x) if(is(y,"vector")) y <- as.matrix(y) if(!is(y,"matrix")) stop("y must be a matrix or a vector") sigma = kpar(kernel)$sigma nu = kpar(kernel)$order ni = kpar(kernel)$degree n <- dim(x)[1] lim <- 1/(gamma(nu+1)*2^(nu)) dota <- a/2 if (is(x,"matrix") && is(y,"matrix")){ if (!(dim(x)[2]==dim(y)[2])) stop("matrixes must have the same number of columns") m <- dim(y)[1] dotb <- rowSums(y*y)/2 res <- x%*%t(y) for( i in 1:m){ xx <- sigma*sqrt(round(-2*(res[,i] - dota - rep(dotb[i],n)),9)) 
res[,i] <- besselJ(xx,nu)*(xx^(-nu)) res[which(xx<10e-5),i] <- lim } return((res/lim)^ni) } } setMethod("kernelFast",signature(kernel="besselkernel"),kernelFast.besselkernel) kernelFast.anovakernel <- function(kernel, x, y, a) { return(kernelMatrix(kernel,x,y)) } setMethod("kernelFast",signature(kernel="anovakernel"),kernelFast.anovakernel) kernelFast.polykernel <- function(kernel, x, y, a) { return(kernelMatrix(kernel,x,y)) } setMethod("kernelFast",signature(kernel="polykernel"),kernelFast.polykernel) kernelFast.vanilla <- function(kernel, x, y, a) { return(kernelMatrix(kernel,x,y)) } setMethod("kernelFast",signature(kernel="vanillakernel"),kernelFast.vanilla) kernelFast.tanhkernel <- function(kernel, x, y, a) { return(kernelMatrix(kernel,x,y)) } setMethod("kernelFast",signature(kernel="tanhkernel"),kernelFast.tanhkernel) kernelFast.stringkernel <- function(kernel, x, y, a) { return(kernelMatrix(kernel,x,y)) } setMethod("kernelFast",signature(kernel="stringkernel"),kernelFast.stringkernel) kernelFast.splinekernel <- function(kernel, x, y, a) { return(kernelMatrix(kernel,x,y)) } setMethod("kernelFast",signature(kernel="splinekernel"),kernelFast.splinekernel) kernlab/R/onlearn.R0000644000175100001440000001667712560371302013710 0ustar hornikusers## kernel based on-line learning algorithms for classification, novelty detection and regression. ## ## created 15.09.04 alexandros ## updated setGeneric("onlearn",function(obj, x, y = NULL, nu = 0.2, lambda = 1e-4) standardGeneric("onlearn")) setMethod("onlearn", signature(obj = "onlearn"), function(obj , x, y = NULL, nu = 0.2, lambda = 1e-4) { if(onstart(obj) == 1 && onstop(obj) < buffer(obj)) buffernotfull <- TRUE else buffernotfull <- FALSE if(is.vector(x)) x <- matrix(x,,length(x)) d <- dim(x)[2] for (i in 1:dim(x)[1]) { xt <- x[i,,drop=FALSE] yt <- y[i] if(type(obj)=="novelty") { phi <- fit(obj) if(phi < 0) { alpha(obj) <- (1-lambda) * alpha(obj) if(buffernotfull) onstop(obj) <- onstop(obj) + 1 else{ onstop(obj) <- onstop(obj)%%buffer(obj) + 1 onstart(obj) <- onstart(obj)%%buffer(obj) +1 } alpha(obj)[onstop(obj)] <- lambda xmatrix(obj)[onstop(obj),] <- xt rho(obj) <- rho(obj) + lambda*(nu-1) } else rho(obj) <- rho(obj) + lambda*nu rho(obj) <- max(rho(obj), 0) if(onstart(obj) == 1 && onstop(obj) < buffer(obj)) fit(obj) <- drop(kernelMult(kernelf(obj), xt, matrix(xmatrix(obj)[1:onstop(obj),],ncol=d), matrix(alpha(obj)[1:onstop(obj)],ncol=1)) - rho(obj)) else fit(obj) <- drop(kernelMult(kernelf(obj), xt, xmatrix(obj), matrix(alpha(obj),ncol=1)) - rho(obj)) } if(type(obj)=="classification") { if(is.null(pattern(obj)) && is.factor(y)) pattern(obj) <- yt if(!is.null(pattern(obj))) if(pattern(obj) == yt) yt <- 1 else yt <- -1 phi <- fit(obj) alpha(obj) <- (1-lambda) * alpha(obj) if(yt*phi < rho(obj)) { if(buffernotfull) onstop(obj) <- onstop(obj) + 1 else{ onstop(obj) <- onstop(obj)%%buffer(obj) + 1 onstart(obj) <- onstart(obj)%%buffer(obj) +1 } alpha(obj)[onstop(obj)] <- lambda*yt b(obj) <- b(obj) + lambda*yt xmatrix(obj)[onstop(obj),] <- xt rho(obj) <- rho(obj) + lambda*(nu-1) ## (1-nu) ?? 
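## descriptive note: when the current point violates the margin
## (yt * phi < rho) it is written into the circular buffer with
## coefficient lambda * yt (all stored coefficients having just been
## scaled by 1 - lambda) and the margin parameter rho is decreased;
## otherwise rho is increased below -- a stochastic gradient step in
## the spirit of the NORMA on-line algorithm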
} else rho(obj) <- rho(obj) + lambda*nu rho(obj) <- max(rho(obj), 0) if(onstart(obj) == 1 && onstop(obj) < buffer(obj)) fit(obj) <- drop(kernelMult(kernelf(obj), xt, xmatrix(obj)[1:onstop(obj),,drop=FALSE], matrix(alpha(obj)[1:onstop(obj)],ncol=1)) + b(obj)) else fit(obj) <-drop(kernelMult(kernelf(obj), xt, xmatrix(obj), matrix(alpha(obj),ncol=1)) + b(obj)) } if(type(obj)=="regression") { alpha(obj) <- (1-lambda) * alpha(obj) phi <- fit(obj) if(abs(-phi) < rho(obj)) { if(buffernotfull) onstop(obj) <- onstop(obj) + 1 else{ onstop(obj) <- onstop(obj)%%buffer(obj) + 1 onstart(obj) <- onstart(obj)%% buffer(obj) +1 } alpha(obj)[onstop(obj)] <- sign(yt-phi)*lambda xmatrix(obj)[onstop(obj),] <- xt rho(obj) <- rho(obj) + lambda*(1-nu) ## (1-nu) ?? } else{ rho(obj) <- rho(obj) - lambda*nu alpha(obj)[onstop(obj)] <- sign(yt-phi)/rho(obj) } if(onstart(obj) == 1 && onstop(obj) < buffer(obj)) fit(obj) <- drop(kernelMult(kernelf(obj), xt, matrix(xmatrix(obj)[1:onstop(obj),],ncol=d), matrix(alpha(obj)[1:onstop(obj)],ncol=1)) + b(obj)) else fit(obj) <- drop(kernelMult(kernelf(obj), xt, xmatrix(obj), matrix(alpha(obj),ncol=1)) + b(obj)) } } return(obj) }) setGeneric("inlearn",function(d, kernel = "rbfdot", kpar = list(sigma=0.1), type = "novelty", buffersize = 1000) standardGeneric("inlearn")) setMethod("inlearn", signature(d = "numeric"), function(d ,kernel = "rbfdot", kpar = list(sigma=0.1), type = "novelty", buffersize = 1000) { obj <- new("onlearn") if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") type(obj) <- match.arg(type,c("novelty","classification","regression")) xmatrix(obj) <- matrix(0,buffersize,d) kernelf(obj) <- kernel onstart(obj) <- 1 onstop(obj) <- 1 fit(obj) <- 0 b(obj) <- 0 alpha(obj) <- rep(0, buffersize) rho(obj) <- 0 buffer(obj) <- buffersize return(obj) }) setMethod("show","onlearn", function(object){ cat("On-line learning object of class \"onlearn\"","\n") cat("\n") cat(paste("Learning problem :", type(object), "\n")) cat cat(paste("Data dimensions :", dim(xmatrix(object))[2], "\n")) cat(paste("Buffersize :", buffer(object), "\n")) cat("\n") show(kernelf(object)) }) setMethod("predict",signature(object="onlearn"), function(object, x) { if(is.vector(x)) x<- matrix(x,1) d <- dim(xmatrix(object))[2] if(type(object)=="novelty") { if(onstart(object) == 1 && onstop(object) < buffer(object)) res <- drop(kernelMult(kernelf(object), x, matrix(xmatrix(object)[1:onstop(object),],ncol= d), matrix(alpha(object)[1:onstop(object)],ncol=1)) - rho(object)) else res <- drop(kernelMult(kernelf(object), x, matrix(xmatrix(object),ncol=d), matrix(alpha(object),ncol=1)) - rho(object)) } if(type(object)=="classification") { if(onstart(object) == 1 && onstop(object) < buffer(object)) res <- drop(kernelMult(kernelf(object), x, matrix(xmatrix(object)[1:onstop(object),],ncol=d), matrix(alpha(object)[1:onstop(object)],ncol=1)) + b(object)) else res <- drop(kernelMult(kernelf(object), x, matrix(xmatrix(object),ncol=d), matrix(alpha(object),ncol=1)) + b(object)) } if(type(object)=="regression") { if(onstart(object) == 1 && onstop(object) < buffer(object)) res <- drop(kernelMult(kernelf(object), x, matrix(xmatrix(object)[1:onstop(object),],ncol=d), matrix(alpha(object)[1:onstop(object)],ncol=1)) + b(object)) else res <- drop(kernelMult(kernelf(object), x, matrix(xmatrix(object),ncol=d), matrix(alpha(object),ncol=1)) + b(object)) } return(res) }) 
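## ------------------------------------------------------------------
## hedged usage sketch of the on-line interface -- illustration only,
## wrapped in if(FALSE) so it never runs at load time; the data below
## is synthetic.  An empty "onlearn" object is set up with inlearn()
## and then updated one observation at a time with onlearn():
if(FALSE) {
  x  <- rbind(matrix(rnorm(100), , 2), matrix(rnorm(100) + 3, , 2))
  y  <- c(rep(1, 50), rep(-1, 50))
  on <- inlearn(2, kernel = "rbfdot", kpar = list(sigma = 0.2),
                type = "classification")
  for (i in sample(1:100))
    on <- onlearn(on, x[i, ], y[i], nu = 0.03, lambda = 0.1)
  table(y, sign(predict(on, x)))
}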
kernlab/R/kmmd.R0000644000175100001440000002030214221632765013167 0ustar hornikusers## calculates the kernel maximum mean discrepancy for samples from two distributions ## author: alexandros karatzoglou setGeneric("kmmd",function(x,...) standardGeneric("kmmd")) setMethod("kmmd", signature(x = "matrix"), function(x, y, kernel="rbfdot",kpar="automatic", alpha = 0.05, asymptotic = FALSE, replace = TRUE, ntimes = 150, frac = 1, ...) { x <- as.matrix(x) y <- as.matrix(y) res <- new("kmmd") if(is.character(kernel)){ kernel <- match.arg(kernel,c("rbfdot","polydot","tanhdot","vanilladot","laplacedot","besseldot","anovadot","splinedot","matrix")) if(kernel == "matrix") if(dim(x)[1]==dim(x)[2]) return(kmmd(x= as.kernelMatrix(x), y = y, Kxy = as.kernelMatrix(x)%*%y, alpha = 0.05, asymptotic = FALSE, replace = TRUE, ntimes = 100, frac = 1, ...)) else stop(" kernel matrix not square!") if(is.character(kpar)) if((kernel == "tanhdot" || kernel == "vanilladot" || kernel == "polydot"|| kernel == "besseldot" || kernel== "anovadot"|| kernel=="splinedot") && kpar=="automatic" ) { cat (" Setting default kernel parameters ","\n") kpar <- list() } } if (!is.function(kernel)) if (!is.list(kpar)&&is.character(kpar)&&(is(kernel, "laplacedot")|| kernel=="rbfdot")){ kp <- match.arg(kpar,"automatic") if(kp=="automatic") kpar <- list(sigma=sigest(rbind(x,y),scaled=FALSE)[2]) cat("Using automatic sigma estimation (sigest) for RBF or laplace kernel","\n") } if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") m <- dim(x)[1] n <- dim(y)[1] N <- max(m,n) M <- min(m,n) Kxx <- kernelMatrix(kernel,x) Kyy <- kernelMatrix(kernel,y) Kxy <- kernelMatrix(kernel,x,y) resmmd <- .submmd(Kxx, Kyy, Kxy, alpha) H0(res) <- (resmmd$mmd1 > resmmd$D1) Radbound(res) <- resmmd$D1 Asymbound(res) <- 0 mmdstats(res)[1] <- resmmd$mmd1 mmdstats(res)[2] <- resmmd$mmd3 if(asymptotic){ boundA <- .submmd3bound(Kxx, Kyy, Kxy, alpha, frac, ntimes, replace) AsympH0(res) <- (resmmd$mmd3 > boundA) Asymbound(res) <- boundA } kernelf(res) <- kernel return(res) }) setMethod("kmmd",signature(x="list"), function(x, y, kernel="stringdot",kpar=list(type="spectrum",length=4), alpha = 0.05, asymptotic = FALSE, replace = TRUE, ntimes = 150, frac = 1, ...) { if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") Kxx <- kernelMatrix(kernel,x) Kyy <- kernelMatrix(kernel,y) Kxy <- kernelMatrix(kernel,x,y) ret <- kmmd(x=Kxx,y = Kyy,Kxy=Kxy, alpha=alpha, asymptotic= asymptotic, replace = replace, ntimes = ntimes, frac= frac) kernelf(ret) <- kernel return(ret) }) setMethod("kmmd",signature(x="kernelMatrix"), function (x, y, Kxy, alpha = 0.05, asymptotic = FALSE, replace = TRUE, ntimes = 100, frac = 1, ...) { res <- new("kmmd") resmmd <- .submmd(x, y, Kxy, alpha) H0(res) <- (resmmd$mmd1 > resmmd$D1) Radbound(res) <- resmmd$D1 Asymbound(res) <- 0 mmdstats(res)[1] <- resmmd$mmd1 mmdstats(res)[2] <- resmmd$mmd3 if(asymptotic){ boundA <- .submmd3bound(x, y, Kxy, alpha, frac, ntimes, replace) AsympH0(res) <- (resmmd$mmd1 > boundA) Asymbound(res) <- boundA } kernelf(res) <- " Kernel matrix used as input." 
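## the filled "kmmd" object carries the 1st and 3rd order MMD
## statistics (mmdstats), the Rademacher bound (Radbound) together
## with the corresponding test decision (H0) and, when asymptotic is
## TRUE, the bootstrapped 3rd order bound (Asymbound / AsympH0)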
return(res) }) .submmd <- function(Kxx,Kyy, Kxy, alpha) { m <- dim(Kxx)[1] n <- dim(Kyy)[1] N <- max(m,n) M <- min(m,n) sumKxx <- sum(Kxx) if(m!=n) sumKxxM <- sum(Kxx[1:M,1:M]) else sumKxxM <- sumKxx dgxx <- diag(Kxx) sumKxxnd <- sumKxx - sum(dgxx) R <- max(dgxx) RM <- max(dgxx[1:M]) hu <- colSums(Kxx[1:M,1:M]) - dgxx[1:M] sumKyy <- sum(Kyy) if(m!=n) sumKyyM <- sum(Kyy[1:M,1:M]) else sumKyyM <- sumKyy dgyy <- diag(Kyy) sumKyynd <- sum(Kyy) - sum(dgyy) R <- max(R,dgyy) RM <- max(RM,dgyy[1:M]) # RM instead of R in original hu <- hu + colSums(Kyy[1:M,1:M]) - dgyy[1:M] sumKxy <- sum(Kxy) if (m!=n) sumKxyM <- sum(Kxy[1:M,1:M]) else sumKxyM <- sumKxy dg <- diag(Kxy) # up to M only hu <- hu - colSums(Kxy[1:M,1:M]) - colSums(t(Kxy[1:M,1:M])) + 2*dg # one sided sum mmd1 <- sqrt(max(0,sumKxx/(m*m) + sumKyy/(n*n) - 2/m/n* sumKxy)) mmd3 <- sum(hu)/M/(M-1) D1 <- 2*sqrt(RM/M)+sqrt(log(1/alpha)*4*RM/M) return(list(mmd1=mmd1,mmd3=mmd3,D1=D1)) } .submmd3bound <- function(Kxx,Kyy, Kxy, alpha, frac, ntimes, replace) { ## implements the bootstrapping approach to the MMD3 bound by shuffling ## the kernel matrix ## frac : fraction of data used for bootstrap ## ntimes : how many times MMD is to be evaluated m <- dim(Kxx)[1] n <- dim(Kyy)[1] M <- min(m,n) N <- max(m,n) poslabels <- 1:m neglabels <- (m+1):(m+n) ## bootstrap bootmmd3 <- rep(0,ntimes) for (i in 1:ntimes) { nsamples <- ceiling(frac*min(m,n)) xinds <- sample(1:m,nsamples,replace=replace) yinds <- sample(1:n,nsamples,replace=replace) newlab <- c(poslabels[xinds],neglabels[yinds]) samplenew <- sample(newlab, length(newlab), replace=FALSE) xinds <- samplenew[1:nsamples] yinds <- samplenew[(nsamples+1):length(samplenew)] newm <- length(xinds) newn <- length(yinds) newM <- min(newm,newn) ##get new kernel matrices (without concat to big matrix to save memory) xind1 <- xinds[xinds<=m] xind2 <- xinds[xinds>m]- m yind1 <- yinds[yinds<=m] yind2 <- yinds[yinds>m]-m ##Kxx (this should be implemented with kernelMult for memory efficiency) nKxx <- rbind(cbind(Kxx[xind1,xind1],Kxy[xind1,xind2]), cbind(t(Kxy[xind1,xind2]),Kyy[xind2,xind2])) dgxx <- diag(nKxx) hu <- colSums(nKxx[1:newM,1:newM]) - dgxx[1:newM] # one sided sum rm(nKxx) #Kyy nKyy <- rbind(cbind(Kxx[yind1,yind1],Kxy[yind1,yind2]), cbind(t(Kxy[yind1,yind2]), Kyy[yind2,yind2])) dgyy <- diag(nKyy) hu <- hu + colSums(nKyy[1:newM,1:newM]) - dgyy[1:newM] rm(nKyy) ## Kxy nKxy <- rbind(cbind(Kxx[yind1,xind1],Kxy[yind1,xind2]), cbind(t(Kxy[xind1,yind2]),Kyy[yind2,xind2])) dg <- diag(nKxy) hu <- hu - colSums(nKxy[1:newM,1:newM]) - colSums(t(nKxy[1:newM,1:newM])) + 2*dg rm(nKxy) ## now calculate mmd3 bootmmd3[i] <- sum(hu)/newM/(newM-1) } bootmmd3 <- sort(bootmmd3, decreasing=TRUE); aind <- floor(alpha*ntimes) ## better less than too much (-> floor); ## take threshold in between aind and the next smaller value: bound <- sum(bootmmd3[c(aind,aind+1)])/2; return(bound) } setMethod("show","kmmd", function(object){ cat("Kernel Maximum Mean Discrepancy object of class \"kmmd\"","\n","\n") show(kernelf(object)) if(is.logical(object@H0)){ cat("\n") cat("\n","H0 Hypothesis rejected : ", paste(H0(object))) cat("\n","Rademacher bound : ", paste(Radbound(object))) } cat("\n") if(Asymbound(object)!=0){ cat("\n","H0 Hypothesis rejected (based on Asymptotic bound): ", paste(AsympH0(object))) cat("\n","Asymptotic bound : ", paste(Asymbound(object))) } cat("\n","1st and 3rd order MMD Statistics : ", paste( mmdstats(object))) cat("\n") }) kernlab/R/lssvm.R0000644000175100001440000005505514221633137013412 0ustar hornikusers## reduced 
least squares support vector machines ## author : alexandros setGeneric("lssvm", function(x, ...) standardGeneric("lssvm")) setMethod("lssvm",signature(x="formula"), function (x, data=NULL, ..., subset, na.action = na.omit, scaled = TRUE){ cl <- match.call() m <- match.call(expand.dots = FALSE) if (is.matrix(eval(m$data, parent.frame()))) m$data <- as.data.frame(data) m$... <- NULL m$formula <- m$x m$x <- NULL m$scaled <- NULL m[[1L]] <- quote(stats::model.frame) m <- eval(m, parent.frame()) Terms <- attr(m, "terms") attr(Terms, "intercept") <- 0 ## no intercept x <- model.matrix(Terms, m) y <- model.extract(m, "response") if (length(scaled) == 1) scaled <- rep(scaled, ncol(x)) if (any(scaled)) { remove <- unique(c(which(labels(Terms) %in% names(attr(x, "contrasts"))), which(!scaled) ) ) scaled <- !attr(x, "assign") %in% remove } ret <- lssvm(x, y, scaled = scaled, ...) kcall(ret) <- cl attr(Terms,"intercept") <- 0 ## no intercept terms(ret) <- Terms if (!is.null(attr(m, "na.action"))) n.action(ret) <- attr(m, "na.action") return (ret) }) setMethod("lssvm",signature(x="vector"), function(x,...) { x <- t(t(x)) ret <- lssvm(x, ...) return(ret) }) setMethod("lssvm",signature(x="matrix"), function (x, y, scaled = TRUE, kernel = "rbfdot", kpar = "automatic", type = NULL, tau = 0.01, reduced = TRUE, tol = 0.0001, rank = floor(dim(x)[1]/3), delta = 40, ## prob.model = FALSE, cross = 0, fit = TRUE, ..., subset, na.action = na.omit) { ## subsetting and na-handling for matrices ret <- new("lssvm") if (!missing(subset)) x <- x[subset,] df <- unique(na.action(data.frame(y, x))) y <- df[,1] x <- as.matrix(df[,-1]) n.action(ret) <- na.action if(!is.null(type)) type(ret) <- match.arg(type,c("classification","regression")) if (is.null(type)) type(ret) <- if (is.factor(y)) "classification" else "regression" else type(ret) <- type ## scaling, subsetting, and NA handling x.scale <- y.scale <- NULL ## scaling if (length(scaled) == 1) scaled <- rep(scaled, ncol(x)) if (any(scaled)) { co <- !apply(x[,scaled, drop = FALSE], 2, var) if (any(co)) { scaled <- rep(FALSE, ncol(x)) warning(paste("Variable(s)", paste("`",colnames(x[,scaled, drop = FALSE])[co], "'", sep="", collapse=" and "), "constant. 
Cannot scale data.") ) } else { xtmp <- scale(x[,scaled]) x[,scaled] <- xtmp x.scale <- attributes(xtmp)[c("scaled:center","scaled:scale")] } } ncols <- ncol(x) m <- nrows <- nrow(x) if(is.character(kernel)){ kernel <- match.arg(kernel,c("rbfdot","polydot","tanhdot","vanilladot","laplacedot","besseldot","anovadot","splinedot","matrix")) if(kernel == "matrix") if(dim(x)[1]==dim(x)[2]) return(lssvm(as.kernelMatrix(x), y = y,type = NULL, tau = 0.01, tol = 0.0001, rank = floor(dim(x)[1]/3), delta = 40, cross = 0, fit = TRUE, ...)) else stop(" kernel matrix not square!") if(is.character(kpar)) if((kernel == "tanhdot" || kernel == "vanilladot" || kernel == "polydot"|| kernel == "besseldot" || kernel== "anovadot"|| kernel=="splinedot") && kpar=="automatic" ) { cat (" Setting default kernel parameters ","\n") kpar <- list() } } if (!is.function(kernel)) if (!is.list(kpar)&&is.character(kpar)&&(is(kernel, "rbfkernel") || is(kernel, "laplacedot") || kernel == "laplacedot"|| kernel=="rbfdot")){ kp <- match.arg(kpar,"automatic") if(kp=="automatic") kpar <- list(sigma=mean(sigest(x,scaled=FALSE)[c(1,3)])) cat("Using automatic sigma estimation (sigest) for RBF or laplace kernel","\n") } if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") if(type(ret)=="classification") { if (!is.vector(y) && !is.factor (y)) stop("y must be a vector or a factor.") if(is(y,"vector")) { y <- as.matrix(y) if (nrows != nrow(y)) stop("x and y don't match.") } if (is.factor(y)) { lev(ret) <- levels (y) y <- as.integer (y) if (nrows != length(y)) stop("x and y don't match.") } else if (is.numeric(y)) { y <- as.integer(y) lev(ret) <- unique (y) } else stop ("dependent variable has to be of factor or integer type for classification mode.") ## initialize nclass(ret) <- length (unique(y)) p <- 0 svindex <- NULL ## create multidimensional y matrix yind <- t(matrix(1:nclass(ret),nclass(ret),m)) ymat <- matrix(0, m, nclass(ret)) ymat[yind==y] <- 1 if(reduced == FALSE) { K <- kernelMatrix(kernel,x) KP <- K - (1/m)*colSums(K) beta <- solve((KP%*%K + m * tau * K), KP%*%ymat) b <- colMeans(ymat) - colMeans(K%*%beta) alphaindex(ret) <- 1:m } else { G <- csi(x, ymat, rank = rank ,kernel= kernel, delta = delta , tol = tol) rep <- sort(pivots(G),index.return=TRUE)$ix G <- G[rep,] GtP <- t(G) - matrix(rowSums(t(G))/dim(G)[1],dim(G)[2],dim(G)[1]) Gtalpha <- (GtP)%*%G diag(Gtalpha) <- diag(Gtalpha) + tau Gtalpha <- solve(Gtalpha) %*% GtP %*% ymat[rep,,drop=FALSE] beta <- solve(t(G[1:dim(G)[2],]), Gtalpha) b <- colMeans(ymat) - colMeans(G%*%Gtalpha) alphaindex(ret) <- rep[1:dim(G)[2]] } alpha(ret) <- beta ## nonzero alpha*y coef(ret) <- alpha(ret) ## store SV indexes from current problem for later use in predict ## save the indexes from all the SV in a vector (use unique?) svindex <- alphaindex(ret) ## store betas in a vector b(ret) <- b ##store C in return object param(ret)$tau <- tau ## calculate class prob. ## if (prob.model& reduced== TRUE) # warning("Class Probapilities not supported for reduced model.) 
## if(prob.model & reduced == FALSE) ## { ## pos <- as.vector(ymat)==1 ## neg <- as.vector(ymat)==-1 ## ones <- rep(1,dim(x)[1]) ## onesneg <- ones[pos] <- 0 ## ones <- rep(1,dim(x)[1]) ## onespos <- ones[neg] <- 0 ##Kpos <- kernelMult(kernel,x,x[pos,],rep(1,sum(pos))) ##Kneg <- kernelMult(kernel,x,x[neg,],rep(1,sum(neg))) ## Kpos <- K[,pos]%*%rep(1,sum(pos)) ## Kneg <- K[,neg]%*%rep(1,sum(neg)) ## classmeans <- c(sum( Kpos * coef(ret)[pos] * as.vector(ymat)[pos]),sum( Kneg * coef(ret)[pos] * as.vector(ymat)[pos])) ## kneg <- K%*%onesneg ## kpos <- K%*%onespos ## M <- (diag(dim(x)[1])- (1/dim(x)[1])*rep(1,dim(x)[1])%*%t(rep(1,dim(x)[1]))) ## kcentered <- M%*%solve(diag(dim(x)[1]) - tau*M%*%K%*%M)%*%M ## prob.model(ret) <- list(Kpos=Kpos, Kneg=Kneg, kcentered=kcentered, classmeans=classmeans) ## } } if(type(ret)=="regression") { if (nrows != nrow(x)) stop("x and y don't match.") ## initialize p <- 0 svindex <- NULL ymat <- y G <- csi(x, ymat, rank = rank ,kernel= kernel, delta = delta , tol = tol) GtP <- t(G) - matrix(rowSums(t(G))/dim(G)[1],dim(G)[2],dim(G)[1]) Gtalpha <- (GtP)%*%G diag(Gtalpha) <- diag(Gtalpha) + tau Gtalpha <- solve(Gtalpha) %*% GtP %*% ymat beta <- solve(t(G[1:dim(G)[2],]), Gtalpha) b <- colMeans(ymat) - colMeans(G%*%Gtalpha) alpha(ret) <- beta ## nonzero alpha*y coef(ret) <- alpha(ret) ## store SV indexes from current problem for later use in predict alphaindex(ret) <- pivots(G)[1:dim(G)[2]] ## save the indexes from all the SV in a vector (use unique?) svindex <- alphaindex(ret) ## store betas in a vector b(ret) <- b ##store C in return object param(ret)$tau <- tau } kcall(ret) <- match.call() kernelf(ret) <- kernel ## param(ret) <- list(C=C, nu = nu, epsilon = epsilon) xmatrix(ret) <- x[alphaindex(ret),,drop = FALSE] ymatrix(ret) <- y nSV(ret) <- length(svindex) if(nSV(ret)==0) stop("No Support Vectors found. You may want to change your parameters") fitted(ret) <- if (fit) predict(ret, x) else NA scaling(ret) <- list(scaled = scaled, x.scale = x.scale) if (fit){ if(type(ret)=="classification") error(ret) <- 1 - .classAgreement(table(y,as.integer(fitted(ret)))) if(type(ret)=="regression") error(ret) <- drop(crossprod(fitted(ret) - y)/m) } cross(ret) <- -1 if(cross == 1) cat("\n","cross should be >1 no cross-validation done!","\n","\n") else if (cross > 1) { cerror <- 0 suppressWarnings(vgr<-split(sample(1:m,m),1:cross)) for(i in 1:cross) { cind <- unsplit(vgr[-i],factor(rep((1:cross)[-i],unlist(lapply(vgr[-i],length))))) cret <- lssvm(x[cind,],y[cind],type = type(ret),kernel=kernel,kpar = NULL,reduced = reduced, tau=tau, tol=tol, rank = floor(rank/cross), delta = floor(delta/cross), scaled=FALSE, cross = 0, fit = FALSE) cres <- predict(cret, x[vgr[[i]],]) cerror <- (1 - .classAgreement(table(y[vgr[[i]]],as.integer(cres))))/cross + cerror } cross(ret) <- cerror } return(ret) }) ## kernelMatrix interface setMethod("lssvm",signature(x="kernelMatrix"), function (x, y, type = NULL, tau = 0.01, tol = 0.0001, rank = floor(dim(x)[1]/3), delta = 40, cross = 0, fit = TRUE, ...) 
{ ## subsetting and na-handling for matrices ret <- new("lssvm") if(!is.null(type)) type(ret) <- match.arg(type,c("classification","regression")) if (is.null(type)) type(ret) <- if (is.factor(y)) "classification" else "regression" else type(ret) <- type ncols <- ncol(x) m <- nrows <- nrow(x) if(type(ret)=="classification") { if (!is.vector(y) && !is.factor (y)) stop("y must be a vector or a factor.") if (is(y,"vector")) { y <- as.matrix(y) if (nrows != nrow(y)) stop("x and y don't match.")} if (is.factor(y)) { lev(ret) <- levels (y) y <- as.integer (y) if (nrows != length(y)) stop("x and y don't match.") } else if (is.numeric(y)) { y <- as.integer(y) lev(ret) <- unique (y) } else stop ("dependent variable has to be of factor or integer type for classification mode.") ## initialize nclass(ret) <- length (unique(y)) p <- 0 svindex <- NULL ## create multidimensional y matrix yind <- t(matrix(1:nclass(ret),nclass(ret),m)) ymat <- matrix(0, m, nclass(ret)) ymat[yind==y] <- 1 KP <- x - (1/m)*colSums(x) beta <- solve((KP%*%x + m * tau * x), KP%*%ymat) b <- colMeans(ymat) - colMeans(x%*%beta) alphaindex(ret) <- 1:m alpha(ret) <- beta ## nonzero alpha*y coef(ret) <- alpha(ret) ## store SV indexes from current problem for later use in predict ## save the indexes from all the SV in a vector (use unique?) svindex <- alphaindex(ret) ## store betas in a vector b(ret) <- b ##store C in return object param(ret)$tau <- tau } if(type(ret)=="regression") { if (nrows != nrow(x)) stop("x and y don't match.") ## initialize p <- 0 svindex <- NULL ymat <- y G <- csi(x, ymat, rank = rank , delta = delta , tol = tol) GtP <- t(G) - matrix(rowSums(t(G))/dim(G)[1],dim(G)[2],dim(G)[1]) Gtalpha <- (GtP)%*%G diag(Gtalpha) <- diag(Gtalpha) + tau Gtalpha <- solve(Gtalpha) %*% GtP %*% ymat[pivots(G),,drop=FALSE] beta <- solve(t(G[1:dim(G)[2],]), Gtalpha) b <- colMeans(ymat) - colMeans(G%*%Gtalpha) alpha(ret) <- beta ## nonzero alpha*y coef(ret) <- alpha(ret) ## store SV indexes from current problem for later use in predict alphaindex(ret) <- pivots(G)[1:dim(G)[2]] ## save the indexes from all the SV in a vector (use unique?) svindex <- alphaindex(ret) ## store betas in a vector b(ret) <- b ##store C in return object param(ret)$tau <- tau } kcall(ret) <- match.call() ## param(ret) <- list(C=C, nu = nu, epsilon = epsilon) xmatrix(ret) <- x ymatrix(ret) <- y kernelf(ret) <- "Kernel matrix used for training." nSV(ret) <- length(svindex) if(nSV(ret)==0) stop("No Support Vectors found. 
You may want to change your parameters") fitted(ret) <- if (fit) predict(ret, x) else NA if (fit){ if(type(ret)=="classification") error(ret) <- 1 - .classAgreement(table(y,as.integer(fitted(ret)))) if(type(ret)=="regression") error(ret) <- drop(crossprod(fitted(ret) - y)/m) } cross(ret) <- -1 if(cross == 1) cat("\n","cross should be >1 no cross-validation done!","\n","\n") else if (cross > 1) { cerror <- 0 suppressWarnings(vgr<-split(sample(1:m,m),1:cross)) for(i in 1:cross) { cind <- unsplit(vgr[-i],factor(rep((1:cross)[-i],unlist(lapply(vgr[-i],length))))) cret <- lssvm(x[cind,cind],y[cind],type = type(ret), tau=tau, rank = floor(rank/cross), delta = floor(delta/cross), cross = 0, fit = FALSE) cres <- predict(cret, as.kernelMatrix(x[vgr[[i]], cind,drop = FALSE][,svindex,drop=FALSE])) cerror <- (1 - .classAgreement(table(y[vgr[[i]]],as.integer(cres))))/cross + cerror } cross(ret) <- cerror } return(ret) }) ## list interface setMethod("lssvm",signature(x="list"), function (x, y, scaled = TRUE, kernel = "stringdot", kpar = list(length=4, lambda = 0.5), type = NULL, tau = 0.01, reduced = TRUE, tol = 0.0001, rank = floor(dim(x)[1]/3), delta = 40, cross = 0, fit = TRUE, ..., subset) { ## subsetting and na-handling for matrices ret <- new("lssvm") if (!missing(subset)) x <- x[subset] if(!is.null(type)) type(ret) <- match.arg(type,c("classification","regression")) if (is.null(type)) type(ret) <- if (is.factor(y)) "classification" else "regression" else type(ret) <- type m <- nrows <- length(x) if(is.character(kernel)){ kernel <- match.arg(kernel,c("rbfdot","polydot","tanhdot","vanilladot","laplacedot","besseldot","anovadot","splinedot","stringdot")) if(is.character(kpar)) if(kernel == "tanhdot" || kernel == "vanilladot" || kernel == "polydot"|| kernel == "besseldot" || kernel== "anovadot"|| kernel=="splinedot" || kernel == "rbfdot" || kernel == "laplacedot" ) { stop("List interface supports only the stringdot kernel.") } } if(is(kernel,"kernel")) if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") if(type(ret)=="classification") { if (!is.vector(y) && !is.factor (y)) stop("y must be a vector or a factor.") if (nrows != nrow(x)) stop("x and y don't match.") if (is.factor(y)) { lev(ret) <- levels (y) y <- as.integer (y) } else if (is.numeric(y)) { y <- as.integer(y) lev(ret) <- unique (y) } else stop ("dependent variable has to be of factor or integer type for classification mode.") ## initialize nclass(ret) <- length (unique(y)) p <- 0 svindex <- NULL ## create multidimensional y matrix yind <- t(matrix(1:nclass(ret),nclass(ret),m)) ymat <- matrix(0, m, nclass(ret)) ymat[yind==y] <- 1 if(reduced == FALSE) { K <- kernelMatrix(kernel,x) KP <- K - (1/m)*colSums(K) beta <- solve((KP%*%K + m * tau * K), KP%*%ymat) b <- colMeans(ymat) - colMeans(K%*%beta) alphaindex(ret) <- 1:m } else { G <- csi(x, ymat, rank = rank ,kernel= kernel, delta = delta , tol = tol) GtP <- t(G) - matrix(rowSums(t(G))/dim(G)[1],dim(G)[2],dim(G)[1]) Gtalpha <- (GtP)%*%G diag(Gtalpha) <- diag(Gtalpha) + tau Gtalpha <- solve(Gtalpha) %*% GtP %*% ymat[pivots(G),,drop=FALSE] beta <- solve(t(G[1:dim(G)[2],]), Gtalpha) b <- colMeans(ymat) - colMeans(G%*%Gtalpha) alphaindex(ret) <- pivots(G)[1:dim(G)[2]] } alpha(ret) <- beta ## nonzero alpha*y coef(ret) <- alpha(ret) ## store SV indexes from current problem for later use in predict ## save the indexes from all the SV in a vector (use 
unique?) svindex <- alphaindex(ret) ## store betas in a vector b(ret) <- b ##store C in return object param(ret)$tau <- tau } if(type(ret)=="regression") { if (nrows != nrow(x)) stop("x and y don't match.") ## initialize p <- 0 svindex <- NULL ymat <- y G <- csi(x, ymat, rank = rank ,kernel= kernel, delta = delta , tol = tol) GtP <- t(G) - matrix(rowSums(t(G))/dim(G)[1],dim(G)[2],dim(G)[1]) Gtalpha <- (GtP)%*%G diag(Gtalpha) <- diag(Gtalpha) + tau Gtalpha <- solve(Gtalpha) %*% GtP %*% ymat[pivots(G),,drop=FALSE] beta <- solve(t(G[1:dim(G)[2],]), Gtalpha) b <- colMeans(ymat) - colMeans(G%*%Gtalpha) alpha(ret) <- beta ## nonzero alpha*y coef(ret) <- alpha(ret) ## store SV indexes from current problem for later use in predict alphaindex(ret) <- pivots(G)[1:dim(G)[2]] ## save the indexes from all the SV in a vector (use unique?) svindex <- alphaindex(ret) ## store betas in a vector b(ret) <- b ##store C in return object param(ret)$tau <- tau } kcall(ret) <- match.call() kernelf(ret) <- kernel ## param(ret) <- list(C=C, nu = nu, epsilon = epsilon) xmatrix(ret) <- x[alphaindex(ret)] ymatrix(ret) <- y SVindex(ret) <- svindex nSV(ret) <- length(svindex) if(nSV(ret)==0) stop("No Support Vectors found. You may want to change your parameters") fitted(ret) <- if (fit) predict(ret, x) else NA if (fit){ if(type(ret)=="classification") error(ret) <- 1 - .classAgreement(table(y,as.integer(fitted(ret)))) if(type(ret)=="regression") error(ret) <- drop(crossprod(fitted(ret) - y)/m) } cross(ret) <- -1 if(cross == 1) cat("\n","cross should be >1 no cross-validation done!","\n","\n") else if (cross > 1) { cerror <- 0 suppressWarnings(vgr<-split(sample(1:m,m),1:cross)) for(i in 1:cross) { cind <- unsplit(vgr[-i],factor(rep((1:cross)[-i],unlist(lapply(vgr[-i],length))))) cret <- lssvm(x[cind,],y[cind],type = type(ret),kernel=kernel,kpar = NULL,reduced = reduced, tau=tau, tol=tol, rank = floor(rank/cross), delta = floor(delta/cross), scaled=FALSE, cross = 0, fit = FALSE ) cres <- predict(cret, x[vgr[[i]],]) cerror <- (1 - .classAgreement(table(y[vgr[[i]]],as.integer(cres))))/cross + cerror } cross(ret) <- cerror } return(ret) }) #**************************************************************# setMethod("predict", signature(object = "lssvm"), function (object, newdata, type = "response", coupler = "minpair") { sc <- 0 type <- match.arg(type,c("response","probabilities","decision")) if (missing(newdata) && type!="response") return(fitted(object)) else if(missing(newdata)) { newdata <- xmatrix(object) sc <- 1 } ncols <- ncol(xmatrix(object)) nrows <- nrow(xmatrix(object)) oldco <- ncols if (!is.null(terms(object))) { if(!is.matrix(newdata)) newdata <- model.matrix(delete.response(terms(object)), as.data.frame(newdata), na.action = n.action(object)) } else newdata <- if (is.vector(newdata)) t(t(newdata)) else as.matrix(newdata) newcols <- 0 newnrows <- nrow(newdata) newncols <- ncol(newdata) newco <- newncols if (oldco != newco) stop ("test vector does not match model !") p<-0 if (!is.null(scaling(object)$x.scale) && sc != 1) newdata[,scaling(object)$scaled] <- scale(newdata[,scaling(object)$scaled, drop = FALSE], center = scaling(object)$x.scale$"scaled:center", scale = scaling(object)$x.scale$"scaled:scale" ) if(is(newdata,"kernelMatrix")) res <- newdata %*% coef(object) - b(object) else res <- t(t(kernelMult(kernelf(object), newdata,xmatrix(object), alpha(object))) + b(object)) if(type == "response" && type(object)=="classification"){ predres <- max.col(res) return(factor (lev(object)[predres], levels = 
lev(object))) } if (type == "decision" || type(object)=="regression") return(res) if (type =="probabilities" && type(object)=="classification") { res - prob.model(object)$classmeans return(res) } }) #****************************************************************************************# setMethod("show","lssvm", function(object){ cat("Least Squares Support Vector Machine object of class \"lssvm\"","\n") cat("\n") cat(paste("problem type :",type(object), "\n")) cat(paste(" parameter : tau =",param(object)$tau, "\n")) cat("\n") show(kernelf(object)) cat(paste("\nNumber of data points used for training :", nSV(object),"\n")) if(!is.null(fitted(object))) cat(paste("Training error :", round(error(object),6),"\n")) if(cross(object)!= -1) cat("Cross validation error :",round(cross(object),6),"\n") }) ##.partopro <- function(z,s,m){ ##return(2*pi*(1/sqrt((1/z)+s^2))*exp(-(m^2)/(2*((1/z)+s^2)))) ##} kernlab/R/inchol.R0000644000175100001440000001141311304023134013475 0ustar hornikusers setGeneric("inchol", function(x, kernel="rbfdot",kpar=list(sigma=0.1), tol= 0.001, maxiter = dim(x)[1], blocksize = 50, verbose = 0) standardGeneric("inchol")) setMethod("inchol",signature(x="matrix"), function(x, kernel="rbfdot",kpar=list(sigma=0.1), tol= 0.001, maxiter = dim(x)[1], blocksize = 50, verbose = 0) { ## ## Description: ## ## Find the incomplete Cholesky decomposition of the kernel matrix ## ## Parameters: ## ## data : ## kernel : kernlab object ## tol : algo stops when remaining pivots < tol ## max.iter : maximum number of colums in Tk ## ## Return: ## ## Tk : K \approx Tk * Tk' ## pivots : Indices on which we pivoted ## diag.residues : Residuals left on the diagonal ## maxresiduals : Residuals we picked for pivoting ## ## Authors : S.V.N. Vishwanathan / Alex Smola ## R Version : Alexandros Karatzoglou ## For aggressive memory allocation BLOCKSIZE <- blocksize if(!is.matrix(x)) stop("x must be a matrix") if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") ## Begin initialization Tk <- T <- pivots <- maxresiduals <- padded.veck <- matrix(0,0,0) counter <- 0 ## End initialization m <- dim(x)[1] ## Compute the diagonal of kernel matrix diag.residues <- matrix(0, m, 1) for (i in 1:m) diag.residues[i] <- kernel(x[i,],x[i,]) ## Choose first pivot residue <- max(diag.residues) index <- which.max(diag.residues == residue) dota <- rowSums(x^2) while( residue > tol && counter < maxiter ) { ## Aggressively allocate memory if(counter %% BLOCKSIZE == 0) { Tktmp <- matrix(0, m, dim(Tk)[2] + BLOCKSIZE) Tktmp[1:m > 0, 1:(dim(Tk)[2] + BLOCKSIZE) <= dim(Tk)[2]] <- Tk Tk <- Tktmp Ttmp <- matrix(0, dim(T)[1]+BLOCKSIZE, BLOCKSIZE+counter) ind <- 1:(dim(T)[1]+BLOCKSIZE) <= dim(T)[1] ind2 <- 1:(BLOCKSIZE + counter) <= counter Ttmp[ind , ind2] <- T Ttmp[ind == FALSE, ind2 == FALSE] <- diag(1, BLOCKSIZE) T <- Ttmp padded.veck.tmp <- matrix(0,dim(padded.veck)[1]+BLOCKSIZE) padded.veck.tmp[1:(dim(padded.veck)[1]+BLOCKSIZE) <= dim(padded.veck)[1]] <- padded.veck padded.veck <- padded.veck.tmp pivots.tmp <- matrix(0, dim(pivots)[1]+BLOCKSIZE) pivots.tmp[1:(dim(pivots)[1] + BLOCKSIZE)<= dim(pivots)[1]] <- pivots pivots <- pivots.tmp maxresiduals.tmp <- matrix(0,dim(maxresiduals)[1]+BLOCKSIZE) maxresiduals.tmp[1:(dim(maxresiduals)[1]+BLOCKSIZE) <= dim(maxresiduals)[1]] <- maxresiduals maxresiduals <- maxresiduals.tmp if(counter == 0) t <- rep(0,BLOCKSIZE) else t <- 
rep(0,length(t)+BLOCKSIZE) } veck <- kernelFast(kernel, x, x[index, ,drop=FALSE],dota) if (counter == 0) { ## No need to compute t here tau <- sqrt(veck[index]) ## Update T T[1, 1] <- tau ## Compute the update for Tk update <- veck/tau } else { padded.veck[1:counter] <- veck[pivots[1:counter]] ## First compute t ## t <- t(crossprod(padded.veck,backsolve(T,diag(1,nrow=dim(T)[1])))) ## cat("T: ",dim(T), " p:",length(padded.veck),",\n") t[1:counter] <- backsolve(T, k=counter, padded.veck, transpose = TRUE) ## Now compute tau tau <- as.vector(sqrt(veck[index] - crossprod(t))) ## Update T T[1:counter, counter+1] <- t[1:counter] T[counter + 1, counter + 1] <- tau ## Compute the update for Tk update <- (1/tau) * (veck - Tk %*% t) } ## Update Tk Tk[,counter + 1] <- update ## Update diagonal residuals diag.residues <- diag.residues - update^2 ## Update pivots pivots[counter + 1] <- index ## Monitor residuals maxresiduals[counter + 1] <- residue ## Choose next candidate residue <- max( diag.residues ) index <- which.max(diag.residues) ## Update counter counter <- counter + 1 ## Report progress to the user if(counter%%blocksize == 0 && (verbose == TRUE)) cat("counter = ",counter," ", "residue = ", residue, "\n") } ## Throw away extra columns which we might have added Tk <- Tk[, 1:counter] pivots <- pivots[1:counter] maxresiduals <- maxresiduals[1:counter] return(new("inchol",.Data=Tk,pivots=pivots,diagresidues = diag.residues, maxresiduals = maxresiduals)) }) kernlab/R/kcca.R0000644000175100001440000000451012105726255013140 0ustar hornikusers## Simple kernel canonical corelation analysis ## author: alexandros karatzoglou setGeneric("kcca",function(x, y, kernel="rbfdot", kpar=list(sigma = 0.1), gamma=0.1, ncomps = 10, ...) standardGeneric("kcca")) setMethod("kcca", signature(x = "matrix"), function(x,y,kernel="rbfdot",kpar=list(sigma=0.1), gamma=0.1, ncomps =10, ...) 
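## (descriptive sketch of the method below: the canonical correlations
## are obtained from a generalized eigenvalue problem built out of the
## two kernel matrices -- the off-diagonal blocks Kx %*% Ky form the
## left-hand side, the gamma-regularized within-set blocks
## (Kx + gamma*I) %*% Kx and (Ky + gamma*I) %*% Ky the right-hand
## side -- which is then solved by the .gevd() helper)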
{ x <- as.matrix(x) y <- as.matrix(y) if(!(nrow(x)==nrow(y))) stop("Number of rows in x, y matrixes is not equal") if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") Kx <- kernelMatrix(kernel,x) Ky <- kernelMatrix(kernel,y) n <- dim(Kx)[1] m <- 2 ## Generate LH VK <- matrix(0,n*2,n); VK[0:n,] <- Kx VK[(n+1):(2*n),] <- Ky LH <- tcrossprod(VK, VK) for (i in 1:m) LH[((i-1)*n+1):(i*n),((i-1)*n+1):(i*n)] <- 0 ## Generate RH RH <- matrix(0,n*m,n*m) RH[1:n,1:n] <- (Kx + diag(rep(gamma,n)))%*%Kx + diag(rep(1e-6,n)) RH[(n+1):(2*n),(n+1):(2*n)] <- (Ky + diag(rep(gamma,n)))%*%Ky + diag(rep(1e-6,n)) RH <- (RH+t(RH))/2 ei <- .gevd(LH,RH) ret <- new("kcca") kcor(ret) <- as.double(ei$gvalues[1:ncomps]) xcoef(ret) <- matrix(as.double(ei$gvectors[1:n,1:ncomps]),n) ycoef(ret) <- matrix(as.double(ei$gvectors[(n+1):(2*n),1:ncomps]),n) ## xvar(ret) <- rotated(xpca) %*% cca$xcoef ## yvar(ret) <- rotated(ypca) %*% cca$ycoef return(ret) }) ## gevd compute the generalized eigenvalue ## decomposition for (a,b) .gevd<-function(a,b=diag(nrow(a))) { bs<-.mfunc(b,function(x) .ginvx(sqrt(x))) ev<-eigen(bs%*%a%*%bs) return(list(gvalues=ev$values,gvectors=bs%*%ev$vectors)) } ## mfunc is a helper to compute matrix functions .mfunc<-function(a,fn=sqrt) { e<-eigen(a); y<-e$vectors; v<-e$values return(tcrossprod(y%*%diag(fn(v)),y)) } ## ginvx is a helper to compute reciprocals .ginvx<-function(x) {ifelse(x==0,0,1/x)} kernlab/R/ksvm.R0000644000175100001440000034643414221633101013221 0ustar hornikusers## Support Vector Machines ## author : alexandros karatzoglou ## updated : 08.02.06 setGeneric("ksvm", function(x, ...) standardGeneric("ksvm")) setMethod("ksvm",signature(x="formula"), function (x, data=NULL, ..., subset, na.action = na.omit, scaled = TRUE){ cl <- match.call() m <- match.call(expand.dots = FALSE) if (is.matrix(eval(m$data, parent.frame()))) m$data <- as.data.frame(data) m$... <- NULL m$formula <- m$x m$x <- NULL m$scaled <- NULL m[[1L]] <- quote(stats::model.frame) m <- eval(m, parent.frame()) Terms <- attr(m, "terms") attr(Terms, "intercept") <- 0 ## no intercept x <- model.matrix(Terms, m) y <- model.extract(m, "response") if (length(scaled) == 1) scaled <- rep(scaled, ncol(x)) if (any(scaled)) { remove <- unique(c(which(labels(Terms) %in% names(attr(x, "contrasts"))), which(!scaled) ) ) scaled <- !attr(x, "assign") %in% remove } ret <- ksvm(x, y, scaled = scaled, ...) kcall(ret) <- cl attr(Terms,"intercept") <- 0 ## no intercept terms(ret) <- Terms if (!is.null(attr(m, "na.action"))) n.action(ret) <- attr(m, "na.action") return (ret) }) setMethod("ksvm",signature(x="vector"), function(x, ...) { x <- t(t(x)) ret <- ksvm(x, ...) return(ret) }) setMethod("ksvm",signature(x="matrix"), function (x, y = NULL, scaled = TRUE, type = NULL, kernel = "rbfdot", kpar = "automatic", C = 1, nu = 0.2, epsilon = 0.1, prob.model = FALSE, class.weights = NULL, cross = 0, fit = TRUE, cache = 40, tol = 0.001, shrinking = TRUE, ... ,subset ,na.action = na.omit) { ## Comment out sparse code, future impl. 
will be based on "Matrix" ## sparse <- inherits(x, "matrix.csr") ## if (sparse) { ## if (!require(SparseM)) ## stop("Need SparseM package for handling of sparse structures!") ## } sparse <- FALSE if(is.character(kernel)){ kernel <- match.arg(kernel,c("rbfdot","polydot","tanhdot","vanilladot","laplacedot","besseldot","anovadot","splinedot","matrix")) if(kernel == "matrix") if(dim(x)[1]==dim(x)[2]) return(ksvm(as.kernelMatrix(x), y = y, type = type, C = C, nu = nu, epsilon = epsilon, prob.model = prob.model, class.weights = class.weights, cross = cross, fit = fit, cache = cache, tol = tol, shrinking = shrinking, ...)) else stop(" kernel matrix not square!") if(is.character(kpar)) if((kernel == "tanhdot" || kernel == "vanilladot" || kernel == "polydot"|| kernel == "besseldot" || kernel== "anovadot"|| kernel=="splinedot") && kpar=="automatic" ) { cat (" Setting default kernel parameters ","\n") kpar <- list() } } ## subsetting and na-handling for matrices ret <- new("ksvm") if (!missing(subset)) x <- x[subset,] if (is.null(y)) x <- na.action(x) else { df <- na.action(data.frame(y, x)) y <- df[,1] x <- as.matrix(df[,-1]) } n.action(ret) <- na.action if (is.null(type)) type(ret) <- if (is.null(y)) "one-svc" else if (is.factor(y)) "C-svc" else "eps-svr" if(!is.null(type)) type(ret) <- match.arg(type,c("C-svc", "nu-svc", "kbb-svc", "spoc-svc", "C-bsvc", "one-svc", "eps-svr", "eps-bsvr", "nu-svr")) ## ## scaling, subsetting, and NA handling ## if (sparse) { ## scale <- rep(FALSE, ncol(x)) ## if(!is.null(y)) na.fail(y) ## x <- t(t(x)) ## make shure that col-indices are sorted ## } x.scale <- y.scale <- NULL ## scaling if (length(scaled) == 1) scaled <- rep(scaled, ncol(x)) if (any(scaled)) { co <- !apply(x[,scaled, drop = FALSE], 2, var) if (any(co)) { scaled <- rep(FALSE, ncol(x)) warning(paste("Variable(s)", paste("`",colnames(x[,scaled, drop = FALSE])[co], "'", sep="", collapse=" and "), "constant. 
Cannot scale data.") ) } else { xtmp <- scale(x[,scaled]) x[,scaled] <- xtmp x.scale <- attributes(xtmp)[c("scaled:center","scaled:scale")] if (is.numeric(y)&&(type(ret)!="C-svc"&&type(ret)!="nu-svc"&&type(ret)!="C-bsvc"&&type(ret)!="spoc-svc"&&type(ret)!="kbb-svc")) { y <- scale(y) y.scale <- attributes(y)[c("scaled:center","scaled:scale")] y <- as.vector(y) } } } ncols <- ncol(x) m <- nrows <- nrow(x) if (!is.function(kernel)) if (!is.list(kpar)&&is.character(kpar)&&(is(kernel, "rbfkernel") || is(kernel, "laplacedot") || kernel == "laplacedot"|| kernel=="rbfdot")){ kp <- match.arg(kpar,"automatic") if(kp=="automatic") kpar <- list(sigma=mean(sigest(x,scaled=FALSE)[c(1,3)])) #cat("Using automatic sigma estimation (sigest) for RBF or laplace kernel","\n") } if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") if (!is(y,"vector") && !is.factor (y) & is(y,"matrix") & !(type(ret)=="one-svc")) stop("y must be a vector or a factor.") if(!(type(ret)=="one-svc")) if(is(y,"vector") | is(y,"factor") ) ym <- length(y) else if(is(y,"matrix")) ym <- dim(y)[1] else stop("y must be a matrix or a vector") if ((type(ret) != "one-svc") && ym != m) stop("x and y don't match.") if(nu > 1|| nu <0) stop("nu must be between 0 an 1.") weightlabels <- NULL nweights <- 0 weight <- 0 wl <- 0 ## in case of classification: transform factors into integers if (type(ret) == "one-svc") # one class classification --> set dummy y <- 1 else if (is.factor(y)) { lev(ret) <- levels (y) y <- as.integer (y) if (!is.null(class.weights)) { weightlabels <- match (names(class.weights),lev(ret)) if (any(is.na(weightlabels))) stop ("At least one level name is missing or misspelled.") } } else { if ((type(ret) =="C-svc" || type(ret) == "nu-svc" ||type(ret) == "C-bsvc" || type(ret) == "spoc-svc" || type(ret) == "kbb-svc") && any(as.integer (y) != y)) stop ("dependent variable has to be of factor or integer type for classification mode.") if (type(ret) != "eps-svr" || type(ret) != "nu-svr"|| type(ret)!="eps-bsvr") lev(ret) <- sort(unique (y)) } ## initialize nclass(ret) <- length (unique(y)) p <- 0 K <- 0 svindex <- problem <- NULL sigma <- 0.1 degree <- offset <- scale <- 1 switch(is(kernel)[1], "rbfkernel" = { sigma <- kpar(kernel)$sigma ktype <- 2 }, "tanhkernel" = { sigma <- kpar(kernel)$scale offset <- kpar(kernel)$offset ktype <- 3 }, "polykernel" = { degree <- kpar(kernel)$degree sigma <- kpar(kernel)$scale offset <- kpar(kernel)$offset ktype <- 1 }, "vanillakernel" = { ktype <- 0 }, "laplacekernel" = { ktype <- 5 sigma <- kpar(kernel)$sigma }, "besselkernel" = { ktype <- 6 sigma <- kpar(kernel)$sigma degree <- kpar(kernel)$order offset <- kpar(kernel)$degree }, "anovakernel" = { ktype <- 7 sigma <- kpar(kernel)$sigma degree <- kpar(kernel)$degree }, "splinekernel" = { ktype <- 8 }, { ktype <- 4 } ) prior(ret) <- list(NULL) ## C classification if(type(ret) == "C-svc"){ indexes <- lapply(sort(unique(y)), function(kk) which(y == kk)) for (i in 1:(nclass(ret)-1)) { jj <- i+1 for(j in jj:nclass(ret)) { p <- p+1 ## prepare the data li <- length(indexes[[i]]) lj <- length(indexes[[j]]) if(y[indexes[[i]][1]] < y[indexes[[j]]][1]) { yd <- c(rep(-1,li),rep(1,lj)) if(!is.null(class.weights)){ weight <- class.weights[weightlabels[c(j,i)]] wl <- c(1,0) nweights <- 2 } } else { yd <- c(rep(1,li),rep(-1,lj)) if(!is.null(class.weights)){ weight <- class.weights[weightlabels[c(i,j)]] wl <- c(0,1) 
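## note: each pair of classes is trained as a separate binary
## sub-problem (one-vs-one); the +/- 1 coding of yd above and the
## ordering of the class weights depend on which of the two class
## codes is the smaller one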
nweights <- 2 } } boolabel <- yd >= 0 prior1 <- sum(boolabel) md <- length(yd) prior0 <- md - prior1 prior(ret)[[p]] <- list(prior1 = prior1, prior0 = prior0) if(ktype==4) K <- kernelMatrix(kernel,x[c(indexes[[i]],indexes[[j]]), ,drop=FALSE]) resv <- .Call(smo_optim, as.double(t(x[c(indexes[[i]],indexes[[j]]), ,drop=FALSE])), as.integer(li+lj), as.integer(ncol(x)), as.double(yd), as.double(K), as.integer(if (sparse) x[c(indexes[[i]],indexes[[j]]), ,drop=FALSE]@ia else 0), as.integer(if (sparse) x[c(indexes[[i]],indexes[[j]]), ,drop=FALSE]@ja else 0), as.integer(sparse), as.double(matrix(rep(-1,m))), ##linear term as.integer(ktype), as.integer(0), as.double(C), as.double(nu), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.integer(wl), ##weightlabel as.double(weight), as.integer(nweights), as.double(cache), as.double(tol), as.integer(shrinking)) reind <- sort(c(indexes[[i]],indexes[[j]]),method="quick",index.return=TRUE)$ix tmpres <- resv[c(-(li+lj+1),-(li+lj+2))][reind] ## alpha svind <- tmpres > 0 alpha(ret)[p] <- list(tmpres[svind]) ## coefficients alpha*y coef(ret)[p] <- list(alpha(ret)[[p]]*yd[reind][svind]) ## store SV indexes from current problem for later use in predict alphaindex(ret)[p] <- list(c(indexes[[i]],indexes[[j]])[reind][svind]) ## store Support Vectors xmatrix(ret)[p] <- list(x[c(indexes[[i]],indexes[[j]]), ,drop=FALSE][reind,,drop=FALSE][svind, ,drop=FALSE]) ## save the indexes from all the SV in a vector (use unique?) svindex <- c(svindex,alphaindex(ret)[[p]]) ## store betas in a vector b(ret) <- c(b(ret), resv[li+lj+1]) ## store objective function values in a vector obj(ret) <- c(obj(ret), resv[li+lj+2]) ## used to reconstruct indexes for the patterns matrix x from "indexes" (really useful?) problem[p] <- list(c(i,j)) ##store C in return object param(ret)$C <- C ## margin(ret)[p] <- (min(kernelMult(kernel,xd[1:li,],,alpha(ret)[[p]][1:li])) - max(kernelMult(kernel,xd[li:(li+lj),],,alpha(ret)[[p]][li:(li+lj)])))/2 } } } ## nu classification if(type(ret) == "nu-svc"){ indexes <- lapply(sort(unique(y)), function(kk) which(y == kk)) for (i in 1:(nclass(ret)-1)) { jj <- i+1 for(j in jj:nclass(ret)) { p <- p+1 ##prepare data li <- length(indexes[[i]]) lj <- length(indexes[[j]]) if(y[indexes[[i]][1]] < y[indexes[[j]]][1]) { yd <- c(rep(-1,li),rep(1,lj)) if(!is.null(class.weights)){ weight <- class.weights[weightlabels[c(j,i)]] wl <- c(1,0) nweights <- 2 } } else { yd <- c(rep(1,li),rep(-1,lj)) if(!is.null(class.weights)){ weight <- class.weights[weightlabels[c(i,j)]] wl <- c(0,1) nweights <- 2 } } boolabel <- yd >= 0 prior1 <- sum(boolabel) md <- length(yd) prior0 <- md - prior1 prior(ret)[[p]] <- list(prior1 = prior1, prior0 = prior0) if(ktype==4) K <- kernelMatrix(kernel,x[c(indexes[[i]],indexes[[j]]), ,drop=FALSE]) resv <- .Call(smo_optim, as.double(t(x[c(indexes[[i]],indexes[[j]]), ,drop=FALSE])), as.integer(li+lj), as.integer(ncol(x)), as.double(yd), as.double(K), as.integer(if (sparse) x[c(indexes[[i]],indexes[[j]]), ,drop=FALSE]@ia else 0), as.integer(if (sparse) x[c(indexes[[i]],indexes[[j]]), ,drop=FALSE]@ja else 0), as.integer(sparse), as.double(matrix(rep(-1,m))), #linear term as.integer(ktype), as.integer(1), as.double(C), as.double(nu), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.integer(wl), #weightlabl.
as.double(weight), as.integer(nweights), as.double(cache), as.double(tol), as.integer(shrinking)) reind <- sort(c(indexes[[i]],indexes[[j]]),method="quick",index.return=TRUE)$ix tmpres <- resv[c(-(li+lj+1),-(li+lj+2))][reind] svind <- tmpres != 0 alpha(ret)[p] <- coef(ret)[p] <- list(tmpres[svind]) ##store SV indexes from current problem for later use in predict alphaindex(ret)[p] <- list(c(indexes[[i]],indexes[[j]])[reind][svind]) ## store Support Vectors xmatrix(ret)[p] <- list(x[c(indexes[[i]],indexes[[j]]), ,drop=FALSE][reind,,drop=FALSE][svind,,drop=FALSE]) ##save the indexes from all the SV in a vector (use unique!) svindex <- c(svindex,alphaindex(ret)[[p]]) ## store betas in a vector b(ret) <- c(b(ret), resv[li+lj+1]) ## store objective function values in a vector obj(ret) <- c(obj(ret), resv[li+lj+2]) ## used to reconstruct indexes for the patterns matrix x from "indexes" problem[p] <- list(c(i,j)) param(ret)$nu <- nu ## margin(ret)[p] <- (min(kernelMult(kernel,xd[1:li,],,alpha(ret)[[p]][1:li])) - max(kernelMult(kernel,xd[li:(li+lj),],,alpha(ret)[[p]][li:(li+lj)])))/2 } } } ## Bound constraint C classification if(type(ret) == "C-bsvc"){ if(!is.null(class.weights)) weightedC <- class.weights[weightlabels] * rep(C,nclass(ret)) else weightedC <- rep(C,nclass(ret)) indexes <- lapply(sort(unique(y)), function(kk) which(y == kk)) for (i in 1:(nclass(ret)-1)) { jj <- i+1 for(j in jj:nclass(ret)) { p <- p+1 ##prepare data li <- length(indexes[[i]]) lj <- length(indexes[[j]]) if(y[indexes[[i]][1]] < y[indexes[[j]]][1]) { yd <- c(rep(-1,li),rep(1,lj)) if(!is.null(class.weights)){ weight <- class.weights[weightlabels[c(j,i)]] wl <- c(1,0) nweights <- 2 } } else { yd <- c(rep(1,li),rep(-1,lj)) if(!is.null(class.weights)){ weight <- class.weights[weightlabels[c(i,j)]] wl <- c(0,1) nweigths <- 2 } } boolabel <- yd >= 0 prior1 <- sum(boolabel) md <- length(yd) prior0 <- md - prior1 prior(ret)[[p]] <- list(prior1 = prior1, prior0 = prior0) if(ktype==4) K <- kernelMatrix(kernel,x[c(indexes[[i]],indexes[[j]]), ,drop=FALSE]) resv <- .Call(tron_optim, as.double(t(x[c(indexes[[i]],indexes[[j]]), ,drop=FALSE])), as.integer(li+lj), as.integer(ncol(x)), as.double(yd), as.double(K), as.integer(if (sparse) x[c(indexes[[i]],indexes[[j]]), ,drop=FALSE]@ia else 0), as.integer(if (sparse) x[c(indexes[[i]],indexes[[j]]), ,drop=FALSE]@ja else 0), as.integer(sparse), as.integer(2), as.double(0), ##countc as.integer(ktype), as.integer(5), as.double(C), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.double(1), ## cost value of alpha seeding as.double(2), ## step value of alpha seeding as.integer(wl), ##weightlabel as.double(weight), as.integer(nweights), as.double(weightedC), as.double(cache), as.double(tol), as.integer(10), ##qpsize as.integer(shrinking)) reind <- sort(c(indexes[[i]],indexes[[j]]),method="quick",index.return=TRUE)$ix svind <- resv[-(li+lj+1)][reind] > 0 alpha(ret)[p] <- list(resv[-(li+lj+1)][reind][svind]) ## nonzero alpha*y coef(ret)[p] <- list(alpha(ret)[[p]] * yd[reind][svind]) ## store SV indexes from current problem for later use in predict alphaindex(ret)[p] <- list(c(indexes[[i]],indexes[[j]])[reind][svind]) ## store Support Vectors xmatrix(ret)[p] <- list(x[c(indexes[[i]],indexes[[j]]), ,drop=FALSE][reind,,drop = FALSE][svind,,drop = FALSE]) ## save the indexes from all the SV in a vector (use unique?) svindex <- c(svindex,alphaindex(ret)[[p]]) ## store betas in a vector b(ret) <- - sapply(coef(ret),sum) ## store obj. 
values in vector obj(ret) <- c(obj(ret), resv[(li+lj+1)]) ## used to reconstruct indexes for the patterns matrix x from "indexes" (really usefull ?) problem[p] <- list(c(i,j)) ##store C in return object param(ret)$C <- C ## margin(ret)[p] <- (min(kernelMult(kernel,xd[1:li,],,alpha(ret)[[p]][1:li])) - max(kernelMult(kernel,xd[li:(li+lj),],,alpha(ret)[[p]][li:(li+lj)])))/2 } } } ## SPOC multiclass classification if(type(ret) =="spoc-svc") { if(!is.null(class.weights)) weightedC <- class.weights[weightlabels] * rep(C,nclass(ret)) else weightedC <- rep(C,nclass(ret)) yd <- sort(y,method="quick", index.return = TRUE) xd <- matrix(x[yd$ix,],nrow=dim(x)[1]) count <- 0 if(ktype==4) K <- kernelMatrix(kernel,x) resv <- .Call(tron_optim, as.double(t(xd)), as.integer(nrow(xd)), as.integer(ncol(xd)), as.double(rep(yd$x-1,2)), as.double(K), as.integer(if (sparse) xd@ia else 0), as.integer(if (sparse) xd@ja else 0), as.integer(sparse), as.integer(nclass(ret)), as.integer(count), as.integer(ktype), as.integer(7), as.double(C), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.double(C), as.double(2), #Cstep as.integer(0), #weightlabel as.double(0), as.integer(0), as.double(weightedC), as.double(cache), as.double(tol), as.integer(10), #qpsize as.integer(shrinking)) reind <- sort(yd$ix,method="quick",index.return=TRUE)$ix alpha(ret) <- t(matrix(resv[-(nclass(ret)*nrow(xd) + 1)],nclass(ret)))[reind,,drop=FALSE] coef(ret) <- lapply(1:nclass(ret), function(x) alpha(ret)[,x][alpha(ret)[,x]!=0]) names(coef(ret)) <- lev(ret) alphaindex(ret) <- lapply(sort(unique(y)), function(x) which(alpha(ret)[,x]!=0)) xmatrix(ret) <- x obj(ret) <- resv[(nclass(ret)*nrow(xd) + 1)] names(alphaindex(ret)) <- lev(ret) svindex <- which(rowSums(alpha(ret)!=0)!=0) b(ret) <- 0 param(ret)$C <- C } ## KBB multiclass classification if(type(ret) =="kbb-svc") { if(!is.null(class.weights)) weightedC <- weightlabels * rep(C,nclass(ret)) else weightedC <- rep(C,nclass(ret)) yd <- sort(y,method="quick", index.return = TRUE) x <- x[yd$ix,,drop=FALSE] count <- sapply(unique(yd$x), function(c) length(yd$x[yd$x==c])) if(ktype==4) K <- kernelMatrix(kernel,x) resv <- .Call(tron_optim, as.double(t(x)), as.integer(nrow(x)), as.integer(ncol(x)), as.double(yd$x-1), as.double(K), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.integer(nclass(ret)), as.integer(count), as.integer(ktype), as.integer(8), as.double(C), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.double(C), #Cbegin as.double(2), #Cstep as.integer(0), #weightlabl. 
as.double(0), as.integer(0), as.double(weightedC), as.double(cache), as.double(tol), as.integer(10), #qpsize as.integer(shrinking)) reind <- sort(yd$ix,method="quick",index.return=TRUE)$ix alpha(ret) <- matrix(resv[-(nrow(x)*(nclass(ret)-1)+1)],nrow(x))[reind,,drop=FALSE] xmatrix(ret) <- x<- x[reind,,drop=FALSE] coef(ret) <- lapply(1:(nclass(ret)-1), function(x) alpha(ret)[,x][alpha(ret)[,x]!=0]) alphaindex(ret) <- lapply(sort(unique(y)), function(x) which((y == x) & (rowSums(alpha(ret))!=0))) svindex <- which(rowSums(alpha(ret)!=0)!=0) b(ret) <- - sapply(coef(ret),sum) obj(ret) <- resv[(nrow(x)*(nclass(ret)-1)+1)] param(ret)$C <- C } ## Novelty detection if(type(ret) =="one-svc") { if(ktype==4) K <- kernelMatrix(kernel,x) resv <- .Call(smo_optim, as.double(t(x)), as.integer(nrow(x)), as.integer(ncol(x)), as.double(matrix(rep(1,m))), as.double(K), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.double(matrix(rep(-1,m))), as.integer(ktype), as.integer(2), as.double(C), as.double(nu), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.integer(0), #weightlabl. as.double(0), as.integer(0), as.double(cache), as.double(tol), as.integer(shrinking)) tmpres <- resv[c(-(m+1),-(m+2))] alpha(ret) <- coef(ret) <- tmpres[tmpres != 0] svindex <- alphaindex(ret) <- which(tmpres != 0) xmatrix(ret) <- x[svindex,,drop=FALSE] b(ret) <- resv[(m+1)] obj(ret) <- resv[(m+2)] param(ret)$nu <- nu } ## epsilon regression if(type(ret) =="eps-svr") { if(ktype==4) K <- kernelMatrix(kernel,x) resv <- .Call(smo_optim, as.double(t(x)), as.integer(nrow(x)), as.integer(ncol(x)), as.double(y), as.double(K), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.double(matrix(rep(-1,m))), as.integer(ktype), as.integer(3), as.double(C), as.double(nu), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.integer(0), #weightlabl. 
as.double(0), as.integer(0), as.double(cache), as.double(tol), as.integer(shrinking)) tmpres <- resv[c(-(m+1),-(m+2))] alpha(ret) <- coef(ret) <- tmpres[tmpres != 0] svindex <- alphaindex(ret) <- which(tmpres != 0) xmatrix(ret) <- x[svindex, ,drop=FALSE] b(ret) <- resv[(m+1)] obj(ret) <- resv[(m+2)] param(ret)$epsilon <- epsilon param(ret)$C <- C } ## nu regression if(type(ret) =="nu-svr") { if(ktype==4) K <- kernelMatrix(kernel,x) resv <- .Call(smo_optim, as.double(t(x)), as.integer(nrow(x)), as.integer(ncol(x)), as.double(y), as.double(K), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.double(matrix(rep(-1,m))), as.integer(ktype), as.integer(4), as.double(C), as.double(nu), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.integer(0), as.double(0), as.integer(0), as.double(cache), as.double(tol), as.integer(shrinking)) tmpres <- resv[c(-(m+1),-(m+2))] alpha(ret) <- coef(ret) <- tmpres[tmpres!=0] svindex <- alphaindex(ret) <- which(tmpres != 0) xmatrix(ret) <- x[svindex,,drop=FALSE] b(ret) <- resv[(m+1)] obj(ret) <- resv[(m+2)] param(ret)$epsilon <- epsilon param(ret)$nu <- nu } ## bound constraint eps regression if(type(ret) =="eps-bsvr") { if(ktype==4) K <- kernelMatrix(kernel,x) resv <- .Call(tron_optim, as.double(t(x)), as.integer(nrow(x)), as.integer(ncol(x)), as.double(y), as.double(K), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.integer(2), as.integer(0), as.integer(ktype), as.integer(6), as.double(C), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.double(1), #Cbegin as.double(2), #Cstep as.integer(0), #weightlabl. as.double(0), as.integer(0), as.double(0), as.double(cache), as.double(tol), as.integer(10), #qpsize as.integer(shrinking)) tmpres <- resv[-(m + 1)] alpha(ret) <- coef(ret) <- tmpres[tmpres!=0] svindex <- alphaindex(ret) <- which(tmpres != 0) xmatrix(ret) <- x[svindex,,drop=FALSE] b(ret) <- -sum(alpha(ret)) obj(ret) <- resv[(m + 1)] param(ret)$epsilon <- epsilon param(ret)$C <- C } kcall(ret) <- match.call() kernelf(ret) <- kernel ymatrix(ret) <- y SVindex(ret) <- sort(unique(svindex),method="quick") nSV(ret) <- length(unique(svindex)) if(nSV(ret)==0) stop("No Support Vectors found. 
You may want to change your parameters") fitted(ret) <- if (fit) predict(ret, x) else NULL if(any(scaled)) scaling(ret) <- list(scaled = scaled, x.scale = x.scale, y.scale = y.scale) if (fit){ if(type(ret)=="C-svc"||type(ret)=="nu-svc"||type(ret)=="spoc-svc"||type(ret)=="kbb-svc"||type(ret)=="C-bsvc") error(ret) <- 1 - .classAgreement(table(y,as.integer(fitted(ret)))) if(type(ret)=="one-svc") error(ret) <- sum(!fitted(ret))/m if(type(ret)=="eps-svr"||type(ret)=="nu-svr"||type(ret)=="eps-bsvr"){ if (!is.null(scaling(ret)$y.scale)){ scal <- scaling(ret)$y.scale$"scaled:scale" fitted(ret) <- fitted(ret) # / scaling(ret)$y.scale$"scaled:scale" + scaling(ret)$y.scale$"scaled:center" } else scal <- 1 error(ret) <- drop(crossprod(fitted(ret) - y)/m) } } cross(ret) <- -1 if(cross == 1) cat("\n","cross should be >1 no cross-validation done!","\n","\n") else if (cross > 1) { cerror <- 0 suppressWarnings(vgr<-split(sample(1:m,m),1:cross)) for(i in 1:cross) { cind <- unsplit(vgr[-i],factor(rep((1:cross)[-i],unlist(lapply(vgr[-i],length))))) if(type(ret)=="C-svc"||type(ret)=="nu-svc"||type(ret)=="spoc-svc"||type(ret)=="kbb-svc"||type(ret)=="C-bsvc") { if(is.null(class.weights)) cret <- ksvm(x[cind,],y[cind],type = type(ret),kernel=kernel,kpar = NULL, C=C, nu=nu, tol=tol, scaled=FALSE, cross = 0, fit = FALSE ,cache = cache) else cret <- ksvm(x[cind,],as.factor(lev(ret)[y[cind]]),type = type(ret),kernel=kernel,kpar = NULL, C=C, nu=nu, tol=tol, scaled=FALSE, cross = 0, fit = FALSE, class.weights = class.weights,cache = cache) cres <- predict(cret, x[vgr[[i]],,drop=FALSE]) cerror <- (1 - .classAgreement(table(y[vgr[[i]]],as.integer(cres))))/cross + cerror } if(type(ret)=="one-svc") { cret <- ksvm(x[cind,],type=type(ret),kernel=kernel,kpar = NULL,C=C,nu=nu,epsilon=epsilon,tol=tol,scaled=FALSE, cross = 0, fit = FALSE, cache = cache, prob.model = FALSE) cres <- predict(cret, x[vgr[[i]],, drop=FALSE]) cerror <- (1 - sum(cres)/length(cres))/cross + cerror } if(type(ret)=="eps-svr"||type(ret)=="nu-svr"||type(ret)=="eps-bsvr") { cret <- ksvm(x[cind,],y[cind],type=type(ret),kernel=kernel,kpar = NULL,C=C,nu=nu,epsilon=epsilon,tol=tol,scaled=FALSE, cross = 0, fit = FALSE, cache = cache, prob.model = FALSE) cres <- predict(cret, x[vgr[[i]],,drop=FALSE]) if (!is.null(scaling(ret)$y.scale)) scal <- scaling(ret)$y.scale$"scaled:scale" else scal <- 1 cerror <- drop((scal^2)*crossprod(cres - y[vgr[[i]]])/m) + cerror } } cross(ret) <- cerror } prob.model(ret) <- list(NULL) if(prob.model) { if(type(ret)=="C-svc"||type(ret)=="nu-svc"||type(ret)=="C-bsvc") { p <- 0 for (i in 1:(nclass(ret)-1)) { jj <- i+1 for(j in jj:nclass(ret)) { p <- p+1 ##prepare data li <- length(indexes[[i]]) lj <- length(indexes[[j]]) if(y[indexes[[i]][1]] < y[indexes[[j]]][1]) { yd <- c(rep(-1,li),rep(1,lj)) if(!is.null(class.weights)){ weight <- weightlabels[c(j,i)] wl <- c(1,0) nweights <- 2 } } else { yd <- c(rep(1,li),rep(-1,lj)) if(!is.null(class.weights)){ weight <- weightlabels[c(i,j)] wl <- c(0,1) nweigths <- 2 } } m <- li+lj suppressWarnings(vgr <- split(c(sample(1:li,li),sample((li+1):(li+lj),lj)),1:3)) pres <- yres <- NULL for(k in 1:3) { cind <- unsplit(vgr[-k],factor(rep((1:3)[-k],unlist(lapply(vgr[-k],length))))) if(is.null(class.weights)) cret <- ksvm(x[c(indexes[[i]],indexes[[j]]), ,drop=FALSE][cind,],yd[cind],type = type(ret),kernel=kernel,kpar = NULL, C=C, nu=nu, tol=tol, scaled=FALSE, cross = 0, fit = FALSE ,cache = cache, prob.model = FALSE) else cret <- ksvm(x[c(indexes[[i]],indexes[[j]]), 
,drop=FALSE][cind,],as.factor(lev(ret)[y[c(indexes[[i]],indexes[[j]])][cind]]),type = type(ret),kernel=kernel,kpar = NULL, C=C, nu=nu, tol=tol, scaled=FALSE, cross = 0, fit = FALSE, class.weights = class.weights,cache = cache, prob.model = FALSE) yres <- c(yres, yd[vgr[[k]]]) pres <- rbind(pres, predict(cret, x[c(indexes[[i]],indexes[[j]]), ,drop=FALSE][vgr[[k]],],type="decision")) } prob.model(ret)[[p]] <- .probPlatt(pres,yres) } } } if(type(ret) == "eps-svr"||type(ret) == "nu-svr"||type(ret)=="eps-bsvr"){ suppressWarnings(vgr<-split(sample(1:m,m),1:3)) pres <- NULL for(i in 1:3) { cind <- unsplit(vgr[-i],factor(rep((1:3)[-i],unlist(lapply(vgr[-i],length))))) cret <- ksvm(x[cind,],y[cind],type=type(ret),kernel=kernel,kpar = NULL,C=C,nu=nu,epsilon=epsilon,tol=tol,scaled=FALSE, cross = 0, fit = FALSE, cache = cache, prob.model = FALSE) cres <- predict(cret, x[vgr[[i]],]) if (!is.null(scaling(ret)$y.scale)) cres <- cres * scaling(ret)$y.scale$"scaled:scale" + scaling(ret)$y.scale$"scaled:center" pres <- rbind(pres, cres) } pres[abs(pres) > (5*sd(pres))] <- 0 prob.model(ret) <- list(sum(abs(pres))/dim(pres)[1]) } } return(ret) }) ## kernelmatrix interface setMethod("ksvm",signature(x="kernelMatrix"), function (x, y = NULL, type = NULL, C = 1, nu = 0.2, epsilon = 0.1, prob.model = FALSE, class.weights = NULL, cross = 0, fit = TRUE, cache = 40, tol = 0.001, shrinking = TRUE, ...) { sparse <- FALSE ## subsetting and na-handling for matrices ret <- new("ksvm") if (is.null(type)) type(ret) <- if (is.null(y)) "one-svc" else if (is.factor(y)) "C-svc" else "eps-svr" if(!is.null(type)) type(ret) <- match.arg(type,c("C-svc", "nu-svc", "kbb-svc", "spoc-svc", "C-bsvc", "one-svc", "eps-svr", "eps-bsvr", "nu-svr")) ncols <- ncol(x) m <- nrows <- nrow(x) if (!is(y,"vector") && !is.factor (y) & !is(y,"matrix") & !(type(ret)=="one-svc")) stop("y must be a vector or a factor.") if(!(type(ret)=="one-svc")) if(is(y,"vector") | is(y,"factor")) ym <- length(y) else if(is(y,"matrix")) ym <- dim(y)[1] else stop("y must be a matrix or a vector") if ((type(ret) != "one-svc") && ym != m) stop("x and y don't match.") if(nu > 1|| nu <0) stop("nu must be between 0 and 1.") weightlabels <- NULL nweights <- 0 weight <- 0 wl <- 0 ## in case of classification: transform factors into integers if (type(ret) == "one-svc") # one class classification --> set dummy y <- 1 else if (is.factor(y)) { lev(ret) <- levels (y) y <- as.integer (y) if (!is.null(class.weights)) { if (is.null(names (class.weights))) stop ("Weights have to be specified along with their corresponding level names!") weightlabels <- match (names(class.weights),lev(ret)) if (any(is.na(weightlabels))) stop ("At least one level name is missing or misspelled.") } } else { if ((type(ret) =="C-svc" || type(ret) == "nu-svc" ||type(ret) == "C-bsvc" || type(ret) == "spoc-svc" || type(ret) == "kbb-svc") && any(as.integer (y) != y)) stop ("dependent variable has to be of factor or integer type for classification mode.") if (type(ret) != "eps-svr" || type(ret) != "nu-svr"|| type(ret)!="eps-bsvr") lev(ret) <- sort(unique (y)) } ## initialize nclass(ret) <- length (unique(y)) p <- 0 svindex <- problem <- NULL sigma <- 0.1 degree <- offset <- scale <- 1 ktype <- 4 prior(ret) <- list(NULL) ## C classification if(type(ret) == "C-svc"){ indexes <- lapply(sort(unique(y)), function(kk) which(y == kk)) for (i in 1:(nclass(ret)-1)) { jj <- i+1 for(j in jj:nclass(ret)) { p <- p+1 ##prepare data li <- length(indexes[[i]]) lj <- length(indexes[[j]]) if(y[indexes[[i]][1]] < y[indexes[[j]]][1])
{ yd <- c(rep(-1,li),rep(1,lj)) if(!is.null(class.weights)){ weight <- class.weights[weightlabels[c(j,i)]] wl <- c(1,0) nweights <- 2 } } else { yd <- c(rep(1,li),rep(-1,lj)) if(!is.null(class.weights)){ weight <- class.weights[weightlabels[c(i,j)]] wl <- c(0,1) nweigths <- 2 } } boolabel <- yd >= 0 prior1 <- sum(boolabel) md <- length(yd) prior0 <- md - prior1 prior(ret)[[p]] <- list(prior1 = prior1, prior0 = prior0) xdd <- matrix(1,li+lj,1) resv <- .Call(smo_optim, as.double(t(xdd)), as.integer(nrow(xdd)), as.integer(ncol(xdd)), as.double(yd), as.double(as.vector(x[c(indexes[[i]],indexes[[j]]),c(indexes[[i]],indexes[[j]]),drop=FALSE])), as.integer(if (sparse) x[c(indexes[[i]],indexes[[j]]),c(indexes[[i]],indexes[[j]]),drop=FALSE]@ia else 0), as.integer(if (sparse) x[c(indexes[[i]],indexes[[j]]),c(indexes[[i]],indexes[[j]]),drop=FALSE]@ja else 0), as.integer(sparse), as.double(matrix(rep(-1,m))), ##linear term as.integer(ktype), as.integer(0), as.double(C), as.double(nu), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.integer(wl), ##weightlabel as.double(weight), as.integer(nweights), as.double(cache), as.double(tol), as.integer(shrinking)) reind <- sort(c(indexes[[i]],indexes[[j]]),method="quick",index.return=TRUE)$ix tmpres <- resv[c(-(li+lj+1),-(li+lj+2))][reind] ## alpha svind <- tmpres > 0 alpha(ret)[p] <- list(tmpres[svind]) ## coefficients alpha*y coef(ret)[p] <- list(alpha(ret)[[p]]*yd[reind][svind]) ## store SV indexes from current problem for later use in predict alphaindex(ret)[p] <- list(c(indexes[[i]],indexes[[j]])[reind][svind]) ## store Support Vectors ## xmatrix(ret)[p] <- list(xd[svind, svind,drop=FALSE]) ## save the indexes from all the SV in a vector (use unique?) svindex <- c(svindex,alphaindex(ret)[[p]]) ## store betas in a vector b(ret) <- c(b(ret), resv[li+lj+1]) ## store objective function values in vector obj(ret) <- c(obj(ret), resv[li+lj+2]) ## used to reconstruct indexes for the patterns matrix x from "indexes" (really usefull ?) 
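## Rough sketch (comment only, not executed): predict() later combines the pieces stored above, per pairwise problem p, into a decision value of roughly the form
##   f(z) = sum(coef(ret)[[p]] * K[z, alphaindex(ret)[[p]]]) - b(ret)[p]
## where K[z, ] is the row of the user-supplied kernel matrix for the new point z; the exact sign and voting conventions are those of the predict() method, this is only a sketch.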
problem[p] <- list(c(i,j)) ##store C in return object param(ret)$C <- C } } } ## nu classification if(type(ret) == "nu-svc"){ indexes <- lapply(sort(unique(y)), function(kk) which(y == kk)) for (i in 1:(nclass(ret)-1)) { jj <- i+1 for(j in jj:nclass(ret)) { p <- p+1 ##prepare data li <- length(indexes[[i]]) lj <- length(indexes[[j]]) ##xd <- matrix(0,(li+lj),(li+lj)) ##xdi <- 1:(li+lj) <= li ##xd[xdi,rep(TRUE,li+lj)] <- x[indexes[[i]],c(indexes[[i]],indexes[[j]])] ##xd[xdi == FALSE,rep(TRUE,li+lj)] <- x[indexes[[j]],c(indexes[[i]],indexes[[j]])] if(y[indexes[[i]][1]] < y[indexes[[j]]][1]) { yd <- c(rep(-1,li),rep(1,lj)) if(!is.null(class.weights)){ weight <- class.weights[weightlabels[c(j,i)]] wl <- c(1,0) nweights <- 2 } } else { yd <- c(rep(1,li),rep(-1,lj)) if(!is.null(class.weights)){ weight <- class.weights[weightlabels[c(i,j)]] wl <- c(0,1) nweigths <- 2 } } boolabel <- yd >= 0 prior1 <- sum(boolabel) md <- length(yd) prior0 <- md - prior1 prior(ret)[[p]] <- list(prior1 = prior1, prior0 = prior0) xdd <- matrix(1,li+lj,1) resv <- .Call(smo_optim, as.double(t(xdd)), as.integer(nrow(xdd)), as.integer(ncol(xdd)), as.double(yd), as.double(x[c(indexes[[i]],indexes[[j]]),c(indexes[[i]],indexes[[j]]),drop=FALSE]), as.integer(if (sparse) x[c(indexes[[i]],indexes[[j]]),c(indexes[[i]],indexes[[j]]),drop=FALSE]@ia else 0), as.integer(if (sparse) x[c(indexes[[i]],indexes[[j]]),c(indexes[[i]],indexes[[j]]),drop=FALSE]@ja else 0), as.integer(sparse), as.double(matrix(rep(-1,m))), #linear term as.integer(ktype), as.integer(1), as.double(C), as.double(nu), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.integer(wl), #weightlabl. as.double(weight), as.integer(nweights), as.double(cache), as.double(tol), as.integer(shrinking)) reind <- sort(c(indexes[[i]],indexes[[j]]),method="quick",index.return=TRUE)$ix tmpres <- resv[c(-(li+lj+1),-(li+lj+2))][reind] alpha(ret)[p] <- coef(ret)[p] <- list(tmpres[tmpres != 0]) ##store SV indexes from current problem for later use in predict alphaindex(ret)[p] <- list(c(indexes[[i]],indexes[[j]])[reind][tmpres != 0]) ## store Support Vectors ## xmatrix(ret)[p] <- list(xd[tmpres != 0,tmpres != 0,drop=FALSE]) ##save the indexes from all the SV in a vector (use unique!) 
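## Note: for nu-svc the solver output is stored unchanged as both alpha and coef above (unlike the C-svc branch, no extra multiplication by the labels is applied here); nu itself upper-bounds the fraction of margin errors and lower-bounds the fraction of support vectors.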
svindex <- c(svindex,alphaindex(ret)[[p]]) ## store betas in a vector b(ret) <- c(b(ret), resv[li+lj+1]) ## store objective function values in vector obj(ret) <- c(obj(ret), resv[li+lj+2]) ## used to reconstruct indexes for the patterns matrix x from "indexes" problem[p] <- list(c(i,j)) param(ret)$nu <- nu ## margin(ret)[p] <- (min(kernelMult(kernel,xd[1:li,],,alpha(ret)[[p]][1:li])) - max(kernelMult(kernel,xd[li:(li+lj),],,alpha(ret)[[p]][li:(li+lj)])))/2 } } } ## Bound constraint C classification if(type(ret) == "C-bsvc"){ if(!is.null(class.weights)) weightedC <- class.weights[weightlabels] * rep(C,nclass(ret)) else weightedC <- rep(C,nclass(ret)) indexes <- lapply(sort(unique(y)), function(kk) which(y == kk)) for (i in 1:(nclass(ret)-1)) { jj <- i+1 for(j in jj:nclass(ret)) { p <- p+1 ##prepare data li <- length(indexes[[i]]) lj <- length(indexes[[j]]) if(y[indexes[[i]][1]] < y[indexes[[j]]][1]) { yd <- c(rep(-1,li),rep(1,lj)) if(!is.null(class.weights)){ weight <- class.weights[weightlabels[c(j,i)]] wl <- c(1,0) nweights <- 2 } } else { yd <- c(rep(1,li),rep(-1,lj)) if(!is.null(class.weights)){ weight <- class.weights[weightlabels[c(i,j)]] wl <- c(0,1) nweigths <- 2 } } boolabel <- yd >= 0 prior1 <- sum(boolabel) md <- length(yd) prior0 <- md - prior1 prior(ret)[[p]] <- list(prior1 = prior1, prior0 = prior0) xdd <- matrix(rnorm(li+lj),li+lj,1) resv <- .Call(tron_optim, as.double(t(xdd)), as.integer(nrow(xdd)), as.integer(ncol(xdd)), as.double(yd), as.double(x[c(indexes[[i]],indexes[[j]]),c(indexes[[i]],indexes[[j]]),drop=FALSE]), as.integer(if (sparse) x[c(indexes[[i]],indexes[[j]]),c(indexes[[i]],indexes[[j]]),drop=FALSE]@ia else 0), as.integer(if (sparse) x[c(indexes[[i]],indexes[[j]]),c(indexes[[i]],indexes[[j]]),drop=FALSE]@ja else 0), as.integer(sparse), as.integer(2), as.double(0), ##countc as.integer(ktype), as.integer(5), as.double(C), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.double(1), ## cost value of alpha seeding as.double(2), ## step value of alpha seeding as.integer(wl), ##weightlabel as.double(weight), as.integer(nweights), as.double(weightedC), as.double(cache), as.double(tol), as.integer(10), ##qpsize as.integer(shrinking)) reind <- sort(c(indexes[[i]],indexes[[j]]),method="quick",index.return=TRUE)$ix alpha(ret)[p] <- list(resv[-(li+lj+1)][reind][resv[-(li+lj+1)][reind] > 0]) ## nonzero alpha*y coef(ret)[p] <- list(alpha(ret)[[p]] * yd[reind][resv[-(li+lj+1)][reind] > 0]) ## store SV indexes from current problem for later use in predict alphaindex(ret)[p] <- list(c(indexes[[i]],indexes[[j]])[reind][resv[-(li+lj+1)][reind] > 0]) ## store Support Vectors ## xmatrix(ret)[p] <- list(xd[resv > 0 ,resv > 0,drop = FALSE]) ## save the indexes from all the SV in a vector (use unique?) svindex <- c(svindex,alphaindex(ret)[[p]]) ## store betas in a vector b(ret) <- - sapply(coef(ret),sum) ## store objective function values vector obj(ret) <- c(obj(ret), resv[(li+lj+1)]) ## used to reconstruct indexes for the patterns matrix x from "indexes" (really usefull ?) 
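## Note: the bound-constraint variant (C-bsvc) is solved with tron_optim rather than smo_optim and does not return a separate offset, which is why b(ret) is set above to the negative sum of each pairwise problem's coefficients.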
problem[p] <- list(c(i,j)) ##store C in return object param(ret)$C <- C } } } ## SPOC multiclass classification if(type(ret) =="spoc-svc") { if(!is.null(class.weights)) weightedC <- class.weights[weightlabels] * rep(C,nclass(ret)) else weightedC <- rep(C,nclass(ret)) yd <- sort(y,method="quick", index.return = TRUE) x <- matrix(x[yd$ix,yd$ix],nrow=dim(x)[1]) count <- 0 xdd <- matrix(1,m,1) resv <- .Call(tron_optim, as.double(t(xdd)), as.integer(nrow(xdd)), as.integer(ncol(xdd)), as.double(rep(yd$x-1,2)), as.double(x), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.integer(nclass(ret)), as.integer(count), as.integer(ktype), as.integer(7), as.double(C), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.double(C), as.double(2), #Cstep as.integer(0), #weightlabel as.double(0), as.integer(0), as.double(weightedC), as.double(cache), as.double(tol), as.integer(10), #qpsize as.integer(shrinking)) reind <- sort(yd$ix,method="quick",index.return=TRUE)$ix alpha(ret) <- t(matrix(resv[-(nclass(ret)*nrow(xdd)+1)],nclass(ret)))[reind,,drop=FALSE] coef(ret) <- lapply(1:nclass(ret), function(x) alpha(ret)[,x][alpha(ret)[,x]!=0]) names(coef(ret)) <- lev(ret) alphaindex(ret) <- lapply(sort(unique(y)), function(x) which(alpha(ret)[,x]!=0)) ## xmatrix(ret) <- x names(alphaindex(ret)) <- lev(ret) svindex <- which(rowSums(alpha(ret)!=0)!=0) b(ret) <- 0 obj(ret) <- resv[(nclass(ret)*nrow(xdd)+1)] param(ret)$C <- C } ## KBB multiclass classification if(type(ret) =="kbb-svc") { if(!is.null(class.weights)) weightedC <- class.weights[weightlabels] * rep(C,nclass(ret)) else weightedC <- rep(C,nclass(ret)) yd <- sort(y,method="quick", index.return = TRUE) x <- matrix(x[yd$ix,yd$ix],nrow=dim(x)[1]) count <- sapply(unique(yd$x), function(c) length(yd$x[yd$x==c])) xdd <- matrix(1,m,1) resv <- .Call(tron_optim, as.double(t(xdd)), as.integer(nrow(xdd)), as.integer(ncol(xdd)), as.double(yd$x-1), as.double(x), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.integer(nclass(ret)), as.integer(count), as.integer(ktype), as.integer(8), as.double(C), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.double(1), #Cbegin as.double(2), #Cstep as.integer(0), #weightlabl. as.double(0), as.integer(0), as.double(weightedC), as.double(cache), as.double(tol), as.integer(10), #qpsize as.integer(shrinking)) reind <- sort(yd$ix,method="quick",index.return=TRUE)$ix alpha(ret) <- matrix(resv[-(nrow(x)*(nclass(ret)-1) + 1)],nrow(x))[reind,,drop=FALSE] coef(ret) <- lapply(1:(nclass(ret)-1), function(x) alpha(ret)[,x][alpha(ret)[,x]!=0]) alphaindex(ret) <- lapply(sort(unique(y)), function(x) which((y == x) & (rowSums(alpha(ret))!=0))) svindex <- which(rowSums(alpha(ret)!=0)!=0) b(ret) <- - sapply(coef(ret),sum) obj(ret) <- resv[(nrow(x)*(nclass(ret)-1) + 1)] param(ret)$C <- C } ## Novelty detection if(type(ret) =="one-svc") { xdd <- matrix(1,m,1) resv <- .Call(smo_optim, as.double(t(xdd)), as.integer(nrow(xdd)), as.integer(ncol(xdd)), as.double(matrix(rep(1,m))), as.double(x), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.double(matrix(rep(-1,m))), as.integer(ktype), as.integer(2), as.double(C), as.double(nu), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.integer(0), #weightlabl. 
as.double(0), as.integer(0), as.double(cache), as.double(tol), as.integer(shrinking)) tmpres <- resv[c(-(m+1),-(m+2))] alpha(ret) <- coef(ret) <- tmpres[tmpres != 0] svindex <- alphaindex(ret) <- which(tmpres != 0) ## xmatrix(ret) <- x[svindex,svindex,drop=FALSE] b(ret) <- resv[(m+1)] obj(ret) <- resv[(m+2)] param(ret)$nu <- nu } ## epsilon regression if(type(ret) =="eps-svr") { xdd <- matrix(1,m,1) resv <- .Call(smo_optim, as.double(t(xdd)), as.integer(nrow(xdd)), as.integer(ncol(xdd)), as.double(y), as.double(x), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.double(matrix(rep(-1,m))), as.integer(ktype), as.integer(3), as.double(C), as.double(nu), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.integer(0), #weightlabl. as.double(0), as.integer(0), as.double(cache), as.double(tol), as.integer(shrinking)) tmpres <- resv[c(-(m+1),-(m+2))] alpha(ret) <- coef(ret) <- tmpres[tmpres != 0] svindex <- alphaindex(ret) <- which(tmpres != 0) ## xmatrix(ret) <- x[svindex,svindex ,drop=FALSE] b(ret) <- resv[(m+1)] obj(ret) <- resv[(m+2)] param(ret)$epsilon <- epsilon param(ret)$C <- C } ## nu regression if(type(ret) =="nu-svr") { xdd <- matrix(1,m,1) resv <- .Call(smo_optim, as.double(t(xdd)), as.integer(nrow(xdd)), as.integer(ncol(xdd)), as.double(y), as.double(x), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.double(matrix(rep(-1,m))), as.integer(ktype), as.integer(4), as.double(C), as.double(nu), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.integer(0), as.double(0), as.integer(0), as.double(cache), as.double(tol), as.integer(shrinking)) tmpres <- resv[c(-(m+1),-(m+2))] alpha(ret) <- coef(ret) <- tmpres[tmpres!=0] svindex <- alphaindex(ret) <- which(tmpres != 0) ## xmatrix(ret) <- x[svindex,svindex,drop=FALSE] b(ret) <- resv[(m+1)] obj(ret) <- resv[(m+2)] param(ret)$epsilon <- epsilon param(ret)$nu <- nu } ## bound constraint eps regression if(type(ret) =="eps-bsvr") { xdd <- matrix(1,m,1) resv <- .Call(tron_optim, as.double(t(xdd)), as.integer(nrow(xdd)), as.integer(ncol(xdd)), as.double(y), as.double(x), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.integer(2), as.integer(0), as.integer(ktype), as.integer(6), as.double(C), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.double(1), #Cbegin as.double(2), #Cstep as.integer(0), #weightlabl. as.double(0), as.integer(0), as.double(0), as.double(cache), as.double(tol), as.integer(10), #qpsize as.integer(shrinking)) tmpres <- resv[-(m+1)] alpha(ret) <- coef(ret) <- tmpres[tmpres!=0] svindex <- alphaindex(ret) <- which(tmpres != 0) ## xmatrix(ret) <- x[svindex,,drop=FALSE] b(ret) <- -sum(alpha(ret)) obj(ret) <- resv[(m+1)] param(ret)$epsilon <- epsilon param(ret)$C <- C } kcall(ret) <- match.call() kernelf(ret) <- " Kernel matrix used as input." ymatrix(ret) <- y SVindex(ret) <- unique(sort(svindex,method="quick")) nSV(ret) <- length(unique(svindex)) if(nSV(ret)==0) stop("No Support Vectors found. 
You may want to change your parameters") fitted(ret) <- if (fit) predict(ret, as.kernelMatrix(x[,SVindex(ret),drop = FALSE])) else NULL if (fit){ if(type(ret)=="C-svc"||type(ret)=="nu-svc"||type(ret)=="spoc-svc"||type(ret)=="kbb-svc"||type(ret)=="C-bsvc") error(ret) <- 1 - .classAgreement(table(y,as.integer(fitted(ret)))) if(type(ret)=="one-svc") error(ret) <- sum(!fitted(ret))/m if(type(ret)=="eps-svr"||type(ret)=="nu-svr"||type(ret)=="eps-bsvr") error(ret) <- drop(crossprod(fitted(ret) - y)/m) } cross(ret) <- -1 if(cross == 1) cat("\n","cross should be >1 no cross-validation done!","\n","\n") else if (cross > 1) { cerror <- 0 suppressWarnings(vgr <- split(sample(1:m,m),1:cross)) for(i in 1:cross) { cind <- unsplit(vgr[-i],factor(rep((1:cross)[-i],unlist(lapply(vgr[-i],length))))) if(type(ret)=="C-svc"||type(ret)=="nu-svc"||type(ret)=="spoc-svc"||type(ret)=="kbb-svc"||type(ret)=="C-bsvc") { if(is.null(class.weights)) cret <- ksvm(as.kernelMatrix(x[cind,cind]),y[cind],type = type(ret), C=C, nu=nu, tol=tol, cross = 0, fit = FALSE ,cache = cache) else cret <- ksvm(as.kernelMatrix(x[cind,cind]), as.factor(lev(ret)[y[cind]]),type = type(ret), C=C, nu=nu, tol=tol, cross = 0, fit = FALSE, class.weights = class.weights,cache = cache) cres <- predict(cret, as.kernelMatrix(x[vgr[[i]], cind,drop = FALSE][,SVindex(cret),drop=FALSE])) cerror <- (1 - .classAgreement(table(y[vgr[[i]]],as.integer(cres))))/cross + cerror } if(type(ret)=="one-svc") { cret <- ksvm(as.kernelMatrix(x[cind,cind]),type = type(ret), C=C, nu=nu, tol=tol, cross = 0, fit = FALSE ,cache = cache) cres <- predict(cret, as.kernelMatrix(x[vgr[[i]], cind,drop = FALSE][,SVindex(cret),drop=FALSE])) cerror <- (1 - sum(cres)/length(cres))/cross + cerror } if(type(ret)=="eps-svr"||type(ret)=="nu-svr"||type(ret)=="eps-bsvr") { cret <- ksvm(as.kernelMatrix(x[cind,cind]),y[cind],type=type(ret), C=C,nu=nu,epsilon=epsilon,tol=tol, cross = 0, fit = FALSE, cache = cache, prob.model = FALSE) cres <- predict(cret, as.kernelMatrix(x[vgr[[i]], cind,drop = FALSE][,SVindex(cret),drop=FALSE])) cerror <- drop(crossprod(cres - y[vgr[[i]]])/m) + cerror } } cross(ret) <- cerror } prob.model(ret) <- list(NULL) if(prob.model) { if(type(ret)=="C-svc"||type(ret)=="nu-svc"||type(ret)=="C-bsvc") { p <- 0 for (i in 1:(nclass(ret)-1)) { jj <- i+1 for(j in jj:nclass(ret)) { p <- p+1 ##prepare data li <- length(indexes[[i]]) lj <- length(indexes[[j]]) if(y[indexes[[i]][1]] < y[indexes[[j]]][1]) { yd <- c(rep(-1,li),rep(1,lj)) if(!is.null(class.weights)){ weight <- weightlabels[c(j,i)] wl <- c(1,0) nweights <- 2 } } else { yd <- c(rep(1,li),rep(-1,lj)) if(!is.null(class.weights)){ weight <- weightlabels[c(i,j)] wl <- c(0,1) nweigths <- 2 } } m <- li+lj suppressWarnings(vgr <- split(c(sample(1:li,li),sample((li+1):(li+lj),lj)),1:3)) pres <- yres <- NULL for(k in 1:3) { cind <- unsplit(vgr[-k],factor(rep((1:3)[-k],unlist(lapply(vgr[-k],length))))) if(is.null(class.weights)) cret <- ksvm(as.kernelMatrix(x[c(indexes[[i]],indexes[[j]]),c(indexes[[i]],indexes[[j]]),drop=FALSE][cind,cind]),yd[cind],type = type(ret), C=C, nu=nu, tol=tol, cross = 0, fit = FALSE ,cache = cache, prob.model=FALSE) else cret <- ksvm(as.kernelMatrix(x[c(indexes[[i]],indexes[[j]]),c(indexes[[i]],indexes[[j]]),drop=FALSE][cind,cind]), as.factor(lev(ret)[y[c(indexes[[i]],indexes[[j]])][cind]]),type = type(ret), C=C, nu=nu, tol=tol, cross = 0, fit = FALSE, class.weights = class.weights,cache = cache, prob.model=FALSE) yres <- c(yres,yd[vgr[[k]]]) pres <- rbind(pres,predict(cret, 
as.kernelMatrix(x[c(indexes[[i]],indexes[[j]]),c(indexes[[i]],indexes[[j]]),drop=FALSE][vgr[[k]], cind,drop = FALSE][,SVindex(cret),drop = FALSE]),type="decision")) } prob.model(ret)[[p]] <- .probPlatt(pres,yres) } } } if(type(ret) == "eps-svr"||type(ret) == "nu-svr"||type(ret)=="eps-bsvr"){ suppressWarnings(vgr<-split(sample(1:m,m),1:3)) pres <- NULL for(i in 1:3) { cind <- unsplit(vgr[-i],factor(rep((1:3)[-i],unlist(lapply(vgr[-i],length))))) cret <- ksvm(as.kernelMatrix(x[cind,cind]),y[cind],type=type(ret), C=C, nu=nu, epsilon=epsilon, tol=tol, cross = 0, fit = FALSE, cache = cache, prob.model = FALSE) cres <- predict(cret, as.kernelMatrix(x[vgr[[i]], cind, drop = FALSE][,SVindex(cret), drop = FALSE])) pres <- rbind(pres,predict(cret, as.kernelMatrix(x[vgr[[i]],cind , drop = FALSE][,SVindex(cret) ,drop = FALSE]),type="decision")) } pres[abs(pres) > (5*sd(pres))] <- 0 prob.model(ret) <- list(sum(abs(pres))/dim(pres)[1]) } } return(ret) }) .classAgreement <- function (tab) { n <- sum(tab) if (!is.null(dimnames(tab))) { lev <- intersect(colnames(tab), rownames(tab)) p0 <- sum(diag(tab[lev, lev])) / n } else { m <- min(dim(tab)) p0 <- sum(diag(tab[1:m, 1:m])) / n } return(p0) } ## List Interface setMethod("ksvm",signature(x="list"), function (x, y = NULL, type = NULL, kernel = "stringdot", kpar = list(length = 4, lambda = 0.5), C = 1, nu = 0.2, epsilon = 0.1, prob.model = FALSE, class.weights = NULL, cross = 0, fit = TRUE, cache = 40, tol = 0.001, shrinking = TRUE, ... ,na.action = na.omit) { ret <- new("ksvm") if (is.null(y)) x <- na.action(x) n.action(ret) <- na.action sparse <- FALSE if (is.null(type)) type(ret) <- if (is.null(y)) "one-svc" else if (is.factor(y)) "C-svc" else "eps-svr" if(!is.null(type)) type(ret) <- match.arg(type,c("C-svc", "nu-svc", "kbb-svc", "spoc-svc", "C-bsvc", "one-svc", "eps-svr", "eps-bsvr", "nu-svr")) m <- length(x) if(is.character(kernel)){ kernel <- match.arg(kernel,c("rbfdot","polydot","tanhdot","vanilladot","laplacedot","besseldot","anovadot","splinedot","stringdot")) if(is.character(kpar)) if(kernel == "tanhdot" || kernel == "vanilladot" || kernel == "polydot"|| kernel == "besseldot" || kernel== "anovadot"|| kernel=="splinedot" || kernel == "rbfdot" || kernel == "laplacedot" ) { stop("List interface supports only the stringdot kernel.") } } if(is(kernel,"kernel") & !is(kernel,"stringkernel")) stop("List interface supports only the stringdot kernel.") if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") if (!is(y,"vector") && !is.factor(y) & !is(y,"matrix") & !(type(ret)=="one-svc")) stop("y must be a vector or a factor.") if(!(type(ret)=="one-svc")) if(is(y,"vector") | is(y,"factor")) ym <- length(y) else if(is(y,"matrix")) ym <- dim(y)[1] else stop("y must be a matrix or a vector") if ((type(ret) != "one-svc") && ym != m) stop("x and y don't match.") if(nu > 1|| nu <0) stop("nu must be between 0 an 1.") weightlabels <- NULL nweights <- 0 weight <- 0 wl <- 0 ## in case of classification: transform factors into integers if (type(ret) == "one-svc") # one class classification --> set dummy y <- 1 else if (is.factor(y)) { lev(ret) <- levels (y) y <- as.integer (y) if (!is.null(class.weights)) { if (is.null(names (class.weights))) stop ("Weights have to be specified along with their according level names !") weightlabels <- match (names(class.weights),lev(ret)) if (any(is.na(weightlabels))) stop ("At least one level 
name is missing or misspelled.") } } else { if ((type(ret) =="C-svc" || type(ret) == "nu-svc" ||type(ret) == "C-bsvc" || type(ret) == "spoc-svc" || type(ret) == "kbb-svc") && any(as.integer (y) != y)) stop ("dependent variable has to be of factor or integer type for classification mode.") if (type(ret) != "eps-svr" || type(ret) != "nu-svr"|| type(ret)!="eps-bsvr") lev(ret) <- sort(unique (y)) } ## initialize if (type(ret) =="C-svc" || type(ret) == "nu-svc" ||type(ret) == "C-bsvc" || type(ret) == "spoc-svc" || type(ret) == "kbb-svc") nclass(ret) <- length (unique(y)) p <- 0 K <- 0 svindex <- problem <- NULL ktype <- 4 prior(ret) <- list(NULL) sigma <- 0.1 degree <- offset <- scale <- 1 ## C classification if(type(ret) == "C-svc"){ indexes <- lapply(sort(unique(y)), function(kk) which(y == kk)) for (i in 1:(nclass(ret)-1)) { jj <- i+1 for(j in jj:nclass(ret)) { p <- p+1 ##prepare data li <- length(indexes[[i]]) lj <- length(indexes[[j]]) if(y[indexes[[i]][1]] < y[indexes[[j]]][1]) { yd <- c(rep(-1,li),rep(1,lj)) if(!is.null(class.weights)){ weight <- class.weights[weightlabels[c(j,i)]] wl <- c(1,0) nweights <- 2 } } else { yd <- c(rep(1,li),rep(-1,lj)) if(!is.null(class.weights)){ weight <- class.weights[weightlabels[c(i,j)]] wl <- c(0,1) nweigths <- 2 } } boolabel <- yd >= 0 prior1 <- sum(boolabel) md <- length(yd) prior0 <- md - prior1 prior(ret)[[p]] <- list(prior1 = prior1, prior0 = prior0) K <- kernelMatrix(kernel,x[c(indexes[[i]],indexes[[j]])]) xdd <- matrix(1,li+lj,1) resv <- .Call(smo_optim, as.double(t(xdd)), as.integer(nrow(xdd)), as.integer(ncol(xdd)), as.double(yd), as.double(K), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.double(matrix(rep(-1,m))), ##linear term as.integer(ktype), as.integer(0), as.double(C), as.double(nu), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.integer(wl), ##weightlabel as.double(weight), as.integer(nweights), as.double(cache), as.double(tol), as.integer(shrinking)) reind <- sort(c(indexes[[i]],indexes[[j]]),method="quick",index.return=TRUE)$ix tmpres <- resv[c(-(li+lj+1),-(li+lj+2))][reind] ## alpha alpha(ret)[p] <- list(tmpres[tmpres > 0]) ## coefficients alpha*y coef(ret)[p] <- list(alpha(ret)[[p]]*yd[reind][tmpres > 0]) ## store SV indexes from current problem for later use in predict alphaindex(ret)[p] <- list(c(indexes[[i]],indexes[[j]])[reind][tmpres>0]) ## store Support Vectors xmatrix(ret)[p] <- list(x[c(indexes[[i]],indexes[[j]])][reind][tmpres > 0]) ## save the indexes from all the SV in a vector (use unique?) svindex <- c(svindex,alphaindex(ret)[[p]]) ## store betas in a vector b(ret) <- c(b(ret), resv[li+lj+1]) obj(ret) <- c(obj(ret),resv[li+lj+2]) ## used to reconstruct indexes for the patterns matrix x from "indexes" (really usefull ?) 
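## Note: with the list interface there is no vectorial representation, so the string kernel matrix K is computed explicitly for each pairwise sub-problem via kernelMatrix() above and handed to the solver as a precomputed kernel (ktype 4).
## Illustrative call only (commented out, not part of this file; data set and parameter values are examples, not recommendations):
##   data(reuters)                                  # ships with kernlab: 'reuters', 'rlabels'
##   sk  <- stringdot(type = "spectrum", length = 4)
##   mod <- ksvm(reuters, rlabels, kernel = sk, C = 5, cross = 3)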
problem[p] <- list(c(i,j)) ##store C in return object param(ret)$C <- C ## margin(ret)[p] <- (min(kernelMult(kernel,xd[1:li,],,alpha(ret)[[p]][1:li])) - max(kernelMult(kernel,xd[li:(li+lj),],,alpha(ret)[[p]][li:(li+lj)])))/2 } } } ## nu classification if(type(ret) == "nu-svc"){ indexes <- lapply(sort(unique(y)), function(kk) which(y == kk)) for (i in 1:(nclass(ret)-1)) { jj <- i+1 for(j in jj:nclass(ret)) { p <- p+1 ##prepare data li <- length(indexes[[i]]) lj <- length(indexes[[j]]) if(y[indexes[[i]][1]] < y[indexes[[j]]][1]) { yd <- c(rep(-1,li),rep(1,lj)) if(!is.null(class.weights)){ weight <- class.weights[weightlabels[c(j,i)]] wl <- c(1,0) nweights <- 2 } } else { yd <- c(rep(1,li),rep(-1,lj)) if(!is.null(class.weights)){ weight <- class.weights[weightlabels[c(i,j)]] wl <- c(0,1) nweigths <- 2 } } boolabel <- yd >= 0 prior1 <- sum(boolabel) md <- length(yd) prior0 <- md - prior1 prior(ret)[[p]] <- list(prior1 = prior1, prior0 = prior0) K <- kernelMatrix(kernel,x[c(indexes[[i]],indexes[[j]])]) xdd <- matrix(1,li+lj,1) resv <- .Call(smo_optim, as.double(t(xdd)), as.integer(nrow(xdd)), as.integer(ncol(xdd)), as.double(yd), as.double(K), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.double(matrix(rep(-1,m))), #linear term as.integer(ktype), as.integer(1), as.double(C), as.double(nu), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.integer(wl), #weightlabl. as.double(weight), as.integer(nweights), as.double(cache), as.double(tol), as.integer(shrinking)) reind <- sort(c(indexes[[i]],indexes[[j]]),method="quick",index.return=TRUE)$ix tmpres <- resv[c(-(li+lj+1),-(li+lj+2))][reind] alpha(ret)[p] <- coef(ret)[p] <- list(tmpres[tmpres != 0]) ##store SV indexes from current problem for later use in predict alphaindex(ret)[p] <- list(c(indexes[[i]],indexes[[j]])[reind][tmpres!=0]) ## store Support Vectors xmatrix(ret)[p] <- list(x[c(indexes[[i]],indexes[[j]])][reind][tmpres != 0]) ##save the indexes from all the SV in a vector (use unique!) 
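## Note: for the list interface the support "vectors" are the raw list elements (e.g. character strings) themselves; they are kept in xmatrix(ret) above so that predict() can evaluate the string kernel between new documents and the stored support elements.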
svindex <- c(svindex,alphaindex(ret)[[p]]) ## store betas in a vector b(ret) <- c(b(ret), resv[li+lj+1]) obj(ret) <- c(obj(ret), resv[li+lj+2]) ## used to reconstruct indexes for the patterns matrix x from "indexes" problem[p] <- list(c(i,j)) param(ret)$nu <- nu ## margin(ret)[p] <- (min(kernelMult(kernel,xd[1:li,],,alpha(ret)[[p]][1:li])) - max(kernelMult(kernel,xd[li:(li+lj),],,alpha(ret)[[p]][li:(li+lj)])))/2 } } } ## Bound constraint C classification if(type(ret) == "C-bsvc"){ if(!is.null(class.weights)) weightedC <- class.weights[weightlabels] * rep(C,nclass(ret)) else weightedC <- rep(C,nclass(ret)) indexes <- lapply(sort(unique(y)), function(kk) which(y == kk)) for (i in 1:(nclass(ret)-1)) { jj <- i+1 for(j in jj:nclass(ret)) { p <- p+1 ##prepare data li <- length(indexes[[i]]) lj <- length(indexes[[j]]) if(y[indexes[[i]][1]] < y[indexes[[j]]][1]) { yd <- c(rep(-1,li),rep(1,lj)) if(!is.null(class.weights)){ weight <- class.weights[weightlabels[c(j,i)]] wl <- c(1,0) nweights <- 2 } } else { yd <- c(rep(1,li),rep(-1,lj)) if(!is.null(class.weights)){ weight <- class.weights[weightlabels[c(i,j)]] wl <- c(0,1) nweigths <- 2 } } boolabel <- yd >= 0 prior1 <- sum(boolabel) md <- length(yd) prior0 <- md - prior1 prior(ret)[[p]] <- list(prior1 = prior1, prior0 = prior0) K <- kernelMatrix(kernel,x[c(indexes[[i]],indexes[[j]])]) xdd <- matrix(1,li+lj,1) resv <- .Call(tron_optim, as.double(t(xdd)), as.integer(nrow(xdd)), as.integer(ncol(xdd)), as.double(yd), as.double(K), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.integer(2), as.double(0), ##countc as.integer(ktype), as.integer(5), as.double(C), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.double(1), ## cost value of alpha seeding as.double(2), ## step value of alpha seeding as.integer(wl), ##weightlabel as.double(weight), as.integer(nweights), as.double(weightedC), as.double(cache), as.double(tol), as.integer(10), ##qpsize as.integer(shrinking)) reind <- sort(c(indexes[[i]],indexes[[j]]),method="quick",index.return=TRUE)$ix alpha(ret)[p] <- list(resv[-(li+lj+1)][reind][resv[-(li+lj+1)][reind] > 0]) ## nonzero alpha*y coef(ret)[p] <- list(alpha(ret)[[p]] * yd[reind][resv[-(li+lj+1)][reind] > 0]) ## store SV indexes from current problem for later use in predict alphaindex(ret)[p] <- list(c(indexes[[i]],indexes[[j]])[reind][resv[-(li+lj+1)][reind] > 0]) ## store Support Vectors xmatrix(ret)[p] <- list(x[c(indexes[[i]],indexes[[j]])][reind][resv[-(li+lj+1)][reind] > 0]) ## save the indexes from all the SV in a vector (use unique?) svindex <- c(svindex,alphaindex(ret)[[p]]) ## store betas in a vector b(ret) <- - sapply(coef(ret),sum) obj(ret) <- c(obj(ret),resv[(li+lj+1)]) ## used to reconstruct indexes for the patterns matrix x from "indexes" (really usefull ?) 
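## Note: weightedC, built at the top of this block, scales the per-class cost C by the supplied class.weights (or repeats a constant C when no weights are given); it is passed to the bound-constraint tron_optim call alongside the scalar cost arguments.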
problem[p] <- list(c(i,j)) ##store C in return object param(ret)$C <- C ## margin(ret)[p] <- (min(kernelMult(kernel,xd[1:li,],,alpha(ret)[[p]][1:li])) - max(kernelMult(kernel,xd[li:(li+lj),],,alpha(ret)[[p]][li:(li+lj)])))/2 } } } ## SPOC multiclass classification if(type(ret) =="spoc-svc") { if(!is.null(class.weights)) weightedC <- class.weights[weightlabels] * rep(C,nclass(ret)) else weightedC <- rep(C,nclass(ret)) yd <- sort(y,method="quick", index.return = TRUE) x <- x[yd$ix] count <- 0 K <- kernelMatrix(kernel,x) xdd <- matrix(1,length(x),1) resv <- .Call(tron_optim, as.double(t(xdd)), as.integer(nrow(xdd)), as.integer(ncol(xdd)), as.double(rep(yd$x-1,2)), as.double(K), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.integer(nclass(ret)), as.integer(count), as.integer(ktype), as.integer(7), as.double(C), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.double(C), as.double(2), #Cstep as.integer(0), #weightlabel as.double(0), as.integer(0), as.double(weightedC), as.double(cache), as.double(tol), as.integer(10), #qpsize as.integer(shrinking)) reind <- sort(yd$ix,method="quick",index.return=TRUE)$ix alpha(ret) <- t(matrix(resv[-(nclass(ret)*nrow(xdd) + 1)],nclass(ret)))[reind,,drop=FALSE] coef(ret) <- lapply(1:nclass(ret), function(x) alpha(ret)[,x][alpha(ret)[,x]!=0]) names(coef(ret)) <- lev(ret) alphaindex(ret) <- lapply(1:nclass(ret), function(x) which(alpha(ret)[,x]!=0)) names(alphaindex(ret)) <- lev(ret) xmatrix(ret) <- x svindex <- which(rowSums(alpha(ret)!=0)!=0) b(ret) <- 0 obj(ret) <- resv[(nclass(ret)*nrow(xdd) + 1)] param(ret)$C <- C } ## KBB multiclass classification if(type(ret) =="kbb-svc") { if(!is.null(class.weights)) weightedC <- weightlabels * rep(C,nclass(ret)) else weightedC <- rep(C,nclass(ret)) yd <- sort(y,method="quick", index.return = TRUE) x <- x[yd$ix] count <- sapply(unique(yd$x), function(c) length(yd$x[yd$x==c])) K <- kernelMatrix(kernel,x) xdd <- matrix(1,length(x),1) resv <- .Call(tron_optim, as.double(t(xdd)), as.integer(nrow(xdd)), as.integer(ncol(xdd)), as.double(yd$x-1), as.double(K), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.integer(nclass(ret)), as.integer(count), as.integer(ktype), as.integer(8), as.double(C), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.double(1), #Cbegin as.double(2), #Cstep as.integer(0), #weightlabl. 
as.double(0), as.integer(0), as.double(weightedC), as.double(cache), as.double(tol), as.integer(10), #qpsize as.integer(shrinking)) reind <- sort(yd$ix,method="quick",index.return=TRUE)$ix alpha(ret) <- matrix(resv[-((nclass(ret)-1)*length(x)+1)],length(x))[reind,,drop=FALSE] xmatrix(ret) <- x<- x[reind] coef(ret) <- lapply(1:(nclass(ret)-1), function(x) alpha(ret)[,x][alpha(ret)[,x]!=0]) alphaindex(ret) <- lapply(sort(unique(y)), function(x) which((y == x) & (rowSums(alpha(ret))!=0))) svindex <- which(rowSums(alpha(ret)!=0)!=0) b(ret) <- - sapply(coef(ret),sum) obj(ret) <- resv[((nclass(ret)-1)*length(x)+1)] param(ret)$C <- C } ## Novelty detection if(type(ret) =="one-svc") { K <- kernelMatrix(kernel,x) xdd <- matrix(1,length(x),1) resv <- .Call(smo_optim, as.double(t(xdd)), as.integer(nrow(xdd)), as.integer(ncol(xdd)), as.double(matrix(rep(1,m))), as.double(K), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.double(matrix(rep(-1,m))), as.integer(ktype), as.integer(2), as.double(C), as.double(nu), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.integer(0), #weightlabl. as.double(0), as.integer(0), as.double(cache), as.double(tol), as.integer(shrinking)) tmpres <- resv[c(-(m+1),-(m+2))] alpha(ret) <- coef(ret) <- tmpres[tmpres != 0] svindex <- alphaindex(ret) <- which(tmpres !=0) xmatrix(ret) <- x[svindex] b(ret) <- resv[(m+1)] obj(ret) <- resv[(m+2)] param(ret)$nu <- nu } ## epsilon regression if(type(ret) =="eps-svr") { K <- kernelMatrix(kernel,x) xdd <- matrix(1,length(x),1) resv <- .Call(smo_optim, as.double(t(xdd)), as.integer(nrow(xdd)), as.integer(ncol(xdd)), as.double(y), as.double(K), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.double(matrix(rep(-1,m))), as.integer(ktype), as.integer(3), as.double(C), as.double(nu), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.integer(0), #weightlabl. 
as.double(0), as.integer(0), as.double(cache), as.double(tol), as.integer(shrinking)) tmpres <- resv[c(-(m+1),-(m+2))] alpha(ret) <- coef(ret) <- tmpres[tmpres != 0] svindex <- alphaindex(ret) <- which(tmpres != 0) xmatrix(ret) <- x[svindex] b(ret) <- resv[(m+1)] obj(ret) <- resv[(m+2)] param(ret)$epsilon <- epsilon param(ret)$C <- C } ## nu regression if(type(ret) =="nu-svr") { K <- kernelMatrix(kernel,x) xdd <- matrix(1,length(x),1) resv <- .Call(smo_optim, as.double(t(xdd)), as.integer(nrow(xdd)), as.integer(ncol(xdd)), as.double(y), as.double(K), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.double(matrix(rep(-1,m))), as.integer(ktype), as.integer(4), as.double(C), as.double(nu), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.integer(0), as.double(0), as.integer(0), as.double(cache), as.double(tol), as.integer(shrinking)) tmpres <- resv[c(-(m+1),-(m+2))] alpha(ret) <- coef(ret) <- tmpres[tmpres!=0] svindex <- alphaindex(ret) <- which(tmpres != 0) xmatrix(ret) <- x[svindex] b(ret) <- resv[(m+1)] obj(ret) <- resv[(m+2)] param(ret)$epsilon <- epsilon param(ret)$nu <- nu } ## bound constraint eps regression if(type(ret) =="eps-bsvr") { K <- kernelMatrix(kernel,x) xdd <- matrix(1,length(x),1) resv <- .Call(tron_optim, as.double(t(xdd)), as.integer(nrow(xdd)), as.integer(ncol(xdd)), as.double(y), as.double(K), as.integer(if (sparse) x@ia else 0), as.integer(if (sparse) x@ja else 0), as.integer(sparse), as.integer(2), as.integer(0), as.integer(ktype), as.integer(6), as.double(C), as.double(epsilon), as.double(sigma), as.integer(degree), as.double(offset), as.double(1), #Cbegin as.double(2), #Cstep as.integer(0), #weightlabl. as.double(0), as.integer(0), as.double(0), as.double(cache), as.double(tol), as.integer(10), #qpsize as.integer(shrinking)) tmpres <- resv[-(m+1)] alpha(ret) <- coef(ret) <- tmpres[tmpres!=0] svindex <- alphaindex(ret) <- which(tmpres != 0) xmatrix(ret) <- x[svindex] b(ret) <- -sum(alpha(ret)) obj(ret) <- resv[(m+1)] param(ret)$epsilon <- epsilon param(ret)$C <- C } kcall(ret) <- match.call() kernelf(ret) <- kernel ymatrix(ret) <- y SVindex(ret) <- unique(svindex) nSV(ret) <- length(unique(svindex)) if(type(ret)=="eps-svr"||type(ret)=="nu-svr"||type(ret)=="eps-bsvr") nclass(ret) <- m if(type(ret)=="one-svc") nclass(ret) <- 1 if(nSV(ret)==0) stop("No Support Vectors found. 
You may want to change your parameters") fitted(ret) <- if (fit) { if((type(ret)=="C-svc"||type(ret)=="nu-svc"||type(ret)=="C-bsvc") & nclass(ret) > 2) predict(ret, x) else if((type(ret)=="C-svc"||type(ret)=="nu-svc"||type(ret)=="C-bsvc"||type(ret)=="spoc-bsvc"||type(ret)=="kbb-bsvc")) predict(ret,as.kernelMatrix(K[reind,reind][,SVindex(ret), drop=FALSE])) else predict(ret,as.kernelMatrix(K[,SVindex(ret), drop=FALSE])) } else NULL if (fit){ if(type(ret)=="C-svc"||type(ret)=="nu-svc"||type(ret)=="spoc-svc"||type(ret)=="kbb-svc"||type(ret)=="C-bsvc") error(ret) <- 1 - .classAgreement(table(y,as.integer(fitted(ret)))) if(type(ret)=="one-svc") error(ret) <- sum(!fitted(ret))/m if(type(ret)=="eps-svr"||type(ret)=="nu-svr"||type(ret)=="eps-bsvr") error(ret) <- drop(crossprod(fitted(ret) - y)/m) } cross(ret) <- -1 if(!((type(ret)=="C-svc"||type(ret)=="nu-svc"||type(ret)=="C-bsvc") & nclass(ret) > 2)) { if((type(ret)=="C-svc"||type(ret)=="nu-svc"||type(ret)=="C-bsvc"||type(ret)=="spoc-bsvc"||type(ret)=="kbb-bsvc")) K <- as.kernelMatrix(K[reind,reind]) if(cross == 1) cat("\n","cross should be >1 no cross-validation done!","\n","\n") else if (cross > 1) { cerror <- 0 suppressWarnings(vgr <- split(sample(1:dim(K)[1],dim(K)[1]),1:cross)) for(i in 1:cross) { cind <- unsplit(vgr[-i],factor(rep((1:cross)[-i],unlist(lapply(vgr[-i],length))))) if(type(ret)=="C-svc"||type(ret)=="nu-svc"||type(ret)=="spoc-svc"||type(ret)=="kbb-svc"||type(ret)=="C-bsvc") { if(is.null(class.weights)) cret <- ksvm(as.kernelMatrix(K[cind,cind]),y[cind],type = type(ret), C=C, nu=nu, tol=tol, cross = 0, fit = FALSE ,cache = cache) else cret <- ksvm(as.kernelMatrix(K[cind,cind]),as.factor(lev(ret)[y[cind]]),type = type(ret), C=C, nu=nu, tol=tol, cross = 0, fit = FALSE, class.weights = class.weights,cache = cache) cres <- predict(cret, as.kernelMatrix(K[vgr[[i]], cind,drop = FALSE][,SVindex(cret),drop=FALSE])) cerror <- (1 - .classAgreement(table(y[vgr[[i]]],as.integer(cres))))/cross + cerror } if(type(ret)=="one-svc") { cret <- ksvm(as.kernelMatrix(K[cind,cind]), type = type(ret), C=C, nu=nu, tol=tol, cross = 0, fit = FALSE ,cache = cache) cres <- predict(cret, as.kernelMatrix(K[vgr[[i]], cind,drop = FALSE][,SVindex(cret),drop=FALSE])) cerror <- (1 - sum(cres)/length(cres))/cross + cerror } if(type(ret)=="eps-svr"||type(ret)=="nu-svr"||type(ret)=="eps-bsvr") { cret <- ksvm(as.kernelMatrix(K[cind,cind]),y[cind],type=type(ret), C=C,nu=nu,epsilon=epsilon,tol=tol, cross = 0, fit = FALSE, cache = cache, prob.model = FALSE) cres <- predict(cret, as.kernelMatrix(K[vgr[[i]], cind,drop = FALSE][,SVindex(cret),drop=FALSE])) cerror <- drop(crossprod(cres - y[vgr[[i]]])/m) + cerror } } cross(ret) <- cerror } prob.model(ret) <- list(NULL) if(prob.model) { if(type(ret)=="C-svc"||type(ret)=="nu-svc"||type(ret)=="C-bsvc") { p <- 0 for (i in 1:(nclass(ret)-1)) { jj <- i+1 for(j in jj:nclass(ret)) { p <- p+1 ##prepare data li <- length(indexes[[i]]) lj <- length(indexes[[j]]) if(y[indexes[[i]][1]] < y[indexes[[j]]][1]) { yd <- c(rep(-1,li),rep(1,lj)) if(!is.null(class.weights)){ weight <- weightlabels[c(j,i)] wl <- c(1,0) nweights <- 2 } } else { yd <- c(rep(1,li),rep(-1,lj)) if(!is.null(class.weights)){ weight <- weightlabels[c(i,j)] wl <- c(0,1) nweigths <- 2 } } m <- li+lj suppressWarnings(vgr <- split(c(sample(1:li,li),sample((li+1):(li+lj),lj)),1:3)) pres <- yres <- NULL for(k in 1:3) { cind <- unsplit(vgr[-k],factor(rep((1:3)[-k],unlist(lapply(vgr[-k],length))))) cret <- 
ksvm(as.kernelMatrix(as.kernelMatrix(K[c(indexes[[i]],indexes[[j]]),c(indexes[[i]],indexes[[j]]),drop=FALSE][cind,cind])), yd[cind], type = type(ret), C=C, nu=nu, tol=tol, cross = 0, fit = FALSE, cache = cache, prob.model=FALSE) yres <- c(yres,yd[vgr[[k]]]) pres <- rbind(pres,predict(cret, as.kernelMatrix(K[c(indexes[[i]],indexes[[j]]),c(indexes[[i]],indexes[[j]]),drop=FALSE][vgr[[k]], cind,drop = FALSE][,SVindex(cret),drop = FALSE]),type="decision")) } prob.model(ret)[[p]] <- .probPlatt(pres,yres) } } } if(type(ret) == "eps-svr"||type(ret) == "nu-svr"||type(ret)=="eps-bsvr"){ suppressWarnings(vgr<-split(sample(1:m,m),1:3)) pres <- NULL for(i in 1:3) { cind <- unsplit(vgr[-i],factor(rep((1:3)[-i],unlist(lapply(vgr[-i],length))))) cret <- ksvm(as.kernelMatrix(K[cind,cind]),y[cind],type=type(ret), C=C, nu=nu, epsilon=epsilon, tol=tol, cross = 0, fit = FALSE, cache = cache, prob.model = FALSE) cres <- predict(cret, as.kernelMatrix(K[vgr[[i]], cind, drop = FALSE][,SVindex(cret), drop = FALSE])) pres <- rbind(pres,predict(cret, as.kernelMatrix(K[vgr[[i]],cind , drop = FALSE][,SVindex(cret) ,drop = FALSE]),type="decision")) } pres[abs(pres) > (5*sd(pres))] <- 0 prob.model(ret) <- list(sum(abs(pres))/dim(pres)[1]) } } } else{ if(cross == 1) cat("\n","cross should be >1 no cross-validation done!","\n","\n") else if (cross > 1) { cerror <- 0 suppressWarnings(vgr<-split(sample(1:m,m),1:cross)) for(i in 1:cross) { cind <- unsplit(vgr[-i],factor(rep((1:cross)[-i],unlist(lapply(vgr[-i],length))))) if(type(ret)=="C-svc"||type(ret)=="nu-svc"||type(ret)=="spoc-svc"||type(ret)=="kbb-svc"||type(ret)=="C-bsvc") { if(is.null(class.weights)) cret <- ksvm(x[cind],y[cind],type = type(ret),kernel=kernel,kpar = NULL, C=C, nu=nu, tol=tol, cross = 0, fit = FALSE ,cache = cache) else cret <- ksvm(x[cind],as.factor(lev(ret)[y[cind]]),type = type(ret),kernel=kernel,kpar = NULL, C=C, nu=nu, tol=tol, cross = 0, fit = FALSE, class.weights = class.weights,cache = cache) cres <- predict(cret, x[vgr[[i]]]) cerror <- (1 - .classAgreement(table(y[vgr[[i]]],as.integer(cres))))/cross + cerror } if(type(ret)=="eps-svr"||type(ret)=="nu-svr"||type(ret)=="eps-bsvr") { cret <- ksvm(x[cind],y[cind],type=type(ret),kernel=kernel,kpar = NULL,C=C,nu=nu,epsilon=epsilon,tol=tol, cross = 0, fit = FALSE, cache = cache, prob.model = FALSE) cres <- predict(cret, x[vgr[[i]]]) cerror <- drop(crossprod(cres - y[vgr[[i]]])/m)/cross + cerror } } cross(ret) <- cerror } prob.model(ret) <- list(NULL) if(prob.model) { if(type(ret)=="C-svc"||type(ret)=="nu-svc"||type(ret)=="C-bsvc") { p <- 0 for (i in 1:(nclass(ret)-1)) { jj <- i+1 for(j in jj:nclass(ret)) { p <- p+1 ##prepare data li <- length(indexes[[i]]) lj <- length(indexes[[j]]) if(y[indexes[[i]][1]] < y[indexes[[j]]][1]) { yd <- c(rep(-1,li),rep(1,lj)) if(!is.null(class.weights)){ weight <- weightlabels[c(j,i)] wl <- c(1,0) nweights <- 2 } } else { yd <- c(rep(1,li),rep(-1,lj)) if(!is.null(class.weights)){ weight <- weightlabels[c(i,j)] wl <- c(0,1) nweigths <- 2 } } m <- li+lj suppressWarnings(vgr <- split(c(sample(1:li,li),sample((li+1):(li+lj),lj)),1:3)) pres <- yres <- NULL for(k in 1:3) { cind <- unsplit(vgr[-k],factor(rep((1:3)[-k],unlist(lapply(vgr[-k],length))))) if(is.null(class.weights)) cret <- ksvm(x[c(indexes[[i]], indexes[[j]])][cind],yd[cind],type = type(ret),kernel=kernel,kpar = NULL, C=C, nu=nu, tol=tol, cross = 0, fit = FALSE ,cache = cache, prob.model=FALSE) else cret <- ksvm(x[c(indexes[[i]], indexes[[j]])][cind],as.factor(lev(ret)[y[cind]]),type = type(ret),kernel=kernel,kpar 
= NULL, C=C, nu=nu, tol=tol, cross = 0, fit = FALSE, class.weights = class.weights,cache = cache, prob.model=FALSE) yres <- c(yres,yd[vgr[[k]]]) pres <- rbind(pres,predict(cret, x[c(indexes[[i]], indexes[[j]])][vgr[[k]]],type="decision")) } prob.model(ret)[[p]] <- .probPlatt(pres,yres) } } } if(type(ret) == "eps-svr"||type(ret) == "nu-svr"||type(ret)=="eps-bsvr"){ suppressWarnings(vgr<-split(sample(1:m,m),1:3)) for(i in 1:3) { cind <- unsplit(vgr[-i],factor(rep((1:3)[-i],unlist(lapply(vgr[-i],length))))) cret <- ksvm(x[cind],y[cind],type=type(ret),kernel=kernel,kpar = NULL,C=C,nu=nu,epsilon=epsilon,tol=tol, cross = 0, fit = FALSE, cache = cache, prob.model = FALSE) cres <- predict(cret, x[vgr[[i]]]) pres <- rbind(pres,predict(cret, x[vgr[[i]]],type="decision")) } pres[abs(pres) > (5*sd(pres))] <- 0 prob.model(ret) <- list(sum(abs(pres))/dim(pres)[1]) } } } return(ret) }) ##**************************************************************# ## predict for matrix, data.frame input setMethod("predict", signature(object = "ksvm"), function (object, newdata, type = "response", coupler = "minpair") { type <- match.arg(type,c("response","probabilities","votes","decision")) if (missing(newdata) && type=="response" & !is.null(fitted(object))) return(fitted(object)) else if(missing(newdata)) stop("Missing data !") if(!is(newdata,"list")){ if (!is.null(terms(object)) & !is(newdata,"kernelMatrix")) { if(!is.matrix(newdata)) newdata <- model.matrix(delete.response(terms(object)), as.data.frame(newdata), na.action = n.action(object)) } else newdata <- if (is.vector(newdata)) t(t(newdata)) else as.matrix(newdata) newnrows <- nrow(newdata) newncols <- ncol(newdata) if(!is(newdata,"kernelMatrix") && !is.null(xmatrix(object))){ if(is(xmatrix(object),"list") && is(xmatrix(object)[[1]],"matrix")) oldco <- ncol(xmatrix(object)[[1]]) if(is(xmatrix(object),"matrix")) oldco <- ncol(xmatrix(object)) if (oldco != newncols) stop ("test vector does not match model !") } } else newnrows <- length(newdata) p <- 0 if (is.list(scaling(object))) newdata[,scaling(object)$scaled] <- scale(newdata[,scaling(object)$scaled, drop = FALSE], center = scaling(object)$x.scale$"scaled:center", scale = scaling(object)$x.scale$"scaled:scale") if(type == "response" || type =="decision" || type=="votes") { if(type(object)=="C-svc"||type(object)=="nu-svc"||type(object)=="C-bsvc") { predres <- 1:newnrows if(type=="decision") votematrix <- matrix(0,nclass(object)*(nclass(object)-1)/2,newnrows) else votematrix <- matrix(0,nclass(object),newnrows) for(i in 1:(nclass(object)-1)) { jj <- i+1 for(j in jj:nclass(object)) { p <- p+1 if(is(newdata,"kernelMatrix")) ret <- newdata[,which(SVindex(object)%in%alphaindex(object)[[p]]), drop=FALSE] %*% coef(object)[[p]] - b(object)[p] else ret <- kernelMult(kernelf(object),newdata,xmatrix(object)[[p]],coef(object)[[p]]) - b(object)[p] if(type=="decision") votematrix[p,] <- ret else{ votematrix[i,ret<0] <- votematrix[i,ret<0] + 1 votematrix[j,ret>0] <- votematrix[j,ret>0] + 1 } } } if(type == "decision") predres <- t(votematrix) else predres <- sapply(predres, function(x) which.max(votematrix[,x])) } if(type(object) == "spoc-svc") { predres <- 1:newnrows votematrix <- matrix(0,nclass(object),newnrows) for(i in 1:nclass(object)){ if(is(newdata,"kernelMatrix")) votematrix[i,] <- newdata[,which(SVindex(object)%in%alphaindex(object)[[i]]), drop=FALSE] %*% coef(object)[[i]] else if (is(newdata,"list")) votematrix[i,] <- kernelMult(kernelf(object),newdata,xmatrix(object)[alphaindex(object)[[i]]],coef(object)[[i]]) 
else votematrix[i,] <- kernelMult(kernelf(object),newdata,xmatrix(object)[alphaindex(object)[[i]],,drop=FALSE],coef(object)[[i]]) } predres <- sapply(predres, function(x) which.max(votematrix[,x])) } if(type(object) == "kbb-svc") { predres <- 1:newnrows votematrix <- matrix(0,nclass(object),newnrows) A <- rowSums(alpha(object)) for(i in 1:nclass(object)) { for(k in (1:i)[-i]) if(is(newdata,"kernelMatrix")) votematrix[k,] <- votematrix[k,] - (newdata[,which(SVindex(object)%in%alphaindex(object)[[i]]), drop=FALSE] %*% alpha(object)[,k][alphaindex(object)[[i]]] + sum(alpha(object)[,k][alphaindex(object)[[i]]])) else if (is(newdata,"list")) votematrix[k,] <- votematrix[k,] - (kernelMult(kernelf(object),newdata,xmatrix(object)[alphaindex(object)[[i]]],alpha(object)[,k][alphaindex(object)[[i]]]) + sum(alpha(object)[,k][alphaindex(object)[[i]]])) else votematrix[k,] <- votematrix[k,] - (kernelMult(kernelf(object),newdata,xmatrix(object)[alphaindex(object)[[i]],,drop=FALSE],alpha(object)[,k][alphaindex(object)[[i]]]) + sum(alpha(object)[,k][alphaindex(object)[[i]]])) if(is(newdata,"kernelMatrix")) votematrix[i,] <- votematrix[i,] + (newdata[,which(SVindex(object)%in%alphaindex(object)[[i]]), drop=FALSE] %*% A[alphaindex(object)[[i]]] + sum(A[alphaindex(object)[[i]]])) else if (is(newdata,"list")) votematrix[i,] <- votematrix[i,] + (kernelMult(kernelf(object),newdata,xmatrix(object)[alphaindex(object)[[i]]],A[alphaindex(object)[[i]]]) + sum(A[alphaindex(object)[[i]]])) else votematrix[i,] <- votematrix[i,] + (kernelMult(kernelf(object),newdata,xmatrix(object)[alphaindex(object)[[i]],,drop=FALSE],A[alphaindex(object)[[i]]]) + sum(A[alphaindex(object)[[i]]])) if(i <= (nclass(object)-1)) for(kk in i:(nclass(object)-1)) if(is(newdata,"kernelMatrix")) votematrix[kk+1,] <- votematrix[kk+1,] - (newdata[,which(SVindex(object)%in%alphaindex(object)[[i]]), drop=FALSE] %*% alpha(object)[,kk][alphaindex(object)[[i]]] + sum(alpha(object)[,kk][alphaindex(object)[[i]]])) else if (is(newdata,"list")) votematrix[kk+1,] <- votematrix[kk+1,] - (kernelMult(kernelf(object),newdata,xmatrix(object)[alphaindex(object)[[i]]],alpha(object)[,kk][alphaindex(object)[[i]]]) + sum(alpha(object)[,kk][alphaindex(object)[[i]]])) else votematrix[kk+1,] <- votematrix[kk+1,] - (kernelMult(kernelf(object),newdata,xmatrix(object)[alphaindex(object)[[i]],,drop=FALSE],alpha(object)[,kk][alphaindex(object)[[i]]]) + sum(alpha(object)[,kk][alphaindex(object)[[i]]])) } predres <- sapply(predres, function(x) which.max(votematrix[,x])) } } if(type == "probabilities") { if(is.null(prob.model(object)[[1]])) stop("ksvm object contains no probability model. 
Make sure you set the paramater prob.model in ksvm during training.") if(type(object)=="C-svc"||type(object)=="nu-svc"||type(object)=="C-bsvc") { binprob <- matrix(0, newnrows, nclass(object)*(nclass(object) - 1)/2) for(i in 1:(nclass(object)-1)) { jj <- i+1 for(j in jj:nclass(object)) { p <- p+1 if(is(newdata,"kernelMatrix")) binprob[,p] <- 1 - .SigmoidPredict(as.vector(newdata[,which(SVindex(object)%in%alphaindex(object)[[p]]), drop=FALSE] %*% coef(object)[[p]] - b(object)[p]), prob.model(object)[[p]]$A, prob.model(object)[[p]]$B) else binprob[,p] <- 1 - .SigmoidPredict(as.vector(kernelMult(kernelf(object),newdata,xmatrix(object)[[p]],coef(object)[[p]]) - b(object)[p]), prob.model(object)[[p]]$A, prob.model(object)[[p]]$B) } } multiprob <- couple(binprob, coupler = coupler) } else stop("probability estimates only supported for C-svc, C-bsvc and nu-svc") } if(type(object) == "one-svc") { if(is(newdata,"kernelMatrix")) ret <- newdata %*% coef(object) - b(object) else ret <- kernelMult(kernelf(object),newdata,xmatrix(object),coef(object)) - b(object) ##one-class-classification: return TRUE/FALSE (probabilities ?) if(type=="decision") return(ret) else { ret[ret>0]<-1 return(ret == 1) } } else { if(type(object)=="eps-svr"||type(object)=="nu-svr"||type(object)=="eps-bsvr") { if(is(newdata,"kernelMatrix")) predres <- newdata %*% coef(object) - b(object) else predres <- kernelMult(kernelf(object),newdata,xmatrix(object),coef(object)) - b(object) } else { ##classification & votes : return votematrix if(type == "votes") return(votematrix) ##classification & probabilities : return probability matrix if(type == "probabilities") { colnames(multiprob) <- lev(object) return(multiprob) } if(is.numeric(lev(object)) && type == "response") return(lev(object)[predres]) if (is.character(lev(object)) && type!="decision") { ##classification & type response: return factors if(type == "response") return(factor (lev(object)[predres], levels = lev(object))) } } } if (!is.null(scaling(object)$y.scale) & !is(newdata,"kernelMatrix") & !is(newdata,"list")) ## return raw values, possibly scaled back return(predres * scaling(object)$y.scale$"scaled:scale" + scaling(object)$y.scale$"scaled:center") else ##else: return raw values return(predres) }) #****************************************************************************************# setMethod("show","ksvm", function(object){ cat("Support Vector Machine object of class \"ksvm\"","\n") cat("\n") cat(paste("SV type:", type(object))) switch(type(object), "C-svc" = cat(paste(" (classification)", "\n")), "nu-svc" = cat(paste(" (classification)", "\n")), "C-bsvc" = cat(paste(" (classification)", "\n")), "one-svc" = cat(paste(" (novelty detection)", "\n")), "spoc-svc" = cat(paste(" (classification)", "\n")), "kbb-svc" = cat(paste(" (classification)", "\n")), "eps-svr" = cat(paste(" (regression)","\n")), "nu-svr" = cat(paste(" (regression)","\n")) ) switch(type(object), "C-svc" = cat(paste(" parameter : cost C =",param(object)$C, "\n")), "nu-svc" = cat(paste(" parameter : nu =", param(object)$nu, "\n")), "C-bsvc" = cat(paste(" parameter : cost C =",param(object)$C, "\n")), "one-svc" = cat(paste(" parameter : nu =", param(object)$nu, "\n")), "spoc-svc" = cat(paste(" parameter : cost C =",param(object)$C, "\n")), "kbb-svc" = cat(paste(" parameter : cost C =",param(object)$C, "\n")), "eps-svr" = cat(paste(" parameter : epsilon =",param(object)$epsilon, " cost C =", param(object)$C,"\n")), "nu-svr" = cat(paste(" parameter : epsilon =", param(object)$epsilon, " nu =", 
param(object)$nu,"\n")) ) cat("\n") show(kernelf(object)) cat(paste("\nNumber of Support Vectors :", nSV(object),"\n")) cat("\nObjective Function Value :", round(obj(object),4),"\n") ## if(type(object)=="C-svc" || type(object) == "nu-svc") ## cat(paste("Margin width :",margin(object),"\n")) if(!is.null(fitted(object))) cat(paste("Training error :", round(error(object),6),"\n")) if(cross(object)!= -1) cat("Cross validation error :",round(cross(object),6),"\n") if(!is.null(prob.model(object)[[1]])&&(type(object)=="eps-svr" ||type(object)=="nu-svr"||type(object)=="eps-bsvr")) cat("Laplace distr. width :",round(prob.model(object)[[1]],6),"\n") if(!is.null(prob.model(object)[[1]]) & (type(object) == "C-svc"| type(object) == "nu-svc"| type(object) == "C-bsvc")) cat("Probability model included.","\n") ##train error & loss }) setMethod("plot", signature(x = "ksvm", y = "missing"), function(x, data = NULL, grid = 50, slice = list(), ...) { if (type(x) =="C-svc" || type(x) == "nu-svc") { if(nclass(x) > 2) stop("plot function only supports binary classification") if (!is.null(terms(x))&&!is.null(data)) { if(!is.matrix(data)) sub <- model.matrix(delete.response(terms(x)), as.data.frame(data), na.action = n.action(x)) } else if(!is.null(data)) sub <- as.matrix(data) else sub <- xmatrix(x)[[1]] ## sub <- sub[,!colnames(xmatrix(x)[[1]])%in%names(slice)] xr <- seq(min(sub[,2]), max(sub[,2]), length = grid) yr <- seq(min(sub[,1]), max(sub[,1]), length = grid) sc <- 0 # if(is.null(data)) # { # sc <- 1 # data <- xmatrix(x)[[1]] # } if(is.data.frame(data) || !is.null(terms(x))){ lis <- c(list(yr), list(xr), slice) names(lis)[1:2] <- setdiff(colnames(sub),names(slice)) new <- expand.grid(lis)[,labels(terms(x))] } else new <- expand.grid(xr,yr) if(sc== 1) scaling(x) <- NULL preds <- predict(x, new ,type = "decision") if(is.null(terms(x))) xylb <- colnames(sub) else xylb <- names(lis) lvl <- 37 mymax <- max(abs(preds)) mylevels <- pretty(c(0, mymax), 15) nl <- length(mylevels)-2 mycols <- c(hcl(0, 100 * (nl:0/nl)^1.3, 90 - 40 *(nl:0/nl)^1.3), rev(hcl(260, 100 * (nl:0/nl)^1.3, 90 - 40 *(nl:0/nl)^1.3))) mylevels <- c(-rev(mylevels[-1]), mylevels) index <- max(which(mylevels < min(preds))):min(which(mylevels > max(preds))) mycols <- mycols[index] mylevels <- mylevels[index] #FIXME# previously the plot code assumed that the y values are either #FIXME# -1 or 1, but this is not generally true. If generated from a #FIXME# factor, they are typically 1 and 2. Maybe ymatrix should be #FIXME# changed? ymat <- ymatrix(x) ymean <- mean(unique(ymat)) filled.contour(xr, yr, matrix(as.numeric(preds), nrow = length(xr), byrow = TRUE), col = mycols, levels = mylevels, plot.axes = { axis(1) axis(2) if(!is.null(data)){ points(sub[-SVindex(x),2], sub[-SVindex(x),1], pch = ifelse(ymat[-SVindex(x)] < ymean, 2, 1)) points(sub[SVindex(x),2], sub[SVindex(x),1], pch = ifelse(ymat[SVindex(x)] < ymean, 17, 16))} else{ ## points(sub[-SVindex(x),], pch = ifelse(ymat[-SVindex(x)] < ymean, 2, 1)) points(sub, pch = ifelse(ymat[SVindex(x)] < ymean, 17, 16)) }}, nlevels = lvl, plot.title = title(main = "SVM classification plot", xlab = xylb[2], ylab = xylb[1]), ... 
) } else { stop("Only plots of classification ksvm objects supported") } }) setGeneric(".probPlatt", function(deci, yres) standardGeneric(".probPlatt")) setMethod(".probPlatt",signature(deci="ANY"), function(deci,yres) { if (is.matrix(deci)) deci <- as.vector(deci) if (!is.vector(deci)) stop("input should be matrix or vector") yres <- as.vector(yres) ## Create label and count priors boolabel <- yres >= 0 prior1 <- sum(boolabel) m <- length(yres) prior0 <- m - prior1 ## set parameters (should be on the interface I guess) maxiter <- 100 minstep <- 1e-10 sigma <- 1e-3 eps <- 1e-5 ## Construct target support hiTarget <- (prior1 + 1)/(prior1 + 2) loTarget <- 1/(prior0 + 2) length <- prior1 + prior0 t <- rep(loTarget, m) t[boolabel] <- hiTarget ##Initial Point & Initial Fun Value A <- 0 B <- log((prior0 + 1)/(prior1 + 1)) fval <- 0 fApB <- deci*A + B bindex <- fApB >= 0 p <- q <- rep(0,m) fval <- sum(t[bindex]*fApB[bindex] + log(1 + exp(-fApB[bindex]))) fval <- fval + sum((t[!bindex] - 1)*fApB[!bindex] + log(1+exp(fApB[!bindex]))) for (it in 1:maxiter) { h11 <- h22 <- sigma h21 <- g1 <- g2 <- 0 fApB <- deci*A + B bindex <- fApB >= 0 p[bindex] <- exp(-fApB[bindex])/(1 + exp(-fApB[bindex])) q[bindex] <- 1/(1+exp(-fApB[bindex])) bindex <- fApB < 0 p[bindex] <- 1/(1 + exp(fApB[bindex])) q[bindex] <- exp(fApB[bindex])/(1 + exp(fApB[bindex])) d2 <- p*q h11 <- h11 + sum(d2*deci^2) h22 <- h22 + sum(d2) h21 <- h21 + sum(deci*d2) d1 <- t - p g1 <- g1 + sum(deci*d1) g2 <- g2 + sum(d1) ## Stopping Criteria if (abs(g1) < eps && abs(g2) < eps) break ## Finding Newton Direction -inv(t(H))%*%g det <- h11*h22 - h21^2 dA <- -(h22*g1 - h21*g2) / det dB <- -(-h21*g1 + h11*g2) / det gd <- g1*dA + g2*dB ## Line Search stepsize <- 1 while(stepsize >= minstep) { newA <- A + stepsize * dA newB <- B + stepsize * dB ## New function value newf <- 0 fApB <- deci * newA + newB bindex <- fApB >= 0 newf <- sum(t[bindex] * fApB[bindex] + log(1 + exp(-fApB[bindex]))) newf <- newf + sum((t[!bindex] - 1)*fApB[!bindex] + log(1 + exp(fApB[!bindex]))) ## Check decrease if (newf < (fval + 0.0001 * stepsize * gd)) { A <- newA B <- newB fval <- newf break } else stepsize <- stepsize/2 } if (stepsize < minstep) { cat("line search fails", A, B, g1, g2, dA, dB, gd) ret <- .SigmoidPredict(deci, A, B) return(ret) } } if(it >= maxiter -1) cat("maximum number of iterations reached",g1,g2) ret <- list(A=A, B=B) return(ret) }) ## Sigmoid predict function .SigmoidPredict <- function(deci, A, B) { fApB <- deci*A +B k <- length(deci) ret <- rep(0,k) bindex <- fApB >= 0 ret[bindex] <- exp(-fApB[bindex])/(1 + exp(-fApB[bindex])) ret[!bindex] <- 1/(1 + exp(fApB[!bindex])) return(ret) } kernlab/R/gausspr.R0000644000175100001440000003566614221632627013743 0ustar hornikusers## Gaussian Processes implementation. Laplace approximation for classification. ## author : alexandros karatzoglou setGeneric("gausspr", function(x, ...) standardGeneric("gausspr")) setMethod("gausspr",signature(x="formula"), function (x, data=NULL, ..., subset, na.action = na.omit, scaled = TRUE){ cl <- match.call() m <- match.call(expand.dots = FALSE) if (is.matrix(eval(m$data, parent.frame()))) m$data <- as.data.frame(data) m$... 
<- NULL m$formula <- m$x m$x <- NULL m[[1L]] <- quote(stats::model.frame) m <- eval(m, parent.frame()) Terms <- attr(m, "terms") attr(Terms, "intercept") <- 0 x <- model.matrix(Terms, m) y <- model.extract(m, "response") if (length(scaled) == 1) scaled <- rep(scaled, ncol(x)) if (any(scaled)) { remove <- unique(c(which(labels(Terms) %in% names(attr(x, "contrasts"))), which(!scaled) ) ) scaled <- !attr(x, "assign") %in% remove } ret <- gausspr(x, y, scaled = scaled, ...) kcall(ret) <- cl terms(ret) <- Terms if (!is.null(attr(m, "na.action"))) n.action(ret) <- attr(m, "na.action") return (ret) }) setMethod("gausspr",signature(x="vector"), function(x,...) { x <- t(t(x)) ret <- gausspr(x, ...) ret }) setMethod("gausspr",signature(x="matrix"), function (x, y, scaled = TRUE, type = NULL, kernel = "rbfdot", kpar = "automatic", var = 1, variance.model = FALSE, tol = 0.0005, cross = 0, fit = TRUE, ... ,subset ,na.action = na.omit) { ## should become an option reduced <- FALSE ## subsetting and na-handling for matrices ret <- new("gausspr") if (!missing(subset)) x <- x[subset,] if (is.null(y)) x <- na.action(x) else { df <- na.action(data.frame(y, x)) y <- df[,1] x <- as.matrix(df[,-1]) } ncols <- ncol(x) m <- nrows <- nrow(x) if (is.null (type)) type(ret) <- if (is.factor(y)) "classification" else "regression" else type(ret) <- type x.scale <- y.scale <- NULL ## scaling if (length(scaled) == 1) scaled <- rep(scaled, ncol(x)) if (any(scaled)) { co <- !apply(x[,scaled, drop = FALSE], 2, var) if (any(co)) { scaled <- rep(FALSE, ncol(x)) warning(paste("Variable(s)", paste("`",colnames(x[,scaled, drop = FALSE])[co], "'", sep="", collapse=" and "), "constant. Cannot scale data.") ) } else { xtmp <- scale(x[,scaled]) x[,scaled] <- xtmp x.scale <- attributes(xtmp)[c("scaled:center","scaled:scale")] if (is.numeric(y)&&(type(ret)!="classification")) { y <- scale(y) y.scale <- attributes(y)[c("scaled:center","scaled:scale")] y <- as.vector(y) } tmpsc <- list(scaled = scaled, x.scale = x.scale, y.scale = y.scale) } } if (var < 10^-3) stop("Noise variance parameter var has to be greater than 10^-3") # in case of classification: transform factors into integers if (is.factor(y)) { lev(ret) <- levels (y) y <- as.integer (y) } else { if (type(ret) == "classification" && any(as.integer (y) != y)) stop ("dependent variable has to be of factor or integer type for classification mode.") if(type(ret) == "classification") lev(ret) <- unique (y) } # initialize nclass(ret) <- length (lev(ret)) if(!is.null(type)) type(ret) <- match.arg(type,c("classification", "regression")) if(is.character(kernel)){ kernel <- match.arg(kernel,c("rbfdot","polydot","tanhdot","vanilladot","laplacedot","besseldot","anovadot","splinedot")) if(is.character(kpar)) if((kernel == "tanhdot" || kernel == "vanilladot" || kernel == "polydot"|| kernel == "besseldot" || kernel== "anovadot"|| kernel=="splinedot") && kpar=="automatic" ) { cat (" Setting default kernel parameters ","\n") kpar <- list() } } if (!is.function(kernel)) if (!is.list(kpar)&&is.character(kpar)&&(is(kernel, "rbfkernel") || is(kernel, "laplacedot") || kernel == "laplacedot"|| kernel=="rbfdot")){ kp <- match.arg(kpar,"automatic") if(kp=="automatic") kpar <- list(sigma=mean(sigest(x,scaled=FALSE)[c(1,3)])) cat("Using automatic sigma estimation (sigest) for RBF or laplace kernel","\n") } if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") p 
<- 0 if (type(ret) == "classification") { indexes <- lapply(1:nclass(ret), function(kk) which(y == kk)) for (i in 1:(nclass(ret)-1)) { jj <- i+1 for(j in jj:nclass(ret)) { p <- p+1 ##prepare data li <- length(indexes[[i]]) lj <- length(indexes[[j]]) xd <- matrix(0,(li+lj),dim(x)[2]) xdi <- 1:(li+lj) <= li xd[xdi,rep(TRUE,dim(x)[2])] <- x[indexes[[i]],] xd[xdi == FALSE,rep(TRUE,dim(x)[2])] <- x[indexes[[j]],] if(y[indexes[[i]][1]] < y[indexes[[j]]][1]) yd <- c(rep(1,li),rep(-1,lj)) else yd <- c(rep(-1,li),rep(1,lj)) if(reduced == FALSE){ K <- kernelMatrix(kernel,xd) gradnorm <- 1 alphag <- solut <- rep(0,li+lj) while (gradnorm > tol) { f <- crossprod(K,alphag) grad <- -yd/(1 + exp(yd*f)) hess <- exp(yd*f) hess <- hess / ((1 + hess)^2) ## We use solveiter instead of solve to speed up things ## A <- t(t(K)*as.vector(hess)) ## diag(A) <- diag(A) + 1 ## alphag <- alphag - solve(A,(grad + alphag)) solut <- solveiter(K, hess, (grad + alphag), solut) alphag <- alphag - solut gradnorm <- sqrt(sum((grad + alphag)^2)) } } else if (reduced ==TRUE) { yind <- t(matrix(unique(yd),2,length(yd))) ymat <- matrix(0, length(yd), 2) ymat[yind==yd] <- 1 ##Z <- csi(xd, ymat, kernel = kernel, rank = dim(yd)[1]) ##Z <- Z[sort(pivots(Z),index.return = TRUE)$ix, ,drop=FALSE] Z <- inchol(xd, kernel = kernel) gradnorm <- 1 alphag <- rep(0,li+lj) m1 <- dim(Z)[1] n1 <- dim(Z)[2] Ksub <- diag(rep(1,n1)) while (gradnorm > tol) { f <- drop(Z%*%crossprod(Z,alphag)) f[which(f>20)] <- 20 grad <- -yd/(1 + exp(yd*f)) hess <- exp(yd*f) hess <- as.vector(hess / ((1 + hess)^2)) alphag <- alphag - (- Z %*%solve(Ksub + (t(Z)*hess)%*%Z) %*% (t(Z)*hess))%*%(grad + alphag) + (grad + alphag) gradnorm <- sqrt(sum((grad + alphag)^2)) } } alpha(ret)[[p]] <- alphag alphaindex(ret)[[p]] <- c(indexes[[i]],indexes[[j]]) } } } if (type(ret) == "regression") { K <- kernelMatrix(kernel,x) if(variance.model) { sol <- solve(K + diag(rep(var, length = m))) rm(K) alpha(ret) <- sol%*%y } else alpha(ret) <- solve(K + diag(rep(var, length = m))) %*% y } kcall(ret) <- match.call() kernelf(ret) <- kernel xmatrix(ret) <- x if(variance.model) sol(ret) <- sol fitted(ret) <- if (fit) predict(ret, x) else NA if (fit){ if(type(ret)=="classification") error(ret) <- 1 - .classAgreement(table(y,as.integer(fitted(ret)))) if(type(ret)=="regression"){ if (!is.null(scaling(ret)$y.scale)) fitted(ret) <- fitted(ret) * tmpsc$y.scale$"scaled:scale" + tmpsc$y.scale$"scaled:center" error(ret) <- drop(crossprod(fitted(ret) - y)/m) } } if(any(scaled)) scaling(ret) <- tmpsc cross(ret) <- -1 if(cross == 1) cat("\n","cross should be >1 no cross-validation done!","\n","\n") else if (cross > 1) { cerror <- 0 suppressWarnings(vgr<-split(sample(1:m,m),1:cross)) for(i in 1:cross) { cind <- unsplit(vgr[-i],factor(rep((1:cross)[-i],unlist(lapply(vgr[-i],length))))) if(type(ret)=="classification") { cret <- gausspr(x[cind,], y[cind], scaled = FALSE, type=type(ret),kernel=kernel,var = var, cross = 0, fit = FALSE) cres <- predict(cret, x[vgr[[i]],]) cerror <- (1 - .classAgreement(table(y[vgr[[i]]],as.integer(cres))))/cross + cerror } if(type(ret)=="regression") { cret <- gausspr(x[cind,],y[cind],type=type(ret),scaled = FALSE, kernel=kernel,var = var,tol=tol, cross = 0, fit = FALSE) cres <- predict(cret, x[vgr[[i]],]) if (!is.null(scaling(ret)$y.scale)) scal <- scaling(ret)$y.scale$"scaled:scale" cerror <- drop((scal^2)*crossprod(cres - y[vgr[[i]]])/m) + cerror } } cross(ret) <- cerror } return(ret) }) setMethod("predict", signature(object = "gausspr"), function (object, newdata, type = 
"response", coupler = "minpair") { sc <- 0 type <- match.arg(type,c("response","probabilities","votes", "variance", "sdeviation")) if (missing(newdata) && type!="response") return(fitted(object)) else if(missing(newdata)) { newdata <- xmatrix(object) sc <- 1 } ncols <- ncol(xmatrix(object)) nrows <- nrow(xmatrix(object)) oldco <- ncols if (!is.null(terms(object))) { newdata <- model.matrix(delete.response(terms(object)), as.data.frame(newdata), na.action = na.action) } else newdata <- if (is.vector (newdata)) t(t(newdata)) else as.matrix(newdata) newcols <- 0 newnrows <- nrow(newdata) newncols <- ncol(newdata) newco <- newncols if (oldco != newco) stop ("test vector does not match model !") if (is.list(scaling(object)) && sc != 1) newdata[,scaling(object)$scaled] <- scale(newdata[,scaling(object)$scaled, drop = FALSE], center = scaling(object)$x.scale$"scaled:center", scale = scaling(object)$x.scale$"scaled:scale" ) p <- 0 if(type == "response") { if(type(object)=="classification") { predres <- 1:newnrows votematrix <- matrix(0,nclass(object),nrows) for(i in 1:(nclass(object)-1)) { jj <- i+1 for(j in jj:nclass(object)) { p <- p+1 ret <- kernelMult(kernelf(object),newdata,xmatrix(object)[alphaindex(object)[[p]],],alpha(object)[[p]]) votematrix[i,ret>0] <- votematrix[i,ret>0] + 1 votematrix[j,ret<0] <- votematrix[j,ret<0] + 1 } } predres <- sapply(predres, function(x) which.max(votematrix[,x])) } } if(type == "probabilities") { if(type(object)=="classification") { binprob <- matrix(0, newnrows, nclass(object)*(nclass(object) - 1)/2) for(i in 1:(nclass(object)-1)) { jj <- i+1 for(j in jj:nclass(object)) { p <- p+1 binprob[,p] <- 1/(1+exp(-kernelMult(kernelf(object),newdata,xmatrix(object)[alphaindex(object)[[p]],],alpha(object)[[p]]))) } } ## multiprob <- sapply(1:newnrows, function(x) couple(binprob[x ,],coupler = coupler)) multiprob <- couple(binprob, coupler = coupler) } } if(type(object) == "regression") { if (type == "variance"||type == "sdeviation") { Ktest <- kernelMatrix(kernelf(object),xmatrix(object), newdata) predres <- diag(kernelMatrix(kernelf(object),newdata) - t(Ktest) %*% sol(object) %*% Ktest) if (type== "sdeviation") predres <- sqrt(predres) if (!is.null(scaling(object)$y.scale)) predres <- predres * scaling(object)$y.scale$"scaled:scale" + scaling(object)$y.scale$"scaled:center" } else { predres <- kernelMult(kernelf(object),newdata,xmatrix(object),as.matrix(alpha(object))) if (!is.null(scaling(object)$y.scale)) predres <- predres * scaling(object)$y.scale$"scaled:scale" + scaling(object)$y.scale$"scaled:center" } } if (is.character(lev(object))) { ##classification & probabilities : return probabilitie matrix if(type == "probabilities") { colnames(multiprob) <- lev(object) return(multiprob) } ##classification & type response: return factors if(type == "response") return(factor (lev(object)[predres], levels = lev(object))) ##classification & votes : return votematrix if(type == "votes") return(votematrix) } else ##else: return raw values return(predres) }) setMethod("show","gausspr", function(object){ cat("Gaussian Processes object of class \"gausspr\"","\n") cat(paste("Problem type:", type(object),"\n")) cat("\n") show(kernelf(object)) cat(paste("\nNumber of training instances learned :", dim(xmatrix(object))[1],"\n")) if(!is.null(fitted(object))) cat(paste("Train error :", round(error(object),9),"\n")) ##train error & loss if(cross(object)!=-1) cat("Cross validation error :",round(cross(object),9),"\n") }) solveiter <- function(B,noiseproc,b,x,itmax = 50,tol = 10e-4 
,verbose = FALSE) { ## ---------------------------- ## Preconditioned Biconjugate Gradient method ## solves linear system Ax <- b for general A ## ------------------------------------------ ## x : initial guess ## itmax : max # iterations ## iterates while mean(abs(Ax-b)) > tol ## ## Simplified form of Numerical Recipes: linbcg ## ## The preconditioned matrix is set to inv(diag(A)) ## A defined through A <- I + N*B diagA <- matrix(1,dim(B)[1],1) + colSums(B)+ diag(B)*(noiseproc-1) ## diags of A cont <- 0 iter <- 0 r <- .Amul2(x,B,noiseproc) r <- b - r rr <- r znrm <- 1 bnrm <- sqrt(sum((b)^2)) z <- r/diagA err <- sqrt(sum((.Amul2(x,B,noiseproc) - b)^2))/bnrm while (iter <= itmax){ iter <- iter + 1 zm1nrm <- znrm zz <- rr/diagA bknum<- drop(crossprod(z,rr)) if (iter == 1) { p <- z pp <- zz } else { bk <- bknum/bkden p <- bk*p + z pp <- bk*pp + zz } bkden <- bknum z <- .Amul2(p,B,noiseproc) akden <- drop(crossprod(z,pp)) ak <- bknum/akden zz <- .Amul2T(pp,B,noiseproc) x <- x + ak*p r <- r - ak*z rr <- rr - ak*zz z <- r/diagA znrm <- 1 err <- mean(abs(r)) if (err K(P,P) is approximated by G*G' ## P : permutation matrix ## Q,R : QR decomposition of G (or center(G) if centering) ## error1 : tr(K-G*G')/tr(K) at each step of the decomposition ## error2 : ||y-Q*Q'*y||.F^2 / ||y||.F^2 at each step of the decomposition ## predicted.gain : predicted gain before adding each column ## true.gain : actual gain after adding each column n <- dim(x)[1] d <- dim(y)[2] if(n != dim(y)[1]) stop("Labels y and data x dont match") if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") m <- rank ## make sure rank is smaller than n m <- min(n-2,m) G <- matrix(0,n,min(m+delta,n)) ## Cholesky factor diagK <- rep(drop(kernel(x[1,],x[1,])),n) P <- 1:n ## pivots Q <- matrix(0,n,min(m+delta,n)) ## Q part of the QR decomposition R <- matrix(0,min(m+delta,n),min(m+delta,n)) ## R part of the QR decomposition traceK <- sum(diagK) lambda <- (1-kappa)/traceK if (centering) y <- y - (1/n) * t(matrix(colSums(y),d,n)) sumy2 <- sum(y^2) mu <- kappa/sumy2 error1 <- traceK error2 <- sumy2 predictedgain <- truegain <- rep(0,min(m+delta,n)) k <- 0 # current index of the Cholesky decomposition kadv <- 0 # current index of the look ahead steps Dadv <- diagK D <- diagK ## makes sure that delta is smaller than n - 2 delta <- min(delta,n - 2) ## approximation cost cached quantities A1 <- matrix(0,n,1) A2 <- matrix(0,n,1) A3 <- matrix(0,n,1) GTG <- matrix(0,m+delta,m+delta) QTy <- matrix(0,m+delta,d) QTyyTQ <- matrix(0,m+delta,m+delta) ## first performs delta steps of Cholesky and QR decomposition if(delta > 0) for (i in 1:delta) { kadv <- kadv + 1 ## select best index diagmax <- Dadv[kadv] jast <- 1 for (j in 1:(n-kadv+1)) { if (Dadv[j+kadv-1] > diagmax/0.99){ diagmax <- Dadv[j+kadv-1] jast <- j } } if (diagmax < 1e-12){ kadv <- kadv - 1 ## all pivots are too close to zero, stops break ## this can only happen if the matrix has rank less than delta } else{ jast <- jast + kadv-1 ## permute indices P[c(kadv,jast)] <- P[c(jast,kadv)] Dadv[c(kadv, jast)] <- Dadv[c(jast, kadv)] D[c(kadv, jast)] <- D[c(jast, kadv)] A1[c(kadv, jast)] <- A1[c(jast, kadv)] G[c(kadv, jast),1:kadv-1] <- G[c(jast,kadv),1:kadv-1] Q[c(kadv, jast),1:kadv-1] <- Q[c(jast, kadv),1:kadv-1] ## compute new Cholesky column G[kadv,kadv] <- Dadv[kadv] G[kadv,kadv] <- sqrt(G[kadv,kadv]) newKcol <- kernelMatrix(kernel, x[P[(kadv+1):n],,drop = 
FALSE],x[P[kadv],,drop=FALSE]) G[(kadv+1):n,kadv]<- (1/G[kadv,kadv])*(newKcol - G[(kadv+1):n,1:kadv-1,drop=FALSE] %*% t(G[kadv,1:kadv-1,drop=FALSE])) ## update diagonal Dadv[(kadv+1):n] <- Dadv[(kadv+1):n] - G[(kadv+1):n,kadv]^2 Dadv[kadv] <- 0 ## performs QR if (centering) Gcol <- G[,kadv,drop=FALSE] - (1/n) * matrix(sum(G[,kadv]),n,1) else Gcol <- G[,kadv, drop=FALSE] R[1:kadv-1,kadv] <- crossprod(Q[,1:kadv-1, drop=FALSE], Gcol) Q[,kadv] <- Gcol - Q[,1:kadv-1,drop=FALSE] %*% R[1:kadv-1,kadv,drop=FALSE] R[kadv,kadv] <- sqrt(sum(Q[,kadv]^2)) Q[,kadv] <- Q[,kadv]/drop(R[kadv,kadv]) ## update cached quantities if (centering) GTG[1:kadv,kadv] <- crossprod(G[,1:kadv], G[,kadv]) else GTG[1:kadv,kadv] <- crossprod(R[1:kadv,1:kadv], R[1:kadv,kadv]) GTG[kadv,1:kadv] <- t(GTG[1:kadv,kadv]) QTy[kadv,] <- crossprod(Q[,kadv], y[P,,drop = FALSE]) QTyyTQ[kadv,1:kadv] <- QTy[kadv,,drop=FALSE] %*% t(QTy[1:kadv,,drop=FALSE]) QTyyTQ[1:kadv,kadv] <- t(QTyyTQ[kadv,1:kadv]) ## update costs A1[kadv:n] <- A1[kadv:n] + GTG[kadv,kadv] * G[kadv:n,kadv]^2 A1[kadv:n] <- A1[kadv:n] + 2 * G[kadv:n,kadv] *(G[kadv:n,1:kadv-1] %*% GTG[1:kadv-1,kadv,drop=FALSE]) } } ## compute remaining costs for all indices A2 <- rowSums(( G[,1:kadv,drop=FALSE] %*% crossprod(R[1:kadv,1:kadv], QTy[1:kadv,,drop=FALSE]))^2) A3 <- rowSums((G[,1:kadv,drop=FALSE] %*% t(R[1:kadv,1:kadv]))^2) ## start main loop while (k < m){ k <- k +1 ## compute the gains in approximation for all remaining indices dJK <- matrix(0,(n-k+1),1) for (i in 1:(n-k+1)) { kast <- k+i-1 if (D[kast] < 1e-12) dJK[i] <- -1e100 ## this column is already generated by already ## selected columns -> cannot be selected else { dJK[i] <- A1[kast] if (kast > kadv) ## add eta dJK[i] <- dJK[i] + D[kast]^2 - (D[kast] - Dadv[kast])^2 dJK[i] <- dJK[i] / D[kast] } } dJy <- matrix(0,n-k+1,1) if (kadv > k){ for (i in 1:(n-k+1)) { kast <- k+i-1 if (A3[kast] < 1e-12) dJy[i] <- 0 else dJy[i] <- A2[kast] / A3[kast] } } ## select the best column dJ <- lambda * dJK + mu * dJy diagmax <- -1 jast <- 0 for (j in 1:(n-k+1)) { if (D[j+k-1] > 1e-12) if (dJ[j] > diagmax/0.9){ diagmax <- dJ[j] jast <- j } } if (jast==0) { ## no more good indices, exit k <- k-1 break } jast <- jast + k - 1 predictedgain[k] <- diagmax ## performs one cholesky + QR step: ## if new pivot not already selected, use pivot ## otherwise, select new look ahead index that maximize Dadv if (jast > kadv){ newpivot <- jast jast <- kadv + 1 } else{ a <- 1e-12 b <- 0 for (j in 1:(n-kadv)) { if (Dadv[j+kadv] > a/0.99){ a <- Dadv[j+kadv] b <- j+kadv } } if (b==0) newpivot <- 0 else newpivot <- b } if (newpivot > 0){ ## performs steps kadv <- kadv + 1 ## permute P[c(kadv, newpivot)] <- P[c(newpivot, kadv)] Dadv[c(kadv, newpivot)] <- Dadv[c(newpivot, kadv)] D[c(kadv, newpivot)] <- D[c(newpivot, kadv)] A1[c(kadv, newpivot)] <- A1[c(newpivot, kadv)] A2[c(kadv, newpivot)] <- A2[c(newpivot, kadv)] A3[c(kadv, newpivot)] <- A3[c(newpivot, kadv)] G[c(kadv, newpivot),1:kadv-1] <- G[c(newpivot, kadv),1:kadv-1] Q[c(kadv, newpivot),1:kadv-1] <- Q[ c(newpivot, kadv),1:kadv-1] ## compute new Cholesky column G[kadv,kadv] <- Dadv[kadv] G[kadv,kadv] <- sqrt(G[kadv,kadv]) newKcol <- kernelMatrix(kernel,x[P[(kadv+1):n],,drop=FALSE],x[P[kadv],,drop=FALSE]) G[(kadv+1):n,kadv] <- 1/G[kadv,kadv]*( newKcol - G[(kadv+1):n,1:kadv-1,drop=FALSE]%*%t(G[kadv,1:kadv-1,drop=FALSE])) ## update diagonal Dadv[(kadv+1):n] <- Dadv[(kadv+1):n] - G[(kadv+1):n,kadv]^2 Dadv[kadv] <- 0 ## performs QR if (centering) Gcol <- G[,kadv,drop=FALSE] - 1/n * matrix(sum(G[,kadv]),n,1 ) 
else Gcol <- G[,kadv,drop=FALSE] R[1:kadv-1,kadv] <- crossprod(Q[,1:kadv-1], Gcol) Q[,kadv] <- Gcol - Q[,1:kadv-1, drop=FALSE] %*% R[1:kadv-1,kadv, drop=FALSE] R[kadv,kadv] <- sum(abs(Q[,kadv])^2)^(1/2) Q[,kadv] <- Q[,kadv] / drop(R[kadv,kadv]) ## update the cached quantities if (centering) GTG[k:kadv,kadv] <- crossprod(G[,k:kadv], G[,kadv]) else GTG[k:kadv,kadv] <- crossprod(R[1:kadv,k:kadv], R[1:kadv,kadv]) GTG[kadv,k:kadv] <- t(GTG[k:kadv,kadv]) QTy[kadv,] <- crossprod(Q[,kadv], y[P,,drop =FALSE]) QTyyTQ[kadv,k:kadv] <- QTy[kadv,,drop = FALSE] %*% t(QTy[k:kadv,,drop = FALSE]) QTyyTQ[k:kadv,kadv] <- t(QTyyTQ[kadv,k:kadv]) ## update costs A1[kadv:n] <- A1[kadv:n] + GTG[kadv,kadv] * G[kadv:n,kadv]^2 A1[kadv:n] <- A1[kadv:n] + 2 * G[kadv:n,kadv] * (G[kadv:n,k:kadv-1,drop = FALSE] %*% GTG[k:kadv-1,kadv,drop=FALSE]) A3[kadv:n] <- A3[kadv:n] + G[kadv:n,kadv]^2 * sum(R[k:kadv,kadv]^2) temp <- crossprod(R[k:kadv,kadv,drop = FALSE], R[k:kadv,k:kadv-1,drop = FALSE]) A3[kadv:n] <- A3[kadv:n] + 2 * G[kadv:n,kadv] * (G[kadv:n,k:kadv-1] %*% t(temp)) temp <- crossprod(R[k:kadv,kadv,drop = FALSE], QTyyTQ[k:kadv,k:kadv,drop = FALSE]) temp1 <- temp %*% R[k:kadv,kadv,drop = FALSE] A2[kadv:n] <- A2[kadv:n] + G[kadv:n,kadv,drop = FALSE]^2 %*% temp1 temp2 <- temp %*% R[k:kadv,k:kadv-1] A2[kadv:n] <- A2[kadv:n] + 2 * G[kadv:n,kadv] * (G[kadv:n,k:kadv-1,drop=FALSE] %*% t(temp2)) } ## permute pivots in the Cholesky and QR decomposition between p,q p <- k q <- jast if (p < q){ ## store some quantities Gbef <- G[,p:q] Gbeftotal <- G[,k:kadv] GTGbef <- GTG[p:q,p:q] QTyyTQbef <- QTyyTQ[p:q,k:kadv] Rbef <- R[p:q,p:q] Rbeftotal <- R[k:kadv,k:kadv] tempG <- diag(1,q-p+1,q-p+1) tempQ <- diag(1,q-p+1,q-p+1) for (s in seq(q-1,p,-1)) { ## permute indices P[c(s, s+1)] <- P[c(s+1, s)] Dadv[c(s, s+1)] <- Dadv[c(s+1, s)] D[c(s, s+1)] <- D[c(s+1, s)] A1[c(s, s+1)] <- A1[c(s+1, s)] A2[c(s, s+1)] <- A2[c(s+1, s)] A3[c(s, s+1)] <- A3[c(s+1, s)] G[c(s, s+1),1:kadv] <- G[c(s+1,s), 1:kadv] Gbef[c(s, s+1),] <- Gbef[c(s+1, s),] Gbeftotal[c(s, s+1),] <- Gbeftotal[c(s+1, s),] Q[c(s, s+1),1:kadv] <- Q[c(s+1, s) ,1:kadv] ## update decompositions res <- .qr2(t(G[s:(s+1),s:(s+1)])) Q1 <- res$Q R1 <- res$R G[,s:(s+1)] <- G[,s:(s+1)] %*% Q1 G[s,(s+1)] <- 0 R[1:kadv,s:(s+1)] <- R[1:kadv,s:(s+1)] %*% Q1 res <- .qr2(R[s:(s+1),s:(s+1)]) Q2 <- res$Q R2 <- res$R R[s:(s+1),1:kadv] <- crossprod(Q2, R[s:(s+1),1:kadv]) Q[,s:(s+1)] <- Q[,s:(s+1)] %*% Q2 R[s+1,s] <- 0 ## update relevant quantities if( k <= (s-1) && s+2 <= kadv) nonchanged <- c(k:(s-1), (s+2):kadv) if( k <= (s-1) && s+2 > kadv) nonchanged <- k:(s-1) if( k > (s-1) && s+2 <= kadv) nonchanged <- (s+2):kadv GTG[nonchanged,s:(s+1)] <- GTG[nonchanged,s:(s+1)] %*% Q1 GTG[s:(s+1),nonchanged] <- t(GTG[nonchanged,s:(s+1)]) GTG[s:(s+1),s:(s+1)] <- crossprod(Q1, GTG[s:(s+1),s:(s+1)] %*% Q1) QTy[s:(s+1),] <- crossprod(Q2, QTy[s:(s+1),]) QTyyTQ[nonchanged,s:(s+1)] <- QTyyTQ[nonchanged,s:(s+1)] %*% Q2 QTyyTQ[s:(s+1),nonchanged] <- t(QTyyTQ[nonchanged,s:(s+1)]) QTyyTQ[s:(s+1),s:(s+1)] <- crossprod(Q2, QTyyTQ[s:(s+1),s:(s+1)] %*% Q2) tempG[,(s-p+1):(s-p+2)] <- tempG[,(s-p+1):(s-p+2)] %*% Q1 tempQ[,(s-p+1):(s-p+2)] <- tempQ[,(s-p+1):(s-p+2)] %*% Q2 } ## update costs tempG <- tempG[,1] tempGG <- GTGbef %*% tempG A1[k:n] <- A1[k:n] - 2 * G[k:n,k] * (Gbef[k:n,] %*% tempGG) # between p and q -> different if(k > (p-1) ) kmin <- 0 else kmin <- k:(p-1) if((q+1) > kadv) qmin <- 0 else qmin <- (q+1):kadv A1[k:n] <- A1[k:n] - 2 * G[k:n,k] * (G[k:n,kmin,drop=FALSE] %*% GTG[kmin,k,drop=FALSE]) # below p A1[k:n] <- 
A1[k:n] - 2 * G[k:n,k] * (G[k:n,qmin,drop=FALSE] %*% GTG[qmin,k,drop=FALSE]) # above q tempQ <- tempQ[,1] temp <- G[k:n,qmin,drop=FALSE] %*% t(R[k,qmin,drop=FALSE]) temp <- temp + G[k:n,kmin,drop=FALSE] %*% t(R[k,kmin,drop=FALSE]) temp <- temp + Gbef[k:n,] %*% crossprod(Rbef, tempQ) A3[k:n] <- A3[k:n] - temp^2 A2[k:n] <- A2[k:n] + temp^2 * QTyyTQ[k,k] temp2 <- crossprod(tempQ,QTyyTQbef) %*% Rbeftotal A2[k:n] <- A2[k:n] - 2 * temp * (Gbeftotal[k:n,,drop=FALSE] %*% t(temp2)) } else { ## update costs A1[k:n] <- A1[k:n] - 2 * G[k:n,k] * (G[k:n,k:kadv,drop=FALSE] %*% GTG[k:kadv,k,drop=FALSE]) A3[k:n]<- A3[k:n] - (G[k:n,k:kadv,drop=FALSE] %*% t(R[k,k:kadv,drop=FALSE]))^2 temp <- G[k:n,k:kadv,drop=FALSE] %*% t(R[k,k:kadv,drop=FALSE]) A2[k:n] <- A2[k:n] + (temp^2) * QTyyTQ[k,k] temp2 <- QTyyTQ[k,k:kadv,drop=FALSE] %*% R[k:kadv,k:kadv,drop=FALSE] A2[k:n] <- A2[k:n] - 2 * temp * (G[k:n,k:kadv,drop=FALSE] %*% t(temp2)) } ## update diagonal and other quantities (A1,B1) D[(k+1):n] <- D[(k+1):n] - G[(k+1):n,k]^2 D[k] <- 0 A1[k:n] <- A1[k:n] + GTG[k,k] * (G[k:n,k]^2) ## compute errors and true gains temp2 <- crossprod(Q[,k], y[P,]) temp2 <- sum(temp2^2) temp1 <- sum(G[,k]^2) truegain[k] <- temp1 * lambda + temp2 * mu error1[k+1] <- error1[k] - temp1 error2[k+1] <- error2[k] - temp2 if (truegain[k] < tol) break } ## reduce dimensions of decomposition G <- G[,1:k,drop=FALSE] Q <- Q[,1:k,drop=FALSE] R <- R[1:k,1:k,drop=FALSE] ## compute and normalize errors error <- lambda * error1 + mu * error2 error1 <- error1 / traceK error2 <- error2 / sumy2 repivot <- sort(P, index.return = TRUE)$ix return(new("csi",.Data=G[repivot, ,drop=FALSE],Q= Q[repivot,,drop = FALSE], R = R, pivots=repivot, diagresidues = error1, maxresiduals = error2, truegain = truegain, predgain = predictedgain)) }) ## I guess we can replace this with qr() .qr2 <- function(M) { ## QR decomposition for 2x2 matrices Q <- matrix(0,2,2) R <- matrix(0,2,2) x <- sqrt(M[1,1]^2 + M[2,1]^2) R[1,1] <- x Q[,1] <- M[,1]/x R[1,2] <- crossprod(Q[,1], M[,2]) Q[,2] <- M[,2] - R[1,2] * Q[,1] R[2,2] <- sum(abs(Q[,2])^2)^(1/2) Q[,2] <- Q[,2] / R[2,2] return(list(Q=Q,R=R)) } kernlab/R/couplers.R0000644000175100001440000000770211304023134014063 0ustar hornikusers## wrapper function for couplers ## author : alexandros karatzoglou couple <- function(probin, coupler = "minpair") { if(is.vector(probin)) probin <- matrix(probin,1) m <- dim(probin)[1] coupler <- match.arg(coupler, c("minpair", "pkpd", "vote", "ht")) # if(coupler == "ht") # multiprob <- sapply(1:m, function(x) do.call(coupler, list(probin[x ,], clscnt))) # else multiprob <- sapply(1:m, function(x) do.call(coupler, list(probin[x ,]))) return(t(multiprob)) } ht <- function(probin, clscnt, iter=1000) { nclass <- length(clscnt) probim <- matrix(0, nclass, nclass) for(i in 1:nclass) for(j in 1:nclass) if(j>i) { probim[i,j] <- probin[i] probim[j,i] <- 1 - probin[i] } p <- rep(1/nclass,nclass) u <- matrix((1/nclass)/((1/nclass)+(1/nclass)) ,nclass,nclass) iter <- 0 while(TRUE) { iter <- iter + 1 stoperror <- 0 for(i in 1:nclass){ num <- den <- 0 for(j in 1:nclass) { if (j!=i) { num <- num + (clscnt[i] + clscnt[j]) * probim[i,j] den <- den + (clscnt[i] + clscnt[j]) * u[i,j] } } alpha <- num/(den + 1e-308) p[i] <- p[i]*alpha stoperror <- stoperror + (alpha -1)^2 if(0) { sum <- 0 sum <- sum(p) + sum p <- p/sum for(ui in 1:nclass) for(uj in 1:nclass) u[ui, uj] <- p[ui]/(p[ui] + p[uj]) } else { for(j in 1:nclass) if (i!=j) { u[i,j] <- p[i]/(p[i] + p[j]) u[j,i] <- 1 - u[i,j] } } } if(stoperror < 1e-3) break if(iter > 
400) { cat("Too many iterations: aborting", probin, iter, stoperror, p) break } } ## normalize prob. p <- p/sum(p) return(p) } minpair <- function(probin) { ## Count number of classes and construct prob. matrix nclass <- (1+sqrt(1 + 8*length(probin)))/2 if(nclass%%1 != 0) stop("Vector has wrong length only one against one problems supported") probim <- matrix(0, nclass, nclass) probim[upper.tri(probim)] <- probin probim[lower.tri(probim)] <- 1 - probin sum <- colSums(probim^2) Q <- diag(sum) Q[upper.tri(Q)] <- - probin*(1 - probin) Q[lower.tri(Q)] <- - probin*(1 - probin) SQ <- matrix(0,nclass +1, nclass +1) SQ[1:(nclass+1) <= nclass, 1:(nclass+1) <= nclass] <- Q SQ[1:(nclass+1) > nclass, 1:(nclass+1) <= nclass] <- rep(1,nclass) SQ[1:(nclass+1) <= nclass, 1:(nclass+1) > nclass] <- rep(1,nclass) rhs <- rep(0,nclass+1) rhs[nclass + 1] <- 1 p <- solve(SQ,rhs) p <- p[-(nclass+1)]/sum(p[-(nclass+1)]) return(p) } pkpd <- function(probin) { ## Count number of classes and constuct prob. matrix nclass <- k <- (1+sqrt(1 + 8*length(probin)))/2 if(nclass%%1 != 0) stop("Vector has wrong length only one against one problems supported") probim <- matrix(0, nclass, nclass) probim[upper.tri(probim)] <- probin probim[lower.tri(probim)] <- 1 - probin probim[probim==0] <- 1e-300 R <- 1/probim diag(R) <- 0 p <- 1/(rowSums(R) - (k-2)) p <- p/sum(p) return(p) } vote<- function(probin) { nclass <- (1+sqrt(1 + 8*length(probin)))/2 if(nclass%%1 != 0) stop("Vector has wrong length only one against one problems supported") votev <- rep(0,nclass) p <- 0 for(i in 1:(nclass-1)) { jj <- i+1 for(j in jj:nclass) { p <- p+1 votev[i][probin[i] >= 0.5] <- votev[i][probin[i] >= 0.5] + 1 votev[j][probin[j] < 0.5] <- votev[j][probin[j] < 0.5] + 1 } } p <- votev/sum(votev) return(p) } kernlab/R/aobjects.R0000644000175100001440000010724112055335057014036 0ustar hornikusers## S4 object definitions and assigment/accessor functions for the slots. 
## ## created 10.09.03 alexandros karatzoglou ## updated 23.08.05 setClass("kernel",representation("function",kpar="list")) setClass("kernelMatrix",representation("matrix"),prototype=structure(.Data=matrix())) setClassUnion("listI", c("list","numeric","vector","integer","matrix")) setClassUnion("output", c("matrix","factor","vector","logical","numeric","list","integer","NULL")) setClassUnion("input", c("matrix","list")) setClassUnion("kfunction", c("function","character")) setClassUnion("mpinput", c("matrix","data.frame","missing")) setClassUnion("lpinput", c("list","missing")) setClassUnion("kpinput", c("kernelMatrix","missing")) setClass("vm", representation(alpha = "listI", ## since setClassUnion is not working type = "character", kernelf = "kfunction", kpar = "list", xmatrix = "input", ymatrix = "output", fitted = "output", lev = "vector", nclass = "numeric", error = "vector", cross = "vector", n.action= "ANY", terms = "ANY", kcall = "call"), contains= "VIRTUAL") #Generic Vector Machine object if(!isGeneric("type")){ if (is.function("type")) fun <- type else fun <- function(object) standardGeneric("type") setGeneric("type", fun) } setMethod("type", "vm", function(object) object@type) setGeneric("type<-", function(x, value) standardGeneric("type<-")) setReplaceMethod("type", "vm", function(x, value) { x@type <- value x }) if(!isGeneric("kernelf")){ if (is.function("kernelf")) fun <- kernelf else fun <- function(object) standardGeneric("kernelf") setGeneric("kernelf", fun) } setMethod("kernelf", "vm", function(object) object@kernelf) setGeneric("kernelf<-", function(x, value) standardGeneric("kernelf<-")) setReplaceMethod("kernelf", "vm", function(x, value) { x@kernelf <- value x }) if(!isGeneric("kpar")){ if (is.function("kpar")) fun <- kpar else fun <- function(object) standardGeneric("kpar") setGeneric("kpar", fun) } setMethod("kpar", "vm", function(object) object@kpar) setGeneric("kpar<-", function(x, value) standardGeneric("kpar<-")) setReplaceMethod("kpar", "vm", function(x, value) { x@kpar <- value x }) if(!isGeneric("kcall")){ if (is.function("kcall")) fun <- kcall else fun <- function(object) standardGeneric("kcall") setGeneric("kcall", fun) } setMethod("kcall", "vm", function(object) object@kcall) setGeneric("kcall<-", function(x, value) standardGeneric("kcall<-")) setReplaceMethod("kcall", "vm", function(x, value) { x@kcall <- value x }) setMethod("terms", "vm", function(x, ...) x@terms) setGeneric("terms<-", function(x, value) standardGeneric("terms<-")) setReplaceMethod("terms", "vm", function(x, value) { x@terms <- value x }) if(!isGeneric("xmatrix")){ if (is.function("xmatrix")) fun <- xmatrix else fun <- function(object) standardGeneric("xmatrix") setGeneric("xmatrix", fun) } setMethod("xmatrix", "vm", function(object) object@xmatrix) setGeneric("xmatrix<-", function(x, value) standardGeneric("xmatrix<-")) setReplaceMethod("xmatrix", "vm", function(x, value) { x@xmatrix <- value x }) if(!isGeneric("ymatrix")){ if (is.function("ymatrix")) fun <- ymatrix else fun <- function(object) standardGeneric("ymatrix") setGeneric("ymatrix", fun) } setMethod("ymatrix", "vm", function(object) object@ymatrix) setGeneric("ymatrix<-", function(x, value) standardGeneric("ymatrix<-")) setReplaceMethod("ymatrix", "vm", function(x, value) { x@ymatrix <- value x }) setMethod("fitted", "vm", function(object, ...) 
object@fitted) setGeneric("fitted<-", function(x, value) standardGeneric("fitted<-")) setReplaceMethod("fitted", "vm", function(x, value) { x@fitted <- value x }) if(!isGeneric("lev")){ if (is.function("lev")) fun <- lev else fun <- function(object) standardGeneric("lev") setGeneric("lev", fun) } setMethod("lev", "vm", function(object) object@lev) setGeneric("lev<-", function(x, value) standardGeneric("lev<-")) setReplaceMethod("lev", "vm", function(x, value) { x@lev <- value x }) if(!isGeneric("nclass")){ if (is.function("nclass")) fun <- nclass else fun <- function(object) standardGeneric("nclass") setGeneric("nclass", fun) } setMethod("nclass", "vm", function(object) object@nclass) setGeneric("nclass<-", function(x, value) standardGeneric("nclass<-")) setReplaceMethod("nclass", "vm", function(x, value) { x@nclass <- value x }) if(!isGeneric("alpha")){ if (is.function("alpha")) fun <- alpha else fun <- function(object) standardGeneric("alpha") setGeneric("alpha", fun) } setMethod("alpha", "vm", function(object) object@alpha) setGeneric("alpha<-", function(x, value) standardGeneric("alpha<-")) setReplaceMethod("alpha", "vm", function(x, value) { x@alpha <- value x }) if(!isGeneric("error")){ if (is.function("error")) fun <- error else fun <- function(object) standardGeneric("error") setGeneric("error", fun) } setMethod("error", "vm", function(object) object@error) setGeneric("error<-", function(x, value) standardGeneric("error<-")) setReplaceMethod("error", "vm", function(x, value) { x@error <- value x }) if(!isGeneric("cross")){ if (is.function("cross")) fun <- cross else fun <- function(object) standardGeneric("cross") setGeneric("cross", fun) } setMethod("cross", "vm", function(object) object@cross) setGeneric("cross<-", function(x, value) standardGeneric("cross<-")) setReplaceMethod("cross", "vm", function(x, value) { x@cross <- value x }) if(!isGeneric("n.action")){ if (is.function("n.action")) fun <- n.action else fun <- function(object) standardGeneric("n.action") setGeneric("n.action", fun) } setMethod("n.action", "vm", function(object) object@n.action) setGeneric("n.action<-", function(x, value) standardGeneric("n.action<-")) setReplaceMethod("n.action", "vm", function(x, value) { x@n.action <- value x }) setClass("ksvm", representation(param = "list", scaling = "ANY", coef = "ANY", alphaindex = "ANY", b = "numeric", obj = "vector", SVindex = "vector", nSV = "numeric", prior = "list", prob.model = "list" ), contains="vm") if(!isGeneric("param")){ if (is.function("param")) fun <- param else fun <- function(object) standardGeneric("param") setGeneric("param", fun) } setMethod("param", "ksvm", function(object) object@param) setGeneric("param<-", function(x, value) standardGeneric("param<-")) setReplaceMethod("param", "ksvm", function(x, value) { x@param <- value x }) if(!isGeneric("scaling")){ if (is.function("scaling")) fun <- scaling else fun <- function(object) standardGeneric("scaling") setGeneric("scaling", fun) } setMethod("scaling", "ksvm", function(object) object@scaling) setGeneric("scaling<-", function(x, value) standardGeneric("scaling<-")) setReplaceMethod("scaling", "ksvm", function(x, value) { x@scaling<- value x }) if(!isGeneric("obj")){ if (is.function("obj")) fun <- obj else fun <- function(object) standardGeneric("obj") setGeneric("obj", fun) } setMethod("obj", "ksvm", function(object) object@obj) setGeneric("obj<-", function(x, value) standardGeneric("obj<-")) setReplaceMethod("obj", "ksvm", function(x, value) { x@obj<- value x }) setMethod("coef", "ksvm", 
function(object, ...) object@coef) setGeneric("coef<-", function(x, value) standardGeneric("coef<-")) setReplaceMethod("coef", "ksvm", function(x, value) { x@coef <- value x }) if(!isGeneric("alphaindex")){ if (is.function("alphaindex")) fun <- alphaindex else fun <- function(object) standardGeneric("alphaindex") setGeneric("alphaindex", fun) } setMethod("alphaindex", "ksvm", function(object) object@alphaindex) setGeneric("alphaindex<-", function(x, value) standardGeneric("alphaindex<-")) setReplaceMethod("alphaindex", "ksvm", function(x, value) { x@alphaindex <- value x }) if(!isGeneric("b")){ if (is.function("b")) fun <- b else fun <- function(object) standardGeneric("b") setGeneric("b", fun) } setMethod("b", "ksvm", function(object) object@b) setGeneric("b<-", function(x, value) standardGeneric("b<-")) setReplaceMethod("b", "ksvm", function(x, value) { x@b <- value x }) if(!isGeneric("SVindex")){ if (is.function("SVindex")) fun <- SVindex else fun <- function(object) standardGeneric("SVindex") setGeneric("SVindex", fun) } setMethod("SVindex", "ksvm", function(object) object@SVindex) setGeneric("SVindex<-", function(x, value) standardGeneric("SVindex<-")) setReplaceMethod("SVindex", "ksvm", function(x, value) { x@SVindex <- value x }) if(!isGeneric("nSV")){ if (is.function("nSV")) fun <- nSV else fun <- function(object) standardGeneric("nSV") setGeneric("nSV", fun) } setMethod("nSV", "ksvm", function(object) object@nSV) setGeneric("nSV<-", function(x, value) standardGeneric("nSV<-")) setReplaceMethod("nSV", "ksvm", function(x, value) { x@nSV <- value x }) if(!isGeneric("prior")){ if (is.function("prior")) fun <- prior else fun <- function(object) standardGeneric("prior") setGeneric("prior", fun) } setMethod("prior", "ksvm", function(object) object@prior) setGeneric("prior<-", function(x, value) standardGeneric("prior<-")) setReplaceMethod("prior", "ksvm", function(x, value) { x@prior <- value x }) if(!isGeneric("prob.model")){ if (is.function("prob.model")) fun <- prob.model else fun <- function(object) standardGeneric("prob.model") setGeneric("prob.model", fun) } setMethod("prob.model", "ksvm", function(object) object@prob.model) setGeneric("prob.model<-", function(x, value) standardGeneric("prob.model<-")) setReplaceMethod("prob.model", "ksvm", function(x, value) { x@prob.model <- value x }) setClass("lssvm", representation(param = "list", scaling = "ANY", coef = "ANY", alphaindex = "ANY", ## prob.model = "list", b = "numeric", nSV = "numeric" ), contains="vm") ##setMethod("prob.model", "lssvm", function(object) object@prob.model) ##setGeneric("prob.model<-", function(x, value) standardGeneric("prob.model<-")) ##setReplaceMethod("prob.model", "lssvm", function(x, value) { ## x@prob.model <- value ## x ##}) setMethod("param", "lssvm", function(object) object@param) setReplaceMethod("param", "lssvm", function(x, value) { x@param <- value x }) setMethod("scaling", "lssvm", function(object) object@scaling) setReplaceMethod("scaling", "lssvm", function(x, value) { x@scaling<- value x }) setMethod("coef", "lssvm", function(object, ...) 
object@coef) setReplaceMethod("coef", "lssvm", function(x, value) { x@coef <- value x }) setMethod("alphaindex", "lssvm", function(object) object@alphaindex) setReplaceMethod("alphaindex", "lssvm", function(x, value) { x@alphaindex <- value x }) setMethod("b", "lssvm", function(object) object@b) setReplaceMethod("b", "lssvm", function(x, value) { x@b <- value x }) setMethod("nSV", "lssvm", function(object) object@nSV) setReplaceMethod("nSV", "lssvm", function(x, value) { x@nSV <- value x }) setClass("kqr", representation(param = "list", scaling = "ANY", coef = "ANY", b = "numeric" ), contains="vm") setMethod("b", "kqr", function(object) object@b) setReplaceMethod("b", "kqr", function(x, value) { x@b <- value x }) setMethod("scaling", "kqr", function(object) object@scaling) setReplaceMethod("scaling", "kqr", function(x, value) { x@scaling <- value x }) setMethod("coef", "kqr", function(object) object@coef) setReplaceMethod("coef", "kqr", function(x, value) { x@coef <- value x }) setMethod("param", "kqr", function(object) object@param) setReplaceMethod("param", "kqr", function(x, value) { x@param <- value x }) ## failed attempt to get rid of all this above ## mkaccesfun <- function(cls) #{ # snames <- slotNames(cls) ## # # for(i in 1:length(snames)) # { resF <- paste("\"",snames[i],"\"",sep="") # if(!isGeneric(snames[i])) # eval(parse(file="",text=paste("setGeneric(",resF,",function(object)","standardGeneric(",resF,")",")",sep=" "))) # setGeneric(snames[i], function(object) standardGeneric(snames[i])) # # setMethod(snames[i], cls, function(object) eval(parse(file="",text=paste("object@",snames[i],sep="")))) # resG <- paste("\"",snames[i],"<-","\"",sep="") #eval(parse(file="",text=paste("setGeneric(",resG,",function(x, value)","standardGeneric(",resG,")",")",sep=" "))) # setReplaceMethod(snames[i], cls, function(x, value) { # eval(parse(file="",text=paste("x@",snames[i],"<-value",sep=""))) # x # }) # } #} setClass("prc", representation(pcv = "matrix", eig = "vector", kernelf = "kfunction", kpar = "list", xmatrix = "input", kcall = "ANY", terms = "ANY", n.action = "ANY"),contains="VIRTUAL") #accessor functions if(!isGeneric("pcv")){ if (is.function("pcv")) fun <- pcv else fun <- function(object) standardGeneric("pcv") setGeneric("pcv", fun) } setMethod("pcv", "prc", function(object) object@pcv) setGeneric("pcv<-", function(x, value) standardGeneric("pcv<-")) setReplaceMethod("pcv", "prc", function(x, value) { x@pcv <- value x }) if(!isGeneric("eig")){ if (is.function("eig")) fun <- eig else fun <- function(object) standardGeneric("eig") setGeneric("eig", fun) } setMethod("eig", "prc", function(object) object@eig) setGeneric("eig<-", function(x, value) standardGeneric("eig<-")) setReplaceMethod("eig", "prc", function(x, value) { x@eig <- value x }) setMethod("kernelf","prc", function(object) object@kernelf) setReplaceMethod("kernelf","prc", function(x, value){ x@kernelf <- value x }) setMethod("xmatrix","prc", function(object) object@xmatrix) setReplaceMethod("xmatrix","prc", function(x, value){ x@xmatrix <- value x }) setMethod("kcall","prc", function(object) object@kcall) setReplaceMethod("kcall","prc", function(x, value){ x@kcall <- value x }) setMethod("terms","prc", function(x, ...) 
x@terms) setReplaceMethod("terms","prc", function(x, value){ x@terms <- value x }) setMethod("n.action","prc", function(object) object@n.action) setReplaceMethod("n.action","prc", function(x, value){ x@n.action <- value x }) ##kernel principal components object setClass("kpca", representation(rotated = "matrix"),contains="prc") #accessor functions if(!isGeneric("rotated")){ if (is.function("rotated")) fun <- rotated else fun <- function(object) standardGeneric("rotated") setGeneric("rotated", fun) } setMethod("rotated", "kpca", function(object) object@rotated) setGeneric("rotated<-", function(x, value) standardGeneric("rotated<-")) setReplaceMethod("rotated", "kpca", function(x, value) { x@rotated <- value x }) ## kernel maximum mean discrepancy setClass("kmmd", representation(H0="logical", AsympH0 ="logical", kernelf = "kfunction", Asymbound="numeric", Radbound="numeric", xmatrix="input", mmdstats="vector")) if(!isGeneric("mmdstats")){ if (is.function("mmdstats")) fun <- mmdstats else fun <- function(object) standardGeneric("mmdstats") setGeneric("mmdstats", fun) } setMethod("mmdstats","kmmd", function(object) object@mmdstats) setGeneric("mmdstats<-", function(x, value) standardGeneric("mmdstats<-")) setReplaceMethod("mmdstats","kmmd", function(x, value){ x@mmdstats <- value x }) if(!isGeneric("Radbound")){ if (is.function("Radbound")) fun <- Radbound else fun <- function(object) standardGeneric("Radbound") setGeneric("Radbound", fun) } setMethod("Radbound","kmmd", function(object) object@Radbound) setGeneric("Radbound<-", function(x, value) standardGeneric("Radbound<-")) setReplaceMethod("Radbound","kmmd", function(x, value){ x@Radbound <- value x }) if(!isGeneric("Asymbound")){ if (is.function("Asymbound")) fun <- Asymbound else fun <- function(object) standardGeneric("Asymbound") setGeneric("Asymbound", fun) } setMethod("Asymbound","kmmd", function(object) object@Asymbound) setGeneric("Asymbound<-", function(x, value) standardGeneric("Asymbound<-")) setReplaceMethod("Asymbound","kmmd", function(x, value){ x@Asymbound <- value x }) if(!isGeneric("H0")){ if (is.function("H0")) fun <- H0 else fun <- function(object) standardGeneric("H0") setGeneric("H0", fun) } setMethod("H0","kmmd", function(object) object@H0) setGeneric("H0<-", function(x, value) standardGeneric("H0<-")) setReplaceMethod("H0","kmmd", function(x, value){ x@H0 <- value x }) if(!isGeneric("AsympH0")){ if (is.function("AsympH0")) fun <- AsympH0 else fun <- function(object) standardGeneric("AsympH0") setGeneric("AsympH0", fun) } setMethod("AsympH0","kmmd", function(object) object@AsympH0) setGeneric("AsympH0<-", function(x, value) standardGeneric("AsympH0<-")) setReplaceMethod("AsympH0","kmmd", function(x, value){ x@AsympH0 <- value x }) setMethod("kernelf","kmmd", function(object) object@kernelf) setReplaceMethod("kernelf","kmmd", function(x, value){ x@kernelf <- value x }) setClass("ipop", representation(primal = "vector", dual = "numeric", how = "character" )) if(!isGeneric("primal")){ if (is.function("primal")) fun <- primal else fun <- function(object) standardGeneric("primal") setGeneric("primal", fun) } setMethod("primal", "ipop", function(object) object@primal) setGeneric("primal<-", function(x, value) standardGeneric("primal<-")) setReplaceMethod("primal", "ipop", function(x, value) { x@primal <- value x }) if(!isGeneric("dual")){ if (is.function("dual")) fun <- dual else fun <- function(object) standardGeneric("dual") setGeneric("dual", fun) } setMethod("dual", "ipop", function(object) object@dual) 
setGeneric("dual<-", function(x, value) standardGeneric("dual<-")) setReplaceMethod("dual", "ipop", function(x, value) { x@dual <- value x }) if(!isGeneric("how")){ if (is.function("how")) fun <- how else fun <- function(object) standardGeneric("how") setGeneric("how", fun) } setMethod("how", "ipop", function(object) object@how) setGeneric("how<-", function(x, value) standardGeneric("how<-")) setReplaceMethod("how", "ipop", function(x, value) { x@how <- value x }) # Kernel Canonical Correlation Analysis setClass("kcca", representation(kcor = "vector", xcoef = "matrix", ycoef = "matrix" ##xvar = "matrix", ##yvar = "matrix" )) if(!isGeneric("kcor")){ if (is.function("kcor")) fun <- kcor else fun <- function(object) standardGeneric("kcor") setGeneric("kcor", fun) } setMethod("kcor", "kcca", function(object) object@kcor) setGeneric("kcor<-", function(x, value) standardGeneric("kcor<-")) setReplaceMethod("kcor", "kcca", function(x, value) { x@kcor <- value x }) if(!isGeneric("xcoef")){ if (is.function("xcoef")) fun <- xcoef else fun <- function(object) standardGeneric("xcoef") setGeneric("xcoef", fun) } setMethod("xcoef", "kcca", function(object) object@xcoef) setGeneric("xcoef<-", function(x, value) standardGeneric("xcoef<-")) setReplaceMethod("xcoef", "kcca", function(x, value) { x@xcoef <- value x }) if(!isGeneric("ycoef")){ if (is.function("ycoef")) fun <- ycoef else fun <- function(object) standardGeneric("ycoef") setGeneric("ycoef", fun) } setMethod("ycoef", "kcca", function(object) object@ycoef) setGeneric("ycoef<-", function(x, value) standardGeneric("ycoef<-")) setReplaceMethod("ycoef", "kcca", function(x, value) { x@ycoef <- value x }) ##if(!isGeneric("xvar")){ ## if (is.function("xvar")) ## fun <- xvar ## else fun <- function(object) standardGeneric("xvar") ## setGeneric("xvar", fun) ##} ##setMethod("xvar", "kcca", function(object) object@xvar) ##setGeneric("xvar<-", function(x, value) standardGeneric("xvar<-")) ##setReplaceMethod("xvar", "kcca", function(x, value) { ## x@xvar <- value ## x ##}) ##if(!isGeneric("yvar")){ ## if (is.function("yvar")) ## fun <- yvar ## else fun <- function(object) standardGeneric("yvar") ## setGeneric("yvar", fun) ##} ##setMethod("yvar", "kcca", function(object) object@yvar) ##setGeneric("yvar<-", function(x, value) standardGeneric("yvar<-")) ##setReplaceMethod("yvar", "kcca", function(x, value) { ## x@yvar <- value ## x ##}) ## Gaussian Processes object setClass("gausspr",representation(tol = "numeric", scaling = "ANY", sol = "matrix", alphaindex="list", nvar = "numeric" ),contains="vm") setMethod("alphaindex","gausspr", function(object) object@alphaindex) setReplaceMethod("alphaindex","gausspr", function(x, value){ x@alphaindex <- value x }) if(!isGeneric("sol")){ if (is.function("sol")) fun <- sol else fun <- function(object) standardGeneric("sol") setGeneric("sol", fun) } setMethod("sol","gausspr", function(object) object@sol) setGeneric("sol<-", function(x, value) standardGeneric("sol<-")) setReplaceMethod("sol","gausspr", function(x, value){ x@sol <- value x }) setMethod("scaling","gausspr", function(object) object@scaling) setReplaceMethod("scaling","gausspr", function(x, value){ x@scaling <- value x }) setMethod("coef", "gausspr", function(object, ...) 
object@alpha) # Relevance Vector Machine object setClass("rvm", representation(tol = "numeric", nvar = "numeric", mlike = "numeric", RVindex = "vector", coef = "ANY", nRV = "numeric"),contains ="vm") if(!isGeneric("tol")){ if (is.function("tol")) fun <- tol else fun <- function(object) standardGeneric("tol") setGeneric("tol", fun) } setMethod("tol", "rvm", function(object) object@tol) setGeneric("tol<-", function(x, value) standardGeneric("tol<-")) setReplaceMethod("tol", "rvm", function(x, value) { x@tol <- value x }) setMethod("coef", "rvm", function(object, ...) object@coef) setReplaceMethod("coef", "rvm", function(x, value) { x@coef <- value x }) if(!isGeneric("RVindex")){ if (is.function("RVindex")) fun <- RVindex else fun <- function(object) standardGeneric("RVindex") setGeneric("RVindex", fun) } setMethod("RVindex", "rvm", function(object) object@RVindex) setGeneric("RVindex<-", function(x, value) standardGeneric("RVindex<-")) setReplaceMethod("RVindex", "rvm", function(x, value) { x@RVindex <- value x }) if(!isGeneric("nvar")){ if (is.function("nvar")) fun <- nvar else fun <- function(object) standardGeneric("nvar") setGeneric("nvar", fun) } setMethod("nvar", "rvm", function(object) object@nvar) setGeneric("nvar<-", function(x, value) standardGeneric("nvar<-")) setReplaceMethod("nvar", "rvm", function(x, value) { x@nvar <- value x }) if(!isGeneric("nRV")){ if (is.function("nRV")) fun <- nRV else fun <- function(object) standardGeneric("nRV") setGeneric("nRV", fun) } setMethod("nRV", "rvm", function(object) object@nRV) setGeneric("nRV<-", function(x, value) standardGeneric("nRV<-")) setReplaceMethod("nRV", "rvm", function(x, value) { x@nRV <- value x }) setMethod("coef", "rvm", function(object, ...) object@alpha) if(!isGeneric("mlike")){ if (is.function("mlike")) fun <- mlike else fun <- function(object) standardGeneric("mlike") setGeneric("mlike", fun) } setMethod("mlike", "rvm", function(object) object@mlike) setGeneric("mlike<-", function(x, value) standardGeneric("mlike<-")) setReplaceMethod("mlike", "rvm", function(x, value) { x@mlike <- value x }) setClass("inchol",representation("matrix", pivots="vector", diagresidues="vector", maxresiduals="vector"), prototype=structure(.Data=matrix(), pivots=vector(), diagresidues=vector(), maxresiduals=vector())) if(!isGeneric("pivots")){ if (is.function("pivots")) fun <- pivots else fun <- function(object) standardGeneric("pivots") setGeneric("pivots", fun) } setMethod("pivots", "inchol", function(object) object@pivots) setGeneric("pivots<-", function(x, value) standardGeneric("pivots<-")) setReplaceMethod("pivots", "inchol", function(x, value) { x@pivots <- value x }) if(!isGeneric("diagresidues")){ if (is.function("diagresidues")) fun <- diagresidues else fun <- function(object) standardGeneric("diagresidues") setGeneric("diagresidues", fun) } setMethod("diagresidues", "inchol", function(object) object@diagresidues) setGeneric("diagresidues<-", function(x,value) standardGeneric("diagresidues<-")) setReplaceMethod("diagresidues", "inchol", function(x, value) { x@diagresidues <- value x }) if(!isGeneric("maxresiduals")){ if (is.function("maxresiduals")) fun <- maxresiduals else fun <- function(object) standardGeneric("maxresiduals") setGeneric("maxresiduals", fun) } setMethod("maxresiduals", "inchol", function(object) object@maxresiduals) setGeneric("maxresiduals<-", function(x,value) standardGeneric("maxresiduals<-")) setReplaceMethod("maxresiduals", "inchol", function(x, value) { x@maxresiduals <- value x }) ## csi object 
setClass("csi",representation(Q = "matrix", R = "matrix", truegain = "vector", predgain = "vector"),contains="inchol") if(!isGeneric("Q")){ if (is.function("Q")) fun <- Q else fun <- function(object) standardGeneric("Q") setGeneric("Q", fun) } setMethod("Q", "csi", function(object) object@Q) setGeneric("Q<-", function(x, value) standardGeneric("Q<-")) setReplaceMethod("Q", "csi", function(x, value) { x@Q <- value x }) if(!isGeneric("R")){ if (is.function("R")) fun <- R else fun <- function(object) standardGeneric("R") setGeneric("R", fun) } setMethod("R", "csi", function(object) object@R) setGeneric("R<-", function(x, value) standardGeneric("R<-")) setReplaceMethod("R", "csi", function(x, value) { x@R <- value x }) if(!isGeneric("truegain")){ if (is.function("truegain")) fun <- truegain else fun <- function(object) standardGeneric("truegain") setGeneric("truegain", fun) } setMethod("truegain", "csi", function(object) object@truegain) setGeneric("truegain<-", function(x, value) standardGeneric("truegain<-")) setReplaceMethod("truegain", "csi", function(x, value) { x@truegain <- value x }) if(!isGeneric("predgain")){ if (is.function("predgain")) fun <- predgain else fun <- function(object) standardGeneric("predgain") setGeneric("predgain", fun) } setMethod("predgain", "csi", function(object) object@predgain) setGeneric("predgain<-", function(x, value) standardGeneric("predgain<-")) setReplaceMethod("predgain", "csi", function(x, value) { x@predgain <- value x }) setClass("specc",representation("vector", centers="matrix", size="vector", kernelf="kfunction", withinss = "vector" ),prototype=structure(.Data=vector(), centers = matrix(), size=matrix(), kernelf = ls, withinss=vector())) if(!isGeneric("centers")){ if (is.function("centers")) fun <- centers else fun <- function(object) standardGeneric("centers") setGeneric("centers", fun) } setMethod("centers", "specc", function(object) object@centers) setGeneric("centers<-", function(x,value) standardGeneric("centers<-")) setReplaceMethod("centers", "specc", function(x, value) { x@centers <- value x }) if(!isGeneric("size")){ if (is.function("size")) fun <- size else fun <- function(object) standardGeneric("size") setGeneric("size", fun) } setMethod("size", "specc", function(object) object@size) setGeneric("size<-", function(x,value) standardGeneric("size<-")) setReplaceMethod("size", "specc", function(x, value) { x@size <- value x }) if(!isGeneric("withinss")){ if (is.function("withinss")) fun <- withinss else fun <- function(object) standardGeneric("withinss") setGeneric("withinss", fun) } setMethod("withinss", "specc", function(object) object@withinss) setGeneric("withinss<-", function(x,value) standardGeneric("withinss<-")) setReplaceMethod("withinss", "specc", function(x, value) { x@withinss <- value x }) setMethod("kernelf","specc", function(object) object@kernelf) setReplaceMethod("kernelf","specc", function(x, value){ x@kernelf <- value x }) setClass("ranking",representation("matrix", convergence="matrix", edgegraph="matrix"), prototype=structure(.Data=matrix(), convergence=matrix(), edgegraph=matrix())) if(!isGeneric("convergence")){ if (is.function("convergence")) fun <- convergence else fun <- function(object) standardGeneric("convergence") setGeneric("convergence", fun) } setMethod("convergence", "ranking", function(object) object@convergence) setGeneric("convergence<-", function(x,value) standardGeneric("convergence<-")) setReplaceMethod("convergence", "ranking", function(x, value) { x@convergence <- value x }) 
if(!isGeneric("edgegraph")){ if (is.function("edgegraph")) fun <- edgegraph else fun <- function(object) standardGeneric("edgegraph") setGeneric("edgegraph", fun) } setMethod("edgegraph", "ranking", function(object) object@edgegraph) setGeneric("edgegraph<-", function(x,value) standardGeneric("edgegraph<-")) setReplaceMethod("edgegraph", "ranking", function(x, value) { x@edgegraph <- value x }) ## online learning algorithms class setClass("onlearn", representation( kernelf = "kfunction", buffer = "numeric", kpar = "list", xmatrix = "matrix", fit = "numeric", onstart = "numeric", onstop = "numeric", alpha = "ANY", rho = "numeric", b = "numeric", pattern ="ANY", type="character" )) if(!isGeneric("fit")){ if (is.function("fit")) fun <- fit else fun <- function(object) standardGeneric("fit") setGeneric("fit", fun) } setMethod("fit","onlearn", function(object) object@fit) setGeneric("fit<-", function(x, value) standardGeneric("fit<-")) setReplaceMethod("fit","onlearn", function(x, value){ x@fit <- value x }) if(!isGeneric("onstart")){ if (is.function("onstart")) fun <- onstart else fun <- function(object) standardGeneric("onstart") setGeneric("onstart", fun) } setMethod("onstart", "onlearn", function(object) object@onstart) setGeneric("onstart<-", function(x, value) standardGeneric("onstart<-")) setReplaceMethod("onstart", "onlearn", function(x, value) { x@onstart <- value x }) if(!isGeneric("onstop")){ if (is.function("onstop")) fun <- onstop else fun <- function(object) standardGeneric("onstop") setGeneric("onstop", fun) } setMethod("onstop", "onlearn", function(object) object@onstop) setGeneric("onstop<-", function(x, value) standardGeneric("onstop<-")) setReplaceMethod("onstop", "onlearn", function(x, value) { x@onstop <- value x }) if(!isGeneric("buffer")){ if (is.function("buffer")) fun <- buffer else fun <- function(object) standardGeneric("buffer") setGeneric("buffer", fun) } setMethod("buffer", "onlearn", function(object) object@buffer) setGeneric("buffer<-", function(x, value) standardGeneric("buffer<-")) setReplaceMethod("buffer", "onlearn", function(x, value) { x@buffer <- value x }) setMethod("kernelf","onlearn", function(object) object@kernelf) setReplaceMethod("kernelf","onlearn", function(x, value){ x@kernelf <- value x }) setMethod("kpar","onlearn", function(object) object@kpar) setReplaceMethod("kpar","onlearn", function(x, value){ x@kpar <- value x }) setMethod("xmatrix","onlearn", function(object) object@xmatrix) setReplaceMethod("xmatrix","onlearn", function(x, value){ x@xmatrix <- value x }) setMethod("alpha","onlearn", function(object) object@alpha) setReplaceMethod("alpha","onlearn", function(x, value){ x@alpha <- value x }) setMethod("b","onlearn", function(object) object@b) setReplaceMethod("b","onlearn", function(x, value){ x@b <- value x }) setMethod("type","onlearn", function(object) object@type) setReplaceMethod("type","onlearn", function(x, value){ x@type <- value x }) if(!isGeneric("rho")){ if (is.function("rho")) fun <- rho else fun <- function(object) standardGeneric("rho") setGeneric("rho", fun) } setMethod("rho", "onlearn", function(object) object@rho) setGeneric("rho<-", function(x, value) standardGeneric("rho<-")) setReplaceMethod("rho", "onlearn", function(x, value) { x@rho <- value x }) if(!isGeneric("pattern")){ if (is.function("pattern")) fun <- pattern else fun <- function(object) standardGeneric("pattern") setGeneric("pattern", fun) } setMethod("pattern", "onlearn", function(object) object@pattern) setGeneric("pattern<-", function(x, value) 
standardGeneric("pattern<-")) setReplaceMethod("pattern", "onlearn", function(x, value) { x@pattern <- value x }) setClass("kfa",representation(alpha = "matrix", alphaindex = "vector", kernelf = "kfunction", xmatrix = "matrix", kcall = "call", terms = "ANY" )) setMethod("coef", "kfa", function(object, ...) object@alpha) setMethod("kernelf","kfa", function(object) object@kernelf) setReplaceMethod("kernelf","kfa", function(x, value){ x@kernelf <- value x }) setMethod("alphaindex","kfa", function(object) object@alphaindex) setReplaceMethod("alphaindex","kfa", function(x, value){ x@alphaindex <- value x }) setMethod("alpha","kfa", function(object) object@alpha) setReplaceMethod("alpha","kfa", function(x, value){ x@alpha <- value x }) setMethod("xmatrix","kfa", function(object) object@xmatrix) setReplaceMethod("xmatrix","kfa", function(x, value){ x@xmatrix <- value x }) setMethod("kcall","kfa", function(object) object@kcall) setReplaceMethod("kcall","kfa", function(x, value){ x@kcall <- value x }) setMethod("terms","kfa", function(x, ...) x@terms) setReplaceMethod("terms","kfa", function(x, value){ x@terms <- value x }) ## kernel hebbian algorithm object setClass("kha", representation(eskm ="vector"),contains="prc") ## accessor functions if(!isGeneric("eskm")){ if (is.function("eskm")) fun <- eskm else fun <- function(object) standardGeneric("eskm") setGeneric("eskm", fun) } setMethod("eskm", "kha", function(object) object@eskm) setGeneric("eskm<-", function(x, value) standardGeneric("eskm<-")) setReplaceMethod("eskm", "kha", function(x, value) { x@eskm <- value x }) kernlab/R/kfa.R0000644000175100001440000001020212676464656013014 0ustar hornikusers ## This code takes the set x of vectors from the input space ## and does projection pursuit to find a good basis for x. ## ## The algorithm is described in Section 14.5 of ## Learning with Kernels by B. Schoelkopf and A. Smola, entitled ## Kernel Feature Analysis. ## ## created : 17.09.04 alexandros ## updated : setGeneric("kfa",function(x, ...) standardGeneric("kfa")) setMethod("kfa", signature(x = "formula"), function(x, data = NULL, na.action = na.omit, ...) { mt <- terms(x, data = data) if(attr(mt, "response") > 0) stop("response not allowed in formula") attr(mt, "intercept") <- 0 cl <- match.call() mf <- match.call(expand.dots = FALSE) mf$formula <- mf$x mf$... <- NULL mf[[1L]] <- quote(stats::model.frame) mf <- eval(mf, parent.frame()) Terms <- attr(mf, "terms") na.act <- attr(mf, "na.action") x <- model.matrix(mt, mf) res <- kfa(x, ...) 
## fix up call to refer to the generic, but leave arg name as `formula' cl[[1]] <- as.name("kfa") kcall(res) <- cl attr(Terms,"intercept") <- 0 terms(res) <- Terms if(!is.null(na.act)) n.action(res) <- na.act return(res) }) setMethod("kfa",signature(x="matrix"), function(x, kernel="rbfdot", kpar=list(sigma=0.1), features = 0, subset = 59, normalize = TRUE, na.action = na.omit) { if(!is.matrix(x)) stop("x must be a matrix") x <- na.action(x) if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") ## initialize variables m <- dim(x)[1] if(subset > m) subset <- m if (features==0) features <- subset alpha <- matrix(0,subset,features) alphazero <- rep(1,subset) alphafeat <- matrix(0,features,features) idx <- -(1:subset) randomindex <- sample(1:m, subset) K <- kernelMatrix(kernel,x[randomindex,,drop=FALSE],x) ## main loop for (i in 1:features) { K.cols <- K[-idx, , drop = FALSE] if(i > 1) projections <- K.cols * (alphazero[-idx]%*%t(rep(1,m))) + crossprod(t(alpha[-idx,1:(i-1),drop=FALSE]),K[idx, ,drop = FALSE]) else projections <- K.cols * (alphazero%*%t(rep(1,m))) Q <- apply(projections, 1, sd) Q.tmp <- rep(0,subset) Q.tmp[-idx] <- Q Qidx <- which.max(Q.tmp) Qmax <- Q.tmp[Qidx] if(i > 1) alphafeat[i,1:(i-1)] <- alpha[Qidx,1:(i-1)] alphafeat[i,i] <- alphazero[Qidx] if (i > 1) idx <- c(idx,Qidx) else idx <- Qidx if (i > 1) Qfeat <- c(Qfeat, Qmax) else Qfeat <- Qmax Ksub <- K[idx, idx, drop = FALSE] alphasub <- alphafeat[i,1:i] phisquare <- alphasub %*% Ksub %*% t(t(alphasub)) dotprod <- (alphazero * (K[,idx, drop = FALSE] %*% t(t(alphasub))) + alpha[,1:i]%*%(Ksub%*%t(t(alphasub))))/drop(phisquare) alpha[,1:i] <- alpha[,1:i] - dotprod %*%alphasub if(normalize){ sumalpha <- alphazero + rowSums(abs(alpha)) alphazero <- alphazero / sumalpha alpha <- alpha/ (sumalpha %*% t(rep(1,features))) } } obj <- new("kfa") alpha(obj) <- alphafeat alphaindex(obj) <- randomindex[idx] xmatrix(obj) <- x[alphaindex(obj),] kernelf(obj) <- kernel kcall(obj) <- match.call() return(obj) }) ## project a new matrix into the feature space setMethod("predict",signature(object="kfa"), function(object , x) { if (!is.null(terms(object))) { if(!is.matrix(x)) x <- model.matrix(delete.response(terms(object)), as.data.frame(x), na.action = n.action(object)) } else x <- if (is.vector(x)) t(t(x)) else as.matrix(x) if (!is.matrix(x)) stop("x must be a matrix a vector or a data frame") tmpres <- kernelMult(kernelf(object), x, xmatrix(object), alpha(object)) return(tmpres - matrix(colSums(tmpres)/dim(tmpres)[1],dim(tmpres)[1],dim(tmpres)[2],byrow=TRUE)) }) setMethod("show",signature(object="kfa"), function(object) { cat(paste("Number of features :",dim(alpha(object))[2],"\n")) show(kernelf(object)) }) kernlab/R/ranking.R0000644000175100001440000002172113561524074013675 0ustar hornikusers## manifold ranking ## author: alexandros setGeneric("ranking",function(x, ...) standardGeneric("ranking")) setMethod("ranking",signature(x="matrix"), function (x, y, kernel = "rbfdot", kpar = list(sigma = 1), scale = FALSE, alpha = 0.99, iterations = 600, edgegraph = FALSE, convergence = FALSE, ...) 
{ m <- dim(x)[1] d <- dim(x)[2] if(length(y) != m) { ym <- matrix(0,m,1) ym[y] <- 1 y <- ym } if (is.null(y)) y <- matrix(1, m, 1) labelled <- y != 0 if (!any(labelled)) stop("no labels sublied") if(is.character(kernel)) kernel <- match.arg(kernel,c("rbfdot","polydot","tanhdot","vanilladot","besseldot","laplacedot")) if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") if(scale) x <- scale(x) ## scaling from ksvm ## normalize ? if (is(kernel)[1]=='rbfkernel' && edgegraph){ sigma = kpar(kernel)$sigma n <- dim(x)[1] dota <- rowSums(x*x)/2 sed <- crossprod(t(x)) for (i in 1:n) sed[i,] <- - 2*(sed[i,] - dota - rep(dota[i],n)) diag(sed) <- 0 K <- exp(- sigma * sed) mst <- minimum.spanning.tree(sed) algo.mst <- mst$E max.squared.edge.length <- mst$max.sed.in.tree edgegraph <- (sed <= max.squared.edge.length) K[!edgegraph] <- 0 ##algo.edge.graph <- sparse(algo.edge.graph) rm(sed) gc() } else { if(edgegraph && is(kernel)[1]!="rbfkernel") warning('edge graph is only implemented for use with the RBF kernel') edgegraph <- matrix() K <- kernelMatrix(kernel,x) } diag(K) <- 0 ##K <- sparse(K) cs <- colSums(K) ##cs[cs <= 10e-6] <- 1 D <- 1/sqrt(cs) K <- D * K %*% diag(D) if(sum(labelled)==1) y <- K[, labelled,drop = FALSE] else y <- as.matrix(colSums(K[, labelled])) K <- alpha * K[, !labelled] ym <- matrix(0,m,iterations) ym[,1] <- y for (iteration in 2:iterations) ym[, iteration] <- ym[, 1] + K %*% ym[!labelled, iteration-1] ym[labelled,] <- NA r <- ym r[!labelled,] <- compute.ranks(-r[!labelled, ]) if(convergence) convergence <- (r - rep(r[,dim(r)[2]],iterations))/(m-sum(labelled)) else convergence <- matrix() res <- cbind(t(t(1:m)), ym[,iterations], r[,iterations]) return(new("ranking", .Data=res, convergence = convergence, edgegraph = edgegraph)) }) ## kernelMatrix interface setMethod("ranking",signature(x="kernelMatrix"), function (x, y, alpha = 0.99, iterations = 600, convergence = FALSE, ...) { m <- dim(x)[1] if(length(y) != m) { ym <- matrix(0,m,1) ym[y] <- 1 y <- ym } if (is.null(y)) y <- matrix(1, m, 1) labelled <- y != 0 if (!any(labelled)) stop("no labels sublied") diag(x) <- 0 ##K <- sparse(K) cs <- colSums(x) ##cs[cs <= 10e-6] <- 1 D <- 1/sqrt(cs) x <- D * x %*% diag(D) if(sum(labelled)==1) y <- x[, labelled,drop = FALSE] else y <- as.matrix(colSums(x[, labelled])) x <- alpha * x[, !labelled] ym <- matrix(0,m,iterations) ym[,1] <- y for (iteration in 2:iterations) ym[, iteration] <- ym[, 1] + x %*% ym[!labelled, iteration-1] ym[labelled,] <- NA r <- ym r[!labelled,] <- compute.ranks(-r[!labelled, ]) if(convergence) convergence <- (r - rep(r[,dim(r)[2]],iterations))/(m-sum(labelled)) else convergence <- matrix() res <- cbind(t(t(1:m)), ym[,iterations], r[,iterations]) return(new("ranking", .Data=res, convergence = convergence)) }) ## list interface setMethod("ranking",signature(x="list"), function (x, y, kernel = "stringdot", kpar = list(length = 4, lambda = 0.5), alpha = 0.99, iterations = 600, convergence = FALSE, ...) 
{ m <- length(x) if(length(y) != m) { ym <- matrix(0,m,1) ym[y] <- 1 y <- ym } if (is.null(y)) y <- matrix(1, m, 1) labelled <- y != 0 if (!any(labelled)) stop("no labels sublied") if(is.character(kernel)) kernel <- match.arg(kernel,c("rbfdot","polydot","tanhdot","vanilladot","besseldot","laplacedot")) if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") edgegraph <- matrix() K <- kernelMatrix(kernel,x) diag(K) <- 0 ##K <- sparse(K) cs <- colSums(K) ##cs[cs <= 10e-6] <- 1 D <- 1/sqrt(cs) K <- D * K %*% diag(D) if(sum(labelled)==1) y <- K[, labelled,drop = FALSE] else y <- as.matrix(colSums(K[, labelled])) K <- alpha * K[, !labelled] ym <- matrix(0,m,iterations) ym[,1] <- y for (iteration in 2:iterations) ym[, iteration] <- ym[, 1] + K %*% ym[!labelled, iteration-1] ym[labelled,] <- NA r <- ym r[!labelled,] <- compute.ranks(-r[!labelled, ]) if(convergence) convergence <- (r - rep(r[,dim(r)[2]],iterations))/(m-sum(labelled)) else convergence <- matrix() res <- cbind(t(t(1:m)), ym[,iterations], r[,iterations]) return(new("ranking", .Data=res, convergence = convergence, edgegraph = NULL)) }) minimum.spanning.tree <- function(sed) { max.sed.in.tree <- 0 E <- matrix(0,dim(sed)[1],dim(sed)[2]) n <- dim(E)[1] C <- logical(n) cmp <- sed diag(cmp) <- NA ans <- min(cmp, na.rm = TRUE) i <- which.min(cmp) j <- i%/%n + 1 i <- i%%n +1 for (nC in 1:n) { cmp <- sed cmp[C,] <- NA cmp[,!C] <- NA if(nC == 1) { ans <- 1 i <- 1 } else{ ans <- min(cmp, na.rm=TRUE) i <- which.min(cmp)} j <- i%/%n + 1 i <- i%%n + 1 E[i, j] <- nC E[j, i] <- nC C[i] <- TRUE max.sed.in.tree <- max(max.sed.in.tree, sed[i, j]) } ## E <- sparse(E) res <- list(E=E, max.sed.in.tree=max.sed.in.tree) } compute.ranks <- function(am) { rm <- matrix(0,dim(am)[1],dim(am)[2]) for (j in 1:dim(am)[2]) { a <- am[, j] sort <- sort(a, index.return = TRUE) sorted <- sort$x r <- sort$ix r[r] <- 1:length(r) while(1) { if(sum(na.omit(diff(sorted) == 0)) == 0) break tied <- sorted[min(which(diff(sorted) == 0))] sorted[sorted==tied] <- NA r[a==tied] <- mean(r[a==tied]) } rm[, j] <- r } return(rm) } setMethod("show","ranking", function(object) { cat("Ranking object of class \"ranking\"","\n") cat("\n") show(object@.Data) cat("\n") if(!any(is.na(convergence(object)))) cat("convergence matrix included.","\n") if(!any(is.na(edgegraph(object)))) cat("edgegraph matrix included.","\n") }) kernlab/R/kqr.R0000644000175100001440000002444614221633042013036 0ustar hornikuserssetGeneric("kqr", function(x, ...) standardGeneric("kqr")) setMethod("kqr",signature(x="formula"), function (x, data=NULL, ..., subset, na.action = na.omit, scaled = TRUE){ cl <- match.call() m <- match.call(expand.dots = FALSE) if (is.matrix(eval(m$data, parent.frame()))) m$data <- as.data.frame(data) m$... <- NULL m$formula <- m$x m$x <- NULL m[[1L]] <- quote(stats::model.frame) m <- eval(m, parent.frame()) Terms <- attr(m, "terms") attr(Terms, "intercept") <- 0 x <- model.matrix(Terms, m) y <- model.extract(m, "response") if (length(scaled) == 1) scaled <- rep(scaled, ncol(x)) if (any(scaled)) { remove <- unique(c(which(labels(Terms) %in% names(attr(x, "contrasts"))), which(!scaled) ) ) scaled <- !attr(x, "assign") %in% remove } ret <- kqr(x, y, scaled = scaled, ...) kcall(ret) <- cl terms(ret) <- Terms if (!is.null(attr(m, "na.action"))) n.action(ret) <- attr(m, "na.action") return (ret) }) setMethod("kqr",signature(x="vector"), function(x,...) 
{ x <- t(t(x)) ret <- kqr(x, ...) ret }) setMethod("kqr",signature(x="matrix"), function (x, y, scaled = TRUE, tau = 0.5, C = 0.1, kernel = "rbfdot", kpar = "automatic", reduced = FALSE, rank = dim(x)[1]/6, fit = TRUE, cross = 0, na.action = na.omit) { if((tau > 1)||(tau < 0 )) stop("tau has to be strictly between 0 and 1") ret <- new("kqr") param(ret) <- list(C = C, tau = tau) if (is.null(y)) x <- na.action(x) else { df <- na.action(data.frame(y, x)) y <- df[,1] x <- as.matrix(df[,-1]) } ncols <- ncol(x) m <- nrows <- nrow(x) tmpsc <- NULL x.scale <- y.scale <- NULL ## scaling if (length(scaled) == 1) scaled <- rep(scaled, ncol(x)) if (any(scaled)) { co <- !apply(x[,scaled, drop = FALSE], 2, var) if (any(co)) { scaled <- rep(FALSE, ncol(x)) warning(paste("Variable(s)", paste("`",colnames(x[,scaled, drop = FALSE])[co], "'", sep="", collapse=" and "), "constant. Cannot scale data.") ) } else { xtmp <- scale(x[,scaled]) x[,scaled] <- xtmp x.scale <- attributes(xtmp)[c("scaled:center","scaled:scale")] y <- scale(y) y.scale <- attributes(y)[c("scaled:center","scaled:scale")] y <- as.vector(y) tmpsc <- list(scaled = scaled, x.scale = x.scale,y.scale = y.scale) } } ## Arrange all the kernel mambo jumpo if(is.character(kernel)){ kernel <- match.arg(kernel,c("rbfdot","polydot","tanhdot","vanilladot","laplacedot","besseldot","anovadot","splinedot")) if(is.character(kpar)) if((kernel == "tanhdot" || kernel == "vanilladot" || kernel == "polydot"|| kernel == "besseldot" || kernel== "anovadot"|| kernel=="splinedot") && kpar=="automatic" ) { cat (" Setting default kernel parameters ","\n") kpar <- list() } } if (!is.function(kernel)) if (!is.list(kpar)&&is.character(kpar)&&(is(kernel, "rbfkernel") || is(kernel, "laplacedot") || kernel == "laplacedot"|| kernel=="rbfdot")){ kp <- match.arg(kpar,"automatic") if(kp=="automatic") kpar <- list(sigma=mean(sigest(x,scaled=FALSE,frac=1)[c(1,3)])) cat("Using automatic sigma estimation (sigest) for RBF or laplace kernel","\n") } if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") ## Setup QP problem and call ipop if(!reduced) H = kernelMatrix(kernel,x) else H = csi(x, kernel = kernel, rank = rank) c = -y A = rep(1,m) b = 0 r = 0 l = matrix(C * (tau-1),m,1) u = matrix(C * tau ,m,1) qpsol = ipop(c, H, A, b, l, u, r) alpha(ret)= coef(ret) = primal(qpsol) b(ret) = dual(qpsol)[1] ## Compute training error/loss xmatrix(ret) <- x ymatrix(ret) <- y kernelf(ret) <- kernel kpar(ret) <- kpar type(ret) <- ("Quantile Regresion") if (fit){ fitted(ret) <- predict(ret, x) if (!is.null(scaling(ret)$y.scale)) fitted(ret) <- fitted(ret) * tmpsc$y.scale$"scaled:scale" + tmpsc$y.scale$"scaled:center" error(ret) <- c(pinloss(y, fitted(ret), tau), ramploss(y,fitted(ret),tau)) } else fitted(ret) <- NULL if(any(scaled)) scaling(ret) <- tmpsc ## Crossvalidation cross(ret) <- -1 if(cross == 1) cat("\n","cross should be >1 no cross-validation done!","\n","\n") else if (cross > 1) { pinloss <- 0 ramloss <- 0 crescs <- NULL suppressWarnings(vgr<-split(sample(1:m,m),1:cross)) for(i in 1:cross) { cind <- unsplit(vgr[-i],factor(rep((1:cross)[-i],unlist(lapply(vgr[-i],length))))) cret <- kqr(x[cind,],y[cind], tau = tau, C = C, scale = FALSE, kernel = kernel, cross = 0, fit = FALSE) cres <- predict(cret, x[vgr[[i]],]) crescs <- c(crescs,cres) } if (!is.null(scaling(ret)$y.scale)){ crescs <- crescs * tmpsc$y.scale$"scaled:scale" + 
tmpsc$y.scale$"scaled:center" ysvgr <- y[unlist(vgr)] * tmpsc$y.scale$"scaled:scale" + tmpsc$y.scale$"scaled:center" } else ysvgr <- y[unlist(vgr)] pinloss <- drop(pinloss(ysvgr, crescs, tau)) ramloss <- drop(ramploss(ysvgr, crescs, tau)) cross(ret) <- c(pinloss, ramloss) } return(ret) }) setMethod("kqr",signature(x="list"), function (x, y, tau = 0.5, C = 0.1, kernel = "stringdot", kpar = list(length=4, C=0.5), fit = TRUE, cross = 0) { if((tau > 1)||(tau < 0 )) stop("tau has to be strictly between 0 and 1") if(!is(kernel,"kernel")) { if(is(kernel,"function")) kernel <- deparse(substitute(kernel)) kernel <- do.call(kernel, kpar) } if(!is(kernel,"kernel")) stop("kernel must inherit from class `kernel'") K <- kernelMatrix(kernel,x) ret <- kqr(K,y = y,tau = tau, C = C, fit = fit, cross = cross) kernelf(ret) <- kernel kpar(ret) <- kpar return(ret) }) setMethod("kqr",signature(x="kernelMatrix"), function (x, y, tau = 0.5, C = 0.1, fit = TRUE, cross = 0) { if((tau > 1)||(tau < 0 )) stop("tau has to be strictly between 0 and 1") ret <- new("kqr") param(ret) <- list(C = C, tau = tau) ncols <- ncol(x) m <- nrows <- nrow(x) y <- as.vector(y) ## Setup QP problem and call ipop H = x c = -y A = rep(1,m) b = 0 r = 0 l = matrix(C * (tau-1),m,1) u = matrix(C * tau ,m,1) qpsol = ipop(c, H, A, b, l, u, r) alpha(ret)= coef(ret) = primal(qpsol) b(ret) = dual(qpsol)[1] ## Compute training error/loss ymatrix(ret) <- y kernelf(ret) <- "Kernel Matrix used." type(ret) <- ("Quantile Regression") if (fit){ fitted(ret) <- predict(ret, x) error(ret) <- c(pinloss(y, fitted(ret), tau), ramploss(y,fitted(ret),tau)) } else NA ## Crossvalidation cross(ret) <- -1 if(cross == 1) cat("\n","cross should be >1 no cross-validation done!","\n","\n") else if (cross > 1) { pinloss <- 0 ramloss <- 0 crescs <- NULL suppressWarnings(vgr<-split(sample(1:m,m),1:cross)) for(i in 1:cross) { cind <- unsplit(vgr[-i],factor(rep((1:cross)[-i],unlist(lapply(vgr[-i],length))))) cret <- kqr(x[cind,cind],y[cind], tau = tau, C = C, scale = FALSE, cross = 0, fit = FALSE) cres <- predict(cret, x[vgr[[i]],vgr[[i]]]) crescs <- c(crescs,cres) } ysvgr <- y[unlist(vgr)] pinloss <- drop(pinloss(ysvgr, crescs, tau)) ramloss <- drop(ramploss(ysvgr, crescs, tau)) cross(ret) <- c(pinloss, ramloss) } return(ret) }) pinloss <- function(y,f,tau) { if(is.vector(y)) m <- length(y) else m <- dim(y)[1] tmp <- y - f return((tau *sum(tmp*(tmp>=0)) + (tau-1) * sum(tmp * (tmp<0)))/m) } ramploss <- function(y,f,tau) { if(is.vector(y)) m <- length(y) else m <- dim(y)[1] return(sum(y<=f)/m) } setMethod("predict", signature(object = "kqr"), function (object, newdata) { sc <- 0 if (missing(newdata)) if(!is.null(fitted(object))) return(fitted(object)) else stop("newdata is missing and no fitted values found.") if(!is(newdata,"kernelMatrix")){ ncols <- ncol(xmatrix(object)) nrows <- nrow(xmatrix(object)) oldco <- ncols if (!is.null(terms(object))) { newdata <- model.matrix(delete.response(terms(object)), as.data.frame(newdata), na.action = na.action) } else newdata <- if (is.vector (newdata)) t(t(newdata)) else as.matrix(newdata) newcols <- 0 newnrows <- nrow(newdata) newncols <- ncol(newdata) newco <- newncols if (oldco != newco) stop ("test vector does not match model !") if (is.list(scaling(object)) && sc != 1) newdata[,scaling(object)$scaled] <- scale(newdata[,scaling(object)$scaled, drop = FALSE], center = scaling(object)$x.scale$"scaled:center", scale = scaling(object)$x.scale$"scaled:scale" ) predres <-
kernelMult(kernelf(object),newdata,xmatrix(object),as.matrix(alpha(object))) - b(object) if (!is.null(scaling(object)$y.scale)) return(predres * scaling(object)$y.scale$"scaled:scale" + scaling(object)$y.scale$"scaled:center") else return(predres) } else { return(newdata%*%alpha(object) - b(object)) } }) setMethod("show","kqr", function(object){ cat("Kernel Quantile Regression object of class \"kqr\"","\n") cat("\n") show(kernelf(object)) cat("\n") cat("Regularization Cost Parameter C: ",round(param(object)[[1]],9)) cat(paste("\nNumber of training instances learned :", dim(xmatrix(object))[1],"\n")) if(!is.null(fitted(object))) cat(paste("Train error :"," pinball loss : ", round(error(object)[1],9)," rambloss :", round(error(object)[2],9),"\n")) ##train error & loss if(cross(object)!=-1) cat("Cross validation error :", " pinballoss : ", round(cross(object)[1],9)," rambloss :", round(cross(object)[2],9),"\n") }) kernlab/vignettes/0000755000175100001440000000000014656670132013730 5ustar hornikuserskernlab/vignettes/kernlab.Rnw0000644000175100001440000014230512055335060016030 0ustar hornikusers\documentclass{A} \usepackage{amsfonts,thumbpdf,alltt} \newenvironment{smallverbatim}{\small\verbatim}{\endverbatim} \newenvironment{smallexample}{\begin{alltt}\small}{\end{alltt}} \SweaveOpts{engine=R,eps=FALSE} %\VignetteIndexEntry{kernlab - An S4 Package for Kernel Methods in R} %\VignetteDepends{kernlab} %\VignetteKeywords{kernel methods, support vector machines, quadratic programming, ranking, clustering, S4, R} %\VignettePackage{kernlab} <>= library(kernlab) options(width = 70) @ \title{\pkg{kernlab} -- An \proglang{S4} Package for Kernel Methods in \proglang{R}} \Plaintitle{kernlab - An S4 Package for Kernel Methods in R} \author{Alexandros Karatzoglou\\Technische Universit\"at Wien \And Alex Smola\\Australian National University, NICTA \And Kurt Hornik\\Wirtschaftsuniversit\"at Wien } \Plainauthor{Alexandros Karatzoglou, Alex Smola, Kurt Hornik} \Abstract{ \pkg{kernlab} is an extensible package for kernel-based machine learning methods in \proglang{R}. It takes advantage of \proglang{R}'s new \proglang{S4} object model and provides a framework for creating and using kernel-based algorithms. The package contains dot product primitives (kernels), implementations of support vector machines and the relevance vector machine, Gaussian processes, a ranking algorithm, kernel PCA, kernel CCA, kernel feature analysis, online kernel methods and a spectral clustering algorithm. Moreover it provides a general purpose quadratic programming solver, and an incomplete Cholesky decomposition method. } \Keywords{kernel methods, support vector machines, quadratic programming, ranking, clustering, \proglang{S4}, \proglang{R}} \Plainkeywords{kernel methods, support vector machines, quadratic programming, ranking, clustering, S4, R} \begin{document} \section{Introduction} Machine learning is all about extracting structure from data, but it is often difficult to solve problems like classification, regression and clustering in the space in which the underlying observations have been made. Kernel-based learning methods use an implicit mapping of the input data into a high dimensional feature space defined by a kernel function, i.e., a function returning the inner product $ \langle \Phi(x),\Phi(y) \rangle$ between the images of two data points $x, y$ in the feature space. 
The learning then takes place in the feature space, provided the learning algorithm can be entirely rewritten so that the data points only appear inside dot products with other points. This is often referred to as the ``kernel trick'' \citep{kernlab:Schoelkopf+Smola:2002}. More precisely, if a projection $\Phi: X \rightarrow H$ is used, the dot product $\langle\Phi(x),\Phi(y)\rangle$ can be represented by a kernel function~$k$ \begin{equation} \label{eq:kernel} k(x,y)= \langle \Phi(x),\Phi(y) \rangle, \end{equation} which is computationally simpler than explicitly projecting $x$ and $y$ into the feature space~$H$. One interesting property of kernel-based systems is that, once a valid kernel function has been selected, one can practically work in spaces of any dimension without paying any computational cost, since feature mapping is never effectively performed. In fact, one does not even need to know which features are being used. Another advantage is that one can design and use a kernel for a particular problem that could be applied directly to the data without the need for a feature extraction process. This is particularly important in problems where a lot of structure of the data is lost by the feature extraction process (e.g., text processing). The inherent modularity of kernel-based learning methods allows one to use any valid kernel on a kernel-based algorithm. \subsection{Software review} The most prominent kernel based learning algorithm is without doubt the support vector machine (SVM), so the existence of many support vector machine packages comes as little surprise. Most of the existing SVM software is written in \proglang{C} or \proglang{C++}, e.g.\ the award winning \pkg{libsvm}\footnote{\url{http://www.csie.ntu.edu.tw/~cjlin/libsvm/}} \citep{kernlab:Chang+Lin:2001}, \pkg{SVMlight}\footnote{\url{http://svmlight.joachims.org}} \citep{kernlab:joachim:1999}, \pkg{SVMTorch}\footnote{\url{http://www.torch.ch}}, Royal Holloway Support Vector Machines\footnote{\url{http://svm.dcs.rhbnc.ac.uk}}, \pkg{mySVM}\footnote{\url{http://www-ai.cs.uni-dortmund.de/SOFTWARE/MYSVM/index.eng.html}}, and \pkg{M-SVM}\footnote{\url{http://www.loria.fr/~guermeur/}} with many packages providing interfaces to \proglang{MATLAB} (such as \pkg{libsvm}), and even some native \proglang{MATLAB} toolboxes\footnote{ \url{http://www.isis.ecs.soton.ac.uk/resources/svminfo/}}\,\footnote{ \url{http://asi.insa-rouen.fr/~arakotom/toolbox/index}}\,\footnote{ \url{http://www.cis.tugraz.at/igi/aschwaig/software.html}}. Putting SVM specific software aside and considering the abundance of other kernel-based algorithms published nowadays, there is little software available implementing a wider range of kernel methods with some exceptions like the \pkg{Spider}\footnote{\url{http://www.kyb.tuebingen.mpg.de/bs/people/spider/}} software which provides a \proglang{MATLAB} interface to various \proglang{C}/\proglang{C++} SVM libraries and \proglang{MATLAB} implementations of various kernel-based algorithms, \pkg{Torch} \footnote{\url{http://www.torch.ch}} which also includes more traditional machine learning algorithms, and the occasional \proglang{MATLAB} or \proglang{C} program found on a personal web page where an author includes code from a published paper. \subsection[R software]{\proglang{R} software} The \proglang{R} package \pkg{e1071} offers an interface to the award winning \pkg{libsvm} \citep{kernlab:Chang+Lin:2001}, a very efficient SVM implementation.
\pkg{libsvm} provides a robust and fast SVM implementation and produces state of the art results on most classification and regression problems \citep{kernlab:Meyer+Leisch+Hornik:2003}. The \proglang{R} interface provided in \pkg{e1071} adds all standard \proglang{R} functionality like object orientation and formula interfaces to \pkg{libsvm}. Another SVM related \proglang{R} package which was made recently available is \pkg{klaR} \citep{kernlab:Roever:2004} which includes an interface to \pkg{SVMlight}, a popular SVM implementation along with other classification tools like Regularized Discriminant Analysis. However, most of the \pkg{libsvm} and \pkg{klaR} SVM code is in \proglang{C++}. Therefore, if one would like to extend or enhance the code with e.g.\ new kernels or different optimizers, one would have to modify the core \proglang{C++} code. \section[kernlab]{\pkg{kernlab}} \pkg{kernlab} aims to provide the \proglang{R} user with basic kernel functionality (e.g., like computing a kernel matrix using a particular kernel), along with some utility functions commonly used in kernel-based methods like a quadratic programming solver, and modern kernel-based algorithms based on the functionality that the package provides. Taking advantage of the inherent modularity of kernel-based methods, \pkg{kernlab} aims to allow the user to switch between kernels on an existing algorithm and even create and use own kernel functions for the kernel methods provided in the package. \subsection[S4 objects]{\proglang{S4} objects} \pkg{kernlab} uses \proglang{R}'s new object model described in ``Programming with Data'' \citep{kernlab:Chambers:1998} which is known as the \proglang{S4} class system and is implemented in the \pkg{methods} package. In contrast with the older \proglang{S3} model for objects in \proglang{R}, classes, slots, and methods relationships must be declared explicitly when using the \proglang{S4} system. The number and types of slots in an instance of a class have to be established at the time the class is defined. The objects from the class are validated against this definition and have to comply to it at any time. \proglang{S4} also requires formal declarations of methods, unlike the informal system of using function names to identify a certain method in \proglang{S3}. An \proglang{S4} method is declared by a call to \code{setMethod} along with the name and a ``signature'' of the arguments. The signature is used to identify the classes of one or more arguments of the method. Generic functions can be declared using the \code{setGeneric} function. Although such formal declarations require package authors to be more disciplined than when using the informal \proglang{S3} classes, they provide assurance that each object in a class has the required slots and that the names and classes of data in the slots are consistent. An example of a class used in \pkg{kernlab} is shown below. Typically, in a return object we want to include information on the result of the method along with additional information and parameters. Usually \pkg{kernlab}'s classes include slots for the kernel function used and the results and additional useful information. 
\begin{smallexample} setClass("specc", representation("vector", # the vector containing the cluster centers="matrix", # the cluster centers size="vector", # size of each cluster kernelf="function", # kernel function used withinss = "vector"), # within cluster sum of squares prototype = structure(.Data = vector(), centers = matrix(), size = matrix(), kernelf = ls, withinss = vector())) \end{smallexample} Accessor and assignment functions are defined and used to access the content of each slot which can also be accessed with the \verb|@| operator. \subsection{Namespace} Namespaces were introduced in \proglang{R} 1.7.0 and provide a means for packages to control the way global variables and methods are being made available. Due to the number of assignment and accessor functions involved, a namespace is used to control the methods which are being made visible outside the package. Since \proglang{S4} methods are being used, the \pkg{kernlab} namespace also imports methods and variables from the \pkg{methods} package. \subsection{Data} The \pkg{kernlab} package also includes data sets which will be used to illustrate the methods included in the package. The \code{spam} data set \citep{kernlab:Hastie:2001} collected at Hewlett-Packard Labs contains data on 2788 and 1813 e-mails classified as non-spam and spam, respectively. The 57 variables of each data vector indicate the frequency of certain words and characters in the e-mail. Another data set included in \pkg{kernlab}, the \code{income} data set \citep{kernlab:Hastie:2001}, is taken from a marketing survey in the San Francisco Bay area concerning the income of shopping mall customers. It consists of 14 demographic attributes (nominal and ordinal variables) including the income and 8993 observations. The \code{ticdata} data set \citep{kernlab:Putten:2000} was used in the 2000 Coil Challenge and contains information on customers of an insurance company. The data consists of 86 variables and includes product usage data and socio-demographic data derived from zip area codes. The data was collected to answer the following question: Can you predict who would be interested in buying a caravan insurance policy and give an explanation why? The \code{promotergene} is a data set of E. coli promoter gene sequences (DNA) with 106 observations and 58 variables available at the UCI Machine Learning repository. Promoters have a region where a protein (RNA polymerase) must make contact and the helical DNA sequence must have a valid conformation so that the two pieces of the contact region spatially align. The data contains DNA sequences of promoters and non-promoters. The \code{spirals} data set was created by the \code{mlbench.spirals} function in the \pkg{mlbench} package \citep{kernlab:Leisch+Dimitriadou}. This two-dimensional data set with 300 data points consists of two spirals where Gaussian noise is added to each data point. \subsection{Kernels} A kernel function~$k$ calculates the inner product of two vectors $x$, $x'$ in a given feature mapping $\Phi: X \rightarrow H$. The notion of a kernel is obviously central in the making of any kernel-based algorithm and consequently also in any software package containing kernel-based methods. Kernels in \pkg{kernlab} are \proglang{S4} objects of class \code{kernel} extending the \code{function} class with one additional slot containing a list with the kernel hyper-parameters.
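To make this concrete, the following small chunk (an illustration only; the choice of the RBF kernel and of the hyper-parameter value is arbitrary) shows that such a kernel object is an ordinary \proglang{R} function of class \code{kernel} whose hyper-parameters can be retrieved with the \code{kpar} accessor:
<<>>=
## create a kernel object and inspect it
rbf1 <- rbfdot(sigma = 0.1)
class(rbf1)
is(rbf1, "kernel")
## the list stored in the hyper-parameter slot
kpar(rbf1)
@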
Package \pkg{kernlab} includes 7 different kernel classes which all contain the class \code{kernel} and are used to implement the existing kernels. These classes are used in the function dispatch mechanism of the kernel utility functions described below. Existing kernel functions are initialized by ``creator'' functions. All kernel functions take two feature vectors as parameters and return the scalar dot product of the vectors. An example of the functionality of a kernel in \pkg{kernlab}: <>= ## create a RBF kernel function with sigma hyper-parameter 0.05 rbf <- rbfdot(sigma = 0.05) rbf ## create two random feature vectors x <- rnorm(10) y <- rnorm(10) ## compute dot product between x,y rbf(x, y) @ The package includes implementations of the following kernels: \begin{itemize} \item the linear \code{vanilladot} kernel implements the simplest of all kernel functions \begin{equation} k(x,x') = \langle x, x' \rangle \end{equation} which is useful specially when dealing with large sparse data vectors~$x$ as is usually the case in text categorization. \item the Gaussian radial basis function \code{rbfdot} \begin{equation} k(x,x') = \exp(-\sigma \|x - x'\|^2) \end{equation} which is a general purpose kernel and is typically used when no further prior knowledge is available about the data. \item the polynomial kernel \code{polydot} \begin{equation} k(x, x') = \left( \mathrm{scale} \cdot \langle x, x' \rangle + \mathrm{offset} \right)^\mathrm{degree}. \end{equation} which is used in classification of images. \item the hyperbolic tangent kernel \code{tanhdot} \begin{equation} k(x, x') = \tanh \left( \mathrm{scale} \cdot \langle x, x' \rangle + \mathrm{offset} \right) \end{equation} which is mainly used as a proxy for neural networks. \item the Bessel function of the first kind kernel \code{besseldot} \begin{equation} k(x, x') = \frac{\mathrm{Bessel}_{(\nu+1)}^n(\sigma \|x - x'\|)} {(\|x-x'\|)^{-n(\nu+1)}}. \end{equation} is a general purpose kernel and is typically used when no further prior knowledge is available and mainly popular in the Gaussian process community. \item the Laplace radial basis kernel \code{laplacedot} \begin{equation} k(x, x') = \exp(-\sigma \|x - x'\|) \end{equation} which is a general purpose kernel and is typically used when no further prior knowledge is available. \item the ANOVA radial basis kernel \code{anovadot} performs well in multidimensional regression problems \begin{equation} k(x, x') = \left(\sum_{k=1}^{n}\exp(-\sigma(x^k-{x'}^k)^2)\right)^{d} \end{equation} where $x^k$ is the $k$th component of $x$. \end{itemize} \subsection{Kernel utility methods} The package also includes methods for computing commonly used kernel expressions (e.g., the Gram matrix). These methods are written in such a way that they take functions (i.e., kernels) and matrices (i.e., vectors of patterns) as arguments. These can be either the kernel functions already included in \pkg{kernlab} or any other function implementing a valid dot product (taking two vector arguments and returning a scalar). In case one of the already implemented kernels is used, the function calls a vectorized implementation of the corresponding function. Moreover, in the case of symmetric matrices (e.g., the dot product matrix of a Support Vector Machine) they only require one argument rather than having to pass the same matrix twice (for rows and columns). 
The computations for the kernels already available in the package are vectorized whenever possible, which guarantees good performance and acceptable memory requirements. Users can define their own kernel by creating a function which takes two vectors as arguments (the data points) and returns a scalar (the dot product). This function can then be passed as an argument to the kernel utility methods. For a user defined kernel the dispatch mechanism calls a generic method implementation which calculates the expression by passing the kernel function through a pair of \code{for} loops. The kernel methods included are: \begin{description} \item[\code{kernelMatrix}] This is the most commonly used function. It computes $k(x, x')$, i.e., it computes the matrix $K$ where $K_{ij} = k(x_i, x_j)$ and $x$ is a \emph{row} vector. In particular, \begin{verbatim} K <- kernelMatrix(kernel, x) \end{verbatim} computes the matrix $K_{ij} = k(x_i, x_j)$ where the $x_i$ are the columns of $X$ and \begin{verbatim} K <- kernelMatrix(kernel, x1, x2) \end{verbatim} computes the matrix $K_{ij} = k(x1_i, x2_j)$. \item[\code{kernelFast}] This method is different from \code{kernelMatrix} for \code{rbfdot}, \code{besseldot}, and the \code{laplacedot} kernel, which are all RBF kernels. It is identical to \code{kernelMatrix}, except that it also requires the squared norm of the first argument as additional input. It is mainly used in kernel algorithms, where columns of the kernel matrix are computed per invocation. In these cases, evaluating the norm of each column entry, as is done on a \code{kernelMatrix} invocation on an RBF kernel, over and over again would cause significant computational overhead. Its invocation is via \begin{verbatim} K = kernelFast(kernel, x1, x2, a) \end{verbatim} Here $a$ is a vector containing the squared norms of $x1$. \item[\code{kernelMult}] is a convenient way of computing kernel expansions. It returns the vector $f = (f(x_1), \dots, f(x_m))$ where \begin{equation} f(x_i) = \sum_{j=1}^{m} k(x_i, x_j) \alpha_j, \mbox{~hence~} f = K \alpha. \end{equation} The need for such a function arises from the fact that $K$ may sometimes be larger than the memory available. Therefore, it is convenient to compute $K$ only in stripes and discard the latter after the corresponding part of $K \alpha$ has been computed. The parameter \code{blocksize} determines the number of rows in the stripes. In particular, \begin{verbatim} f <- kernelMult(kernel, x, alpha) \end{verbatim} computes $f_i = \sum_{j=1}^m k(x_i, x_j) \alpha_j$ and \begin{verbatim} f <- kernelMult(kernel, x1, x2, alpha) \end{verbatim} computes $f_i = \sum_{j=1}^m k(x1_i, x2_j) \alpha_j$. \item[\code{kernelPol}] is a method very similar to \code{kernelMatrix} with the only difference that rather than computing $K_{ij} = k(x_i, x_j)$ it computes $K_{ij} = y_i y_j k(x_i, x_j)$. This means that \begin{verbatim} K <- kernelPol(kernel, x, y) \end{verbatim} computes the matrix $K_{ij} = y_i y_j k(x_i, x_j)$ where the $x_i$ are the columns of $x$ and $y_i$ are elements of the vector~$y$. Moreover, \begin{verbatim} K <- kernelPol(kernel, x1, x2, y1, y2) \end{verbatim} computes the matrix $K_{ij} = y1_i y2_j k(x1_i, x2_j)$. Both \code{x1} and \code{x2} may be matrices and \code{y1} and \code{y2} vectors.
A further example using these functions:
<<>>=
## create a polynomial kernel function of degree 2
poly <- polydot(degree=2)
## create artificial data set
x <- matrix(rnorm(60), 6, 10)
y <- matrix(rnorm(40), 4, 10)
## compute kernel matrix
kx <- kernelMatrix(poly, x)
kxy <- kernelMatrix(poly, x, y)
@
\section{Kernel methods}
Providing a solid base for creating kernel-based methods is part of what we are trying to achieve with this package; the other goal is to provide a wider range of kernel-based methods in \proglang{R}. In the rest of the paper we present the kernel-based methods available in \pkg{kernlab}. All the methods in \pkg{kernlab} can be used with any of the kernels included in the package as well as with any valid user-defined kernel. User-defined kernel functions can be passed to existing kernel methods through the \code{kernel} argument.
\subsection{Support vector machine}
Support vector machines \citep{kernlab:Vapnik:1998} have gained prominence in the fields of machine learning, pattern classification and regression. The solutions to classification and regression problems sought by kernel-based algorithms such as the SVM are linear functions in the feature space:
\begin{equation}
f(x) = w^\top \Phi(x)
\end{equation}
for some weight vector $w \in F$. The kernel trick can be exploited in this setting whenever the weight vector~$w$ can be expressed as a linear combination of the training points, $w = \sum_{i=1}^{n} \alpha_i \Phi(x_i)$, implying that $f$ can be written as
\begin{equation}
f(x) = \sum_{i=1}^{n}\alpha_i k(x_i, x)
\end{equation}
A very important issue that arises is that of choosing a kernel~$k$ for a given learning task. Intuitively, we wish to choose a kernel that induces the ``right'' metric in the space. Support Vector Machines choose a function $f$ that is linear in the feature space by optimizing some criterion over the sample. In the case of the 2-norm Soft Margin classification the optimization problem takes the form:
\begin{eqnarray}
\nonumber
\mathrm{minimize} && t(w,\xi) = \frac{1}{2}{\|w\|}^2+\frac{C}{m}\sum_{i=1}^{m}\xi_i \\
\mbox{subject to~} && y_i ( \langle x_i , w \rangle +b ) \geq 1- \xi_i \qquad (i=1,\dots,m)\\
\nonumber
&& \xi_i \ge 0 \qquad (i=1,\dots, m)
\end{eqnarray}
Based on similar methodology, SVMs deal with the problem of novelty detection (or one-class classification) and regression. \pkg{kernlab}'s implementation of support vector machines, \code{ksvm}, is based on the optimizers found in \pkg{bsvm}\footnote{\url{http://www.csie.ntu.edu.tw/~cjlin/bsvm}} \citep{kernlab:Hsu:2002} and \pkg{libsvm} \citep{kernlab:Chang+Lin:2001} which includes a very efficient version of the Sequential Minimal Optimization (SMO) algorithm. SMO decomposes the SVM Quadratic Problem (QP) without using any numerical QP optimization steps. Instead, it chooses to solve the smallest possible optimization problem involving two elements of $\alpha$, because the $\alpha_i$ must obey one linear equality constraint. At every step, SMO chooses two $\alpha_i$ to jointly optimize, finds the optimal values for these $\alpha_i$ analytically, thus avoiding numerical QP optimization, and updates the SVM to reflect the new optimal values.
The SVM implementations available in \code{ksvm} include the C-SVM classification algorithm along with the $\nu$-SVM classification formulation, which is equivalent to the former but has a more natural model parameter $\nu$, taking values in $[0,1]$, which is proportional to the fraction of support vectors found in the data set and to the training error. For classification problems with more than two classes (multi-class) a one-against-one or pairwise classification method \citep{kernlab:Knerr:1990, kernlab:Kressel:1999} is used. This method constructs ${k \choose 2}$ classifiers, each trained on data from two classes. Prediction is done by voting: each classifier gives a prediction and the class which is predicted most often wins (``Max Wins''). This method has been shown to produce robust results when used with SVMs \citep{kernlab:Hsu2:2002}. Furthermore, the \code{ksvm} implementation provides the ability to produce class probabilities as output instead of class labels. This is done by an improved implementation \citep{kernlab:Lin:2001} of Platt's a posteriori probabilities \citep{kernlab:Platt:2000} where a sigmoid function
\begin{equation}
P(y=1\mid f) = \frac{1}{1+ e^{Af+B}}
\end{equation}
is fitted to the decision values~$f$ of the binary SVM classifiers; $A$ and $B$ are estimated by minimizing the negative log-likelihood function. To extend the class probabilities to the multi-class case, the class probability outputs of the binary classifiers are combined by the \code{couple} method, which implements methods for combining class probabilities proposed in \cite{kernlab:Wu:2003}.

In order to create a similar probability output for regression, following \cite{kernlab:Weng:2004}, we suppose that the SVM is trained on data from the model
\begin{equation}
y_i = f(x_i) + \delta_i
\end{equation}
where $f(x_i)$ is the underlying function and $\delta_i$ is independent and identically distributed random noise. Given a test point $x$, the distribution of $y$ given $x$ allows one to draw probabilistic inferences about $y$; e.g., one can construct a predictive interval $\Phi = \Phi(x)$ such that $y \in \Phi$ with a certain probability. If $\hat{f}$ is the estimated (predicted) function of the SVM on new data then $\eta = \eta(x) = y - \hat{f}(x)$ is the prediction error and $y \in \Phi$ is equivalent to $\eta \in \Phi$. Empirical observation shows that the distribution of the residuals $\eta$ can be modeled both by a Gaussian and a Laplacian distribution with zero mean. In this implementation the Laplacian with zero mean is used:
\begin{equation}
p(z) = \frac{1}{2\sigma}e^{-\frac{|z|}{\sigma}}
\end{equation}
Assuming that the $\eta_i$ are independent, the scale parameter $\sigma$ is estimated by maximizing the likelihood. The data for the estimation are produced by a three-fold cross-validation. For the Laplace distribution the maximum likelihood estimate is:
\begin{equation}
\sigma = \frac{\sum_{i=1}^m|\eta_i|}{m}
\end{equation}
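As a brief illustration of the regression case (a sketch only, not evaluated here), an $\epsilon$-SVM regression fit on noisy sinc data could look as follows; setting \code{prob.model = TRUE} requests, in the regression setting, the Laplacian scale estimate described above:
<<eval=FALSE>>=
## noisy sinc data
x <- seq(-20, 20, 0.1)
y <- sin(x)/x + rnorm(length(x), sd = 0.03)
## epsilon-regression with an RBF kernel; prob.model = TRUE additionally
## estimates the scale of the Laplacian fitted to the residuals
regm <- ksvm(x, y, type = "eps-svr", kernel = "rbfdot",
             kpar = list(sigma = 16), epsilon = 0.01, cross = 3,
             prob.model = TRUE)
regm
predict(regm, x[1:5])
@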
Another approach for multi-class classification supported by the \code{ksvm} function is the one proposed in \cite{kernlab:Crammer:2000}. This algorithm works by solving a single optimization problem including the data from all classes:
\begin{eqnarray}
\nonumber
\mathrm{minimize} && t(w_n,\xi) = \frac{1}{2}\sum_{n=1}^k{\|w_n\|}^2+\frac{C}{m}\sum_{i=1}^{m}\xi_i \\
\mbox{subject to~} && \langle x_i , w_{y_i} \rangle - \langle x_i , w_{n} \rangle \geq b_i^n - \xi_i \qquad (i=1,\dots,m) \\
\mbox{where} && b_i^n = 1 - \delta_{y_i,n}
\end{eqnarray}
where the decision function is
\begin{equation}
\mathrm{argmax}_{n=1,\dots,k} \langle x_i , w_{n} \rangle
\end{equation}
This optimization problem is solved by a decomposition method proposed in \cite{kernlab:Hsu:2002} where optimal working sets are found (that is, sets of $\alpha_i$ values which have a high probability of being non-zero). The QP sub-problems are then solved by a modified version of the \pkg{TRON}\footnote{\url{http://www-unix.mcs.anl.gov/~more/tron/}} \citep{kernlab:more:1999} optimization software.
One-class classification or novelty detection \citep{kernlab:Williamson:1999, kernlab:Tax:1999}, where essentially an SVM detects outliers in a data set, is another algorithm supported by \code{ksvm}. SVM novelty detection works by creating a spherical decision boundary around a set of data points by a set of support vectors describing the sphere's boundary. The $\nu$ parameter is used to control the volume of the sphere and consequently the number of outliers found. Again, the value of $\nu$ represents the fraction of outliers found. Furthermore, $\epsilon$-SVM \citep{kernlab:Vapnik2:1995} and $\nu$-SVM \citep{kernlab:Smola1:2000} regression are also available.
The problem of model selection is partially addressed by an empirical observation for the popular Gaussian RBF kernel \citep{kernlab:Caputo:2002}, where the optimal values of the hyper-parameter $\sigma$ are shown to lie between the 0.1 and 0.9 quantiles of the $\|x - x'\|^2$ statistic. The \code{sigest} function uses a sample of the training set to estimate these quantiles and returns a vector containing their values; pretty much any value within this interval leads to good performance (a short sketch of \code{sigest} is given after the example below). An example for the \code{ksvm} function is shown below.
<<>>=
## simple example using the promotergene data set
data(promotergene)
## create test and training set
tindex <- sample(1:dim(promotergene)[1],5)
genetrain <- promotergene[-tindex, ]
genetest <- promotergene[tindex,]
## train a support vector machine
gene <- ksvm(Class~.,data=genetrain,kernel="rbfdot",kpar="automatic",C=60,cross=3,prob.model=TRUE)
gene
predict(gene, genetest)
predict(gene, genetest, type="probabilities")
@
\begin{figure}
\centering
<<fig=TRUE>>=
set.seed(123)
x <- rbind(matrix(rnorm(120),,2),matrix(rnorm(120,mean=3),,2))
y <- matrix(c(rep(1,60),rep(-1,60)))
svp <- ksvm(x,y,type="C-svc")
plot(svp,data=x)
@
\caption{A contour plot of the SVM decision values for a toy binary classification problem using the \code{plot} function}
\label{fig:ksvm Plot}
\end{figure}
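To illustrate the model selection heuristic mentioned above, the following sketch (not evaluated here) applies \code{sigest} to the spam data set used later in this document; the returned values delimit a reasonable range for the \code{rbfdot} hyper-parameter $\sigma$:
<<eval=FALSE>>=
## estimate a range of suitable values for the sigma hyper-parameter
## of an RBF kernel (column 58 of spam holds the class label)
data(spam)
sigest(as.matrix(spam[, -58]))
@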
\subsection{Relevance vector machine}
The relevance vector machine \citep{kernlab:Tipping:2001} is a probabilistic sparse kernel model identical in functional form to the SVM, making predictions based on a function of the form
\begin{equation}
y(x) = \sum_{n=1}^{N} \alpha_n K(\mathbf{x},\mathbf{x}_n) + a_0
\end{equation}
where $\alpha_n$ are the model ``weights'' and $K(\cdotp,\cdotp)$ is a kernel function. It adopts a Bayesian approach to learning by introducing a prior over the weights $\alpha$,
\begin{equation}
p(\alpha, \beta) = \prod_{i=1}^m N(\alpha_i \mid 0 , \beta_i^{-1})\, \mathrm{Gamma}(\beta_i \mid a_\beta , b_\beta)
\end{equation}
governed by a set of hyper-parameters $\beta$, one associated with each weight (each with a Gamma hyper-prior with parameters $a_\beta$ and $b_\beta$), whose most probable values are iteratively estimated from the data. Sparsity is achieved because in practice the posterior distribution of many of the weights is sharply peaked around zero. Furthermore, unlike the SVM classifier, the non-zero weights in the RVM are not associated with examples close to the decision boundary, but rather appear to represent ``prototypical'' examples. These examples are termed \emph{relevance vectors}.

\pkg{kernlab} currently has an implementation of the RVM based on a type~II maximum likelihood method which can be used for regression. The function returns an \proglang{S4} object containing the model parameters along with indexes of the relevance vectors and the kernel function and hyper-parameters used.
<<>>=
x <- seq(-20, 20, 0.5)
y <- sin(x)/x + rnorm(81, sd = 0.03)
y[41] <- 1
@
<<>>=
rvmm <- rvm(x, y,kernel="rbfdot",kpar=list(sigma=0.1))
rvmm
ytest <- predict(rvmm, x)
@
\begin{figure}
\centering
<<fig=TRUE>>=
plot(x, y, cex=0.5)
lines(x, ytest, col = "red")
points(x[RVindex(rvmm)],y[RVindex(rvmm)],pch=21)
@
\caption{Relevance vector regression on data points created by the $\mathrm{sinc}(x)$ function; relevance vectors are shown circled.}
\label{fig:RVM sigmoid}
\end{figure}
\subsection{Gaussian processes}
Gaussian processes \citep{kernlab:Williams:1995} are based on the ``prior'' assumption that adjacent observations should convey information about each other. In particular, it is assumed that the observed variables are normal, and that the coupling between them takes place by means of the covariance matrix of a normal distribution. Using the kernel matrix as the covariance matrix is a convenient way of extending Bayesian modeling of linear estimators to nonlinear situations. Furthermore it represents the counterpart of the ``kernel trick'' in methods minimizing the regularized risk.
For regression estimation we assume that rather than observing $t(x_i)$ we observe $y_i = t(x_i) + \xi_i$ where $\xi_i$ is assumed to be independent Gaussian distributed noise with zero mean. The posterior distribution is given by
\begin{equation}
p(\mathbf{y}\mid \mathbf{t}) = \left[ \prod_i p(y_i - t(x_i)) \right] \frac{1}{\sqrt{(2\pi)^m \det(K)}} \exp \left(-\frac{1}{2}\mathbf{t}^T K^{-1} \mathbf{t} \right)
\end{equation}
and after substituting $\mathbf{t} = K\mathbf{\alpha}$ and taking logarithms
\begin{equation}
\ln{p(\mathbf{\alpha} \mid \mathbf{y})} = - \frac{1}{2\sigma^2}\| \mathbf{y} - K \mathbf{\alpha} \|^2 -\frac{1}{2}\mathbf{\alpha}^T K \mathbf{\alpha} +c
\end{equation}
and maximizing $\ln{p(\mathbf{\alpha} \mid \mathbf{y})}$ for $\mathbf{\alpha}$ to obtain the maximum a posteriori approximation yields
\begin{equation}
\mathbf{\alpha} = (K + \sigma^2\mathbf{1})^{-1} \mathbf{y}
\end{equation}
Knowing $\mathbf{\alpha}$ allows for prediction of $y$ at a new location $x$ through $y = K(x,x_i){\mathbf{\alpha}}$. In similar fashion Gaussian processes can be used for classification. \code{gausspr} is the function in \pkg{kernlab} implementing Gaussian processes for classification and regression.
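As a brief illustration (a sketch only, not evaluated here), \code{gausspr} can be applied to the noisy sinc data \code{x}, \code{y} generated for the RVM example above, and its classification interface parallels that of \code{ksvm}:
<<eval=FALSE>>=
## Gaussian process regression on the noisy sinc data defined above
gpfit <- gausspr(x, y, kernel = "rbfdot", kpar = list(sigma = 0.1))
gpfit
ygp <- predict(gpfit, x)
## Gaussian process classification on the promoter gene training set
## created earlier for the ksvm example
gpcls <- gausspr(Class ~ ., data = genetrain, kernel = "rbfdot",
                 kpar = list(sigma = 0.015))
predict(gpcls, genetest)
@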
\subsection{Ranking}
The success of Google has vividly demonstrated the value of a good ranking algorithm in real world problems. \pkg{kernlab} includes a ranking algorithm based on work published in \citep{kernlab:Zhou:2003}. This algorithm exploits the geometric structure of the data in contrast to the more naive approach which uses the Euclidean distances or inner products of the data. Since real world data are usually highly structured, this algorithm should perform better than a simpler approach based on a Euclidean distance measure.
First, a weighted network is defined on the data and an authoritative score is assigned to every point. The query points act as source nodes that continually pump their scores to the remaining points via the weighted network, and the remaining points further spread the score to their neighbors. The spreading process is repeated until convergence, and the points are ranked according to the scores they received.
Suppose we are given a set of data points $X = \{x_1, \dots, x_{s}, x_{s+1}, \dots, x_{m}\}$ in $\mathbf{R}^n$ where the first $s$ points are the query points and the rest are the points to be ranked. The algorithm works by connecting the two nearest points iteratively until a connected graph $G = (X, E)$ is obtained, where $E$ is the set of edges. The affinity matrix $K$ is defined e.g.\ by $K_{ij} = \exp(-\sigma\|x_i - x_j \|^2)$ if there is an edge $e(i,j) \in E$, and $0$ for the remaining entries and the diagonal elements. The matrix is normalized as $L = D^{-1/2}KD^{-1/2}$ where $D_{ii} = \sum_{j=1}^m K_{ij}$, and
\begin{equation}
f(t+1) = \alpha Lf(t) + (1 - \alpha)y
\end{equation}
is iterated until convergence, where $\alpha$ is a parameter in $[0,1)$. The points are then ranked according to their final scores $f_{i}(t_f)$.
\pkg{kernlab} includes an \proglang{S4} method implementing the ranking algorithm. The algorithm can be used both with an edge-graph, where the structure of the data is taken into account, and without one, which is equivalent to ranking the data by their distance in the projected space.
\begin{figure}
\centering
<<fig=TRUE>>=
data(spirals)
ran <- spirals[rowSums(abs(spirals) < 0.55) == 2,]
ranked <- ranking(ran, 54, kernel = "rbfdot", kpar = list(sigma = 100), edgegraph = TRUE)
ranked[54, 2] <- max(ranked[-54, 2])
c<-1:86
op <- par(mfrow = c(1, 2),pty="s")
plot(ran)
plot(ran, cex=c[ranked[,3]]/40)
@
\caption{The points on the left are ranked according to their similarity to the upper most left point. Points with a higher rank appear bigger. Instead of ranking the points on simple Euclidean distance the structure of the data is recognized and all points on the upper structure are given a higher rank although further away in distance than points in the lower structure.}
\label{fig:Ranking}
\end{figure}
\subsection{Online learning with kernels}
The \code{onlearn} function in \pkg{kernlab} implements the online kernel algorithms for classification, novelty detection and regression described in \citep{kernlab:Kivinen:2004}. In batch learning, it is typically assumed that all the examples are immediately available and are drawn independently from some distribution $P$. One natural measure of quality for some $f$ in that case is the expected risk
\begin{equation}
R[f,P] := E_{(x,y)\sim P}[l(f(x),y)]
\end{equation}
Since usually $P$ is unknown, a standard approach is to instead minimize the empirical risk
\begin{equation}
R_{emp}[f] := \frac{1}{m}\sum_{t=1}^m l(f(x_t),y_t)
\end{equation}
Minimizing $R_{emp}[f]$ may lead to overfitting (complex functions that fit well on the training data but do not generalize to unseen data).
One way to avoid this is to penalize complex functions by instead minimizing the regularized risk
\begin{equation}
R_{reg}[f,S] := R_{reg,\lambda}[f,S] := R_{emp}[f] + \frac{\lambda}{2}\|f\|_{H}^2
\end{equation}
where $\lambda > 0$ and $\|f\|_{H} = {\langle f,f \rangle}_{H}^{\frac{1}{2}}$ does indeed measure the complexity of $f$ in a sensible way. The constant $\lambda$ needs to be chosen appropriately for each problem. Since in online learning one is interested in dealing with one example at a time, the definition of an instantaneous regularized risk on a single example is needed:
\begin{equation}
R_{inst}[f,x,y] := R_{inst,\lambda}[f,x,y] := R_{reg,\lambda}[f,((x,y))]
\end{equation}
The implemented algorithms are classical stochastic gradient descent algorithms performing gradient descent on the instantaneous risk. The general form of the update rule is:
\begin{equation}
f_{t+1} = f_t - \eta_t \partial_f R_{inst,\lambda}[f,x_t,y_t]|_{f=f_t}
\end{equation}
where $f_t \in H$, $\partial_f$ is shorthand for $\partial / \partial f$ (the gradient with respect to $f$) and $\eta_t > 0$ is the learning rate. Since the learning takes place in a \textit{reproducing kernel Hilbert space} $H$, the kernel $k$ used has the property $\langle f,k(x,\cdotp)\rangle_H = f(x)$ and therefore
\begin{equation}
\partial_f l(f(x_t),y_t) = l'(f(x_t),y_t)k(x_t,\cdotp)
\end{equation}
where $l'(z,y) := \partial_z l(z,y)$. Since $\partial_f\|f\|_H^2 = 2f$, the update becomes
\begin{equation}
f_{t+1} := (1 - \eta_t\lambda)f_t -\eta_t\, l'( f_t(x_t),y_t)k(x_t,\cdotp)
\end{equation}
The \code{onlearn} function implements the online learning algorithm for regression, classification and novelty detection. The online nature of the algorithm requires a different approach to the use of the function. An object is used to store the state of the algorithm at each iteration $t$; this object is passed to the function as an argument and is returned at each iteration $t+1$ containing the model parameter state at this step. An empty object of class \code{onlearn} is initialized using the \code{inlearn} function.
<<>>=
## create toy data set
x <- rbind(matrix(rnorm(90),,2),matrix(rnorm(90)+3,,2))
y <- matrix(c(rep(1,45),rep(-1,45)),,1)
## initialize onlearn object
on <- inlearn(2,kernel="rbfdot",kpar=list(sigma=0.2),type="classification")
ind <- sample(1:90,90)
## learn one data point at a time
for(i in ind) on <- onlearn(on,x[i,],y[i],nu=0.03,lambda=0.1)
sign(predict(on,x))
@
\subsection{Spectral clustering}
Spectral clustering \citep{kernlab:Ng:2001} has recently emerged as a promising alternative to common clustering algorithms. In this method one uses the top eigenvectors of a matrix created by some similarity measure to cluster the data. Similarly to the ranking algorithm, an affinity matrix is created from the data as
\begin{equation}
K_{ij}=\exp(-\sigma\|x_i - x_j \|^2)
\end{equation}
and normalized as $L = D^{-1/2}KD^{-1/2}$ where $D_{ii} = \sum_{j=1}^m K_{ij}$. Then the top $k$ eigenvectors (where $k$ is the number of clusters to be found) of the normalized affinity matrix are used to form an $n \times k$ matrix $Y$ whose rows are normalized again to unit length. Treating each row of this matrix as a data point, \code{kmeans} is finally used to cluster the points.
\pkg{kernlab} includes an \proglang{S4} method called \code{specc} implementing this algorithm which can be used through a formula interface or a matrix interface.
The \proglang{S4} object returned by the method extends the class ``vector'' and contains the assigned cluster for each point along with information on the centers, size and within-cluster sum of squares for each cluster. In case a Gaussian RBF kernel is being used a model selection process can be used to determine the optimal value of the $\sigma$ hyper-parameter. For a good value of $\sigma$ the values of $Y$ tend to cluster tightly, and it turns out that the within-cluster sum of squares is a good indicator for the ``quality'' of the $\sigma$ parameter found. We then iterate through a range of $\sigma$ values to find an optimal value.
\begin{figure}
\centering
<<fig=TRUE>>=
data(spirals)
sc <- specc(spirals, centers=2)
plot(spirals, pch=(23 - 2*sc))
@
\caption{Clustering the two spirals data set with \code{specc}}
\label{fig:Spectral Clustering}
\end{figure}
\subsection{Kernel principal components analysis}
Principal component analysis (PCA) is a powerful technique for extracting structure from possibly high-dimensional datasets. PCA is an orthogonal transformation of the coordinate system in which we describe the data. The new coordinates by which we represent the data are called principal components. Kernel PCA \citep{kernlab:Schoelkopf:1998} performs a nonlinear transformation of the coordinate system by finding principal components which are nonlinearly related to the input variables. Given a set of centered observations $x_k$, $k=1,\dots,M$, $x_k \in \mathbf{R}^N$, PCA diagonalizes the covariance matrix $C = \frac{1}{M}\sum_{j=1}^Mx_jx_{j}^T$ by solving the eigenvalue problem $\lambda\mathbf{v}=C\mathbf{v}$. The same computation can be done in a dot product space $F$ which is related to the input space by a possibly nonlinear map $\Phi:\mathbf{R}^N \rightarrow F$, $x \mapsto \mathbf{X}$. Assuming that we deal with centered data and use the covariance matrix in $F$,
\begin{equation}
\hat{C}=\frac{1}{M}\sum_{j=1}^M \Phi(x_j)\Phi(x_j)^T
\end{equation}
the kernel principal components are then computed by taking the eigenvectors of the centered kernel matrix $K_{ij} = \langle \Phi(x_i),\Phi(x_j) \rangle$.
\code{kpca}, the function implementing KPCA in \pkg{kernlab}, can be used both with a formula and a matrix interface, and returns an \proglang{S4} object of class \code{kpca} containing the principal components, the corresponding eigenvalues and the projection of the training data on the new coordinate system. Furthermore, the \code{predict} function can be used to embed new data points into the new coordinate system (a short sketch is given after the figure below).
\begin{figure}
\centering
<<fig=TRUE>>=
data(spam)
train <- sample(1:dim(spam)[1],400)
kpc <- kpca(~.,data=spam[train,-58],kernel="rbfdot",kpar=list(sigma=0.001),features=2)
kpcv <- pcv(kpc)
plot(rotated(kpc),col=as.integer(spam[train,58]),xlab="1st Principal Component",ylab="2nd Principal Component")
@
\caption{Projection of the spam data on two kernel principal components using an RBF kernel}
\label{fig:KPCA}
\end{figure}
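As a brief sketch (not evaluated here) of the \code{predict} method mentioned above, observations not used for training can be embedded into the coordinate system computed by the \code{kpc} object from the figure chunk:
<<eval=FALSE>>=
## embed ten spam messages not used for training into the space spanned
## by the two kernel principal components computed above
newdata <- spam[-train, -58][1:10, ]
predict(kpc, newdata)
@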
\subsection{Kernel feature analysis}
Whilst KPCA leads to very good results there are nevertheless some issues to be addressed. First, the computational complexity of the standard version of KPCA scales as $O(m^3)$; second, the resulting feature extractors are given as a dense expansion in terms of the training patterns. Sparse solutions are often achieved in supervised learning settings by using an $l_1$ penalty on the expansion coefficients. An algorithm can be derived using the same approach in feature extraction, requiring only $n$ basis functions to compute the first $n$ features. Kernel feature analysis \citep{kernlab:Olvi:2000} is computationally simple and scales approximately one order of magnitude better on large data sets than standard KPCA. Choosing $\Omega [f] = \sum_{i=1}^m |\alpha_i |$ this yields
\begin{equation}
F_{LP} = \{ \mathbf{w} \mid \mathbf{w} = \sum_{i=1}^m \alpha_i \Phi(x_i) \mbox{~with~} \sum_{i=1}^m |\alpha_i | \leq 1 \}
\end{equation}
This setting leads to the first ``principal vector'' in the $l_1$ context
\begin{equation}
\mathbf{\nu}^1 = \mathrm{argmax}_{\mathbf{\nu} \in F_{LP}} \frac{1}{m} \sum_{i=1}^m \langle \mathbf{\nu},\mathbf{\Phi}(x_i) - \frac{1}{m}\sum_{j=1}^m\mathbf{\Phi}(x_j) \rangle^2
\end{equation}
Subsequent ``principal vectors'' can be defined by enforcing optimality with respect to the remaining orthogonal subspaces. Due to the $l_1$ constraint the solution has the favorable property of being sparse in terms of the coefficients $\alpha_i$.
The function \code{kfa} in \pkg{kernlab} implements Kernel Feature Analysis by using a projection pursuit technique on a sample of the data. Results are then returned in an \proglang{S4} object.
\begin{figure}
\centering
<<fig=TRUE>>=
data(promotergene)
f <- kfa(~.,data=promotergene,features=2,kernel="rbfdot",kpar=list(sigma=0.013))
plot(predict(f,promotergene),col=as.numeric(promotergene[,1]),xlab="1st Feature",ylab="2nd Feature")
@
\caption{Projection of the promoter gene data on two features using an RBF kernel}
\label{fig:KFA}
\end{figure}
\subsection{Kernel canonical correlation analysis}
Canonical correlation analysis (CCA) is concerned with describing the linear relations between variables. If we have two data sets $x_1$ and $x_2$, then the classical CCA attempts to find linear combinations of the variables which give the maximum correlation between the combinations. I.e., if
\begin{eqnarray*}
&& y_1 = \mathbf{w_1}\mathbf{x_1} = \sum_j w_{1j} x_{1j} \\
&& y_2 = \mathbf{w_2}\mathbf{x_2} = \sum_j w_{2j} x_{2j}
\end{eqnarray*}
one wishes to find those values of $\mathbf{w_1}$ and $\mathbf{w_2}$ which maximize the correlation between $y_1$ and $y_2$. Similar to the KPCA algorithm, CCA can be extended and used in a dot product space~$F$ which is related to the input space by a possibly nonlinear map $\Phi:\mathbf{R}^N \rightarrow F$, $x \mapsto \mathbf{X}$, as
\begin{eqnarray*}
&& y_1 = \mathbf{w_1}\mathbf{\Phi(x_1)} = \sum_j w_{1j} \Phi(x_{1j}) \\
&& y_2 = \mathbf{w_2}\mathbf{\Phi(x_2)} = \sum_j w_{2j} \Phi(x_{2j})
\end{eqnarray*}
Following \cite{kernlab:kuss:2003}, the \pkg{kernlab} implementation of KCCA projects the data vectors on a new coordinate system using KPCA and uses linear CCA to retrieve the correlation coefficients. The \code{kcca} method in \pkg{kernlab} returns an \proglang{S4} object containing the correlation coefficients for each data set and the corresponding correlation along with the kernel used.
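A minimal sketch of the \code{kcca} interface on two artificially related data sets follows (not evaluated here; the number-of-components argument \code{ncomps} and the \code{kcor} accessor used below are assumptions that should be checked against the \code{kcca} help page of the installed version):
<<eval=FALSE>>=
## two "views" of a common signal: y is a noisy nonlinear image of x
x <- matrix(rnorm(60), 30, 2)
y <- cbind(sin(x[, 1]), cos(x[, 2])) + matrix(rnorm(60, sd = 0.05), 30, 2)
## kernel canonical correlation analysis with an RBF kernel
kcc <- kcca(x, y, kernel = "rbfdot", kpar = list(sigma = 0.5), ncomps = 2)
## canonical correlations
kcor(kcc)
@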
\subsection{Interior point code quadratic optimizer}
In many kernel based algorithms, learning implies the minimization of some risk function. Typically we have to deal with quadratic or general convex problems for support vector machines of the type
\begin{equation}
\begin{array}{ll}
\mathrm{minimize} & f(x) \\
\mbox{subject to~} & c_i(x) \leq 0 \mbox{~for all~} i \in [n].
\end{array}
\end{equation}
$f$ and $c_i$ are convex functions and $n \in \mathbf{N}$. \pkg{kernlab} provides the \proglang{S4} method \code{ipop} implementing an optimizer of the interior point family \citep{kernlab:Vanderbei:1999} which solves the quadratic programming problem
\begin{equation}
\begin{array}{ll}
\mathrm{minimize} & c^\top x+\frac{1}{2}x^\top H x \\
\mbox{subject to~} & b \leq Ax \leq b + r\\
& l \leq x \leq u \\
\end{array}
\end{equation}
This optimizer can be used in regression, classification, and novelty detection in SVMs.
\subsection{Incomplete Cholesky decomposition}
When dealing with kernel based algorithms, calculating a full kernel matrix should be avoided since it is already an $O(N^2)$ operation. Fortunately, the fact that kernel matrices are positive semidefinite is a strong constraint and good approximations can be found with small computational cost. The Cholesky decomposition factorizes a positive semidefinite $N \times N$ matrix $K$ as $K=ZZ^T$, where $Z$ is a lower triangular $N \times N$ matrix. Exploiting the fact that kernel matrices are usually of low rank, an \emph{incomplete Cholesky decomposition} \citep{kernlab:Wright:1999} finds a matrix $\tilde{Z}$ of size $N \times M$ where $M\ll N$ such that the norm of $K-\tilde{Z}\tilde{Z}^T$ is smaller than a given tolerance $\theta$. The main difference between the incomplete Cholesky decomposition and the standard one is that pivots which are below a certain threshold are simply skipped. If $L$ is the number of skipped pivots, we obtain a $\tilde{Z}$ with only $M = N - L$ columns. The algorithm works by picking a column from $K$ to be added by maximizing a lower bound on the reduction of the error of the approximation. \pkg{kernlab} has an implementation of an incomplete Cholesky factorization called \code{inchol} which computes the decomposed matrix $\tilde{Z}$ from the original data for any given kernel without the need to compute a full kernel matrix beforehand. This has the advantage that no full kernel matrix has to be stored in memory.
\section{Conclusions}
In this paper we described \pkg{kernlab}, a flexible and extensible kernel methods package for \proglang{R} with existing modern kernel algorithms along with tools for constructing new kernel-based algorithms. It provides a unified framework for using and creating kernel-based algorithms in \proglang{R} while using all of \proglang{R}'s modern facilities, like \proglang{S4} classes and namespaces. Our aim for the future is to extend the package and add more kernel-based methods as well as kernel relevant tools. Sources and binaries for the latest version of \pkg{kernlab} are available at CRAN\footnote{\url{http://CRAN.R-project.org}} under the GNU Public License.
A shorter version of this introduction to the \proglang{R} package \pkg{kernlab} is published as \cite{kernlab:Karatzoglou+Smola+Hornik:2004} in the \emph{Journal of Statistical Software}.
\bibliography{jss}
\end{document}
kernlab/vignettes/jss.bib
@Article{kernlab:Karatzoglou+Smola+Hornik:2004, author = {Alexandros Karatzoglou and Alex Smola and Kurt Hornik and Achim Zeileis}, title = {kernlab -- An \proglang{S4} Package for Kernel Methods in \proglang{R}}, year = 2004, journal = {Journal of Statistical Software}, volume = 11, number = 9, pages = {1--20}, doi = {10.18637/jss.v011.i09}, }
@Book{kernlab:Schoelkopf+Smola:2002, author = {Bernhard Sch\"olkopf and Alex Smola}, title = {Learning with Kernels}, publisher = {MIT Press}, year = 2002, }
@Book{kernlab:Chambers:1998, Author = {John M.
Chambers}, title = {Programming with Data}, Publisher = {Springer, New York}, Year = 1998, note = {ISBN 0-387-98503-4}, } @Book{kernlab:Hastie:2001, author = {T. Hastie and R. Tibshirani and J. H. Friedman}, title = {The Elements of Statistical Learning}, publisher = {Springer}, Year = 2001, } @Article{kernlab:Vanderbei:1999, author = {Robert Vanderbei}, title = {{LOQO}: An Interior Point Code for Quadratic Programming}, journal = {Optimization Methods and Software}, year = 1999, volume = 12, pages = {251--484}, url = {http://www.sor.princeton.edu/~rvdb/ps/loqo6.pdf}, } @Misc{kernlab:Leisch+Dimitriadou, author = {Fiedrich Leisch and Evgenia Dimitriadou}, title = {\pkg{mlbench}---{A} Collection for Artificial and Real-world Machine Learning Benchmarking Problems}, howpublished = {\textsf{R} package, Version 0.5-6}, note = {Available from \url{https://CRAN.R-project.org}}, year = 2001, month = 12, } @Misc{kernlab:Roever:2004, author = {Christian Roever and Nils Raabe and Karsten Luebke and Uwe Ligges}, title = { \pkg{klaR} -- Classification and Visualization}, howpublished = {\textsf{R} package, Version 0.3-3}, note = {Available from \url{http://cran.R-project.org}}, year = 2004, month = 7, } @Article{kernlab:Hsu+Lin:2002, author = {C.-W. Hsu and Chih-Jen Lin}, title = {A Comparison of Methods for Multi-class Support Vector Machines}, journal = {IEEE Transactions on Neural Networks}, year = 2002, volume = 13, pages = {415--425}, url = {http://www.csie.ntu.edu.tw/~cjlin/papers/multisvm.ps.gz}, } @Misc{kernlab:Chang+Lin:2001, author = {Chih-Chung Chang and Chih-Jen Lin}, title = {{LIBSVM}: A Library for Support Vector Machines}, note = {Software available at \url{http://www.csie.ntu.edu.tw/~cjlin/libsvm}}, year = 2001, } @Article{kernlab:Platt:2000, Author = {J. C. Platt}, Title = {Probabilistic Outputs for Support Vector Machines and Comparison to Regularized Likelihood Methods}, Journal = {Advances in Large Margin Classifiers, A. Smola, P. Bartlett, B. Sch\"olkopf and D. Schuurmans, Eds.}, Year = 2000, publisher = {Cambridge, MA: MIT Press}, url = {http://citeseer.nj.nec.com/platt99probabilistic.html}, } @Article{kernlab:Platt:1998, Author = {J. C. Platt}, Title = {Probabilistic Outputs for Support Vector Machines and Comparison to Regularized Likelihood Methods}, Journal = {B. Sch\"olkopf, C. J. C. Burges, A. J. Smola, editors, Advances in Kernel Methods --- Support Vector Learning}, Year = 1998, publisher = {Cambridge, MA: MIT Press}, url = {http://research.microsoft.com/~jplatt/abstracts/smo.html}, } @Article{kernlab:Keerthi:2002, Author = {S. S. Kerthi and E. G. Gilbert}, Title = {Convergence of a Generalized {SMO} Algorithm for {SVM} Classifier Design}, Journal = {Machine Learning}, pages = {351--360}, Year = 2002, volume = 46, url = {http://guppy.mpe.nus.edu.sg/~mpessk/svm/conv_ml.ps.gz}, } @Article{kernlab:Olvi:2000, Author = {Alex J. Smola and Olvi L. Mangasarian and Bernhard Sch\"olkopf}, Title = {Sparse Kernel Feature Analysis}, Journal = {24th Annual Conference of Gesellschaft f\"ur Klassifikation}, publisher = {University of Passau}, Year = 2000, url = {ftp://ftp.cs.wisc.edu/pub/dmi/tech-reports/99-04.ps}, } @Unpublished{kernlab:Lin:2001, Author = {H.-T. Lin and Chih-Jen Lin and R. C. Weng}, Title = {A Note on {Platt's} Probabilistic Outputs for Support Vector Machines}, Year = 2001, note = {Available at \url{http://www.csie.ntu.edu.tw/~cjlin/papers/plattprob.ps}}, } @Unpublished{kernlab:Weng:2004, Author = {C.-J Lin and R C. 
Weng}, Title = {Probabilistic Predictions for Support Vector Regression}, Year = 2004, note = {Available at \url{http://www.csie.ntu.edu.tw/~cjlin/papers/svrprob.pdf}}, } @Article{kernlab:Crammer:2000, Author = {K. Crammer and Y. Singer}, Title = {On the Learnability and Design of Output Codes for Multiclass Prolems}, Year = 2000, Journal = {Computational Learning Theory}, Pages = {35--46}, url = {http://www.cs.huji.ac.il/~kobics/publications/mlj01.ps.gz}, } @Article{kernlab:joachim:1999, Author = {Thorsten Joachims}, Title = {Making Large-scale {SVM} Learning Practical}, Journal = {In Advances in Kernel Methods --- Support Vector Learning}, Chapter = 11, Year = 1999, publisher = {MIT Press}, url = {http://www-ai.cs.uni-dortmund.de/DOKUMENTE/joachims_99a.ps.gz}, } @Article{kernlab:Meyer:2001, author = {David Meyer}, title = {Support Vector Machines}, journal = {R News}, year = 2001, volume = 1, number = 3, pages = {23--26}, month = {September}, url = {http://CRAN.R-project.org/doc/Rnews/}, note = {\url{http://CRAN.R-project.org/doc/Rnews/}} } @ARTICLE{kernlab:meyer+leisch+hornik:2003, AUTHOR = {David Meyer and Friedrich Leisch and Kurt Hornik}, TITLE = {The Support Vector Machine under Test}, JOURNAL = {Neurocomputing}, YEAR = 2003, MONTH = {September}, PAGES = {169--186}, VOLUME = 55, } @Book{kernlab:Vapnik:1998, author = {Vladimir Vapnik}, Title = {Statistical Learning Theory}, Year = 1998, publisher = {Wiley, New York}, } @Book{kernlab:Vapnik2:1995, author = {Vladimir Vapnik}, Title = {The Nature of Statistical Learning Theory}, Year = 1995, publisher = {Springer, NY}, } @Article{kernlab:Wu:2003, Author = {Ting-Fan Wu and Chih-Jen Lin and Ruby C. Weng}, Title = {Probability Estimates for Multi-class Classification by Pairwise Coupling}, Year = 2003, Journal = {Advances in Neural Information Processing}, Publisher = {MIT Press Cambridge Mass.}, Volume = 16, url = {http://books.nips.cc/papers/files/nips16/NIPS2003_0538.pdf}, } @Article{kernlab:Williams:1995, Author = {Christopher K. I. Williams and Carl Edward Rasmussen}, Title = {Gaussian Processes for Regression}, Year = 1995, Journal = {Advances in Neural Information Processing}, Publisher = {MIT Press Cambridge Mass.}, Volume = 8, url = {http://books.nips.cc/papers/files/nips08/0514.pdf}, } @Article{kernlab:Schoelkopf:1998, Author = {B. Sch\"olkopf and A. Smola and K. R. M\"uller}, Title = {Nonlinear Component Analysis as a Kernel Eigenvalue Problem}, Journal = {Neural Computation}, Volume = 10, Pages = {1299--1319}, Year = 1998, url = {http://mlg.anu.edu.au/~smola/papers/SchSmoMul98.pdf}, } @Article{kernlab:Tipping:2001, Author = {M. E. Tipping}, Title = {Sparse Bayesian Learning and the Relevance Vector Machine}, Journal = {Journal of Machine Learning Research}, Volume = 1, Year = 2001, Pages = {211--244}, url = {http://www.jmlr.org/papers/volume1/tipping01a/tipping01a.pdf}, } @Article{kernlab:Zhou:2003, Author = {D. Zhou and J. Weston and A. Gretton and O. Bousquet and B. Sch\"olkopf}, Title = {Ranking on Data Manifolds}, Journal = {Advances in Neural Information Processing Systems}, Volume = 16, Year = 2003, Publisher = {MIT Press Cambridge Mass.}, url = {http://www.kyb.mpg.de/publications/pdfs/pdf2334.pdf}, } @Article{kernlab:Andrew:2001, Author = {Andrew Y. Ng and Michael I. 
Jordan and Yair Weiss}, Title = {On Spectral Clustering: Analysis and an Algorithm}, Journal = {Advances in Neural Information Processing Systems}, Volume = 14, Publisher = {MIT Press Cambridge Mass.}, url = {http://www.nips.cc/NIPS2001/papers/psgz/AA35.ps.gz}, } @Article{kernlab:Caputo:2002, Author = {B. Caputo and K. Sim and F. Furesjo and A. Smola}, Title = {Appearance-based Object Recognition using {SVMs}: Which Kernel Should {I} Use?}, Journal = {Proc of NIPS workshop on Statistical methods for computational experiments in visual processing and computer vision, Whistler, 2002}, Year = 2002, } @Article{kernlab:Putten:2000, Author = {Peter van der Putten and Michel de Ruiter and Maarten van Someren}, Title = {CoIL Challenge 2000 Tasks and Results: Predicting and Explaining Caravan Policy Ownership}, Journal = {Coil Challenge 2000}, Year = 2000, url = {http://www.liacs.nl/~putten/library/cc2000/}, } @Article{kernlab:Hsu:2002, Author = {C.-W. Hsu and Chih-Jen Lin}, Title = {A Simple Decomposition Method for Support Vector Machines}, Journal = {Machine Learning}, Year = 2002, Pages = {291--314}, volume = 46, url = {http://www.csie.ntu.edu.tw/~cjlin/papers/decomp.ps.gz}, } @Article{kernlab:Knerr:1990, Author = {S. Knerr and L. Personnaz and G. Dreyfus}, Title = {Single-layer Learning Revisited: A Stepwise Procedure for Building and Training a Neural Network.}, Journal = {J. Fogelman, editor, Neurocomputing: Algorithms, Architectures and Applications}, Publisher = {Springer-Verlag}, Year = 1990, } @Article{kernlab:Kressel:1999, Author = {U. Kre{\ss}el}, Title = {Pairwise Classification and Support Vector Machines}, Year = 1999, Journal = {B. Sch\"olkopf, C. J. C. Burges, A. J. Smola, editors, Advances in Kernel Methods --- Support Vector Learning}, Pages = {255--268}, Publisher = {Cambridge, MA, MIT Press}, } @Article{kernlab:Hsu2:2002, Title = {A Comparison of Methods for Multi-class Support Vector Machines}, Author = {C.-W. Hsu and Chih-Jen Lin}, Journal = {IEEE Transactions on Neural Networks}, Volume = 13, Year = 2002, Pages = {1045--1052}, url = {http://www.csie.ntu.edu.tw/~cjlin/papers/multisvm.ps.gz}, } @Article{kernlab:Tax:1999, Title = {Support Vector Domain Description}, Author = {David M. J. Tax and Robert P. W. Duin}, Journal = {Pattern Recognition Letters}, Volume = 20, Pages = {1191--1199}, Year = 1999, Publisher = {Elsevier}, url = {http://www.ph.tn.tudelft.nl/People/bob/papers/prl_99_svdd.pdf}, } @Article{kernlab:Williamson:1999, Title = {Estimating the Support of a High-Dimensonal Distribution}, Author = {B. Sch\"olkopf and J. Platt and J. Shawe-Taylor and A. J. Smola and R. C. Williamson}, Journal = {Microsoft Research, Redmond, WA}, Volume = {TR 87}, Year = 1999, url = {http://research.microsoft.com/research/pubs/view.aspx?msr_tr_id=MSR-TR-99-87}, } @Article{kernlab:Smola1:2000, Title = {New Support Vector Algorithms}, Author = {B. Sch\"olkopf and A. J. Smola and R. C. Williamson and P. L. Bartlett}, Journal = {Neural Computation}, Volume = 12, Year = 2000, Pages = {1207--1245}, url = {http://caliban.ingentaselect.com/vl=3338649/cl=47/nw=1/rpsv/cgi-bin/cgi?body=linker&reqidx=0899-7667(2000)12:5L.1207}, } @Article{kernlab:Wright:1999, Title = {Modified {Cholesky} Factorizations in Interior-point Algorithms for Linear Programming}, Author = {S. 
Wright}, Journal = {Journal in Optimization}, Volume = 9, publisher = {SIAM}, Year = 1999, Pages = {1159--1191}, ur = {http://www-unix.mcs.anl.gov/~wright/papers/P600.pdf}, } @Article{kernlab:more:1999, Title = {Newton's Method for Large-scale Bound Constrained Problems}, Author = {Chih-Jen Lin and J. J. More}, Journal = {SIAM Journal on Optimization}, volume = 9, pages = {1100--1127}, Year = 1999, } @Article{kernlab:Ng:2001, Title = {On Spectral Clustering: Analysis and an Algorithm}, Author = {Andrew Y. Ng and Michael I. Jordan and Yair Weiss}, Journal = {Neural Information Processing Symposium 2001}, Year = 2001, url = {http://www.nips.cc/NIPS2001/papers/psgz/AA35.ps.gz} } @Article{kernlab:kuss:2003, Title = {The Geometry of Kernel Canonical Correlation Analysis}, Author = {Malte Kuss and Thore Graepel}, Journal = {MPI-Technical Reports}, url = {http://www.kyb.mpg.de/publication.html?publ=2233}, Year = 2003, } %% Mathias Seeger gp pub. @Article{kernlab:Kivinen:2004, Title = {Online Learning with Kernels}, Author = {Jyrki Kivinen and Alexander Smola and Robert Williamson}, Journal ={IEEE Transactions on Signal Processing}, volume = 52, Year = 2004, url = {http://mlg.anu.edu.au/~smola/papers/KivSmoWil03.pdf}, } kernlab/vignettes/A.cls0000644000175100001440000001273612055335060014611 0ustar hornikusers\def\fileversion{1.0} \def\filename{A} \def\filedate{2004/10/08} %% %% \NeedsTeXFormat{LaTeX2e} \ProvidesClass{A}[\filedate\space\fileversion\space A class ] %% options \LoadClass[10pt,a4paper,twoside]{article} \newif\if@notitle \@notitlefalse \DeclareOption{notitle}{\@notitletrue} \ProcessOptions %% required packages \RequirePackage{graphicx,a4wide,color,hyperref,ae,fancyvrb,thumbpdf} \RequirePackage[T1]{fontenc} \usepackage[authoryear,round,longnamesfirst]{natbib} \bibpunct{(}{)}{;}{a}{}{,} \bibliographystyle{jss} %% paragraphs \setlength{\parskip}{0.7ex plus0.1ex minus0.1ex} \setlength{\parindent}{0em} %% commands \let\code=\texttt \let\proglang=\textsf \newcommand{\pkg}[1]{{\normalfont\fontseries{b}\selectfont #1}} \newcommand{\email}[1]{\href{mailto:#1}{\normalfont\texttt{#1}}} \newcommand{\E}{\mathsf{E}} \newcommand{\VAR}{\mathsf{VAR}} \newcommand{\COV}{\mathsf{COV}} \newcommand{\Prob}{\mathsf{P}} %% for all publications \newcommand{\Plaintitle}[1]{\def\@Plaintitle{#1}} \newcommand{\Shorttitle}[1]{\def\@Shorttitle{#1}} \newcommand{\Plainauthor}[1]{\def\@Plainauthor{#1}} \newcommand{\Keywords}[1]{\def\@Keywords{#1}} \newcommand{\Plainkeywords}[1]{\def\@Plainkeywords{#1}} \newcommand{\Abstract}[1]{\def\@Abstract{#1}} %% defaults \author{Firstname Lastname\\Affiliation} \title{Title} \Abstract{---!!!---an abstract is required---!!!---} \Plainauthor{\@author} \Plaintitle{\@title} \Shorttitle{\@title} \Keywords{---!!!---at least one keyword is required---!!!---} \Plainkeywords{\@Keywords} %% Sweave(-like) %\DefineVerbatimEnvironment{Sinput}{Verbatim}{fontshape=sl} %\DefineVerbatimEnvironment{Soutput}{Verbatim}{} %\DefineVerbatimEnvironment{Scode}{Verbatim}{fontshape=sl} %\newenvironment{Schunk}{}{} \DefineVerbatimEnvironment{Code}{Verbatim}{} \DefineVerbatimEnvironment{CodeInput}{Verbatim}{fontshape=sl} \DefineVerbatimEnvironment{CodeOutput}{Verbatim}{} \newenvironment{CodeChunk}{}{} \setkeys{Gin}{width=0.8\textwidth} %% new \maketitle \def\maketitle{ \begingroup \def\thefootnote{\fnsymbol{footnote}} \def\@makefnmark{\hbox to 0pt{$^{\@thefnmark}$\hss}} \long\def\@makefntext##1{\parindent 1em\noindent \hbox to1.8em{\hss $\m@th ^{\@thefnmark}$}##1} \@maketitle \@thanks \endgroup 
\setcounter{footnote}{0} \thispagestyle{empty} \markboth{\centerline{\@Shorttitle}}{\centerline{\@Plainauthor}} \pagestyle{myheadings} \let\maketitle\relax \let\@maketitle\relax \gdef\@thanks{}\gdef\@author{}\gdef\@title{}\let\thanks\relax } \def\@maketitle{\vbox{\hsize\textwidth \linewidth\hsize {\centering {\LARGE\bf \@title\par} \def\And{\end{tabular}\hfil\linebreak[0]\hfil \begin{tabular}[t]{c}\large\bf\rule{\z@}{24pt}\ignorespaces}% \begin{tabular}[t]{c}\large\bf\rule{\z@}{24pt}\@author\end{tabular}% \vskip 0.3in minus 0.1in \hrule \begin{abstract} \@Abstract \end{abstract}} \textit{Keywords}:~\@Keywords. \vskip 0.1in minus 0.05in \hrule \vskip 0.2in minus 0.1in }} %% sections, subsections, and subsubsections \newlength{\preXLskip} \newlength{\preLskip} \newlength{\preMskip} \newlength{\preSskip} \newlength{\postMskip} \newlength{\postSskip} \setlength{\preXLskip}{1.8\baselineskip plus 0.5ex minus 0ex} \setlength{\preLskip}{1.5\baselineskip plus 0.3ex minus 0ex} \setlength{\preMskip}{1\baselineskip plus 0.2ex minus 0ex} \setlength{\preSskip}{.8\baselineskip plus 0.2ex minus 0ex} \setlength{\postMskip}{.5\baselineskip plus 0ex minus 0.1ex} \setlength{\postSskip}{.3\baselineskip plus 0ex minus 0.1ex} \newcommand{\jsssec}[2][default]{\vskip \preXLskip% \pdfbookmark[1]{#1}{Section.\thesection.#1}% \refstepcounter{section}% \centerline{\textbf{\Large \thesection. #2}} \nopagebreak \vskip \postMskip \nopagebreak} \newcommand{\jsssecnn}[1]{\vskip \preXLskip% \centerline{\textbf{\Large #1}} \nopagebreak \vskip \postMskip \nopagebreak} \newcommand{\jsssubsec}[2][default]{\vskip \preMskip% \pdfbookmark[2]{#1}{Subsection.\thesubsection.#1}% \refstepcounter{subsection}% \textbf{\large \thesubsection. #2} \nopagebreak \vskip \postSskip \nopagebreak} \newcommand{\jsssubsecnn}[1]{\vskip \preMskip% \textbf{\large #1} \nopagebreak \vskip \postSskip \nopagebreak} \newcommand{\jsssubsubsec}[2][default]{\vskip \preSskip% \pdfbookmark[3]{#1}{Subsubsection.\thesubsubsection.#1}% \refstepcounter{subsubsection}% {\large \textit{#2}} \nopagebreak \vskip \postSskip \nopagebreak} \newcommand{\jsssubsubsecnn}[1]{\vskip \preSskip% {\textit{\large #1}} \nopagebreak \vskip \postSskip \nopagebreak} \newcommand{\jsssimplesec}[2][default]{\vskip \preLskip% %% \pdfbookmark[1]{#1}{Section.\thesection.#1}% \refstepcounter{section}% \textbf{\large #1} \nopagebreak \vskip \postSskip \nopagebreak} \newcommand{\jsssimplesecnn}[1]{\vskip \preLskip% \textbf{\large #1} \nopagebreak \vskip \postSskip \nopagebreak} \renewcommand{\section}{\secdef \jsssec \jsssecnn} \renewcommand{\subsection}{\secdef \jsssubsec \jsssubsecnn} \renewcommand{\subsubsection}{\secdef \jsssubsubsec \jsssubsubsecnn} %% colors \definecolor{Red}{rgb}{0.7,0,0} \definecolor{Blue}{rgb}{0,0,0.8} \hypersetup{% hyperindex = {true}, colorlinks = {true}, linktocpage = {true}, plainpages = {false}, linkcolor = {Blue}, citecolor = {Blue}, urlcolor = {Red}, pdfstartview = {Fit}, pdfpagemode = {UseOutlines}, pdfview = {XYZ null null null} } \AtBeginDocument{ \hypersetup{% pdfauthor = {\@Plainauthor}, pdftitle = {\@Plaintitle}, pdfkeywords = {\@Plainkeywords} } } \if@notitle %% \AtBeginDocument{\maketitle} \else \AtBeginDocument{\maketitle} \fi kernlab/data/0000755000175100001440000000000012560371302012616 5ustar hornikuserskernlab/data/spirals.rda0000644000175100001440000001115414656670134015002 0ustar hornikusers‹]xi8nÔþ ’d‰l!)eÍò«”âP¢´[JQI%Š)¤•P¤ÅJ E$DÙŽ²ïƒë`˜ÌXfA Òëÿ¿½ÎsŸϹ®s]çœû>×±6·7±!ü„%Køü‹îþҰ„°l—z{ºyu÷^ü!¶höÀ\h¯í+Ù…­*:Y–±¿¡“ýßúér.oûµ^8¦X‘§±6z¶Ç$Û,ƒŽ¬ìd—ÐIž 
¹V÷®Â”tL×ñ¦l•ì>(¨ØâPœQÃ3ž]iÙöÚ+£žäÅ)”X:_án·"OÊ.¶vЇ¨ƒuqs…¸u 2ïÝ•·ÅFœPÔ„‚2À6ìÌgiœ°Xé<ÉŸ¿”‡ógè›FÛHÐÓ:†ÿ–| ˜4È!¡qèVöáM®&ŒÄÎêZ¥"»µÛžš–ähÏ4¼Á.$jNõH±\/[’âèˆu\4àír v';“(ŸÓS iZ½ZE•Þ‡(Ô,ÂxÊÑÂëöÖ­•„JNxÅ K,žè³Žž×±¡vÙ¤>Ár)ü†ãÞËÏ“ ÝDN`¡UáMÐZÇIϱF¦±Ãe³æá¡¬9xÓê-yqš,’zã ÝÜPŠ©JœEÏÈ×~ß0"+~þ áÇŽ,[_¬ÛU†¨Â½ÅgG×2`“v7 r½î:ÆÇ–šÃgtxTR‰èm.Þ<ò=Xäƒ,†äùЩôã©me½ëÃçØý`ò=É¡(ŒØPDxÓÔø mŒ¨èkŸÈ$–á]~4v„)*³–ÔXBƒÎö¤€oÞ(Md(²|({ײ3MË·žÇDYö¾œJ/ùEŸq΀=üLÁWÏÁ]/>çòzw¶£Ð££´¢!˜i)M©Õóü~£°_N·A•½¶?ü›ØwØW4öèBIÀL¸h€Ñ·ÈvS×è÷GÕí§?èu€…PŵŒï»¶xyìf—Ùä<÷å7˜æN!ãÌð3¯ õ\£HŸÔFAä0n»Žêk'ìÔ-Q"„P}¥´ê»F} wÏ3CÈTçP0¹Oéƒ x–1ú}¦Ý$ËѵT-Ü5“º:gàÓÔT®Å§±“÷ª†”€–K×öÎi&›£ m\Ib¦ƒ¿R0”R /¸uNˆfïºÚDñüÎЯ,½ä8qóZ°šì¨:Û¨WŠŽïYZ|ˆEjm׿J¢§²_)2¿ǽó­Ïg„+µ`(¸ÌöÍÄÑFoÐð cŒ¼ì$ïüGÔkáMÁ¥Ä†¢±©ÙŸCn€œ­Æî/·‘žhJÁô@kKü¡°€¼¤Ý9Ò9¾åvË‹½ÎöÁNÍ)BG2ÁïãêÖ\^cIÅ„‹îQOý†–'hŒTÉ=óú’êL¯›Môæ8SÚÝ ®X«Ž'hôÖJ-ümnd¶ œa4ÓÐw[dÚºÏxz<óŠò4ÄéN ŒñTXÕìô4ã6Ï¢‰Ó]V kr溩~£ôG§ú >VßÝð;å˜AËU ¤2š´[EÜH€WVÕNµŽÙ¬¡'ôÂn›#ÀgóÝÃh°›. ',°0¶gj*ZðxÿpöŒuªR ¡it@D8Å×ÐøxBuˆrÿ¨¯PƆ~™ E&C1ódâ¸Ð ‰ñ1z)¢½kˆ.¥jCáyÜ6@ü€W/Ž\­bM|)}Sgß…%ê“ :ègºÚ¦¢#~.ÙtÂÏl_Ð}Žy¿bZùÍ‘ˆvTK6»lnft•]ä¢CùQyrÒq ¦_‘MúC棋«i?C Kç¦ú2Ó³¡ÒÈýr–dLòoò¹™Ìר‘}à2ugÖ*]˜ƒÙ.HÖ†K…Š“á¦VšÆ“OˆÙ•÷iïË¿AÇ.¸zC½náÚÇ̹©”EU,ŠÝ7Ý´{®º3Áq‡(HJøí:ã°¥¦/CYØëJÂï}hÙ Ÿ¥DÊ#€àOp„0$‰°²ÔÚçÿ£aðËËèý6{ĆßÎ"1é–\8bA—œBó¡(‚2?²9äMàªÔ]2b+¼yžî7İ4îÜEù’ÁÐGθƒÞ%WV„¸?ÜÃȸ'¦t¼µõXQvû\:~b`®§É¾x[#£!m µœú©Õ²%„§³ˆÐÃ3„˜16u tÕv8 dWü¨O=ÿ:'øYáeæ"7EˆA75N5FÇ.Hçò²ÅªÃà·ì]È'bD%’QÚ0ão|« Jë”F”à¥Ô¯ó]´‰Eê` |Ès{L½)K(aOÞxßÝúk!oæòª^h¼Ásnb3Òp’¬Ô‘°ô׌Œ\†YrN‹µ©SçrÓF•Ë4逻©ÆeÏiL®G‚õ¬x”c CÉD–ý˜¹ä.‡JÈtº_ Í>Á2Þ %ñÃi¶SÝÑ3ˆWËglê¹’ë‘@yš³ýP&&,½ »¹ND/¬Ü°C2‡Ìåø‰ãóé<1ÊýœÌÛF¾½IÎ3A:³Êw_(tØ¥&›W’W6Ê‚ªˆUÍ0ðD¼ß_ªÇå\u[¼Á‡Ò&ÚÓ†Nq ÖpsRî ,W¨\ Ã…º —Ë-*°ÈGÌ4•¯Ìö‘ì BèóѬo+¹íÛ¶Ÿ‡"¬†Xèêˆæ©3¤µß,rGVs‹ßì<Ò›¢ß@ìmAC@†˜¼@ú“Ü1Ô×ük,£¾Ãì©Ì Mzmª¿­Ã1ý=X Ó›Ãl"!n;T³®®è-­eá÷Î-yä|Uv•L„´@÷tÔÁÏóX{mXÚY7þ“%tzàÑØú`h•bŽ¢EQsœ#™p¼åA:Ÿ¯½WI}ʹ Û¹ymÀ¼½ŸÞÇS ’Ó~¯FЄ$bZïË¢nÒˆÜ+©MÍYð)áp©;\Ï—R=ù¥ÞþàK‡Ê3%JÌ=•‰Ç0Ý ÔŸW¨c‹ÀÁ?U‰çz¬=Áî.Þ;ÕE7oFÞŸže¦}âêX0$=Ï?Z2 ïýµMŒ)mèÊaICo?u¤CFUò5h›KŽ ”FpéÒ/îÓê© ×ðÏ=žÓŒÓ"±Ê`™hl–²¦•Ðä *~À?ß'd2Hª™ ¹£,”À¦éJí‚'(Eñh¾ï áÏ‹—psU cnPoÞøG©|W©|žm¹Î¼KQÊfbƒ˜¡ù"ÚsèàqøjjñÍfVéRS“·æâ24´cŸ„g<ÅàU] ‹§zF…×úh¤~-Ö£¥Ü"W·ZO_Œ½^C^Lúæ!¬ §ÖÊ q4¦¥¥ê˜„Å«–¯·FÝ.׫A×DÄh¨›gAë-lÆƧ„€ø|*[·Øâ¨Akkþh"š ?¯JÍMWTxi“ÿW”Ø¿òu Û6ï–1:½å6«ù[Ütx(ÊT/\B'a³ •v¦1y…Öõˆ¯ñŠjImS–ò¸ÙÔjCIUñù½1qçÛûWÚðZwÁáÙî¾ç_Î[ÌF3ãæl}æ{­!WÄTB“Y„Öÿ}Ùv=åWçéãƒ{¯qæ[ú¸Æ]åéß–i¶ .P”±m.+&.X_Η¹œ]òJ4/Qí ܯv]$iwhÐ.ͤ.ÛÜ œ„oxÕ„G·€¡ï¹¦êè%ŠqW)kðºÑ¼š4w¥L˜(m[lõœ¼­A QÙ=϶ªÂ±TúãÆãÊq(áR=Š& x‘NÒ’…Ôg\Ô1— ¸å¸¡Â<·ôK~|*Íê*¶çؽÑ\îl ¶l¾kð9“ Ó[ž£4·K3«‘·]ªòüÁ]Ãnã™$ÝT3ÄFéÙ#æe€ˆ“ÚÇu힌¾aÚä’fU+â´á7t‘@ãhÜšZ ÒjZ#@dQÝÁ$ãù¦‡Èख g IÚ/˜KXd3 µ<žrˆ†ä¬‹OÃdÅ âc†¹…B5ä0 úƒx{ˆ’š1•3Øžœ­ ·v~L˜3Òqæå˜ÏÃÂʱ˜)ÃüIØ‘±^ ¡Çsö-Û¶ L]ªhûGÅÚ+ø/ýV^Äé-ïé3vÛœCTÕe!â^˜ë‘j\äaDìcI)’Ì£¿sàÊY÷nÀºa&ú¦øØŒAüïfE#ŠrÍj°7ÅMk@?ñëZ)ú¦¯Ç¬ŸÎ*~•à rÀàSຘ_Na¯gpŒáVØH7ŸQ&l—Ò,ç,”›t T䆧Ù:Ôe[½n ÆÛ3³/þ«“"e)>¨©:*Çål‚Rµá´.<¾‡ ¦7È| í|׸‡ µ·"½—7ŒQ?«¦Fý.¢> $´ª†!†#F;ƒQ V¸g ò‰•¢ÂºÅ×v…n2KRMŸh.îä-ÃvÓ‘LÔÆÄ²ÆÄ:›ˆ²@¢Ò‹÷`¸8máÎ<« BÓ5 <ãvûÅØL*‹7ˆñ¤õW‹D¢yŒÓÍùÅB耶ù}á…Ù è$2ú•‰»ökøh È*k¡ Nõ&ÂJ¤=vV™†NÆ[Лx/KþäHÉà¤OoFŒé²áx©•qòÁNOVsêˆ~åÓ¼„®B+ª=É`ógåóãx²ã P³åÊ\Ë2æöÆ­„ˆÅ3ˆ‰e\Øß~ pˆéû±‰ø‚ZQkê V•öòƒr8O9E*Ô r?4­ÀÒ|ôF»–KÝøüÛ²ÓñOaYN!f‡Â‘8 ëfVD›Sö‘VAÀ“UIûìý+­úa —Í[î´T (žËH8;§’ÿEVÇj¢MSWm(Õ­Q­ìNkÈT&Òô´ø…ûVà|Ëó·$‰vQ•_¡;ù‰Œ+NÖzÇ*JVŸtPÄl{p¸ï‰W1n•¹b|ü7èH¦îûöa{?0–©âLÚqcŠÓ9×*óÁª«®ú!Œ€gã{J<ä<ò6…8¿ 2TﲋüÞ{ëÊ!°Ü=¾j8£ùiñ&$ì. 
)häòV ëq¤º“‘ã ¹â®%À¦ Ù¡ZÜVlƈÿ!ɑׯ¨e˜-Uš3Pº²;'GÛ§wÂhzˆ·¾-ÄB%Qá©ç”´­\½H!3OÛ|óo¸%Åõä!¢‹œ±ÛtZÅR?RKå= p2 2¹`Â*O?N»0]¥=’¨÷(Š™kYIçÔ…¸ö *s;>˜…_lä ÿ†:Æõâiô_iž¥ªkÅ>zä0i%Á¨__ܲµ©¡1–çòP~¿¥ûuàÙ£ â¨G\°¨… ¿Õ©ë@àAd¹#¾î´¯´CôýÕ‚[eèš±Å"mLä’ô†±Nà–¨æ\r:Äæ”Sðig%@1ªÌÓäÀ»æÀPº-‚`2½ÊÚÅéPˆ{çS‚Vò+©ÇŒQÇÁŠ×ñ ¾¢ô"s¼ZšƒÇ[¶È{¨¶ŒMšoÀ$¼ú¿²Õ VY]gáIëeQ˜q͇ù…øå_U¸Y¢zRÅO&‘%–9w¡å 9›Üh ‹BH YñbP¨-ôñ®ð9gì1tƒP,¸Fµ«Øtª3);ÞçM"Š/ôï`’Ûìbk?¿½ÆÆ_ÛJ^$êd”üZ¦dx_,Âh_ñÇ&ˆh8¤¨Ã%.¯ª‡@ïø:´=9.Òóù×}z@a¸—sx½›Ý¢³Aé÷5e/‹¸è÷ª©ïÊÌÛ¨ƒÕŸË–hÃ)Ëne~dQ:VíøÄ£ŠæêW=ßaL½ F®å+ÂÄ?Žš¦™šNrÞ}†x4¾4t †QUe^¬dÛ« ï©®‡[0Bí¦ô½ºGüˆ®ÌÑþ\ªuFYÐæ[däiÀàõcøt:#6iàývÇäRø¹÷ISîK3~õV5‘õ¥môéüÀ5ñý¸Ôs#´ËN„ýr†áE90Às½Ãš +ì=ç~¢ülœ<¹d·3TÏ=Ñýb¹Ç*GJ r~'ª›'°y©ù æ«k¯Ô`PkI‚Bò:P ^ºT®tç!­àÐÞH”ï³ÝÒÄÏxÜð¼qx‰µ‘òÛ›g_RTú~‰þ2A02•Žô Q«éK™ºÍÂæ" ækTð‡†¼àò¥P~Lì|ôýq¾¡ |óà?ú†$ˆ2LÉùТ³jrP¤Ï)ešbߟš0¬É¼ÔM› V[D ˆ|áËçwŠ`H–€Ö†Sõ{˜Ž>ƒÌ¹© ïIñåÅ׬Žï]sjî]õYÙ¡& á#@R­?a5„Ó1dë+Å@xŠÞË·oÔ9µûˆ5àÅ¢{ëm¬aoqœ¶ ,Éñ':ä$z Ò‡Ô®—ÅíX…Ðy…-í± #q6ð ª?…Àk›XJ ”Ë*a}ò!åyµ»WwK¸è,±F§ª²{—9ZÏOÖó©ÛI]ãÂtË‚@Ì?BùúÐèNÖ‘ÈýF!«ƒ‡ÝðŽq)Çxé÷°}Nñôº)¯A#ÂòôMN; aß^v7ʹ! ¨Ë0‘_¢µmÙZ‚ì°/p#}5ÖL¯8À}:'r爇^h8¸wq¿ßgåêá‚*kô.¦¨‰¨²½L£?P-ú™§ØA.˜ý銸ºÄâ6p£gJ LÞuºÕ MCý&OK©²7Å7ÂTÇÔ.­ý;~ÄjLY+:{J_˜Ð#9Œ}•â‚ TþØ;16Áj§ü~¹|~X Ø›XžŽ£Ç6–Òtäõ¹ „èØúÛ›vØ/˜>˜'±ú’×MÇï®F¢M4Þq_Üq¬ó%àÊÕcAÇ[üËP•rT2‘ãû xc"ÀtjeŽ`ý”DDðÑcã™il}ÎÐøa3®P+ß&„J½hÙ;Ú¡ûùðm¨JÃ"Û±û.^{JïÂÓE1c›päð6pÜ~Z}ÓÊí@º]á'#z‘'vXa¶¥£E6ƒº?Lý™Ä8ÓÂoU xgú½•,3K—ìÒ&ÚIPçÛ/²ØGñ_5‚XÈéXãCS¯Èlb‰ÚŠÐ eæ¡5Ð B(ó×$vÎÉ œ6)T¤°t† w&ð÷!»ÖD ÌãŽ`‘@kà¤j`¾DhH ô©`éÙ]Ù,{Èáw »p{Œr'5­H›>IYñaA[+§Ú¨U4Äh~ïùøöw(íªÞ{߇uC6¥ÕX«}"Øå÷WÌ@{Œ$žÐL‘¯ùº.‚ƒk6(3ùIrtçÞoWÖì©9c"‹B܆GâÇDï)Òê¼FÿhF&|($¼£ÂÕ&¬è¶¥õ-3s]0–åk‡æ³|Oöƒ*>U©KÑ)jd’Û²ïÆ N÷ wp†'â˜j·±~õpm‰©]ëzµ;W§he’8~ÑÉ=ΫÎÓ´Ù³ð“ÖJ—k¥$êÑð:f>ôùoŠeü&ÕJ¯Ý‹)¥ïpÈŽÆÝkæŸûràWH ­E#çÚˆNø:ä½8ÿ’ªC÷K¢LÓɵâJsJ„㉓Ю'üúŸ&6Œ¨_£­äàÿ ÍçÂ\r¦´o`š÷C~ñNÓãJk¸,àϳ|é&3¡Þá^ ÆŸE/Éû}–!Í@#@Áê?¼Ä%²«á¹©nÃüŸ ÀÝZÁìç‹mK¶¿Ø½‘ríjl¢xË鹚éÒD¹Ö“2ÒöæQÇ{VÐcm!Þa}L¢ÞÍYi_>)P+úpå¢Põõ@œö2ò^‘]0gw}®¬êªŒ1U±]øJH^¢lx –FEºJƒ©ç´Ë,•dujÁåíÜŸj½{ú+ÚÉÙ&ˆ¨ò¡.‘óú‹'šÉ‚!Pu»dþbžcfÿDeÙmAûÅtçt}nì—Ê›ßz¤ÍÆ£ˆxcâCºÓø7…Ï+©E†¸§îNp¥“«Â(»J‰ÄÖ9;»†Z ‡ ?êZ3Oäñš]FdY+ ·'ïg‚©ãò¥¢ „ë×ÄݘðÍC?¾H.0±ç²³/m„·Uó©rxb„}z6×wopJ‚ÃÔGë>ì“Gõ¤EÃW=> 3qLYhÚ¨„N¿‹†Õ’¬ 9¢¶ôzš®iߨfÛ_V¢ÚǼà `À\M D¯_ð*Ü~ý;^¿Â¨x›p5ÏÿЯAZ“¦Üáâãß÷™>]=ŽÙÿ‘åœ8ZdÃã6?ë2Ž)ööÄñ=T’ž|¤$C!·|cÍ(@!t{t@ÜàS¬Gð$¯yŸS?ޤWá%Ÿ°låùB,'# Ä”ËåչʎQLò¦8ÿ24óüC~€eáVûõ—ľr7â›~S?æ­Ç ¦…TÆþT@é¼6,zèþM£²-·ŸG—Ó›lŸ½º¾ªïHïFÛ]°”oeê÷½öÅJ¾%ÎÆìbÇ’½6ÉbÙd!|\ÝWsÝëƒÒÊ&òMU°¾Oõª%Š6ž`P†ùÔ¾,ös½Ë¾Ò”6¶Èʨpgl{@Vѯh]ç^"Eñ1w’ø¢]îï'ä¯Ðž˜ÿé¸Ôê3AÖa`‹’Åt6wݽ£|0à‡µdΟ¸×©FRêÛ O*ƒÄ«ƒvn´ŽµÍnä}ǓǦák•Uô¼æ§BÆÞ–³vgïÚn¦D9&Õ”1iã_öO{uâc@ïÛ àÓ7ƒ[ú²3‚&Öµ'(šð–Ôb,òÅ7½ŠzŒFÐ\O|¡“ ±J ~Þ¨#Û“%eŒ~ÛX×jƒU¡¢ÏDK#€¹K–”& Ùyä ê/P÷Bü˜¨\CbV+µ®dc0—`·ñË5œ»2œ »{‡èA’*ª£ƒtT”kXÂõ¾}s¼Ý&Þy$pœ?Ù™:L ‘k÷½[\s.§?­Øª½†¸ 5H%С|PÓbç¶‹õ3­&[ó‹W¹ºmÆðb±ÓõÉ‘÷ #  =³žºÓRÆQn€¬4ʽ¢;æ—ÂXès‹l¯€)rö.ß^"{Þr;œ£€¤‡‘Z Â_.D%P[`‰­l5æWw@ùâìAÐjõ6¤EÍ”M&Eë e£NXà0t#>ÉaVX °•´B·R¶˜Â/šç­¹º8CQ`ŠLÞ€ENè"Ëìp•-±Œ½áºAbæ¿£‘ú@ã=ŒIëá'5˜$#OÜÔÿ <ÁûK™•òÇÕ˜ í®¬åQ\á}ÅVÌ´X,À1»"p K`Éúõ«ª„Mt׿Þ_nß™É<™+¿ÐKÕc“1ìzK7´Mƒ:éÔ{I£“Ço2’fözfLÈbU7—¹a½Änšz €,6üÕ“†,6ƒª—ÜžWN2ÞËèÀßH–MÝ*6‰xÂ;émõWÆšàh=¬ê 6¼ñ 5ÖŒdº +qÞ<®)-h7»]\³ÞeªQÓ·ñ÷þõçgåÑnÞÔhŒ ;Ð(õòøûϧjUÔ\ §iÎkµ#†{‹>ÇŠZ;“"¯K%‘òx1Ƚy:V† ýSOŠP.OË4üØ´c?°î€AðÏ0DÿæòöüOWw"w29G¥U˜ÜÛò1¦ò@CÙàN‡¡šþ‘qº¬¢‘z{êÑ¥¿ Áj!÷Ã3¹ï‚ íæ. 
¡êÓgYEçkÂY“˜ã“óxO¸©)°ýðñÔÞ6¿4 BÿÏ“V'Øìfõ¹I¿ˆ|€2¹AŸØŒœ"ë#€CÀ ´$Lbæ#ÊŽùbjÁ-˜£=޽æ)1mrÇšËZi0ŽC¼Iš ›Z/s[K%˜ccûÒá©×ÎBÕÞJå|LÌnQvÎ y\Ø´3RkÛËÛvqÜê4÷óµ§ <…ÍváÖoöÜzèvÔ°¦’RÛI;JNÒöCaQwÿ¼K1Q¤+%Ζ­`f>¥¬Ñ™Rv›w¢Ì9j‹éêr\Q׊QÒ KÀ*»üŒ;¿ç:¦fµ="âmŸÈþ'‹ÆìóåXÇ?ïÑÈ&qjEçÙ?÷Ûïûµºo`ÕVÚ!Ä™“wäÙ#vÂäÁp«øú¡ÆZ>TáI†axœDØ`WOæi§ƒ'Œ¨„‡Qñ\-Î¥·gÈP€¶ËÑ¡7pþi>:š.f ¾ ÿ'=ˤÉ:ú8b=Vp«ëv!°9³ã€b3Ñç®8éÙPîx/½qa½E5aÙ†¾0…~$ßâVæB­W)u®¢éR2®][}vMÍÀ©ï¤oKÍÉy»L׊moÕSØŠP¹'z~œ¸ÛRGi*8ý}¥Êf:Ch9FªH3~–¯ ™¸ü÷¶ìq±ïøB4£Ãg YÙš`­øvÒ›à•¶î”'[y ë³Ã€ÌéðHIð‡¨¼NÉ’RK¬ûêálû Úv½À OyÀ;¢ÕC‰©Á#I±ñoîçxÊœséPM'U&ðœÔ°Lÿ,îÇü†Bø#º¿âÃëtñø©…Â/*Æ$1|…«æ8Qž àÈè°÷?jåèI,Øár/ä ¹¬%ý‰ª²±jz¼Â'£·è}6¡*|dsãwÈñ~øÝ3êÐ&èÇwO¿'3S$"'õWëŒ)”µÜÕUo$ü]âÞÄäéxŠvÍl<Í=rXlí<^ªþwçª&ìÎ\jC7‹ªD±W€„ÕãÛs¹z¿„ÍáÂÒd§™«-9šÿ•ÈÅ ð'ÔµÑ`24C*‰d_1oÇØ»bcx+˜´R‡µÝQAiŒÛw äxÓ®qâ©âÍ¥¾s_æIܧ(WŠ}¹é²-¾EÌtYåøÇCàýÂBK¢rÐ>e>Ià‡\Á Æ”“•/Å.8o¤CF½ZßÕ®=TNq²|wçvÐû(E^·Ø]tRÖä§8F€]û$ó­ksô09þ­n=ê°µ(Y§µ¿) Ô×Nãï0aªÑ¨¹DÂf7½ì‡ æøò_Z?bÈ|àßNÒªÕ¤8`ÝUaMÍ@ï(]>å»c{`@ÛÁejC‹Í}α@LWE:*¡©sñríu|ûWÄÿ§”V¿ç¯ Sý»>¥µq.ÝuŒÎ"«­lP[›øLvjl–G"›…=œŠ{H#žãfhŸw¯5:öÌ›;°‰m‰ƒ6QÓ­¦5·NUØèìë=§^0~yÿE¾Èp ZA釯kVéo”ã«g¬°Z‡Ô‰ôñr#)»þØzPÿ =éŸÿZd ·± žÂ«t²;¬7ßmÁú{¾õâÈa”$ùq°kÆÞØfäOðâÄpR%Æß‹&q"6jdF¼‰µ…¬Ûc 5Û«~ŒÓÈ–a÷]ok«lrYß ½[Ú¤Œpkô@þÿ7Îz´c§`ÑîcÎ{6/õÿ‘-CRh–ì.Ïûj-ZÎÎ39-¹Ü ð›ðv¦LJ%wŠû¾º¥¨ŸYŠVºHæûû÷²Û ¦§/Œ]ñ»ÿkò§äXŸ•m n¬oæhÀ¼LÐHæBkÄŠæ#seí’L6ØèƒXÌš\_˜§ˆ³©Âöyñ"ON>§¥L†JNh™øŠD¤¶zhz±'IO¼o‰$²¸Q¤$lø5(øujŠ,hâįϫK­£ßÄÙ÷)ឲīÎ$y1´¦« ×^‡ÇôG}j€ÇÀ¤ŽhψW˜uÐÌ®ÀÆoê"&s°ùä¥"HÜV¾qüV~r)“Â䌜BvqTíÜ’zŽ£ŸÆ ›Ç¯Þ.È“ž©ÂpEÌ.[=$Ù@©{­„ÔImyo]O Õþ©Cûß0›¯Ä™?L½D+ŠHtRøˆjZTWKÛ„u1ݨ ;–N‹"ªðFÑûÅŠ ˹’ee4Í´õ`ÃLû7Z…€˜…ÛñŠå³F |å ûm†¶?Ò- §ºj'þ³•ÁÓƒǻ|²ñ£Ã+5ûXþ‹´¸ÿº Äwo¨ Ñ`ÕœžWmría¡)4/Œ7­‘8| "ºÆQÖ÷ó£`wtÃ÷D +É—ùØÙ@•–N»ºUõÆ~P£²(!´réñ´#\E„PTø‚F&ž±ÚH3©¥”ìm4' ‹øNºàPó™K\˜Q8o™ÌÚÕ¿÷Ü)Yˆ‹]AmõéOK kì⃂Âjå­]‚3™¿.€L—؆¦ßJ¬¨eguÑ*#ô°xn;¢2|ó2,üÌYu#1ö7P=ÝȧÊÛqNã˜ÆƒÆ¥õ$9;xQ«¨‹J.„³X r"ÿ)x &°F‰Ñîs4‹¬žJx7Eìk`'ÿžÝv[+à»4Ø7”ð*1ƒŽN™Êe¤Ð¢ë,»¾¼¬Û™Î=:Š©¯’FÔ"¦ qÉ­«t\ñ ²ˆù}Èù$%èµp3P^Hdúº9t'Ü(ï àÁ >çÏ(2®<»‚Ê ¿°P.3 èãôžíkô˹vÀy !ºÚ÷Kè68 -ÛÌÍ+D~å4’vÁ ï‡ 'áüjÿ/?{â?‡¯Õ-Uä‡ÕH*@03P+œÁÍÌîå,ùŽ2#ò×Ò¡óÙ÷ŠßÓZùš…‹ú'[²€«Š±ŒöXŽ•žþû(¼…¢O)˜ƒ™×Á OÕ} „²B׺ûß–ã ´uýHO«>’ÄÝÃÓnùú@¢fíeDä7D‰‡-ÏÔ™;%uDô~@k‡aWÉnÛÌNÁ Cé0)Žò~/=NÌN# Ævßqò&9`E©‘¼=}œâáî8hVi|€„:ÍÌãÝÐã“ ¸«@Íò=Áb¡odõÇ'H‰fpIq„GºYüFNßžþ¬ÿ”²éçtgïüæk~3Ó)_gϘÏê°ì¿ÿ±w©w®Æ†ƒ[ú]Z‰q23ôÇ÷s…v™û‹C]l<ñ‡qàë¥üd úrm%ŸíÅr¨ãà÷ùΤÝÐHƒèŠà¹[Õou)8é ˆÏ‡fů·4»®î ×êoÆ4v‡Õpq/ì¶)Ĉ÷S„˜›‚™,¿¥Cw~Ëv“ôÒ„ƒ‚·€¸æ“øà4öâÑâÚÛÔæ¯”!‡ÃT9ƒ«§ÑUÙéÊ .ЙÙ¨„Öl7©R\OÕ7 x˜^Í£½1†‘GLP2/ l¶Û›æ…Ù !ކ ­aÝöYCJTp9òѤ}º»Lì®Lbßòa{W‘¶Ü×»°T„v-3?ƒ(SÈð(Ý:-ï<z‚Z]ˆ†–\©êzÄvo:AþgL(Ú’'¦(w?½èor8=0TœºžEd~¯#J m€¾ ëfݲ^f÷ÏQŽ},–“°‚Ç´uût: ÆODt– !ë,¾äýãL„yTc™<¥n…MâæùŒèUœ2AÕ’¾"šÒKäqÆ&WbOì¹ §Ëþâ :*´¶Î5.¾IéÛ2ò§›²™nÛOa6®$Ù!´GK}°/I5 !iü©î]{H>ó^?ŽÑB惊âq¾W_ËË}ZVÑ[ðY›ðóí~Ô‡\6¬:€kpxÑsW²¼f¹…nåˆ/—¥¯¢÷æZ qî¬4›‡~oòŠ', ØŒßÓûMŠÊ»øüf§Âá@ÈLênŸ©Æ“Ý(Ü’ÝÖ&Ò Þ^®—“š‚TÀC83Ä…Šù@?Ô.°Æš;/X¢—ºÂŠÐ(´»ìðp—©ë…âóz6£SŠüÚ Æ2Üaz•…¯V*Ç_ áUÖ#Ð-B^Ƙ-á|°“Š$\cjòǶ:#hZGS#7–Z]áy-^aÍôºþF[Í'³;–ÉNYU¦Ô÷˜ÇgKe\Ó¬$áVWÁÃ¥³­Ið©ï¿ãÎìè¯Èy½9]…ÙCnc!Øšâ¶L‡a¬iié7A ™Åº't-(ƒßÁü’x¼Ú\]\.\Ð/A¯Ã¨W#qœ3â†eɽ!÷»,ÙAü¥+ý¼"{ç^>¥¸;Œ6:™E å‡’}ƒM‘â˜jæ6¸*þAÄo…^ çv¼åg£ NQÅרc§ü<–.8¨hÓv(†æÕ:ƒ°›1íýW=7mÚh¯çƒN~ì€b-?Þ›„ùnm;´A®.\Fâ™r6- ’qÅ‚ûÝßYXÅÎjÖ;/'Ó(òh[Æã5S Uú–³Ïy¤CnO}æzO&ãqI;À3¥~|ô~¼eÛ— dñ(ÓÂäy&~+CÈ´p†eþXتƒj4{g Äø|´Éôprß"›¤&È_ÁÞÜ=ìü÷¼&ÛÛ´Äåjœš•7Rì~3|ƒŠ—·ÂM3çâýzÝLPh%õ‘”ç†VXÆî“Qâå1iX¦g3>¤¼Pmÿ b%ž <ΩJ·céqìíœ$ Ëv·ÏP¤[nFÃ…óg,jw— ¦=îPæŠ`ƒv w"{9ú›ãœZž¯fÄ몸Os4T±ÍâvÆÖfo=hˆ‘V¦Sžº,ö]Åàƒ»Ý¬vc Ó('8Z}C›¯8î¯<1kÄ âb%'B…Fô¾+½; F±%²Ñòdl8 ÝÖ‡±oïפ»ñôdÕýô¼éÿaOžMÄp·¿%0óXË!jbR4®{±T³ õZ˜(ÂÄâW”Êl-ÉÞ˜èy‹2€Ð˜Ê.’ŽGÁ“͉³vF³€‡ýûä¸Ö™: ÃÛK+¨à¤Ph>b˜%ZxYo‡¥?6â骱{»AÅw˜æƒ%öï2­aÞF.Jb½-ÜŠ$… —šIÜþp*‚ˆï;¤Vä̵™ù†mìï#&ÿ¦ÔŸ•8£c4쪾·ÍCIçÙÀ"ØLÝ®MŠ16ÈíxÉqÙn¬EK]1²Ž¿ŒÝç µÖ²+&xg%Û_&MvdÍr((º›bÞîuËpÒtÒUÈ0ÒÐhÈÞè…èš…Œ¨³Öðz>0 ·›íÊòB93À·W¹¦˜¥WÇüÆw ¢J¶õ 
´Í@tbdhª¤Y¦œ(ÐdMƒR¬ÉÆMs5%zöxXÖK»Ë@tU<‘_1¢µlF½ï- K•[•"qp›$°¢î‹¦-¬ÕKiC¶/s— G ]'ì… {Ò÷3 úä&ÚÆiˆh:$Q1 W©=yÿ‡Ò½f¬b±w:â¨}ï gºÏ°G‰±9ö„¼Ëä¨Nk/ÁStæPòp…•¹!iSˆ¦ó—ogç ª”wBES…&¬ŸåCÀåj32}h/¡«é¸8þ`ùvMf¦,üÎSÓJ@t.˜ñvžÎDO”'ÌÚ?¯2g¢„[Nh>P±ËÅYd½I ±##¡ÆÒœo* JÑeud`Ú¹Ì,‚ Ù„†S+ôPYQI-˜¥{¾0ÈŠe—寜 Î9h®då>6PþvàBÌ6,…-ŠKëzµëÕ’ñ’ŽŒtuImÆa|HÖ˜PLÕ­d)˳ɂZçêDº=ð…‚öV°ò3 EÕ}ïj‡¦¦*O¦[ØPï:ìp±çØi¶SRü`ç þ$ÕŽ½î>Q¸¬D‹WkO¾Í ““jR/«b ½å"$ÊE¥°ËÝä+ãfF\Š4S±9=g<ØC’ùä7Äoç#®œh¦ÖJ‡ u…έ…‚æS×-™rH<ÛÆL.½þå ÔW+ù´œ'ƒûY…ú¶ƒÁÂñ¤»Xr¤o4ŊϹrTf½F:”n.þÖa™†_teQJQ$;͈Hc¡¡ìI¹õ ž×AEß*Ž¿;ýÊ<žÀA˽ºƒµÖïT›Ï‚Ý$œ´´v’³¤Ëå­“%wkõ‘‡kŽê¢ÚÚ˜Æ78O × !©‘j4ß|ÇsßNåÛœ{wÐ7jCm ‡B@99Ù[9¯Dóó£­ÖÑjÓW•ñ§1 Ö‰…†¿Kžd‰Ìq¦i•MNÔ_1SùÂí߃ªH’€´b3èü›AæõŒÅµ—p[¨vByÔ1‹ÚB>~ªÅbË×h»zò2F¨gÿÜH-ê“¡tîO·}C—Ôïk@¨]kSXBG±Å ÕæT¿B§œDg¿ïŠG$sï±"ÉžŠ ‡.g1M8éÅ%šö|¸ÉŽ«£*­zZ"Kf&@û;þ‚Œ=™Ivž#ãa^Ê#™@H¼‡ZIr¯Ìh¸QП1tÔ÷•}„—)Ô õ"ºòûfH áô îÝÙ1Ž÷"7ÎéP½&}×”ñ‡Ôv_õÎŒw(B¶Î¶øj– ¥R1ÄhA€ÙxKƒê~˸Ò ­hÈ#O@‹`i pìgÑWÐÞœq¹‰'!êÆ Rë¹ÖW¬ˆ$Î:ctª¬u¶¬ý ôˆ’‰+ AIŒzÞ•4)å½;ÃÛŽ¨¼ )$ÙuÖøÇáÁ ˆ†‰Å”È*SÅQÞ-:ò­éÿŽ‘Z+.}YÓ½éôûö‡C׋]J>¸Éé¬Á?IÅC5;é l‘HØJ5òw-›B{-c±KŠëã‘_ßß{ñIs‘Þ¨K¶€Gú®Qâ©êÕ8¤ŠÀÕÕ¦Éi #ðÁóöDݯ_ÿmô «bÒˆãûÄÍâ@iÌtÒ0‰èdÖáÖ{FL~¯íIp¦¥—õ ™éº%ÅBèzOàÂÏË¥ªW¢†ŽbüT€¿ÀA·ÚQp¢­ÊjàæJ^A7¦ »û†°OsÍu)µeôØ\ø¨KP(ç´ƒûûà¿F0à:‡ç A$µ¾†!äÎc›ãó•= yð?RAV!V¾XðZ.G[ÀôzÌ—Á„ïï%ºIÂû;~A“LlKQÇâ!IÚoÚðwª°n*Ã^ %»†ù¯éF!ÖÇ]5fÛÆŒÅyÕ,Æø‰Ûˆx7 AÔÒ ­k¹Bzhu†PôåEú°Ñn®AÀ#xŠ¡Ú¯¡Ü¡|& i#`6lgùAï<‘š`offNƒ#¶&о=ò©½Íÿ¬õåÌçvêó¦H`48ÛTºº ”ñ½òÐÀ„wA#ǘ‡ƒ{Gû%¬Vx«0CþËÍ© ÄlJì.)#뜧‡Å†‚ Kï8µžºxIãfx;tfª|ð(w_´¬ÿá\ FïX³ù³©‘€Ù·@CV u©ÐS/~æ¨DÁÑ&_ÉßÒÄüáÏë «ÙGö š~D®ÓMºMäãr4#7Q†1A¤ï‚#Sì)Ç 6‚{­V‰¨tf6ÇVBð~£õãºF˜<èdÅN ìä¿9òê&Ôþ‚0jŒ=h•Ÿâá–:RSmˆ:'rSÐNÁRâlÚ¶Ÿ)ýƒj¼—/@£Ò5sÆsìÇÇ…©–&š·€€ÍÔ¾¤lI©èQÒ®ÕÈœõþ\p7ïȔֈÉÜ !8x€‹7„ï³’+¥â14~x\31¡5ðLÕó}¨7Ð.=–ÿâÅçl2ÖôZ"Üìw[¨Á[ýiÍÒ&§Ä X^>}(ç¿]‘•!ëzf8úh¿AîÍS,Z3?B7ˆÜYrû" uE%lF×5×À8I1¡­^Îr…Mª6øˆ‰•ô±2Ö2áÈç.ö—PZo®zÐs¡À›=UD6€4 Ý<)…»º Ly@â³æ8=bmmñÞ!»2‚+y&›Åà=Ña—d0§ÖS°kƒ·’¥°Ë±žØOAæg×ÍS"2'EK÷+Áy&(V+èhyk÷KCE„L2 uú”D«³ú»ÃFÁí n·@3ØÖY$é8P »?Š:9'ÂU…îÁ¢‰ÀPH)Ñbï;Êûº¼–pä§»-¡@N!2²sfù0¿˜.©°ü¬=B¬Þ÷‚â¯dÄ@>[Ÿ.ÅAËvéäš²°7ÖQ“ 7:‚³NðA.Õ™YwÃSŸAsŽ2˜Ææ„µÝõs™«²™¶®s5åèï:)zóûNKÀáh¬TMðÿq5 UNõß.Õ¥õóZ(1]^¨nIV(w¨6ˆ™Åtû°Ä–‡ƒÜù-~Àü¤HORâa"o/€${°E-޾PHaóƒÃ?ÆlÖ*W²¨pÓ轸{£À. 
¸®…ShÜJ³OrXÎÃHËÆU6‘ÓJüKh_#VÂ:¦ÈŒJµm;Bô’5Ⱦ՟«/pSLë ]lÁä;²e{é~©6ðʜٿ'ÝϨ3ŠžƒÕx»·Þ°–CJæ*•Z§r1×›db¼.Í[QAmÂÖUOÞò´êÁçÆBÖ#‡Äõ¥ƒx&·aEþX…â½jl×ãQábÐa1O $jÑÞJÈ9[’ÙX35?uT{èˆò³XˆjL9Ô@Æä•ÜÜG £°:²Ö_®CÆö9¨Vmuú…8®sbìï~WA¶¥·šó/{ÌÑšEtøÇ¹‡ô|Õ,ïÝdåЋ˜u#¢»dzÂóÿ Â¿Ìz°µ^FãçÑ¿ahö¿Åÿ̲•V×ûê˜,ÆY«ãlç̓ö5ÒÝ‹’Ä\´lB÷ÇþÐ 푱Ó/…¦‰„Ê ÈÖxT­¢vgÉÈ,ó%Ùðz¨]tŒpQÙ©\Л¦É"–€ü…¹÷îöaÈIOó œîY*•©BöMOÀ^I{ü>•–̘f±‡+Yc ¤ºà–F itÌR‘ÑÇ|,:¡·ìóøÔœœéˆ/ÖÓélÇ8ÂÍÊá›K‹ÒSñ_,Ù7¤(³ï($ÔãŸT[xŸAå»~™ žÁ"¬V‡£f¡;ýïžÕ!«k·ªÒêé·<ŠrJ|Ôo;ïç&9þ¢…(ÄþXW3~ëœè]-—î^!ð­%2ÎâÖA¦È]¬¡Y®4%R28™*…ñ­G)6–]Y÷á:Wš|ÄÐÇÓž½êLyˆ%ûx2?7Ìqg*|Ÿ"ÔˆÁ3%¹/??øN»it¥©‹Œc߈a­àø°÷af*(݆>$‡ŸÐJÝ5·_(ÀãhvgæXJ3HN~RÚ^Uû&„Ñ!(mµtíb43ઙ(!…S•î(dÑ‹¢L¸<´ïÿ´RCýj‚A…鋸±ÂáÅÙ\÷æXŰü…C£!B+ó¤ôø¬ŒÑŒØXçùX¨FL‹ö¶"ðg¶ìß÷üê¸dÂ\€ït9Ϫ¸SA ùŠé“ ‹×öKý‚†2ö.]Ð ¢]Ðßܾ ˆ»gçó¯ùSQoÒ5ú(d‘ù~± ûè*ØÊЬjñ}¯_^‚“C”î¶Í:>ý¸®0v-ùnwEGÐü÷ÓŽ¶R{7²¸´òç«¿ÔVPƒ§2æ=ÜQ°ý¨z?ª“JÜâ13Llž%OéÅQ9Lë{>àÏ}Öó°‡Cô4±˜2+ûÿ Ù©zŽ—‰'íÍÑ/>q[AX6@–Ù ÍߤáPꜱ]wF‚òäeg©¿»pm”>*ñóÔ>¤ö^®¿þ¦ÅØrô‘´ ¼ËšÿˆÆy ÂpÊìÆJb´¡ ¤>!ª¤¥ ‚®#ªÿ=œª™¥LùÂë×Û•ÞÃ|-È«ä—É?›ö"zÇè@ÌX)«àjôÄò&³­ü9¯6ÊoÆUŠE‡ôrFï!Zªaø o—ѱ_jÚ—ÿa¤PèL@XêÒŠ=j—E„Pœ’¸49q9¹Ìðh´dLa´ue­] K€¢øéºh壹l¤ˆ*L[À"âÔ dé:ïXS]¦8º©Fˆo G F®¡#üç»^¹ÓÙ5Doù_ÈzÔ[ÜhÓڵػ³¬–:‡Ø°EíÖ¥bWÑï£WØó^Ïîè¶ù‚›hUK÷׳¼NTpsûÄ JêG”ˆ:û?”€FqoÏèuYoðÂZ¬$7‘ˆ­ œ=—©ÃžÁ&ƒb F»üíjÊŠså-Ó2 cYß1£8¸D›Ïù]MH0Ž# ç˪¿Ë´Ž ÿìõm4’×ÞÌAUŠ_ÐkxPˆ¹–±Rç,4 x`L‚…5ꌶ]ÙÀ¾36Ofßçm{nÇÓKq8*Й/~ k)–H›€|:dà!)ÿFœ^þø³Â1ݸxâøšèÙŠ q>–ÍãiG'-;M±!‘bÉ ¦ñ-<—²7\¢®ö“UŽ;ËaçìãþóŠr˜64Y¤;£¦.«d#âVæ‹Å¥¤ ú¿ Ü!÷§¸€é=oM&7¾Q;‰G`ìO!<3ƒªÐ Yõn²"Q0T6î -Ð6pÅ9.!$ \+n®Ø­ù¯±áV4ºlid® ÎÝýÝÎ ØÓ”âû(4*’ÁãÍnÈÊWrVt1£¢ùì`™ž4­Ò2™Ju0¼×†CmhÀï·¢|”œ0¬s6Wy š1aĪ>:76ÀÕ¢Ùmz‰’]Ôþ hÏElÿXmY:Ù !>ÿÀ7^ºÿ­r¨ð¥B‘«køÒÍã'|ß·úc>戯=öι¤|´¼°nŠx¨œt;AŒ9P-)ûßÍ9G&»Ê†4‡gDÙá"þf9å®hŸÃ¡ù¨Bú®Gƒ¿Lm1ÚVõåJÖ?ízûh|2„,°mqºrNYÔ£±Çs9Ñ¡H>¬m¸Šßu6e4 ÚnðÖÅ*}Æäôà­ÐÔzg|°yˆÐW'wÜ[RØ€<( ˜qsòe«²½ºƒ1 ƒ‹±©,†Å;·ÁWF÷ù§|s’BóW’lüù1–š+%P£BÛƒu)Ñ öÆüEV±4¯¤éW>c›Ž¸Ä’1fAƒ•Jh;V—(Dðjëk“`{M(É2©‰8ðHêÐ{_İêKÕ¯yù«.˘s¼nаlÅ®hÌK%-ìCÁ‰UDÛ'öcZÈÎ3×· ‰ù:e‚ó·/œ^×Oêrqz37†OÃc?¨a bC?w½kÆÛw…¥M†)²á/£§ÂÃùãvM¸n=ãO>]?¶f Q’1é÷³"%¿•Õ ´&V¢ï`}hs€’•íð„ñ'}L}@»¹ šŠ¹¨eâ7ˆç×µøâ?p"Þ6ò{Äo8³ŽN‘Ã=ËÝþ¢ÛÀôfÙnû»F(ï>‹eºÊÃ=|È@%•éø¬ ?hR C_§µaC…ó:ü–t~ 8ù¢X[üÐB7ðLäþýôÛµ“GF|.9]zXšéhU¼Ãeö!ÂN›NÝØN]»~%!Iƒýe¼øý{ðO‡õ–y9VsÒì¿÷PÜìÛû8±óI&ÍMtÚ¥%¶ëZOqÖry¼Qe‰“¬}úÓKÏ®óâ+jqø¿¸dxŸàî³¢•vJ0hæ†ô+c?U JθÓIHQ—hA`GVtä\À– Ò’ „áàïþ<_âòp'.ÿƒ>ÎP³î‹¶›ö©õ'rJ?ÿÜi  2"”’Sx qqÆ1w(¯ŽËmÕûIºE±Þš¹ÿ_‰:<ò0³Œ®Áö ø@«*VñíŸ "rBj1N&BC ™üú[܇Ñeܽ„“®i‘9N±˜H š+%yÝÃtO; =÷ˆXFC·*É|}Ãa—nFè»Ý¹|ÉÙqÏ4Vá•cÿØI’ŠwðŽâ…‰ŠàÆ'“ Ú:r6ÜÖ L)–ÃHaö2ÉY…i*$Nïž(ì÷Jð9 .Ã+)ÚŠ,W‡  äÌêüÅÿÇÁ{Õÿ¬/ÈK:Q/³×ÿ²L9cñóluôuFx¢˜Ö~ë`‰9úŠm|®ZÏám⦉"©n]°÷õiµt]È~O4ú™¾© –²çžMêi{¯–¹q˜‡#*Ç= ¤ÿ&7}Ënòq;])Vq@iâ7µ'qíÌ´èh驲õ›Ö&ù?œL?‰)8ƒç‡òÇ4t]²šÀÔP h¦ù;å7@Da8Žd˜„·²Þ,ÙEÉÔB‡·¸ÿp»Ûß2ÀI¨üiF¤%®p?Å?Ñ̓žÃJ´^T[E»=°)ð2E÷Ä »=H ùš(Þ;ˆE—e¾‰J8ô9¸»ó﬘ùÚzɃÎÙ_yCB|°9Ú¯ÉÞ’Ûf“j_Pîx”ê 6¼Lbü:ô¸wÚð˜GŸcÅ|@Ç]£ÖîŸT ‰,^#X÷ýùãO¶Vƒ:™Å´ÇHu]Ef6d=v¾tSøócºwÂE?gçP±ŽD 5ïa É/=Kƒ.áS!Ag[Þwé¹C„ó ÉåÃ{ø°~oâ¦&|·œH GJfTQö¤xļÖi%Wá9ÒŒ.ï:Ósb-}_ˆ§Õo <Û?4‹´—æïÜT‹)’P<þrAø–QOh ËSÀ}¿ Áþ×,1û\â^<à£+l­e\ëeÓ€AÔ,úPt04ÔwË×ì,ó&D:þÔ–õ~IpfºÙ˜|ÝŠ´œ)Ã¥ývX¿3…,gî ÈKÑSÙùÌu ÑÉb‹ö*;,$M£Žý+}Ü©;BË޽¶½c˜‡þó·ÿR¨vÛ±fNäųû·…½:q‘^2ÕQÂ8‚R{)³ÛasÞMêÑ¢Éi¹rª<áºføk*Ýl™£@ùÁoýME±7„Ó êiÊÀ5 •)|[œ}Ž>’&Ð…žÊ§„­èb#'¤©’ÝPƒ6æ™ñTÃ9†‚òß§É:À¶]´ÿ_ó^$¶]SÙܰ­š³-ËEÏÈW nAxE.8 ž…ä7qÖP‰Îfµ9 MÑ‚o5¨Jâí[ýn´ã¿Ž г¦Û›@?«±9Zн±Üy]Ü–iœËÁšê )¬÷¨Äø×–v @ˆEx¢Å%\¯A´GB6m“–Ž£¹ëqþ`ˤީr?X³(×1·®‚sxç¥ð{¹ÔDñ(Óá·Hfn€t€ð«Sm4í\ÂKkB&.£¯‚™]Ú ¼ÙÖs;2 z¥9Dl·¾^‡ß‚êíÍÕ’à6ÇG éYÑ©¯¹H£&yùÈÔhæò%¤‰wþ›Úéz/†ãÈåÎ$žŽ^6Çw£È“.e ó‹‚…Ï ¯qKÞŸÉøµ0âDNõÞ#ð(;þŽ;a«í% 0hXcc iPFù³™™.«¬ëëcÀùÅ>’ù¬Ò­üÖ ' `„ä¹’ ð¹{žÁó(NÒ8aB:«‘rÌ0ç *€1“ÛHF¾˜â­ ®Î˜Òèi\ÊÊ7Ën \ ÍÑ6H®ÚÀ)ö˜+ìªz#Åñ¡[,¹¶º D?:?8&ÜoÆÀù<¨H‘$&–-ˆsË|3)=-€ŸÕKã+ˤW '”;gº¢)Y$${DC)Äõqè˜dÆaæ(ΡЈ*ìs&—Bs@Ãæ‘{Lƒ=Wà…bÌd`7°>ƒ ÊU~¤DRbЀڨÎ$ü¹ë Ym°ÄøïÿnÏ+Ž|…S¢¡°WSssØÑ#…¾yc¼pøÉÝu£¦¶†* 
/BýÊŦ¶{ß¼iUò·b)œM``Þpªc–"4—‡¢a´Ô s©QÞr¡zýQ¦q2Rª¦ôN$Mó} p=ò˜]šøjŸ-¹Á‡J´\hØÔj˜ÔÐi[Kor‰Š.2ϸߴçÖ‹ÉB£uLþqPØQl¡_S¤ Â–n™QlÒ3$'ÝW6”´é;µw.¨,PºÁMD$ýâ0½1üø•§‡åÑ‹¡Ê,Ó<¯ %íãçÿ¤ª(“Eþ7€Â¹îäcæ‹÷PÇs»ELÑwálˆ9ÇY ¡…ÕivþÃ\Þú¾ªy]œÉ< k2‰˜Ó̬·#',ìahl¬PkùF}¦KE¡VÚèÛGwìÜ7”B™ù~ÒG¦sp¤èVk§&…—Êbõ­uu,˜¨n*¤sû( WàEL7$Ñ·Éb{#Înù¼Ã½[È\GTx€ÉЂ áôaƒê—û¯çöm5öTÍ× •†aÞ/ŠÄŠcDVRN~‚ýë¡Û8¡n#g]Ú†&P\õ•Ȣ⅙ç{ÑÜ5)Fq‡2Ø1rRë e±´3 „U5}Uô³Em"uµÿˆY^ý‡GKk¾«7¨c¼§(DHL2¡›|§ðØãô¹Ô-à€íÒ¹\o»oÁ;uôp&¢@GwÞ¼P:ÓÖøs^íh@jÜþø`±ƒ3xç ^¶Í€Mî1WU[O㜼Jœý3Æ`­ëí‰|1’ØÓM ê¸w£¹?ÛTçí_[vAµß ‘Þ'n½F‡“až*@ë¾ í:O¯&*nB¢qn³×@ÑÀlÆÉûD=͹Sõïþ¾M[|åöÜ ¹µ¦(ÄR ÉSº²þhQÙn68ø˜ÕKÚd"dêYÎ@i½64÷àž Õï#±U%ùAüRgb?Ô˜o)sZ–+ 4­gž’=ûeO²/dF²è„Ám8¾p÷Hæ ‰‰rJ X r‰‘•»™KÈDFI¯j¤ƒÑ–S÷9Ç V9´S^âíJö)ÕvV›ž¶B&jT,ªâg iL:Ϙc¢êó´Tâ½›"LŒïà}¹2Ý]ÎwSSé(® Dzým ë¼h4Çs—ˆ’«…eÕ”8×1Üéžç:RÁÌfÀèè¹dGT’¡ùŸm>¸èq¶öç›<#bÑ2Îv"ïþàýʤ½˜\üû’<íåÓÄnû!Û’®¥4†JTV~OÚèZ¤är3aÂ$$ö•ò&øò9¹ÔG:’»;<æŠùI S¬"bÇ‘$ÞäÝÏj¶ QR7:jo "Õú@p@)©{A¼j'V°ÂÍÛkøqÅ @œWŸ,!+äGÓ;BмÖ*Œ8ÀÎ-'Ó!å’} ^_"³¢5¦Ò4 ¡¿wÈ sÜŠ®t Ô%1(þ5Ô«äi¢§§N â7z›v{Á@èÿiËR‹\EÂlRŇק|´HDÛ“K‡SögS£mü8xør¯hc¤aÃû§sùÍ 9í ŒT~ pRˇ¤Ÿˆ…ñîÜKØÜt®p %(OØBæ²Ç2A2À–ªd€?)4@å`Vã“c ŠJ2V*äŽ_áŸ*NÇÎAþêböMTbIÔ|~á*¬l®ç€»+•ÿ¸½9vW‚äżBÍæ¤Ö´¯¶£ã W-ϾQ~Òμ$iNÿk®Z¦Ÿ ûÌ«é©Fºû©8XG× zùÐu(S‰&’²pÊôÂÛœñ†\Ñ&vÔËöûÈÕmèŒÿÃð~q¸U¹dA(b™µ2eüí|³H ß8¿H°Ø¹´Yµë#Âó·`JÓ«ÿ]'¾:Ísþ™{ÁT`„\Å#¤lÏ"w‚§õy[9ø:z2ð±ÏElƒ]+_}ñ†Îû%³„Z$hsÝJuX<Í9l&»·Õ ë±µO~R @ݧCÞ/õCjÖÍBçÅä!ˆ'›>×Ð%7áeHCÖ2r·5* ròÑ~í6….ªÆœ¸ ¬ÊµÊ†;.FÞèxw³¡ÌGÿs:ÅhÙh\N*@Ž‹~¾}*žQd®Ç´ûâ‹Kîa-–5øÎRÇÒc&éß¾œè¸Ì&UIaè V’X1ܯ ¥AÇDɯ±D$ïÃ)úÎ|A/«ªÑ=}v²GÌB#^¬Lù%S°Mãܬhiö«ïpøËª2;º}@¯ÐZßR‘|®ä“(!HÌjp¾®+ôÍ|­×Þ×’'íe%«mkRÏKQ˜KÌ£Zvûêß²±€~^5˜’à÷‘æ» ,æ‘Ç-˜NÀ{RÄ …±3œÙœ†£ ñ0ðŠ4[x¨°Û,iËYÐ¥ÉUcšDŠÆçv­/ÈDÄÂû¸é%Ž*U ™ÏªÅwÝ” ˜2}/ÀáŽÒd|GÜÊÉ}«ÜŸV…Bw<®ò캂ܶIK8—±Lao-µø„ìæ‘KØÉmªžcZaÄ[ø1R€PÉ}1ÚÄxYÈÛ<‚Úç ¾Áì„vìN$ŽAØŸã þk qœmmV×òøu[àj«¥Å1Ìþ/ø ´jLZsühø#™{Kï ö’ ŠÊº\fıbï@i]têôiNü0-J»È¥¦«éÏ3¨_^æ>Ÿ¤ñ4åüÈè3ÓyºÚ¿â"ÚAˆù³ª¢wÌ*pÚ*¡¦ñúŸ«¼ƒ-²óWfÄ.öJ$qTkgí¨<­’b²T·Q¼€´Œ÷Æ1päÍ¢­3B¹³l ÿÅqØz­6õI<ð§< ‘u§ÈppSŸkNa>á?6{Ì3ÕÕ+Ôi‹ËDëËRtH€C€6ð§ÛêˆK!ŸHhŒWíÚÖ<Ÿ§îËZXéF>Hʪ­”22š¨¾Í4g—GxzËcfß®4g‰8»-ßa6z¼Š(Ûº© ¿R0"ú’ÁÖñ۷M”…+Ž—äyë-4ìõ}˜4|$Ùfþ·~ ¯ *ïžSx¢­lùmœ •"óþ©€$—£ióÒlþŽþwöZ¯ýá—9ŒÊø%Ù•LÂo í²d…÷w=ßêŽDG sVûð,@àŠ85+~ Ïôcüã'Fkþ­Š-,ÎÓÄ¡JðZ:$ ÈnãÌ ¢Lj²3 (¶+`û3¾;øÁí‘ v­4ÒÌ ìzXïJL1"oýÚ¬t‡N£4à’¦TÁáJä\rô”ÑÁ>þÝdm™ÖMf›Hóßùý‡,= ÅÄîƒÜÏ6«Ôée¨Hí§T”Žd}þ~»ˆH …¤} QL)t­´t{£˜ÄdF±Ð³«U ˜¿Ï=Êb }ÄË‚5àòë/£bå&œ/ZÊëR*OSæ®+ÞÓÂê©_{ÑŒy‚z ,a8éyÊpÒ¿<å(!«  –^ß]ìù­ÔŸô±ý•ˆR$p¾ËX8[—öpE „ÿvƒ™£WÕÒ8˜¶µ&hMåm!žŪêÒ»Ÿ2¢C;ÿÑp¿/Û ¡ŠÉu!À“'?· " H#ªPÍ/ÈÍ,Šìµì#qލ:Ûï)ðÍ!T‡Ÿ‰}Hga¹Í¾Éü—Œo/y°ƒÊ0gÇù¨+$7ÔÇ2>ÝÈŽîUÅÀñ¤jFPãÆ÷~I” G6ãÖé’˜ò¸Ã÷Ár—ŸãÙ6]™Cñüv¯YÞö·³—%ȼ}dÁÈ„:!Èö)+Öy§ý4äÒh‡R¾çpá¦ô#îAþPL¦Ž±Ù‹V–ÊC¸$ák½Æ á‚Û€%Èâ 'ï@Ã~<“L‚|d¾« Wpf8=øÎ¢q'€²jôZ¬;VäýaÛ')¶r6G÷¤ÞÁ“n<.[ß/Q€£—8µí•$NEC —][iéÕÅ­ôÌkg›¥ `ƒˆ¶À˜w?Þ̶%y1û}>0 ‹YZkernlab/data/reuters.rda0000644000175100001440000003773114656670133015026 0ustar hornikusers‹í}ÛŽ$G’‡ÃÕjC @ozQ ¡I +Ùuï^4T«Ù=}Ý®"9³X@ˆÌôÌŒ©Èˆd\ª:zôú½êM¿ Ð7HÚ/XÉÎ1s¬¬Þ0Zì ;9ŪÌsss»3ÿðýo’ß&_|ñÅ—_|õÕ¯¾øò×òãW_Ê¿~õÅW_ü™üÿŸÖ®k]ÝÈÿJþùZþù—ýŸÿäßœW«5>^¹z•—Y‘^nšÖ­šôe9M›,Ÿ¥IަˬI§òѵn–¶K'*\ZÍÓƒGF=J›eV»¿É[~vU•iÒ´Õôz”få,½Íê:+åom•fÓ_º¼vòû4›Íò6¯ðæªtéª(íQ#|0yzéfy•¾ÿ4þÝ-²²¥—·yûÉÕž<¯êôX©˜u3N“Tþw%D‚ä¬Üè:@u BÞº®žæM6‘•àóüÆ¥ÉÆeµü½M³tÝÕSY¹K×u>åjÇûÇ|Iºž‘P{Ý]Fò¥J~V4yê|±l±¸I·I“hýÆB,)/§µÃ›s2¬•¿.«b–—‹&íÖøòÑ£t=mÓDˆÂCï¼þ¡ìE×6­™L]Ýfy™NózÚ­ðÑ©¼>/oªâ†ßYfå‚ËžVe[W¸‚—%Æ×û8+Mò-]›2võpn«®áš ÷AùNv»_:Y¶,õô˜KŠW"‹LVY}íZûŽØæ+7JËŠlv§N„v|üèÝ:î—.¥Ma¼>ÕM—eU,6º…”ä$}//z“µuþ1Í… B«}´ZlFØË¢ã&€Aó®íDöù`]ݸ•^PÔŸþ\U³i…sGIQu²Ær$T}S>ýQKTÊŸuíˆôÊPlU^vŽæH·0 è ÷mU³Û|æÒBØU6ŽÛ-N(æ»”˜Ï¥•P/»èd1ë¬ÛS»ª–#šʰói².pÈqþIÞÒkùü&y¨eíi%»¡VM«Ò/„‘G¡Y'Üœ¹B¨¯7÷î×*»vª øû…+ñPÙô"›¸Bø+KZÉÿ%m¶ÐW´ùÔ¤¦„zÔßÙã¨A>PoÕå¯ÓwK9ͲխÔFñÌoDyÔG‘ÙZ¾7’5pËŸ»I-¿Ú¤Gò¶jŽÏ®dý²7BZuK^Œ¨ù¸³£Tø¿œNÁ Ty\V—zŸ×—Ù»ÀùyÞL…Éþ“Ç'÷m~˜¹i‘‹šÕÍf*)îãÒ 
Ç©v9'¿Ä›¨©;ÅööU>›ApüF&9»qe1Z…K—·XˆMQàÿø£àY•­sè=UYþ ¶¸gnšu§¡ÒNÅh‹Q°Y²ÄÛ\^.koº ÞÖæY!¤-åL;è(‘×"“oÁˆ‚99ïss"Ûò¶jA¢|¶Å»ÚÍ:Ÿò!µ[Wuÿ^–®hÉ¿ÜÃ2êÏ’~-ÍáÙµ“çèáÍ›¦ÓMRä·É™È«¨Ä¥|Ô5/ÛþIrTD¸äï¶±ÙLD:oZœ±ªäFN¿ž8ˆ^9A[Ëa£žZ¯ëL2NìCU–nÊ3*Vw©& fø}çåWÿûÍôµ~¾”G ¹£[!Lý±Ì!dÉe+ìhÒ×"“æqÈžä³'Æë5ØÍlQ;åNì4G„ÚWÖU6sa1ŽÕ¥ˆÏ2=[91òÒÆÕ7° “MJ›³ÀÒ ÛHø&Ì›7„Kî—jƒ¬iœêø§çuu[¸ü:¯`[Ò伪×ßÉBÂÛ®ð<îâ®ÅØNý8¾Ûß¹ºÒ-äy™ÙÈÒ[M¡M…º(ká<8¥Ñû`Ø ;¥û(Úß¹ë15¢’_;]áÉáô ¿wSZIìyu/lžÞ`tù¢y­d3’I5Ë][3<’ z–•×u·n§ш]ÝÞ'6>_ºÕëì¶LÁ>áë‹‹7ß Y :•«N;‰Št¦k<8ݲÝB§ìž0B¶÷g9®²'o²2[Т’ûO~ûÛïFòG9í—¢°X:Éå5 ×Õº¶/—Áy 6K1êâjíïïû:x²w(?á‹é«÷„¬=¡öc/•§’)< ‡ãÇÔ^CªiÙ$º$é}\u±ð=ùÿYƒµP?dsyhYU¥Ñ÷à GÞkseØ¢¢0ï?¢·#Íše:‡E‘ߪ«3‚{´‘>R Ë„ÍÐ+/Ë…ªªþ²ÜÌ\mÁú¥Z½7xhMWYœq˜Øñûqúôò'Q¸²ª¢ òR­ÔÓï^§Ž|â>f00}þ»Ü’©—EzÌNïµ§çÇHµ_6ïÛÛŠ:pŠ(FŒ$L‡Æ¿rØòuµ>”´°Sm¼Ø"ñ5*€s+§"/òO|ÈZ¾*Â9³@9‘ŸØWpÜnáŽÜï¼¼ v+oë’[|ÖKúÎ}áW‘N”VÈ3:EÞ+êÒ ßú”‡¹³² Ð PWбÈ™£jv{µB8,ÿ9ƒß–‹Ž¦e¼û+o ´â[A4„&häÕ0£éB\ùlM7 š!¾fk9 n°@µ©DËWÞÑß;|ôŽ«D‡ÔWt%Ô)ÇëÅ ¥™úܬ—pÕ^Ûˆ9 b2àíÐ.úÀ4ˆ®’°¡ÕøÍ<œd¿béê±È-:é¢ÅGûãý'ÛŽN1‰žwr´á7kBê`|üX?룪­—ê-Άª™D¬.±øK4á)QùM>Cú+qëÚuP3·ËJm#å”D,ĉœ‚P‰š„ý÷:Ó?ŸWÏÏžõy9ÑnA<ý¡+æî#“P½>{°y¥ól/U<9l 3åco@¶ãÖWSÄ´‰DõÄÌÁSF,½PÕ( –O í×¼SârÐ"b‘ñŸŠDlq7C„6bseU¹ÉrÑ)±øïÆ’‘ž˜*/ŒmðmôŒÒZ¢ŸBÄ{­gü Zº-šw²ù?žõ”gb9Šœ’øÖݦ¿«êkì®°¨¥Z‘0oÕç­º¥O*ÿ‘×0ú×xez¾™ÕºQ¯žüÁÉ£ÑqHš2à?ùP˜éæüvdð|Ju^ËžÞyΓ1CjBÕ` Þ5ų/Äì˜ ½øh‰>1+ 9s$x`ȇQZ¨zÓµhó•±–Ñ0G¶¿où)¿^Ÿª3ïºD6$Ý?å[圪û:~4ˆ®wíɿ؄0ëâãÚ§^D‰‹¯$´5yÁLR’u·ªp®½Õ¬óRm»¼4ÎW‰3%^1¾ûÚ‰Y¦ÏjªñFsÐxhþÔ¼™æŽúòÙœÄÇÖ”ÛNœÙŒ92ù›æ*ÔéÊ¥˜·M¿Fr[âH_Wú¼+h¿#ÅòAQ‡&vc€h"iÑï{W‹ÞO_W”¤ôu¾†ãÁíjáœÈÛ_8ñ,¼T°Y· ÷H¸òW(!QËÇ€jdc/Æ…]„²Òƒ+xñdª+Å! ÎtŠŒÓÚ#p~[–=ñcq':Íòl éÕô1礶MNR‘#{¡y5È8ñ A!¿–MÅàTáðY¯s -‘¶Ên ¡gÕ­ÅÆu•ÍÇÆÏÇé‹®•Ð/ýQŒÏ½‘Ó‰KðÚÝæM¼Ü‡7¬»t¥éƒ±€½ÔRúæ!òÂÔJLSËÛs¤J}ÈÌj¶W Øtÿ‘E0È!‘X Ì<½F̓ñx|…7¯@¶u†­$3>bA ^,¿)*óCoÅò?H-åê60 w$EÎd‡ì4Œ®DÅ@بN&uuíU” AɆ®˜×-^Cy¥ªCSé~÷59¤U¥‘ê§‹Q¾qH©ªAï=_ÿâ`»|Ž©FBCê+Ç{p”,'›¼¦î×Äê‘_:äÁa§Ê ‰+ѧeëÏP XMfÕ-‹o>¨aØ ‡ AåÂ×QÅþÁ€g I¢˜6x7ÁJë$wv1ÞÆ ‹‚ýŽîŸPg!uˆÊÉ^›}Œ};ÿ":ô –¼iècOËqùúÃî@ôË¿ò•LYÈ…¦œÓ¯ßgÓ\|ÇoÒ×íŒ%ˆÉWø×Vª —Q^5Á Y‚Ÿ…Zp޵Eˆáãý“Z¥°€ýñáçÓIŒ±4ÁÑñáèp;ÝpøGIÜá‡×yãKµ 6E[i¨÷¬Î]bª|Õ3§ñ¬Ó´ØhmÚ_ŠÎ´´(e#p0‘Ä1;<9ÊÚ<÷†9_ï`âáPÓ~GêRžŒO¶Ëú?[ÙE«÷¨öhĸs›P’ÏY:ƒ{(ïl—8ŽÑ×gKñÖH Ö]-§óþDÔó:w'j†òÒj\À`p±-Ûšæý¶P÷eˆJø¾§|ªêòÇ ¤ã‘ñ¸€b•ˆlÊÓ‹7¾óejGd†º<°(²¿ž¸ˆCVˆævÀŒ"\ mÏÁƒ’ÉÚÉÿ… gz†Ê€,èÊ£•r× 2¢ 3Qä“_3>âÊ…h"äkG L ù ÚÂÇdûãƒmñØ ƒ"2oâRº¯â­|A†èÇÍ~ß™¸‡3Ò2&‡½U]Èwî[þ—wg%ßäÙ*·$ïÝĤ%îžKìáײΨ&‹•x;4Ò‡jâ°c|Ã릃ÃÇÝ”€gGr¤¶•“© ƒGÇü¼²öñ7E¹}æFå]žüÝ*!ùx'|>Ôƒå#OG'Oö··}øfüºD×Èc÷¨Íà]6m7 g?­Ä@ Ø,äö ½š&eUîm³}Á E²…*näôÓkT#^Í× $x-‚Ñ3UžC°wì'>•(v*oý,:e]eÊk†€O{;ø0/<>»ãÈ07©¼1zûÈ8`¨Ð)3 8–ù ŠÙ^Ú¶¿FB¸¡ôb ï·?ÙQÇy §c ¦!ÿVÍñ­fÓäU¶w¹Ô”»×èüºñFU ðd«„=K^®ÄV@€Ñ#¾ð‚Ï”™¿øëËN´üïç¡Ó» 4ÏÝŒ©Æ¡ªKŸO\QVò#GÔð!Ê=kZ¥gªÀËg¯¾¥ï«º…£/bî«+ŽPdl±EH[jÜCIWÓaµ%Á=%ùüBäºdùL¨;îe³sýÿãéO¢š…òÂqÝßõ]0÷ P©0±è³V/3¬Lò¶BîîÁó ªð¬_úä>ñS>¥Ÿƒ5O߉9¯ój¤Šè¢‰Ðü’ðŒ¨².Ì>!ð†l4ë:Û0–Xvåÿåk¦ŒœP3sr»8KßôhËïW|‰®;‰Uÿ½÷âé!瀅|32,!iÒ—üe¢ÂûÝÇe>É[…é[Þ°w½£ÿ%÷æÿúM,Bäö%CûxÑ Z–þ—U)dˆRþ†¸ëà•€´ÃÑÉééèàô`g·“mËs‰»ª[¯ñ›ç¡çG£''OF'¢-ú'%ÛK0úbGE õÑŸ†< ¦3’ÓŸgÂR·Þ"VïŒãùØ9ø ±/Ð-DÁ'›‰–ºqe§T?Ž[Ô /'û0=è‡ÜwgYžå¤ÏÅ$ñãŽÇOø¸{ôèhûAÇ£Œ³ít)n¯ªÈéËÖÑšBÆÓ§Wï./ÇWj^žž}êVÀéý'DE…°‹¤¼Í×k9 ¯ªkѳ¯^¥Oß¾zu%OÄ4d!rÑ#±©(ùE¼é™I8Ú~˜fߨz?¦3úÆá=߈ñärˆg°99ÃùÌTc²5¡õžâXÁfPÒ©cSÎI·XÐö'4¢Ÿ¢»Æjúìk¦(ïK Yj(ÑIÙ8DIPèа¾âÉ%r…„ái¯‰½Œ+«°ÒB¨~âOò‚µ†dãÂkQô9 4>Üu&ÆøÔôdüäIøž @¿£IœÛ³ÌïÙ•öá¬R'.NµiÒýѾˆæm%î023w·”|Úg¶Ê>5¢jZäè–)‰çvST,dmáQ¡-0²©Äís1©ßé(kcµ¬nE<‡ŒDR*„ž„ç(„0:°SûíãSƒlÖÖƒ œ9?yé¹Ì U¥*À¨.Ô~tuªì -ýÜYÁpžÕ{ó¢cmJaüzaðáÃf Qv3[ B¾@ƼØ(èsûÄ´Yheìѹ“ 9vüÕHá7h†˜Ë™~ªŠôþÿôîõOÐÄ€+·••*‘iP.%:JÏ$‚DdµžâËg—WÔêÚ"!+ÊÄZ¯3]ÆÓËWïùg¯g›îˆ—ÌÖà©{œ»ïaHÕHà«çé×ò¯|µhàKñ½šë WÞù¬')y^#Œ‹õZv‘†áVÛ*â$ÙK‚áa•åçéÛüážüe'jc¹eÅu–ˆ`‚Í´b¶x4Ë/ϓ҃ãaâÊ·3D†+ U8jÖ“BX©ÎÐY1ÏÒ½×Âr,ùìõó˜Íßwôˆ5Øæsï]ö=–d”€3W¥‚éCtõÀpÆú‚ÅØ‚Y]úeäöo³æp±¢“ºmÈEóD oTp–Ž"ô_ŠÓ@ysyq¦LÁ Þ°#m%ŽÙ ¡i"faÿ`XG¹«š[kôj”]@¦ÐÈxX 
æÎÍoÑsâ#O9fSÝúFñWs«œ!n¯ȹªE=Kã[Hˆ¢U=t~‡O÷Õ?yzùâ§ôkkî}%OU«o|fÎGRBfË4Õ— z© tÎ…ø&l¥ìò–˜¥ÅÆûäÖ9ghäyæYÓT>Œxúòü‡Ëñk/€>y™—ƒ®<ÆË|3h2‘³”Ó7}­æú^žó-$V•ÖZÓ$¶ j¼iâÔkyá½}­¨²rϲ‚~4Ä#Ú?ºý"?½ƒb¶•4t°Oöî‚E‹[­ù扪ééáJ–e›"F›wEHÖV¡sÕXÂ4eI"ô>$²×Ÿ?¥{õÄ÷UÁ]4œ ‘ÞÝPf„mç$®!%†ÅÞ ¨©>,÷Aˆ°~{¿UÚTÈ|#ø‘íwÙ‚-òê³²#S¡Ê¢œdEÂGé‚¥)_Qš{aÚ’xã˜3σ¬ka=’övY#äIߎߌQˆk—ÍT¼«™·Á¶Ö]‚+ëò-_Ü™áƒSVǧ!STaJÜbT&«¬©ø£žUŸ§^e3B› =l ÌM>)\¯Æãñvjîû\‰Úcz<ïCïe‹uRˆ‚׳ÝjøL¾ºµ ¬¨*÷FžÈ¡Ñù„—&ë02çÁ'õ=Ý÷"^’Í»÷羟¦7Tè:ðäYÜ¥ý›jÖ2LZf³™¢œÛÆs’£eºV£°¾ d3ŠÃjB³æ2+Úm ¦Ê„oºGXöˆ?G¹¬È¥¥aec~ø`ÇõÑÞow£‹ O—hYD+MJv µPe¬Å[0‡ÀmÙfÇ´b9ÝDíêr4á×ø>MÀr äy}Ë<ö·H'_ÌK€ök9xÚxž­ä|ÌĸЎ¿å朼=ÅšÏ/>œmw#ˆób;åéÐ%󌃦Žf’ÝÂw›n½&j4匢my3ñemå³ ÉîÇEù_£øœÅ›ŸÐÈh G&«˜øSöén–?[ø¦¨ÚêZ·8t²3´¿ÌŠ p’¸OÌ(|á¢ñH¤ýapbÈL¬òÆcɉ̢²$ý^;·ŽÉ×R´ x&5ax›m¬N»ÆX)´P^»ƒû7p2Àjñ|ÎÒÖ-Úx=DÈ‹r7çt’ê¬^h#÷Xq mõâ@3ƒ#…A‘‚U½+´j]Ý2øIgŽ€gip(»£˜–¼Ôñ,š‰g†™z‡öݦÈж10.Æg&þu´R6ÉõóŸfèpÛ Yi¡)ø„Aµ“àŠmö,³{æ5‡œÿuÇ¡ñù£2¶ºXMGp‚'6ÆÚ„`q €ÖÔ2‰éßHW- æª:D&:JÅàôQõØt'áÒù¼')hòìÍ c„F3˜˜±3jÙ¶•Óâ@?¨@ÅŸéoéP¯1CÈZ(Èü=úÅ ×­Œ9Á"ØVß íÒ³ü÷Âî7U×4ÙMVï}ÈÐÌá/²úÏþ1À¸„-ÞjÌVÞäu¥0£÷U‘c2’v³ ùn"&G´6!¬¯s5£ÑA»Üü¦Þ}®åT[É¡X­áÙªiíAƒ½õ»ò}ÿr•ó¯AÃ7jÉ‘øÖ¡cªpÊ–kÌÕ³é‡)mz0û§áSò\swp ÑøKCëØ4˜Í˜„W)1Ù'Í®‰sg]š¦Y‚oCœµ? ÆïŸõ«S¥F”¡ú˜jyÑ„…[/«ÒRnrw{ï €¦•G{ù2jïg…7ŠJ`B:}*³÷ÖOŽ"ìgú%>†Ø |b`‡³hÛb–(!ýpGôBŠ&bV—lYÔgŒÒ‹ÎsU~{)žm™¾È11æ5O×¥¯–ÙØƒƒñÁIš}˜3 1Ô€4¯ŠiPߥ_èZ»ÂC§²ÿä^ÿ'Ñ¡™‡2:”ý8ÅЙ3œ[¶›ÓH›qÁ:µ&ÃÖ@´±õ1ÛÎoOÞ.f3mÀx1Ñ€q/B<£`ò©x!£ÌÅkY÷ºB“0X÷E—Ó¯Fq»Ê}Ôc›B› 6*ªôÞ³Mñÿ¼p/Ò÷ÐFˆLyUC{÷ÊK<}b@™[]‡ô)ÏxX°vœwœäT´ºƒVµo¬3VÛpÔÈ¢IofX _V+ìLñ·÷-<Þ^ÊJ=–¶; pr´ó–Ec<¥ð·1àæZ°ZÒt¢«ElE^„‘ÂÊÏñ¡Š ž¯„Ó«ã,¬ýÔ×þ·’31AMè,Ëzé …ü#-î¡«S¹©H½ÀU°êð‰ÿ̽ ÛªõkU»è¾ej—ý³?{ÕÝfyû ¡ y¼ëÈ&0‰¢B -?!dá"æ2bÒE½êxF±/j}bÈU‰Õeñ~®MxS&²\vÍ43“;QÍ¥ØÂk‰y‹\4ËÞ«eVäó ?^f“lɳˆX ¯ìG¯Î2à¯ås!Ÿã¼…†À»?O¼~³iN­}7×Ptsv0ëÔ]‘÷Ù^@;qR†²“$¬»ÕÚzœoÔÔHÞòäH[3ÌÊDZQIüZŒó7 [4´Ow¾—¯Ö¬JÇ¡V!Þxºö÷J›Àˆ²‰a:WPX~p¦÷1áy z?£˜>ˆ±º•æMÈ«}§ ãÀáKè2³tê´QDh IÂhæ®t7ÙÎ0Ã&ÏJŸµ’ÅYMÒ‹U^k®C›K)ùTS+ìë4ôvA.P'Ó.›qìÊJ{Q+N ÷‹QÒnÏØÞá«FƒíÜ·®#¸ìñ Dt¸‹É +ت9å‰WV~ô¬æ«³j«ô“Ç#Ï݆b‡†2K/! 
S±­3c«6yî~Ì(XâÒNâx‘GÕž æk†3µô‰ì0RÞ^0Û¦Óf@;?õܦEüy9‡O©_…–•=TÄT¦‡"kûnŸ»3üóÔ}Ä}Ò3íÌëø ›¨³« ¸–¿Ïk+D‘ +ýtK…è‹)®QT»øÒ7¤¹4¶¾ð¦}Îñ¼‹öèV#@°‚]oÌÄCæÉZL/K¬}lgº Oô]Ê…fT`èël&ºï[=x|~GFB[¯· €ud¢&¨‡\$Z¥²1†<ÆÛ Uÿ^cú4Ä鬮ÖëЬ$܇+Þ q —„‘È6Âq¥ˆ füÜS¶éo÷”FŽÈCmSãìs[„9h*?‹\hyÁÁB1x¡£–9ÅpÐëÞçuÌïÁümšSB(”²-Þzíÿ¬ŸuËð(«~ˆÍð+„Ž‹oxrêÝÌgØÙ*Àþ6ò¢ÈÄFèBÏ¥‚òóê³4Ñ]Hð™É2Ý,èò¹Í5b9ÆèõB¦ÖQï4ˆy#BŸs¶Ç<›â´û>0>³5ÓŇdR4§lç­^pBêUv…×ðÊhš«æmN}^ñâÉ8sG wÞÊÚúÚ:<æ5G¶ˆŽÈ ,Sð€ ä¶×ÕŽ^Òh¢´ÙÎI5ÛÈÛÌìÑf7p$o'êÎ&í úì*E¹^ ¿Ñ]VÜ`´¨ªvžŸÝ¬´©Otê&íÞ¤Ã@A‚H Ô6,jÈj-¡Â Û$p`4»%Ca+é0ø‹;´§çç ¦»K>ð(ÉJ«µ›>`†nêÖ‘™¶ÞAÍ)0CçŠæf?qD>½ÅÄ+FÕ•ŽXj¨ŸQT "ú U’&ßâ~ýìFET},ÿvQK*ùk,³Ðçøt“å<"+´tCdm“·˜Æ.8‹Á.w¸(‘üõKàXE¯fPùÄiiõ§t™AÆx°š|›ÖUÓ@î­Ò&æ¦Ñ>¨#Òí|–§µáN–T­ «ÃßÅJœ…nÇÆ’"F¥~CÀ §6Ò:T%±;>lЇE=tì¬(²®´4eä‡"Cq3nÒEGË2²‘„ÓÛÐ4ßŃÐX·@QH«SÔé6œó³úÇÐDlüT 9¸è µ‰Q(%h=µp:­;ÑÆI{WÍàsx·nÔ«ÜfP;^Ðêˆn» lV­¼‡¡FÚʘy«»eóvPóÑÜÕ”£­÷}(véÖmì³Bèîì ÝÀ|ï[SÓËó뫜V¶h(`:#o¦»Â„œÖ­‚ ƨ¯kâ㽬¯_¶ßÜ+«‘€ÒYF ©ÖF¡ ŸDå2s¢#ŒÖª!ù°AO-lÀJK ””hµ<°}·+¦©0ÉöÿsâT ]­c!Bš¿§× ÑgÔ¨#åKzeHaËΧÈj„)tq¾±ê¸º+XöˆÙý<< Úì&otLý(ëò¦Õ3þ>whÔË®k‡ô¶¼Âj ¾+s ÉÊZ5‡VC³$ê}¬wOB‡pvSu ÕŒÏzÆ·@¼ÚØÅûáQ®nùõÂwë $ê{ü»†ÛìC¼ë  ?Ïé”B1}ë€ÁÕ»’‚ˆ²éß'éÈ`¨h¾2$@¼½µEèôð5Îk>µéAÚ¢É?þ-hM5x7yÍI§¢¦*hánM¶k‡®_t?ð^ËÔoÕ0—î ͳÒËk§ pÞŸ‚QŸíc"0ñ¨ð0©È'WÙ §e€xéÕÃÎí²QVhÐÍĉßÌqÊÉmeõ5“ôÉÕÕ(Êž³È{±¦«eŸ¢¥˜äí8àÏ ½õíéÐs›&˜4½ÿääQcQ¤­Ð¦;r7 p½›Õ‰\êÞxÝïÛSÄ’®=(’òcq±ˆãHrøŸr¢‚(Ü4Üvc¦¨‰6–²À 1Ó­¨‚¥¸?ÜÏæ3Y£û´¬À+Ò —žWdý©Ýð‚ÉÑSCÂIöVáo‚¨pzãâÁÞ6@½ÂeŽ×¸ºÌO nŽzt týM1­Áïש7ij•Å“ðsšÂœÙ>ãëcРr~¢Å²^UîE†êLúÛ{ˆ Jf¢)X÷µòŽlêºðIG­d±Å¹Àpµi¶–-CþÕ`bx§QcWÕ 3MíeV+!jöý©bÔHÿ½?½ÕpCdðݧ†UOQó(/ƒ`ùÌiØåÚñÚ)Ö·½šòéÞÈfïZ¼ÜRŠ…Ù z l{]Fð‚T£R¡°üa<Ð*ŠÅe}ÌQ²ªël£Æ–éžZ#K• =Z³¼©k‹ÔZ³õðU…_vÄièZõh’¤Ðƒ¹Ãÿýõ½ÌºY.Ú}Ã.Y4 â ˆ·å ª‰ÍàlÙlׇ¯)ZäO7ö—.wíp¨£OHöÍR–Ë(D8ì"Bp‚ÁÍ~$k’‰6jÚÁ"çÆÜ%­sÓ‘dÔÐ<«iG˜aº”1lÌ_·µ =óî^ÀT?’·±™ž:ûÉIŸ«uš?þ QP?®Ò 4Ävš)·¤ <¡(£å۳ϦÛC¦½O{çúnBøLoÄ1I¾Ä”1Ç*YN¤y.:´* „|c£s£)oöIÀ+¢Ñ‘[C”/×U»G0MÝcþHï?ÐËêq²·ÿíÑ(=Þ;üVK%‰V ˜ÛoR_”ŸÄÍFD(„íñÉ9Áp TÏÈg,½4”g« VÒ²«O:húÿ:Ũé‘Pvúíc>ét¿Çê5çGßí_Ä¥Ëfy:®q×þ§Å•ÏNR¿û ¥ßðuo4‰ÚÛ.šøjµèxÜ Ÿ¢Ûƒúûõ&Ý ºR=5™¨@Y»xõ{g.ˆˆ~«-åG§NF F(Ø'·»Z›¥hƹ 8n=!u.¨¯b…sµë¾6µ9W=¦ZGnz8xô-…yð:?&4q§ŸÄcŅõJ™ÎçЊs_Ece,s™œMf]±wö)ÿ$K(Ó¨¢uµ %T#E÷‰£«­zãƒýÓm.øùµ²×•ÏÆ½ÁæµGuúOSüŠÃÐìýãñ æ×îØ%Ÿ‚í×A’#r-? TÖ'"ùÇ[$ttêÄû·‡Öe:$/é¥É?¥µq`@<¬'džå…rÍ$BöÜ÷MÛŠMé%±y‘8‚M Ûù–Ï3@3ÊÚQÅŸ)/Û&[ÛüI z8›fžâí!NPÕ¨ƒæÍWÌ5µ§¨Lm¥Œ¥p°)^5+ñ‡Ü]™ôóÝ^¸´«¡î–.Ó!¶uý<›¡w$y"Òæ>g¤6? 
½‘ÑJÆxcâ3ÚpÔ–vËf8H¦DÂbC¥å¶«V[lSÞ”[2žøí’Ú3«z¦ ©:Œ­ùZ¦Ì:^%&6!:ô˜…ž«:“^kð\kYãÓ““íShIl·œâ¾}j¢ŽÃìïöÃu6þ>¿'¾M¬Ý ?¨ÉV@=í¦–ÍhÃ…ˆŒ»‘ûèJÜŒRß(‡¨D³Ì ˆGCDNy¡%â·¯µrv#/¶ÅÄYÓhz5í˜ØÌtm×¶/ÓÁ(+¿|°knX\;öN³iÙ 1­ùA– ¸Œ*Ó¾® ð¨uõþÂn±úóôÁ»®6Åëo„œ÷ñ²öìúBj /úZýƲö­ç™ÎÏq,}ùR´Aî9gÃ.}®Oä¿ãØN½¶WöªlrV^ý8èPÜ2ÝaÆûvÇD×è|BOÐKNñ¶lЏ…ÎÓxØôÚt«ÇŽ£¯Y›ð`k†”;}S à#ŃKÒ¢t³Uú6û„‚c@˪…»Ö–ć¾Òãã¯Xࡇ4ëmÌv2K½í£¨·×²Z­£}gŽzYÁýVrßÃ|¦Îôåû³¸|Fˆ¥Ê2ýˆÝ£¶t½âãrÎðÃb—I­s’#¥æ`ïZ6[ªWf¯JÆãqz¦ëÔê›!¨Ù‘[Y<áˆpo1¬¤wÒìŠ)á¨ÿèZqŸ …£@#|0È-2†ª­…Úó„Ë“ã€ýôüøuY1·Š)«¼YãÀÞ^gàóY‹¢Û¾Àß§sPçÛ1gˆºPÎsaïórc·“Œ8±+LÔªC˜œ¥E·þV÷-íf¶Ó±µbê`·CUb ¬¢v¢ñtr[<£ÓÎH_rßÑ×ãÇç[Eç™n×Õ+RÆÈËl²[¿éMÑ­Ö€Nº¹&”û[Ms´šê+Ä}Øtb)KQeÏ£í·1‘n u56ß?ÜÏ̃pª…S€Yƒ††äáðµED=DT,á4zL´ LÏ•­ »o³1=ìá^¾…‡9KK°éÛj´œ›ë¥ýûûøá7™Hà4éSúÂχ¼'téëîz…IOh ñ­åÉÛ|.ér ƒ×{ý,øRy ¡¿éÙ×sƒ¬øšÐÏš1Èn³07kÃë^MÔŠðüZ+•z×w?p9ô¨òD‚âVŸœB3Ïã¢~2ïGÞ¢ƒ,·yI^qwé6êFs·A‰ŸÇýüÀºØÇ|>½«À1¶Úy,`„fLþ8ã]4ã iôwƒ³£³ì5©¦¿?ºh£ cÈØ¿ë™Ã×ïMaüOUb\•q”8Ã&÷¤Ù^ÁDÉñg´²^Í÷KäD[wY-Q{Ÿ3‰r~1w)h¾"Rîd]WòŒ:STÂoº‡Ùf•­6Ã<ã|)¾o³õ@R´ÆDtò°'56±j¶pÙ ¸ÁFnwݪ‘¯¥ Ïñ~‰R š}"±‰_n ÐA¢ìFN¦ð„ŠçnRwvÕßð—+ØïW@¨ö2‡î–¯Ú4¨ìޝXø0º¬k‘}ýDø®Êt¯jÆÜwÛžyQÙm•˜2³ס@YYIÇø{€’þ ¥>г°H8´=ºyàŽXèÁÏÜïÑxßæ€'æ‡ù„1È2"2)z¨W®Ùjö±ÇøªUHŽ1=ad_‘/ïÇGþ08ÖÇ}¶Ïq§9¸½­ö樦ý^D(<5þ/Qè(»ÊFËÊ ­XwÍÒt'¯E â’ßÕÎ.Ôëï"çØë¯ |í_hu_³bCLçèùrÚŒ³gùÑy<]7Ò¦@T¶\:”ª’è¢d/úòú=ˆJi΃|o5­>+ÎüÚÞ^zññ£ì"-<ýí»·ßÒ7•Ä)ö›7ïžÉo¬±“R“§W¿Õ±DçKwS‡¯ž¿øé;<Îâ2€ÆÊÅ4Nä:&‘2Áöt›4Ö¬:V||°Ó 5ˆá5Ù [ÃÔÝq×z”0}Û]¬¯ÖˆXh4ç€!NÂ>ÂÁ6Jã.NÝW9í hÚW5àéâ~Yâ0Ǭ<ø7êµ\Ćj³C›D[á õ eQ-èQÒ˜Šò¢Â¶ãØ›¾3ÒíkFhÌ;]4JQw—Ý}ahŠN ïÃÁÝ`ÕûN7c+öð‰Ù8†°£²£y ‚¼¨øéÚšm[Ù';ìÕ´kÃÜÆßeå¤3›9Ç,Ôµ¢ßá6¬õ.WEhk%¶‰ÕýõuY°ilü5q5 gÙZ™É㨘Ÿ÷÷Þj}4égàDD!¡{Ÿëóå_}¯ÓX LE¡Ä¥F™®œ–¿õȰ§Z–cÛ\1Q_0y£ý#ƒn¼Ÿ7„ž ï*¨4>õñóÙ8ªêþ–yü.(†gr·F æè=c„áiòaÜ6-«ø¾Zf0€\ôyâù¹+§èÈýú‡së‹íyå“Ờ넾 9ú?üçPÌèÈã<_÷'|tÇ“N¾þñìâ›Qìu3/)O€Ÿ…˜ ’Þ±$›pB_º Dë<.ßö`¿<øôë¦E 87ãjM™ÇÃÂr¯v_]ó÷/[÷ép¶.û»ËÖýA)°`CØÏ¯Lvæ¾ü(!Ë|廲_L~q:Ø®ÜÖÎÔÖŽ¾rø—!ð˜Ñ~ª4ÞŸ„ñwø¿mGÔè&’‡»½‡ÿÏ[•øåçÚí?ülû¡5¾ìŒµï6"ò•$Q;âçºmýç}+¹ùÌz×nVc4P dMº£Ñ¦Ý•éǃãÅðp„™- J†XñŒC n,];è’ÔÎÈ{%í?0°¬4Æ{/&_+ÖK;D6ÿW‡†déùF‹é'Xáe£ôÒeòKáÅË—#@Qpˆ(ï*U‰œ>2X|‰*èEDè…àok ٌˈÐV%Pç7:МWqˆÕT{{¸ùö8#+™‰ð3s±æ|Ówt­Ù7[X SÞ0Q™Õ+Õµ/<¯±}eÇ<ªQ¨ >kûJ~#;è6w Pvö…Z塵äèx5°“)ÕPÌ™¦Ð›÷dú韈íZa°WèsþÂÖ._„'ò2xdB C5“ p؈ÖQÿô¨}¯ð—Òo\ûR*t1Xy©¿ëŠH˜)î"îw/dËŠ¬÷Þ“€ºùÞÌÂÞ'‘¿þ÷gÖ·]d¹—êŒ$ZìG=ê`‘ù4Ї|;y”= ÅNÎÏt~:• I¶\²‡H2ÂÒ^9R™]˜ÃÀ¢?ü¶WO³}cSÄZ¬`ÈH®¬2_Žëi(>=¶‘ÌF±ÝP§í3Jß±â(GÁËó¥P²Ýc µ€QRG ¬$¶@¹¨xo–aim ÅHg@ÔHÜí/M…ò}Ñ!AÎ…^ÊNq 1G›ñ¢Ä¦'+(¥ªlmà±1m¾z»+óaõÆ\D”•NémqsbþK.aC…›éÌq?”Žb` y„tnö÷úR’í·ƒ)êÄiKs~M(¥¬rD']‚f½öa¸L†{Šd™èåPØneØ£v¨oyP›@±íA46å-CîmÎÄŒ l´›µ‰8©„óTþ²€&$þ_d¦Öd×zL·áñwůþó?žÅ<‹ÿ Ï"ó<á)E“Ãä˜úý_gèï ³½¸ü?÷íüaóøôøñ–¾/iîx¡×<êàö°b.Z"!·ÖG!^ê÷W:eæI­£8ÜW(ôð˜˜ïŽb~[ÂÐ¥Ml’Éœé$ŠÂ¨EêìßÓÆ8!÷ÿ7•õ«ÿþcÉHIa‰Ÿô¼wL'÷7ÈúéXÛãu·öB#ÚÍCJz| ©"3dºÑ0jˆK< #¦<ºVI² Æý£r~nê/uåðÎð¬h†S<ßi¤˜<Ú`{r•Â-Ç|+7zZtrÓ¡Nn²¤C|ppóÝt·:aH'íp…w±VGQ©;¿c–U<|Ó®}þÓ‹“Æ/¼qõ4+yƒ]¸cc¬ëûèy8s“Î*Zl¦ÀÍ"Ô¯µ¶':ê£æ.‚}ÃÇ,{kŸV«Ôh´|Û{ëé»2 $ˆ/þ»7¿Õ‡ݨvËƒŠ‘­´fÇ–­,<7 ïg *GÂ÷pw£ BÒ—„òÙ<ÒMôþÓóŸ7[DEL{]âÓMšÁ±JU]2ÍpLÑõpí¹Méóôô¯Ó¦(òÌÏBä¿ôbÙôÍôUι£þG-üTáŠÉ²Tè©ç¥Ÿ Iœ­1¦·*q^Ûèn„^“¾ÖV‰‹vËp/*r=¤èZb÷£™ x>Ò§ÍzªíÎì!¦]r¸0u¦“Œä0¬ÒÚ©£ÇÒõÇ— 4ûRÔ"-N' sÛ5®TðÏÓç5:–γuõéS6"Â×Õé ëöØÉœ/‡ôC‚µC³+üµÎ¾ZÌš®] aUu:{sÆRÜ=/M•5Ò9Ê=\p x©Q¸ØÁZGér¶ºC©@õ Û–|&»Ò*>_ãX£”£e¡€¨ÝùÈHFiLÍaJb¹g±m¹i8í@~¾xþ~Í Ýqb,ÕÌõ6un¢—Óà/…jCƒwj›mi–EÖŸlïLî¼tç;eB®ï6Jýx@–øs^ˆ6oúÂFÂÅjÄúM‚šã•ämÊnÁ®ô‡ÉB|•¤—Öaÿ ®‚aÎ$=V?,}+"¬_‹`eòyNGx3}°y9²‰U­ÄÞ6ƒðíì6RUÆ3Ê㢨&¸ó3”""{àAJ“}{fW3pê‡Êÿ(=8Ú[rŠ¢¶ùÄ$ÎÑy˜ÛðÜØwe½eÑ-âåÐú“—ôë÷Çmµœž?¿:ßÖÈ;£ê à–é›(êuÌ2²çdÿ‘ݨlØü ×à`|xÞ …öÇ÷1ÒÖ·OFéï²i¾Ê‡Äh¡'*ÿñœŒ&{cðfñÍ9'a‘ Óó]—ý¾yfÚMа’Ç£phwÔX;¦™ïïo}Ÿ÷˜37€BýÁćI?žý_}ÝZû§u‘M„-_|ñë.ÿù5ÿôÇýçË?î?òÿ‰–ûM¦¾´ßþ:›þb?þ …jë[2-²Æ)< WÏ¿Á?ÿÝ ¯=¬kernlab/data/promotergene.rda0000644000175100001440000000433014656670133016030 0ustar hornikusersBZh91AY&SY°ïµ<ƒÿ€ÿ6H ð @/—Ý€àÐåa AUR?cõ!TÓÊz€z ÓÅJ‰@`4d4@4M€ÑÐaÐ 
4j¤ššG”ôõM©êšbhÐhzM¨`4d4@4Mòwwo‰ ;­Ô3S˜^t—cF‘Ø’Ÿ=ä0kHܳeœyº21ŒuMŒ«ÞbœÅ ‚¦1²Ú‚k‡ è5‹G3°›`ʹɨc’Êáa2œë@T&ÁuTDyö4ãÂ#“3;ºÎL– âåÆ.—¦rñŒhËMË Ú.™á*mJt(Âë&Õs€W3£VbË5•Ö¶g*¸À!Gfh‡cXƉ½P›Ê8!,6Eè &F­bƒ 望",±ÔfsÉ5‹JZVM›˜0Èe^Lˆ¦ÉS¥,NS9É\\CA¨&V̳:¶:oÞ8å@ïN68V½ç¼ãј4"‰½ë{ŸFšx±Rä­Xs a-TöµÓ™Æ & k$™apI—ÞÕí¢Á‚¸°ÈšlaÀäJ0l–qÆ;81 aÁ‘eÂ^õ½½šƒœŽs´ DÒaCM‰ÆCŒâd³kyÞÞÆ6$ôSÊæŽ•H§œÂãÒml˜LtÈ«je³Ï°)çËͼ6ås¦«ÆñìgÀ†Œ ØÆ¢<ëZΞ f Óp¦ƒ9Ó”·ØÝ»ŒW»»w^1ð>qœºhÎÅFiÁSÊæ,2;XQÄÒŽ²¦ÖL囃çޱÀ÷T­ZØ–š›:q‰HÃu='…Œ®’kÅÎm˜„Ä4¥Ë‡Ãy(ut* Õ”G8Ó™7;¨®nIWPÓ¨Á)“£çef:Æ@âË ƒ‡HSjÝ݉èÓCJ®:F³®õ{ >Å*¬¨NtAœ1‡Maâ§YAˆœ¢@ô­™nL„•.E NsNh2§ š`Æ =Y5•â² <¦eâºHªcYèÂ^/`Aw°–C&]a“œ¨%E‰1šl.-,áÕOV9S{ÛÛÂû=.mi[.'-D]Ù‘°¼hDÖ :É.YÎq§ ³\Ò žmcœfuShq‰Ör”¾õ ìë‘0E–É¥J«™èËØféÅe:³j£‹%qøÛ׳`&ÂÙzÆ‹.Ê„V5Ö´ªyïÆ/Nñé2s.ñâëDùúßMÛnöï| ¼'ÐúGœ‚êLæh;¤(vK9ä«<ìZãSEŒã`ÐÊ(Sª¤|û^n­B,:elAÈDêÆ´Ž4õ—± @ÏNB«XÈÈ\ñb°5«R1‚ B´N04¹Ä¶hΛP'R)›rfÊÌvXm¥[†šÞ÷{ž«U„ë-‰œÌ^YòÔ A9^²H1WZ¶-<š¤+š:Z}>Þ'—+ ‡H,Õž,ŒBdm,ä&2ដœá†¬å8Œšº¡3ÌLº‡×±=àÆ!©JÂòèÇx°×L)ƒƒF4웘ëÃKƒ‚9,:Üw‡ÚÑĸÀ¹ÌdËã90‰® \½64ïâ},\éÆ=zñˆNpk4Ê’V3¬Ü9ºj¯~gGqÅUU*¼»»f{;7ŒcÞ÷ºÝUUrÞxÆ1ï{­ÕUV®½ï{ëZÕjªªµpÕï{ßZÖ«UUU«€}cÆ5­kJ®¢c6µ­lç9Ê«˜˜3kZÜg9ÎUsãŽ8ÎsœåW11Ê:÷z¹ÄÇLL±…b¼&’µzT3MêþqaX$Ø]Ú–*½#Ìå4áÇø-’.: 1)ÏI°õj]\„ÁD’h+—ð Ñy…–Æfîv¶/oÓŠ¡ªIö„lûÁ·Ñß ƒ„g¦FÙï˜^³„⌜§Ì‡Cåîy8O¬@N‹`À”Ûð¨ÓïØsžÎÉê2 Â{kбš¼Ðþ2js°ºe“y%C×þÈfƒ.çÚI[Â×5ŠØk]´œS”|}€'“‹W ÚÞ't¦ç é&ƒÚl§%ê¤ñô7zæSŽ9\)ŽÕ¥JeÇU¯5 2¼f t”‰yô¦/[çïTúA@§‰J>…ų[s¼Å_E-à `E^ Yô[ŒŽˆ¦‹se”ì}}¿sH^Ðå4õŒß:²à‰`ź¨DÖï&Єæ½+Ýòç•ZˆuDx`Г½5€SCmŒÚó¯ g7 ¥G#²?Š.¯E7|¤ë§¹Æ‡§Dð4OpcÚŒßwòã…A TÚx¢+üÓÁŽn¶ü3ïS8Ïýè—A眙Ýì¸"Ö³iÉ…D¤J†i&†ªUònäÍ·0F3éIëIÆÇŠ»7WËøð÷$­³Œc ý8nÖÿ>&Áª-ÀÙãS4rzU\D³)yÈåå}µëÔã7thYl ëå~QõÕ‘ž^Óçžþ*€C­“kIÒFg@n—’ý½e÷[½’Ý0Yþ†²ÖzQÚŸ³p¡UÑ®<;¸s\›%3gv¡jðû=ÇÙN8. é¿â*^ÖÐc’±3,i\vïB¤G¯ ¤µÁ bM¤Á]¢ÔÖ¼¥/ýÏò²÷|ÜÓAIܳ5’üeP.ØœoZbÐDøyäDZáÆðòS¯Å( ÆÂÑ’«NXd1Ȧâ¦)+ÀÑÑØ©žß.P¬&{*:ýE¸PPw¸?j„&þ6±_“(EufrŽ––)î·þoÞß•¿´×-LÖ}½{?Þö¸å»e2© Þ€C I›™Ú[UíB¾3BÞ+…Sò®C£?ô¸åÓú©Ÿå*,/vЂ8=ûúü|¤Ö€ƒ­íZ*ž Ú ˜§ù*¶üùzqÇ2ùw ÿTñ`ýñœqU$EÏôˆÍDÌ1Î4/÷š˜ÓMÄz›É† ÕØFù'ª5ŽÓGð{=™hxJç-ã­~WA“¿V Ðò z#È cI VSÔ(=^)²cøfã–[ÏÍ,œnç7áÁñëSͧ#êô«Û:Ø7Ñ}ÀîÀ"À'Û­n}å‡Þ–Uºª<ÄÍÙ+í=¸”D%W}组RxMµædóißÔãOÍæ‘FqRúešOå³ãN?‰ÐÆÞ|- ‰PbäcÃO<²í­ˆÊÓ¼KY:G’–¤êË3X•µtV\‘ }(ï[óB³Åà°T9nsNRýEZæjíÂÒsñÍ[;¾M ¨ð¿CÒÒþXeuû-¡VÙ÷ή’{0c/GXgѵ¤QRºê×a“ƒ•énV¿s•®ž_3»`Špþ~—Ôr[9©ŸgÉ‚Y>0€Ô·SHmÚ>ýÛß`d¬JŽ)¡znÌÁÌ9-sü¦šWvŸtÿ Ú WƯŽõïñëðµ>T!™ØT&eS?HW}^€#ƒ25sNß]T YÙŽy†¸‰³U‚éÒæô0ËÂÑæ½˜ Q)|ÖèLÛń̺¸^i*òƒk¡¥G«„äý§uªUrö,àÖê\„)°[ ¯tíhQÆ«íª$êG.¥[)|aºƇ°‹÷ËÑñ<_Ý€l_¥YÚÙî¶C»*,á ‰Éœ]?Œ`šYì„÷á Û›8ºd½\Uú2©çösñ=j鈴1 ßÂiÆPðgØ8ÝbÛ† &ËY$bl>P‹}#ë­¦ È9í"☠r¶_mˆ©mwÄÞíµ4B«u,²ÇCÿ\J]àðó£„Nÿ½0!Kl ‘Ôöå!ÂÜ´5ù;,³rµÈi‹vŒ9ï'¼;cõc(egœNŸ_Ê&*E¥E‡ë®â…5èJºJ]U/XTlƒ¶PÙ…¥ÎIé=?Y7ËØ8åhÃÜóíŸÿ· MȈI‹Ø"AïC,èéªDѶ½"z¬ß˼Óʨ¬ã¢’ÒüK>®«f}R¶£lUuÏn«ãad7¢#Öò¾PnV¢ð;Ô¦J[ ®,Ë0vNô«‰Ká HÚz{+ÔѳʕzE¬peÚopÉ¢>ë˺NýfeVæÃ üeб¦oæ6<ÄTƒeE¢bÄ(y¥è1û?=0ɬÅûźðéøY5 £ù]‚ÆÂg=q6LÎqÿ˜Tw¤}¡ Ç_¾<œà›ã{|Ÿ=ÎùÄ ­d$c60iêz®/"ÍfdшRv…¦òÛ «g¡s¾ÇE_h±W‚ÄžKPǠʾ|„)3Î1B8±–ÈÝ”hÓ,¶œä)ýüo)¬§ñ®ÌV(:ö‡[&±Ùæ*Ã@æuOö rqœ"æ±ÉUíÀ©Ò{Õ(u÷'G¹76KÎLÕ4†n“=ߪ¸C¡oÄßq£gþƒ> Ò†ú¾f¡[lupšr yç¤=µ³ÐLà´Ö56—DÍ _ƒˆœı…?Yâ£ãMœ·£òñ«Ó´ÛŠl{dª8‡­ý6.N1· jØ‹ŽÌì,°Sö ;æ u‘pÚ=~¤$ÏqH–Í X "2€€)ïy>)ÔåAM1œÚ‹U©™DiJ‹Ú‰»þ¼‹¤PþÄ’P-Y‹,­º?x¹þÌCNÐ]ì¤cJ«|¯ÜÏ)€'EÀ]6扊$UWø¢ŠÕo™ÐвW¡$dCö ;É(¢ŒäŠ8°½ñ¬ã&C¡å¹çµ éfWùJà°¿gæ´g½ el›!ê¢ 3h¶ åYÒ·ÛÉMÉàý?pcëuI>I9s¤Ø»ºi— ÿß÷ö<¡ËL<ç2Ã]ùŸQrÛ£Êþª l[ÞÙ# K·3Q³îFM¯àö…ï[ú‡<µ¨ò ÖSudßQyl®,up­ÿt.€*7——¯Óë¿·ltËO"âE¾0øEg»-;F#%î?…s׺ÞÁbS7¡4é³Ç<àUC%¼ófÁÚX¡n-ÄÃÅ\úlR!žÒŸø”Y*,5cÞe,Y€n*ÖQÁpïl£~ K" èØ9¨¼Bà_Áä#ä<)Þ&žšEÆ<½¸ù~ÛäóšáÑH¿‹«¦-<¡ôÕ8ïí=úÆ}t<ž¼x¹›ªÉÖ…‚¬îÈ7,ÑÑq âb«ÕHŒ³[Ù8Ýj H]Ÿ:Ó£"§qˆ6W(Çtm_#gÅ›®V·¾ñ£ ¼ÕýÕj›wHÒé nþ °W)y¹¹ˆ¸ŽóŒ7NãÍOðráo·õ£%ù9n cx\IµˆKÖ›ßÿʯa RÜ{dph5ÝgSUiöèiûJ(~Š'ˆ\á•ÄË,#rIÐ.ô ûŒh¶QW»êþ¯I°-–gƒEÏŠæ(ryxËU@# gùÓÊz |~× 9Qé³Õjï“rµ„æc—µèË^›,èx( š˜ñ¼k¢g‰lÌü↫›'Ae!ãYk„xsðþ [3 
fdy¼ºõÝ/ëRËñ„Ä£rs=WÉͳLÃäõšq°2Òqfï¼f[Ä–D˜ÛÕxKÇ©¤?±¨ñÜà.4mãüx6;³Ö6çÚûxƒ±$˜šõÂ5Rf(µ€u™æŒ|Ä/¦´ìïHâc¶8kÀPlDãÞÅ Ý!_>×T†i¨ÒhQcVቑ{xà€3ºjöfà%ÛY6õ3mo¨ot ú‹&h†½õ•z•Wé#§í~ð±X³b09Xji­fÔD>‡sè hd]rN¬´˜å›ò7V‚F –éÞ®_Ç4¿Š¦ÿc¼Ö‚™úšÚCË8CÃ#{Cëx8ãi( <ßÖq%ÅÃâLšâߌ”sN´Wüwd¦Vû_b™ ]µE§òRÉ  ~_€úÃß<2‡Šëºc­g]yü±²ŠjÝh–;¥¡i`…˜¨#˜Ñܰp”­¦/9‰¶+6ëÐb;Âô^°uäšÃ„]Óaj^+¾§´²Æ°Ç+ÛÜÎRr#ÅçeõQ(¨ñåÙÍå³D±ÎQ£ü”0¡† \<™ÚªëP?Âlù]“D y,E ¹¨Tx¬§ Âs=6Ò ÈGÞ%Ùiø.Œ2 Sp\Ð.7/÷Š‹‹7KÇí„(Á#pG51æ O¿·¦·+IUs£„BìüÝoƒM¹£œÕÑHï›w5®)p,mÐCÙá³r¨@çÏ$Ø“¤ƒÕtÉŽ°öcH¹B@(*7 átpc +fðk5˜ÝïÇ^ãȺsØõIS:sJ¦IUecƒ´ÉU1Üs–›€p¨7¡üç ™Âí¹K;Ç0ýw“=E ׳ƶqÈf}ïQ1\k"Ä$Ú´õ Zæ!ÊE)õ ðÚ|Oeq|¬Ò´ùD.›Ì‹d^¬r¨SœùŸZm!Méó„ù†Ïîžé–Žxa{$ûHÀ.¾ª\©fwƇ¿Û­'w0v ¦äÄÀ–VôoYŸôÓzA“˜'øŸ™ÁÂ…!`I¿œLËúkÏGôÛykŒÿ7Å;*²žâéJÖöw&é©úãÓ¡ìþ¥›+)÷Ós£>j6Õ¡7ºF@Dž ŸAíp|Ñ\gÑV;‚:"¿ÓµIÖÙz'vG-jj¥o úƒ:°zž<íh c[𡉠¨á N“+RꯜÜ霑d:ê6LfÙªVñB³á+–×p§é÷éÞI6\³m¥¾ig7¢K­’ÈUA͇¦ÞøW¦¢GṀmE‰=Aª™P½.i’-ã`uÿ«Ta†rSŽo „7"8ѾH}¹èˆZ â$”ºª°Ë• kcÏB„¬¡ü&F²b/öäù+Ae‚ñDDî;Ñ£ûP^+= ­L§ Ù3-¿L.¾#­@óÇÈ#ý–KÿÐ ‡“3£EèFXžÇ.i¼»wýþñ‹Š }¢œT DÿJîZH¹É‰y0üÃì:ÌuÎë°°EnzþöEõ²þ|œrKq[‰\ð]3?êu￵Ë8Žèâíšþ<Î)ö€ƒ7M*ê9Oy.hOKÍVÏžvB3K”Y> j7ÆTâœ~F[l4!:¤O—¿}¡sIJ:›mvWo‡½ùn³MœbÓ Äç"³Ãæ1»L+ÖSkå5 ½R t&.O@»Â=9Ð,𳜃é¡Z¹L‹¶$§)ôÏ'C¬¥h¬c1…mÞ´T çaÈþmDzŸþñ—s©ÏСûPÍR™Ns8ÌëKà·IìÖø¢bÍNÿœ¥ˆö®PûÉÀDM!§À-pì´‹skÜ;‡}(‡{„æÁ~z°­âdgÛ¾ˆ¾Ý´$‡êf‰Kg¦£àÑ*Ô×Ëî½!PÃ05—:n1ÔEÜi{HÁ /ŽãÒ(}†™€^Þˆa wU¬¸ƒÜh ,…<‹ª•=êâ Chd÷½¿#OR9eµMÅ&˜kå´€S)c¼Ý¹“šr:døØO›9j}#æÅkS>»o3 ˆ›|©ŽÝ Ä_¹ÉÇÄc9˜DòäS?¡¡OüAž³ï —b^¢l×¢ÔýRͺnÍÉK]³ •=šã8\–øcR0¤e ´;êÞè«ûý.åw’PöANMü妪Îsc/0:]OIç v{¿{Èu@ ±)«ØOó±tÓr<\ŽúdŸ'Ò{¨ÂR"K,Ù?i&RA'ú ÌMž RÇjt¡”Ör¹< iÿ_ &ž9ÛsºnBã©‹¼‰(2Õ³îl˜Ú¸»©ÜÝc,Ù?*‘Ô+ÀÑå?¶'Ru>øT,ÒÕêîªò'ÕÉZöÏÜë"·hÝÀí ÁÃ] ”æîúý®ÏZ¢*äm !¬ëí°WŽ*ÔÈ´.sè·c‰ tE]”Jžâp®Ž*g3Š©ºà“’ðèº:‘ÊE€¦¿æ÷hóÈ«gqަ>àD®Ž†g}ŸH,ïQz`æs¢zK{uïTâÝïô›–篬oÔO6aw¡¾(ÿ…F+±—[uá1–Ý,4þ=½Òân:Æî QxöçÜb¤hâCÂm€ÇÃW íö9ªËÙälaº}¥É¯¢Mªíºöo¥ᛜU-hÚD˜òÃ7¤•ý­”wâ<8«‹öó¯ʃTÚüü”†èko±ÁN–°ì <®p€: ¸Úß`bØXäçÙäú+‡€/L”±úqÓlS®ã¨Õô`æÉ`d¾±A˜QK¶¾e¬ëRÛ*ÔÞzi*šžVNå*¡ƒy°UŒÖÊO¦Å©šÃ_D×:àë…îO÷W‡ùÎ+É>!¢\öÙNAOwçÀØÉd8õUivot³×.)W“ý`í²â^"¦%ŠŒïL •k`ðQ„Œ‡\ôüÃx ~’8Ý’ÈÞ²Î(zá7aZ¿Ý ÎØª¡µsc¨™LG^_Ó':&æíf&~ÜÖfÜUejòfOzŒ.Öƒ@èÆ,Ó4ÈišÜO†ÄÆê¯iþ¡I„O‹ìYgF‰nF¼ÿ>ÞÍÏû·ú6z9ÿ€'x^H¿2¦ ±èî–dñ(Fh— ×”Óhñ½?h„û8òÎg>+ oërï%¨»jÐ_×[µ¼L¿g<ØÎÄ‘„#h.€Ó1eèrÆ·qÏZ­o͇ÂߦmL?ÏL=Oز³G©2»l7¯ÀbêhewJù–(?HÒ%˜eßb.S|Wèøæ [ð€èE†ëXÇ Î4góaãE…õý¨ }ž>ÚDœ ;Ïñ’ ]@kq"é„%&·k¿LZ­èç3‹Î¸(³R¢RÅŒJ]º„o·€ü@ËË{‘Íâ39ïÚrÖE€f!ÇŽµÜð[òl{al~Áž½û÷¤–XSÀ¡Þ#­p'~¥¢ÿèÓ1\WƒKX—Ú4›Ô˜$Ç•>Ü?ñ£¼>Ñ‘<˜„:Ïpg‡äÕl+Ëâ¢-Žë°ŒJ½b⤎ñ3R¨­¹C„ÿiÕOl.ÀêïK‚r7É6µ&²”]Š×7çÌçaæÑ-T‰3ȸÁ(/œ&ꜽ-Rˆv7x½8‡hV*æhÖÚ̱ŽÖ]cv(¸¸(˜‡NÒa˜šWƒÂ\š¯˜° ¤¬MǤHý®MîI¬X³PÏ(ÂC'ðì×÷!Õ©ž•”§–Äq.dvÿäÆV2ZY ÖôRtó½ç&Éô÷†±Éª²X£|çu¹5‰sêk˜ †×'ÿøoŒýýº~ \û˜Ïqg9z¸Ã†ÔÈ,³¨GA,·ßM:rÆbîçaÊPÐL„eÓÄÌëݱºo!Íïßq¥½€)<.„Ï”g_úgOq£|øs{ͱò™Ùèl+Ñ;ÎÐ7¼²{ûù¨pLNv°?Ï øf<—¥rY´+ܘµíJ|¾¢ŒkUBõvœ[θÖSS?Ò˜f°+VGßêøì²0‘CdÆñi³ú_‚<¸F@ wŒÂIjˆç²y݉~uCýQeçEv–HÄå +øÆÂá ªfÂ7-YËõ¿§<&ZH ªôLoŠJÔZ{ü(YÐ-ºÁ¹¢ùç¸þk]«³M¶ („ý>ÆÐ5œ¯ àh ¶ÓÜË[Ôêì[’1 -¿‘´.á;´e!¤€’2’Üç®R,0TšXÉ“¸“ z›vm±ºÎ“,S œ²pÃ\å5IDÿ@ ÷—¥¾"pE¤°CŒ8#ø‹Hð·ño%ò‰ûÇîíäöÒ_Ìo_ &· óÕ6íÙ¼ÈBh(êýe-]¦zû†o× ‘pþ»ŒŒS¤°»kÓ6Mæ»:hb5ãE, *΃\!tÊçϨ kþ óæ0ãjÅ(Š0Zý:‰§7Ìæ÷ñdè ‰ ¡çõ'æž´ÑyÈ ¤÷ ?iŸÚ\0Âü(¿õAȱVfúï‡B»:Ûë`Jð†ñ6 ™ážΧ0í=ypœŠ‚Es=Š‚ô”¿iÚÑÀ¡oà.­á8.V•¾6Šé/ú‡»–é‰Z··,*Ž ˆ?æä¢Uö5ÌC¬n=2ÖNÔ,ŒÌL<þa=Â>Õ7ɨ ýrªÃ…ýeXèˆ ©_ò _M†¥æ£„›ÍÔÀgçué÷icâeçÔ‰íbÚMDg… ó®ÄÉjè&•À[~­+™\ü¿ÖŽe“ÃÖ :K‡ÂRíøLTS¼…Õa‹æ0°uä°5Û®!±(‡Tôu‰IÀÇîXÜ×!-=¦›OºÛËe6¹3n©¤jÝk“UèdÊÆ·ËÔúdèX ‘¨Í2R­üb®~J.óª&];®`ð@÷ËÙ §¼¥ã(i¿ùüg§ ‚÷§FŽ84ÊIQÈ©c(Úeù_Z»fm• UT „Tå)ö|–üÏr‘ŒŽA áƒD%XBÁè/ᨰãø.?˜÷6AŠhz«}ÎÈ’Í/‚¶Ì°©°µfò,À#’äu3½8ɂ󬸹ž'~~¿ûü8rŸaOˆ¡ =‘D›÷;H$Äd‡¥ÿ7Ø¡6ƒÄQF%´ëoÔÄ‚¿u-»jöo2‚^/l“„jcÔl—!ÊÔúg«Ee õ¸­+Øjøméàˆ‡­êzOOϼú»ÿòrØÚë±_lÑ%}:'nÐ)N3ök…Éë–É¢Õ|SâL~î à`ç­Éfvˆ e¦u@üt‘{-Š¥íyýža]4ÂÏ„‹ÖSAœw2ŒKï\'>sºÎAû˜ßÄŒ#Pâž=àwœ%6wðN ŠLšá ÏÒ´j}Ë-áñq¼=\1ý>›õD¹ðk 
Дs‡\9û†*dÕQž‘‰WÐÇe§Ìõpøðø\U5ì4óR2ÍÚ\öÓ— < õV½(M†òtÎÑŸ¿MlêgF0¼˜AŹ›‚Íí z5Êf]<ôÓÿ,àܾ.eÄL4v¼Í¯@Gñe,¡(ªz;7æn¡çÂþr[‹%Hl-ìø&°vïM§¾àæLn'yóbAáõÂs0.cûuc‡ü‚Ë””SVe4ºMö5X‘V,U*ØÇˆ×]tÖFÝ©ÓMT,‚—¨\\ÇÏì•Ç¡!×ÈÓ35„ˆmoTû²AýãÂ#†1ýšáÍéî÷|­†o^´–Ÿ.âhZ0´ÙäûF+Q¹%¢MÛû<ÒwA®TAêy½þ·´QCG ÔèɓᎣυF® ¾¥nÏ-`LiÎAÏ–úÖ³0ÎØ`g+†(­ì·Óå*èŸ`þ~t¤,ÝI_h?sz½ƒD&ýˆí«,¨%8Òî»ø×F¶›ê©™‘¯–½[Ôæú)O.Ta,ù‡s"O„l¡J´ÐìC<àx^\º´ Ðzÿñb`û»ÑRÛ˼­òIÔ%IT`pnçØå¤¯ç¼`T·ñM %îÔÌÉÛ@ð^TÉoP‚'Ô”ìÙtd†'ƺUù{;3B _¤°w1’ŸU·è)ýé<©HíÅá‚«´ÍÄûMD”65%ÃVb¯ŸòÁm#7&MÚ¬Óø°âó¦V&±J-“[žîÕsqæZ µæE¨Hã1žxv þ•¦Ãæ™[È_yÅ‘± Ü1I¦¥=„­¶nÆ üRsþy²7ä¼)}Ïh?Q…VÂÓrõ8‚õáØqBæ³qZ)Xüe(崂ܛƖ…ÃhÐNbóL¤ÿ]m+²ß1V››‹Â«óeàKDèÛAذ~FÆBYéVµŒä3'+ó;FûFéøÕ[lZÒ5#ðŸú œõçå¿gß½µOEC×e¼ìIørleöø„=‡Â¶µ%H·÷Ñ P”(äz=ñ±×"Î~HdpÔÁ#'Xå4Z~B¿‡nÏAí), žYsv­ ÀX*'ð»Ëx­5:ì “y¢Ö‘Ë×»±pîøQühäÕ k¨¼)§ù}ˆõÑæ´ˆÔhtoÇ1é…•IPw¸Q »Ý iÜßcZ²S ìˆëJ !8Z ,þFiŸØÜ׆þ°wé1ãWìê‹rdtƒ¾^0;I1þhÙóe+; ÁmâƒAàëâÁs<Èç“„ypûýí‡íÚ¨QpœéiÛŒW»êj÷>/¼ dÜõi76Ó€!b¤†N1Ö@µ}!·.r*ªÛß@³fÀÍôéÓ”œÙÓñF…Ien»‡|EÂòÕÙ#º»w¬ñtXÿÿ^}™MT}ô'wŸ8£‰Rd î&TØ 5 SoŒ½¡Š.=Œ½f]ÏI µ™;HÀj,Óa‡0¶7-m·Ø›ÈHoŽ¿óÈç6TK{´iÞe0}dâc“èÄ׺JcÌ»Ð9t¥cYŒN<Ðõæç_iœ)©ÔÁMuÊàt0ë†ÂÒùÏëÐN ѳ‡¶lL2=RXÃó€Ä+òº)$±ž,}Œ·Crb¾+©ñ ½‰t «à=9iA­ÆM¯“Õq¦Ggb6Ͼ> ÿ[¶¢ãÕ (9œ4wQ _ÚX¼•ÛaAœÖŒM7Dµûš9Ñv ¹[À8óš!âgÚ"DÖØÏ?µuŽò8:6ò« jfñ\¾ÄÏÍ·„•'7Ä‹zÖ¯9òxifÈdGU:‚;RTݸk¥™8‰+Ø„xÂ4ºÄ? ªáØÚx¯8±Q6±ºmò©Q<¹‰B*`ÛDÿ 9Hø‰²>Þn\Œ£"Ä‘R¡ CXš™N9nf+ZF_ëÝÔ.jéÁÆÛTkClyÖ]j%Ä{ãáôâ@_â )–Èxl!M`˜ÓGj§6!ç>pb#ìý÷²»³ÃϽè‰6}€Q(tÉÖ4ØJ‡ÒÛÏ*% .»3Š1™˜2vàGÁa›{*4:ïBZ‡TA©²ýËÝú°_â™–t7 Ã]5r–—`™ƒºŸ',ªÕSõ_[%rh¬ú[ɼrúÙsd Uý¢%ËÀÖ„ HÇÈ=z®[!"1N«zý§ýÆa7 L0Ø5<¥Nw»¢û•Ž OàQ!‰T4NÓôt¸Ë@ÀxÙúKÊ ÝÙ­É|$“—6Ñ`ĸc'͸¢Ò§Ðå¨ä;öÚˆšñ$öëcøfÔ´Õa¢8¹¥æVÛLu¶¶sõJ€µŠ 9-V%% (YúcÎk/{’‡o؉Tù¯Þ›Ta úT7£M{½³Qg¨~žòž‹òDÏAát€}ݭͺýN5G”z3§KŠŽ ¶ ¿tê´E >qiÃÕ£jûXO¨q`¬4êçæ±…ž ýçwÁV “IDj*¢6 óBqŠGÅœwõ=ÃG'Ã4ªBRxÆ+—v,ˆ ìèT-Tj×a,lÑð}²œ ©^fÑ\ý@ä l»Qö"»¾‰Mò€hç””N‹šO(½œ0á¾"Ž$Ÿ^=ЃÃfgK‰ƒ·mµ¸µ6ùì ¤jö!@h‘É´iõ;·íêo»+¶‹¬›³§üï ªLB†¬sJ‰ëþ~—¿\誙êÍËCjÀÙY÷…OÂ4»äŒ þ¥êv¥5ã>Ý}ìœÅœô/ÕV$E Іý@íìë+cš5ž(03q–Ë`RE¦ÜöóØ=F÷Ô=;ɡЋÂOêÛ¼ÛB§\ZñΨnÍ 4ÜË9ki(ÌùàúÓ¬7üÞS òú5?yÊ °-ßû2¹¯ Õ ÑôOˆ7/®KÖetqº'ÊiNiÁ "8Ž¥\—ÐC>ºµAi@VÔ™w ]W¹@«Rü#*f/ú¯R]$Sµº©¿ù5þè]ÊðÒò&Ö–WÛòp@Ã&8¨éˆ¯çöú˜(»Ó7Š1”  ưùðÄí ß‘h6_¤z¬™êª)+±‹F+?r{›ä|ÐRä'G+ñWº‘³üçBïv/<<$E òŠâL}]uy¤êÔ!)$•"™¹vy¢‘(Ù¼'ã¤ÈŸ`Àä 3†Þ«a¸ò e¹îøP`”"’ÇEh„h:z³×v,úݢɘ—?î‡rEEèÀ!L €!.23®›09È„¯šGŒCë:Z½Kßž$¤¼´œwêî|z-ÃI=!þ‡Ï:¥À7ðÀ HYìLg½5ê§ÆdðFW)ˆ£ç)ßΊ?ÄmIÉ@Oí2y7ÝoÉ/ÜÍy2ü®×_sºþŠJþâGiwõ°RÈc% @[ß§&Î/žÆ»drÑÈÉÃ÷±_áI‡›ñâu™ÐÉ‘Õà*£Hz9ö—2ðÀ³Ê÷¨à ÅÈ2ˆ¾VŽ‚r½'Ü6í5dª3LTû (Æ ~¦ú·*Ã~œ3BÐyctböžÇ¹£ ä90îg #ôA“‹1âî¼üI3ôZfôçþr×´Z;ö”1#Mêç'Š·sèËpÃoSLù ¶â´³ähê÷µö€Žq8cu)õY”PÇJ¬Q|8·º¤Pjœå[èì Y[V‹Wg2sáQË5Áª6Óå!]r„­«À)‚*SÌX7>Çê3[Æ4ÆR:±1…±}\ ŒtñÎÐÔN¸õ¹w2v W"Øÿ«·{6»'™TF ,_…~ ÜpMBn ½ºY U´‹6B ËÆ´AIuÅžøË6ÀJOQìY5]d·7 ÇM Cè"õošQê§¾¡zÖEUo678Úœ¸«…J)ÐW·Ì¦üœSÅl@³ï:T´Œo=D¨•Aí4ß—=¢ú*…ç`ÌS”¾~#H%Vþê ,ë7l¼Àè,k~Ó;äpPýiMý©6=‘Ñ{TLTaœä“Xûÿ*'®$U {)„Žd•Õöë4'ƒ“íž+o`K–ÖáߢCt¯RÉBÚýü7 $Lõádà—G}×1ûw´âæ,L ï9‘óŸ_'=³·­üÍ5=Zz¡hT÷æÔ †ÐN†mÓo[àkƒ€fw¬ÇŒòÊ^Ù†.SŒ&øG/½9ȧ57ãu…óf©/oîŠüY’…wBÄÖçJÚf¹~Ó±|ÇŒß$ë¤jÎÀ‡¢œZ¯·v“?±CçÿD]E³/){l!ÚyՄߪöî€L™G.Gþ§ËÓ»âïVj h³¤9Fͨµ‡ûÏìHO}£Á‹&m·{&n°  è%kåÁ…t‰±ñd€ºûW¯žÓb=›¤¢´óŸÝþ§ËìÕ-è¥aDÖYbÙ×Å1¬¨,Ñ)†ÏrLPw:H«nð1¿÷‹_ÈŒ@©˜—u•ýòêñ#5SÂǘÏÛʾ’ÞögÖ ­ÁJ{-87 N/@ïZíÒHµbÈé®õ ¤6*aÙ›6ìÚã–cÇJBÿ›ÿlºG0û%÷6 $¯Ì=¹•¾CëäHÓ|z’mmZN²ãòî÷¹¡„$ŒíUÞþSž¨¡& ÎY#õçàþýܪ%•/s@#ÝmÙMÈdXõ Û3òÏy¬Ñ?Öâ-XÕc’=£¥ª¾B¼L©kp}¿jT§ØŸÄTö:èºØê«¿û§4Ã~ãJÕ’Žô¦¹ñ $þÊ`³ù=ƒ¼!ª¥™YúÒ8†fV8´½X|q »á SP_ Œ·=óÞD@»\˜’¢X@×C¦íY—®Ô4¯õ°^ãÒ'6¸¬”ýUéˆ÷›ýy E Ê†v…md Ù+¨uÓ"Ò0 ¿?ÓAº8®Ë}š¹¶i†ì÷ÈfOqœ4ã/ûßVçì]?…6 Ú‹Á'Ä¡=w¾®,P¨ruœ)D?/È»MÏnŽ«£5ˆp°ëÎמ˜7ùŸ&¼r’Þí룴Vþš +UŸp£¿¢Ì$×`ÙtsMyšÊÛ˜S‘ª|ò|ŽÀ0—†€Õîü—MYzXö)6˜²MLØß)I4´ü4ɺ‡Ñœ,sHH¦‡¾äÓÐ]³26¡X“ÓÕ‡úõGj¬Jq$ñÆœc ð’'iõÈ¥;‹7Úø%w¯¾l¯F)üÄcŸN_—"XþplŽœ:íñÐÕꢾ7Q«Ô3PÄïñ×ø1´oP)¼´‰ªÂ¶õ[ÊX£¿“  ؽÑF:½uXM+RÙ?q¯“‘œîŽýœ5£n 
¢3ì’éôKc0èűKÖd?¬N¾Ç˜Ü*r¡jÃw÷”15Ð$ÔÐê=«42Û­?gw¹ê½3“{ã:ðÒ,tÑüËŽÕeè#JФ½­p "ìÞ—Ÿük O]â™è#š=¼¡aP]$,׈³é¸`‹XDh̾qþ ¬ìÈWJº…{¢=«€øK’bÏSè‚ðÝ#,M7¿ï&cÈŠ¦TÀ§Z‰ÆB±6Þ!Oë‹V§£ãW1#^‘Äæ´Uë°omÍKo*U’dŸãÛ&¼ÜäÕ#<˜üF„¨1åå=Wm:‡§Ç×”¾8ÃPì­TfEÓ@g)Òá3üÛw!¸$é6¿ÿ¾Ý×á$è4CÊÀ⌨’³ÛUQÎÁeÃåxzÏ•d‚­€ëÕ)Ù!J#5D"ë™kô¾VéÜð¬”"äŠh˜ˆÌzâÖê°7Ø‚ ²PÒF®‘}|Qì#~¹äHX ´d‹iB~ ¥0š$8 Ý_ÿÛ`Æ2Ä€SÖq¹cÙÏ–Œ‰%”B”õ° •”ÿ7Ò7|Ñ¿¹8 MV&%F-…'=a,ìÛ=cyšsˆÚG˜fÄp¡TRéX€É^Å®aï¦âpWº¡áêGq£¦såÊ8øíñ“(¡¨ÞàûøÚ°Çê= _‘äë½ Î-x]×fä#KÌ/ÞùãY]y] t6ÚÕ{*’ \ÅnÆ…ž‡Í;ï×'"€ø1*Þcú¾\Þà^voð™#IDyÚZppž"Sljé«? ÒÀu…©])&Å¥†Œv:ñçÆ–²²J  ê`æF8Ћ×ánûÛØ¶Jßf;x+áÞ£Ó 6ÄÏÇÍü “;–IŒŠ·kf“óTs?¸~>ê$-HÅ-²ñ‰:s% 1‚ª¬ ˆ¦ïuÕ?á çÖJ˜»u§»Eížá½b~Qû°€L6žÚS’…C*`FÄ ³ àhªãªÉ:û˜‚5æ &`Iz†’·,Uî¬}Pï\¿Œ4óÐI j6¢8¤ñ¸×UÎ=]4…P"{Ù®m!-Ô÷ʆT ËMà4}ܧP‘’F1Õt0P?3ä ¹8m>B¢ÉŠ/Q.¹Îš wpÖ½vpI-¾:\óIÉÝ“ºíïþâê3„"ûNÔ›Ü !]©ºƒá ›¶G4þKë-¾† ÀcD.PR®OZ®ÏžŸáîX ˜/€dï)¿ãY9$íоÜ4kŠXm‘}¨È'MÛäÔ¾<ijÜù×Üý/Œ[µp¾ßÞK8AØ[B¢Rú3,Ÿ„fÜ›—εÉ}2F>mý)CLÄ"i}F½&ùPK„BÀTgÿOà#n#Ê@-«ÊESß-?"R«Ra ¨Ð=ôfÉàâ^æðʺê6ƒËw%z>s{ö¹S[\r86ê¤e@­/‰ÀË )°Žœ4>"v|ß@Ý-fî=7asTì§žXnpÉjÙîŒ_Û^9k#ÖÜhl?ä4$ÏZLL3Úhwˆ'’‰ÿâ›ës® ‰Ñ×àʾüóüˆ+”¦! ´Í¥öFú$›.©-œ¦s]z]…ÜiÆï"¤žá^Â÷á—ÌúŸ|A§¯C›îs©›á(•?¿…Ç^ÝéFzû°Ò¦çjÍzÝ@-%õ2ÿ0S³EÀẎÔLi(®ÍV:‘U.?.þêdb7G MQ„“å‹"9âíƒÌ¤Ì. rY0ò§#}€ø¡ poNö畽¾îÍ&¶·²xWSzp¥Dª“¹ø6ÄBÒy)1æYs»+aÛ([tñÚÆz“s Q6¡ tžìÆZülhæGÿ)¼j$Ì¥<ë¡$ôÇã½”´2³efˆÁžoib'yR˜!ó ŽÙ ÌUz‹ÁX‚­êB¥(â}(§ÆÖZµbÏÁ¸é0RïÎ$?aƒø ï`äUšóm+ÞŒrvz\ÖhFù”¡\˜àÅJÛ鬸™P¸ Ìª!ŸÐ/iÈ>£ ªhÏ#Ü݉Ø,« ÿz0#Î ‹¸YÓÝ ÒÀ¦ú‘™¦aЬùgÓoA)Þó‡…†»„¡éh¾ùêÔÓþ§èâž5ùjŒ‹ì;¼þ§F D%Š{ácqb¢/%µ‚ Lõ)ÛG‚©ÃŒÊò™¾¹q,¼×}Â3TäÚá,áÍëK”³UF“Ìä ~/wÇJ2²<×¥ÜôƒzÆ«4)¬°}o{!E Ã…±×8©ò45ÕÄŒwá]d÷Z4ޤح˙tùž—éåÍ¡¦ñÊ—++~†ú”Ö=µ½9ëšò5ËÁv²ï“‹S²zýµjD÷¿\ ºy¤ó§-3~¼1i"¹g†2A[ç5?×ÊF~0Ȱ|ïâè :ëÊò£ˆÀvû²Í_æ çßp[U§lêÙ”÷0c¥ðEµµNR7TC/ÂC |&‰Ës˜| ,ØT%ù|ë§•ÊÒÐÿÛs! eY ¡ç)R(=+ôމôv÷ïA(ØE{@ÍJŠ4.9Q ä®DÞϘgkL¥Ë>†×q¾)SÖ§É(Óþ+Ñ(q}ùe5ݶÔd.|´HªC*Y0ÔHÎd÷ñ[®:mÅpuEtÅœ}®Îû…? ²EÑa‚U)"—~ÃtžÛŽ®säpsMí!ù†Ê¡¨Jˆ•¨§ä^*’½C€ÚO…b- PP#Zþ1ÅŠHÅOy!é¿£ò‡ÈкêP³Ï÷¤j/³DdÇT€ß&û­ˆ~n_€æœà›™VzŠÔÂJ|Bz+dï¨TUÜ—x]ýýp¤á×ûJ,s{ú·¿Wõ„Ð,¢UÝÚî´PÆB ëÆaÕÎzlOÄpÜYËí3‹2Prœ¶ÓÊëxdg…]õØq6ÎhÂØ+Ðã–9´ÝZ×a™M@–%<쑟gî‚FB9È'ãY%’ Í¢o#ãLàÔb$4n‹’š>WKj8 &œé]^SñÛ¶8¸iºX¨´þB6°˜|UïF¶¶ä Ñ/«[%\ýÎAÝØíoY£àäîÛ^OæÜÿ"Þü  ê: çdUÞi™~óz›ˆ‡dÜÊ]á%%ôtë*Ñ:c¶… $€]i8Ç‚¦Hïhé/óE¿Ài£?·¥,±xDÃ1°ä\¾Š=¢`âD¡…>%R‹¸ì`…gh§ü´ój/~`÷5Ê ¢í‹qÒŽdœ»e!f² INô˜ Tó²ålák!6ÓýÝ‘ ÆUU˜ ŽK&αªC|m (9®ÅàøL~‘d]w ”*9@*•í -¢¢£<®aZ6,‡ñE]tKÈÕÓ]Ï „Î\PfüšE)ÓÔJAβk4Ó_gi)º‰¦ ¶D(™²-6‚ÿè÷1.zöøÌì£"ÍPcå ;m-|‰¦çí ¼pðVCj¸fˆzdÞ}~c QpqR—T»ÙˆJ{dZà !‚¨2Õ,þ‹56¢ù ô6ྦྷöhnº™p€4Jx­˜)oAEvÚn:Õ™%yúi•ÌŸs³r†Ò ÖržK­WÃ|ëó1°¥ùáòƲbêp)iÐ;Ö•„éçg•¸´rkšâúÛ¿Ñ­Ä@“敲tôw6ÈÓ×1è~[&rGw4 ÑZ91BŽã$œÒßÚO zÑßà뜃Âè±·/ºû´˜¦ _@ùluüm ã;©17Oð@^¼ŒF^2ûk£Ë5a¨æ`ÖÃö®y>-åPÒW™Bqp‰ˆä¶*ý§¯m ;óÂý‹À‹‡(”ø¼D@Eæ˜ ä¢Ç«ÑÅX *ÆíýìÅqf‹^¾}—:(µ‹³F4ŸômÑŽ¬!bûw>‡ß°«hL:Ý­4§ \ ž[ÔKnKN0Í‘³îH_iþXÕÚÜ·¦ §÷êP,à•51f+O(N—9ò9¤ÊHâ…ˆ)Ru¥vÀrºãzr@Ö Qj|"ä¨É‹JÈÈ8“gÂv&¤9›+T7Ÿ¶ ¢˜#”¸Ãvùci7õù½>¢4þ{žÚ%¤¨Œ\;„{›¶.f@äfyø@;Kˆ®]ŽØ.#Êø5ZôÆYñõsoóÂd•3ǰ1â%,_LJûƒÕù±-…4bÂáݨý KhÐ!ºlHegGjZ‚#çSæ÷ÇeúU8U:åZ×_ˆnªª˜Óä+H:õjC‘ìCŠOˆ¤Æ-l·p£ :—S±ú¯‡i ¤=³lÀ¸Dô{pç[·²^žë ö ®: ­|ù{¯].ñËî"sŒõÀðËjh$’þ"¼aßV„ÞtJl+" ”U­jnÔØFÃ럮ót5Á}+úT`ºªÇzj »³Þ’3coáy‰ÆÕÖO›»£²ÂÈùªó ÊÃØ­f`Øp%ËÅ®§ºA­HÀŽ$1LZˆc«“ ÇùûÄ]§J”"æÇyk…Kˆñ)7la„‰xXÖNx]ª†¹ÌiiÎS­",ÒY.yƒQ–…gÑ@uK`¨á¯X& 0p÷M`Ž™° ÍQë"Œ²Ð=M¨,&±†ër¿©…H³±›Ü|µ¬c¯º2çÇCÖ/ÊçÊ×õϯ¦&À*±Whå…}s¤{øIoPMͽO[­¡ÝÍ\¬’^íï†6JÔ„¼@ΨÚÈÑ˹%ò’Á]§.åaõ뇪ÖÖí ! 
m¤m”nqXLœbm$œÏS ‹2YTV­Œ KÓ›‚÷øpatù ¹³>”É]Èí÷Gb›ª» “Ð©ó³ ­ÖŸ(ßí–L£zIgø,ÂÍhùJ—ýz,{¸ ó·€1û3JY5zXqd»ï–×Së`õW9Íj€g˜²Rºþ¦þn5vùÄÛ*Þwä—תɆ̕p7“ð1P [VlåXªš›ØÃ¡Ú‘S¾Ý‘ð]`M0wÆåºFuA‚!Æ 7ÜJ:Q[¡>ËwZEÓÅÒ5‹5Ô>v’¾¤çÕA‘Í ‰)åkÿL$¦µq+.'èÞîL‡g:ÝxñqÄi=µ^ɼ¨ÚaZæ}*ˆ0Ì#¡é<*0Ÿˆóa•dtҡ䜗ê{ålG¥“ø/’ÀT²›Ø:Q§o‹×s7i]þ…—½ÆMVB`x>ͼw+n–^ôçåòoïìã8¯˜weÕ «*1êä‘m­?†àÝG  GãËz²A+¢„ ‹0ØÎýÈ%À »8¥y¢rÍîõgjü½…a º9hù…OáU°l†0j¼ˆšÝ>Œdؠ㹩úHB‰.­x³]ëX”d¬:õ2©2âǯÀùTAT÷—nÜJK„¬Nð+Éá’s¿ §°ÿÇ·¿¿‹m¼³­µƉëèºPóʬŠ6ù‰—xp%¶ÖF«Lð¼=ê{{¹_/²ðü¶1~„g5HmzjdÇœ»¾EÉbŽ•¦‘C‚§ñè…ž°âUãÕî™Æë8¿FÐMÂÉàðï°S<ò“f`•Øxõ,é%ûeÔy½›Ÿñ†‡Ôí ”’0eS±¤Qî-k$J³EA®ýh¯®žŸÌ¸çP§oZH‰v'$\‡µüü[|ûiI;'2ƒüYehÂÖÒUùªTÔaL&J.ê<ú¯v\ïðAÄ \0Â~g«÷*ÕÕæÍ¥ÅÌÔ³ŒØ¼±+køæ³i¹üTºÆ\ðúˆ+ïL,å>°ã~ìG•ȼb/ãPÆ :C°oƒ¨§¬çŒÐǤ0ÂÄqâÍŠñÓ`XòbEÄÄJÎ+g¨ÿØŽ°oD´b§A\ß­T²´R-”Àú6%zTálMå(Ø7 oÍ`äz¨V(’šQ·¶ÃcydäɲÖ7µÄéš±VÁ.ѸU jdYieï¦/–»iï4H&Ç¿©ŠŠ|³Æ y™‹Ì7¹·­÷‡âWé &T-„t²ØÖâ°íäÎBÌ€hÝX7†8ûˆF¿z)ÛS–p)}ŽÇr+½w&7ÂNrÔ&îÁí,Êôàö–ÐæB Vsàò²¯÷L1ˆèõÂךôרR§Öþá6ZJrƒqAlFk—P{9ŠÓÄvǶ<Õ˜,XŽ Vš_ãþóìOúmÆqØ÷kðr‘aÅ1Ž€ÿœrrÛ²U‚Mäj‡[¢Gx)EÛåËñq#Dq‚DÝw¶FHŽýZû±MŒ ï&8 @täêË„¨ôôV‰ˆq DÔÌó§1<Á`·šØ–è€v`˜‰ÛÚ„–ñ5çãŒáÉ€f[s«Bli°—C@W†˜Ëö„*r^ÉrùÝÌÒ ¡>çÚÅŽu·š8ù¬àR¿Ãþ\9å¢+£p‚H½K~¢A>ÁugÿµƒŸ¨Õ«ßÿZ¿Ùüï+4ÃØ5`ž?õCìt-ÓQ§™XL¹$!ȇú‰:æ‰Òt6 •ÎöèB-;xS’ÑËéYCAYõ¥B•è[ ¤¨!…òÔ«gDl‹È¸ËûÕQo€ˆÙJø¼®ø>üvI6.m!NDÃÄ.[k‚èòïÃ=ä(eEDåM‹Ì›ÂF€”6§I8sRž;}ºâ}eú’4vË ÃÃ%æ´ŒÆÓ5‚[fmB†±“«5VÔŸMÕ¡ ‹ÕöÜ8'ï1bé#­‘¾YE Ä/òC¤‘±Îè§(WÎ…9†  u Ò#´ž}¸5´uâÄ ÖÂ*•¡µ_©ÿ‚Þ} ¦’»L/þ“àgn¶Ø-p%qe3Û #$$0;5o0%šð£CDcO;Êì¤Ðå¡r¢ý:äêìÜiJ³×…ŒK¤²EÈä›Òe¤¬!sùÜ:;€„†‘êÄ “…ÛU±­5ÇT»0Ø ·¬^»îŽTcÁ.uÁ o~7µ#Û(ðpPS²RVW%J§ØGÛ•]A´Œ‹˜ñç1 '&öªŠ'+C4eyz+¶‡ÃÍF O–äNw#ÈÛ]eQ·G·)wŦ[«ǯ•ÂÙ#øÊÊ´A{ùåÔÏ-§rl€:PÿŒ§eVõü¨údâ@Mwkí’ö~ Î 7/¡ö²' jý0Y–õ¥D1¤’=è<k EDnEðU&Ûû`Ú—Ëmh¦y´™&¸1Ù3gõ`. {¤ô’@•„•‡u÷$TY¸`±LÛîJ¤†—\BýSk‚Ü~*Jåþâ–zŠÖ僦›kz\'/tD1¶YÛ¾V(—sö£îÒ.-è…‘­ýy¶|gíÍÏ¡à#æ#W×19ê#¤¸½ö¹PBGi;d,V;N—°çȨòÏ‚rÛÛ‹N4TeÚ´sÏÿ|qâH'8P&RRŒp}tÊ-]ª# ôš"¥lE%‘¾Nu¸ Í"uôÛTKØI0¶~Â<½e¥ '›W£Ü"–\8ŸólkQ¨f¹'±¿ŒŸüø®)E†•tIñ@¬`;oDãq{Rf#âDûüO‹BÀ²ü)u°-6ÙÚÕ l•±#ØwÃqµì6ÈqÈr,у‘³1þ%ªG;¢ ×ôYj¦ÖVjq½oõkèbíhnÈ!OhÕ'>aˆRÈK“ÿRxJ󴽩E Ï[Rìø*—ë°LÓùB‚á•$#0Ä1} nü`q2°÷‚JÕaqtp”$‹×€M#ÞŒ,-/¨ù(ró€¿|ü¹?]i;.Í(¹ì¥£˜‚Km#áGòÇÑé¹ë‘4§jÔ½¬pžNjoq´(/a¤ûð…Ö<£ú5Z3‡C³½hELcÿ¨íÔôäcl½Óó$/çü“ˆºÓɽ ïTiµDƒÜ…«ÆŽÓmcØR;õ ŸœFª$˜mÔ‘nYÁŸw–È-¶¿œù†^9X'ÅÛðup"Iñs…è¢õü®P#”•cã­ h:‹€™z×t,ƒêÄœåšÅæÿâX¾íì§Øw7?äøKª Ê.NÍ圡º<“R•Û&±EP ,û+ÞÃÑË.LÙÄe#}y»öB‚^zÞgÎÛ6–´‘tJu ¥ÊC^vƒY ¯¶·ùÁNÍ"ÈG·Šß2Im2mº(X²nl'°,'Þ%®¹m× 6ÎIög=Á¸$®éÏœ/èÅ¥Ú˜ØPy"n”¿ãýb¿¿œÅ»ú©äQvë/ÿ›A{›àÂM’¹‹¸0ÿ¨ì `J úÚá[v^M-ì(¸{¾x2ÏHŸÖ…tGJöžjh®tïyÂ|Â.À –pÎI r#[2¦¹æÜìÝßIs½¿I½Ó®DxÀh€æîm‘‹øÏ(5ÇtY ¦?ª’ôWYÜqäS!kK`Þü‰Îv…bfŽ#…‰¾­NmÌ ‰BÃå#´·‘ëÛð¹Ù(*Kðv7)SÓ¢bc1ÔÌ£º¤¹ð;rCüËŽ÷$Þ‡(rœûé÷R =qJ™ŽV“¾(Ã:]~YÃ{TþÍ“<ÖÄN…3 7ƒ>îµ2eÔ–?Ää§—S¸‚QƒÌMæ+Á¯äÇÚ|ã}Áyκ­ƒ€Ä,K·BïõCCÙ±}ý¹Å¨®ûRŠ…ÅróMEµ1¤6îÄâjÖ²–:}ÔžH~fEûh™QÃV±×ž•OÙ¨ÑTîOòQ΀5 âv‰AûÌLªt\ P\æÌ>ÌÝd¯ð¦9 ?GÂo«2%K^VÞÀ]¡Íë`¬óú·;h« æùÖ×›‡ _h:!ÿhª"Šës.⻌@™-Ù,…1±¼ÓB s+“ ›{å -ÀÂÛNÀ×o8p7®åÀ“X$ˆ3ÿaó„¹ ÔNP åܪræ¡ò¸ðXº[lÄ: 3¾Va,A»’€ÀÀ>.Å!Á5!GƒØo]Zê†úÇ|½²zC6Ѓ7‚ü›óÒ¦ÓÄ‚¾½"¼¥lV"¾jÚmoØ  yÉL5zZØyÇç„/’®ÞWžW lCp}Û/ gÝtÞƒé€7Û3Q*í;ù¤%ÔÞnX¹&I_=@Ò Y]´ø/0ÍÎ{%Qt˜³‹_xÚfˆ»u–“z™ÐïË “¶Ð8© @åò…ìöed¼(ƒø‡^q÷ õIÓ› x qï2ð[ÙžÄå|>0nåÿ‰œ¾•ÀÙ¯™@篢ò°ìÂ^¹Ö‰ôáäƒl5 gŽÍ­à±ü„ƒùUç5 ྱ7frgV𥭆wû½ewÛßœ.zDü{§îE7¬-±’2Oø#Æv’¥}(% díÁ.7¦G0vDÔ3LAŸ9ªGlGR¢1ïŽÔÂûw¡ÉÉ?ÿû„HGì ÌT¸bs-|ú•Ÿ’`ÃDffᚆÂgó+sÕÉ'%Ue´þ,!†¢yÒ?"Év11-"ë0BÑùPÌ(×õA’òÚ2ÔÙö”8ˆ ÅF jFÿèɲåΰJëTØ(´ásعÁzh@Ý©Ñ`ð†öcPñEu@G™Æ-i[›ìU[½§I+$llËœ¾ ?3Px‹ßY!ÈFlĜǟãH<ü^§ŒÉõ¬ÚÛYË* `}S»­^dC_éîƒV”vÃø3!ƒ¨Ôû9úØ«SJbCº ˆßrXþË.Ø»¼Ë´ Ë«­8rÜ7PnýJ.7Н +±³{2%G ‡Žítòå³)׋C+ [`ºÚÂ`ÃÐršðúaÜ@Ë,î"¶1‹y‘øJ)ʟɆêµjK‘]W‹m4LJ^–MEí­àHâ{ùÀE^Dò,Žk ²’'É£CK>`óVºtšp¿Sò"‹bûîí÷£ Mnj‰-”K­œ%ÞKwÌ·FʃaùÎFÕïoü„Ê©X’Í ºpŒ(w?³‚‰¬)‘úR/°ÝÚË/I"}çniEß³ô·‡í§ëÖe褣k®ŽÖ2Óà8J)î — 0 ŸA‹U5q!Í2ávR±ÅºÒ>os%€·8%ú ´ÉÄ‚¾÷ë™iôÖוgJߤ‘,éJJHȵ?ͪ#Ž@¦ó>óŒTŸ$ f~À‚¾Àö?ƒMÙó!°_ õÕ¤ñÅçÄ{y¦~l ¤ xL; 
kernlab/data/spam.rda0000644000175100001440000032242414656670134014272 0ustar hornikusers
æ»E8#C·5M±‚‰Ñdä*¤S®3ucvÁDåO3#e–JN²r›«Š½ÉEbÙBÒÈCŠmþ:]¿ibˆîu)ö¹Þ‡%‚ÍN{LD¬Ö=D.S´g†Hà1‘ª@þAÏV Ç:!²b›Aü{²;æž2pèúK°®»îX0þiò,ÈÇÛi¯.“ šZÉugÌî” ’•.çQLYƒ€K6mèÐ}×rù·µVßÉQß<^y÷O¯.áuWcCtR*0RŠõ0*Õ£]TCöS ‡U¯ˆ¢h”i/`¦ÙxÎôVÅÁ0"Ä^y*…5Q/” DŽƮTŠ#Šõ8Þ5rCL˜ß@äÕ×\›…Ü,q¬'¼.Ñ{òGã5¸^ÚäušÄƒtL¥uÁp]Ê7]uÀ?ÇèBªb¤@Az>AEÙ+€ÃdBP”DP¦÷˜Ž—êoó<.®9ÿ¯¹õŸCàkŠþ¢Iÿ;õÌâÿT»Äh¿ü8Ùš¿8áý¿ÈëÒ`Yeƒ(Aoÿ>Ÿðÿ¼ ¸!a•ý3ûÂËM 6 EA  Ÿ°cyb|cð„›#"Ñ}"›„kÐ, , h@òêy­÷èJ£Z±œN6=î»ñœƒoñþƒÉGP¸ óß‹óû([Ñ™õUä>£ªQA?Ü”ËE9D°ÆÊ6n)èþwÒwd4VÎóÞûMûÆ^™”XßÕ3ŸÍÿÑB š´áç—øjk¼/ÁgÙÂwñØ€¿é]»³Èƒ È÷L½"Lé$þ/içV3ÿ¿ñ‘)Fö¬CéÃ:i‘~ì?×ø–ø“È ÎÿÅF—\¯€’£À}YGõ/~òâ;¡Öç&\芅Ð%ËàØ‰t}Èóc’:‡ íG^Š´f£÷+2 Ƈ€²lH¿ê^ZäS×è°ü*ok( ¦\OÏáÇò—·þêp·˜˜»"R‡ôŠð¨ß¹šœÿÑ:²UyÊÆÏ+rÈazÕ$ÿcâÉu¸Ìì£9òÞ”_HN%D/ÿó·Fù¤l$¹¾&” g\P'ûêÞÓÐ Y’ƒÿô¼wúBš Ëmñgì˜-Lÿölg!»×C ×ùLÿî!?6`ŸÜó]¯ ÄZÑžw±o³G|(áö~4?¬×ñT™ÈÑà—ô 5cþÏßÛ÷'Kó?˜y_½[>t÷–+Ÿ ŒÕ…ˆ_×±ŒÀqĈP¨,äå2nÚåÀk2·'ÅÍjf/@›frn®mï_¢Æ2ïSyå½~'Å©åÿËùå0ÿðµÿ[¨@ñ#yS&ÄùÓÜ4¯g~~àó˜ã¹ÿ"*8êd(À¹OsÈ$±—éÏò&yÜgk†›ì•g¤‰øH·/šƒ-fc4Öš®‡åÿã$Ç#C#¾¢Sí´;$%ÃÆ"%—8Zþ×<ÁôòÚc•×GüGÿ8ÈÛéu”ÈïT‹Õäô©?9LÛ±wÆ”¸BØUã'^û‹‚µˆÖI·-§3àÝoGuÿÞý›~‚¿TqIï… €ºBcà9ŒeÛ(i¤xQ²Yo­.ÐÉû)YN²ÑzBxã¹x«ßzowësÜ´¸ß=N+:<›Ò8è+QJiSÂ…“Tín+Ô˜Šoqá‘Åñ다™7¯DßqöМMb'ð9Û•däéû.n(,sªFæÊ˜uÅïÁ»k‚¤OaŒqŒÉ#$NМW€L=YxC³ñÉc[sˆæè¶‚:O¿&èsS S—4sÞÖl8¨ß0pD£çŸ‰¯#|o…º11Fäã9ø+–ØÅØáºqU€Š3ý]¢´±AÛÏçò çï#>W"2}<¾4AxðÃìË0G‡>Já7¨N*MëpØýÝ¡+}_ Ÿæ6¿Ù”á¿…'èc¾¹Ó§‰¨X~®½¥tì@Ø@ BVµ§»vr]¾I”³:BèÿõBD×NÚήK‰_ó\†‚óS’g;ª:$ÌÝfbdmÒçdñÌÏJÿ_„üàSÜ9%Š$óà6 ¢H#š`¢s"ˆ‰–0‡ÃP½FHDíØ„RŸ€ªï®P;=HÑ`MŽ0kË5 ? wYõ&ÄÔÁˆ¹»2Êi¿cyÇ)ù+³Ÿ›3¥"¾\õ5Š–Ž{\‹þÕäEÞ™F£z{“PþCs ß™5³[&åUí¨íÜS^aè¶Ž˜6ÆÚ¾ˆÈ·ŽŸìÊBœÝ§?Þ$g% Ávfì:½5¬’~̼wÃÓâô:¶{ê ¿sÇî8AeaÓ"ãá1îÌÖ‰nËWl?›Aµ–Þ™"ú Žgcî$úçY }–ÿI RH ÚbE·Ð‚Ò sÅ£’îˆ4&‡Ù¦“°R¢"põE0õ¸®Š.É'òyó«T\ fÙêZ“¹Ðgét«¼€}<·‰÷|¤HzxzN%Ë.DG$Þ‰ø”±z_éþ×]ï¿1ÖG Cü}>܉¿æ’Ä¡5I<ù+RA@s'Øü¡üŠÞ˜qƒ Äpš?è¾ùå—# Ãç0òtFçÛ„0-}ô´MOZ³%=býüQ/FIJê2Í4ù(’“!BwMäQ8 À‰„#¹([Xz«Ì›Éâ•ÆÁuŠ‚|?¤¼ü¹“¸û¥Ý.У}¾×¤nwƒýÑt9Uf¯’OÔa‘*ӳɺ_@²$oª¾Ê|£î’ãx‘€ì‘ÃËÑ~]ùesÉÿ›”áepó_ãtº)Åో'T¡?°$Γ^I¿5M¡Õ&ÉdüGV:ù÷r{þo¯‰·OkS?gŠ‹Òä)—*7 b„CÜ«hjZ­Ay)¥ÛÄ5^•;ö‘O‘Kâ7sV\0bõL:\d/'V|ÜÅ®6·NMenK›¯1éÚíA¯•Ús˜Ùg·¾×—´•Øè² ¿çj}£0½½G¥‘Ÿ¤mÏÛÎ\uz~>THxðx¬¾oD{š|ü,w½©Ya‰;­ü‹ºW=¹L»¿$¸ºzüΞò6Vïy¹ X»jIéç?6„hÏôëÏB ßÎÉSqÆåÉ‘.±²éàÑÛ½Óa+”?æÑÌ;½¡rÌ{ÑJ1ƒd¸¯†¢ýíòò‘2¥Î<ð.rîçÐ?ÉÞ&Ãäw‡“þÓ‚f’m¬H£ž5Þp0âKb‘º®ÈÑ4WÓ.lhÊf23'IMRµÈÄþwWŽÊé›+îTê1“à…ö¤ÁY¿/¸N¥ "üøUõ<¸Çùø‹ñ™[WBÍ4„¢J)ñˆT›ÜE«¶Œ„ä·\‚"íŸ9^#Iã‡æSÊia‡¿|Øx`ÑróÒli;=UÕê·Íös"e¾(¦¡T*4«Yièš[<÷7½b³ 7¬3îv‰LÛn⨼PØW+HÚ Ë•ÿ3íÎþ^ðð&ïÑ”=˜¿|­i’ Ö«èóxçHfío¸ÅH2 ‡»7ÑÌ^^µHýSÛïöÊ~¾DùÊð„IŒáÖæãå6ˆ0ÐþEÏæçÞAô ÞŽžÄ§¤òàjÓ×3N'´wñ#Æ ¥ö;7Ïã{pªãYäãæ1JûÅ^ôÛ»É nÑ÷K†c$¦âìð!…ջŋ÷üüù+¼°ý²väÁÙ(}Ÿ5ûYѺx«ø‡!¾]ShÇݳrÉÉÑjOŠr-ù3Ë+UIBù‹ôouŠfx³ý ‡âP•tÇA=²óX0FwÈù1c|t=2lsÚçæTóýn¿ŸžÔ±ó¶Ö.ÞDšKÒ9 ‰æÿĤËÑìs4¨{Ñ6ê~J¼‚²à¾™cöIeè|Ð'Õ¯ú𘗈?ÍË=†/#hS‹0÷ž4÷‡ÐâÞÈgG3´_ùvhÁ¯Â `ãÑ‹½Ç_ì÷WêÕè°1 ÚÆ ÐþvQ»W³û™”€Í–Ùà)¶ƒ/'šÎ Y6oLq´`Ÿ”¿Z-~%Q"” »~ªL‹½óo—æŽj—HEœß)É7©JÒ9eûNÙ£]ô³ ÿ=«Wž©¡7bÎ I=*Ú%¯ÜÙ7iQ¥‚ŽÔÙ j ØÂãØ`,´!Mª;ˆ†„ ¨‹$ёе†’þšœç͉-’WõØjýžúãM7/jÒ·Å!·è‘ÓM=œ>Ÿ4 Óbj^<4^ •@‰Bu×RÄ0ŠWϤFÎvmñÚ»MîK’Ü`´kÓŸšéÚ]8¾º¸cï’š|–%>åò†ôlûï×䯭d”Ö‡ÕdœÚ*­€ö—Ž_,±éH[tL «Ùxö~lj§þv¢îZzwü(u}syâû´ÿ~œ†ñü¿üjø>ÉÛ‚ùrýŒ­`šB?Óö¿Ç:µj¾74o+l ÷ñyrÛîš9Wð†!y•ŠpàîJ¸HÝCôÌM–?TXÛ=«øæH¢e'Šy‚€Jk}3Gdu”ˆòˆT È*zxjÇÛ³º2Ø«7žªbÚ‚¿y»z„‘SpǬÒ”"WtÜ?b7jÞÓ$’‘¬PÞÈ!c×8/ód¨-ØwŒçE\ …â÷=|/³e3G}Ü÷Ü?—0’uf”ÒH‘N”sc¤¿Ø¦\Ãã#‹íÉÁáSþßÚëÌö^zÉ õþÖµ®dnnÓÜêû³‰å ÏmHU­Å[`–OÞAÅát9âEkrùçZè¨ABÀ‹ˆoqeè¾wE"GH<”(ªr¶=kÁјÒ-úç?~(ÊÞ^n$ùI&$q²Í¿Máç`Ш¯SϼÛÜ€oÀj’_SåÒ@ŒT?­LÀÁŸŠy$ù¦Bs‹†@üŸ¯ç^°ÌùM(P|R,åõÖâ?ÐÿªjH0§&„ îDõvÁ-(š¤XžÅå|Ò#‚6É @P¤eú”HS:¼9°®ßT­uÚh:áNˉãEß×·RÛÁ»¦š0Œq‹<¥¥oJޝ,â’¸B %OR>â/‰8:+³ÐgÈùHe}¯Ø7£ù¹‡-D‘³€•gÛô>zè™e>'ήdA®”ž_¡¼áǧ¯$˜HUá¬âQ¬"Z•Üñ4ˆ»†$w†»Ðt<¸7ż'15ø×æxqä8¾n‘?¶B’ÖÃ:j;=W挟/ï71Ïn »D ¤8_pµ¼{Õô|\8Y°Ã½â9ˆOÚUUPàÖá}Äypt´ß$pøä$ï’Îyògà“Äå%äi"}òÊð¹¤óýA ꙈÇG»š„?åPç&4Bÿ5Kf- ±Ëô‡{ÿù$Ö»3ˆ¥øB{&«` ²–â ~‹X âUëSµYg 
¨í÷Äæ?òý#áSTüÓG[W©Š>R–8àëB@ç^/åAô®ùq11¦nƒ ¤/ù´ÞªÕ@~¦°ì¡_¦FÝUèZUØ"X|+É"ÞÊ3‚–«ê™þF¶ :Rúõa„ÁýÃi÷VTI-ŽöÓÞ¤ÖûÊrzZÀÌl4Ÿô°u‡<ØpÞrÜî•ïá‹éób]þ%ÆÈ~çóß'ôÄöx×Ö“¨II¤ö%É1ˆ`(䃆¾±Yo«±c‚l$‚ŠÐëïȲèBÅ…ÁVyŽh6†u„(Í®@(†–äùn¡bˆZ&'ZJºÁÕ«5M‚7wÖ‡¥¨Ô8¤!5þFßÑosøÉft¡µ3,²  HËÍ)Щ´v8Ý^Ëh÷¨qž¦vúßSÝk´„zc'ƒèHëÇ ±“„x<þ2:)#?Î>¦Aöü•üæ9>wëûüö%R`:F%;Ŧ=|d4&#3º›|ùI ëûìôvOÁó“Df±w­¿ÚQÛf¯¶,¡—+Â`MoÈ_)¶_ÕÍ‹-tÏuéh£}"I~°±öv¿qíªÉ4]'šsþßæ0/‰©“õ8ÛÞGŸ­¨ý/Ün¶¡¢ÍHUIû¶äáÐW$6íþUS EÐ6ÄÇ\Cl_æ³%ùëñ×1ψýþr{J¥¶ BtšÌQ¨MÊ'D‰¸¨Œ¿žÅúhò@”ζC¶û +7 :oÒ1ðFåX®ýtï#Õf¢þ*Š•æº†ì@“„~WŒRS¦–Z‰ßŽÇG<Çc¶÷™;û”È–ææ÷˜îö%ÙÛÉ%ã…ÿJf‡ñDýˆYOúJÃ)ïïÝ ï6„o“Õ¿!Ç!‡ÍxÚEܽŸw ¤õeƒðÙ¤£b‰7Ø” ø®µˆÊ5c¨1 ‘ ào %Ù™^cg`_Ë6ÿMÜ5'E £-¶H>Œ( îyi™êõ¸QŒ×`Rü\ÒümÄ“EåCÈ ~á j6+½ü^ïIp\TFì™bŠý—éI*ãòÒi`S©ç‘ÏCrNºõNo@gq.`Ç]ý×3Z Æ9#±ÁìUü?.JC‡£Í¤`ÐdŸ>±î5®ûn|±)"|^£z§'~¡c9¹uv2½¿-õÖ^Vᣲ}¢êáS_Ë|á¼¼MQö#»ŽÂ“ºûcLDÝzñµl*žÓ¿E9( Æë~L¿+5tð¿³÷ˆT|M2åäGojœ¾}Öh¨µ*Šäãqå4¸fÔ1ŒÈðëlþÉR|2I´~ƒë³¼fM’%ræ=6§Î_—5!xíÑï%eù¼¨I>µå–œlT /LÞta#p1V¿.%¦¿?õ²oŸ÷'ÅórÒÛ9qžEÛdº_AÄò¨°k4ä:Ó§ý>¿ í+koHË!”œ Z6Pj¨Gû)¼ Y9‹!´y§N‘\àP¸ˆ“9`ÏVÊI„édmcÎa°U{n°Ë!ú½ª¿}«³~_ƒRT‘ÌnÔd¨ˆ)=VÊK¾ÛÉéã+kHÚCè¤OuDõœá{}†½ÿ;¶¯i€wŠNÓëq%Ç>ÇŒ›Þ?'Úáõ; ãQʵHÁ yq}ôK«~Ïd¿‚` »ù1ž(÷OœzÃæáãþ¥üÝçòšÐáŠmâ_(leýŒÌ-O¾•†8£“™óJ•Ø n¥¹ÍÎv¾ ûØ¿n2EdmŽ“þœ}Ï )H÷Y'škÅkô÷†—êîœúmRŠClFéø ö;çÊ<€1íË=«¶R„¢?¨-›cŒáë³Xù§#%->fÔæ º™èÞ’mUq ‹p“Žy•±8Ì<¤¹Š¤ÄC E ü’ 5DF?†6(¾[X€Ž¿‚ä`ÍÃõ†¶ZÜž®DòD•è—akËíê‚‘ qoÑõè±p•UkÀd_až*ÔwcÆíA&%×ÓqêþÅ/è*~b5<#ñÿËö¿sü?Ë”Škƒ%Xv‚A®r”(±Âé`¢b)UÌŠôY(ù>NQ’‚wg‡Ëu ÏX!x&Ü%5åÑ\) ¥Ð–ÜÓRÏS0åÛï?®e¢ŽLWÜ*ñ ^;Æùü]@W;ò—Ëz³~0§ßùuw…"òsÇåzŠ íOÖÔöÔ?òÇì_;×®hDA‚c§ý„Áj ‘¸ß©èÿgÞ“!ùF¾5)z[Á1Àú3ºZ÷XpW“ì7ýö¢ÆåxÁˆ:IJ`Ÿf\h€Ô8+®µ{ì4‚Ú²èV_½»±Ogüí^© |çzW‰ KHîÔÿ¬„‹„bàþ—a½>>=˜ß²P¬‘ {ug¾Ë×nI?Õ*Ø´”¯ŽÂk~‘ÕGSÉÃôÝ_€ÏÛöüˆÓ:ßµäßå3-Àüü¦aꤱìù,#X¡ÕÊ0/‘¿ojä{˜ìŸ¼Iœ˜ÿP5µüÆ«{ .ÒúccÜ‘z¦7s’èb±¸þ¯¼Ï=vˆÖyš]£¥¬ÙßI­ø ,C{ ØÈ¢s[Ô-ËL÷è4¹û¥o/(VÐ.xÕ Á‡ožO²r®‚ùDFä¨)v¹ƒæ,ä“<ûw9ék —¿®¼¿2[ß; Q‚z§@諎$¬ì&êP˜ñòlIÿd•Tk’¾vÆM§0dr€€gùªãfr#pÞ]Ð֘╠]/PØ'L¥ìÒhÈ<æ¾”õ"§™É)ê=H¾è²!|K -ƒ_”¥…¿ÉJ2 #>dõB±>ŽuW™=Z w³F* ¨z'ÏÅ(.ãgNœÖPUôsó sjß—4-õÊNlƒNhžÜ· ¼Þkd„÷KЄ㨸—£zK+ùÏE-†—›øM”ïbc5Üeö”»HçÉ=€£0€×Â%™^òàÐF­óÃh¬ä(ŒXߔ؉ËbÜóâZ½nEiÊä\ú/ÐmTjŒ„èuâ’Öù´Ì’A(%³¸gË}Üië†ER(B,‚b7 虚jk¾èDyÌûZòü”͵>á–ò±8rö8ÙÈY÷Š×õçyPÞâû#âlÁCÒšðÂW¶’ÅÄ—×—/KŠ>lèB¢”OâØÞœ¼nÏŒÎaîøk£Ð”}~?z?¿’¬ØÕçZÎcÅ뼺™c…ÑŽs†@À:áÊ£ímÏÇñAE§{´@wIÔ8ÃónÐo2ÛþÎw{>*û§oÕ§,ÉÝ“Qø*x:z¹ÞWšf,š— LWõóþÖßÛµÙž k‘?®^-½Åœ,´KK“Mø\»LÑ?Ïi/`¬h)^§HÓ6bÍŠgç§4!¾Ó>•…7KVïßà‡ò½à´êT¿³‚ww×"ÉÜ^’gÌ=g©6™„ˆÌ¹ÅöK4àžIb~lù,Qáô™nëÿÀ«LnGþF§eùhƒ§·~˜KÐBÖÿg¾m²¤þ¿a¯õºåSe» e«öJ½ÆÇ¡?ñZ¸õ0Ðý\ÒúÊiîén&1p´ ,,ˆÇ`ÿD[ òÒÐ$Šëÿ(.?«:(5Ö§œ[¿Ö}{×´gMæ½ß©+Ùæ¶ÔÎŒ!ôE Õ t_Gë}Š|™)Ì3à t§P¶GƒÆUS}®©xqä}i'àøe”<¦“Þ4³1Õµö“¾›ª›¿Ö@5ê¶J† l?ÈX9dò}J]xüÜnKaï}ßu=/ÕŸ”÷= xkòº‰îSÌðW=ºÌèlŸ_,¯hw2|_¬‡='w›žÀ×#ÉÒõä€âØŒ¿ j ›Ò•q6ö+&O O!59À&{`¢Ôßð:…É9šàñâ?‘°R€²–¯‡r™5BHoÕ÷›â“ŽöÄ£^$ð@TÁMRù_ŸµãdƱÄ9}kÏ]iød °n9‚7Ý,E©;þÄw‰¥ýä³1ûNsÃ'dÀÏyðµý¯·;nç0•É{v©Ë‡1—Åâïiyiúñ6ÐO:Ôz0_Ô‘DÖùøí~?67Ùÿ0‘OáëPþ!ççÛ;¤p³Z=äAz†bÌ#3›Z¨†8”Vt&)Œ\D(`uÅËF*:gâñ»Pÿ.-xÿ Iµ •§‘’˜ MP,Gv•^žo’Ÿ³0y–†eR¦È ˆ !à)·ßEÃ7l€øÿÔ$ö¾ß(Á¿ѾÅ.«È[ÏúŸ”¶ûËÏõíî³Z®1ÛM_µ½ºv‘~çX‰Ï˜ð#/íúG»ŸèyÆòf»!6>o¬¾7&›FëS—„Ü/qšò¼ù§îôH^/Þ¥Ò/ÅCƒù‰…{@»¤³g ƒ;[Édc„wÈ]¯~YT•±hîIžû7 j0à ,K”óɊѯíO$¬€¼eÒ¹L-¥¦O"jïªÚÛfc2‰[ár—àŸ÷¦“¢ª/Ú›Ÿ¿S»ëqÅßK*v³-ê}c4RÐÐÞã#”FŸÉ60ð‘ Cze¯ˆaQ«I7ÙM¤œŠ*øçw-„/’éŠo‰öw^Ãzºj(i6j¿DNr”@昺J9@wþg*Fnþ’~;^‘YÓ©ÁÈ¥¸q^~½ækÞ†¬Ê–þGö¼¨¡n îöKöÆvß×Ð3_4Û‡W²õªÜ/Ç™™'PÐG‰®ìÉèHµ4£¨tU>×Îgs¨Æô6í#Eí£.þ òûVÕ‡ÄgŒ)’¥˜Ïqä¼q.ä†+2 ×Ê H€jØŠ/ûŸ'cV¦kð—Ì÷¿)œÇ•ü/jC=æ‹þCÖ6I{_ÄÉI`8²Fû„Ц@Ý“%ïÓ¹E·×÷Ê$YõÛTÏß7i!H*ÒšmÙ±/“ú<µØy‹½²G®’9A—JZ´þ#tÉ"ã?Y¤ÃT¥ÙjyôŽâöÔØü]=):XûßÀuªbB¸T€,/Æqœ]«ænì·]q¯;Òè²ó¼.É[-X|,&6‚uøÙÙ2kg×ÏU_y¶6Ò?Í9&{ß}œÔW®×‚‡z…ënGs…$…ŸŠûŒ¦7órÑx Ê7Ÿ›‡‰²æ^‰ƒ&´&dgIóWl±«Ø6ûy%\Nsß›ôï\ê?4Dw²ˆÏà§&8¹§]˜½Gã`‹ívä Ç›õN°áµP÷ý'Þw·(³Äcà1“Åž&ÞìYÈ}lÓ ,ûq=#e,­7‡äñ!†£˜’$% ‚,Œògé¤ì7J#`—`¯ÖYkEÔ2€‡¯â"2PCIÚ”à‡ØÒf{Xõ–¾ÛϮȯ#ªùר”Ä×NÔŸ¹îðPý_¨XWõÿÕ«Á>üÙì 
‘€%Ò=Óø¦øÆnoB)üÂ4ôȺɘ·ŒáÚ69Ê.±Ü·Hö;!¹Êõ'™®ülý÷—ÕKÿ<èÞóÒ?OÝcÈàñÜÅ-1™\îwOÞOßï®_4ž_µÈ‘M5^ê"V÷ÞÙ“ù½Ç ß›†¦ÄÎG6’/PùçaáÌ­Ó:¯O$˜Î™ lítÕË®W*4RâŒÒFÆÃmžø£'ˆ¹\N‹‡÷1%='g¢>þŠn•PÍ;IG¦,e™í/ÖšiT;/ÈÃÏÜÌÔø¬@™L„بá÷•zÝÿ 9>Àä¥o@Akºä6b‚g”è¯=­>Žã½Lz[¶'ƒÛšUªOànGþºäëò22èFÊúÊè*Þ¨À¯J5³1vá’ fœC˜ ÇÜôMÿC4Pã<³«°ôRžGi.¹"B¥÷ŒX¸‰Ÿüx@ÁxÀ 7îÍH&ífä›lì|ODé>°À7ÑSìé{þwþ§ô&Ïr¯+Ÿöõ†7µÀ({ûx=Ú|ÿ‡ÖÙ\>JâÀ˜:‘Â>ߢ}ç õÚwͽÂP´¾¡ÿXº©Þn 7žü}'¤Lg(D½˜ë4rç|†TYã0ð¡Ü@o9Qá)‡×îœÕ//z|Õk0·›|NR (¥žå<¼EùµÉØ@ÁÁzJ!ä> º“€)2˜âª]$@Ã<\÷ylÍ—{àÓqÜL†ž”xqz¸4A±ËýÏTõÂýOªHÏï:#Qør‡­‚þ¹p„Ù KâÃ'uñE:¥ NK3Ÿð¸&i"ÿLŒË=®àç7ùë$;›êˆ6ÅÙz½ð²Yñ59_ã âÕ˜sTëß‹WK"\ÿÚÛ' æyÛÚoðX_tH.k¥*þ²'òà˜ø¾\ÿ£ò…uîídlò4»Ì÷øß.ÌûÞ½*TQ;¼S}‚}@÷SZÈíÐ'¿UFŒ+¶@ãŸô?¶;2Рñn¶æ#Ð' W[„òD$ø½ûç#bÚš$È7¿{˜¨°ˆnVã>ÎßS«ÁokÉ@#Kj•ÿ2ï÷›c¨A ãoxEÙ" «!C ›êüoÑ„æØ ½Òã¹à«›˜¬ãñ‰zªU{lÔúÝ‚0«ì4•/UKg~H#àâTþ¦Ó×z½óÃß½b–}mÂÃOïüT Ònµ/¢ºª"Ø1ìɰnu,z7¼øø¸¤ ®ƒ¡øÀWc’ƫ壉ÉgÀ*«ÚŒCÜ|v4wÛy˜¿Œ^|Å~Å€B€{¿ƒ„ÅyVgÓ¿¾ƒx¼×-’Š$¯b¨ /´Q€÷ ÉLì2ÿÿj¼XÀÉÚÿ$WÞ½‰1RÝHCò7ojíK‰Æ·«ŸÓkãaeÙVëlÕÚ «¦NXbÎ=sú³™ßåÀX³ ævk bw'êÙ7xC]¼/O„uìvWQãDVÊ7®=ïÞ^¢Ç7‹íß½®~Yx<8Åð…î–,õDùĽÆÍs„ºûCL)½ª!ILmô´.1îJ@‰K½†Ø»T–×(à!;ÃÆ@ör¸„_f—•SŠ=Û••ìzÞ¢Üô[Öö+%ù#¯(ãò'Yo§ó˜°‹íFà‰€ì'óÅð›'Þ 0ò1ÂÒX¼Ü•|T|Žø¡G‡‰ìp5&‡÷Î^(q¬ß¬kjŒÑî!ßLjìNÒ«3TаïYp9`‰“>èÝîþI):޳7¿–ÒŠtðIÓô¸Ä=hI^ˆ”gмŠ×¨0Â}\¬jb—ú¡Úoï4>5¯¦Ã¸û ݲÉK;áV=(5§[;ëB—Gç£Ãõ>Ó/èýÎÕ&`Ý’„ëX³©"W™vyd—Ð'Oµ?È…ê³ÄcŸ07UÑ÷$Ÿ’*ŸíîqMß³Æù&Ûð¦³õ*de“Ÿ“œC‚θxãO4I†+\A*–Ŭã/—Ô)rÒÖYÝ3+ {¨8½G€h+=’f„a‚4³zu£ ¿(rp) ïc„žKmhô)"Æýzøð¾­×ðl¦‡¹û÷ÊÏfÜG´C›É·!Öô_ï}þ}˜ñ66"s7¦E—ñ’Ú“sÿŸ¿üBp¥%N¥Án‡°MÛOô³=š0Ú'ø*m™ï¼Í#<Ì£Ò ØrU’&‚HM\“ÍtJ ‘h‚(V‰¨+Ì<¦ƒ0A‘ó!l °;±³' ç<°~TN>l¹RŠpú>KF0 ï"~_’“†Ïw–n¶Õ¯zvìè(Ü™åáÜ?Ôò‘êÂñ5œÏê.TD‚~*„ž›ü>‘r3o[6¨ˆ2zî¶ “#qÑ̸Èo)}ôl¢<r”ÐŒúBË%|Öq( Éž\51v-ÅîHñ‡9!FýZÏßöŸPô C¨½¡„zÚÂŒûµ-5’IÈZ¬fÖ[³ö€THÓ ÑáWÔvLáIÜ6®¢Sï7ÑRàÛã0®ÿóü)AƪTéáGNÎ÷ —º¢¦–à/¾*[3eàèô]Eïi?¸gZH5±÷È·qô¨$ñtJ/Š2ëà›y©{ܹÐ^äø.ÿ2=núU&˜s€ ŒÒư"î.û\ÂŽ¬’æ£5xƒá1ÂÙi¾{6k9muQ§ÀµŸ‰ˆ[cƒFs‰}D,ÖÞ’ìΞÙß'uT 8Dà Þ¾ì¨e3KÝRx¼Q V%ï~謈9Ekk¼±Cß?µÖo†g|©Þ‰ülʱŠz³o¯›¥õŸY¯‹¿‰® égŸB˜Ñ:¶,×Åú%U?N¾­b+)s¯‡?'%K¤ŒdÀflcdL8L،ԉg¢±ðiðÝbJ3Ά*ÖŽ Jo7f–&­ Ì\ ÿb.Š¢ó (œ ãStú©«®Ù¶ ›ê˜PSÓôÅóo¥/21øL:R¦i Qª_ÚGBDˆóË ªI¬@Ò·úÐvÔÔ™¦0¢zÀ‚»“Ì’.<RîÝ×I 5 ®Æ¿‹3D~ŠÐÝ 4bŒ®~&ØÌŽ.Yzð”9ñˆvèëõÞŸµÕqà©ú›ÒÏ|»3yR«6¡ì¹ouÎûéüŸ{nÏÚ…Ìþñ âÝý^rl£8‘¦ŰÖÁ7ë‡É¡"ƒ¬*šËÕÌëãÊ(K¼ÎÂ@…Î #z<‚È<2s%ùzz^\¹H Æže³Ó°©²GïMû‘ÃT‚\fÒ1ì/azø–ÍÞǾ4÷KŒ}‹âÝ…ªXRÚUêÏ#ñ#zˆs,:~…æä»-hâ;P{>Êòö<ïúÕúÛþ[ajü™r[¹(©µ!ú$›]ý22-ó\¤[P÷é×t¸ÁRdõæ6!2ï‘ܤRËê\ñÈ!‡%]¨N¸Ž÷QA¬€h‹¼¦k1BÍažìñÊý]œ[6ÒÔ˯;ò.öÆFÿçï´…“†N‰qö÷ÿ[ñjÅíCÄkã–@Í£¿cÁõù”øÝtàãNæZ´xï5|cFàºF ÔºF$æ¼JN`áNSˆaaüæ¸hçMà¢úòï@ÞòiÐX'wy©0RËôœRTÄ?Þ¿¶¸6Yä—ŽžîkœË$¤˜~'Þ[1T˜ˆÝ¿ÚL¬ÂôäbÏ*>Å×û ù£Á®ÐK¹;‚†Ž[¿ÐZÐ#±Ì:‰0¼a1!Bï—´róêàmÌÃ"`Ã*S³åc«®ÞfþthŒùH€Ëêxfaqí{«Ñ†+aSZ@¡ɲpä`D¿ú›9 ÛAÏ·‰õå1C.Þ€‰kÖ‹;¹µ‰\m;ÈøÌг»³)9]±¸ö0xž>¬AÛ(Ȉ€B† w¨îHÀuP7öŠö;ÅÀ: xÁazç6‰¬ ýÁ™ w(2åÃ@~Œlaz.C3T¼ ²¡õØ.û¬B‡Õü2ÍÛ›ÛxÍSó÷ºI Ã«þ.+ ¼?t ºH+\/;·<äv-rådN».¿áq‰ñaÌÚd'8KŸ9Œ>7! 
Ò~¾(4Ø(—ŠrGƒcŒMhbŽ‚Þ¬¼•ÝøÁŸÜNGy)ÊX®/îŽ\~óRàûy—Ü®RÖ­mbB½¨y‘‹µùêh2bÊt“Ÿ1ÓDÛjÆÓ¨}£ ycCM¤£jTÎ…)—„¬šð`ìκé©X’Ø4 Õ ²‡« Íÿfĸêe¨g÷ ëVïÛ|ø’[l‡–Lå8ÓÓQ@uÝ׊Yu<¡Î#Ùâû~ðâËó§m9Äk'IOtøýs8‰sÇ‘0Ú%Ý`IÝr‚Ž|Á ‰¹\ òÀs\÷3î3‰ 4X‰ù«˜û-élšüã,®””|ç÷±ýÇÑÝß§õ71;é">Þ/Óá¡£úXW¤OŸ¶oÑ×áx¾±éÆGÇÖ§-Ý2zü:ò³£6‘cîå¼Ü€E—ŠÆNz´™«Œ’48úM‚޾Žá(Þ6‡~&Ü*\…µDü|¿r¦ B ÈÞ#?ÎôUž~­ö½F""ðæ^|Â5@{Áìý7c/[Ï´×9zý¯n»l· BãÏÝ×¬ÝÆÍBÒÜÿÑ8ÊFødŇ¼j¢¨8i4s©©´qˆñÂÞæeQÂ2•à–úz$^‡ÔA“> t¢Ffá‘„2RZ¢?>ÿÈÕUÉ\?¦Æ¿o_øh+u*¹$þëØÌfeª‰š5rÐQ”TÇv²pІFž@"”|(ØìÑÊÎ9u‚Jñ„á<C,qy·¾¶9ÝÄ5R)‘ÞÜcÞFÜ3Ø¥·X‚òÚ!JøOU²›f…Wp¯OÓ¿ë§ÞˆcþVæ—êGå;?ïT  ÌgÖ䙡ø:JG½ÎT0ë•ÑlÞÞ„£Êí±€Ò£’½ÉCÛ÷I6éÏÔ”8°íTòi°^1C´µÞ1¦rŠ ‰'²ï;¢ºÊ½;”ÞÂA]ŘwüKØ‹&†]{fy¸bÐôªõù|&xG³P?Áé|Ôb,sˆ˜ú½c† @£~¼\ìøl8Ä„KÀ~~|ú@t*<³LaºPRï“¢9†xÑ,¼Ù[D†Î»ô‰˜0æ¢v¡±ucךÌÐ%õ†ÜCù]N:p›ÝD(TžÕc—b–kN“ÐçÆ+øLsT?gøˆý_ã<ÃèÅÂQèÐëv9ÔRÇ+˜E~¤MíÉ÷l1Úô‰óN#3Ù/\ŸÎ­¤:ŠÄEnì{l⎢à›h*øšpÝR|­gÛ3‚‘LóáôŒ60x°°L$÷KÂ7OW¥vJkÍü—n–1Ð7„Vj/HÁbÒÍCHè¢.ŤDVpP.‘˜Ü* ~¦È• Õä½Å´ÙuýÞ´´½¥Éðø5!8Hƒk¾Üe¯ûŸ·ðýô—xå³ ÏV·>ã0.K×(:=±<™züEÎ$B˯×îoê¨O.ÐEvh<#÷|¨}®xÈNºðƒÔ f’-µ0ýå°Š|f ;êø4š‹ÈÊ“=èWt–„£OC#uä"ó7÷¯Oî¶í9»w>;ËÌ|∫õ9›rš uZïÖñ^uæu©ß„¸”&XÅÐ\AœCÂ$ »ì¯F³Òh¬8-ñ»Ë÷W/(â pz^åˆ@ ¢x˜³¾ðI+òJ§›Ðö&h—Æ?-—-À¦ -îØ*£Œý&þþHõÚbˆn®^q×ãÿ»Þå7ò²Ï1až¾íï„æM¯1xzõ¾þÈòtõ·êæÊ0}éÒ¬Pøƒ®DêfÃüóR` 6—ÙŠU`îÞ ¢SéÑ¥­ æ¯&xE„²ì¨Ë ƒZˆ€ë[çãÉÊÇOØ–rù#òÖ/Oèæ¨ô–ïó¹žy¹¤Vç$¿× í®Öõ9ßφƒö3 Î'½Ãz¶¾n^'qppOBHh2ƒ7QÌuUG¤¶à‡ÍŽ,Àp#„¢9‰x#¤€N;!ô‰ÚÏþ”b—XÒeÒFºMF†¹d°“µªÌõ¤‹ƒW¤Zí «‡#Çh4!|5ÎOZÇßQtS\²Vk—Qyˆ‹¶ÔÀþÐý6735G?¯·àqu)•ż{öÒÍ®Ïv­{TK×õœ¡oEA¨ï¼yË÷¸ P½¶åwRúµ&¹ 4`7LeÐ ¬ëìí¢-ó{±—¯·Æ¢!°à&Uu3NQN¼4-ƒrGž§Ysjóõ4`Èx¥ÈÍ_Ïùþóô#ï5õI2F¶~oSÄZw‰øtèTüçDê<ˆOÏý\{•Q"ižø’ÛÌDD»©zÞšN „zå ˆ¬úzæ@=4’bMë¯âï| K=Ë©XÅDv¦‡?~&áYw?4d0ôOÅ=zÕrÆÃIG©Ìãë;ÙKßûÂˤûÄ'¨¼ÌׇABÿCõÃ8¦¿;n¤´B<Èý×Öq,J¦'é²™?óÂÿ/ËÖ4Û ¢Ý$:Ö¥"j0•„5 :q*I Æ<]¨MÒÐŒ:’%‹²ÀœygKÔ <.“W’ÑØËÄœ(àÚVb(K‚Üb¸Òª‰»Q ÎG§Ãzþ·ëèz’!Ï×ù¹R¡ê|ø]Ïr_Á7mÍWµí4°gS½lÄ„ƒjÙBˆ†J\ÚY$hÄ`Y¼¾£ãT}Õñ5`½ö רm¸{ „¦>E€LˆI‘¥ÏbY¢-ªëÊhØÍBtÉteŽÛ6ù_ Ö|ßnDýOËÑØ?5j@hlºp9'4«[+4 ~çþÓ‰oüÅúÉ‹úzV6(kô»:éUGbJ¨Ô8h®3Ü…|ÄrNí±zóë–H¸ ”§% («)§òôv” ì5¸"b@ûŠJu“ó•¼ÞƒD¿Ò¢Â¤ôüf˜A 'D Ÿu¿Î^Pý¸fif¬Š3®ûýËüÂóÓ²ÈÇð‹µ{yàîkÛoKI7@{çèZT¡å"ñùÍlóÔxÀ|fÁ(T³3ä’@ÕU)=øÔ$‡œLͨVÉôºV¹kI‡Þ‘_ÆKn²w½ø8ž ”ðUÉYq˜Ýµ?-ÖÔœÿ“±»:ÿƒ“w«0¾ƒx§wíùú•|Aîx@hP¦ò!ê `‚6¢IWX7Lz€ùEǤBIEéøæmÊç¦/¨‘ÄÌ‹oš3Éìô½_[Ë—Ð`1òȈÈÓõOcëûåÏð’!Á'šMI†¢ŠDL¤@¿|Œ˜j—t«…É=gÜÒ¥ø`½0qÛr~úæì¢9ûZ­¯ÐÓj!¯Ð[b›³ÆÊ$× ÿ+ÈK\ Õö0H­Ã¯Ñ’¬á¨c›½-—#Eÿ¯ûcýFRÃ×Û,<¤ëâ]øu.ô«y5¶zãÑÛ#©mxþ”ÀÙa˜(ä¥=ð71In},UÏKtÈjLZFÂ3Ú¼zY–@Ð:â£"²¥.Ew%þŒS¾&Qô6s툯§)Û F_ãçˆßÉÉÉ×z¢ëãÝ¡óioíÇÙ?‘‰¸³àØCß ´ð¡°©@Z`\æ`_{H"” ×˜ØØÄBžIM¨ uÝjxž*Ø -I$x•ÖD:‹Jxï†T¼ çóM°!‚-Eá¤É4\Œ°ÂŸÎ’œÕ„‹¤;å¦+Õ"ôž,S0™ÕKM?âfñ„ŠoF³âì^j€ËK Õéó*‚WŸXÐE=q½Bnô£•M`Ò䨓nyrú)÷>§YŽ˜Éœ“$s—ÁÖ2D†õ!¿RNk¼Ìe',ñĪ:8²q€T¦}Ãêw$·½6vM“yz2×kîæµ;kÞ•Àk›¡‹ÖG©n»x·Úlø¶ .ò¤ïM*fèa]t§ùØûŒ‰»,I$rÎ*&˜¾Á¿÷Sòüó¡AÊD=3Ñ2V&xÜ,“ANhZ¶Ha5£°˜Pý‘|ù¬xÊK³Y,U,$£HíT¸GH0ú lÌ¥ˆêªxd#0ñ xÀ¦Â=ñ ¢Úú2ÙûÜv¹Ã&G¬û¿cÈËô±™©“ᇤšpó¶Q’_mÚOå°“d ½;cÈ É–é#J#ÈÆsB>sÙÐ3áC;?’ܺ·ŸN@©whöüY׵؊ӘÿJô‚Šˆ¯.s5ø“yX´%ó¤3ûsuÑõ:Bc}]´i•zÄJzî—#ôð{U¾nÀœ%]a¢zT0Õ¸0OaJTn–ÀÖÊnÑmbš™Í·9ò0’ï]ôøéî4Í‹ÔQÎR7kôÿ-ï©@† ˜€ŠãBÞÙzã™{õù+t•¤Ï'iþd´ÏÕÁ©˜Ó„Lý0*qj¨Çâ–͉g5«Qº:Ò~²X ”Œ¥$¶Y‘)'\H‘Œ4BBÇëøLØñ0tâ`¤qŸ\¥5W ‚¡¾†B¾Ëg8´ôyΚ«ékDQCzV¾¼%ü”˜ì}‡æ›âÕ—ãdþ^‘ËöTþ¦æƒSyŸ©j¥´L«ö ÓÑöž–]j(jßu‹mH²†-e«Ë‚JÅÊ…ÂFùÉR qˆ‘’†ŽŒq¢–°2»d\G|Ü)ŠÌ§¬ü(›%RÎÎØØ=é‰á†Ä«f~‹Ò×ì/òÄ+ ýÁ6}d_úŠP‹ÃpZ0éܼÖ~œ¢9̳ߖËK®ó²åÏeæ8Óˆµ‘b‹ZH Bm'¡ÍAô ¹põ›ÇŒSë$Ã[î\9z”™“ ¨bmVE%‘#¡ÈØo.•ÖAÐe(es2ï‹Ík°žFnqºgŸêÍf£ §· ã}Ju—ûܽÊxÒZuzdÆ,Ã.R²é®©U2À Â,á9ˆžŠû·€€v×(‰l€;8iDQ aELù“Fû¤LÝ㽕ÕUWÝ}?:ÜÜÞ×Is€uìwõXMEäΉƲð0R¢õˤ=7EòuÄBü“³Êkn¹~>ÃX¶à‰´Žð#/ïX¨0YâB[ºmZ_è"-(öY®J²øôtš±rx|Ôîrã€sC ï­¢n huëè%m¿ ~tè`Ø¥‡Ád Qæ„#Ÿ/„ÅùEÀÝ6жo\þÙ{]ˆ¹øÿyÛ²÷7ßSîòÉ\dI#»|hêÆC«þVkÁŒñøÐyå¹>'Ìõ ÞHê•v[à²+Ÿ;¦`éÁJMj|Jîuñ†I¾'©PøÂ‚;+8¤@†çüµMQlŒ%hãÚò|§­o•bW2¿òús›y-Yç¥[<ò8°€ÖM»öÆå ÂÒ þ#ͬø9éøÊÑpž¦˜@\=.n–Xx0ò´Ï3<½ á>™rÆó”‹óºMBù™ËÓ.íéã㩚ˀ3CâÝzƒFòÆ<â@DmØã;OÑ%Q ˆù$î8 
„D",ÞÅ+VÖJÚ„ !ôºT¶ÁMø;̃BÀ‡ôü$Žß#!­l|6 c‡^:mº 6cج4ɹü h?´rÉôSÞK›Æi¾Öø£¯Ø…öKB¤©±¯‘O~ÇÁÙ‡ÎbKD‡½®gÀ…¾_hY=V¡ „6è “)e û b®ïõÙú‘€´™Ò¶+µÆZ}‹ç üÝ–©ë)á /·•†Íà¼ÔðGßCRÁ³=Òü´¥“`xrt`_až£‰¡yxÁÛŽº <Œ>æ ÔTVÉB|°lVœœfÛ_`ùäáÉØgþ‰g éðÚ\« ü_Çõ³s¹'¨Ü!ÃsU½@U•žnàѽ÷¬þÇÜû:Þ>öm´G3 Ó‚&>ÍU¹uÌ)¸îMP|ØHY5ób%¾á#Š¡M­¨;™ßÓÙÔõØoÔYöX8aÊŠ_…Cµ÷Ф’¡šPeÜÅMy'óœ­qþ+ÁøºÙƒ×ì 5 €÷î߯‹¼ÑŽbRþêrÈY…Á"ÈÂݶ'§LÌ€ $açjÿÿEèˆ&¥Æ6 k-f §à‚}e<ú&„,’ôæ„xH¼á̸¸&GÄD{p²ÃóS’—¼=V§Bn+Á©Yà3(-0‚Ðoþ?‡ê³å¨ä—³èã—«iØþùÖÆ-<¸®ùÈPdcT06¦Ï,qˆ`¸rï-V®‘Þ#*öb¦‡H {±ºæh–ÝPGY7/æD2»±úôxÕ¼bÆ–»"`¦iÁ!l ûRwÆ•Òxš/ÐhL¢ ÎJ>·I@eèOBQD­ † b3’Áw1Šp%:0òŠZ†Œ ÌŸ¥(i ä\öž;UnxÜÝ#1¾7`ß[«ÆÛ¨€õ› ˜”$"º Ãç  ¬H4ª4H‹óû©^€‘“¬ª:.p›\éÙ…GL’'Ó1Ú@Ñ'9(½:xsÖ¼ØvåÝ /d× …MgÛÿÁÞWï‚U¹ŒšûèÃi‘Ö“JÔÈÕ\(ƒ˜0WÃ;ÂMˆÌâ°”V`_r•G ¡ÔŒZ¢zD4·<ÇOÐð¹ÈÜt`P‹‚¶ùU!ŽW•Ùµ+ô5ÇMœ­ÊrÅ4œ? “RŸD^–+‘ö O‘Àµ5LW, )s¨sc€¸6i¬£³˜h´†bˆvÐ9úO ˜ ±ÖãË?çLsÙzèÇj–ë1`A1Ñ:(GTËÐ'ò5Jo¨ÐWb”ò.êÑÊ,B‰ŸÀhE¡Ïc ,PðÏa^ÀÔ߉K|y›Ÿ2¢(ÍÝ×ÜF¼g°u.—\ÑÞ#™ZfîtæN^lEÿÖ"fÐuݲ„¸y{æÒ?ÝóîÝ•2sxFdD&ÓÚ卿äƒ0y“×®Å}ÍFî•ÃBTã¾jH8ƒ—›Ì‡i¡XgÜÂD0UÁB læpäÝPè0󱊒œÙ—ø»ú+cœjØ„B¶pѼ-šV ݽÙDe¥YíR0#©Åk¼©µÊŒB IgºH>糿åÌû%³ƒ1›õÒõl€¦Fÿµ [wûw³áÿž‹ª|À”;쟯ïúò;¿›Kž¤ÌqÝ4Ër¹<~w&J_#]D€%È|: qì¶É§)Rråè÷Jèè&_•Ò1%é|ôÆ Žw[gY¯ Õé®àÓ§ñ–´Ý‘NFÿ¦m]nkø†)EgÓ€Öúç†#¶ý3DƒvS^ðrè™Yøaè—:™= !Õ Têkž­)@Ú+®YÆryJF‡AxÉÚL7áö'îQäv§—KHáÅ&µgËÆ‹ù¨æíò~ÿ¸`õ°÷¸Ð°;5 ÿàˆÑdyü”¿&#8ØšÑkÙ'(ž¥½öf  zêq€Ð“Ç ü¨"E½çýÊÊ™8/éJ¿LU2LèËKhWž?g9€™†Éˆ‘ ò0SPi|D<€ŠÅÌ•0VT E"=#WØ[­Cî·øø}©ëi¢/G''åÇl<~Í]@tì’ˆåÕ1`˜½\ƒ…Cù¦j}aÊðCh׳v UºhdP€¼f"krNú´ˆ0éàœ´  Õ'E½¤°Ñ„ !y…Qü‘î–Á~BnjPWtÀ@û£Ì7#œ‹´&âHñçÈ Ãh@X­¥â`­D9êY%T4ÊboøÃg¥kÑíøí¹aÛòY¶ÒwÀÍ>Ü®’±•-w†ú"F“RÕdÿ*+,I$º‘ÿ_ é8Tô˜‘ô7üd r=Î;’iH·|гõ8[§(Śϯ𔿦*M‡ªý£ŽK`«'nb{*döŠA«–9iã@ •¯Ÿ^4´ð}½›¯séHÀ6g®MÊ@¹"B+=¦ÝDŒtì/'° O5{È"åÕ̺®*@ïe\žGFPm Ë®[ ¢ö¤5+\]q9"áRGáÈ?ýh½ÿ¡ðºÍ‚Q™ÞùÆa­¤7ù™QLD”ñgUD‚QeBšŠ®¹wŸÌI „d´9ŸÇRt zÐnYuÛ˜ íRO× &jUíz‚=5û?Gö>WÛš£ùÍ@ýAèÝ®þÛù÷æp€¿<‘pªç'(;ö™o&\" ø¾2’¡hðSíX#}ÄUͲٞ .ÃO†0€82¡ˆÝñrë¹}×4àÍçàÆþò>™E{úÅüÇAñ†e9$,ç ü$ØE 3j—$ìåKÔ¿¨9çáe¡Í×Á¨vO^)¾>Å“˜*Ï"ÄP/'Æ~4Ç)ïÃÊA¤vüâåzŸç]øF‚†€á…z­™!‚ f@±\ª ¨ª¦¥E5Zf#4B¨ÕÝ3Š3c`Åœýõcd’3\xLK¯ð±í×k"e¹}–e˜7½û­c†Åä>(ôþõ9h:{SO ž½pôK4ÜM)ˆfäêz{ÏsXó£T¹–ô/ „¹dÛÍ2C[Eá†;‘××^ÃOÉxþo?‡3õD'ÂHKŸµÚÁåRÖ¤¼Rj F€iŒòß2Xl¡ iÏ2m%NpžYõÒôʈ39§uÙÙ6úº*ë»㨠4÷ˆ œ—çÒ4ÃL×Åù‰‡$Š'é4>ŠumÛ–ž:øLhb@n6xÆO”¸08=9ôŠâ'P4ˆèXvíã×u˜ß—8 N$ñþ‘©Bù*G†M§ÍG3G{:<²3ˆÅ~uÕ˜.îÇŒLìöK¢‚WŽ)Ï´©«û(QmïZd²7yƒû5µZÖ®®´#ßN ߢ?cOžý8c>úä¼Ä̼¾ŸÚÎâ°©c³oÍéQ€ËÓ%& —;0æ9j2mŒUi§4W׉›Ÿ;{N¡‡lÁ\Ó#ºü  y/þzrñÿ¶-ïGøz)±„z­ƒËIµ—|ª‰r ó\= «Èp'âT…ÓÀó”/pÞ´P»ð€$ÏÆöЀû'ÙýSN¾øqN`§] ˆQ%\ Í\­äcTÓK59¼È]r¸€EPc€i¨¬:Ñ뵩dU˜­²’6¹=FÊgKq½?/ΰ«nâL¬Ý¤ÚµH."·Žçfxõ‡j6/VüM¤2c«{“DTº0š¥‡5[òE÷± Љ£yNê Ü΂ïØWìªÀˆ‹M¡Ô W:)âˆ@N:Í€kA‚ä{Xà°{nÚ\¢ P)¼=ï™É»‹âà>Æó’{ ŸfŠ|kR×­Yˆ%’0ÅÝ[šw‹\m%W‹ÏïÀ‚˜¦*Íhž iJGæfé½VåÖ Tô» CfËôš  óïÒ¢r¥2cÐ;©Ì(Ó)@sp²L]ˆ.UÌZÕ‹¤€0ÂJ¶~…ïÕÌðyNZÃc6ÁÙ¤ìq2ôŸ:Xc)4*2Î>Ù´¨]™0бîÀ®îë3õ=}<`AÈ–[7\𻍝mÙE5ÞLf§ÈÆ3Óƒ¼sêîÍ ŽGõ†¢ót:¤,BK:+ÒçÏêPcD÷ùÆfÐP Öݨg¼ &MG! ÃùžvåñóðÌ¤Ž“\ºV…5®R2ùÉe«Qôˆ &-roIvºÁk9b åžÇ=AŒ\ôpø=0")‡íÆ•zTÓWêz~¨ó2{;úyhË—uŽœ…·nô.º’–ˆÁÉJVAʹr"~èïp”XEØ»Èa? ²õ#¯SBøfIâÛøë¸?q*=gFÌ€F"œð4u@ÕíosÓa‹o4ü}¯æÿ:œy´'ï´Gn×jö¤‹AåC1^µC Ï͇ãÉæÊÅÝÞj¦uZw<§|(€ë£½Á>¼9åXÏíÄêSÈþÓº·`ÿã¤*ĸ.ªã‚¸(8´IÞ¢6ÐËnÔa&‚;)©g€B,X)œ‡ç±¥D½ “7).ÛÂ0K‚˜ìtDeð«ÊçêX[uæ¿™ÀòÆ®o$¼‰F¼D—_é0?ÛÎï7ùEÆ?†Æ­SÂëC8Ø/2á>¢Ë$FJŒëj\P¢µÂɨÎêÖ†kêŒ) „-}Pƒ Š…ÔhcØ_8ÁB çƒÇJ8‚¹ØÅ¥ª¼%EÖåÞûá¸.Ö›ÅøÛ³ÉWmÇâÛ‘¬d=Í‘ É+" ñl@JÔ‚6t‰ôÇ3ç²Ì@¹+ÃZ$%•@Ñi€èÂfÅžÓŸ"“ªÌœø"ycõ«Km©þߢÇKسE‡¯©cL£ª‰ñ€P"Œ T©ŸjÝ©-ªJÍ!TÚ¼Xñ–gjxè CFR¯¬Ž"dýÈf]}¶«Þ€æ6Œò®„v©³Î”·|€Ø‚–@’c{g T0Ú^©zt¬Ã»ñuXFïÞüÒn¤ãACG='maîeáGcr6¦ ™P`ßüÜ—KíóêÊŠxrr%‡»Z i§]×fLVngi îüø¡ í éxÄ…“©¾|X@7mÓÐë­Zˆ±5c%ÌJV… X HvȨ\)zÜîßœ‚÷|ÛUÜãòb#  Üc±\œG#F¤7ùäY\BitT½+ôM"œÎUOÛMH‰’ˆ=iò‘ HM!¸' U›AEAÔ¡q5\¿"¾}ÛgsðÖq°Æh@8ì|ɬµ5wqìcA% $›sEòÁˆ_Yfjþ¢€Ù.eÍê‘”Hâ¥B-00Á‘&‚ RŽyà [ðˆGI 2Z‰Þf˨? 
!ïëô=m%%¼l3:9C$)F:÷úÛÖ¿i€Ëû¥“Ê=¸UCš€Æ#ÜiüRÉ"=ŒË¶y÷—ãȾ*´Np!ÜOZ’hCT°¥Ôð ½!<æã&ê¹sWflfŠˆj‹B ö}ÛèÝþoÐ_KðAê-¬4b—FI©·$×l³æfÈiî‚Æ1)ÖuKíΞ²Kò ”õùBÄ¡|‚‚zvdNÖÓ^ÈD0jòªg¯$š0ðûßkz=ŽÃ|ÄQÈpÇÁ~^š¯îídÓ¨¢ÝEæ´u ” \.9ªľu—ËH,9.¹X¤(ŠÏÈc¼ Œ@¾|%tÿF*¨On=Š:BL$´rÌJ…*,¥eb˵TyŒ(^zæ_·D©§§!àf:)_O”Ã+ü„3‰¥Rš¯—Å©¥® ÏÈ„. ÅtoPög¿í¢Qù ï'Òev©Z[+ÏMɼép Û°üþòƒsæ@ÑÑ sšŽ%¹4¥”5Ø4Š'Ÿ*Ç;ž¨Ñ¡¾ÓöТ@C ùø£Øb¦%Èô¥pCËÿ®ÖêˆdÀòúØlA÷¶rŽ­¸ªÞànk=;ŵ‰õn‘v1Á¥RJ&r¨ÄÃT·Ñx^ú¤‡yxzÿ¥ò%‡OêŒ5Km3ÜŽ,°0WGi$üÝNR Û—ßq§'gsNÌ~gšlÝ(ÙH$2çHbê2ãÆüåš Vn³æÑ#DÝüׄà!A!¨$Ê ƒ¸ kôJBñÚµŸ ²¾A^`Q!ÝÚˆ_†¹¨\Ë8¡$mRmgÅ©ƒ‘ q+Z0WÖ‡ÏIˆ’-gùó«BW¬ò(gÜìP5ÁKˆ˜€Æ ØÃi_vrôÔt1¢„Å”S½¸I¿U¹Xb[ ª!jĉBZáé8!ôˆr¡Da¸Ú ÊÃÒ‰«“’Z Œ†xÁЕ6¥¬öØ„4œ:)Ð 4‚ˆõÔ;câÜξ\ù|Mrs b•§î§Á‚ ˆ>úDmK~'þ˜c’Z${ePÒ¸çÑ*1Ò³XDJQ]¶ªL:j$BÞ‚‡,wÉl Gaãæå³%õšé22ìÝĤLJXôw[Á“8¸ šù½ˆD6Ëj˜æñQÔ%»o"—ÃÏKe­õ¸å¯ÁnçÆþ1oG"À(Ä"ñŽ23q®Î§†ÚÞ²© —"¤ù¤Þ¼ZÉYËÛLq›ï¸XE‰,¼íx ½É¢çó\—­ÇÏ39SM©=,ý‚ò?Ô‡þ4÷¯×w– ×y˜(*ZÊúëG߆løƒõüÝ—ÚlEñ!yÁ wbòðÈ\È'‰=ƒãèJÐ&tÞo€û2ðˆlW£“f qˆ‚óÓæÅk¯ý¯ÂÕ­âƒQtºÀÅÆŒìóV·ùHÈ&;&qÝ2ˆ¬ÅDJ\œbÃcù”lÛìàäýçHv ò ô⊜^lŒ³Ûžº„†Í)1A^Q ¹ äÏS¦iK•CB»/뱈ø¯áËŽ–é{¤*ÆîQsðRÛy1䨔ºÌ€èâ–>^ ±[ÔÓOo”ãyvGÄC݇ŠPã[Ù“Êä4/pÖkèì)ËI'äôÇüpbw eiÕ;̃ ¡m¨÷]ÁÎȃÀdŽ®8Z*ßþÞ>öÙ‡FljqÉW£RQÄ4^ã¼oJמ¾ÊÃb¦ÀBÖ)Ú!]aß¹ey1(¹>Méq]w¦/¡ì—ji_WÜ.„ïÊÌ K ¿YëÓ2U¢ „¡ª†õ›Ý]Ê8ôG™ÝE4ýe}áŸÃø† èNV-X ׃/¬ù4g8lt0޹˜ ñý-GÔÀ^È^>/worÇ‚}sŽÆUÆi-ÜÕðÊr{õž ƒu•à`ä4¸"i‰Òáìšb4Î*°˜ê¢ €È“ŒÐZ@ϘR ¬`IX†oÝÃú¥ø-4Ì×geR<+:¡½ƒì‚ºÙÝÕ¹²œüÈÖHû¼7˜oTòAk‘{Åa}Ù±ÚvÔ 5p”Ј€f\@ÿh/.X”»]ÙcëT‡££¥=°X¶ù<ÜêPi‹`‹¦`~$q ;èë&P(¿ïcÕ<†®¼ÒÙfQe}^—§õæËU‰õŠ}ä H iœSŠ“M/X²š<ßõÓÛÖåÀB¼{@ .h}@Æ,æwq}Rà )ÏzXÚh£ŒS+  3 —!Ü1@°½Óµj~_s7ÞIµÛÂY¥\Òp†ZLó’‚ç†Yä&Q@IA$H’‡”EÒD}»oþû^;ªãÄþPF{QP1Ñ<ˆÙୄР“ ˆ V]à[â k’Z§öfâ¨Ø™„]§ï¸F‘¤läšõÖL>e(‚õŠPEß.¢M.þOwsÿ¢…{ -;ÜÞôkðäM½`~¦í$¹ÞQœ9¸*~ÌT…¥‹åéS%eÏ÷>ߘ¯JùŸg;ç©L¡ {/Ѐÿ¼úæ| æÀÚÇ1 á ;ìñPjB~T{IÖ£Ñ{lyÈåÿoÏdâßôÝG y¿%༆f¼¼wäñý¾a¾•§ Ë,A[3ô[÷ßàm¿êù³qýv˜ f€Õùý™<æröåÜ ˜†`C;…JÆ1âôÿViþ·~u¡Ît·¾’¯÷ýÖv<ùú8ØÞÏØêçÝÝ9!À<‘Zf¹èmö¬« Võ/g”>X0Å`˜0Ë!xþî5.@†¥ú¨ÖKyJ„CŸ«ïû”|zU30ù»xèŒÿϤh7lèOôbKwìHù2Ï#ó÷0†€,V?FÓZõ è³ëãïPjé4t²À…iÚðáŧÅKzü ÄûÞ§”ätsÝ>ß¼ç©l]ï‰2:~|à/E™`ÝêM¿Ô@g(<‘xÍÃ#'E׎Ë<ÜÞOwãË#¿ïb}ùÚéî~ ÕëâhÅG£Ëõ¿2Çïwí>0³eA@Ýûª#Q‡Ýôm Î?¤î•CôÑtKá뽈êaÇ}‹CÀ°ÜöŒ³2P¡AÒô}ï“SŒ9¾Á qÇK³ËËTçñKÙŒ¶Šx•ÃÑÇá‚–²ˆª­/% ¯aØÂ‚ðPnu„ðØsa#‡j1ZV·þ»9'§ùlg¥™ÅP|t'­F½ÙsD‚¯?a„5œæÙ²bðXCëjü_†Óë㓹l¼¿?ÀïýÖm¿¼æ û8o$VgNú»^!‹< ²?–‡pÇDwûG{øÍX=ļÁŠ|lTþÔà«AÃ}êxÐÔÐRqa!)± r£Öo«íýêäQÄ`èg²ÛÞkMy®ÉGð¯Þ†ò?b -cbI霡f>u³cï} &Ѿ.«“l5aùµ™šzþC³k¤¤qí±0á>m3´|}Ý\^l&z·cU·V ‹ Iø{‡­¢‘ÈËäA¦YEz)›lê¬p”š,˜ì>Zl[+¢ÁZ)£1½¦!әϸ˜eÚ–Ç'$K6´ ÖŒtô“&ƒá-ÊÓ"ë JÒ}ö(°²°û7»’ VCÅn©rPd„jÈ,GxÙÊÖ*gC&]Ú}§I)Õ3ôl 7“È ¥úaüü}õÙõ[es×læ'Å£¶%ŠBi­¼€33 l¢É ŒëeFÛa+•àc«Ò;ù4´'©»˜‹Ô⦱¢äp„ =iøå ¹ÙÕnž%c±j:c[ÝöfÁ .¥à2ÂÚÝ–®ÌÅ^á¿O^‘ðj¯$gdŒEŠJ6'ì|šï%y‰áå6·õGw SgøŠ2ÛÎ(™f°bøÍO/ :á¾8 JÂlˆ(Hn × €îÀ{BÃÈü¸ ,æÜ‘MüÅøðm·Øvœ ³oͪ¨'ãgšÍeäÿƒ”ÇCáêUâÔ`#7´pl‚jÕÇÆvÙú"Ù„ùHÀ¹ùNaŬƒƒ ÂܧÏÍÈÍYæ]oD¥>ƒÕWÁ±¶‰f£3n'¢“JrRö\\ùgî­=îúÙä0mG®ÃYuæ8Pw‡Í»J}Ö$ þ¯Ë÷Jku¬?´ éóS±gªúYÝwª}v½ˆi©C‚Væ÷Iv=-ëWú¿¿«úXêàêÖß«ywp‹:˜S 0Eê[%¥Ìs_äÈ1pq/Ô.X~òrü‰¢‡4hhƒœÓ2å#²ÒéÐ9tÛ\cyv<Ýç³CóÓü|êt[b»;·ÍžXòÄ’´‹¸>rá—<‚ †ˆƵÕîÕ¦îzæykÕpß“3ä¯,ú.‘À„Ž• \ER³‘ÈQ(/„_>‘°4Fî)™ÌPŽ »ØÃž_V2{ú. 
ü&˜ò¶ X/¡¨¥C² c 8^ÙpÈ6•m­Ys5]fqïT¤¼d»d¿.žÄ[}ÎçwüeèWëJ7έ äÉ´( ·Ì|úú[ÍLt±náÍR6Füm©²’|!+5ÇÂ1!g¢”MíR>³ªïnÙÛÍÔòߘìÁíh—¡•ý1 >%¹Øsß$u%ŒZˆaMª·çvx<¥yJañÁ<–@]<1SH&É1޲è2†ò‚ûià +•ÛÙX&Ð=&ƒ]£A[ Ožfò·¥ÉËÓÄ¥íPùcÐæniŠñTQ 3QP=iÊ‘>°cœ2JŒ6Aì § 9H=–è›3M\:q^Øh1–Õà½æu2a´®aN=>i–<Óÿ>5Dǃ±Èˤ5£M-@Êii:KW©Oo´3ŸUðŸÎÂó¦6`IŠˆÏÙdÆeµá“ÏÔ7¸êz°2e´þÁz_[A^UÖ½ªÊ³)zàòµºX2*sue¶ŠBR l ŠïÍSH)–ÐЃEŸoLÍÊðÃþÇlå<ÿ¡‚O/,"$ùߨÐN9…íæ_3-“Ò ÕiJíb¶\ ¥Æ‡–V§ßD?N7­ºìO‘èp™úÝBOpÇ^ŒÈNs¨¢™Ð–0Ÿ¯œÆ^£¹ #‘²j‡h'y¨¥o3 ¾„‰Xø}2²hH§ýò´fGË¡~º¥®@i³91Ö=G—V|v°@A1¤¯9𛑤½ÆlÊǧÞêvÌ „ º[¬H¬(RÍL{<¤Þ”ÓW¾»‡pàBD'@Já °%cWj‚‘Õu!\.ºÀŠ­é7k׫õþÇ7CîÆ[ÎÌ'?[ÎÞfŒšvû³í+–(Þ¹ÔLÒÿkÜ^Ú3Z¨Ñ²Q«8˜©*«‡0’†Q«ãÔîúŒwµïk8ÍhÕʧR.ž´˜ßXŽ»4uÏ]ƨwÂx˜À§ŠPWKŸò…fŸçe.uѵfV{Ç#2¦ªBJÔæçinšr Pnj‹;"“Ga.ˆÇùàˆÙÞ÷§ä?ï£æEÉí‘ä¬8ÕâFZId®L‘XìJÜ £§YA‡«ñ]ÎÚ|KCÙh^~jÑslwq³' G‘ùyŠúû{d$U6ñd>rÛ qË@³™Ÿ9Õñ–ÌçYA§ ÃêáPj§jäBôï_EÏ¡ø^ŠDÈ=/Wði]1ÛY8˜u„V†¯ Ù“sŠv NV «Ÿ%dƒî5BCE°xº=Ìwè\£_q%ŠÑ‰×,’Zhö£ÌB€Ÿ®ì„Ÿ*ŠæýrÍ15;Šâ!Ä KÁT.Aydžß|ÿÊD]ó_æk>Æî"6݃¯­Ý½½¸6­‚qymJ‡˜ìN²VŠ&Ó!ì#íî<ÄhnLãÜ$çŽ6Á ,ˆJÂÑF\ÏœÎ`˜áµ}QçüY³ó¤0Æž,ɱöSo¢Á¢$¢´H…¸é"ôv(`ð.éÌÿ )õ»Ž¨…”›èœ„r0\ÓÀ|êI(­5'QÜ_ï†Õz·¥N´!=¯¢Ëðe~iEêt8ù»åÍã®lˆÏוÜHäkR1áÇžÏ8¡·ë|™kíÌ;TvÑ;Qž…| NT¬á@‚+6ÀL¿§ÈgíÈ{CŒÕ¿%*ød-&MJŒuÚº&#jdÌ\€mŒl±%<²ØQÊpÞ ^Úùn0½{%n\’s¶¼•òzŸ]U­ìb¡tcI²NF#ðػ˥ñ7‹6@Ž`’Ñvùà˃ÃG,â`EÖLWœ,´Ó³QU£‚‡“¸ÇÂì¾.ãØ·ðÌÿ Àÿ/÷õ°6èÁiÝÂÆ˜ê™ù©#›>ôoàÜ/‡ ¹RˆûBPkÚ2÷´i5eŒN¼àç—ügW„­‘òÿ™úxõ¼œ°ÑæzŒ 埧pæÒhÁÀÔï/#CD|vE0vZÀù•&›x:cÝôÞ»/IܪCgȺ£±a ¨-G,Ãb©Ü|¹é™º“Í­OJ£nx<ò¾òü$åe8X–Hƒqíp±×*ãF8¬“0$OˆP: ²QB¯Ž ¹@9€¥(T6úO—ÉÜ8Ïf½~\XjžD ì† ¹bY ¶ÅîV^DC_hº<è)¶+Ù¾VT©Šñ …ìN¤±Ü‡™$ ¶œ×æA3ÎÖ($“bCø)£tE>ÚiÌ$%,ßðõ f¦x·€.çç+zœØvÚ†¯Fôì‰좤º÷Wš¾S2fŠ ƒUFkÕ²TÏúbl´ZrSfÇa%òb %â÷?eÞ-Ξp]—Eèi•æ±4«ÐmXW_iÛnç5P[W'…æyØ­Pëxð}¯Ì^}ù~6°ÇY™>þ)÷MmtLD1j‘ÚùÝ{KãŽñ$gëÔ¿Âû¹‚d¡•RñR×á£ìbÓ…—ä6ì™8ë­W Šg›‹Þãâqk+N3TË‘©'yB“Ë}‚l»«èŸ“eóØ•2k‰#ãaï³÷s÷žÞõQÞYÛÁͶ ¶¶|ÎÓÑÍWTüGò꬇‹aݹ äÕW[ÆŠæ2qºšRÚ6Ó³ï,Çr}—±$t’Ä:#‰íÉ]´0ËKàx yÒž©) z^ßÜñyŸ/»¹ñíô©HÙ§À>Ïщñz¾ïÞ“N˜Ñ›…V &®báVÙÚ5‹Œ§1»U`Oíÿ/ŵÖ_Tq¶ŒHbF„q+Òv¼´…‚#Ô­ ÖK§“y/ÍŒBžkÇz]Î>ÏJë6‡[¯¿ß¹ïJ„òcûiÁë&Y¨Ô+ÔŽhÕ©dº Žã ˆÄ/ÚªwnZù¨3,*Â$2g `A·Ñý—¼PÆg&i™‘+)RLÍò·¥Ïu d2UñTÙµ‡1^yüpbáBÎò >¢¢”æ<',ÉW¸Š³V5j{gª‡g˜àÕ ,šp‚âH$ˆ¬R)GæH¤Ñ³júSYÑ‚E衸+«Mn› ËãàW#P÷}ÀìHvŒ–-èLܶ‚&@þذF–ï6iï;¾Þ.“¤Î’°»\d*ÆbòZ ,‹â¹n’…%°²`ûù­ñO\¾SÙÊœ„£(fŽŒò‚±SbÛKÊ…3C-þ!3yúýxëòýÉmù0=Ð<ŒRÛÐ’]æU©±>X£³}ˆhGéÞRû;çZ6[ì1,ØÍîê›Ã±Ó@Ë›N½/5ÂoÕâ˜&‰¹’A‘B+8óë’ºýkL®ÌVj´×Q¤ÍÔ¢Øa ˆI äŒa=Y¤ón¨7&UŠñ™=t½Œfß!Wé…"˜‹²—e}Õ]^÷2» W ú½p•qy¸ÅÎ:·ãӡнØËîãú¿W¿äJáâÉ8ƒeÈ2¼4íþ{F–J ¦CäÅ.Úb½¾5¹ë ïìnÛ3øÏPP &,–\‡$ìOYdÎ%‚üy,îdÆ”î5›VÀ³d #mµÓ,e:‚Μ¨TÁ_¸Âë°ÜÅåi(e1—ö%[?NæãŒÎ«l7ÞoØ}·Ø.—ó ™ëá|~êw§áÖLúŽ\2ál«W!°Ìzü¥Ï`ˆ–5χ[Q·g)ÍÐØ(Þ>)÷¹…Ö¤@‰ ¬Aþ9NžˆòÌP&=ªXñ!õR¼sÐ6/Žl…‰A£z¯BËù›tª':ñ‹õmÛÂæjð>Μ©êrg{± wÊÈØ+œ“Ï­ér÷ë;èfyŽÜV®Þélíèb –òq±U‡Ûnò´7¨Æ6LRG›èxyÑÆ¢©|Ð&œ „g”˜BQ(ˆ†ì³±úï¼þ†fnb±½Èõr}:Ü õ § ýÍ™¹mT5c­÷÷Æ;ÃÈ.:·®é—ögÈ6¢¾^ô8=b)ŠinuŽyÉrBBo‡g[a\Aqûž?Ÿoq>/mr<{ÕÅ}=A—AÖÓÝ$Ö±·öØb^àW òIÚË$\k!‹|m 8ƒCnfŬ .Õ*yöP'*V @î)Ru"Ú]¡±tµJpÚ!{ˆ¡ñ˜`€ü,™oµo¼ÆdŸÃ’ÒµRW„c™D,²ø@I¡¨vbi&J Ü™6§7ê39>êýŠŽÄØ›ðhËTIjm”Kd1b9Ïà˜E­ÝfTªyˆ¯#n®Ç¢Ñ|™z7GU-9 øQÑæÖ‘pù²wŒQx'»‡DˆÄÕ!8^±ôö}Kø|†%`»w#%¸Ê–Œét<®æJ ®K¬@ãø¤Ç¡A1C¾e·wçXÜ17ÛEñ|Ç …T—Î ¿®þº(IF™Œ*¦Fù`vêÍ‘Ÿ¡£µI§+Fôœ½¨ÄÈ®\ˆ±å!¨Ö$‹,”m²æP6†èf8mÉ”YÐ’9-‚>] ÛÞžÄ; ý¶¼YQä¾Ô€”Y¦M Ã÷zE‹_&K{óC¡ëµ‘‹»XM[1ÈÈ@Ë|…-Éy™»d˜s¤ B ¤Ç?cÔÎW~óK:3#„pDáÕõOùPÌãOQÂíæqÚ“Ûø“H"©ËvŽÿÌœäé Gèî¡mÁá<†‰ÀGoÉf¨31tžÞŸ›; ¿g“íø¹·sÝyêË29hÍŸ”X2—,x•¬Áþmžþl €[f$«˜Í•êèâ™O<Ppè”'®y¤X›3“æÎpœvbí?´Ã<¡—÷äzÊ— Ð`+ ÖÊ kbé˜1ä7=vÊ6 UT”¤%Bø8 ‘IŽ-›H o‹Ô›Ð#³‰§Ö~õüd¯"Á»ëü”\˜ën –žu-4M´ÙÎ3èiíãši!ø,t-H·×—q¬´’W¥´Ù'#tAÿØ&[È'wªÓø³8®?¶QGÈÖA˜¹Ü'¤üî~7º]9;¿'aøÍÎvìtý 缈B&J4%¥¯À™—“‹Þò³po4Ûúž­·#òdT1#ÆÖÐH«éI] Ȧg[=ó.3>GC#uCÏw… Œ_ˆòŸÆxK3 †?N\pfzÔ±'ß÷ØšŠ4âß“>Èa¦2éypFölÛ†;ó„«²Õ/4WC­2þò­šÑAˆ›nV;¼íG0yXØèyÌw%ð}Óf0@—¼ð7 d¡zdùQÒ.ßòþ³a¶ò³8æ3\ý£6^Ž×Éþ8×, j¥›ÔWªdã“uħÇåÞñÉÚCW’ߢ2ãëΗmËÓ²t'/·ù¾×•ýêþìCþ_§HR¸úó'ñ¡è ×è\>áÎ?D>æ.cÿjä—ꡘ5))[d‘_ðKþ é-´þþÖ-_ð™õ 
†9-ÆIð)îÿ•‡ÖqíÏgñ†Ÿvd¦m^ÿþjq¦­ár›‡úþ±œ­ï( bÚ—¡Ï“›ÿz7S·ˆ’ésÄ3âݨWŸ®½cœL•ÿûlcØXYÙ3qˆ+}ú®ÚØÝúkpÓß?ŸfÒŠïiÕû_¬k^ÎRàÿ œ1N ‹LCs0g!÷¢sF2Ý‚›ôj±Y+ýuï>Tá;ѪóEm†Ÿ§„èlþž…ò¹÷Jþ‹÷~Êv9óÏ=ýÕT6c.Ž.Ì\‰·½¦c¸yzº ÛÂVÝ—tr}8ê>,§»=z'÷Q¿™*ö‹b—<ÞŽ63=˜éËÃþ(3£»œ¶!Ñ1÷u?§¥%:2\gŸi{š€ÅuŽÁÛJO.l¯oT‰ifÛ¸. ¾;ÀÝ— ïòkÊáÍIbCNº&GÞ‘aCº÷6ù] oxëÝãçºüø2(.j5Ñ(Q^á’}#DLòþ:¿ ÀHRþÓM›­·êïüù˜¿OE€ NKy­Çì¿Ù8JöwW$+2A•wñe²a´L*Ä-ljÔ¤€CrÒ41i"&Âé+Ø›;ÌDóõ¸^ zÑ«‰Cîþôù¹ÒÆTÝŒ«ù*ºA_°F~‰›t?­¯S>‰¾Sý²YOÝ#!,û([Þ\ô¯{Z¬rH+tÀKí‘IçµgB$LÝ ­™ÂÆtËç$œ†KuÑôLÄÛbŸÐuÆïVîÜÙHŸ>6¶r´7ö ¯ÒíÐ7² ×Ú iÉÙS|1p,ë]9«ŸÊÈä;.3úül.ñ¼®ÕÍó¯Q€×cj]i¶R@5´³Ú90†­¾‰ öê͵¶;°7éüÞ¯[„i\–¢æ‹Û2nÌ‚t²auÚÞ÷¼äbyýßÊ`.Þ§$Ô§dÿ¦W!0s¤’jã#mÁýÔYCBÑ}„0:La.mÑ)ß]÷³¢¥`lJóZ®Ž{ë§jÓlÌîmrô_jtXqGksZKQ×0þPÙ¸ü2)‰Uˆò'±@ïÁZµö:)À¢¦ |®ÎQ#5ßëþµŽ6.î¿áѽžÛŸNòHyô ëbãÕŒ¢•InZ íaѦl—¹¦ÊUQ•cøë–ÖT¸ÏïM?¶EmÆç †ÍzôÒµCÙR?\–[îZóÜzS!°!'Ž×?S™”¾f€GȵTEá KK,ÅôµŽfîÄüw1Þ°Á¶H­Ç,¾Lâ_ôò<Ä™ñ—Ôs@î=¬DìKÉüü·háÑÑ©@áÝß½3{&âÙ(îÝð—p®fÓDöI5²þ¹z~ÎÿAZ›á~DZƒß•’"õÑ,Z]Žä«5½>>„ý˜kÊ ô6¤žU]&êI²Î®?NŽðÉjv|v0=?5ˆdÏðjNfw>deže/(¾3RCfªÞ¡÷3å{<ù¬Þ”Þ‘Ú|dxÍìâÚñ>jëPˆžFLƒÅ§1;òiBF½Æ]L?NÕƒæøßŸµ©õQKÛâdH’¹{Öÿc'»’îˆ„Ž |·ó % a‚ãÝaò¥€&™qŽH 4ù8OÈ^~\5ó<„@sE©\ÈÉs¯àõ¬œ1ÅÕ\}µ€hO³aŒªµ&–»Õ%’f1ÁÞe¢†”™Å<­]Í4Ö³Ÿ¸¨€r½ÇhÓ•œC…‰*AqžÇ{¢z{³÷žï„;Îè{†vNÝ×42¡ót$¯,²HÊr’áXëgîû6"Ü .Qß¼p.J(Ò'˜€^lÜø—Š2ç®P)ê.Î1IÍÓô=Í»Ùieª ¹b¨ k5¶±/©‰¤õ[{î÷qÉQòiAB³p$j.¦ý®CÙ™;1ÙM祘Ö)^KÕ!ix¸·ÿ{ÙÕ*жËh5lø¯®YÊmsð|^6çäã«G¨ï?2;Ø­a6É8ÛÙuZ®^DDÃt<¡hgœµó{‰(Tèn™6b¢ÅËòŽ•¦xÉÏ÷â-Ö݆äl(Ê%›Òî 'ÂG#Ûw0hÛ¥»˜Â®à¨Í)-Êo*RÝ<Ê%¾Ù0å‘zyZ« Òm„7‘åñ=7i¢o^~”òѶ/£Ù±8Óa 9RJ Ž–Róç"jVR¬²¯[Öµth¥|;}R²4fö?j¹ýDo´çû˜<}ütìÏÒÿÁ2 ’ÔõU;™«ÿa©£Þ– ǯ…GG-ú—ù,„ýF$_³Ÿ™ìûœŽÏ­~ÿ&Ž_îíâˆ5 Ô›j¨l¦öëÏÕ&ÉžÐz25•÷¥ÁQ{h/ËqÃ. õ¡#ËÿP‘ë+Æí('Í`³TE—ï/ìánQâ = ‰lƒ×É1]S®ešé ¤v À¸,þ&]íì…:,\¤Ã0¹$¦!Rmú§$Ñ}Æ?vK*v>þ&´ü< tTÖqøÜØU†ÛÏ›ü¬Œ: °Ö)†fÚÜÃËc{b„™ÓÍÔlhq5¾9§¶´À@&Žó&”Ø Îòs³’ê%h•ß§Š $®1KÍ¿+žÁùŒÏ­™äêi<<º³³+5û8y´eàÃ…˜ø‘6tHµf¾dp¤Èvãz³u˜EÞínV ¬ó"^,:d‘PÁG+‹†°Êäf»g¥­³TÎ…ÁO_DÑ»_Ž­ÏY1g YFîœ+6ŠÓR,&¦LkHÚ5ìî7þ®³oÎÉ[~8ĽúrËk¯c'—tÇ^v[Ø¢½l;òø²Xj}9ScèEä>ç3u×l ûÄÆ(†#¨*ÆtÎbF16‹)kÇ£;SAg]h·/Pê1“ÈÅi­S½ ¦•ç5ËŸ·BlŒBØ*$ÅTùŒ™ÅÉ*™8îÆ¨ÄžqYf 4ߨm¶.¤.¥Ë<£ß"Ùjù•‹¸Òóóv2[ÁüØÍ}Gn”&è{Â÷³¹¥¯&õÁ¹i5*‡{ »IsRHøæú>ÿ’ªZ 9„4¢úbm%urKÚs¡Fó2cî§ z&wº—aÖºŸ{´—‚>is¦¾§÷«†¤ „Û»ê6þn.ÏwfDüWž¦}R¢Ýù¸LÉÜi—[‹W û‰15^Á";¦¸ù¼YÅjWxEê1²Ñì¿Mä@D ôGÊ{„Žý_#yÄäÃZûr,¹F9!ع0?Ús$fëkŒ‰MæQÜ13¨äG^¢8þI%Æ^2Òɺ@3ÒîkÞEžN¨êÕK eaÍʂ䲭ڛxŒ0ÔÛÍ®m°sxÄvfLc¦DûÖø¤-—µß<8e¹>¶&®­\öÝm¶×£0 +„GS‡/ƒZC×HÀT3ÅòðˆýÚ‘¼Kód«±“Ðá̸ÈÔÅ`¢+ŒŽÔ§¼Ÿi YuÕî6Š@DdÈöõMñʰ$C(VøPE–[(N;ÂiÉ;8]D)E5!Q,"°¬Ñ- µÙHÅ6$yØß|„ŒžQJ]Ó¯¯2FWÖËÓjÆbêD¶+½NÉ,>€Éäîß*ÎýðqéÉf|ØÅ |wôžÐ•Úçõö”ÚÚÉ lÊC4ß,ÊÐIn‚¦S³L[&sòÒªCO;XÛ¬ÍÒfiTìs›" öüÀÿ%\ \ÿ$òê¡·ÕÓËΨø»ŒÚ®†Öõ¸Ô+2àø'k<ÃyM«£’fÏ1uàŒVÜMÕ—Ù4öS¯²ÝÖê%i¸¿ÒkM—‘-]•îó›“Ì£Ö2 Xæ“=£Vgz˜õ…‚ÄlÝÙ°í<®Î?©3¯ÿŽ7AÌ`–ôÙ-tœÆZŸ~“™ÇË%‘bª¨›¡&ëFEþ¥žÆێű„~ßRÿ©Iúlõã·É9Up Âf/ž !ŒBÎ3-;[TE0 85›¤]8ŠùÁáeÍ ‚OaYf¦'^ì˾Þ€}a¡ÔáßÈOÄoCÈ?ž'ʰ½PŠ™6_Ι2 ß¾­Í…ôûUIÙƒšÓý/[ŒÐø7ûmò™SS'—3Û†!™K⯿µæÐÏ.qý®†nmÜìß¼4cšCo°vt{~É}–©•VŸ-Î6®å fä…[)¤É÷¹Ø¯ h_¾Ñ®&ºèWÕ*LXò­ 8ÆÛÚiÙù¥H‚¼q›¹ÎLe^a°‹×ÈøígçZÝ'—ÆÙœŒþo[YeÿŠ‹J‚ÄçÕ-å>¡BI9¼»™^ÎìÎÕ¡ª>›Žî♟ :›ƒ•½‘ÁµÔoÇœï­Åxòï4h¥—8µ¬iÄ&,ie´p‘Nƒö,±|›V;)4ç)Ø)—m¥=‡Jtí“È{±¹äÕÙ“Sb¿ƒ“"G3Õ^eÓïÓ€RIjÉIÌèpd`U‚äXÜâ5|ÆÃç‚÷O z‹qŽÔë³èÂw…ü' é ÄÂUœðo Øzó|ÓmHKQÓrª*6o—õ,|ìmU=‹AsQÉõŽs"¸â2+¢Ï¼Æ»péÙjá‰r$f ˜Â»Ìó=žNø÷'z¥ ¦òªšxïÞ¬“atíy ßyËY–¥ºï·ÔzŸ¬§à«qß TàÑÀü¬M äeù¦¢]­Ó9{f09tu!!ÍRó@‚*Um.Á"2|} ÿW›7?ÞÅq§ŸâyßD© â.ÿÕõêJJØ¢R¾ƒLµ·&³Ô‡‰%F *ÛK@ß<²Þ—ïr$¦6{Ù¶÷èÖ™Ö‡k²Dy%–xùúÇ0›ÿ‘¹Nš #- Èö£Èð1%^­š¾í¢˜óˆÐÀótÈ.HG6¾ç5”^$6M´ñÔèDô}Yª®ÁÖu` ‹µ'oœÈô=FáäÂj•7~Óu'èÆ,¦XÂU‚M-µ`¥—¥4 ›2/+ÂûèÁûaú\õô¤[C/ÃZ1´(>%H5Úæ÷³LÕL*ôx¼Ö$ª…Û•&pJ™ØÄU¯$7ü½”RO4}ZmnÕð0œ¾ ÊÄ`àà;€œì3ŦÁxº¹Ž"±f¤Þû•]_Œ}®tŒ9Ak ͨ)Ç3[açny\{xLÉc/ë2TÉVΖ{„ôœGŒé4©£ÎtFΞkH‡–í³}=¢Ó ‰¶þSk†ãyß©à¹CG7±Ïx-.Ô¼g±’ˆx~óæò Y Î] ¼Qª#»¨Â5 KALDC0÷øÃËsüã–Ößmµ:~ =ƶhWݱ”E(vìj´WA¥aµæòkÈÒ/s)žü™äucyÍ…WÕ‰9A†£íœc\y›>lŽdŒê^·¶Öã˜ï‹iÕDXðš12U¬³•-ØåØ=¤äÝÓaˆ 
g|KÁŠl²“ ™ÎõLå]µiñþnr®7—Ào\«£§ Og§µkãÞê\mfóØQ”7ab †Ö^²ÝÜf‹FH8Z"“?3 LL+ƒ¢¼ˆÎÖÞFyé$ÇÔgÃêÆ†ùƒW•j'W eA@¾µi‰ÝÃã›­6šÏEާÍ—4äü'Ÿ‘Ð^öl{¦œàœ‰Òt 'Ëðïª*n±#Ù7”éÜAâ”×q|8ñ¨‹×úPB2Ä<ÜNÙ_³Ã«™¸Ø'F•å>”ÈŸÔ‘:¾›™Ûôn6t]Lí<Á†¨Ï …"ù„>0œu%#’‘BIç7Ê E€£Î§dQ¦²À‡®ûG§¦Ãzzø~ä[ÅêÀl›](?¹ß?|(|ŒÌÕÔíµ"ÑŠÅg'X&õàžZ†|Ú³´aàbHƒ"îÐ"¯5ÇN£Y=|'¥Vzv¯Æb…E.­¯çA*qø7”=I·…ö3;´)Bº„Gœj0x@Gdq‹6›wx]­ßž–&²‰WÍþ›Ä’+wl»Ð2³¾Ûŧ'+¿VO[éóØw+Ñɳì1/ Ø$Oe£¥ÍÅÖJ¥‹© t0ÝɾDÅ,TF2tX<ÉwäÕO‹Ò\¬çb–æ/ŸŒŸBzõ~0j€×™++­ƒµ ªÔ{†|Qš«ÓŸ‘ã‰ü7Vv\,™‡÷Ѹ÷ÍA'¯f‘7LÕçâPÝÃÍÖq¬~/+O(cäPgoÈÙpÐÏ¡âÅͨžHï'H’F¨®KÆÏAƒDZ¦ªAÕÚ5äÒÄ’©œmåŠbF9Xž$Úi”2kvÊ‘Z’X¬ŠÁ5éî;^¥·‹šå`×-ÈÖL”TE 4c ¤+>ªçx¯Áñ|_›í|Ÿß;ÛrÛrýÓèØº™86ü‰ˆqžÄÊÙ‹{ ÖE‰ÍéNÞÍ,óÙ8nwr{æ¸z "!V$ç½ìxê+’¸P8|ìScµìí“ß±ZšîлI´'v¸ôƒ~' ·ÎÙß¾ãR‰®­­ýüP¬>ªZ•ÂáèÒËýÆ?¤O.B4ÈGò¤hj¶vr='ßÜp½¼áW‹Èñúö¼>ŸR nüßNÂ:œUlTÛT!£’è‹C„ôS¢c+s~@[r+,™~žV UöG(±¶ç+I1DT4¥0º<ü)>­Ò³´íJÓɶ¦Å½}™ Ü~|p¨ûå¸Ä8ƒFkŠÝÿR¦;:™½^Ê9Dðʽ3ÇL#ø”YS5xõ)ð^]UkjW€ãÃpŠôKWº˜Ñþ¥çáÑÍöbç§3cíwÞÇŽ×/ˆDò(dV`ä·H>}~rùoeõ ±z³ÿw èDYªÁe#£¢câ¾Teâ6 ¹ÖÄçkÖ\Š@¶÷ånAd¢˜ÎÃbÅ1E 57 ³§=cc9Æ.ûi­–}Äz¨m¨Å¿“µ-þ-ˆ†ÞÇ—Oi>Ô3 “1mó–ê—,r3ЩåíxÃØâ±W³…Æ}íÞç³ÛØäÖ¿‹m«‰Ç6YIÁŸŸR3.ãÇt|Ǩèy~[s¿ÚSfh&hxˆL@ÄQ$‚Ù#Æ$¨˜fí¼‰7»$ÆÒ"D/Êþ.9ƒù<üRì) ÉKÍÌèæ=¾™ÙbUÝ'ÙÞr:U¢Ý|'ÑàzœÄ ¶·—ƒww™Ü˜ÇW¡sbfȧÕÀ~Å èÜg·³Ëp5r6Ž~N|»ü‰‰ìdÃPSK¦)¢Éh0ˆ€ˆŒÖÓ@BÕÓœTm͹7‡ð_TŸ{,³YžºçÅF‡‡Þkõ* ¥áˆ\E)CMŠ,i4Y4c].lhîêJ,š"d`‰&`’ŠŸµç¸NŸ`µ9¿Ç~—»â7^+SÊñœ“ZÚòW¦ñ<¿K¶Ð/‚ƒƒŒ@E4%ŒÊ‹w]EËníÅ¢‰-&M3B£$"°‡ÛçÌÁ»ªž=‚C"ˆ+mMŠY(‚X|…eŠ6T‘b5g;QˆTD4EIÍjî;ÿ•óÛn[¤ûî‡Üi뷻ΟuÕïuò¯¢~?µó5‰4XÚ6‹RÈ_ÆøÿF~'ÊYú”>Ÿê3â|…fLx "#ðþSg²ÚõKÎñúv׌ë9ü‘ädx5H UNŠ.\Š’ÆPØ‹»rv 5IbqâOSŒ·ñ¸è¼w;ì8¿ßÓzP³2Ch¡èºQ‚ H3%¤"ˆŒ¥D¢$ÄA”@КPf}c­Ìl2I’ÄÑ5êë c’ø“ï3`‘,A¢id ’i>ÕÚä–Œ„Ó(¥ƒ0I$‚F#0£b0I$f„¤—­Ý2AŠ3 È’ÁÜ}§¼Rc2¼pT ‚’‘Ba˜Âˆ%$¢Œ«ÖíØ“2&Š3A "$cQ&%`C4ÞþïWw§r¤›ã~?Þz{¥½hg©\½ßta‘°’"‹ÝƒŒ’hüšéFÒF‘1’"4šM‰†Æ‹dM‹X‹DÉ ÌÄ„F’$JŒRcÒ»€pˆù8TîWýþuØ0b‰Š,{Œd=ÿœ€R‰—åуäáÞy>ÏþeLÊ‘]‘ƒiïÆY`‚:ÀVß•Uë¬hØK%cRj•1!à¾OÄf¨éI@æ…NOØJ1ÉHç@SÀXüBÿ‡ý}ÿàëä×d¿¨«/×÷ÞÿÄ÷.]ÿù§è‰KCˆ‹% 5dš!EOÏ,;˜dTÝ}x \l¡B†fq˜?Oä´²ÓC’çÍäf×o*ÅùŒ0@$—h‡/x¥9B¨àªaQ®DбTRÂdQÓsPËá^sLXÉX,ÓDµåh:)±{T:ÐL keQÉ¢rB$8ÐN'*͹QAY‹uv:5•ˆD ;SE[ˆÃ!`J ðÜbû Ŭ›j鎨ÛuÙP´''ÈÃ8èåB(ݶ8@ñ¢;¤b'!,$a¼«5§g1†aTh‚KopKFÕ"é‚[‘Í"kjУV«H¡ã0qGvÑ ‹,¿Ú,Œ­­‡A«ðТ„ ¡«2é"ƒÚB…™DþéªËÎ=TÇå^øÅX€O6ú­—ÂDhIdQÄ5êˆ]Y$Ò J‹eS äßX¥êÖ¶*:>Ÿ¡©£Œ¼çÊ >Vᢠè pÈ­©"ã¸"ÇW‘k²q­´ $F‹¤ˆv1Š®ˆÄ?0?¼øÛù˳š-é¦þ:ꀅú|J"÷þN uqÀ"BãHûÇ n E'¥’$Å Ù*’A G–1s‘½ªXENˆÁq`,1ÂJeÖk`“­ÏKÅ”ã` JZÿZx¡é+‰ ¶áeû¶e/Häõ¡1tûÇ*}A~ MÇéÿ¹êؼhqÂàu÷¿¹¥v5FÊ_ç(¥;KGQëNb ¾=`ë)Q…„»$\(þw˜í¿/#öqésúÞã¬Ñôôje§ùñÐ<ø¶E6ðªŽ!Ä*O Àß‘Íd#¦D{Ȁł*(N߬ý¼v½'_—ƒÍÓ¤ÄO¨þî›-yò‡[Élø.{G‚³ñüÖóv Ÿë½E€e4 •LT4.öӤ𙊺FÕ·ÉÿÏîOÆy¹(¡‡Èüg•éèrõ§Ì@0œŒ²ŒRróš ¢åõµõãíTpÕ1…КÀçÿ¿í‚ËÐ ‡çÂóýw¿ß2áÔt*é¤)¤§a™ IY• ¶,6Š™£bÓ* IlÍ$j’ÆØÖ,dµ‹cS6‹ILÒccA ±„IFŠï‹zhS úÇÿ2Âb>Oa*²åpå0ˆt E¯#„@;gs<©¼Aï3<„Åá Jyž\,ï~e”ÝÂ1*o•CL©M(á0 ÷ÀŠ‹ôcáâÆ«=Ðež2ñ×1¨cF›ÈYÚœ§+Þ{Ýs›‹yãutg¹QL⑲Â#ô¿fnÂ4‰DÌ%¶Ø- Ö °4pÉ… 8õØ8‡…‘`Név=6†¦XÞæ)•Ù½WɈ٠•œ0h‹^ËÞC¼"[ØäÕfŸô²©gzŠ]š,‰º,°¥X'„•xš±«z¦à[HEPj×¶Fíc!¢*› *Û·¬¶I>2)š¤ƒòÑ¡dq§ŒÑ³±Y3IHÀ£Œ#+ ™™+U­<å`å-…Ð(†’L<’žâh‘°è+E¯mªœUvµàæ¹À €ÊØZhWt³Mw#&q®GZ†ÑDDÛGB…ÄL `§„Z¦Šç5v:µHwÕFŒB·]Û4I‹Ù)†*@Ó‹ ¢œö–¶tÎQÁ¼ÓÒ¤cýç[Û'ƒ]­¤,ã’RÝÜÔÉ]†œ çÂŽê½W™|Õ0Ê)õî·Ýužï~ÕW3›Ÿ¥iôýV‘5} ³çsϵáK×)îQ%Oµö}w:;û®Ï¢6Øø!ïDRÑBÒ·Á:õ`÷²®Œ"”FŸÞ%¡rašÙŒ÷F5RgÞ‡-Bf^Ãôh1,ƒá%ë9·:CuT(bkPÉxM©˜>ÏHä ¶° ‚îPºë„@/ÔÂç Ð Ò´#(P£CJ… Ò%Ð @Šb¢„´ýN‡B+»éø*ê,)h0*ö"!êÏ›ú»?ÁõÙá¼FFQ¤Ñ–y˜ÈXýB¬ÕI³ð¿ ò$üéùG?uÇêk'ðò†élŽÑ\nyæ|ºyÖÆ V‚j@ý³Z€“©†hü¹Ö!OhWÛnŠjµmKN4ãs—›ñYêþ¢ö“"¿iökÒcÂe—ƒ¤ß³E-,CC–™j1h“bÑ3I`Ûch±£E´¦°Z EÚ·s*`cF“gÄù­¯„ú}ì>†ß\÷˜üŸS¤ÏAø×3°)ÔmÔÀ¥(Ÿñ Æ ¯ûŸs×ãØ}œ³»Bi4Λ¤ã:N[¿yõ¾‘æù-ÍMÚ(¸ˆ‚Š*–Ž|'ƒ?óÆpúš`”_Õö^§ÔzŽW¸=Vw1þ?šÖÚ@@û¿ÁW0^Ë/_“ÝÒ£_TC%ÐkêÈMQ³A –("ˆH}YÌ`! _­æœÉ@í«žu¢§êó7? 
ů›I[Ì=Ì¥€`hAa¨•ó¥vò‹ÂˆÂÂ}NÚPøÈ |¾º}”û++)ö9á ìïe£±Ó™£<ó£Ë E$BsMç+¢”®“ÈùÒË’ÜÒí'Q#{L¯ü õäÀB¯y ÀúXw2´P¥$T Jä$€c¯ ƒíB󈪡ÉÂ% =Ãä¼§cð9#cN4aÐg–,óË.cÁç Ó§ÿ9fj£!Â??è_Á#âk>DÓAlGŽ«M›ým\@Rqö^Iø9ÉЬ^õIì'ËãÏ´œëÏr¸<~õC,J¤¬º¦1¸ÛVBû%Š^Y%ÁªäÊï7®ùÇ'KO`ñNò%>Úw›h->ýS½«Ë“ô]£ÆGBüûöû²,hoÃàÑDõ„ XÍ! ľ-Ž Æpjž¢¿­íäõGG9¦x·djí[NkÏ‚ò€Î^jÕj²QÍQo;O"1áF4pXX *4ñ\U=6n•ì{£#ð[çfÐhÑàâÿÅ{™ ,Þ‰î©ÊÎ07K;#µì[»wEù¡¶¹ˆsH=¥`mœ(p¶¼ûÚ||ò®kTz[x·ŽK: {]Q]ÍVD^‡Š8^'&1L‘’4E‘DƒdSµíjìZÖhÉ. ¥ºX“5íù«¹¥[¿Q+È&ººuÇK‚ÎøYoÙb–¯Œ¤>s/ºyczãéåúót´ ƒé (Ÿm_h‚Ü*‡2¤ JùƒrÎ çÔs¡8<¨…‰í¸¢V²ª¼@êœMËlL„Ø­¢ÈØ¥K$4†|iJ£Û( Š€qt^¤àH÷h7I˜ØÏ.#_PÜèÏ‚àkm·:7[9n¶AE„)j€(McE ¦¬¦"–„ECÉ‘‚ˆ‰Š†JE4H30HaBJakJdY1Y”ˆÆY f‚£ ¦li‘™C›"Ce•ŒA&ÂQ„4b‚±Y„bJJ6¾…ª[¢Q¤¢Õh‹DXÄÀRÑTì$-M“9 Û€Så*§qQ伟±ÿöf-Á”š|¥‘§€ä8˜ÈÌ?HR¢S\ÆZAôFå‚Ú÷Ûn¤ ,æ„@TßU%R”ï@a äüì xnõÓÀ¥Ž ÉYoðKþßö}®ÿ•ìâ¯Ç±þ9sÈßìüe1×!?¶¢%aQÿQÿ•·5¨`IŸþf¬GíAGò‰©ÿÄŒÙNbôA@ñ+2Œ„€Ýu±œÏHò]RÖÑã°/ƒÙwîË;M?^‘ p«-‡ùF±®qÙ§ü“5ÁͲ”åå¿÷Tƒq ~PÀ |+„%Uú]OwƒÐ{¼9Þwã³}®ƒjDÅàQÊc7ŒžW>À‚ÞÓ®w☃¶ÏhŽ›„OºßŠÈÊîhPCè‰ ¼¬­ ëòø8Ìîlä÷C¼ºyðéÖÖÐrî\°ûm?ÑŸ0lmÄC¦æ0†âꨪ\)(c¾ÿ ¯ýÿò¿ÕX,íM¿Ðå~@5üìçrž‹2„Ý~®?å'±¦’×oÖ(,N^ù›µ—*ªƒ»¥ÛYûlÂÎ[Ó¼ÛÁk´¿$ACIþbŒm즨¬‘c”ÌáÇ8N(" ‘_±A€Ã''Î…7ü_ÜëÚ/õ3ŠÙúZ\A·Ö7Ü@È©iŒGç9 kæ-\A`ì1`ŠÃ·Ù4fØe ‡fé¦rA/Ë,³ÿ¨øùý ½7ÀR ÒPÐ,b‚F¢Æ±cj Ñb„„IÕw1"(‚A_uö‹ðýÞ2Ê?~âçžât]=•§,hƒ)ÊêwGSV½V\)­·~ipH€î“’áxÞ é¸~‘-T¿tz|—‘ð;W<®§X8¥ EpÙ(狊:ô•óÄ}¥0¡ÄB Š" ,1×P 7ƒ!Hotf(œO±_;ñõ½Þ?¡ýUúÑRь줯°CTM4)B4FZ,á#G“Ψ~‚Ö7‘·õï>¼÷¦?††˜ßÄyãÆ»AœÖˆÖ-Êí\»®¸ß<¿Šûù. à qIžïÆrèÚ•þc Œ¡–šCºf©Z-%§½A×/ªÈbÐ*ÖèLÄŸ–¤EŽPC8ÕP­0Rò®]^¨2a â \h°›Ç'°HB!DØ«¤ ‘Ã@àŸœÞìïAxZîC; ±Év ÁH$ê¨R  AÑ‹<0’±×÷Å$…í¡Ü†CK„2;vs.ÑaÒõößÌÌç‘ÆÑ³å9Å “4°+”Îé2´U[‚H˜z¬õÐ £¯®@,Ìï󜣬'ÕhƒŸ%y$‰k“ 'Ó”5Ì[­à“”ë-((ñì2Î(-/Ca H&²m×mZHÞG¥80»JôR}ô3UªuèSÅh‡Th°}Ÿ*¢H4qÖ~)òã,W‰c%%ƒÅMpGŒ½,ƒ£þôP¹õ/Û#Ó¿y|ù’$¸Ž­³TŠ—¡¬»‚ À‰dÈ“Wz¨@© µ½eIIuÆ ×æç¾;³dã‚ ,½}ð´6¨?˜˜ 3«‰“Ö*¼çYk½ÀC&4/vk8!˜ŒÀÚ¤d?w['»ÁO Öj¾Ú!éµëI·›É&ü?…ž7Õ£UûØBŠöëóe½vïu©»Þp›r¨§*C¥‘9ÿßqž§ÎéÔz\y¾ëÎv>êÅîó÷~†ýßyž“õÿS#ê}¾³F¶ ¸úD¨À¸Cƒ*@p‰ áW’æóû·Çê2í¬²ôYhôå£ô¢õ&h‚ »n@UéM‡gpж’Lß•† GVêaR/îÉ/[¼èêìužÉ㿇Á÷Y™Sžf1ж)E¡ûB¨2Zm:*ŠhS !F¿Ëi…Š8Š%2”8E~•§ò‹Í Ö#úƒ«¬ÙH“¼rúKëâÝ6 46p+V@ä·©aÏôäI:€ ‚o›…8p`yáF @ƒŽãÿ[Ítœøeþùý‹Ü;„ÝeÿçüŽ‹u ‡Æ+,ç#ÕÊùLïm$K樹ü,.uû¬¿Q!ÿÉ¿¾òJÈÓvƒóxLŸ?Î’¶ÔV€õªù -K3x²¶ýó‘×| /ôŠˆ¯õB'Ø€þ ”‹Š¾wèú,«—Zæyø2Ñú>ŽÑsZ†*È*”ðHp?[küó“¾êr‡¬ro„y2þ9Ã…pA”6)`Šƒ‹¡w¶‰‚œ¯Ë!š—À¬<Îߣ¹²9DxRöZ”ŲãŸèêÅç£Ó¦Š}ÿ5Žýò`»Þ‰ôv€>#à¦ÆÈ  ÛâüP¡3ÁuƒœÓC̨‚S †SE'w:J²îŠ-?Aœ_9Zæê§gûÎ ëe÷ÙHƒ4)tXzx‚ £ ÅI ò{UÓi ’±XH’xI‚Ë/h· Št‚$„Q ºª¡—f!¹70Ž&¢ˆCL’Á,ºªÀkm꟯\OW»º”¹}ÍU`³âª–.!áñ °!!K|V%q% ¢M@%ž««–B8­´H2-ÆfæÄ¡`£·Áyp%P4è)`E*GmÙ’ªÏu£C;ytUªsÓlãSBb³™Û¹UiѼQFØ(2×Þ]VÄÕ š\UÁžùªDãôï0ãz5²(M0º.«¥IÞ­‚Ïx&xW˜Ïomñs„ˆÏp~º»ºpÝé-P)© ˆ¡sWÓ¢A ö\¶`>ÿé5Ú>ãf¶Å„„ܤ bÁi¥Õí–÷´†ç¨¯›â™„øŸ1ÆMú:ß&¶rƒÞ”„¤Ä̳:ª6íã’d;ïVÿÁ鄎mÈi¢ŠV Uvü°R‚ÜßÏË,×:HÀ]×@‘QH xyT ²žÒGŽá==¹Í—ð<½s9‰°°b`ógˆÔæ´qâôóÛâÆ]WhsØÂ§¾ôŸ,Æ1{,9A¥‚½õ´_XÆöÆ*#ÍýYiY¾¥Ìc °²2À°" K@"ïQ$^£°áO%–.7‹Ôüµ@7°µTD…§‡ãòMÙØg°æ‡%ÉrY+´r:uo™ÿûp‚¦K²o{Æ»Ïr›;›[[Ÿ†Êíÿ®±ÿñÿE§ðV!w¾¦Ð :*Ð"“h’@€KåR¥ƒMIš2ªS·ý„¥}–GϬ0¹ëÖ "4ï²VuVi)ͲƂá´xŸ? 
ÐX‚P€î0РpàÂwîñ’;Ç2¨j"*BÃä¹Ñ‘çp%üYuV^OOÞäñ˜‡ö( |¢=~ËÝ–uÝ—îeŽŠ1 îòÊË™>ŸZ¶œ´t¾—¤Ýø|gÐl ‡Ý•Dݬ Á@Å%d@±æ!ÂøüdãùûÐ}Üz.×ÐÞ{ÿ:àîeü›ÌêD‡Ë œ‰ß»³ ÷8ù^W²Çt€ŠEŒ+È!N£ŒO#TSOmHÑõm‰|­«F±€Ó¹Ö×ÚѵµÁe­¶Ñ\Š»ñ€¡î2ÈSfD @ÆçW¬Èÿ |ïO€uYO!Õ" ÉB¥ B Ô •ZB‘h“Ëøûî‡ÿy9öeÑeŸi³¢~TÖ‚(3MëÞmÌhÙôóWM«6€c’†¯ê\:ªÐš ±É“ ]z+knî§oF}Þÿäþ/›î|y|¨pĬ"•è¡N‹0Œ®Ö¸Ë&ÛW݉èI#·hùÎ(Wƒw›<Õ]#OtíQg³ØÐ öC‘¼bÉ pi¬ µ©¨OWžŽÝÑzS3g¥Øê^–Cæw¡F%ÙEè•Ó·2…È'u‡Ø)=e퇳÷çžùÛë'€óß¶÷I× ~TY€Ú³HÍ&ãj¥5 ©ºjUËn,hVªÐŽì~G+Œõ\aqÚŒ8õ{ƯS5y@[=Ï>:a‡d°Q„|-g,iðëˆ7—eyœ]<àžr?nÂ6Zµ‚®|!ÆM뻓~2øÁ3L¤¤ˆôdxºÎõÝή 1Bé%"‡ÖÁÉ×u[9¯%w›Å^jÉ‹£;ëZ9µÛÄy8ÑíΩŽ$ ˜£Ç¬…g<ÕU-?>lUZ¿Ä{ôòWjÙ^ ÃÜBŽ}Öx(Ú[9†¯Žõ¥—Vø®Ù«>~*­1yOV¯Žmyg$ õ È7«N‘±á¾Îûg9ÂóÕñÁ35Ö³ƒÉÍæÎ‡j³¾yêC[zzªkER=a‘U ":ËR÷ˆÖi®£t2hÐÅBÒ'G©ø—¨Éå ùÉ7ªèŽŽ½Yg]ò2çïªGUXà@„ !š®X)Û¸¦6´ÔõÝÞ‚Í)åsµ˜ÖøRp•eànjÕ¨äs¨­# áᕦæ­;ê£E!rÒŽ )Sº úNˆj‡"í‹)ÕÍ@0f&d®ˆnØ +ÁaQ²5™š™H€'à$±Š#% …1O©5„A²€Ì4}G gܘ¾L|‰JQ81î‚ cY«B±’k‹—s’ ô^ø™à¯ªcE…Î}QM·¦‡¢Ð¯u# Lv"­ :b‚mÎLLÞÛÊã>zˆi[‚Ú£ ~»°(àœ VÅèhú[õ¥K¿Íª5¦Çnq=ŽM„FDäágCÕÞPôšêoÌ„úOvHæª4¸Yw"à¶»e6 ªpl‰³ƒ! B€°aHS­ñ}ÿ/«ãþïüçŒý×Îÿ¡|¢‰smþ«Ûÿ¿ú©Ý°ß±ñý°´FÇì– haSñùû.ÿÇõ¥«ÿHÃÿEü«jänþªMΰU7ÿÚò\?¬ƒi/ï«æ.ÍÊ&¤4wGuñý/èaûᅳš²u¦{iÉ‹uìK\Ÿ”?îÑ)_âðÍ-÷g[pÈ鈒f‡§X…¿™½ñ¦®(²ž0ãχïe {kß|kX1²ÿ®pâE¤Ì ³ÿÔØÿôzkÇžêH_½&£ðÿH§-„°'š“ýz3?é9ú¯£Š=hަ#I÷:?ìJ;rní™òP98¸øÐÒçÌØp n‡>ÎI;{4ë øú£’¼$¦…ýF<ŒËfE Xž$ëÿ=zrµZï·CªdÁÿ#:êêá³|¾~%mÓ+rŽ›p½¬ ‘A6®öð±[¶[ø¿›^ËQœvÒÔ%Á·ìÁ®ÇGíêÎým”è–ž´.Êt!ÇǛӿ¢-¬_û5* §_¬ZìÿÅ×Ô‡§e/Õ®«ŸžYxñpTîºiŒ{(Ø¡ßËφ×J+×”¿‡e³ÉÀ"ܧ¯R@&7ÜÜz[Ÿ –é]g÷K¿ ŽÞpYÚÚ'qñ´\½|ƒ»æb½g]—_v—¤¸º#ÖÝUÞ[±ýñÓ^Y·µ·†Ÿ¿.^Ê:FÓÃEZ\üÓ…*u3B¢ tArÁOÙCÿ½)´ÑY±£Ç,µ&ÙjªáêÖ,/Iü¦5•å¹: ¬»f]Æ y,K¡&UyPƒžäË)—r)–d/JÕ€†T‡f´­®–W©LËÊ)<ªS·M´4Yµ_S¶){Òv»§ÊBóWjÖ,èÃV±ì4ûšyT¢M“Xfúó.B(ŠÝéÇê®®V0ºvÄAƒŸK¶éÅ©Ì]»w§¹Ñ†™cÙNÈeÍÂZõ¿ghèîíkÄäIÛ¯~óìaUÛ³-ÐØÓLù5÷8rv©±ÜS"ÕJ†Ÿ&³•’k7®Vþ˜ÐF€À݉å®1 Ë¢\‹8³,´Ë_ã·r2(Lj´ õŒý×kÔ›e¸¡^sÃyUé¯]Uh7 Ì\;F®!2EwNjm&5ÚK‰LÄÄSH^í–Û4;:Þœ(“x&%³J& F…†Œ,$[ELjî>H¨ŠÍÛ5ŒœþfëáÿØÍó¶;ñ'Äeùö€Õ…}Û¢i÷ ¾$ ù¸6Ñî×s³w*Ÿ<[¿ÜÍ"‡ž_i6£‹ ƒ®å<˜7–1ã9hmz˜dm‰´PpSÒ-bû!&NqÅdŒä(€ Uì\–…φ–Z"Î.«Q@ý[ÕY'Ÿ>w¼ö¯-o]n…T¼²,ÒÆI¼½Ëg5}eJNP\¥»ÂºÓÂÃ3ÍP³®F|@ Ç\~Ö#xì‰É¢Öe’,ŒqŽD í<8ÈPŸcZDðÇfŽrÇ ©rtI*Çlg>yôŠÐ9«$÷eÏI¡‹BÂSlsDï gCXï¯V†`=•LUý«Ù˜°X‘™Eq!’ ¨}äÇ_ß7‚šw“ÌÎR"ýg"º¬žÞ¥­çkÙ½=‘½¾xð9…‹£] í*rA¦°žeuÀ¡pÿŒüßïJ"PéuÊhD'xØUBOŽÂ³¢¸#4™k"ɆqpÈ L­Äþß4«8®›’!WàŽëÿÛÑ V5è•fùääÕÁ@­ú@™¬ßÝC2F+Ö,jö~2„·ÊhŸö)ª—×\÷„P)A · ¦$P9 G‡„ bA¤B” R€h@iAq ˆU¡jHž²3I9H3…îA30˜îžÛñ1}¿‰ýú åhË`£@i“B¿´®¨B•tL6ÃL2´ÅHI±l"ÿħõuz¦Aº¼§hKT­¬Göåš /Y¬¢ «lkü_§¥ò=¯äÿo_=Wó^Ÿ…õ¼Ž€Óž¾8ìeÄê‚.âJ}T(‡Â𒔢á…Hå|_™æ¿s/Ë/ƒÓþçó?åí!¶èÍßuî7fžP_8(D ŸØH¼‡íõùŒðôöYãx.§Œé7÷-ê(𣎇ãÆÞ2œ2 ÖŠĨåˆâTaúàìzÏœÀ8ûòi34¡9ÞsøÏÕ»ýŽ—Ñžã“ÿD 1²2±ËÇZ QÙ$€A T’Œ7k"GzÙ4±•š\Fþy_Þ-ÿã÷5ÆüoÍ©×uUÁî=9o[åÝv!îŠÕw«¬_Æ®ì„W••¯Aâj®’„ÈÌPÆ*ªíF±‡žì^d¢†›6Q4µvè“3€,2æB!Òé®fÌÏ5ÿÛ‹cE þ’¼ Svk éÊÇTÔ Ñ£›‚ Í`w•çXZ#;"»JÍãŽxu’ 0·TH¼ëUvÅwC.ƒ–°Î!€¥ Õaé²FÖHFZ¤¡#ŽœˆEÂe’ºåãŠAlå‡T™a”Š@êÓÀæ1HbŠ8 J¶cÑ„ ¦aùêP8¶´oJèmÝm¬p3ªTCdyù¹14õVjÄ–ßФIg2{=VºÏn«·;(°4£áÑ~;òUš«je2HdêUªà÷þÖs‹ú£yìÚ(ú|yXÔ +,áá”§Ø€É/l´¶ºÊ¾FkšÃÅÌ(†’æ–‘ªk›ìjBc“læÖû÷t:Dý’u¶þE  =Z`úëÎGÎØÉ ª`—bëËì'“­Ñ ‘ :!P<±‹)ËÖ”œ;õÖGµãy®Æ¹öJ'h„pY%Eæ¥*ÄkHê…}OìÿWå÷&ϸ? מã#ÜãÕå9)$:}”^Þ~³wÀƽ ™Ìõë·j yJ=ÁxZг– ….QÓt¢¶ìÇÊRB¤¹Ë܋ޑ/pa¼Ba ®Ik±Â@$š™ŠßLÒ‰fëZ.Zb©¼Zö,•æ¦G¹Šc‹QÁècY.9µmc€M<¢½l&0]À1" 5‚…Fu—×5ikQ¯vÍhÎuˆ:Ì. «Âà¸. 
€öDhD h^8ä}6=÷´ö8ý{õ?gt‡?퓦?¼_ÛýåÒ@¶¤{òüÈþ¢ËŠ–e’ \ðµ ƒ¡:dézñËT‚GV@¡L$†ªgýOäWoc<Æy›-3ô|ΛkÃø^dÔçû1â5¼'ªØñZv¼FîÆ¦¯ ³«Óe©ÔjïÕÙ¨Jš‚©U›#ÙA¸$8q/k Ì œ+ˆ`3@>áþ'Õ>áÕ`þË-ÆyçˆøÓâ^¿oÀŸÍ±ºÉÊ*%THYb%²kWUf¦&2PxH-º¬I6™”O4¨›U ·§zDë5ªÑ®ÐÖpõÆlzûOµ_îÂýI¯›^>ÿVÏF©ƒWÃéù^Nƒ€ =Y<*¢Áæ?£Ëx̸4¢‚E>Ýö¿qE»ÆDQÜ|¢Ýl ý¬$~Ìäàý7¤1oeŸï'Ëãè×ë¾óɼnQv*hEMÒÏý ‡¼2?üÈwІÏE’d íÅÊʈRRÃþoŒ@~R˜H%ˆ‰$édJ^©ú‡Š5vp`s«ˆl=NG5«é´÷Ý}nõwÉÞ½ooþžÃû23ö^ÇÙ_“£`clàÆÐ§ß`~à(–‚b   t?{ò–0?ÚýoëÔíÅöæ«ó#µ‹4Jb¯îj"±“‡2)ñŽÍå6JTZ²1÷B³,3 Eµ;PšSì'%´dãWª¼ê•Y”NeÑÌhoNõ¯›Ë­ˆM´\oZN¬‚óZ$j[ÓÖµ¯¬ø“u“­é,ž•ß%‰mh‘g‹ù5Y`ypº8 õß[¾w·ÁÐ]oôÖ;𮑦p³OxJ£'ºç×lœ¶BøÂ©wD]cšÑ\>x®ÕÜñ‘/­»äÑ&ø<Õž?íëTkžz¾xywßm›­~5|gÂÑúço¿¶V1Á4+ á–8Ñ'&ܦت*Xì[! x†‚$ÚA„""ª5 "JÇP!"³xf*œ$2 Ý ƒ*5YË™ÅkÅ`®Neä)¼JÕç5SU¦ùÍò¡é3¡±òsC°ÌÍd;|öËë9:nõÚ¨t¸ý~rë¥È:®¸| ÒxϾÊàâ»sVÛ8Á•S àö®´,jÝ·«@ß…tZí1«¹á'‡} g\|¶'`QÁK€éÒäP„‹V+r/=º®C g[BX¹´†óš+Õ®ï¶ùÝ]… TQIqùó¡8¶‹{LÖ_Ä™¶Â9érêzY,Ö9¤pmS¥lãÍ3@ê:"Û:ã0ÖŽUw¡§¡m” mð4!•y,Èg~Fe]…§‘^޵®Ÿ9j cŠ(±¤¶%Ë)2Pdªj©SK¡˜\&9@#´èÙ‡¼ ÑÊßžxß~8jïXh•6KKY5gÛöýWž,!é\‚<ªP«ð›&KËæ^sz\évÒ5á‚B¦8\Qï¶é"o(·´#›œœXÉÉ‚8iŠ ü÷0bC„¸ T¸é†yn­NEXä…Ç™Xí¾»ÖvØãNîaeß+ÿªñâæp²øLÒ^s³¦yÆ,¡]L&‡±©g«Ìº˜£ê;6M…xa1’ Z*¡:tÔFé`“„¹,œ29é^ (²ÎŽH»{5tË7§UÙ¶‘ɹ!¤ytêÌ,K£ „ÀFš>„1àc>ŠèÐxÛ/ ÎáB”A“…‹X4ËaœQó!ddI² ºIÛ¨¨ˆC ¬*~tšT!M¯yXt4FJ8i‚E’Ë¢Ód„‡œ®:"Ù§YäÙµ»Å ­š#5±N²7›ž£@°Ô®ÝB½ŠâËYbÜß)æ0|e’s,òVì]­6IÏS##ЇòhfŸYHC-À~½®yòïCZ±Å¨ˆ&šÐôùLë^tN“HFÇ™&´\ÖýW®pàOªòVw¾.´fÞ}/ÇS=YàùNÖyàÒÑ} $A'ÃUÁ§núí}¼¸Óî‡^*ð•ÖÁ„hD @.ctا§ì”(Fé•ô€ŠŸ)´©.!ó«Åúy¯.:ˆò³¸‹Ë}ëÃÓð³Xß ÔîVÇF_OT†ã×Û¼‚Öóâïg§»ß{/¿§Üv „5Û}¸BÝTª/9XË bñÞó˜XçÄʾúùžË9Xá%$]ÚЈ¬È V+¥Ž {ú2õ_À7£€Ùdb •_IAГd+0ÉŒµ9n¨BU(®gHEbp=*L¯ÁÀK«?)ÓP9?=t2FdŠÕjÓ[ð“ Œ¸ŽI#ÙZVì$ëh8Bh Æúª9IµIDض³#V NhT–ßä%‹`,Q˜¸·âiŸŽ—¥#!2‹ê¥âtq0¥îe~åM´f.„UXnë€;` ºë®»0T ZP¥AhQBª½ÂÖ¶7Ýڼͨ´U P¥ P™J¡@"¨«lUEÕI[-4Sžu€Ê?¨sA î.ÂÆ;ŒªºÎ¿,«AƒŸÞ±ÐùŸ/Åù›†x>ž^îW‚·’¢œ+ó{çzO—— ÇÏ!IÏÿ»Ü2êýó}oê£ùNÀ‹@Oÿ'úŸOñ»‹»ò._äK¦Ê H¢Î²ÿYÿø?cö&;—¶®XüªíÇúLöpá­ÙꀷD7þþ¸áÅ…#¯pÕû6²Cûz‘œ±ÁÚSC¯îqAÈg–þ ƒ¤@…N†ôâ#bˆÃ$I‚ÖšÝÀêµ£Æn{F²§MTbÁÓ}lÄ£ªÕ¥¯ÐN¯yDéaÎñ*ý þ/Ñkë;ùc8PR•M"©ÃûïÏÆBo…xh i(JD¥ÂŽÿ„áxÌrïHªjIÅ`É€øŠu>åÅŽêL›Yz4è;·PvЀ÷P_!OÈ@}1ÎT œa%ƒLçŒö%JÖ}4¨ò¿¿‹vß›è{¿¹£öö|ÞÇáè÷›;>ûö´m¬nx”‰0 Ø'£:&j*¸Ë4Þ¿«²Ë‹ÆGäéÌÉ5 QæÀ¨S¶…ƒìþõÝö–zg‹<±– ÁyrÏ'ÌdhËÄAˆd1Ž^€CŠ0ì¸<½âúÍ®X“o¶)D4Ä-wt9f ¥¦È‹Ú7þ/óyGZrˆ*…ž‚ ‚-ø$˜½ oòø{H¨_„ ƒQƒíJ ÈrcJüº˜T508Q/Ñó›  ÷ȉ(¿žù7Ù?ä3ø¿öªôå²LÁÔa‚2Ú.'º¶nÂf݆Ë%âa%’© ˆSM­âVQŽÈNÌ£T¨$ H:É ~¡UÍÆ4iË[Âkxc´ÆÆÎó-:ºšžGëró|*<BØh  &¢±Í ˜D“‡Eä'€CúÕPù ïKøL}èAA’Ëø©’$ºXÍþ“²(RCã5þaÞ¾ý‰V?cb‰a‚¤P`žJü2?•vÁ jȤ¹é¨h+!¨›•LÉk/5ô—UN_‡`+ Òò×ÚC] 3˜$â‰òµ‰—XÊÄ;àŒËÍ„{üó®Epaš·„zÏ‘g¢¤mŽÐ"ŠhÈž¿]é]6ôn󓹺s£»¼r×¹DŠ pÙ5“áâJâ€%߆ä:Ó¤œ’‚,ZàZÙ±Ar–Můu²4ˆÈš{ú<fÑ©Žwû›N²9x¯ÛÖÏï`ªAžm\KEvA‚*–R Šy5´g70è AJ½MdHhé•ñb4I¥£Xg' àCŒw|šÍJ.Ø 4+4P Rö5!Ê ): `üδP&ÓkÛXÂ$2ŽÏ«j†¨–UNL!ÒÆ<»iEe4-5†zH?%Ïçz÷Cœ®6 Ácà?ušSúnîXæ×‹×ŒÊÏ0ýí¯ÌL°J*âkt†'¸‚äJ{™r¼´HjÈ€v‘9pÆ®ªcC¤hÚ°E1`5²CÁF°@±0Hñ¬š1Sm«·Ñ£c½ãÒlšœUíň‚œL¨bLB´ Ч€@¹D*ÓÄdªdB¿•íäî·Ûx¿×øYèëûÊwz}½ŸaŸs‘µñs÷Gàë#­öcDâæûŠ·—7Ñ÷!a¶”iK,°hþ_øˆ€ˆz¬A Žõà–@¢ ’pÄË*™µ?c÷£âkï·àŽs½uÙóÚÆ^Âö\¾¿.¨oUE"hb@2ACÍtW”ïyf*Øü¡Q0©ÿ?Áf½®üº²e$ª?åU†CA¼Xl™e$œ…„A&4 4UÝ—÷VÃFÈH[†ð”â"e @£¶O‡bBÊß™ýÑvó'—r’W|¼ÁXDD"¦„ߢ§—á|è4(¡œ ÓžÇÌyŽƒýu)ÌÅ€Àj÷fw8«ñ¨#þ¢õ‘ Q@Düo Ÿ-··úDÇÒ{ï~ŸÄýì“õ’ç¬Ï^á3Õã7@),4ÔH0Ó5eIH“EQMoNû d¾÷È e…ˆ)i(×ý†®äÀã®Ç³óÕTÑØäêYÆ)D%ìˆz¤ B`Éúù7˜W’p83ÚÝ’,#)Ø0TÓlkg³iµ|Çþ:o³¯¹ï›kkz€ø¹¥­ÜýYƒû²/ÄTã~–OÙç»>ßø|²bÇYäoP€šÈ"lÀ /eö/ú?£0dcšñØÃÂþÉ?㫚”Œ3ä-ßò¢ãhgDB0Ί'S<}9w4†•=Aš¦(óÍh)phü=Pƒ©r‹K骴!Ç×dRcŽPìqÈåpÈ<‡F Á8¨é¥c/Mð·å¡ž7ª[û| íké†ïïÒÕ®)WJrŒØ<Á½xí¹Ï;c1NÝs×lkPv]P<‹v>ÄñÀÇ׿ñÎÙßCÈx?Uß~)Qç+Æå â¯[ëdÚsÇ8©×ºÖt»·ê¬ëµÖùóg¦Š¡G Ì:ÅW„¨îÎ… ÒÈÝ€·ÝååAÄÑ)¡6#çSzÎ4½Z¤©ï!¼ÖeB‹©¨*ºÎµ/¼[Õ h_UÚ³Å4=²uÅœÑ}–K5ÅHy>Š]q|ƒÇ7ûÚÖŽ§¥õÅùe¤2Û<ö‚ÌyâywÛ°žF…¤7”ÊÏ+–5loG¼öÍlñúUYîƒ×S˜»óžÇ¿zǹû» ]Ðã…JÊx†å‹³Øg!œƒœŠ®9çu‹9" Nˆ­o*6Þ4zjŒ¾Ó'{èŒÖwVaÆï¥FЩIVºÛ"R‡:š šTÊi\þ— àåa.ÕLÉ®Ð^Í¡®Ò·cu“ÙpÔuNj6BÊÉh‡šo1¦©kw}kZ—µº·¦± ¥E„«i!¤Jšº0,“-Ž@™q;UNÄ 
ºTs*£sA>ÙìYÔÁÓê&‰&:,YQG«¦pi@h:má,ÊÅ'x î˜gKºád3FÈ$àâ°F¯/åÝEˆHwµN»qLœðöX¸xRŽèq€Ò@‡£Ô¼ooË4ÂK¹}ìrMù™œöªy7®®µi"!¤o½P·´]ž(_)o62@„ck NàH…6ƒó˜ô™$y©ÄÁ¨×.[¡,éLÆm†·dœ„õ‹ïA­ä'Ý¡­¥vAö)à£/ƒ´·©\u1*-¡¿)ŒaŽ†çž¹6B=iQ#¶4t«QÚÛY´iÆUnëeµÒçY7è–ù‹Ï §ÓëÃ6&ìrð<­*ìíÐ=yÒq£ÊÝÇ;uìßlåq¡Äó»ì}o¾¸èxj/T€Å0b•¾õÝ Ø45J˜‰ýÕh‰¿Õ,*€ÝH”i7Þ‹¤ —d׊‘±ˆcAB±y|ÁŠÉe&Vb±B€%ƒ£kðÕ­2#ÜŠ1ÐYZ¦(mÚÞ—$ÁZ©¡f•i)!¢ä­o&!TÆKó ´”zùJTMfðžÂOŒØ(WÒñ, wÊôÆŽ7ª0lT„U^+„Ö£Ê$„¶ ð›buƒÓ@ØQüÄ S†b8ó[pÚx¥Ïõ’^[·ð8}¿¤Ó¾ï|NÆ{\’/$¨ŠñRR¿Ý&$Ĩ"®J‰¯«Ájíh?Å!Ý@…(€»ø½™Üû>¿<â²è°i€æ¡ÑÎÚ‰ÈL!½‡Wœ_cd3p¿`XÓÄ‚A[À‚o…¢2h#h»5D „% p –‚ç,CŠRà šjR¬SAD™*“†Q$>!¹ dXgé=ïâ¯=>ûô¿(ÔѼ;ßE©Íå»x)¦ˆ‹éµú.w¥Ò€óíïÿÿÃåþ¹üŸæþçû¿×ú>Ïöÿ™î}ß¶÷úÐ8Ÿíü̆˜ß± Í@Oö€üÌæƒFÏûçþM¿Ø“?éÿm>‡ý)þ¼ïÆZùtÃFi ¢ŒáÉßÞ-DÑáþ^q[!ÿr[tdëþ÷ݽ+”vo5lÒò¿dB¶E5lÎWR‘ÕýÊÔk÷¸2ª“«ý_ÐJøë«lµ¨‚R¼¯þ‡ý _ õî0í>LÔ4À-[pQOm9ÃgTža ••¾hñÕŠ÷Õ‡$õ !•q¢•¾ÄÝœÂÈé¥â¶èkBÓýú1sói¶Š£#’m»UVØò`WùÓÜ?ËÒ’©%«aã^.'=Ñþ¯÷Õë2Æ_¬éa›*ÉÓŸ‡öèÚ{ü_ÞbÔ |±F­=CWV+FÌ$L„•\å_bdúiû¿Á‹Kå<`™”f±!Dö|U¡!n‹Šx¥dŽkNòJ¥7–£‹póœv×{£{½ÙØè8]×9Ÿ»Õïâ§RȧÒ_Ù¼¯ìaõ—äAþTb ŸNR®Žß,ŸÅÂþ/!þá¸ð¬° °ßlûbbàª0N›˜ö’Îë¿ú«Lö¬×<_wûêöÇÙíüu„p5ìM›ˆBAœQÅ&¼È¢¨,– + Œj;Àƒ)=Ý ½8A‡0ÁÇ+Ôg xA;8ÓDÐõu­kDЇ…)Á·¹ªºwN©Œð…*[®hþçt,ïN«…Œ‘t<û†.g”"³Í PÙ <ÓÒÍ ŒR<1Eˆ}ut¬–FHÀcÈõDR`ILñ¿Zzß‹;üzL– UÆëVñ  +.©e ÅQ²æCó46º=ý•žC' 3º¥RíÍ”BÙ¤¦… Ÿ‹Õ za‹uUŠ¥UNÐi°Žb–l—ç¥ ›Íi ¹`p€ Þ¸’Té[½Ó¨HÖ°„­ê 5ƒÇ:¢d#>ÇU´+Ac<æò¹ãñ5Ä - jqÄB kù5ŸNo.¢ÆÍBR.È,P üⱄ2…ƒ­ó^P‚G/A¢T9t|ócÒ… q°ýT‡šCòýŠAmBH4†Ñ©¬ihÙ\èÍdÐǼÞKQT‡ ùw¦Õ,@ê<Ö(‚pN2Qâ7êZÞ¬–ò}*»WYàGoHY'=P}Ç'ÈA`p£@‚vÑi,W…*»BÜHrV-UT¦ÕAv.éö¼ýµŠk]Õ ÉªuŠõ×/c¢2p¸~;úÞú\è€5ÁùÞh]lâˆôF|´œì@oŽî¸ªLÝû\ %”£*Ëß„í¯™ŠÊ6:{gËn–rl¨/töüFr¸Òu†ÅB°t`Ñ\^žp0YB¸Îd_ƒŸÝ ±‘|™¶—W{íBæ 6=•GŽ Ô¢H'תº_&μ˜YÏfy=£ôs¿||¹8%éÎ&+›$À õzÞ’¸DfщIè&(B JŠ&©L£8" {ÇØ¦­â&­IDÐO‰!ùå#&¹­`®3}Ís‰³ä[òbº\“$ºªÈèÄœÌ^É2AŠê°±4™Ñ„]–U”^f‘¦ µÚä¥EBM­T¢Ô-l3@Œœ“ÔÇΑƒ^árâŠmH¸Mm¤]eÅG ¥Ê g²Ðt„¦Ð!•/]‘o¶ú~Ž×»Ë‚Ù€ÍHrŒ¢ª$€©»• Lâôú§ñ瘉ùÞh©ª*È>|ež|.Ü>±VÀ` ‰?x>QXÂ?Ñ"„…8m qZöÑ7[b§ÞøžüvËÆs>ïèiäÍ?¸àÕ»‹Â"°0^ÿ2Ql`¶Ýµoe­­Waç ?YGèûö1Žöþ\å–:sÓZªôxÎs4a&‚ú(Ïט“††Rƒä\ùT"0|‚Ëj\«¬(g!3ÉœzésKk”Úÿ × ìJ(ðj2$®Ì®­R!…Pçöþ ÒúkÔ{,¿“,ñŒ³Ï¡Ðé=¶ÎE}J¢'+%ë8Çô±cáXºøÄ"¥„„9ˆ`ÉŽI5¹øõé³–Þá”nÙüþ­ËK!*ÿÞ ( @ –zý“uɺÈLmNõVÏŽöòÕtG¿½ .›Ïn³`ÜDW#+µ u®ÀäRù$WÅòš˜Ø$²<ý Ní–,£ß²Ò 9]ËY˦Ù%€´j’.í²@»ªKÖ`q]¡SƒÆ(ÖÈè–ÛA´×÷kI‹´Së…idg(L‹ª´®«uÓôšEK¾y˜•À\”èò‚  ×9BYíSEj%ö†«}¹:ãw¢FJš.%QŠ!h Œ#…½ÙЇÑ¡$»œ‘} 0œæ¹\Ü‚–>«T¹ÄæÂ5Ïk//ƒä®W}ñ·Z~­uÍ §¬äP®àªÈÝ|Žõ ë“ÀðdPšè™J¾ùP[¢¨àêà ‘ISN+|Ä@V 9‘¥)FÁ^QrqÔ´´#{‚ÓFÛmá:Ö;Ím< p'ªíHˆP!HŠm«Eµ]ZUÒÔFõܵÊÑ×u‹µEZ-«W5bU‘ZÉQ«•Êå„@úÇÏ«Ÿ±^¥e|œ:Š|΃3,a ‘ Å)ŠâŸû+” ÎˆÁ’éaáöóÔ‚O¯ è&uôëäFhšúºÀ³‘¦©C±à!„ v 0¨}ïó½g‹üßuÚ{ïqƒ=¯YíµrÓù¿&î-ŒôéÙ÷𯓭²øßë÷;m;'w±¹Ûîu³Õö¿‹Ýnû.‚#ôÑDïß?ñª«É•‚p“xûê ððš ˆÿ†Ã0ìÓù?eöŸ$}_#­Ò}öŸC¨gž¿Ë-Ø»±êÈ .!U$ØÂ‚ª”U9ŸAæòEÐ&œðŠ#öAíÈ} Éül»üú¾‡<»/,cFx+²%9þ2 @Vðš𦑶(pÏ6tŽ\þG fP,°QÔ€ (¾p}d'gñÌ¢‹€Oú”@†¨ÁõÏèü|?dŽ?¡ñÿ3ûòÈ!k)ši„A¦Å¿Iï=çÛ\MŽÇ#mÂcŸí5|†wŸÞ†ÔŽð@GNŠ 0*‹ö¶Ü'š`„E>° ž¯èGaãÉâ3“®1‹h-<}ùùÏÑ!fͰŠöÀ2£ ±D0ES¯Ú8ȃTò’9 ±ômÖ©"׿+ iÂuQ¯Á°Ü²=Ýnsz|k 8’جr­›ÊÕS¯ðn‡=—~ÿá¾ÑDˆ(¯ I™Žàø 3£Þ•H2N"ãâ[GzÖ"ö¥XJyÑ®óÏʘÉ#d Þ¡"ÆÔ8ª'jµNOŒÝAɳETaŽJ4ÅV(à¡A’'“R¢¡š K.÷C5M/"‹( ×5`ç…wR³H14J$P88'9”Ž!‘ .Û4PÊOÛÙ›*€ýÊlªpñŠüú ƒ‰e芉€€°Çj ÂB+I†It8ö` c$!ysãÊ·œgùgPQÄŽ)]L~-y4± EŠ ÞFw>¡sóGjçZá‚r¸ëëéMX%‚±‰¦’‚€ ‚dE÷T€$T7oÌå€êòètfZƒ–Ytc×Yö÷7Ìjó˜ãòá·Ú9,úýäí`£À¢îát !ûý•ò}_éþÇíû>ïCÞþŸýˆþ2û‰íð3×þ¹›cž@uω_Ú‹ú¬¶/GGö  ÿZâ««û ÿh` vŸfÛÃŒMEûQÂZ-¹±N_»Gôÿkù¶¿2H¾ó´—ø™—DÕŸä.Ôfš~åa?ö›ºÚkäÑãà­8ã^=;®'c‚á·[~AQC‘”€T•ÄŠ¿…ígößÔü¬ÿû–~=9N>ûNzÙèÔÁƒa’É,ê—é§Û:6t×ä´I Y±HP"&Ç2¨~˜æÅ€#YbŒBðN$QÀM¢Âª_h±¤±)/¾uU’5¿Á×àdy×xßñÊŸvÉcMxE\xÄ!M¾þTuH´ˆ$bAÝÛ"5Ú‚Ý·íì‰âÓE$lïØ0ìrÝ~¡íìäù¿éõ(ôúïê­Úõ×£Ti:ƒŠn†©ÔÀhçìü˜ªXB b HðE¶BD¢ClacÑ\ÿÞŒee䠱߲‡¥@ÉX²$’®”mÐÙÆÃ@y.˜DkMƒfV6ðvÀ} áèG6½UOŠÓïhz©‰ÄÉðf h{^0¾GœÃhyX$yp社Ò$¢©·U+Hx>L;NÂ…Z¬v2ËUeã5ªwiš0ÄÃæ«’¨$aò}|UÞðƒY ÙXc`ªP ]S]Ó&S &Õ/²€œGžÃO¹œ GåGÀò»•å)œ¶’¡YÖª’‘ ­U.ðS«µ+º¶I‹KœØ1v2½¦³åë >" 
Û…Œ"ëÎIø¬Îî2FNíŸx¹“•’Îoî$c®-å¼6ü»ð{‘k"‘µë6Xˆ² Y$É-~Ø((ºˆ1 Úžÿ|¹Rf2‡ÈdÔ2ÂW¹ ¹Î €%3fÖ½•½ÖT…KTbØ®HJ4ƒ¦ÖÀŠHfB³qC\ÃÙ©hNR¯âñÎS‹áT†ýÆZm­ž×?o–÷‚òøãÄD8ñà@®öQĪ”€¢…@!Uô DvÌ‚>°!+±„?yü¡üÏô“þoóŸÜŸÌª¼JÈà Ìvà¨Åœš>7Sdâ:ýþ\mÃG%ãòóüN§çµy-~c~‚»U p{²OÀ”Õ 9žks í%TaÑ5LT aTO™ Æ¥ø__ûDþèŠé¬&Çó%UQ ÂK’3(ÊE”‡îØo¦RF ŸÈL’Э‹Ž/]Q~ù‚e"–À¬eÑ–T5ƒ(Êx@ÓHA°$õ,x Œ7Ö€TÙQ9¯ÝÂ*Dϵ⸓¥Ð¢)æ!U%5 ¤(PÀ’‚ýåmù–ÙþYÄkòÒ%‰8×ð“$Š8²ÇÅXBcyùy‰ eÙ#Ä/ºª™J[mè>I3‡yè'îÖX:ÿ&1Œ*#Áª ô?-9/·©ö°“+ù@7Á‹Óúï 'v(èÀ@q(Y`[—óÿ$¤ˆÛæ” Bœ¢­ï/¤ZêåÕ«ªÒnøž».öò¶6·›ÔD÷°¥DR & ü=þÁ/©@TRSÿÝ!Þq½î|‰ä ?Œ†kø)Z„~;Éè|Sd²¨R ©Z¥PŠ/ O6ÕÛ1a°ïEй æ¥d(MM c+(Y–in•eÂó­‚@¸èB›T]É(óc\f\mÕ"ëåžåµÅÀ]ÑGüZÇis,8XKNR@ÞX¤ a=æd¬ª ’+i Rż’Â%`…LN­¼AÁÎ[(YÒ¡“5ÖYØYn>Õš·Å^Žì’m°#@.9ÞD1.sXšxv²[BŒ’¢%-* Œx˜´6ï9v~šdˆ#ª®Ù”ˆ;‹Y’@’é1ª¬«f/—¦Vq£§gŠÆÉÁÖÑyA‘¬©I°‚,%‘H]šÁDP̦ªo20lÉNWF‹f€X tƒ#¬Y¡¥Í,ˆ"«’’,QMâBȲ %Áªm;eµ*¡Q9ñêîÜ£\7‡Ü)†ð‘kI:8"ÔW,[$”’9@"ZÅ Ã-¦© Y(´ÙI6v^(dÓQ Aɲɽš¤B²³÷wWƒkB©œ"aî<Ü„, R¶³DQ 64y7’F~2ÂÒˆŒÒ¬ +Lá@F鏨[öH Žýtrñ„ct+¼–¬‡Í+ÄÈ+­KÅ/B@ì­åì™øk˜{îÞþŸ8Q/Ó”òºl쓟"5I`ŒšXÖ„°(âÍfDUGIÕ˜Õ°îä ƒk&¬å†G/2¢ß‰¥7“£4¼k'è¨ìÚ‰žqÁ ²GyíÝö¾ùÀa2]"®FŽ^ûÔw*›¾¥LÚªÝe1]d›Éãv`™ûÇ_ì‰@ï‰=®žúÞCëTþrz»g~ìîùò^¾ûȾÛ=õ¯[¢ý·LÚˆ™ªf?`X4cž·¹°8Ò.6„*"²•/9;Ö$¾s‹eF.ÿQÎE”‚YR²›ÓÖbFvˆ²ÑÅH r]u×*§= ©œ b ¸…TGŒTG×ý{ÑdžÐiEAzÌ ÇäP”PQY6ÈUdÆÑi•bÚ-hŠ’£FÕ°”4Ò­…)B"¡&9)ErJ¢Ü÷?_î×Þsï%ÊÖ ÃÁú¦óA“?ÏŽ£Û÷êy´ïYleÎmù =óm³¼;ì÷ªˆ+¢o•„Fü…)@õç<çšÌQÒ@R^¹yØR¢—÷ý­Ïcf YcWåxÜe¼é|[Ÿ}ÅÁžÇóû¯Î÷¼<‡M¼G| -ý/sêSáû%ö¾¿êœü£~Ÿ°rt½ñ_ÜúþñâSëúã¹ý$‚ÊÀ..?®Z2ÿ-:ÛŸ-¨oÁENB“A,q¢ *{ùTyΰ€ˆD×'ínƒ©úÆËø {,h/ÁÁ‚Óa, Hp AD”2ßíròhÚ­}}u>Æ}gUÏsyú¬´muû­È¿:@¥M였U2T@„HÞB8GÙÊEÌ„9åFõ¿Yõøø™8ŒQ}ežFH?”JC”þÙŠLíoF[›~©»œâ­_|»øËØb‰T(õ;Ç”ÿÞh¡¡XRaK+)¦Ri@ˆ¶ª_/I4ÆÒ›JE Xh©‰‰„‹(;iU$`A@¨Š²‚a—aëK“ËáãÕå‘:|ŸßÞWx£ j•]Œ¿»Z»P’F2Þ/nˆ‰Å°EÂãc…¨Š˜ÊXŠZZÈɲsÇ«Vò]Cš•‹Ácj« ZÉÎ?îМ6rÖ`a›)…šw}de83—c‚ÇÚoøÂæÅ$uª¢t Ûbøy*ÉäÕ K¡š„¨ÅY¢X¨³+”€»šFšûW°Ç•:ˆef«2qA»·[T7­ûív¹{¬ìVh¢-aФtsg‡2RyNðÐT35ºÑk`Çœêé¶ØadÕ¶>w²Í•É$ë!÷´¾ƒ«‚³Èu6¡@â>©ÑHê¡b°¨ F›Ê JîÊY´FŒ«wšiÜ­7дt%dÑ8RŒºD AâËu¡‘h×çå_šÎ”gnªED‚C7Æ}5­a“°6„0¸…Õ`4±*kv‹h€uJóŒŒö©1üi™EO=@ºWv:<ÖÝÚEãuÒÞxÈ÷­ÁhÚ&BËÎQK‡Æ&žP`ï( +›xdE6šºKZ@qÙm,p2µÅ1•Pr'°a¬ê‰³ÌÎòCæÎ®>ûÖ5¬óÃÅX[kW£dAh_î5Éá^u ¶ð5Št•»Ww™5¬h|il$E?NG(û;èq¸ÆIíªÎ/ÆB{ +Fw5Û«Ù5ŠG7è½XF± |\1 X–Œ¿!ŽZ®\ä¶hg¤Í¡H¤ —ªŽM Å ƒ×wãbû«¥­‹{óüœŒÎÜ®4;¿j{“ËZÎíCÒÊã“ü§Þ»F¦ wã©W² A%`!ZA*Qà²F/qÇÇZ×kª~uë;³ßÝE~}ü?Ý#F µF6JÕEXÚ5EhÖÚÅ««6´kE@)B"´+JСJ4ѪÖ6ª£ZQc@ÐСBƒB¼LJ% "pÐb‰¢ªP‰ U”T]XA i¥A pàÀló„Sñ§ö¾­ÿF‘ÁÁý £ˆDÄv(EkЍAŸŽÖB¢†ÿeY·í ,'PLÛ“£z­øýæ1á–(ÏÃÔì¢Ë”ª„W˜âíù}?äONÁPPxZzζ=WwcXÊ·p…M“™nñ‚жa[B{fõßÀHÅþÒi¢{·ØÊŤ¶ísÊð %ÚøËtL®^F…¯Cbøýý6‡Ü]u]!ã˜îqÖéŸD"aÎÞ|}zƒ H|s”» +Í(ø¨ŽøV\­°ºR¥d*î­ËŸ (1Ò!mS©àÊì{Ó—g¬{›VÉ3±ÇMx2ë/¾‹0Xê(À&G[Ž!’&žš•§a/’Àhá¹ôÝ Jç Ôï*mÙá~çÙÏÙåªÇb+.ÛÚ^y*ô‹Û1£":.‡Ëàb<â ]šÉDT¿ò:io2/åz‘;I_| › \^-&%ø¢’®ØCedÛ>j¼5ëhõ“êg p G“V¶uœt‘ Ä­Ê%6ÊÕSz—ã…ƒ;Õi•“ÿã|/ÍXX'.Uåå‹ÒÄt /!¦Ú§¨¦:·Ñã"Rµ0l ϦaP¶zŸ@L/CŒ«sÒ¤Ø1.ÙNžU·Ç!qÄÚÏ{Ú«[".`•Ÿ*äׯœ«»}‰Av[c›öÅYc¦• ÀáüOB¸¿‰Ür#sË QâLÆŒã­p™aWV‡ÎPïƒëfM(q¢ßÓ—6ûÖ#%A°/ôw}W!'K#ÃÄN¬-ôÍÿ’¾6äxlƒ ÂZQÙŒuY‹ÛûO8%ÇÊÜŒÐÙiÕn÷óp¬ÒÒ5ì3c‰¸Ö¾‚I&…r ˆ^ƒ"rP¢›¥:’æŒù¢h ½”aù@D½ëŽ/¯Ëd²’±pÔLS x˜ ³^Â%.£j^IëÕ)X¨\ë¹¾úŒ÷rÉ£;5Slûü42äÀƒLf`èJÞd~Âh‰®E49­uYoÓûnNTd…n³RÙgüºð{PŠ]F¿!<½‚œ-ØŸ{¶þîGN[k;†±9ªRêia~Go~…^í<¶O ØPûîi]Úwùß>!ûHZçå,~ø¶š76“v– b4²ˆ= áíèII°Œe5‘Ž.ÁL¢äü¶ù*€û~2 yÁÓQK¦rpãZÞ½?/W¡+þt`ÔÐûoôœNe…(›„äöã>[öLÎp'ú‰H½XˆpÙ㙂—ý’9™U>ÅmÌ:ý£2ß=`O"–Ù¥Da­ô…—âÐxk‰äU­ K:ñ1§»²uâr ÓÛ>q'SÞ™óÐÑ9‘¿m)!ŸUZßʽ¨ÉTÚç.s~Ý–Ð {•áÝv›QK ß2mâDD9Ç©57ÚX¼A\oê9B÷Ç”ðÅepÅßÚ_uëךeC õ wA82mW¹WôÞj5VŸ }2¦žÉœ & Õ“G“@T6–ÿ8"i9De<ÿŸ“Ð¥ðˆù\—H@?«•@O^û[kY<'êhŒÆÊou…®¿VÝtTXmao¿„FCWÆÖû‘ÆÚ|Š“ÔøI÷D†@l'Œtu@´É>»ŽêÍNÎ1eð”L9&Ô¿žQÃYªò†ž¹rííŠ,ô®(Ä8¸ Öf¹Ëy»µ¤(>@i:¡%€a¾ž1’Rbé­ sU¬ÇlíÎþðMtˆR°Ó I¥žŒ xBÝ«Ë 
oÀØa­§Øaùüi.:EÙøÁä)ÓZ(’q<€Ynf{¶gËa‚æ†yñ½jÚœ4oEظï·àº¤"Óe,",~*¢”nÿ±‹Iz·¬WÂ:N³ÏÇyGŸôÍ"kå;?†¶s«©&ðùJÎÐçb›šUm'H¶IIWжE$`ر>“vt•^€%2µQ[4Á=ÐV@’ºjOˆ©Ïé×oßÕi£‘R­òý=Ýwèìû‚ÚÆ¯ãftªl¥ñEýAñ®Íø'bŽü(ýu"Qš—­¤2Ÿ1H(lg-²Ò…ü ì— d1ë ·#Õ‚H\8ñøbMF3q!ˆN'WwCn§ýé7æu3ýÀ³Fvl¶ª>ïmàµô½g RB?`gò÷ý ÁÚ”O¯;)ާ>âô®G¸¤ä~GÚÎ @6⶯Qý»¶1ó( Óдw#’¶…¨S±f"²ŽPS²ÇÑѹ-^J åÜаâÙPè´%«’Éœ,¤4ˆÑ$˜È¡_V,Þºñû¤;#`*ÌP´’S3Ó…qØ]ábÉ á-ÚÃ0âšäÃ.öXÑ<.£±…KïavÐõ÷¸bN3¥´¹‘m¾ëT |PÂ+±5DL·C,$ÝXew¡¦Û@ÂÂ`"Í<͹´ðäûíFÈ™Q*s´„Ý9sØüB—qŒ‡(‡¸€§ÝŠ)"}³‹/KÕ²¶hœÖ-0Ä™&sã¥6º•À¾œEù ‚¬×ÌN‚CöÉ X^T©T¤êLÅ÷¢©»Ä~+‘{´U;ô‚VÍI¼þ94¤Q ÷±þøû3ß `ÕXŠÒ¾µ/"·oê?5e¸ º X&½ò×¥‚b+´f±§#›4”Ýîò…}€Œ=‘$ÀéÉf¨ä•rñ0¢zðÌ”‡TÃH&ÀÛê„ݦ"pÑ«Æ#C©H˜“Æ3´ŠVÎ&,Ö»÷f'œFnâÄâûrܾ*±ñ-#Õ‘D® /sɱç;v¨Ï‘ A\º¨¹hØj¿6âÄ;ó-úßKøV¥…T9'C6¤«±I·'Â¥J) Pr~bôgR]žiÅ“5â÷}ÑúÒ–³Ø(Û8•. ¨¿MsQD)e”pO ¹>C qH!dÿX°Êqe(¡‚NzŸ¾ëxô+X“w¶çmQó.—º¨ÀN#3£¦-Ö&Wp–Êö"ºÿ}KæßôW¢Óì1Fßîñ Mñ”­ô%Á±ÿá2xƒ¨)2¿4çuvᡲØ-↹3òÓžéµpD1áË©[m§ƒÏ‡[ Ù2û©–¹3Ãë7£ö9ÀE´›ö®Û[ˆ¼bcZƲvI§õ—æÌ §näHñ3®Ç¹]Om/1 ½"Q?P^ÊáŒøa³Ê-»¬ª"2ÔªFªŸ“œ7:`ퟯÏV·]uÄNª —z~XR¡Û¡üáLçÑQD†4¯+0u‘ó†2/…?üùÍLªçK³ÝíÇûÈ3^¬Üv³Ì""£„ˆÛã¨i½%=ªj_ÑD92ÝV Êu’QÓh`ü©n§ˆâ¹­çR±½wä;\% 8ëÊ®$²õârÉŒ|Ú§a«±ô­¦†mÜW-àÙ¨{éiÖY¤¨rÈiò4Á,볓¹ëÝH«—Õy©ŠA¯sÝ}Æ(‡G»mÅ3|Ø“¨IËÙ©:Â±Ž’º¤aB¨)Õ]a P4ÆÊd½èÇÔÖbi0&¹•ýÀµîœË&ó2mâºÙÚhOÏ::dZ\;'·°å6¼Æc\­Ù—Å-þði™Ü“S}¤×ZþVÖFp3UÿÜÉ£üÉ#óž!¿½ýŸ³(E-Óà‡"Ý×- "FЮwú¤LjÑÖbÀ¸Þ gIµ÷Ê)†=µCÚS¼¦¢°å‰Œþt˜Mÿ(eÿh·;¯«a0Ûʉ&¤2ÃÏç3HÓ¶=$ Fú• Œu†›+ƒr¤å"Là M»…ŒŸTG"H}‡3â¨߬t·“: ¬-ªlÚñ=-Mö÷D ž—Í‚|ÿZ&O9×÷(/¡Î› Ï·Ôp‹r©·w×bKõ<• ÝšGFëãÜ : e8æÐû…‡éUxlU#l*u£|Â^è)@ÛaK„ äXã׊%§rêð{È#ØÇkÞFUY€°Ïñã“­œf¼ön†úšnA’fÁ‚èH=-xµPÑcËšÐlöK¤e>Åäs´#‘X8o€•¬ÅˆÉª¼îx¤ˆa!mŸ]­=NIò˜"]›Ï*åÞoëv¢+ë&¾ž˜£‰›#2cK”˜c‘ 1kÞ3yå-M¹5‹ã ;-Ÿ.5Å^t&¼W¶'x³ Á2)GÖÔ¼!$q¼jÕ²Ù†DÜèÕ¥\é—4[àn¹N"ѧMjÞA"‘ˆ¥·35Ð|ËÇr;3œ¢ˆ ˱{W†8NgÛq²aêM¯Rç'ý_@Ŷ!…—E½›!d^@N•*úýL­ h9Æ-ޏ )ßþȸg+¬œéø9åàœîç+~¥ ub׳/'÷=¡S ÿÒg·ý·Ör2Õ(º®b.…d?!±@0Ñ·R‘{R6ÛUÖ á¦SÑ98GÄ*f2o¡W2L5j°è¡=9H†ý$þôÆnÔ¹ä£Wœ`†; ËæŸ$Þ#(ÆÓÔŒÒJOJµM.'§øùÉ“H!h8T’¾÷üèçÒÚ'W$õ¨£Ü4u÷Û1ï ³$§òÜtHÑÛ.ýR\@÷üÞ vÖb³oSšw+íÐ¥¥Ú¨t`!,麖<í×”éTü(œVÞ"hœÇfÑÿhb?ÕP-ÕX›°…tU†(Ë…E*쒺ʟ¡Ì&’8s9[M®óR],ª<‰[ë!@0míJY÷IGm©íjy ïa®ŸîÃþí&ÅtÖhv-Ç{ÓIùâœäº’Bq_®sÃ+v§x´–CYió=•dH¶ÿ4úA6t¤t´Í)8’66‹B)zÉPåû¿L2,J"‚AœUH¬ªæüØzh¥èçç.p=nlŒ¢ˆ´ÓÝœ Rø÷ÿ’4¸J\Ro¿•“òz"/rZtšL£wŸ·iÀ÷ÌfÑ)ÒÀ+ÎN•×{µø @¤«Í3Ö ÎÂþårè&7?fÅdñ. û,Uç4öÃä$kú’î|¯>åˆdoÆÍ¤½Æ\”ÁîÐîï¢0Ï]„Ÿ8Òµe=™^§ÃÔ]xÓªI†kz­Ò›á"žNÓ˜%p™æßA$ Î/«·h‰ö×~nÝúŠ÷m”D(kµ¡;1R– •< ®|âCü »ï cu§¢²ó^»ÁÔ&/NÜ,¯R×O}9ŽO‚ë‹•'»KOÎ0lÃŒÚnߘ†g+®çÜͦ‹¶ÇO|9}”,KÒiÔ/“k~x™ÛnÄb+ZìsØ­¤u4dÓä³·pyãx+-»n{ñðYî÷%˜»/ODq½C@þ7òooo«5¬?a¨iPô¾a³«Zk×ú¡15@}y‡º­U3ÆyUy6Öž‘ŠÏ¬·èÖ¿Ð:ð-"äÑG#K½É'‹bÜ=&›SKG“7è¼»N]óT…åu/›´¹ ¬­ž™ßç"¬U<3qªY. 
Aù‘Rü‚pÚ`%¬ŒœA,„"«¦EkpžuU(CØÍýóØØ^s âÞ4’·0 :æˆ[_ñû”|PÝlEA„;S ¬3,îžÜÈ8DÆŽYòÞ€ýl@ÐÓŠ`%ñu9pBß/x ïζ E&AO·Ðƃ¥m[§‡Ö˜Œ’×gWã'N4NuV.¹D¼ŽÚþ|oå^µß˜ë¼|G?æ¦d‰^iÉt¯Ošäp1Ž4!‚VÎç¨ ¹wþ׉ÜK]9|¢§”° FœLZæô¡rÇÓªUÞ<àÜÔ»"ik¯KløLTßéßLôuýi©m>L:íwõ½{V¾RqA­)•“'Ûh.¢6ÙIã¹6{:çà Ãó2äd»É5–³I€15½•Ìí@ç€oìcp×äóiÅjQÞ³:ß•Ná@“U9ÎQñS…pöÊy>ð‘…âɼ“㌕ Ç‚ÑËÜ`cŠš ÃÚì5Ic^ã£4¥z“ˈ{–VrpBª'+ÒÑ;<[ò4¦•·•oö‘3Œ¾«e¹¢G?ï¶·üSy0–ÅY©•2npEí„ûÊýz~£ÌÝ Ëʰ„$ÉïõÀ ³5IBMàx&³g.6S:·Ö§{8Ó¶.%¿ñv1ÝËÊJySÿÒ´&#ÛfÊÁtlâ€smyÿøîêœ~ó1ʨx£½š!ü~ä/`7Ô¹•!„wÍ¥$œÙ¥Ú°X©ì¿ ÒÅaï,Õ­|»Ê•½K¡ÏéüDÆDò¤úJDÃ{2äfüT¼FD¥[‹'žˆ$‰¯“¶?÷ƒùº=ÍùéÏst$nð«†S6äy4Ñ62À`vóÓCEûñoçŒg¯vxU¹$ÆŒL+EPžNÝ”»Qƒøs*ÜžÍè¨vá´jÏêË@ÑNÞÊ\ÌšI]‡ï™kEÛÎl=X@i¢Žö ©C?ñbNŒÒËtÊ^ZßùD«L5£4{Ålœ2¶âj3*³º"0~N6ö®e±†ÑÆ€Ìa";u45.ƒ¨|$3¤ˆŸ‰m¤dó)ó–è‡}þî:T°<ºW¯ZJ“Ù•‘õÛãQd´÷‡÷ÙKÆtWÙu:¾yr6Á*Œ(·C(oÕ„©¶®@‘ÚŸ÷(š°óÓ\ŠÀ÷·ü]ÑÈ[l8’ÊÃzX)K9Ô¯Azìk : Å|”îµ>x÷©Úme‘~ŒDBèÕ ÊÛì˜÷‹û¸îŠÐojƒÔâ´Rõˆ àxx€¯(ue>è)øé,G&Ê+äykˆ%u þÇØ+žŒ_¤ŸŠioʹ‡À°DqsÖ¤»ý¢ —ÔÖšû UÆ+c$’œ&Ÿp³1A ›Z¯z–Žp…‰Xc¼Ôó3=_Bî°G–aÑË žºH­…wž™mß–t‘ðp,yX¦ü™Ê5%1ú¨5·çfÿSžAÚ¦¦à;…u|¥…½¹õÖPŒppƒnŒ ¥$DÄ©ˆŒAÝ:YêÛͼ7hµÝ0­ÔÏ¿ á^QS@T ޵»üÃ!H;†¦µ©_  éeƒð,1€hšwNúÜ PQ¤·U–vë]`ˆüДýëfϧ2‘œÔØe¬ÖÕ%gǯ?ð‰hµ~t1–ß¡íªK3qêVÍûËþ0P( dm+på¡¿ïõô½‘³-êS¹›®¸ï÷°B8Ú–áÝ®Ñôt¬k‰D—ÙüHøú.êLVT¬½•–rVʼ?!æ¼+fUCüËG¨ „UJ¿ÓqDJð°Ò–sÅî9š+ß_µ%7_Ù²ù8„f‰×‘€R½AàWÝ+Ú#[´r>ÙPÔŸtn›XÏ?,Ñ;ù&'ŒÀQ^£íŠ×Û#šàßéšÜ)=mR ¿yÄÁ¯Ð¢dº³ïlvã%Ÿ`0{ØT|›¯MÎFòFF1‡>ë‘ßf÷t–¼§0!]K_’F½6tÎhÓïV¬>9»ÿŸ£7y\R€ÚþÞÈEч¼Fsâå‚È?¸üæÿž¯|Ö3¬Ÿçk°Ãv}á_O×å=0[[;e ƒvý6#ÿ˜õܘ.}ˆïifÝ®^Lç£ ª¶ç–k6Û\›‚P\ÍOK/~’£õ8í4Çà†JvgmƒË[}ôö ¢ 5÷Ÿ;O¹ø÷ìÆÒKÂOz{ÿs"„þÆñÖ(š%À·ìÝÉiŸŸçŠ®ª¢o¸:ñ— ]saR4ÀüD“½@ì#øAS¦¿Røšî,‚ŒV-‡è;~yݺ<7Dü¹´}¿‹Ü _ûhLá4ƒZñ@ø YIª¹õ$>Ìõ)ÂFŽRö”D [´…¤KGõd#•þǹöè·u¶¦ü€Vo2Çv‘~Ô%-Ú ÓÒôV¦CWy¢ !FÆAyÏþ×ÇW*œÄ@BI!P~’IšžÙµ\nq çš­ýêeô.2ãÔ ®ÑQ™½_!fíSŽ/¸Y‚Å,Üéj Ò™=œÎã ”ý¿Ñœ|Lnåaè.8¿®Ù}¿Èžm1ØÞΗ¦EÑ“¯WKQ–$¥®írÖ¨¬ ó~ÎðOÂèwh÷ šC‡ Ç3 åsÃÐ.5';ûœmÎ^ËòOÓ‚ÜøÙ @J:‡þÄMl¤Ž÷]Zä›þqä¡>Ý g\÷Û…´Àã(¾z’15…@2ÿxjõ”á5üþ­~œŒšH­ºõüxÏ¢QŠƒá³‰–(ï‰r¤òåчãÝú••c)õsuªœx&'‡ç*ªŒW8([A ÊviÕU©Cc­#ùã¾,™ #bOÕ0 ¿òWOXä³dM¨¸¿aÜ`† ¥iášPH¢‡‡5¥9^Ý×á¸õ«8¼Ê£ çÌÇݨ6>¡‘p¶5¸„XÚ§×"£fûR`â…Ò1¬ã%'l}è÷6h-ièdc˜G5™ ûP™›wÇÙaÁw5õ×vã²p< û„&wÀbØSª‘4Cìù,гHùRg‚Ø´.´È%êXúN¶èF6™ÜŸ,é~e ¤Ì*iŽjÃUgË‘ÕúbO›½”4*d&8딳QJPÙÀûBÁDô 6‘ò} Î !ñº–Á«@UÍ šh•[õ~|*Ô߈yÐý ìwÓ™`ßš]igõѹÅK³¸Û_~4‰¼k%ȷѺÌñs³î+m³Iþúg@&1>Þ!’%M”…`l¾x@ô‰檥÷Õ’b¹´—VÝfDY5ýìò"4XX–ŽÞ?ïNCùWöÂ$žÚ;Ùk=b£ªNõs‹+í+¼¯ãlŒ{™£åáöX½œÔ~ˆ=<Å&ÍØú}‰oÞL±‹²3Æê|w:«àÊC(a¼Ã"rûˆÝ6¸ö­Rs?ßÒ’> ãªB´6Ëvöøz&ûë hJMOv›È^4–žÞyÇïÕ!°¨Yš¾·à ›© oàۅlj¡§;/ú­Qf4ÁͰ&¿·é´R=ýû(wÐW«É(-g„¼ Âû\¯(^|šeÂOâù®ÞgZÇOÃÏë8»—¸gufÀ¿ 8×—P@¡ ÝéóÇœYR³9{&ÆP} ê;å²\¼tŠG‰Eþ 6l¬Úó¥ FuÎ2h,¾Ý–|%-ú‡DúA]ØSE×^â%ö}1¤ó5N‚—iU ¾mb>Sæ2ƒ¿P±þ¢ëgH¾‘ v+ÜïëÈÑò¢)+wEÁµ“£‰]l—TTÇ=ß.%ï_ØÞËN—]E·á†Ò­ úƒĶ ™Ý§*+Û¼YÈ΂¤ZÆp"­ŒÙ ÔªßÔŒn†÷9+¸Û3J2®éÞ€ ö„½Zëu‚LmN˜µH‹¿gsÝ%l; —Í :ä—çrv³¬’ùzŸ´²93°¾s„uì3 òÅÆ>ýyó à¤&Jx˜lkäF2ƒb*3mÞž³f ཅ`+äú()ÄÉ­Ï–˜j£[vÅw*s¨’9F0c„_ ScV-K"nGl…™$OÏJBÝ c¡ão†nŽñü%ÚÑMÈènÔQì² Ý® ~Þô˜ö'[¬‰%}÷ï V!ØÜ#`ÚÆ( ü.ie]÷ À«õE ’ql3óÖÞ³­CO2³©t汘6¶\¹Ôçx¡ü¸Åi%.k&sçsïˆÚ7ÑaûãØAxÂ.`¹Þ]Š`8ùhj$b,dì ‹[›á)•Ùñ¶á'<×þ^Î¥~J2ß Ü¼1Å(áçשEÝJO`2ÛK(žPÉgÖ@šû‘ä6Ȥñb +e…CÁÿû«D¤ vf‹9(Ö?~OX}€38[%“nE Õç·„l­ÍRožRßñ7ðøõ|ìÌv yo1/Ž4òQ—ܰˆÚ‚f×/s>P2šÉoü!:).£'œ­´†˜üCg:ëeNù•9ª¯\q ¨òuÒüÑ^öü«Îõ²`üõžIúöüV`Û†ŒðxA¶ðMóP9K-Ì@(Έæ ¬ÿrTõcÿÖ"ÒòCºÒ·ËC‡I×Ð=dŽL+ºPn ilåÚ%kĤLìá÷Ö­6Ÿ¶NæŽãIf£`êÌ õ•˜¿ŒžwçKÉzð*cüŽÓÞšnb$‘Eh—qi”‚Š“PCr¥V¢e!$ðçu°ÚÓÑèZo:¥aF .âMí­Íš&ˆà`†m‹­ÚnyØÍõŠ|€ë5‘ΚK 5ùRÝÙ\}3y®ù‚;ç–¸Àãâùn”t $xí½®uŒÓúÍê–¤¢j' ëþ¾üŽ.ôè¾:0]½œ$ø›e<$#Ûļ¡‘!á5— —çS!¤Ñä•coÑŽú·W“Ž’“£ÖX-Ð7eëÅ" å,° Z¥öü€T¼ÏHFñÔb, `3¸yÄ®y#1ïø¼ì _ù²ìe3hD&6; ÜÞ7å‚–[ö€Ôp–HŒ1Is`"ô¥ ê ý ›Æ5Ñã¸i2“Š¯Ï»£,îãîìB3p_µÐòŒ›MŒ¡n4/D pÛ[ܵge.ù]Ox" 6÷·ó±T<£þ ¬ŸÁÚú]µ×ÙlqjñïWƒ5õ_ŸË£ÔE—ŠŸNÐní$™ L&!¨õÑFr TZ†¿H®ÇRé±ÐLh(|ØÚÔOTºdM÷\²¬#££þ’µ|"$tDkÕþ#jñºëŠ)øí¹úþÎqÔ×+ø¿L‚Î*ò+¡ a::0.Þ$iŽ×ÝèéZ0*•Üs =Ëò²O‡²º `GÏÌèÿ…šßygòlrY–)伎áÕÓ\ð™ ©RîÖN³_èËËÝ`È¢Üo 
¨Û´¥.v4Yøù–©Äwei1yù2‡¯g¥9š[Í©þâÛûœiükE섲/Ä"‡]µÀ|Mú¦q 2Èë>Ö@éw~=n@`T¯—¹>»ÙeÁ76H°}“öÜñüºÜîE”#Ÿ7ñ\× ã°ºT18cÈ·RÀtJœ=Iƒ¿æÎîü=/rf½"snÝEWÛÛ.¡h‚¥‹›$ P¼¯ RóÆ„¡üXÄÊ+tbKÏ4JN×ÝõÃâ3[F€VβÎ|Ï ¸Ã—þê¢*°w'êü~GS­:£q/b=? ÞD|X"k§˜É(”ÿ¢¿­ü ~,H–â{†Q±|¹ ¥ÏiM³}r§4T xOl„(5`΂)¼1Cëyæ3;ê0wåÁÒdÄGµ%æ­åÌÅÅ›3ŒÞªÒŒ²E·¸GÐ2ÜN¼‚IþuØÙÍÏçE°¥2•y·Í}¨sšñï®ññl›Åð‡6Ô2“3ñMóoÉ»lÀ0›Â Œæ"¸àNb‰eŒÃEèû†·ªØ¡ÃsÍÛ!^îŠ7Ë;Êpå»ãÚz³Rôþ^GRìs¡­œð#²O/þ)¸„ìDßr¡륵òúÛN•ó$‘)R/ñ´`Ç`®çôúò‘åÝq—ËÀ{y´L>Ø1Tž¶队ã¥&ÜFÓªR›¢’$>ô˜Ä°·œƒ/¢’ãÒ/à°)AçãêM—ÇûiØ.Ý2D4Ø» ê”P›Lôêke¨º'´ôSC¯ƒ]=¦¬5.$•û†MOÞ™¤ß¼~Ä%7ÊêwÌ›&¼ücÀü’9ö ÛŠàCzÛ®ÎYâÔÍ7²Z#±“Û{¿»c@ïæÌüÚ «ôü;êH{i²¼ü1÷,j!Îþ…«Å"Œ–¹[#8sXç vˆ~ùxZÉRó¼Ïq\úÃÞÉä7½Ö\Ô+d3bÐJ »ÏM~¶­-ÚËÐ#IVÁî‚uÀÕ¢ Ã9'G­‡ZÎà­sV?ÖZ*#^V,d; õè§qÛHs=s»Oê\ÏZ÷­’ H(í“}ÿ{Eä o‘;k­à°—‹ ãiC£;žÅC Â^ªNβ¯Š ó~ J~¤‚Áï´åýa oÃÚXà™ŽYŒs³ õò‚>ÞgŒÐÜãRmmŒ²Éöæ„ñmÕ_êºÕÚcF"jç¦1¼I™ñÏw;ñ(¼x­húÔ´/.PXJ!ÄÏ Õùä€<LÊ’<wÚgS÷ ù–!^t-ßEÒ¯í½˜"¦N˪Ҋ¸7^òWª »Íñm qž®7žá .<àø½oÐ&¬Y?ÿRv †|i²g3Ó|ÖÀÞÓ¾¢\p¶¼HŠ«m<”ëp=X7¤‘Ò²Rµ4æ~bÖ¬¾q¢djO õ8`c†‚W“ÑŸ6i³iééRa£èC2×_SŒ˜k­âéä.23¬Y§C2)ÄÞ›ÔÎùm¤*1x‹Œ ÚÑÉô“9¢,»æ;¿¿;Þ ú¹–ì3¹ì7âkìÑ“uê§oß G¼³H³àéùüü¹=ÓÿM©».·W"kG;@ã)ØäQ W³ä³ó™&¯¶ ³_ÃD‘JOï¸CµtAÅ”äVûˆ¢ (6xÙ–ó-ýù“kœh:0x?)_âS‰à)L᫟ °ä宋éDÎòÿü6•eS+\L¨ŒÓJÒ‰Ìi©«„ï§Ù‘8¡úö‘Á·³•TzæäùAÒVÅ¡¦TîBrΤ>HëØê“^ká"°ÆšßS&ïÙ›GhÂBÜþ¹´ð"¤?¥È.q~±†o¾ ˆSÈ‘Ê&ñàˆ⾄*Ý-²M„I‡Ï!‘ñƒŠ¿êd*ƒ~-†éÉú½ôÕíþÈù­ønJ2‘ÚÆXm˜«MçsEB(Qœï„ðhq7>æUyÝ?˜¸•Š,+ïÛýQÌÆQ¼¾¡n:cЍÕ¾VÁå¶Å܈¹´ŠK›’^*w ‡â©f­.p4̦ð}9öf$/„ýƒ¬„q”xEÌÁhâ°Ó*€ùÚ;?äá16Γ×~,¾ðA (¤ \½ éº{¤Ê´ìˆÜÅDgn6 ÙK³fH{QÆÄ"¡Ñø½Uô6½ø³ºtLˆmÚv ÍQ)ƒ9WñϽ.ÑÎì@U¾ikµOù2Ȳ¤SÖ—_!ý¹<Çé'¾1ÆàËp¶$-«½»ôRJVjˆNf¤Y[v(žc¡€Sß¾Ê-U¿Ô~s²«œöùîÛPÓ´@<ª3°×¹ sÆ­¨­K÷´ùƒ(©Æ“QÒNEû×™¬“Š*ÔÆiìËÏè$Ë1Æ “…«â÷kDQŸ”žÏòÝ_.¦R0À•΄æ-f24KñM —É‚ºYl×µfþ/ÿË¥vÉa`€ kòmÓo¨3‘ÛÓŒÒü¾ºmÄJcð„{ 2PD[ucîUÞª$£ÔEýwo™Ú;\дä UÉ"ÍE®ž ¨ko£PÀrâ=¨®+D‚Å8çÓ‘ÈÛþÄ4Åî(? ØkRæ®Ýk…ûçpÙX)õ…•è†îµ¶˜êb×'¯+♼.w)ɦ¶É'<üxîdÈ…¡ÒŽÑ…1¸ SeAö껃ÌwoÓŽbú{i®‚µÅõèŠØ—޳(ˆÒ!´¹v¿ê 8àЉí¿}Åó‹¹ej©âæð"ŸÌP…ÄOz™~ ^J)<5jbòdvfADQU7ÅØ¿šY¾/©’‰‹T@´Êö—_@°IÉ“ Ð'ä,/K[s|²Õ%-1½F¿…¸Þò•¯Ì÷T·yd7€wBúg¶:yŒ)·u‚ #O>ŠwÀŒ©~{´&¬< &JÓ±¸Ém$óïÃLw'oæi‰_õµO¸i¤ò§79Z±—ß ŠJ¼ÃiƒƒÇ½yC'£¡O\3&¾{|Aœl½Læ'€7ˆûnçÑG¬Ú¤Pà¾B²$-:cpƒ¾_Ï=4ø‹}F«?Àšöé dž5Ï«Ö{åù{NÌÖ0Š©ŽöCg´3kÿí?TPŽõ6~e¤—íZÒÒ(¹Ìbì8T 7b%Ÿm{QøjøwȦÎP/S%¼tÕ[í$g¼3gí´Ç௃ª)—-Û#ëƒÍ­êK€‹ƒö,À5œ.ƒ¿e ?OEŠmÃÚHµ1²ÅSº'¸Z0¼jùË¢/d «r‘ÙT-;‘Œéñ5Ž/Ú’Í–a1k®âòužNžÇé9#O不a)>Ö:sÁ´Ç/|ó¶„›Þÿª ct1ŒŠ‰KC8¿=–¡`i¶wׂàBÓÚjoÔCî‹j¤¤>Ž ¯sÞò½àÚ‚Åü¿tÑôŒ<6ΈGmûš°L È-d€ŽmGJe…‘ü?vêlgˆ ú`‘˜fS·™t„(íÐ4#vU,X54a^Ùºô´{¼ÓjÎäpîTB !1w¤EŽ_v@¡i:Õ“ 2Wv ¾Ï3̸EÐÆûÃ}ðÿ˜Ó!°™›é{ 뜗ˆzH¼˜Î{}Y#x^T¸ö'}ó†5 3qZüIßv¸ b zJzrSºÇyXQù6È’Õð1º!¨ðçé³ÝShž›¹¶\þ# ae±R –›+>× î8%uë´èÔºàÙ'RB~ÚÅ+pÇbQÅ'M KyW.Š?ÂÓY-\ÕÿÛà;ÜJ`fç{±¢zJ»š¨Þ°ŸÇLs¡²FS¢š7ö±Ï°`“ªßÄšçj\XóÒ!’ô4, Âã±çºe ¯‹Q4ç‚ÛÏ·¯q8õÙæVÓ,|—h?i(“á}þÔÒW#£ð!H{QõUØØ° ‹äÏSÍúª÷éVëˆb™³DÊa‡e“ßc*|èŸÙñ ta ¬·’Ý«½…_žTJv:$!H´â³Ã©  ³‰D UûræŸãÓÄeö5AcûœeÙh²4Ë{ò©y™xÙ¸Þ›ô ðцKœi ]Æ3;n9`[‘&3æôˆVT¬Á;Ae÷²¤dàчºÓfÈE¦þ¾8I½ö};ò¹V §Å®­>Î9=Íáë‹ µCÞ»8ü‰á`Ûú§mÁ˜™ LÐÖjc™;ÃÚPñ! 
´ì‘ì2.9È 'Q% û5… DlÀãÂ/ª*ˆ‘FTø"Û9îNÁW, ÆA+BªÍAÝú»G˜”j}>1~írœaªÄ£ß÷rq–÷0ȦÃL#h#Å\*Y®'›Þïì9 ·æ®•w¤-²igcï6g`£U¥üÒf"†eÍzþþÉ× ßlçW4ƒì›µ1ÿKn{È6ŠÀzÁÛ¥ì;K™í±ð-9¢­d_ï“,Yæ³ÚC¼O¸½‹|5ú_ Tr¸SÂc9èÌ^l‡EîÀªØ®øßHBÊ¡Ôl¥“Öº{ÚôéÚöÜŒsLR©ÍϘxÕQÕþñ•ÉRØ´w\êúéF^¯µÔ3¢E¬šQ•K©†/#sÅMLS䳜®ÐóHrá×÷ÉË¥V1žü™X\‰bsb˜³C¯‰™`ã"ð¦´Á” óÏl*HµNÿ‚EB¼4ÍF,Yšê‚¼•uÌÔ>㶆Ș„ I ʦ7©ž^§5ûmÈÍ,úuŒ™á…ÖÎõð0Vé6S-tà‚51å$7«€=™}9—FyDü¯É&ïÆ2¶$¢W"õ‰´7Ç Î_(^ŽŒEì 7`)ÁêAÌ(¯óÉÞ½Û¶ÝÝâw„ÇÕÀþ^0±¾Ï\PGp$l´<> IÖ¨zñ,'…RÐä£sý–NC:*‚ä.là h`t°ûÅÊUõ&±¼ÅÃ\±\7¬e5½º~hÂ>¡«·OÎ8›¸çBŠ¿ð%ÖcÞ †…ýáã-Z˶ú\•õÒY!£–A MÃNƒÛÏåòn(IÄÓƒïižô»Xbä;XÚ¾hTÈõþ€©u† owZü‡£ÐœTÚQÁŽÞöF¶–zÚUxoeñpŽÆAÞdYFQ©Pr¯¼¬€Ö”.xëees«*å>‘Uv)ªE;4Õ#•aÆ–¥†Ÿ©_×!•rƒË†±¶âlµÙŸõ×ÖúHÈàDBLp`å\½´$< #Z•E`~‰ñ]'ÞÿfçÁR#×LG ohº–‹þ°Ô@›†—±\ Æjðj¤Û•¶¦èçÛú'3_ ÉÒ¡|Q-ÚëÿÍu²ïÊú˜º>!PŽ›JÙÍÚ) Ïù8 š©½Ê¥Q<€ûëfžgåi Á”*åX1Šõ›!i;ÿH} üt}G²¾XzÂÞ‚³`”«àÖŽAr{œŠÿš<Ð%š8êû‘÷Û²xü:›>Ü¥úIZ„ñ“VBÁÔ6éÕP›Î«c•™?†¼òÊ`°W(‡…}€nQoXbôš¡œAÀøv@Nó€.jCãKÀ¹—¡JéÛŒpÍ>ºåß &­ÚÖ©nDÂùzŒüÕ¿Tå™´æEz€Æ¾-Š#‚ŠëõTFÍ °E9墌I]Á2:’Ée‘xOr Ûz¤÷(§_¶Ä%ðe|,â=ÚB•¾ùx»ÛFB×»ñy,ì$Êj©s_Yÿ®]ZjEÑ¦Š¹SЬqŽÄÝjÿWüý¾ºÁws[ÓÈ&Fã”/<”À5õ-@”ÑÉÍ ¨Ë0=èŒhR­ºc¥®IA_æ¡°4gñ* Ó‘­2CjPØ50À̧W²h>ËÆëëÝgõ ìƒõî­iliõ0r@Ò´‘q’z £½šŽæV¯ÆBæ˜# MºhÜì1 ì"ä°žQãζ[㮀²Íøru“ÇK¸Qaÿ$„ÇûEÊ»Ç|9ªæ »ÿ㟢-¼Íû247bV¦›|*xapt¦ƒ°Ës&Ò.ACºEÑýdüûày-ý9ò°u/äv1Û†ž5f58Ñê$T]©Úâz°̉zReFEÓºZ €½`sò"¥ ÂÀLºÁ~7ã(d ·Twy!¨šG:—ç£ Ðtz7‘\¾Ï‚ðMtÆtãËüËðη[ ÇŠ™òZSÁgò~з´¥ì’¡T&sÓpo `Î5‡Ö.ãZM>h›±´mh† \Iš¾n@ýo…,Sãj-}RTKkÊ/Þ;óF@š˜'ñ}…@ö‰B(z|>†ùœpV¢ž`3–dL°î3¤ÃTfVº†d¿ƒgÐÉ\EùÇï9ÆOàæm}οœÑ•+-åz5¬Å6peQú&r†p»¾o,WJ±å؉e‰€´hŠ“Y;ÑÊ~~ !dúk%)Ò% ú½0Èxÿ² G ñŠ&»ì·4d“Ñi˜Úp¶aô˜E”"N’ŽQ¨Õ´>‚*dmÔ´ºeìç©x\sñ6ŒÏtqh2鼆rmïƒU‡Sž•¢æälïÉ—ô…LˆJOÅÉPž·ŸÆözaî•Ë^Ï}Ĭ‹¿þWš|?,²L¼réçöi-G£=.•Èê*tá¹ÎQ®ÜïdõOYø<ämуÚjl©=„šVúªõBUÀ¶ÈÃß¡FË&uJ¼µ÷õ”Ë”Jy$‰q*ÒD7ö.݀涷ú¥PoÎ>0û5šK‚õ&•qÃ`×¾ÑÄAÜQ«/Ù¥ ÕÖ4Ù“êJ òŸ‘óž²5£^\ÐÞAì_Ñ}Á>Q† Çýðˆdô š&Tᨊ<ÿªöë/ãš9p„,Ï}ÔÝ/êIbŽw.ÐÕ.Ð<3çKûàÉÍõYÏöf(›JþÅè™þÃh|a¯8°mA¿€¸ª¨ôÂ3àÜ@¯ƒ~beP)õ<&Øi•TÑU[Xhß]Ò¨œ\Û2*ê²þãÆ“ù=D)_%·H^@ˆQ#ÊÆEJøkEÎ÷½Ý…^L›ù"t)"ÔØ6En}å…Œv¶ïëdºÕ{f=!#5ÉùGhC” ¡ »JgAo¼¦´…ê55VÈ9ÝIKñøO\°…q’UzªXnÍ^dU>,¸æï`þÔª«øšÎ;²•«¿„†ë¶Ö9™1ú}9ø0Êæi 1¨¢u:5PÑg”â‘]ÎÞyójö/EáÇèÖH×AVË`;ùAÀ ¹5ô^¹v)±U$4¯¬‹öu<Á’+&ßrÒ¾NCt?xjjßÛ* › Dwáúܼ#)®‚ºî`ut{2¼Æõ \ºxBô/h˜Úi:gà¨v(ÎRföß(»`ùq¦føö²¨J ²ÏÖ¡´£ÁÑшœW)j‚N~´_ÓŒ>¯TÐüßðÇ_/&_ V˜Þ\ŠÉËdvcFÑ™=Ñû‡¡[ñ”³Ibæ–¬‚ãpì=Ä«<Ç»Ãw*Ìþ€üÃØk&0úË|ß3¢bÏÀáÛž¦JVôG‘ú"æ?+øguMCgÖ²9‘å©—Í?û|DO!{pë~s»X»ñü%Çd0 $·dÌzý°Îކ ‰!ô’´!qE.Â:Fu”§ûWlr5µCD©>U‰ò–ƒ€Þ*ÀD}ì6.8ó±x†ûeýrü%a^·«äj"qɾžßSž×{Ü9__¨"‚Á<'–>îçhíÙ××­Ã[C¥Á\ŸÓ•ϯCzŒ¸iÅòAlP˜>†7ŠEÕêr©aóf¦6aHJÀ.±GN,Ž 0ärc9ë]qŸÁ©Œø/ûN)ªú–0ÐÒÍ-Œý0–‰ŸuRµí#‚u¬•’ §fä*—YJx¯ n¾HÁÿ¦BðÚ¶kØdÐ\Ôô²›uëÅØÉZ q¨©?|ÑÀ»­¾Ö—®qþ9 ùYv9ÉN3e-B·P ëàÛŒ#ÌI•¦c9UÛ÷éóˆëÑð]›Ìƒ"½ OÂK:%RÈØ¯qÇç@á ZªM‚¬Ùk¢Ô’&K(òÒŠ,l* °‡§Ü$PtãŸónm³Y<^¡Ád þ9ίCŽñšÝÓ¼Ó¿²žéèλÁøÃöÁŽK j ÌÁ0§ö³Ùíä‡g ”sŒõVߣ9ïTÏÍ"IÎÈ1GýH` eüYúæT`†?–Àι¢º¶k–4ˆa ?¦ïàAè£Í*ùՆɪ…±(Ræ}réàÁ#jÖWñ%Kûñ'ë‡sƒäÎÞ;q:ÙÜ`¯Í›«'ÈÝÞß>Ÿ’hw€ªCúÏþm"¡ݳJÊå:ˤҼKiLS©Ó‰ BÕi²{Ê!ÕÔîïs¸h~˜‚j: ¼’:¹Œ=€ˆ‹»îǼì¶pä¼ ÛâôÄÅÓ0%w$ZKÌŠ¹ Ñ… æVºNdn· Qµ®zñ5z÷â>"0MQuÛsÙ†WŽ9ÊC¹vÎz$V­Ÿ¸”nhÐN»ZÀ\[[Ž6LF´&àú&ŒpØÿo|ØÂø¥íË•Çî>(hEŸT²ù!©Ð½žr Ïâ!È|oZ˜ Çq­ýÿ…t™úËJ†Ù¯mzÃ× 4á—®‹?Ÿ<Ô‰ EÉ/^ •¾ ¯‹H8V&Y<ÖZ˜„ÅÃźÄ!µpŒ‡ôr‡ìᆪȟ£ÖÆ|ã†Tý¢˜Õºž¼ø£ù`²¬r%˜GìÊ,6hþZÖê»äüœð‚¿Fë?´=' Þº.÷øG‘îí¹5„øä 8¹äÝfÖŘ´(O (Vu+³šÈÅÒV*þ8Ýï,]*á;±jÿêßlüÀÇv.û@¹—å;£ÉuÐsžºðlÉ©~6ëm4¡# £[c“…„k«¸gô¬!9åžLá‡:~¸¦òä¼6£0Mß{¨ª\_oÝÿOøI`Þ«y6hâ@ß‹¨bä‹cþÖ$ Õ¥rG„Chi 7.£ÿÑZã“Ï!Z¤pNÆ”wÎBBÓ~ò´±=KÃ=)Z£Â1Ó}³å0À¸<€ØÛR¤\7£:<Ò+Sê£=Ë¡{–ÌÃÏ '¥o‘ƒIñÎE6Ñ sõ‰4‰¤ÿôÄš•Ø`÷÷+¯åÀeÔ¤¥*FÞÑl“é§øÿe /–rú¸6BÏ##¡:ŸM«=ã"q¿t!ÂQɺÒ+ö™fÂôÜ[®~ëéâG›dK:^IšÆi½AÐÙgk öUéï e(öߺ–ª­D’ ‡ÿ¤ öáLÎr$¢’¿Â£ÎYRž«¾\öyÉ•D·¶ÈZ»&šÀf‚cÛÂ5¿L@Ïú O5ßìrמ‘Eÿ֚Ö:¹æXÐîhY¶p‹QŠçõÍéÄö.Ó‰l¬&íbÌ TÖÞxû.0¥È–D$«J^žçp†çÁ[š@óPÈN’T0¡Ê¹¿)CDõ‘Ð^å­2¤ëóï*öòùÖÂÿá_I¾Ã¢o%X“+ÒŸ­ÚŸí”+jm[Ûï»ñEa J’¬ž0¶ %~gßAÕâ;™p`~µƒ½8"ø½ 
^æ»ñ˜Íï!&ˆ¡×¥¥{Ü2`<ʼZ'á\zc»H­Áþb­Y{¥=›™ &Ïš-¹Œã½½<¨/¯ñôx±ÒŸTg–¶A÷¹]ÈÕ\¾€…¨0JU?Cì z¥7%hž=Iã…˃QJ !ïyÂu÷YÞÉ“RÔ®ÌDù(&ë )ðãm€á(-gãgüÝ)EÇ.ÞÃ9ž¶%`úe[3—õë­¾on¾Ü“’Žn?>Ïk6ÀËd þ·ú·ø`}‘ñ±ë¼VZýËÛa.;ùlm‡ˆÒ–0r°l7(2Í/5¢îêóZ )ô]ƒk ‚NVâ!Nïjrnö ­1¹Bˆ£ažþÒñ>!^#Vqâ})Å€˜»l;š.=¡ûŽQ¹ÀŸÌ5¥tØ€r¬¿.ßâl f]{Tx/ç°´3e‚ĨE^•ã zK1–Tb‰ÿmÙ$~A¸¢¦þ ŒAËJm¯@ñýóôXíí¡ÝxèÈu¨Ò1Iqa¹œ>)Ãwžb¯ káV°ÏlÖ;;î^ ³æ3ÓôÂñÔÔnˆa;ÙéÿRK~áƒ(-ö1¸þ¿hr¬×:YÊ‘¿5&ÏÙdGs¢Â0ÀjNYù]ñÿ$žÏ.±´þíÿSlF2ôq1¹è#ê9ÝUÎ7®A›a(„†¬æYæºGn/©›'„àJòáqøD)ÒØ.oäêÜÞUÌå~¦EÏÞêŠË›\Euƒ03ªº’ÜïûHÎ6{c“D]DwUªj H¾¿é ]A,4PÂL:6»gȹBy¾YCỤ¯´ÜhÁ5yPûö+'dóDü˜A.úU]·þ¤•w@•'\õ¼é3p³v`¯0@ãêçßb_’ôx²¬ C•éÿ€÷å¨ÕÌþ-ú2 ý{q°Ö2³—ʇP\ Rè°àáÿ-—ö6#©:#lUP÷€í˜Ée0ŒšÍ>Ç?›‹¹<ÍÍlãzôÚ7§sQIF«ã=Ãs]°¹­¨“6Žóàd­ú-¾B³"×ÇÀ‰AÑ*xëj!¨‰JðãΤÜ'éE?Fî4‹mý»°¶”)껣"ÐÃΛïÊàþXÑß$9Ï~ÊP“'µ7yÞ^”•.nËŠ(D´ä6±ÿŽŸÑFº£vÁ¹KÎXð…pjüŒ-Q´ ÷ këúÅ–ãÂðÖ$ §ˆÁ k·È6 ?T#:Ò¶-Hh. kŒb>Ôeu¸ÆØa°Ü ÷¨V£ÿ‰6„ª¡Ã4ˆ %Ž¥*ZYá”z#`Ø÷’xÊñÛÇ6@7æ®M•¸Ð¤¼Y=öÃÓ!ïV¼€Á¯Œl»²ªHŸ¦è†`¥Üæ¢ ÜÔIj-f5—iZŒý5gÕÌòo¹KÞ­xeÜ,”a Ù'7"šŒö24­ú‹Ý3GµQŒúeeGF ¨ì.o'gÄlØ‘|[h»‹gÊ`/Æö²ƒÛ»˜–*"e·–>aá.‹WµÒßtÔÂwôŒe ,Ú5¿Ã½{#ošitÃÆ®hÛ‘«S1祈W5`RWV¾c†wç¦ó:´J¹¡Û…ï^éIš6e«|ÈóU~1Þ…YDååjuòYþ´¿ªoxqÝüYŸ:!?¿iAË*`£é÷¯d¤UÁOºø¤õû£Ðë×#…^ ›pˆXð{yi¿Û) ‹yä¬% ú×þR€6Nn2gÍã„›‚°£ø'hÄß ägç?ë{ƒ3p^fõrà…4¼´¥&)–v1Ú•$X—K«w AY]Zà)[M'kKz¥{O Û?älîô«3¶©Š†ÙLhVÈfRóÏÔï[d%"å 9 T ŸŠ{4‚¾‹Û“Z.ojàXQÒ«!0@>1sމƒ…T²eÂ`½—ÒKÁªsæqÐ*ºï€ækÖ¿ÀYÆL˜LýñõK‰Á ¹¸âݯT^ܱl_M¡°1Á®ÚñöÔ0&öfÂU:ï[É”¬éŒFˆ4š¸ßv”±îè[‰1j´¬¶“Oùï½H$ÙÂr²Ê€ùRìÝØ5 Àíc¥)Ù2—÷³ï#³p+,q9É6,ΟXƒþIÆ,¨šŠ8ÔAº À¥ÿ.)££5Ÿ¨B& &<¨¾N-„‘nNÅ·òI6žxhm¾HÁwû»‰î,bòºÿ­Tüœ½c& RÕã§|XÂ{5Î}à«ëoÖ#vÓXûfÃâ*íç†*“ ͆Ç'Ÿ6cGËöR“Ã*E%æA\ŒÜ6ÄÌýˆ´C~#ªsz€H]JÄŠ9«R7>©{PôÜ#‡¸ßGϳ˜õ7yRÛÒ_ÙºÓõó¡9‹jÄÇ;Þ0аàY™@ÊCó€*À¶?LÈq¶"³ÙÑý+LG Uûé‡ážbp4G’„<^äìÊŒPpÎo¾}ª„0¨¼âÈ2*j@ù›ú/RXô„rHrÚg=-F¤Éb‰nTÐÕdHøÈáHHÔϨQ»…¾ùEjpÊO‡Êʹú–Zi¦ØžÉzÞ]?U§wuÔˆt(3ɱ<Ô/—*ÖìWRuÔ—´^({Ÿ%öÇhY˜ø(Ÿ î±5úì -5™l×Ñ„r‚ˆ×HçuÅÁö%P)èiÕX !ß«»æÓa_˜×½™¿êÑ>´6Rrðµ#0¨Zˆc5[ah2ßÒŸ@ƒ$åÇMnË7ÌfŸy–l)ÆDŠ*æäЍÐlLÂÇU£œÞ<|A7Wƒ ânywKŒÜ0Ï¢ œCRß¾“ŽÑ`¹¢§¥ ÚÕ{N$‹ãC sgDñÒÙaЧ@â½_ú[×4÷Wg¿WÐ]¬Ú>j'£yãœh=š,Ÿh]ƒöü æ¼`‘s(ÖŠâ»#—»ଅÕ(Ñ^(K‚zçÜRZðÂ(löz5²xlfÍyulC‘ë§qÓ`¶Ð=q™#Xÿ â¯<@¦Êθ¾šp9Æ'è-ðø (Æd›ÎíQÜ$šï³ˆl¢€õÈàíF C4cðÇ)`Ã3bK9;3w8tßa$âE¼x·ˆë£ ¿ãPHï*äN¦¢°ô G¶•ä/Æc"åO€¾à¾R7 6c )lôÎAo…x½Us+9 #·gìß¹Úáohp2Ä3û¯G÷ðÁبFªy1.£¿çFÁ:ÈÑmïùƒAâÔƒ©âãͧ¼R`xÄî¡"xÒtt ñds© ØpëÌuóÓÉAƒ' m¼¨ ÃÄî:»‡vºZ")ÊÀ:jê÷ÌI:ä"…­k Ê«ˆ|$µåPù ÉÞ$Ïm±«EËÚS!`]ºQ#²¨Öðÿ Ì}R%ožÛ£Í4cnä™ !Xfß.G-j‹òVH ‰¹˜ì­¡ãwƒ¼1]öµ¹\*RþRµ$ŒAá­F£ýË_M9Ùàá)rð9€Þĸ÷îØðsAMæa áÖ_¬ø ºç¤Îé.‚e yaßþÏÏsÑB›NÚ[¤0o°Ïaw<þ9×(æf'Q–*J6Š>Z|䜋'O*Øxò‡ªŠhÃ%'³¶B¢r{M™‡;aÅäÓˆÖ%§ÌôÊÜ:ÅÊsòÃÊ>Ý4ÿ`âÎÆ…®‡ŲÿGùGøº7_+’ydo?;4Œ è🔸ˆmÿD„˜ßþÆÄ»Ëôë_gî¶ÐÑ$/…ŒŠ­–´o¼>cÿ(ÿäŠo¹¶]€ßƒ:í× 6üýq3LCW‰·?'8Ĩ=ÕöÌÔÝ€ìøØ~²ëxÄËöÖ¹·B>¯w­¾ºÙ0¶Àí8ü=(º,¬ßÈ´˜ãU–-Yj.¼K»ss1}9†…qÁÙ»»¨7•y'ýËMÚã×4é‡z»‡ƒõM :8ý‹Ø—u]k1áyžÁéˆÕNˆzËí»•ËÔ_|ì Æ]p()%'•uÅÞѦž­ºuQ¬º—T NwÑ'åø9)¼ä5´pÖ@V˜Ò!‡XÍâ ;>MÅJ—?<„4¯BšÜÔTÄ2Ma9/ó‰õü²O`½Çž2¶vWŨ uCŽìbÃê…ìáüs.À1ËïøªŽyŒ™wºê©±Tð=ÊÝÍÂ^šÉKóšÍ•0®¬°®•µ‡jâçN‘±Âì~ÝÆRèp±¯RÀ{z*#°–9Þ£÷*ÿ! 
¡"Ëjå;”ƒSØ/’£ V\ÿˆrƒ­ªi$èB‰5#õ?öKÂS¡?ñW'Þ@Ÿã5÷%¦Ãað?BŒOMÙ4Ç0'õæhô|Œ Z›d‘ Â0ã“^ŒWô!ÖÅgsõ¬¸uÐÓñ%`án×’<Ä„)sU)†þ4¿¿ QíèìÓÆ¤\á2á*pÒ‹˜ÖÒ‚3\èÎ\ÕÚ1©6zŒ^…Ÿa¹ð hgùX ð‚^' ¨½ótÐ’¿ëãaœ-ÂÍ¢“lÏØ¿ Øñ¨{ÆŸ©~Z‚^¹S™(ªšÕ²#Í ([LœQ¥lY"?ê*É\ÜN¤¹}yÝ?iee•t”Ä(\ëÝêÀR:£Ãr¨çmƒß;"ŸD ÆF,$™cË/èÔ®R Ë{HJ™À Ûø'…†¶t‹kë9PdÔ´ûcŽã…bØóª2D[^Õ+9+Ñ(4_TÊ€Š5…?iŒãYw'‹Ä>ʉ ÀÙn퇽èÛ{õÕ¯¼7Àÿv ȬúËh'æŸ<9³hF(Ý14k¦âËöt‡M;eKG¹*IÊ®’éñ3Š—cqq‹GŽã7!„\ùŽ©¥ÛÕÉMºh7×ß7ÆaG?¢B~zYB(íÏáëÔÓÜBÖÒ"¡ñ¯ÛǡГ( 2”ö±|É€µ¨ŽVòîzê7œh‡ÜÅÏ­3âkÕF.–=õÄ9¿:F©Rhÿú‘þú®ÑYELÚ„€lm=d:9¦7Ý×~Œ:«dïkdÛ,¹8Cë!<½Å]õã C¸jaéÚÕ8ÿîBÓm!ú{ â~@^¤ %Ú:wDKáÉH…>zç-d¢!2²C0ƒu«‡UËÓI!?«4E€û–ñ·²¬ÅîH%Id“ò†øéž0¥½’lвúTçò'‰Qyè,¬‹Y¢8KS|›?²t\B¿l×ô1'·Ï:¹X¯C„ÎIJ¿jzWîÒæ2ëXŠk­{°ÖFˆ½G{¶?Ì×Ͻ‡cÌ !Þzù’]›òC›RtãÚ¿Z!Ø,j@!$ÐÕØ(¡£ 0ò²sÉ,‘NØ¿ºµN7e2¬í§Ð‹ð¦ó3žœ_fÁ^JˆÎù*0|úͼ&°À)B,œŒq ‰æEεe J®Uøy_ïê.ì™éð¤!Ÿ;Èñ’9}ÔÖSa\(ãj¹Ãû6ˆ¦ÄÞP.غiªKÊÞÐ<·¸ŠA˜#}–àßÚK(^ôRxN¶%òVã#ߢviŽØ˜3i#ÛáT[ŪÝ(Tp>eósw?ÇûcÝäÕ´@3G¿5no¸aí` ð™ÉQ¹s‘K„®P¨ªàmilˆn&¦ŸÁ0”k°öLöÿÃkd„¯ö}Å :¦?Èšoà„¤æð‚cÒu¹}Ü=9Lô¿JVS·œ 3.„,º×¶Ì[ iwˆýÙ¬l=™kº©… %Ld7ßÓ…èëÊæÈ pš‡3TûÁ]q‘}ñ`°ÔŒ‘ªb~`Óá*=„!¨¾jîÆ×¹¼ÿ±lkQ¨!¿1þkw[t,%MÌ«}çXòÔ ¥«}âCË)È bÿðàDÙǰÙé7Qÿ°§õ;h(Y-ªnÙºbú,_»æ£’ôêÊ‚höH×U–Fú¿Éûyé}znë2ñï ¨I2¦Öæ{™õX®P;k­pí0ÜG7Tó×^ɯk×¶ŽÌ9­Gžh{Ýt½³J¥â;÷½«¡6Éþ2Ó8ýþ¾'s°Œüÿ:ž‡×HBoõØw¯ãsÈ¡fy«;¤øä‘¿»½Â—* ’த†Áй¥°åLµ>Zܪ¯›Ž´ÓßòÝÑ…8alXhÀ©1*jêU&ùe[2Ë S¢¿ƒk.¬‡Ý{(ü×2/{¸¾ý^èº268­èSƒÊ_ÃÏÛZ\wZ{pÍ€–?†!žèû4³«×7w;xñ ÕÜÒ:‚ .ÂóHçõ„Å¡@¿³¢ûTÞíu51É™=^]ž}Å9úªÕ fPÉOòÑŒ5&Û6ðŸ]Œ^<&¥Áü”ð’ÿè¸ÍŸ¯‹GœÑ~–®„'Êïˤ¼D0õÞ›cšJ+i3ò’p¼ÓGSŸÿç.žÛŸwæ_öß ÈŸ !Ly¿ æ|"u˜½Øo™$çò,_¸Mµ+«2êE]…ƒ`~Èn|ôn¤–fv#Zuݺ2ÛŒHwiŒqm’Áºjb“}O·%¿s¬>|%B§_Ç•ÿƒ7þSÆÂbŸp$iuävøËO¾ì1WÏ(G0¶£$@ýIs>.S®Ÿœâ„ô ]ßÜÄÁ†MÞ;üç`1uŸŽJMŠ%¡•éß*+j}œ|­¼#)^A(…îÝrz³$ä˜TŽ(’öŸàx „z¦þ}_ËøQfòì‘ÞF,Ñ+YW-£|Q¼Ë“’ŸDµ!t ð*âéÝcö]Æû"’„Ô¼Ø7'þC ©dh>jö:ß‹”&j@¦*?vƒºŒ½ pÙ~Ð4–}ܳ¨}„«¨N=37Æ›“ìßÝèñ2f`Ó,éK |`:wPÁN ²[Uª¼f§£È-ÂXuŠ<žPà ¶m‘W²¹ïª…Á’Ð,òÓÊX¹¼Åe†Nð { ðþ-S‘k5M€(¼}ÓC“³ŠðëÚØß_ŒÂ„«Ç” ¿¨lƒn·ø÷,Õ|L,ÿ,Nâb_Î͂󹾻Fà9ápü¨/÷àë¿ôçÅü¹kâÆ³ÆÛ†îb}÷\E*s_Ö Ü`Lj"Vé)#§¦åâ²ð"O‘Èv¼­¯ÿR6í®ø# Ä"E˜ãÀÜp½|5Ýu»Ïà§Sû±¬±·PÜV„w¤ÝÏõ~:ŸžHob DSÄ| ë ‹ –ëG• džãÃÓWh„vœ¿7îMü&a¬4\0d}¶,‹¨‡S@ö¦Š¡b9%?)]Òx}¢ò§n8Z¹2L‘ùÇ*$ó»Ná¢ýS ŠûŽ ð<³¹éç?„ 5CD"Õåh0.(8¸)ßÏpûÌj“Æj…2‰èÞÃÊðœvV^×DQ1é2›…‘HJ-Ü&Ô¶€Õ!rð²PR®Ìú6k ƒ ŽéKÎßÏ4sÑì¿t¼=t"gbÃf+€ä3l}QX„d-p>F>`XZ³x·\‡œƒûû²ïur¨çã—öQâ"U Ž%ç[çýyI|WKñðôð]d¥`gÚ˜8\w˜v|À·ib‡F{àKnàȬ<¡]§´%òÎóPyyE^<¤•³ö Š®kžgŒ ŽO`"l°ÂP²ïG¿‚½ d xî±ËNëÀ.é¼5öö¢^…«qülcœúRìcØîIÛóÚv‡žuÏì%rj™B)¶ºöÐ(RWTÊí‹uÀö ¾e¹"³¡?¤xg¬iRœÎÌñ[‘ËéàÓ÷’âëGøí½´Bb*«K>Œ ›ùÕy5)¾:QRÉt±ÕÔ63¹kÒÓ¦`ç?z–2¤êÿp³Â“w$tU*ëÄ1ÅgÞ0$rôÔ v$yW¯è±\±­Þ?µ¢¿¯}kýó÷¥HD—ëA+…Q v>”2&í8T»C×ëŽÐoBùÇ;%”Ð!Põµ¡DTt¿ÙŸ ä|àý5*Ä'#ªY»Fž•„7¦ÈìØ/4Œ„ A-¼€A¿ô”â¹€@¡ñf‘ç|çvüŸËglq‹2Ú#û<¬Gã“óÈgô¨4:øÖ$İ–¼Ÿãç…Ïêw^ô`†ºTeÆÝå][rIF3BîÕóGîâ·Ú nÎ4ø5*œ¤ðxüÕ×Ó2jÔòÃüÑd„²¸f}Ì ³b©*É•ùä}ºI@D÷}‡„Ù Wžä¼Umð$ ïXÆP-½ôï;^ é^ÔBM1•FÙL²)øËëÆŒ4:®ÅWgõúCÍ!YRN®X-)ŒýŠ­Éf“•äµ.“+ÄŸ!iñH±½™×)dÓöÚl¿ù~“àÆà ·hÿí'%q¯]óc‡ žÎ J^esáË:œWè 'Ð8>¸fÌ­"=Ø?¡ÑÜ 2¸>)¹56 !ƒÂÉ‹"räÊ!' ´!j$¹;!&Ðe¢ÓÙ“¤@‘R<ûX¢¦Øbö K¡¶+ },¥Ôç>SV°‡z—jé}° Wv€V0ä,UÔŠn ƒOŽ$öbí©´EC’ˆ¾cK d›ïx8Qb¤ð 2I›Q„ÜAÅe$%B€Pö^qÌ3#z†ÍÜbüõ‹Ø÷ì'ûó¦ÛF4‹eÀüvÂtˆr„>,ôRÎ}úó’xˆŸ gw¤gvš•Y‚Þ”3Vf!KöH W§»þ„””oÔ#ÐøçÌ“oJi–xÉ¢ª–eµŠ0±²unBJ¾B§,ßÈ€£ä†å1£Ë¸îûûÇ$•3@¯ûpÇ )W~«$¸ % ü¡š ÄbvÓ4!¶Ï`ñÇ€R—G¼>€Õ*©"-ä@æ÷E0üwf(‡§ý蚎`õWž³P(]«¨ìl¬€èzed3þ^`n§:¬ª5á=lÊc÷ÃvllŒkzׯ(XéÓ?­S«¡Í ±Ð÷øm wHrù}õ¨õá—#öHTéteY†Ù„ÊL·Ê§ÖJ¿„—uí:p]zA¯K}ŒPuÛ;è¿]öøû‹逨cmÅÂûiMzg|/ð%@¸Y3¬ÖLW õžt'ÁTÍ £9Òt…?}¯Ö@ÝYœz® œ‰õ\¨ð•†0ÓoµOjaf)’µ¡š”Xûæe%'F]ôØÀ ÒvᎢy t½wž*F|Ò;Ùß5Îf¢dp±2eIHe Œ|¾éPß!ÓÜýÝañ4­Ì‡å8~ž¯R JH¢F$Ç"¢Ì÷Ÿ¡‘¢ÙV/‘‹Ç’¤u¯@ë:“xã§} ›”lãîÕ—yHÜœ5²ôÍrneè¶þ­ñ±ši,:mÒ¤' Ýi\©CÒ=ù9_Zá¤Y˜rÖ©e¿;#j—Bée…òùÜHÅ•ÍMgà¤zûñTا>¾}!Š\`S-a ÛÏ¥÷Çu£äþ‰i“X0#B5™ø©}!¿'‚v¾i:."­Ò— ¬lyæ¬BɧxÎ;ä‰$IE[®ÙK œdšÃþþµqQñCj€Ù°$ü|F Œ¼5ÿHÞÓ *Ò˜vThr)…™¯‰Òwýðfãd1t®•Éê\\õBÔ*Ð%mF„.P”„ñbKϤP¬¹¼Ö…‰@£^Ë! 
WD6q—Mk=¶ÒìêôN†ÅcGxÎ-@<^³Vh3]ößò_‡R$â£Ëu9O»ÚIGE¤‡lÄ£3æ?Ì“ÂáGçfc1”¨C š{$cz\Y<ÉÐp8Ùó7 dÇS¯ñ‚ʉõ¼faºÐÇt_íÜ‹TsD£4½ù#GN§„ÐË£ŽV²ÁSp:è«áÒBÕ¿|“õîµ{L¬ìL=î„"mÜŒ_ýêõ'Yw¸ùÜÊçr¶,Td­FöKÅ®p 5ÓÓTÏwÔ“úm ¬ *–ãÆd”[›©$w¡ž™Κ«WKŽÃ¸î1+fú£:W©u’t=ñ Î/×îâŠôT‰H̱PÜ -xád>µi¯wà”†McŒh8X#d’ÛR}-tüü/Ý,-Z^öE©…Î|rn%šß´™9¸þØ‹z—*úutX¤Þ-«¿6/ *ùƒ/¢Ý ˜H©€šoAêÑÈ„æšX÷®òma·¾T¸íŒ¯Ó‰Ax)ôÆž¢¢F¤¤â%z%ÕaQ`ò<À‡‰MÑUx’üxáÛ—[€³°Ær“Ån¶ ¯Z•2eùÔòN?¨t¹ ±™Ò’¯ B[IëuD  >Ú¾"›avÊ¿ÖDŽ«ÑÉ+Ü•ɃV𨇄3nGÅ1wÒ’Ž<ó3á«ù¸†¼ÎëÉ‘7ÒîPòu²•Ò}•µ¿b†JŒßk­¤ÿwùD³¹çþZAe 5Íÿš¤§Ÿ¦ ÝQd7‚Æ_1W¬fȲŸKc}¨™ÃÛÄY•YÃûR>,„h~ÉØžPîlád¾oÏ© üÍ—d&™I󮦶ñkª\S0W©vøPabd\…¨ÑÔaëEUTº×Ë~^LÑÑ›-JŒ§XÛHÆiZ£*[™Œ e ýšÄ¨¿¬f{@ð9ðä9_„ß³qwåû~×y³I~†!¡\Á=·e’·ìÒÿõ¸Ý‘lŸ©4óóÙ%:å´%^ŽóŒ1PGWÅÑIA þ4¬µʳ±Š\:ØŸÐq£vžd›zõˆ7¼“8”Óêqhµç‰ªC¶¶†»<†hÓ½\Ë¡|ÝwˆÔ[!Ja,úJ6ÖÁ.Ì”¼ØÏ4Ú‰g•Ãz(g¶í3è.›Þ+´Ó7êØ®^Œy!z®M–uP°!:öLœ©b+¨w±ð±7÷¬†}WÕ\£ñl…×fúšR™u2ïG· à+ɽ({ìã̧ ©Ý¶ë`*ì5šHYƒ¶/vð/¾‡,êÆn ¤TX´ó]õ7óîÍNtŸ®n˜]–vh{Ý´|óÀõ½'b«KdÑh9î…X«-¾ÈŒÐà!EOël¢(Œ³-_ߎÔ)ÍýŠÆ ç£È‘¼õ¨¿ù £B.yAGA¹âë&D„R_M·)Ýóê£ä¡J1¸Ä¬?øâÄ<š°—JsU0ó›­®ìt_g=;Ào¡ùêFÊíþbŒ†s­÷Ã`¯ÎMòÿoÈq&@ä[†ŒEVl$öQóüúMßJ­ÃLžÏ\þyÆË@_}£äÖì„íôŸÊ}F ÚÎusCv Ö[W2¼Í9Š*/úê‘xß‚@ê~JMü­5Þ"wû»LK:¦Ð þölC±w1Ì ­Ì§‚4W•Åò$­Îæ$«µ¥@ ¢-fjšk xš©¿ª÷ÊFií#Œ’ÚŸÂeeŸ0ƒ)«Œ$ò„ã`G"!h€Ðïv”P[ÊÑ H`ýÀm²MOÕņxu9;¹ ŠA„¨u±,±˜tuÌlÀÆynW-„'ÈS„þé)Ñ¥Ä aØþ¶~¶¯ËÆ¿ÿ¤6 ‘) §ÞÛ/e±î8Xno2Ã@Hƒé…Bms¾ãýŠ•E§€9tu4!rEÜ)\Àí¬L¯Ý>"{ýŸÓïÙÙ‘¸#(òÍ‘<‚HTÀØÔaœT†ƒ¼‚¢4xÎH‰…áÌÀ Ñ5œêÜš"rï1ÇÇ,CÝö\¨S ΠÎèþˆ?Ó†)v30¸p#_&u@ÀæF9Ó” £N‘oŠÍlÛ‰¨}Œ3Vô=•ù_·™º£öçxhÕ°2ß÷.@— ]”“gC ¸ëîÕÙ\#Éï¤Èhkø‚,¥õË´5ê8A­8& íÂáüÿЕ1"¶ñ¸K‹(©í¤Ìb±Ò¥¦ü üÎ dœõhYT> ‹ïšt€æâ˜Î yÒÐEäa‡¯)¸[Ô,¶ÓÕ œ«H2Ryp”|uªá'µ``^×6û)ÄŸžõ/˜_ÙÙÑ\󿔨 w’k™pÙd§‹ó -·Â¶ÚÃõà^®?Q=ÉáVÔ¹]Ây#÷ë‰uò¢ é–ƒ„Š(@³1Ùí\GPV•Ìß7…¤#ü³êõ‚Zå@Jn’è‚ßYR·NŸÑ£êíÄá†ððl¹­’„®>©hP׵A{ {JW ¥øãYp(ž/q†ŒZtwòÄ~<6ßC-‡âtÆqrèU7Œîè“…´-œþ¿ýB„â5$`,Â0“ñS~EiK@½ö9¬’1¢l홚“²î‚u xŽ“ÅmiÃÖ)-N!Nž¦VÌÁ›y0lH^–\ÿ…ÁC5 °´z‘±ž2ºøŽh}÷e!(¤YÿÜàí)ì¹|¹t@÷ž´ö<Î0ýãî[ÊߢÁ“~Ò+ 2 lŸæãéH–Ù-0+™r…ŸêåIޝ¤×:Öl-¢~˜€„Ä“‘("Àåên° ÇO¹“ÿâú¨‰)Jà7˜„ÚÜüa&üìYÌþ€vÅ4šk,Í»tñ*ˆ]‘ØÌUú B°ÀV½*=ÿ ™Ñ@•à2&-v<=QÄ4‡£Ÿ/…uٵǠ/ïO*ßß’~<2|p‚S—õ€“CWš_ßpÉ䱉j¢L¼ßþ>® ºì°È³,ð¡šÓøçœ‚¡n3¨€¦Ô˜‘à¦7êTÕ?¹”&ߊÊ*n¶vÉ‚ ¸OªÁUlj¥üyäµBYŸ-5’]BÝ GœæNÇ©ŠåWúl– ÿ*’™âZjÍI«3=EÄ-µ´Ô4ªÀ¬Ów›ãƒÈå‚,âŒ-¡$‰ÖEQ2ÑI;ÿŨ1º”/QõÞ.Ì ñ./ý÷CÛ olÄP“R‡.lcà$…‹#Á04y Œ˜çã{oõ6ñ!Cû†Ð uͳ7ð²ÿ· Žnk>D袗ݛØhÝÇÙÊõ ËX·±úL—ÏѶ†;·j;¾ï³õ`¯L%Ÿ‚ôRVtN`Ö´èÔU1 }aer>ù‚à÷`Äõ5ÏÂÞ»ßīΠ,ßþh_ýÍžn&¥7ŒŽZñwRDÇÈú xyîϩ󟔺Zì̶•ؼH/Æ*Ë­(qITY<8€në÷ûš'¾A³ÊÚçMLw<#Ôõg® ¦¼X¾Æ³þf v¨;CPlÇ®Ÿ6- T!w¡S›6ðYwüª rk@·î–‰Ý•VÈÓðT„潫} k®õ3•ŽÁÅ…÷bÍ=°QÌzvp³ŠH5_ót¯6Š6C•Z´ã¤ö“Ò¾ŒÑèfhUˆ¡Tì79ü2éèi’{ì5¨«_Tž¡©";v?Zh€¸ˆcMè †iÏ<-ÐjÚžávCW"FªÛÈ´Œu=¯d ÷æÇ”†íxI!›^fùþI¸Wi‚mÓk’«tª”O:ì™r/²p>ßÂJÁu‡;n4©Xrù Ú‡cÁæ©·üu3pdåì´'¶¬`‡Y& ¿&3y•ý´ÂžbD–ÜÙûS´¶HE¾4k\Ö^†¯ç@ùt 1ÐR+¸ö»©] )Jq+ø}ž¼ìÁÞDÏ¡™À´HE©Éò„Jº³fH„ých+¡ÓäNY»c»/á[wì’µwP[Ì»Œ–PµätqÔ†sÙayc¡¼CúÃÕì³ê‰™m‡ÆjÕÍT2·>À!Ÿš‘!ÜTÈ)ÜT]¼’”èí‚æg 91^½Ÿ´ð[}z— ¸ SbéeË>™¶ë$Ì,Õ¡üøÖU¥@1-ûA>«ç6¼˜Óâ‘yè‘jØ6úüÊÜ)€´œ£¯Ù—ž­_´=¨; }*þKÈËdtI÷‘é³ÔYÛtçˆ&ÓòÒÝÖ¡Sj“œµÝPn?‹|ò”Šîþ¼¥ €=Åñ¶úgÕuíDy¹ñpŸPÎO³“h¹YÀ¬ÿ¹¶<ÀÔ§ÛåÐëTµÑ-`¸n!wÈä´øÓ£~¿›Ê:É“œ¥7|¼ÖŽÆ ƒø%mn³¿÷{½Å–ÀÙ€Ætš@1èݵ³ê¢2AÖ¡%³]Aèû ƒ±²OpMžÎgüÝ)a…-M“¾@Ú|X-}Zä;´,Ö#Åžî|é ° YæE×Kª<¡ßj%Ml†x ,ó(•#êþð{øI1KßdÚ’­“óÑ “„n÷½á`§låéàQ)x1ÄÀÙywZòÙ¡yôgô7hmSvYåb}Ô6õio?›²B³ÿR*Z®æKPáËÚ(«“p‘áâÝ(Y§—ãlŠO^ž9T["?r­ ¹)퇛aVUƒ ˜À±†Ä21½)ÍRñYp=ÿ~ Ž:àárnœ&Ò‚ /|Ü„éI<Ò)ü < š–L8(Ó]€<û—ˆ´ Ó6pPp„$³u4µ2aÔzC† S± ¯áf"ïD¾ÇÇN‡Sœ®LCW½§¤AÕþ'hg¸rlæG$ÞWfëÏ¥"¥0=g[¦ Šÿ”¨ ¸1½¿X hñF”I>6¾»1.š·¬ã>Aky²Ÿ½€xE7O?>¦«3 œ®^?"„X.„Æ«™äü À6¼¼‡m|[“~q¿¬Ýz±ÂÜò•©_·v§Ž*÷õC2,íl<µ×[Ô_øî:ÒÄ·‰DßÕmàÒ.t{«¹s«V½êb+{‰¯ °p9Q‚á)èµ Ò>èØfçݼwµa"4¸îYŽ[1¹EÌ)zö¾)½/óyÙNÓ4ð"m.0‰â©@?’ƒ™Xd:YÛ7™Ÿî“*Ö‘À5Ö›YUÓbæ¬ÇÅ_çî2zb Íâé41&Ó‰÷þIÔŽ™ÎΗâ}â‘úÚKÞ<(ä"ùt;ï“íĪFo‚C„³;ž"Oð&f­q+äG­Ãù{©yÆØ_…ëÔ·`Ì» ¸®¡} ‹msZäxy*[zˆz'ˆYÇ9a )wÀÿýðÁ âç/>¨¾ß€•y׳5Ï ½y9Pº«?¯|(§há z[‹(^6ÓÔH€âæ>­ÎìçºMq¤§« 
t>àýƒÅr:É_?ÏQP_Ò…8¤!æ!A†Ï ÇWídrãgjqX‡ó€Q$Ü­ëí’¤¿Ú“‚ÖJØ8°²#d;âA)s•†D)Ê™îZz4JX€òU ùâi(Ê¢²ù²¼{Q]Ó®H~/ß´õû}?ÇoeÊ5ç o\ÄÛË}pÉ8AQ½N€¨A-L`Š‚z_d}-g3f9lêÞ6åBxä‚T2kÞ4š·À¦èôà98)–rfDfëEp‘î”}±Ž„u¤ ëtŽÅ›w‰…ûVzœE;¼Y5È?Ý“–°{Ø7QœÞ÷2Éê.ógi˜µË5rúgú‚SClœÒûꘫÜSPlxR"E¶² ·!™è˜õ5íÞ‹hcþÞמ$<£c¦“P+9›ÙFÞ±¾ŽDÇ$¹¨<÷} "ð%‡¤í5î‹èÇüz¸“!ãu ÚM;’›ëÜc„fnŠÕ8mÁ^j©qÓdƒ½‰ÌHC’…Ä<âxûL¾'A›*µ(Í+ÑZÈŠCÄ\‚¹ìô3È!EÎ?(Ö{bÓCô.òì•->¤8Òª[#OQ׼ԶعÿCñó’ó®KËd8æ ¯h$©¥ŸÃÒrTÍñ·–EA÷À|)çÿe^‹czÔlÁ”Øo9Ïß<´ua¤ƒ•ÕTèN>ˆÂKýKüf‰O:Y—j¾Û“,ÊÝÕÜ}›Ü¥—ÉÊ+ŒÖ¼Hϵ•Gň¡ªôÇîÖy³‹fÍ(mM7Ó(9$%­ì+­œ'@«‰M’²]Ë"ض°ß‘Ñ_eá!Ä\Á‚~1~rw ‰ôê”V„­g@l¸êÍÄfíFyBå¯/Ä›Ÿýê!3—o?Y¸ÕêfŒ•jÝíö`k–=Õ¹½ÿ î‹‘.)žQ‹ëŒâ]Ýd»Jð¬y@eÉ’¹º 7× Ö@á4½Îm“,ÊMþ76=áqZB`ã­øÒû}ñ¼…—ä·NPƶç*÷a›W°áý„´åM9™1€ kÌ„ú·­Ækr™m4mÕÇéõ[N;…{lÂìÃm´Ç‚e ªr&Ø75Ž@Ÿ>ïg~Þ µëF3òß‘|3²Q·7ÿ}ïf¸lU–éóÝôÈ×Åb¤ÒXñ„ îÁš?;T¼ÇÅ죃evÞ쯇žg9ÍñV(2í¯šŠóª÷PʪD§»ùÞ5©íÙGH¹aaÍн pYÏ[Ù·ì‘Ø[Û¶½Åk¼ÏÞ½¦Ý0Ë‘3¡\‚ý“bòþ/cHéFùHã!4øû˜Þ#ÇJã̹MhŒæ+”2Û—üc %ðNUùª‹¾TÁIp4@¦hÄ+/,›?¦¸ØË†P-+^ä ¦7U¦lëËw)½ãˆ×=æ)ò…óÑiWgü[›a}{$âÆV9ú>uåk³R%³JÇÆÆ¥}!y®õÇñ gðMç*§ ›àç¶ÌóZVä¿#I¡¶×¾À…ðg˜D-è‹ñ " ÂáÖ¬û* †Y[#¹åq¯—WÅ\´ç¶SG¨0o·A‰‘üäyèmJJw_Òf¼™Ë Ôm$ôéÇÚÓàè]²Ò0øÑÏšÄfj'|íp›WÀœmˆ%IP¼‡F'3·Þy ßpƾš²þw0ÓYµ€aµ'ÃuÖ¶ÑÌÝ“q£ÌeæÉx1u•xΊ7¨yDF8äÆ÷äSŽ/6Y9j;)sBfÊp.ª©~±ZÝvƒ5¬M¤F~ö'ü©÷¤ ÖA·—@Æ\ Üc:5}èùsó êUýNŸ+#ïf°8€µ‹¤ˆ <#2æ9”è¼ñ10°g…üÝÍZ=¯eD™L…šVA¯[=Çù`~³Y°ƒÝ]Ûñˆ%¢ÇS¯²s()õ ¢9Êo|-ûôÐ.ìç„t`1©Ì…º}®Òµ÷lñAç,h7”Þ;ú®>SÞÀj±j|ÊÒÞ`î°Y”™øtHM¹ñ æjÃwaë˜J¿¬•qâ*ÒL¼ì†cé5Àx¸ëoøçÎKBîÞø¥ó6ó™$ J"–¼»×#y—£÷ÝàбÚpð~®PDÛ "ì=ÞÆ!OäóäÀˆm¸ÜÂVÕ£7OÆ‘úÍ”ÿêÌTm¬"ùÓzJË®z¿üõI™]³Ýë`5ðbÙE´xk:ï®àð4ôuÉÒdmÔFÆü.ÁnÔáÈ}ˆ.uc©s VF Æt‚wü}÷^ m$I•÷ÿc˾åù•vXк¬\š4jñ»cý |U^½¹½èLµï½µò09qÂÇÃ'.1È uɾ~~Àƒ$]"dçØ¬äŽŽK•Õ8HøìOâ³Aƒµá™‡&묛åGxU? Ú?ª3Îa*| †QñsÕÓÖÇAœhòÑ7+ÆØ9.p|+;&vsC¼=‚ÿÃ"+¨œFªi)H¸£¸‡‹äR~ˆ1ƒ°ÄA{&™Ó°4¦âÏÆÏŠž)²Jºîz >È‚îàÀ¼R°£À;IJ»÷O1„'àõ€€®þz‚ÚŸMIYɇ[ ^º êú%.YI*ñ8¬<›ÞÒU—ùLò`4·¼ÜÔ±mFƒ…žÃYHÔE&Ü´™¶8Òä¢ç~Ÿ¸)ÝÀ‚ʈóÆ8‹!YqüäâäðøD?¾=ªÝçI5‚¹ÃÙ¤tø :§Ò{±n—6Zø ™“?mã… €Ù=þU=3/ÐÂQÀ(‡‹+}ƒ,T€Ô ø²ø£äêå@ùSÄ“|»XA"SØîç0_MŽhçq”+ªw4¶ì¥¥W`9Ñxüñ⟡ÊX\n°v˜î¼CÖ*Aç}˜[aÿ[ˆ´yîlöµ—Óbíã!&-—ž¸t š‰ýQ AI£ú6/|ÄÃo Z™ÀKWC(ÃÞð–kéfPç\fð{ž‡aGCRWd=U‡­B¨É;±ßFv#¹ÜKtç²×}´YØŽæ–À&[†æBÏûÖÏõv–]TϨWI@…VT£¿„_°)~åîìNûˆÜó§Jf¾}…~ÞõkÓr”VlnÚÊ…Tt¼f{ÿ&ømÎl-Où›zzнqƒÙn`]öÿî¦È‚¹¶¦¨ÇŽKŸ|}pg–hï¶ÞÓ<†Œjê±Vh ÓŽXÊÞ€°ÿHöÉÂUÿý†(¼B46—Õá×Gì œKäb|›b„ô;9·ý=%úVcØÊ†ìG™WK ‰WiQzÍ23›Õ B¹49#YÉ„†"Ø|ÐÔ„]»Ç.ɬ*º©®ÌDª(x[~ý/ç _‰=ô"M±0¬23"”¹>;Þ¦KœZzÑø îÉÒf™˜Æ—:ž0šãÈôàè™(¯n!ƽí‰}[»÷ŒIé§N¡„( î\B%™ò–rO¶¡êÜEsÍU:cö¡ù—Y­KíP\Kó÷||‘R&yx[–µ«vKÓËyD•Ù‘!Á§ÎWï[çç)ëï–×Í9ýuß²e…[Q¿é¿SŠŽ½§i–ö¤qX4!©_¬áÎÄ*¸èù‘nô2›Í š]˜®çž7[+ÎÍØ7F{×âS:L—´èÔQ¾×«Xùg¼Åùi<† ÆÃÚ|'CÀòPqÏçØtï ¬!èëì(ìI^DÊÍ£e$ïzŸÉ`+ÛO· sÒqÛ ŠY‚“cÂï}·ÒH"CñçÆÆ¨×ÁnÉÆáÎ ¯›þ fé¶¥lèi¬”C;^õ,S1cÉY0Ÿ÷ÄÓ”¿£äêYçqµ+ß{‹ÚËþRˊ҂܃mHäÓ¶D`«ðRMЇsaÝuÕÅ!Å^d‡ÏAÕ¸ïÚrcÅ̶ ¿:Œú+èA£Ú…b‘V½_@ÖVé iTrèåY|»+f‹y—‹ Ìò¿#ƺ3©OLö0Õ袅8N4Ø>çgzº6ƒá:&׸xêž±Þ’Y<@a¿œø¯Z”,ìÃ!„Ú¿¸‹Ð¹ëo¦±ä e©Å&ذ‡v’¶—KUG\á:ÓH`º½­Wn+P‹2ƒä0‡³ËÍË›ÞZAò’kÅ/ÁÝñG‘eó"6;gháÍã-€!JPwà3½iÏÊ®)[EèyAj:îV^ùø)~ÿ¸í’úϳ&ŽžûÌîü²óûO…ùj<ã¨ÌM3¼í6G¢mV_ö…éåu)›.‘X"û„¼§–ô*OmÕ(ÓŒ2R—Õõ–GÔðÀÛž3WCy酢ͤ-5LûÚQ#F&'PîÿU°Ooá®°!ùUÑ|‡3>§ `6öHw?ú邤­‘ ×ãíÿb2Îã+WÐt™y'Ô,…ÊæîÀï%hò²…ž€—¶;qù–FO::{óÕË1æáS›k ¯m¹îÂ:wdÿ?+ý^Sä\çŽln£kµ;9E†]sZ ÐèÈ×á¤:$ª-Ð2ó¬ì ä-з¢HñÚ¸?2 1Ía¼âêÊD<vŒºa7Âñ¼¦ÇÄ _]ö´õ¤|ékÇóWUR‰Fí·½Ü¢¡Y™øIæÃoåY_'±R‹Æìßîí¤¢hÀB+fÂ}øó¶öÛdHM³Üõ¡tr¦J¡|Ò¨]ÄM®¡ušX:(éVõÂ0ÌÇÿΔo˜¦­=j>Âi\8!©ÒÕ@VŽò®ƒÞÛìçêÚvUjÁVÏÊ€ÿ,Ì.µ¦ÙÒ~uz¢w¿çYvÙ8¸q±$µÛÐ&*™‰på²á™ ¼S…J×cUÎ+¢igƒÈÔâëÚ˘ä·w£¤„FíAVÏv¨“ˆÖ*\[¥@š*8¿ÁFW3cv¥“&·Òm#7¢†oÐk`×\׌¬§8Ctg2G—ÁÕ1J(8Â_¤Ña±ùdÕ¿žò…É£öjô­î¿¼°‚äËEsD”g(O½ 2»Ø¦ÚKåv g3ªßðÔ*]‡fkêŸË1P59“ࣗYÉ’U碶 ñ4'mà­@R±Lt×4Ñ×Ä-T ³•õ®„Bì½Ù‰¢âYLÎ }Á%¿?ù¹'ú‚® ­ú*A´*cíÛý£èU$Ä[>â‡WÔª øßJÿ*ÊçÃídÜ®8§ÇŠ‹µžžŸ„ª„¯Iö¶B 
coËü`m¶Y²¸öooáÕ³Ÿ:«ÊöO¨„Ñ)ÑnŽè†¾g”Tºègù!Ÿ4÷…õSòÏ£.‘Kdû8ÀÒm:‰I(Šû÷ð^qŸ¶$ð·Wêàÿ¯áðZý3{ÆÍÁV™P~tEêf©°)M¥Ûâƒ}lr/Æh„dŸ˜%‹tCÑ^D;É/ñ.5×úAlåƒSÁúêJggx×ùòÌÑO§×©“þYUž¬n4vg°Õm*gŒ.ä°çà@Ž!a‡^HóYÓV1ÑVcû~ucÌw¸¸Ãî;šäè÷-º®Y™-V¬f\n Ž»ŽÝ³gu\¯YÚÇýŸÇ°t![çíHHþ`ÆxMƒZ6úN)^ü]B^ê‚Ó9–ªoi^ÃrzZ}ªÆ•#“ð˜8›wÝ£è%ÓAA»¦`e–ª‚{Õl~5ñw­uþ Ò¿±Y-mÏ|ôç¶(Z=G]$÷³^¥Yë>A}ÔÞpŒ^ÔJt>}Fˆ'i ç˜õ—èÇ»ëÖï,û40Ñ• §pÇ‘E9.„:Ìb…²Û|]ú¨O’ûÜåÒ!'.\•"¬Ïß#úi&†Ò¥Þsi™´‹B{û„q éXÄ\à÷£Ô•Ô3lYƒAJUpKîXòsný?¦‚f9»åŸf×+ËÙ2›‘×$Twjq1zp•¥ËøNšˆ_’|0`XÔøNŸ1÷8,,Þ Âl2ž¶E&®³ C ÏWš;gñÔ‚÷ÙÅ; qÖÂ;„b= ?Û<:ôf}.á÷ú»ò5L¥ÒÔÛIè¡^:iWÏ!£$>ÌA0:ݼ…^Æáòm~pSQM#Þj}? /©ÏiÒ)£8™PgÏï6Jbw“äx…g>çÛ°¸£I#(½@'ÈR¥Å$Lê"¥‘"r:ç‹h›dc—¹œÛîïfÃ¥‰ŠTÅ‹­Å Þë©ü\Ej2 ±ü'N ÷‘¹ûر§>8˜F÷¦áÙl‘žèìÉ«\ã?‡ÀHó&þ€ÔÊ!}—y¡ÌJk‘ªŽÃ½ ³*í’hÒäb¤‚äÌÁÚ6´¶ÛxCÒ þU̦¶(¶;3º n¸¢Ã!ÙÍ¿àݯ§´fZé‚ÕÙ·­k™. ›Sç-[$ƒwú×Jÿ|Æj4¶<ÖM/éŠq‹`1Ô†c\GUzAO,j_ ЉÓz| : }3Ú$'¡ *œ!ðýÈœˆhz@ÇjbP–@Hl6Üù¬'ùQ1¤3kϓғöè€×uþ‰Oô ¾“Ê„iZä¸ûFÈnº¾pÙøVy›Í¸s÷ö¨؛UüQ©g¬þÚã†[Ý•w.:äRÁÆó½"^ºôýSôûÆ‚hê8!`ÅA›8Ç`ø9냔¸É²‘µÃ­ê¨:Ôe—e²íVqé”# ZáÈN­|Å 6þªºÐ†kCO¨ï§ðA‘*—I4Dûc¼ÝB4'äàlIäg¬wâ}Îhç–Œ êóºãÌßOÓ·E-ß+_Û„¸¦ªÂã(¶‰ºûzûFËÓ(ðl¡?%žŸþ‡*¦Yµ>ö=MvÒÖã@û©MYì#Žakúr7SË1`æÐµôA}ªü†˜Ò‹ò_1¼%ãÙ…MY¢SP·ª¦kF“Îÿò‹PìÅ´uG7Ÿ·_øÍªÒë»=|¯/A¿±jÑ•µMxRå±*ÃvºR¤¡ˆŠsÓ52I) (𛦇PÔKwÚs"j³½²#^Ƈy#šÙú}R'Ó¾ú¡e©/~çöÔ×F0Bš šfÛÉ ña®?§ï™Jy~eåÚáïY¹¿¦ü¿8®7Î,kRJìaƒM!+ZÝŽã„ûªåÙË_5Ù"±r“¶å«”“"¨Çþbx§Ý2â_;Õæ(4XOPMsm쀊ïw¥è|–Ƚ‹G­œ¿±íXZRb㊳íðͺTZS!Lâp‘ìQ ÐM!ïÈÏâéÌZ@‹?èƒ.wóFîƒÊn„Èxæã°N–ô¸d ’²ÊXë®K”¥f•§2Ïy%/÷‘2åÕzßõ’ø£—³kÞŠ$åê€{ÄUœö¡ëЗP¿ØÓIKœ4¶o„ÉŸKBRš&Ìþ@ú]§Uþ¤û>Æ}ª¯ÂÂßG-L²ú¬TLÿ­F k–g@ÿ/|çá AxÎÌâÄ6šyÛUæ£Ü÷$¥S1üVîèVubÌUoàíX*=ë¢üßÖ8jUs<Ï\•»¡-do篒¹ ðÃE'K×D,æöÛžÜk« Oví ´¨ö ­,:Ümþ_|qŒkH`èÒ[ʃçc4ö¥§¦Šå9gÿÄöp°7xÀAd¸8@÷A¹~V]w«´·äð8EÚ+è@TÈX}%ÿšš×÷‡’W˜NBy“é Ì¡3pžS4Èéd7YþJk~ä¶ë¬^šaïàž>Õ§1Ÿ!¥<"ƒ*ÌARy½F '÷ÔÞ´w ,J(};iÕðM ÷šÈ©Û+Qo ‡ß„; 2QÎvf+‰Pu¯^2ëŽÆÒ0 g‚Èú‚‰îȱü0¦3󯱓ÞKY£^¡Yæ eͼdŒ_éé}£‹–ú³ž+š^®i¢!,ÔÒ] ‡à¤#s~³o¿f­fpŸn­,f¨,áJ_ À/ã*äI‘‘ßVûþصÞz?Ð÷—¡˜·Õ×vXÌ\¢âûÅú‘¶?ÜñÇôÉÀQ÷ôž®ˆßª¢’Ö~#…&lÉ%LÚ•úá$ü–ï“«õ­°:è7Çv”¾°bÖ~û¸ n0ëɨpö‚ôéø‘>RØËC`Ü ¥÷˜Msо"¼qæÅ À+¥êö–ÃDäÏÞÖtd@:T1t0y?ê®GK T^¶XÑ€Óx)È:ÅÆDafÿѨ¾û`h\‚ó#©N2æ@À‚„>>gf„ƒ½¹¬AÄÜÜv„™ÍxuîØ6ßìËÑTùo‘Qæ<î:i$8•—ô³[öÉg?«ô%Fä ¢²CM—rJñú²¶Á[}|À¾e³›=IE¢!ïº?¿DàGS_CˆN©®(°;¤â í*¶ô¥‰ä†Ó%ŒúœÑ–˜‚`7 •8š¯i"‚ѳl”|9âëz£. 
?b ®ÿ Õ$@ô«¹‹Ì÷dº¦•.Z ĘÃá””í¤AÐÉÂe) ŠÍóÈ„™û’ñûü¦ "âZX~‹T&“öAéc¯t˜¥ýlƒ‘z¦1©¼·óÅÎD7öµTœ$áëÞ^dãÒG÷P/#v!·—9Y”rHì «MŽ&©§”g\é;<† WAœ²"ªKP L¿leQ)»Ñª œçxw$c&´Ç@áq¤X° $ÔóÉ2E$ÄC–žþ¼ÁåÒLçÞ±†‚¦¨h«ë£mÿô¤½½Ü°a8PïxZ+Å ¯$ùš!¥·Bg+/ÁN&•žø‚Í-0ž7•VsÏ€c£òêžÿ´pói|UÊ;·îKŒÆ ¡ê]3öšÂõ Úæi±’Η‹{Í•±’M¿LÚ@Ï@ª§áìѽÙE|ÉéÜ)±ogKuk–ÄL ’„ÕØSŽÍ9âÇ€?Ǿhex ôDâšü¿Y ñpl—a-˜ÍñìEß®¤k‡&€•…D±3ò «MäH*{¨p²À¥Ç•!!P¾«Ôx;ëvÝç“ÂGè i1„zwà4’ŒDÛÄc´Ù„ǰ,4¬ÜH"<9³Kþ…óÈ–wÔ>Ç£‹Ð싪'L¥G®â…²æ­jWh^XѸ·ƒ&‰N =ºFKÅòä»Ì;[KñŠ!3û”g¼FÚtôÄ"ÈøJD¨.g6¨å³pÍ`QÐvýúD|bê/V»¶<À€¯¶‚GM{t&–ÝÈ/íêâÔmùÃ;YdrqìÝb—¼Ôæ)i ÉpÛ®ŒÄ˜Ëî$M“ÇiQ8äÎ< 2V±êè¸>¥ à =~ííåö´Æ;T;GmøÄðÁþÆõÐDü@Çë%ÄW@ÅÙVáð—QyB;lÄåœeõ½ò÷É+æ2ùn’6N„RðF°PqR¾žKV¬ZЏ][1±<ߊæQ’œÖ$Ú&†Èû¢£^Uï eXÖеSRqhÍAæ`äECÖ°…mCWrÖÖ‹ß0–Š¡¯5“¸h¢X TgçÒÇòG~+1Ým §¥])ßV™±et+”=ÙG9´i=”BDk“'IsjFcI:†º¤Ý6\Ï®Éö qÇÓN·ørÔáX݃v–É´ÀÖ±'=OLWIаG°/Š6†Ëd2Åd:Šãþþu;Ä›´“Àú˜« ‰àHÅŠlQ–ŸtÖtìp C²Þÿ¬(®ºzWÈšÔ9n‡#~1óï»3Ä¥.îÞ.i=!é9ÖBo¸ôXŠ»KªöàâNL”ó,_ –~DYÔŠtg-©a—Bø¨^]ã þJ*ßf‘¾]Èϵ׎Âw€Ô&§§ÄÜE>íÊYø÷\Dϲ‘æÖ?¦4Ù¿ËF‘`È~.”i&p[Jáè@¾Éaäº~š:¬¬¸,vÍS ž¤¨›‚ÄA•jÏvõÅY+?B Þ«áÐÚ韕}«½‚öCO”i~ÅN„_\¶vGy¸µ¢Ø_#TÇ|1!Ü%oÇr¥8*ã°¬‹Xa4lÆ6äždMÂPç$•ÐU¾Ì ö̽ÓâšH%8ü¬Å-݉7i˜!´´ `â[„£o ™áëK›˜ŸŒ›shuÄëK¢P7Ç!‚@¨?rMþ· Ω@ÓÊ#±Êâ]öØŠÃvN0  ês®™.&ÊÑÁ–°Ç—Î>þ–½ŽUä·1} ÏŽ‹Ý>jÂÆ~53r°´D'gíT5©;DÉpͦßo¤^žËkÏBËž¤ü…¥ ©ÁÅú7…; «MyL•X•Ô·÷©–Ï„<ñw1!av´ 2Õßì” Æ‹~.¤_6v˜0–ãIgÇ]š.“qßPÌ0“×0ÑÕ³‰OÎÜ+Ížû-Ž´¹öhÜÛñ,Ýè»—f5 e¾ ’¼ *+:ÝÃùq'^à]þ"œ!êD v›¾Öƒí`†{àÏù=i ºfÂ:ö¡À6÷‡T¶ŒIЈa–6ýϲ™ê@¹‡Ç抌™·Sk­9'Ü©Nœ.JÖæÀä2vì›Ø Ú ÔÒ”3böƒñQŸ¹›>ÝŠ ÏO½A±éD›Ò0+)¥ÅR%Ñ<ÒŽ6nÖh³ÖGR%êê‘Ôm;ƒTM¨YNTCw*ã×Ä*ƈó*ÇO«hEÊŽÆÅëg’F6¯ç©ñ´Ì}1JââNW2#ÀƨûÙBœºªLÑU† Þ|.ƨýáM¯ÿ›€ h ¨¦e"Nm$.4æ'ƒžï´KèÈ]ÑW±C墪)«ŒÉ#‹2S/<²³˜9åùq§¯gÞÁw^ÿ8²E£Þ5ŒÔÍ«~²ß™rƉ°BE•$ýòØM†,¸rhIî {j¡Úöp³½*Gf%ô¶þ{ÉgÌ„¯Ý¯M¬j©]ñÑ´Øx‹Û-„äÅ{jí1Ãdÿ~ôÎQ^iPºÿ’ Ôpã-S8?jêŠ@û—›j¡JñÓùXQhœö3¦Bi¾Ì»EëÈ=¡:9a“æqá. JGõNA€TșڪðAºäÙçÃôO™ˆrf¶K]ú¿‡‡‰éEÀ¦Aåä£D ïºYº¢ÃŒ^wѾ÷>òWº W»B+(ј6z®ç£N-~rw…Uòe%,=Uêì!HFjQõM˜¯µv.ÇÖ(MWì>_ô¬3]fç}”Ksé†2+ŹqŒŽ×ž²¯ƒ4”ªw}̃hPÀÈFf 8C}«‘¦~-Þ‹µT#iãç­ÐµLålöÎçÎÏ™l)œí»oâ’¾"­›~QlÛ! aZÓÁ ƒ¼i(?•SC‘e[éߟðžÄР%jÛÌ÷¶ÉZ7ÀžnY¶¨´¹f…äôÞ‘{¸ÃSµv“Ép‰ê­†;€×?V7q+nú¤§ ݯy®SAµô ¡ÊºÚË:¦Ã¾áˆÀý֑ת >~‹Úäõ<®Êv +JƒSr•ëô y» &ŸÉFU xÉŒˆÏtw¥!ˆDbl¶¤j"âÃÚpÿëy½bë¥2KÉÇл™#>úŰAUçvküäε{•Õ\j{Oj¸=fU‹C É5A= >áÊVµÜŸJžºe7$cs 3:p5¥â#}âá ¶`%Öó@È=ÚKNS`ÀÖä¶ÌÅ치—–{ ¡¼i ŽØÙP !àqÜp°>“Ýê Þ‹”æÌÓÂÓø…Ï"¶wíIÓÂ+/`Ö+c‚+̤Ί3üw¯p•ÇQ­9C)Ƶý*Å‚sSsË/9º²/Ÿ6r“YŽeÇJzÆŠOñÒTғ˳‘ãTRÊycVpÕè:Çí§ï8gGžÄ‡’ä¡0,Î’w[æáÖ§N%WÆ;™½Ú˜t,Šhøyæ[Þ—DÎÁå‡,š—ßP>éÕz,ɾÌ䕾GM¸öúÂû• 1™³È¼…õWœÙZNèààõ²þõÉ8¥ú©Šë ‹¶ùK¡jh ö߀q€#èTr)63Qgí`ØwµòP8'–ÃFɇ‚3•vz8ÓA z˜V€ùéô&BÔcÕïÄ€×'ÙêO†«d|`QÂ0½dìÚ|"(Øõz9ž)‰˜Q’bó9ëEÒv° 9a²;r¶ÔÃn*µÏc$~²p;ð«×뇥©[«™D7M•“G"s×üW©m©—ùޖ߯AÍæõ %ŒÃñqgf ¤qŽôj|+”TQT„#~ έ“6€òZÎôÛÑM{oU ©ÁŒâh¾Ô¡å[Ž¥6-Œ@¤†:põGcMÞÏÇ \ã…®üµ~™Öö},PœâLd‰9‘dg(óó/*#3‹ÔËØ4¹‘®õVÅ,mjÏ-Ì?‡´†Š¡Aoß«£“«¥I«p;Çÿt"l¢æ­:µtKNØL$—KJd„Œ˜-åSÐ…˜‡’è ðæ,¹×(¯ôl³ñ*ÐÆi/y‚‚†8Š!ñÔ" ¶¾\JáPç±5U}Qéî :’ÌY±dL^ nùŽž”E?Þ[ É¥H­•j·rP¸‚mõÞExÌ}üYÕùx ØÂ…Ž£7••k}ê]x-YµXËç›<®zeÊŒl* tàžW—~Pûö²k$èuÔ4ÆNüz¦“⌫[Þkû—k¾­‚åLÄèUv~ÙöÄÏË]í9~ë’4.;qé+õÄÂã•b¢1ÙQÔ-0Š9ˆ)1×4fÛj$öO†»—†30ƒ"íÐ+f£®ðiôãM›sKv¢s+į _©P¨èŸü®åU<3t~Q^zî—Hi¿ä-E »[vëéd2v}èpQFI£š.ëˆ?÷ÍLJ¤(Ö ¤-é±¼T[m„/¿Wc½ì‚¼ìÒ.7îg륈¬·Gq÷jˆëȃÚ.âD/(ù°Gb¹øPÀ2…¿é×ÙIs¼‘þƒl`¼Šymn…Cúœ9Ã@ö¨ÿe©sH%¡®—›®›¥hÑu(Å—mü»¶¨Š4ê ŸoÔA`÷úRZ@ÛXFðA° Èõ‘<´NKÅ$Íg/x$…Wé0/¦U"áñxu v~‚2»+CSt»p“f‰„6£KœFB&u$´X±ÓãÁäÙv2µßÏ©YÖ«'üWYÅl ¥8'€Ó4ÀvŒláÂ"­)Lw£ð:ez¶?(>·«JåâôŽ˜c­¸ªÇÿØ·X²¤bO„Ì=tFø±×ŠCOY¥ü³¥$¶o$¯.ÕÎеÍÂ1£ììO)ª6x%sHG;÷[öŸŒW4ìK>˜Më@Ȫ¾cè‚E&"´«ÍK²xZ/ÚÌô7ᄘɠÇjNÅs¦pû`ûD`U3õ¶Êsþ–!KKîã+ÈP<-׿Å-Ú"1-0Ð+m$_*Óêá÷^Ö^ðt¯1}|P*Fì¸Awyr? 
¶K ~:þç©£!ê!€üRÔ·?P¸H`#g/HMf‚Ì Ôs—fÏ2)¾ú¸ÆƒRcV±ÜB‹ÎøQy/L'ˆ²IEÐŽ‡wƒÄrÑ ±(?r¢ž)@ _û9?ý½(RwÏÍÓ37#~ÓOû(M6yü5“¶•Öò.¶èeª âéí±wÍÛO–¢ÇßÉeTwmˆ‘D‹µö–ADè¸rE o>H\[ˆJ-ZÖ¶"ÂLÎX‚@¶ux>M³cgزD’bjïš#Ël%v;"‹œpãÕ®À…]†5=HÌÿ"¥V}tÓAOHr÷ú·BŸ$íæø+ßÜŠXûO°;8´æÆqf?ƒLqpåÇ5ÍèQÖ1ù¾îîÇĶu6o½>“kpŽÐîa¹¹ê-ÃCÊÅß ×ÐIÎÑdøD1¶„  ¸Œ­ ìꡇNåóÒ"Ýï{$wêÎÇ”ûÁtžc·9dåÜ9’1×a¸BÝ~ˆÑ5#ʾ1ņ°”©òº¶•äŸÕx:u1b…y˜jêÿ»p‘'ó<ár”Èbåø™²û÷Û]›ò!QµZ;>˜7Õ‡¸ŒÙƒˆÅÀ±Þ鬃Ì/‡„`UoY\dGe¢s‡ö÷<¦z"oV®~˜®w¦b¦–0³îœVtEÁFÙF²¹¿èB~õ°Æ9!ÅÔj¨]Qç4(pÙ·6+d«þٱѣˆí,;´¿³ÝOuxÄ£½5»Åí€ggý¦—T»—Fž¸ü‹CW—@, CODËWýåóãPzµà{¦JZÐ1çwPDBÊ—ü…­8ÍEhB„Ì bn|üÇŠ–üî~ PGÂÌ쨂P¯ +²{¢‚ôvüðW¥—qÍ ÛnÕ9ðã¸\*x ɼ¥õ>LÓâÚw͹XG®¼e€¶ÍÒÓ( ý®8‹¼fgË:CwC>ÆwŽòÁ îc«n‰™où¦‰´‰‚óBbÑáÖ·-_~A-펫;£{Óˆ@hõB4E¼êßyn ¨˜>ħͅû~ƒhÖÚxá^Þ9G?a&å4Ǩd-Ÿˆ¡sƒ«úõЫ*zʽsr–“o5ሌpÓ­RqvÜ}£# ýA«’¦¦ ”Ùù×ntÌj—;úƒÔÆ7µ(a@¶Ðjðr¯ÁàMÑ«Ö~rD’Âuºê4¬ºè>&‹8`×òÓÏ„ÛKBÆiœ+w[åY«&fS‰ŒäS%¯¢á¼¶î°–ÅÛ6€ír<`V €HE —@¥ ¤ -›»~õšÙŠ0˜l„¥‘ü¡Å”¦¨®¡· š¼:,}ý°\n‘E•«ãI©çCÔ³O%»j¯(šà¶‰|ÙÐ#‡ß‚Á&``çf<­-•š¬äYÆñ#'É™+²dËܶ—°àèHÕÒ–…'îûJ(¸q„"à1ç()Ÿ¿‡Ë'T?-™âsš›ÝIÔaØÝ‘ÿŒWÅ2 ·å$fËÃhqê@Κ[ ­$?Ï{TŸÿcB»‡–'ûèͰäçbû*•ÞiIíi5€ ÝX9l|ƃB#èŒÓécGØ»×WTñw`sd¨¸¦¯dNnŠ^¦ðÒÊŒ`ÙìÊ>ˆËRߢuÿ_ºº¤õÅçm€i•žP»FkÏ _ZìµÄ÷—¿² pù·~i[¦²™~•5_±”Ö+ %ÓóÜoP‘G•OÜaóñEðÒ÷£š ®Î×Q™$±N…¦>wY<ô5!OáºaÙ{ôxÂåÜwM ¢ _9ã7JïnD/[XŒS 5%àJHÄ/ÞHðlÔÁë+.8IÑ>u³» tapûºuŒêìá/ Ñ!°7š8›–x\[«©.FÞL,¹¯"]üèÄÃö¦£f°f)`ˆÞØið _sý8£8;²¢„· _PDäNÜ™Œk6+7LešÐ­‘Ëï3Ϲ ͺÉbÀ³Ew=0¼–>¶Á·¡Î–¯Ûí™Òßüu⸄wýýX±Ìo¤ðÀÒ²`4jFáæª,­=)ï±5˜ð¡•bÛ®$ŽêTÁ÷øayjŒ©$ J¤ƒ×P¼5â*;¥”Y{8Ì€·Üë׋1Vð i£Ðç‰ÃE G﬇"òrIµ2³˜~Ì‘¸z^¢Wmá¹iŒÑúIÞ{/Ÿþ[‘\Oúwëžt¸ÿ»¶q+g ÔÏ´^Z–¡[6¥íÑDÁiÈ«=ͺ乺¡äÏÕ¿¶½@eCmXx©®~ø³±G"~,ç†ßêч¤FL}¬E ÎB5Ñ©crDæÌS˜øU9€Úû”óÌP¤ˆ¤.` ¢8|¥þóWcfaД½‚¼*{–ØdY)›WrÎͱ%Ë,«”øÿ{ÑÊ„Ãe(iÄÖG˜äî­ ¾ ÝIûÿ -UÕa¿°‡R«*µŠ°±P¿Xª‡:ÿ‘G=h¿$ï-v,ÌÍ[9÷'t¨'@·è^ºôÂöþ?Ž þ5¢0áìÉ(ÞhÂ%¨ŽÖ”Z ɬÝåï='ýÒA~Ã8£”ˆÙÇçÿ;ÕÍÐàzá¢cä—ìøÓf בô-Ù€ÏAlŽq€(%–Q<“Bæœa©ëœg¯i‡CÀ[ ÷õ¸f%8Ò òê¶Gž+W9Lu#¹×܈iY3ùS¥pмÆhHÂ×ÊͱR€Ò%ý¤ Iz?17¢hÅ’(ãüZd4×òK¤AÏšÆÄòÆ…ªr¼lÿ¯¦í¶µ4ú‚<qΓŸ'.["Žr¯û’\]Á‡qñ-Á0ƒH”\ЗǞ¤f{/,ž8no»ß–XtÚ€“|„í´Ð6·¥'YœŽq ¶Ï"ìJšr…àÉfFœDa°•˜0FtñHó‰±x4zÕîÉâY¢3ðr#†tõ·•zr…ý5º!Jl¥sìúü¾aÓúeïÓÅåX7ð}´>¶ø=Høq´­Eõ­×J¦pÊ7@õ”þsà ‚!Õè¢-[;ñ}gþA†xgT†ŒFðøãþMŸj&çÂ÷p›•àjº('Ï+ûŠÈä¡“õ=×#eÙ>ä.È:ÎE5¼y§»^ÝŧϵhÆI£¼¤ÜMP¨_…äfw8v5mÀË~SJÝÓ‘ÔRø›tAÍJ¾œÞH¿çªË¾*v‘àµø‘ù>²ïêk¯¹ë#Ø3—â*UEWPkª"¨uó{^.£8ˆé8ÏrŒx³@ßJé «Ñƒà°¢9&TÏ¦äŠ 7.Ðr£Þ1å~ë]K¿R bG×AC„Ç鼪lW`ïæ+›Ú‡5÷hK©tä¾Å6.¨û¸݈Ë%ÔȵЙ’5Ù„£ñ(ÁlòQmŽì‹‘?«|ŠpäHOç „-BÂ9æœ~~L¨˜%ã#… _³Ê²­(*¤ª)FdtœúAç1Ù+Q¼h˜g’„¡ €ÛiƒrhëÆ³.°4Ç<"í5ÿò¸xÏ@6­ÇC6-MNDÏ ~Øã=7Õ+öʬ8ú«vð‘mLj9mIO ÇÏçAúÌõÄÙ¢õó¦)¹õ;¢Ž1îfû)ÝSIª è]Ûm\ŸlÉȪàµa227¬"û©¬l)OĽE:k×’#<&²ž;J¹³4“¢B—‡ýìHlŸâÊ–Ûêóò&âø˜Ëp#&dÐ}pÀ@aSùüïÚfLZI^‚‘‡i¾*.XQl ªß ¥ZÓÔsSÏÕó?ÎÞÜŽÅbÖòØÎˆ ôFf%W›Ö3ÔÉ4·X‡c³ªs„û¤¯c—M0ˆ¢G•ÒM¿¶éSÞøBíÇÓ@¥QýE\s¯`ˆ&ìˉ·×Á›·™I.ÎOÏjÎÝâÁ¹RÜ ›}>óy(íŒF¨j8¯-Ãɺ ¯12»Ì¤—ì­±ÀBÌH;§w *v²½b¨ºŽB!ÊÍo‹!9¸ø .å¿¶+žÍ½ŽQ87y=n;ϼ„¦ ‡6ÔË’ÌbR”ðÅ‹G{Uì¨zƒ*•7ï¼+vü[ó}F™©^µ°þ›¼ÉÖÖ~¼Ñ.à©–YSAÀã‚ÂÂŒo—|¼ˆ;ì ½ÞÁã¡#Gá¢óelæa³s»ÉMr¯’ÿŽÞtEBèGMÛ,|ŽN—†ð}ÂMÒ‰34¸i–Ú8_WS´p®¥ñ~ã&7¨æÏÉó!Öë=7ÉRuœª§¦uì/üLC#d1þ\Õqã”®Ô†Ãzí;ŠéúllŸÛ²÷ÒÙô-æU?“./7]:Åiù ™’,œ€×÷cÐåÑGÛW›+ל>¬°ðFK Ç~àñòoÓ\,Ï!Î0É~´ñÒ­;¤\â1:„_v×á T¶ ±K¥¥=XBs×»êùE–ij\WD‹ 5¢4¬ë˃aÜ-CóìçÞ5ÐEZèP˺¶ Kd¬¸v8EÆ?á§€×¹מ,o{® X™8hq™þùˆuà:º"òãÚCIÚôqÏszΉÑcT瀵ÓÏH€9Ó”Ú­ö{Â`Z4°Á% Û–)HZ‰ît=Æçövv9¦[„ïÆ=+ñSbÿÛ¥" `ŠâE3ó´’—Rw£É†.¡æÖe¤þûm"â‡O1ÔÉ=z@u¢ýÞ‘'€Ñr¢«ÇLþË=š¤¦ÄÄ…¤|b æJtv%xã¿zß«S´»J›zÉßu–¬}¨–.…5vb¨›ÑêïågeÀ}SµÑeËᘈ%=„¸ Œscw8!òxÓ‰6(En ˆr¥™ÝäQ‚u¹TªoSµ¯Æ«R>Ò¶ý¾‡¸6¾O*t2sŒ9ˆÎ‡·Ø zl/¼ãÚ¾$DuD¡i”îE–hi°{X6 wžgJüH䱯ûÖ>àd×Q™3Ö³^æî,:åÈò:/DWî_3 SuÓq¸bHáö‹‡ãnU£¹'!zò8g(F ÈvW¹èÌþ0 Œì"ƒ²«QÖ2û8p±ÂZ>Šn;ÄJ¸cx¢ƒ}N(šÓ…ÑzºÄ*ÇØér)ÈÜŠ –d'$´ø‚ÒÓôÊ X\fú$ÝuÙ´|Ñ`Rð<†Ö`—÷­¨Îý‰Ì¬Ý•PxíþõèqXIùûh÷ÐÒ#Ô! 
‹_øšÏ±xNÄ)14Dßb›ÿþÇúŒ_;OÕVƒ?EË V9ɇøÏ½º{í}躲L/ä™VÕÃ~ÄA¿R4¿Ú¶¼{±p‰ùLÙsr™õ*ÌÆ‡ñÌPûŽ!¶(º$#¡sº¸wvÐçR£+[çw}^Ùã•AB¬ò·8\‘¸›¦ô ¦h„A¸… øA¿¤³M‚‚nÊ[`²Â7Tö¢8ªë¬Xpÿ­bÛ–$cÑTàniÞÙQj6n8´Ü„¦éKúµüL± Z‡QÖ© Vâú¹ù›r@Û‹[OΣi6mŒý_-,UÊ8æP ˆƒæ ,ä9¥æÓòÙúò½B|)þ¶(Ø— ñå1\äí®e+Gâû/u9‚÷0¢AçOŽ+Z__푤×؆"£ÚÊÛ‘ÅÅŠÜ~#$‰ÙÆ4êÒ¨Ó©Â÷?û}}ˆÌdlÖ©‘yÓ¬S¹‰AX’J@uS(:vkÁ{9âxÒ;æ­}A¡2â‰S*ÊXû•cØÐ©FåÝ;»&Tø1ÜöEK3ô/(–ÞîEq¯ßç"Içã2’ÎÚà…¸!¦D±c§þ´Z‚Yl>µ(òD10ƒ|æ¨d3I0M,&奯c­æšìEä›J íýJsT›’b‘·SAu¸`¾®xnö6’!nÿ.k¸ 6çjœ$ÿF#g)ìà,¼Šml+—t¨‘Ž V¬Ã%£¬0â½)£yòºïîæ'íl<‚›nóŒ¢(€Úk˜7î,‰ãž"Øm˜; mò¨6„)Ò<»ÙýÉå´—!{D!–’ªù"NLs4¨Ð õ&7jaøå kî9°¾¨mhË’kîjQ]ùZo°Òlk$P÷ÚRíÄÎýºŸ/¨Â2TõS•Ç­;)?_­»ÏÏCZ:£¢ ?7Oß1)$ùlPA=‘fØ‘4 ¾qŠZùÔëŠãГ)d=øæT`¾Œt{–óø5¶¹ÜÕ¶de~‹ry³!¼ »7™"€Ú±CöÐáG¹·&öysEbÕ CyJ„Œé“Tc¹”>ÿêÀ¨ó1ý"‘ !Ì1wËÁaëÔ¯ÛR ®–÷þŒ6„ü©Žõs5Câ( BÏY ݵÄyšY”˜1â2—žvhçsÊÄv‡¾£B,š`[ÜïOˆíe©ÇÝ :|ës?òM]†£¶aPg×’ ³–­kµÝ=³æ1èûü&_€Ù>¸¦›Üøôöaw%ï?@PÓwí¶…§™Ç79ÅÎØ2åŽt±Ç§Êõ¦wMP^|,sàob(ú+€G¨uÙat ^4( ÊŽð£‰Xo›õ2ß}윆¤¼ÄCL.Uæwí¤é»ÃÐÅE–…QIä|n({YKEõJxÂAânµñ,ºÜX ›fõw)F:ñ´jö–P«IfJ¦Õ%çe3£Jz% (öŽS÷&‘$@Õ+“ŸŸ±ŠGå)|jªªt<°-0ÿz¨w åHÝo§˜8¢™jL¡•ʳ"J_ýUØ…6ùZ‡²aŽNU‘D˜"[<&uônnò¡$u'øèÞe ¾Hn´ Ìn í´Ì‡Î—Ÿ÷5ö_ÏèY`í˜ïw£ÑQ]E´Á‰Ü²·t´õÂ}¹x´Šé§h<¨¢&[ʂƻø°;$KèIŒBnR¤~³1w)h·³dÍ(‘–LR?7-B~Æ9œHñ6#pŒ£èбѮr‡R)“ï“h„Í.8z«wÎiÜÏÕØSi¦Qo0FñIZÖg‰Ì¢×ÂC˜웯¸LÒêïZÏ?‹â;CóB´7îE.ã~Ì9¦Ò/ÛAo‘ŽÂN^fJX—üÆÖƒU(•*z$ÁFCI@r9ûÇN2¢²ŠÛ¨H –¦;È;gWñ'?•ÄHϼCD ÐÔÑûiG†áöú‘VŒA‡íÎ`ÊwÉR_(xÈãõ=ÖÍÖ&uàÑæ’Ž–¨ä¾j…N9ä9Þ¤d„‘òɈ’™4 »Oeˆf ¼›Î8&;ç®aé_<3ŸŽ Ï'å,ßû‰€c„„ºp—x s–¥‡gZXd¬Jv4%yq 6 ý—A9—Óþ6}û…ú¶bIèû-œKÞM;NaŸ‰wR6š|äæôãט¾œ:+cbÙuÁˆ‚÷š›—{ y Ш…w9Zd[ WÔ%µ˜8¯‡ˆjH¡jK^m7-“"€l÷SX°ümZ{FùûäØÜž=K¶“i7~lФ6‹£æ1N¤o#ãO„*n ÑQtò«”ÖÿSu›L÷â €ðž}­3´g¦ÇQ¥­ˆFh3 '6eøê&âìAÛîf`™mŽ3*CjÁ28­ùîCÌì>BÎaÜÏû õ¬ûØÁcR”Ó»°‡M›HVR=À¹´iGï ¿/o å6/kþWòZÇÃÿŸjðøò^”ar8p[Bt»$=AµËÀL /&HŸ§@g\ \¯ãç¶msŒï•QéP(,µûúø›âPòé®ÜìÖÒ¿mXsrúŒQŰåûÍ7bh½1½F$Óª‚¢8î¶Æ²NLhõúwZÑ•CÁÖ¤ :Ü·6­ U*½ŽqH­Qq§3åº49Øèñ.´÷ƒ‡ã×sÈÒc½æ«X°¦â5Ø”h%-pÁ€|Ãób{.Q‡¯«£*¤y³<²«M¿‘!žYd'˜Ñ\)mÁkPƒ96¿×?ÉEvOcñh<ø÷﫾¾Ê¬¦úxSw!S‚·¡âË$S+…º' $>´¬?4M¬y'Ȧ¸Sf”Æï#wos;Ã)èN•:éuz–ÛÓr qµÜ1@…ˆ¹ 4úN¿ÒÇHòQ»ÒŒ©»ŠüÞ°ìÅÚ‡Çþ.Z¢¯òÏ•‹PWRs¢1ÂvdKöH¼¸¨yáÌÄë&ŒQ Ãqº**«ÝgèoŽ' 2‰Óú´n!ð]ßèçlsaûç¾²vA nˆP–ƒ Ü`õÙûŽïÜ2?Àv²ø‡Ö lq3‚æüA¸{l>e\+[kõ«9V:”n¡ç]½w€ËÆx6üƒ<Â&8šF<^äå2™ëS ”ùÞL~Ãê‚+vv—CìPQtm?éB\|3 DBêx<ä!ÀÖÞV­õánèH)¥4SGßÛþQâB6Öš¸(êg„ŒT‹"±aÆí¬*`Ü[š¡Ê»?ëúŠÙ`ÜÞ~^UÜ%—]n奀·Œ±ˆŠ?ËÕž_VC…þ”jBO;qÀ[¾:D}ø@FØ5£õ5r솚¨CHŽŠùMO:nÜ9@ÿ°“VŠÿQsjfA*c0Z¹ðdÐæîcè èÍÆ“ a’CÐ#eï¦Äq‹œb8ƒ ^÷ÀË~å.›@1aì㨇–&©è H³«1ý«¥|\,@µ«©A´«péõ±bb V¯åœ‡.5'EiyŠX½T·¼3üø8Jçïæ–ß&îgÞÕä_˜Î€“? 
~'¶…8.è€õˆBÇÂaN•ݱGÄIb«°7MïÄ×õ/{o7ï"›A&^ˆÈE "yZ˜FXäÁ®èBS÷Jpk¾ æûjȸH­¿s^iè*èÉÖwÀõ)j/}'ÔZ=вÞ…Ïy ‹^¨›Wòú¨*P|÷0ïÀ]ŸMùBúkx<æbÌºÄ’ç ´+ÂÅI|·ÈWk"ø5ÒŒoÛº•¢õ<µÏðŠeÐŽ÷“FXU]Áò?B9é$ÿ“”¡6¡^y… d”y‘–mŸÅcôC0bI ‰¹Íº±¼+›EÕjQÿÑÓ‹™Y¼Ê+kÿš ás‘5¬Žø8ÕªpÑ•pär–“}ÓÜ¥xH~6RøãÇxTÈ- Ù´ê§¼’¹9ÑÀÉñ‰cÚïÆ§Al)98ZÔ#”—ÒV^HmÖ-ƒ£÷>óŸ ¥[ž™Á"ºìòìrîÒN)¹«bØ#jËXõ5.\ó±uq¤R3óüç+­jh/}}Ï!çÍK/S”#\Bsïù ¦¯^¿Çˆ¼8?a¦ðÈCD¸Œg€ ¶8ÅZö5íÍð¾g¯UáU„¹ékkw QdÈ«¼Š¥–iƒ™Ó)¾œ?ÑBŒ©ß8p—}¡óÐzTkLSš$¾æÆú Fnïx_ܦÍ9ŰZž…< Òó@œ¾—y¼ÈÁ äÏÒú•Eö]ÂÔ¤eMi¸}ÕüBxºàªèÎGt}{9JÂ!D¹õîÆ÷’‹®C·F͉|þAäè¢h¬ÌÒ«¬HÖAnv2w1[ 0"!+æ!ÿSÿ~8´¤(¾°èÞ|­|—&"÷«¹¯KC ¥š¹‹ã²(’‡kéB2{l¡¢aìý®#fJ ³5üšå Û!*Ç[k§Õ£+tÖçr¨+:46Aêò݈&³ÿ|[|9%p´rj­Ýen48”š1aÑ_~ŽösX›]¯ècwKù!ÃÂå¯ÅWÍÁuU|¡š‚ßöl_q2¹G^]üu«’!äA¡g–£“|š·¡[£AbÃËÌ™±åäFís¤¥ŸHÓúCjÁ†&ÿ5TÿœåÌjáf…]êœ&º´Ö·ôèÙß2ߪ°«ÀÊ´Q|wæqyF½¦¦ N0ØKýR Á ì–,vtº¯xù"¼]h9ˆO;Kî5†ÞµWju¶„õ „]®U_mê|``^z̳QÈRg÷oH n7N~–H5éËe ßûÉŸD~$Žè$W²cÓ;{ ˜õ›êª zC/?|/¤ƒB£Qu|ߺ&³ªtõ~’¶"xà$xå9¯åÒ\…ιðÐS‘“m [¹ÅÊ52Ÿÿ¦ÈV-†µûüïÙ_Aˆ!ÂíHªå%Æ»`e:`Á½O@ûé|JeeèN-w™»RœD\ÒB`¡03–Îöib‡½zj¤®Oí.ÁèTø¼»Ø+aÐZ³ ËS§·‚¦ÂÃ#N9r'ïßé—.ý/¾0;õX\áo¿òHª¥›y°Êð{QŸN#ûÄÂ[Q˜H£ÀĨVaDtÃæû!;žær£w`çß$<˜F=Ò잆ÕX6»*+™ãþã õ·üÔ´ÒsîìΛ¤‘›æ“åÈç=äÞÙ‘ÒqÉt=•fð¯•U²Ñ&TAKÐÑ>2sO¤4EK¡7[žÈ4WaeYUHìq’8¸î¢3lŸóë€Ú¥üÂZ]`ß!q!ü.4öe¬×bκöÏ8 Ò°ÝäGe)Ys–ýT• ­Qd ßí¤cR޲~Hw"ƾÂ%é J%HÅõ‰ž=Ueš¿˜ÈÓνb÷ÏTƒ²—+--š]^z>BtNÆ«ºïþ­x=8yEÏ4æóÎeÑ0LA¤FeO{ ×*Ýáø’¶Â ™Ì¦a¯{õÔÊîá\3I«ÆJ*¬FV­²®×Þž†qNIáÛ$ôÐ Ÿ@ ŒH;'”ì'‚Ê }PVÁ£½LmU/4l¦!Œ2¡qûôu.{î:d ò‘S«mǤֵ@e 4WÇ=ã{ß„Eš[åR{г‚]ó³”ÈšL¶ÚœÚOÙÁ\Y€¡Çœ‰ôN`™æd›CëT6€†Lè eùòÔ=O\BAmJÕÍ­ˆÎ¹ø¤,GðÊh»ê]ÆÝNèÏAä!´q4cuè îkOÀ‰žök{R€©¶HKñŸ"ÒÙ$©%NµEË/ b(¶ÌªR"Žtœá¦£pB%>ÿÖËŽÌÏ&ï ÷iR¹WSi2ÏÝ}Ê9ô.½7˜ºöG§Ó/ù«‰Æ]ÜÖÂ:/³FI§ëìKtZa¬a/äA[§ØR™Lr3ip÷x¯ŒÜ®L3ôÕ¯ÁûŽŠv*ÇÞýaù¤¡Ž™ZµÈþƒ‡äS?“}IÔv–¹í?¦wÄʰv³ì Uˆø¾&ïÌJ›8Œìo1jÔxdÕ;½1ª©SCM…µ«)ÀˆXÒJgeÉŠúÔKl>DA]çö†î&ÅöéPbðx­ÓÚÌÖk°”^ƒä–¥å4]Ò-Õü~´È_óíº`y ެ¨ºÅ3ÆÄ¿MOsÝFʰ«úÿÑlaà.×s2¸c¼SÇt˜C .Ø´ŸŒ£P¢?ìª bé]e¶§—>ÅJ@W¨œ?ôÞïçQ|°åºÃp“¥ç\[äQ•Ѷ=Ù~ñÔˆ.!®“k4O<ë¸çrã< ±¢µý‡¬`.‹ “¼ä]Œ]DÚ½^;¢ÃBiE.®lŽ&½ÜO¹X$’]›4†Á…tý__ép·4\…ÁÓ#éÜÎbUËEΘ¨®¶žïë& 8³ü+13]ëHT~†Z”±V ˜–éæ|&Ýn7rQ –s™ª¿|ë¸äœõ#·oDÁ !SÞìׇæÒôÅž¾¿ÊªJ“ȉA Ð ³¬q¶˜UO¯:ihš×‚þÁO‡Õ;’¤{Ã÷0p¹o—'t)$Vv?9›uß &Ì5Š>Æ:¥Z?• ¼(ŠÇH.«Ó§z7ýŸ]ÅÏL>p-š¤ƒgYÃÞ¹Sýü·]f£Ìnúž`œ›¯½@è¥Ø=Tº¨-+kѧQ·ö@°¨’Oýõ¤ùf@Ђ<­ŸÛ<ºÛBÃ6ÂñÕe¦Xß36}Ýü@3ùFŒ…®¾ãÄò|¦É!çå½®ÿ+Ú˜ø‡pÚýÓÒòæ«ñÖ¹£a~ϽÍ7l¡Å*+NVÛÔK»®äÀPW†& IÊD~COÑiÊìðOÄ*ŽÍÌË@ø¡ÌM ñ-g\gG âuFçÿ´i›ñz„b†løU›K·Ò=pD"‡ž[^˜£UoÈýE/¹ÞtS'›Ð,Á¦2[-™)åêÌFlM;úo~ «ý’ED‹¸#ßä>̯0µÅ±±EoÐV÷›Ã\’ã@×Þ¯42ˆÀ ÆbOX3 èRa~LnDÎ6M-Û€Y‚á¼1¿qáúGŠâÆÛ8Úzš±6%βž¡lØ)2±tT)Ð]ˆKzßóšý]ʹÞ~Òæ›UqJ q´Å ˃υ‰IsfÌ@op •›9þɲËxvËäÈWÏ„v£âUëÃDYø·mÈcàåÈg”Í»t»ªºÅ¯fÒ±s¸^…Êå„Év[i³ËÉŽêŸPÚ„æcþŸ^ £QÍèÔõ™ H †@Ùûã§qЭê>¡s©2îÒ7”9„ŽÃfŸë@ãÀÁHÉFªEK08cl^*˯†*bfâh$M{ö³O?ºBåžé÷hpò·õ£÷XàÞ‘ƒÜ? 
‰ZvŸcg³Ê’7y‡fPJ¥b]ÊTtˆ4êÖ¥ˆ¾á’¶È¤z ›Ñ.DŸVú|* üU1"o†x²wÓ*ÈY8hnì‘ZØ:œüZuƒ;.ζçC64{œ9É¡ frbø.Ó;ŸûZù5¥8r—«¨dSÛjuF¡&Ç©8KÏ1O¤ ÑÂp 7¥y"êgº„€É|‹Y„ì ¾&8Y90¯Ðã%õ¼&Mé±µÉѰ2"Õ)ë HY#Œ IÅ›àsf‰¤”¼Ê-7%žAË“¶`ÅàkÛ+ª%×Sq”Œ#Ù"äa noUÚÖ¶hÜÝéwíN•A;Db väó3™Üš0‹[å_ýû÷¶‘Çdµ_0ywäÇ‹ýÆP—pÛé“q Õ¼pÀ+Øã€ÁpªÜƒ„á ‹±Žò³šký±˜Šƒ%ŠS礑åiµ8úÉÓ¸ç*gtÌ+v=¿naNñÛœyPq*¹Å&šßçÎÑÐ÷MÅFjŒåJ¿0Û´ßã#eU/ˆxÓÔ€³µWÀJÕ™ñÒ{à€Í öë4*\oÚÀéiÎ;&xéî–/¡© ŠÈ‚m»KK™z…ç[î㪔·h%!J‡W¯t7WŽy`|ÅÌá©:™² ÄÐK´¬³Ž¦4ˆ•˜CÑËäD¶âái3Û)§u\±ž-¡ 1àŸæÅŸ¯]„@!]à_‹—« yeŸ#9õÒv䀉¶ ´Êwºÿì‹t8sžœq[´× XüýÈ­¿¶Ûñ|mõøÆi“¿_ÈòDËX¿ BäFq„̱5 ÞèVV—±Ÿð­æS¯ïhF¢ž B{åpÖÓ‰ô»mü5×e¿7bQ,WJïåE-—Ò/ãT]?°oá:Î0÷q ÁSœ ¯Œoé[ÄË%5ìé\+kj¡ÝˆŽe¤ ?l ¨|¨˜á0J?¤R ¯Ê³‘Ó¬°‰¡g‡Müd'DeNs\*¡.wmvî[èD˜WøÇõck¿ÆÕ*Ôs“CQ\Öe°HT¹Âzyzf" ‚4•‹î9 ‹úm‚¦.Í ]oÚÐ÷y “† èAêž¹´´\îÑó³(—ŸR²Ã0Ç2ñ†×|IaÌíf|ï&Ð?•/m8`a&å´ò4OæqÍlwð?úŸm·§$7©¬é0ješzã8ú;Pnâ&¸×uEç2©ý?œdž /<Î}¥}Ÿæª§‹÷ ´~8»tpu}HeÂØrzëhš$ç„ê~ÅÕ}òpƒ¨ŠÝÓ!—©˜àŒéðD™g–D‚÷ßËÙñ4üИ…«ËƒX½;¼;d¤øoL LÛ4Æi¢µ,衼ÕÞ–t9!ðVº0¾%lÔd¡^˜VŽS‘µÏÒFôì ÉÊ·r€ÃR5‹.¦ÓÛº>Öýɓ݂ý~—†,Ü6+ú)4xCO¶q#âɶUçcäÁÒ\FŽ×ö[ƒä~ÞaGl+¶~ÎRØf7Õ@ËBË5R)Q¹œŽz«öJ°s£X6#Ž¥h\%ñ3Ù¬kÝ2dyŒÝQ¦Ï+T±-3~ÝBT!Z‰ëð†ÀÉ~Ä@0·Æ 6å®Gù:T©ºƒ7šlÄÛ.ˆ¹*8TàÓµ´_Po<ºjx»éKÑÕ°K¨KhÚY’# IEÅDäÌ€DöÃ,Ñf°0T±ˆ1¤Þ)ËyH,öÂRÊq ëþßT¥v‚p¢ô¥M‚@¡ÓÅ¿5$Tzž»Ç_ùÊ|L[Âñj¤¢ÝÒ|þ±,<´òž¢šlà qœ¹QÊlìèa—•¥³jMÖ4àjþc%?È}—Ym½€-ë{Î?m¬*¤‰n+ŠCnø&Õ$©¬ë¨ø(h{deq˜Šé›ãÕ.Á§#ø¼rë~¤0Šm²‡F”pÞRbs³A‡Å/¯×60&HÜjNJ]'òÎùj3¶¢´½ Š[¯]ÃÑà·ò‡hJ2qèæo•ÏXÔ÷sX6 pêø8L†¾ÚÜ¿žÆ0V•Üüh´IL’Òê`Õåþ±ø8ª¹UÅÌ,Þwª<]ðÉ&0-Xܹ° çÄBÞöRš/?íëFC5Ó$|ß4o¶eï ‰ÌêBÓò´ôMÇ™{ÙÊÈIº=½—ëŽ_ÐÅËi„Ú££ì`IÂ!¸3% ûÇZÚíÑLò¨À·¢\Gp ·Eÿ-ðmµ8eæÎ­®ÛqÿQkû” ðÃÇBL±Ëjj Pì5¥ÛZ”½¦š]8ÊCVÞ¿=gw”27­”c î»0"µ»g]‡5—Ö~jÐ*l1þoBÏ×+þ ¬‡ÜMihðš&+Ïæ +¡´Éþz¢Mͪ=%ÃÐäeÖ³É5• j9"/ôĨÑ<[œnûöQÞúŸâkø …é«á%•l…BwôÖ›ˆd‘ "hD,è³¬Ë ³Ï!ÂÐÓ²‚ØË3‘Œ‹€K¢D¹@B’ˆúx|«ÛGþW #e3å'#N&!]4Á޹…êŸtÞˆJ„fxí™"Œ²8pu›¾%ûC—¥"î 5u^ƒH¬K}ØÉ(l¬¿…b,$bÍbCàŒI8†Àí\EV ¨!Ÿw‚  ¢#|`ðŽúVSZRe9Ÿö×>˜DÚ6ÐÈ$쾓Ù5"t/}…°tiDÍv!XÈ>X/Õk±jpçB[•¡)uÁô 븒o,UÍsµ­;ï¢1Ð#zˆ¬ß…–D›Þ³iÿcN¾(ê–¡ <òOÖç¬GËøYX2€VuQÙ%uWþ5ú›àÝ?°Ð Â`‹FKú›¯>~pFÇÚbwD}þm©Zܦo³Ž¶±·4Ió2­ØJyÎI…Ö6áxy´¦åÔP{+©1çé? 
^¡²@ܺü;IÙ­²ûÎ@ÄÉø•PGkc߉PFŽåŸSLÆ#|>›™±î*÷©Å‡Li;æÔC”Âv,ÐÇ‹=:©rœ¤”!ö4'th2UŽƒÎ™Hí‘N¼É£bàîÝ®°Œ¶Èo5%¯úgvµî³é‹±›ßUÝÁxDú³Éêk¯OìÅ Ÿ¬‹I~³’"è¦Hý°*ü]c:ü¹ò¢L%ã_Å+q%Ò1œ'`ˆ„ö’qÕݨ3À.ÖŒZXÀºd€öT»‘þ3XfÂXoŸ|œ ÍVgˆ¿h´6üCÌ /¤›RÀñÕ¸‚¹iüÖ]J¿^Ðe—9Ä€½Ämy\9skl¡uñ¥Pâ1·3±jVÎ÷ˆd5ôTÌñZC2ç M·:Šê‚0ï£v1Ýâq2ô:íé”h¶M¾ÚNºV$5šÃ~cz­P8£,Tß8…k÷Ë‘òÈÁP!HOåøF¼¨•ÕàŽXš3DÄù„ÀÕ 1˜ÑqãÎ"äÏ'Xñ€/h7§…ܵI57ÃÅÍÏL«¤ FÒ…­üª¢WíZhõ®»âK³•é`áégÙ–¶ä¤.(>JOõÔ&3º› \ÊnÒ.&9ºfà|\ËѤm»¹œ@)\º½õ’•dãsí÷÷FíD©«p•H¼ì¾z…Ðï;¡Ü ² Vå_ôSÁ ™ TID0hgZG«2Éõ;òö€k¡(’ˆµ²?õ³>×ÎЂ X¯A|>‚ÓÄg= ÉÇ6žXj1膗ÿuoš ’tšØKc¤àÂ2=½P±—•<-µÀ‹ú]p×ñÌ@ôÿ‰v‹¥Â½âT6µÌ$·zx8ÇÉ‘8;²Ï¢Kë£ý¤¦ ÚqçÐ)Ü_o 6Ùˆ²e PÎŽc£nÿý†0_uêà.â Ü¢Á9µ;g/òž¸^ ®Äp îÅ?âÞvmÍËáHsýâ0Ýæ¨{¹Ó±àq4<ÿh¸;l€€ÁáŒ÷Y&ÙLìWq~¼åÈ¿Ÿ&ãíòø<´Àåœ OéÛÒò‘Ûg–½ró “äãàËò:«D]™A×ykê?táíCTÊíµdaÛy=,ÁEç[’'ò/s&+or}ñ(G U Z¿š±ó½c Š•Ï½¿Xòæö v`Ïj»Ë½(# ž"‘ô=‰ùyHLD-g÷º]f!@ŽJR(æB²35oþ˜ŒÛûݨuí =à~àܤIø³‘™µ” ‚ÞìA€\ nŠHx–"’-C‹êjbD_·`Hh"m¡µ |1&ÞKŒu?» Vá¿$ZÎL ?^-rƒf·ò úÇj~®«b)–Áy›7g<ŽžIûiïÆÒ¶\fÙµøj4€âHy ø3dÂÚÆæúê˜ìª˜ÛRFXPm¨9[ßRÒܘt ²øˆœ )F/Ä k8S‰º2sß/,qãz³•S½åþ¬w±l-H÷õÐt‘c_ŽÊdÚƒq9N££ð¿Ѹ)¿ê›}h/K4võW-›¬pI“_nn#º–í×¥s}†¥÷WÜj@{s ¼~À¡´Õ$"/ÿÉâ±PÐÔÖƒ¿ýdD°ÃT}É£sÛÇÔOÅ"Ïõ &1~F1aÔ-õÍRžæ1Âj b›‘øG ÚH!I*Š6—A‘'<·Ý,Ë”<4’‡¾#–KIÕÊUw­‡ÑYIË}¦¾Ò7Í A› ݇Æ †žyÂêC³TÉÎ$wÊCðî“ïR¦}Äý'8»õ$—21‚tpö&G±OëI]*úBûÖ-óë¹Ìßg€õ„ñiø Ôç—ç0lx@x²ýö$YrÁ²"1—Í„÷РFÎP¬ÚfBdû—ZåøJFPOJT²àƒ~õ)¹›U´P @‚ÆòòâÜÉïhE}‡»NHî[ÏÌr™±Wj fÜ5×ï 3-2ŒÕ*¬5³ÈÃ?[®^|>èA²Ãã¯bÅ,އÌÙotgQ•ê.ϵß8EŽeæ]üÒê¨rÄ_}ç—‡ãK}ÍF–ñ&Ê´F5x<)så%ÚÕŽ‰Á¡Ïü`Gу9 \äôúœµ¨~uiœ~®ÎÓ Ê¾Iïñ ðrSÇÓôÀ¥u‹K³«-Š€Éz) q“][ˆ¹&xP`¡“8‚¤XwA½B.:æP^ÁK-ç«ç¤‡EKÔ<`BNÅc PÁ7ÑñC3†}q$Bäë„’¶A:D»-q8Eel•6zîˆ+gzµMuk¹ƒ¨ÈR…uFû‚W±×S+AUЙ¤iLT¸3ªã­ŸŒ¼˜ ðç¾ .ò^$µì 7ªÊ1¡ùÑF” ‰/ñeÛÀ÷lñcÔ&xæ{A=1½ËZ1«Fw+>‡:’vî…Ýò„¼Óç&Ÿ à'Ý[Í¥šçkv…lÚ1ÔðÏ* Áo£nåÀ¬M:MjÃó3Å |½j?toeìD}Fþ]ÿ1»3µIìR¶"uä¨j`ÂÔä a l'cètÐ|mÒ"‚ó\©p5yikC•Po*Œ’·‹ ‚Í„1ŠïÚ#Á¤Ÿ–UÖ1j„Å· wÛ.™£qˆ[ü¯{{·%i »“³£Âǹb¾ï¦§$X´‚õN^Žy@´Ïm–¹›“‡mј®Âþæ2æ'}1›IÉÙά?§•k%ø xd™S¶Îra8(¦X>§ø-G¤ù-°óÇÔ T$†VÞøüáÏ$|8oÙzÈ+™eΆHxªâ*X‘³uö€Íüå[ΘäU0|—Š·vÊ+“sÆ «tJ º}Ìå³ÞR}Ñ£5Åõ…Zщ¬5‘zû'ÜG¶G,)Ý…ŸÁc#Å=ã¡—zÖf xÚE œ2‰g<º+×ÂwsRW¸Ñ¸¼õ²¢pò l’Ê~a9@Å| xœthPŒ”Û²ï:,ñ^¼ˆ!6òB>ì}qÇYÇ Ìj}2¦Ô„ »Í²¢ 't— #,ðCw¢Ë°Òɪ¿®ƒo‘“›;=!Ò^]öy`óSO)H &µ±¡ùI{ÿž7¨g÷ñ“ßoÍÀɆò‚°°æoÜQØ^|„çäÞTY“ÒÔŽ&D)dÞÏÄÂ/bO±sŽå@XÚÆ èL‚ïÝ!ÕtÞ£å6§4‡é&­ƒ?¢¹@zþºH.ØUgkJéœ9žÍ³ ‹Mï^á±"w®le!ö×°)Ï·‰ž9{×Ë9øÔdI8tãK°(7ÌÌNP¤¥Þ:#‘Yæ7ºHHÛ3ñ³=m,›E½&N˜ÆqY™=¬¾Q8wüó¿HGºúe§b²¸å[)%⥛9´9Ч‚¨&Ð J¹¹j%;ï¥_q»©éí!œÏz±ëÇ>igÌcyø‹¶ìÔ #•²Q3 |f¨fjüŠœãMÁ–»ýÏ^rrYóG¤WǟLã:r!‡U”˜7,]no½NúÞÑFæ óþ4 Ù|nó^À⪯֒†$î»ºÞ \}±Ÿ½qoi"¶~c´Ô«¤êkGLPBÈ Õ“ŽöëoÚË@•ïl(iÅ‹÷ ›ÇÏE¾T)~—Ð×¥kðfФ¤¨sé7­â‚|>U Iél…ü!„ĸ ‡‹‘ÓqOSÖýûV8C¸=/5tQ?Ç<ÓAøC¶1Åǹ–EÛWÂJ³lñMàm#aR„"ö Ø+ÒØ.¶•°>=[™Þêœ4äX)–Bš0Ú4`›º©ÝLcL2㣲âGž¹Æ+JR<¯Šs0îS"µ®-óÙ㿽CñdÜUMA‚ïÆœ!L¸ØEò’Ï0 Bb¶Ÿæ¨üK.Ìö*e\ø[á “fÈx.‘Ðø‚âMžÉÿ ¡ÓšÅ!2µãØÕÎÉÔhEÊ|ç Bò¯ªË*‡§CMy¡²Ê4£‰¼åhRVzÎoŽæF ]¬œŽØ8PÁ ñ!gÔ<¸Ó ·¯¼Z›0c¼.[Ú„¹ƒÅNóÒ<ØžN»ÆÛŽ(NcÊ3¡fÒ{Gg…·@¹©ÂÑÌ+Á`ûÞô„Õ¤Bð†+߉=k(·ð +¸­„CعW>¯¡³ofI( _Á¯^M ?»pcYa· áRCr )åÀêéÈz¥:zï®Ùè\‡©ÄuÒ,>þwi}UÈñBu–kÞï(×|sz~‡eƒ÷‰ Ü”†]MƒÑ³&¾d4ºÉ(Ò“.¶O¦%¦²áü©Î«æ†¸ÖÃÓÙU¢¿ÙÃÔ»Y²ŠÔ¹1¿Ìµµ£Æ;¾Ô™;—~!` ŒÉevz¹â¹#Ýþ÷x„ΟûÆŸ±É²é™Ë‡ª•ŸØ–Ã5chý¹Bõ,pÈÔ9‚î4dz‰<²øŠl‹<¨!ZÑCÞžŒl]V5ǘݓ;^( íMÊdÆ•ÏðdÑ}׋,£¯²\¿4) ·´€^„ŸN2uB§ZÚXi´f¢·*«•´o5ôÉÜf%[äÑm91ïÉÒ44eH´ùQ÷žœ¢;Ñ£ŽÓv’ê |pU| ñ&º2 v¹6HåÃÎÂ!ÂÕ¹ UP½¿h­ê®èáb:MzÝøeF˜Œêu_y0_·h`£;c*¡¡89Ô "›ÌÉ3tí…¢ªM÷DŒoã¯Z¬?Æ ¼žš-_QT?µ}ÿãå !ʳ╽¾¿ü¡ËDÉz` ü2œgö¾ÿ@¯3çÛpüƒÕëà®þVkå¿ÀX/W ¶:Û´¹nt抨o¬mú>ñ#ò܋͆¸ Ó«‰a‘Í èU#9à¢g#ºœ`ÎblÍY¿BqÒglñË=J=~"\ó,¬æK×µÙT{TãkcçÕmK­WŠ€”àÒ›­,mg1³uóU×!ƒˆµšÎWv~êÕ›W‚ü…)šûΛH?¾¦0VÔªä-M‚üdû5[¼×߇rÖ&òkÇ´¬ã}'–{‰‹À,´ R´‡v :àêÃ×"Ij2C¤œ­%P¶¿Õ`)C2‡ž¯çÛwÊøVÅhЧ·©Œ×<…ºVN>ˆ¶b²Õü•dá qMŠ nB6OFŒDZwJdâîýá-ÝØBÆ=( „$ºãD ‹­û™’¡¼´À„'CQ6ž\ás&¾¥ÖÅ'³$Þº%&DáPElüK:¦³« ÁgÞI¶@éÃÛ.Š_ü›¶`ú)wþ»wwÃëaVXð 
ø'K)X©Ìýt­Ëµ>ã‘ý½;¬£!:ŸJ:\HLÛZÊâã1Ù~IŒ'Ö‰ØßÐÑ>à¬}DnÒkb€m‘c¤×2ß°®±êÖ‰èpâä¥9¿æ°–_ŹGÁÚû¿(²”Ä­J}ÁV¥K.{°Ðã U´l:gø2 »±OSе¢çuÈÍð7Y1!YãêœEÆóKü½Ôfø§ê\¨`µÛ,Ù9H å’,NÛàV6ÂLQÎ>ÿçžœV¯šï!°·Ð±¿²[n´‘Þ¶7ò^^BÜ-ú¾(V|H\¶Wû;àøyk-껳€G(êðËËn·ÁÛÓ×Öü§qÔ[|hߢ½H\b ÁºØáu@'1¾Žåv4Œš¡µÑzê¿¢LàTSÇéœò4î†ZÜ÷eãÅ·ŠoBú'8¹ÔLäðñ"urŸËçS=æLMÈ]`Å?Ç5ã !sG™ž{Ð5 {Lb9ŸâQâ–g·›$ErSó–Lx?òÀÐã#”ÉϘw~‚t?NztyîB&œ;¶—Z²˜ènN0ü ¢é2=æC?‰áGEh°W¸Í.Çßw³ÏB;pR:$u·!M¹~–aÔý—Ô*1÷3ůïÜl(Z<¢OgEÚõ¹yG%øÛܘÈ?ñ[ì„|,:†&™2¸~¸@uH3‘škžƒYsí´ô#W{èKgÒ®U¿Â=W$Øl-%^™”É2réˆ93ÚmÝú0Ét¨kh·Ù‚|£•ׯ¯Ÿûë[_h+XŒŒ›ðP=—±ÂAd¦À¹Dmþd0¯Oæö¶“MÎp á%0žµŸ-3û ·%uvàÆÑðaïÀ%k˜\OT˰õÇD¶nZ Ü\Rþ<=1—­y‡Ý} C#QÞm$–tQ6G½\»ˆ—Eˆú˜Å,‘uÁ£XZ´#?I€+®¡ùWw0 D S) Ãݰ…¤ÉÏ™=`ÎÚ8ö¥¹µ„c£«3íÀ›Œ±ÇÇ<’ÇšS *ag©ÉI¨X<ÕM6OÛ‡Q”˜QЪøçfǼ‡c¨L ~M…~ßá|ý&cŠ#S’q¸É'eo•p$õ¦‘«‹ô.'È'­ù\T»ä5y²kUÐæ™z5A| Ò³—·ÉãÕ|¶³ëBÖÌ£ JHè=~‚¸ëãŠLçćí4j åj 0áR-rräL0Ò²§¢qwh÷Á[$þZn£Ôz3ç=™6Ž ï1Çìs¾Ø'è¤ËyIŠ÷8K…GÚÅ#tOË8L×#¼%„óF’µdÉÂþ¢x¸®®rx Ÿã(GhÌ y>WÛ«®ü%ë½ôØYk[âç¼’O~ÿ r=4‰y„-{•ŸÁw–õ›úðöC“sJ®¦‡ªÏu|u©#—­ò‚ôXµÃgЪ>ºü㡺ÎåÄVù–HZ''±Í¢TD]Dj®Ûk…¾0Ü5zaÉQ¢'îÒ9×Ç{éÙ+ù1´BlÚO¡]›W:¼KËQH‡=ëƒÒÐý?‹×3z¥'éÒiVô™€#£R–ï<Ð!·AãÈwµ#Œ`=Ó”Fe=©°DÑO&DøÈÿ ˆ„Œ~Îïˆs¾Ô Òî›5úð'Ç&°Z늆‚ß²+0bÑ­\©- ¯ÌL4Þú©8¨•èöÒÐóäí„WõŒOÜf3€v­àèwÄ/GTz¶œ Œ%Ýóßõ "Yô¦þÁÉ"Æ-î½ÒÎcº¸Ò²>Œs’„µŠ¿Á2T“µõÛHpÝܳ¦Y5ùG¹ý¦^ÊÊs)òz>€¬Ü™G5«Ûô òψéOÖ«ÃuŠšk³[éÄ¥$Ôý?²Ç ‰ ßÜFDN¬ÙqŠÔXSzΔûf?ï—ÓkÌp–ó÷»ßÏtõãÐÓòx¢ñ—@¿ø'»ºá²-šÀf;â­Pnˆ¯ÇJäùêg2âÎ"ŒšR€2„õ=8O†xÏ)I dQ¬Ð;EšÇ››¨¿ áÙ$NÉÏMK umÊ‹„LóJ Dv´™xÿ­³”¤C“c37§¤V"ÒJlCV]²9b(ôIjñÌÓ¸aÑ€:Ï&¤ãH@Yü5¼[ªÉ®T#ü>Â’7M@9Nˆëf@¤LšÄ™ÚµŸAäN_ >LƒâW1°|k»M„ºG÷ÝÈåS øð €¦KèˆHrXPM±W¨ÃŠDH]E FÎvÄl…~¢5DŸµX–ÔW_ è÷¸Z;<ÎqPh4ÇUŽƒò+'t¤’Ê*NgÞn'÷½ÍŠ\‰W.ªö%IDÇ϶8# ¬?|büTzb‘< ½ U/JáÄÄïh|BUÙuJ”Ȩr½Pÿ¯ØO&!©“h¯¦µ÷fŠáM„ÆsÕoF;õÜXžeM^ºƒŸƒã®yßÎZLmäºKËí-ÏPÌ>ßרŸ(¥™œI8½G}0×›úBŒ2J¯ü̓èwºÓDšP‰øj|@!Ñ7 å!ô7«õO êuüáU‘gæ±ù|7çƒp®¯]z!¦“ˆ‹ú¨¸ktdæf¨˜{`³#åͦ#ÒGLñf#vAˆú¼Ï% á‘ò.üšºË‚˜âÍ%ýe‰KþÒòÜd½Xáô÷ŸEø"G„ÖØaS…ÀÐlÁ\¢ñb­|ŸSꛥ]¨÷e:£[½ Ó3ÏŽ£c¤þC¦'Ì2˜ÂÝZªc,\aÜÏËmb·5‘® ÜC”ëðZ^ZÑTŽC74ÛþèÔ´V ò} ²WylæáVô×™æôÛ› r«œLÙƒOƒYÿ?›Ä¤Êý>g‘…]on¨ÇÙæ|ó)_S(³4Ê•|Û·ôAYZýd丬ƒú.Ëm³(Á6¨¹DÏ;ÖËo¡Ç‚;;Â’1:¿fbínbî x†PS©Ö?ÆWV÷œQ˾¹‹œX•ÀK!,K³ÐuÀ¿£|J- æŸym,H怞¤­Ýé"^íÀĄ̈Tá⯼^³\“tQ6FZ\ ui ‡÷´QXÈB9êÜÂ}ÆÌßÇ0ª@&ÀÇw—Á¶i X¦˜®¤ö(!;çÑweQûéKÒ™žÛhRŠ!ª§¯<ë,¸ ¹¾¢'àÁœqab.3›pWæÞèÈဌ@ï™0ë)r"=c÷0±F¥'HEå«zy@tôm6&ÜP™p’ñ¥^׈S1QíèŽÊ÷sRë…K7]ÎFÏOï½h£ 㹉û½²®åá«{à]ì¼ÈyÝHù“,Ì•TÉ2álrwž™mÿzv ÖÛ+\Sö ƽ¾OhÐæšPo&mùÙ nWÂu…1M[ 2ÖZ®qgÊÉPµ#¥h¤¬â/íëºÞfJ+%ÛëØhšMÉÀá™(&_¤wt¦Ó8Ûûœ†ŸŒ 2e®HX¿å{Î7$¨Æ³\ˆŒM/[ó×_K¦·\Ù?)ÖkÛ©òJЬ4 [KñjÔòÏ&‹ßî¾ñ™ÔÜt&¦lG»ÁÜYÙOiô­0e;¿ßSa;Šgœ »©’?uÛ²“Ï/Y1ïêz˜`Öç…]rèÃÑH¼•¸î\ÚâÓ` £uª¤ßq„ Ä3ŒlVg2/¨¼Ð-Ÿ:ôž¦K÷A ØEDOCÛìbÏÅ”¦´ï¢—½Þ=²òfÑ'<ÜêJ#÷ÃcƒeíÊW1ƒnÓKwn´~pü’ ("É–¥y.TÕô‡åld!z"¥>óŽËuú Omñµ¦Ÿò3| ¯è3Î o‡ ó3¤N0 Ÿ^¹"LZKÎÜI²~^f>iŸ’‘Œë½¿¹zb¸º×«÷µ]À b­[ù™l*LýUIÓ ÕZõ’ߥ K¾c5†6õH5šÃ3,S×@U!ÞötÐ1‹ËvÉ\åý_ž¯ón¢-¬õ·Î03¦0>Û¹>êÎ^(P–€Þ·^r%C¢3OµÕc 7÷ø_#ïÌ.ŠO,¤~§¢ê9<…™è§À튢ƒ‡¬ïuÛx©v¨-¤lLÙ…ØsJp¼>I€ÂÊÖ—^ðz÷×2 xõ(D¥<•@0±¹ãŸÛ¼pCle ¦Ff%mòªdÊ%ÒI´xªýV sE›µ OœsÆ÷(,}ÐPzeSl¨·²“å9Mª(Ä Öv†ðíI;Ðw«‚(½ù \Ñ{¨ÝãÔ@A¸¨¦,Ða …c¼ù(¹Í‚{,çíŽp-¯vÕg•N»Kûvx¨t?xÝïÂÓ‘#ô7î(>Á¸ŒE¹°ú8ã‡èËGÈè¡Í–š ¡E|N sjÔl†âM£ä9{WÌÄ÷2ãoõÔò·Aሟ“õþn»ÚõÈ;àvRÐàð£råv’õßÊP÷ÈY„%‚M§RÅI·Q¬}ëæ«4Ð8‡7Ð÷àk˓҆Â΀:fpÓ=¸aëgû’~O›ŠZæ—q´¡±F ÑÚæ]T€¥è†#ZüùJ9M¬ëÜ A0‘—Qƒ2º®iXxót£ŒÈ’Å0Y¤Ÿeš­Dp‹õ*V*g<˺xô+kÂ'©’9Ëe#mú½YÛ:HÐù×7ßÖßÑ›´— ŽVÕîĘý.uíJ鼄ú:éFK¢9ŠšŸ…oj•Ù•p¤ cãõ`“½Ô\Ã_¹YŽ'Cdz+Ò§e?4Ê0úˆCg·öaPNî†måÀÞ¬2ÐÐ\d…%ðY%íìr{Q#gTæ«z¼àéŸË‚hÝÈ›ú9ÈØLHn¸Éý¥á·ÿ6´"RŸñ.ù‘þ[FÅKïÉjÅJ<_ìqKçÊç8Ëq'ºüóóžÓŒC¡ã*Zw –¸÷—þéÊ4µE¨0¦åkEÊéK¿ª»¼=øöÏÐ8Ëû ê¾ÿ Œsôå€^5Š=ðÑm ”›7Ëls¥ÑÊìߟlÖ›i.ÂÉG@aG˜âËîæ#*AàöâÎ…fÁ°ŠêµÒÏ<…šôZÏI›ˆZ'çpBDM”ØüÄ{2*= . 
kernlab/src/0000755000175100001440000000000014656670132012507 5ustar hornikuserskernlab/src/inductionsort.cpp0000644000175100001440000000264612234152620016112 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 2.0
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/.
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is the MSufSort suffix sorting algorithm (Version 2.2).
 *
 * The Initial Developer of the Original Code is
 * Michael A. Maniscalco
 * Portions created by the Initial Developer are Copyright (C) 2006
 * the Initial Developer. All Rights Reserved.
 *
 * Contributor(s):
 *
 * Michael A. Maniscalco
 *
 * ***** END LICENSE BLOCK ***** */

#include "inductionsort.h"

InductionSortObject::InductionSortObject(unsigned int inductionPosition, unsigned int inductionValue, unsigned int suffixIndex)
{
    // sort value is 64 bits long.
    // bits are ...
    // 63 - 60: induction position (0 - 15)
    // 59 - 29: induction value at induction position (0 - (2^30 -1))
    // 28 - 0: suffix index for the suffix sorted by induction (0 - (2^30) - 1)
    m_sortValue[0] = inductionPosition << 28;
    m_sortValue[0] |= ((inductionValue & 0x3fffffff) >> 2);
    m_sortValue[1] = (inductionValue << 30);
    m_sortValue[1] |= suffixIndex;
}
kernlab/src/expdecayweight.cpp0000644000175100001440000000557312234152620016222 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 2.0
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/.
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is the Suffix Array based String Kernel.
 *
 * The Initial Developer of the Original Code is
 * Statistical Machine Learning Program (SML), National ICT Australia (NICTA).
 * Portions created by the Initial Developer are Copyright (C) 2006
 * the Initial Developer. All Rights Reserved.
* * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/ExpDecayWeight.cpp // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 // 12 Jul 2006 #ifndef EXPDECAYWEIGHT_CPP #define EXPDECAYWEIGHT_CPP #include #include #include "expdecayweight.h" using namespace std; /** * Exponential Decay weight function. * W(y,t) := (lambda^{-gamma} - lambda^{-tau}) / (lambda - 1) * * \param floor_len - (IN) Length of floor interval of matched substring. * (cf. gamma in VisSmo02). * \param x_len - (IN) Length of the matched substring. * (cf. tau in visSmo02). * \param weight - (OUT) The weight value. * */ ErrorCode ExpDecayWeight::ComputeWeight(const UInt32 &floor_len, const UInt32 &x_len, Real &weight) // ErrorCode // ExpDecayWeight::ComputeWeight(const Real &floor_len, const Real &x_len, Real &weight) { //' Input validation assert(x_len >= floor_len); //' x_len == floor_len when the substring found ends on an interval. if(floor_len == x_len) { //' substring ended on an interval, so, get the val from val[] weight = 0.0; } else { //weight = (pow(-(floor_len-1), lambda) - pow(-x_len, lambda)) / (1-lambda); //weight = (pow(lambda,((Real)floor_len)) - pow(lambda, (Real)x_len+1)) / (1-lambda); // double a=floor_len*-1.0; // double b=x_len*-1.0; // weight = (pow(lambda,a) - pow(lambda, b)) / (lambda-1); weight = (pow(lambda,Real(-1.0*floor_len)) - pow(lambda, Real(-1.0*x_len))) / (lambda-1); } // std::cout << "floor_len : " << floor_len // << " x_len : " << x_len // << " pow1 : " << pow(lambda,-((Real)floor_len)) // << " pow2 : " << pow(lambda,-(Real)x_len) // << " weight : " << weight << std::endl; return NOERROR; } #endif kernlab/src/kspectrumweight.cpp0000644000175100001440000000652312234152620016431 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the Suffix Array based String Kernel. * * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. * * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/KSpectrumWeight.cpp // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 // 12 Jul 2006 #ifndef KSPECTRUMWEIGHT_CPP #define KSPECTRUMWEIGHT_CPP #include "kspectrumweight.h" #include /** * K-spectrum weight function. Compute number of common (exactly) k character substring. * * \param floor_len - (IN) Length of floor interval of matched substring. (cf. gamma in VisSmo02). * \param x_len - (IN) Length of the matched substring. (cf. tau in VisSmo02). * \param weight - (OUT) The weight value. 
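//' Editor's note: a minimal standalone sketch (not part of the kernlab sources; the helper name
//' and the sample values are invented for illustration) that evaluates the same closed form used
//' by ExpDecayWeight::ComputeWeight above, W(gamma,tau) = (lambda^-gamma - lambda^-tau) / (lambda - 1),
//' assuming lambda != 1.
#include <cmath>
#include <cstdio>

// Illustrative re-statement of the exponential decay weight.
static double exp_decay_weight(double lambda, double floor_len, double x_len)
{
    return (std::pow(lambda, -floor_len) - std::pow(lambda, -x_len)) / (lambda - 1.0);
}

int main()
{
    // e.g. a matched substring of length 5 whose floor interval has lcp 2, with lambda = 2:
    // (2^-2 - 2^-5) / (2 - 1) = 0.21875
    std::printf("%f\n", exp_decay_weight(2.0, 2.0, 5.0));
    return 0;
}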
* */ ErrorCode KSpectrumWeight::ComputeWeight(const UInt32 &floor_len, const UInt32 &x_len, Real &weight) { //' Input validation assert(x_len >= floor_len); //' x_len == floor_len when the substring found ends on an interval. weight = 0.0; if(floor_len < k && x_len >= k) weight = 1.0; // std::cout << "floor_len : " << floor_len // << " x_len : " << x_len // << " weight : " << weight << std::endl; return NOERROR; } #endif //' Question: Why return only 0 or 1? //' Answer : In k-spectrum method, any length of matched substring other than k //' does not play a significant role in the string kernel. So, returning 1 //' means that the substring weight equals to # of suffix in the current interval. //' When 0 is returned, it means that substring weight equals to the floor //' interval entry in val[]. (See the definition of substring weight in //' StringKernel.cpp) //' Question: Why is the following a correct implementation of k-spectrum ? //' Answer : [Val precomputation phase] Every Interval with lcp < k has val := 0. //' For intervals with (lcp==k) or (lcp>k but floor_lcp= k but floor interval //' has val := 0 (floor_lcp < k). Hence, returning weight:=1 will make substring //' weight equals to the size of the immediate ceil interval (# of substring in common). kernlab/src/iweightfactory.h0000644000175100001440000000323312234152620015674 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the Suffix Array based String Kernel. * * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. * * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/I_WeightFactory.h // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 #ifndef I_WEIGHTFACTORY_H #define I_WEIGHTFACTORY_H #include "datatype.h" #include "errorcode.h" /// Weight Factory interface for string kernel class I_WeightFactory { public: /// Constructor I_WeightFactory(){} /// Destructor virtual ~I_WeightFactory(){} /// Compute edge weight between floor interval and the end of matched substring. virtual ErrorCode ComputeWeight(const UInt32 &floor_len, const UInt32 &x_len, Real &weight) = 0; }; #endif kernlab/src/msufsort.cpp0000644000175100001440000002410012774377717015106 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. 
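//' Editor's note: for orientation, a compact restatement of the k-spectrum rule explained above
//' (an illustrative sketch, not kernlab code; the helper name is invented). The weight is 1 exactly
//' when the matched substring reaches length k while its floor interval is still shorter than k,
//' so only matches that reach length k contribute to the kernel value.
static double kspectrum_weight(unsigned int k, unsigned int floor_len, unsigned int x_len)
{
    // mirrors the decision made in KSpectrumWeight::ComputeWeight
    return (floor_len < k && x_len >= k) ? 1.0 : 0.0;
}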
* * The Original Code is the MSufSort suffix sorting algorithm (Version 2.2). * * The Initial Developer of the Original Code is * Michael A. Maniscalco * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. * * Contributor(s): * * Michael A. Maniscalco * * ***** END LICENSE BLOCK ***** */ #include "msufsort.h" #include #include #include #include //============================================================================= // MSufSort. //============================================================================= SYMBOL_TYPE MSufSort::m_reverseAltSortOrder[256]; // chteo: Changed the member initialisation order to get rid of compilation warning [181006] // MSufSort::MSufSort():m_ISA(0), m_chainHeadStack(8192, 0x20000, true), m_suffixesSortedByInduction(120000, 1000000, true), // m_chainMatchLengthStack(8192, 0x10000, true), m_chainCountStack(8192, 0x10000, true) MSufSort::MSufSort():m_chainMatchLengthStack(8192, 0x10000, true), m_chainCountStack(8192, 0x10000, true), m_chainHeadStack(8192, 0x20000, true), m_ISA(0), m_suffixesSortedByInduction(120000, 1000000, true) { // constructor. unsigned char array[10] = {'a', 'e', 'i', 'o', 'u', 'A', 'E', 'I', 'O', 'U'}; int n = 0; for (; n < 10; n++) { m_forwardAltSortOrder[array[n]] = n; m_reverseAltSortOrder[n] = array[n]; } for (int i = 0; i < 256; i++) { bool unresolved = true; for (int j = 0; j < 10; j++) if (array[j] == i) unresolved = false; if (unresolved) { m_forwardAltSortOrder[i] = n; m_reverseAltSortOrder[n++] = i; } } } MSufSort::~MSufSort() { // destructor. // delete the inverse suffix array if allocated. if (m_ISA) delete [] m_ISA; m_ISA = 0; } void MSufSort::ReverseAltSortOrder(SYMBOL_TYPE * data, unsigned int nBytes) { #ifndef SORT_16_BIT_SYMBOLS for (unsigned int i = 0; i < nBytes; i++) data[i] = m_reverseAltSortOrder[data[i]]; #endif } unsigned int MSufSort::GetElapsedSortTime() { return m_sortTime; } unsigned int MSufSort::GetMemoryUsage() { /* unsigned int ret = 5 * m_sourceLength; ret += (m_chainStack.m_stackSize * 4); ret += (m_suffixesSortedByInduction.m_stackSize * 8); ret += sizeof(*this); */ return 0; } unsigned int MSufSort::Sort(SYMBOL_TYPE * source, unsigned int sourceLength) { ///tch: //printf("\nIn MSufSort::Sort()\n"); // set the member variables to the source string and its length. m_source = source; m_sourceLength = sourceLength; m_sourceLengthMinusOne = sourceLength - 1; Initialize(); unsigned int start = clock(); InitialSort(); while (m_chainHeadStack.Count()) ProcessNextChain(); while (m_currentSuffixChainId <= 0xffff) ProcessSuffixesSortedByEnhancedInduction(m_currentSuffixChainId++); unsigned int finish = clock(); m_sortTime = finish - start; ///tch: //printf("\nFinished MSufSort::Sort()\nPress any key to continue...\n"); //printf("%s\n",m_source); //system("pause"); //getchar(); // printf(" %c", 13); return ISA(0); } void MSufSort::Initialize() { // Initializes this object just before sorting begins. 
if (m_ISA) delete [] m_ISA; m_ISA = new unsigned int[m_sourceLength + 1]; memset(m_ISA, 0, sizeof(unsigned int) * (m_sourceLength + 1)); m_nextSortedSuffixValue = 0; m_numSortedSuffixes = 0; m_suffixMatchLength = 0; m_currentSuffixChainId = 0; m_tandemRepeatDepth = 0; m_firstSortedTandemRepeat = END_OF_CHAIN; m_hasTandemRepeatSortedByInduction = false; m_hasEvenLengthTandemRepeats = false; m_firstUnsortedTandemRepeat = END_OF_CHAIN; for (unsigned int i = 0; i < 0x10000; i++) m_startOfSuffixChain[i] = m_endOfSuffixChain[i] = m_firstSuffixByEnhancedInductionSort[i] = END_OF_CHAIN; for (unsigned int i = 0; i < 0x10000; i++) m_firstSortedPosition[i] = 0; m_numNewChains = 0; #ifdef SHOW_PROGRESS m_progressUpdateIncrement = (unsigned int)(m_sourceLength / 100); m_nextProgressUpdate = 1; #endif } void MSufSort::InitialSort() { // This is the first sorting pass which makes the initial suffix // chains from the given source string. Pushes these chains onto // the stack for further sorting. #ifndef SORT_16_BIT_SYMBOLS #ifdef USE_ALT_SORT_ORDER for (unsigned int suffixIndex = 0; suffixIndex < m_sourceLength; suffixIndex++) m_source[suffixIndex] = m_forwardAltSortOrder[m_source[suffixIndex]]; #endif #endif #ifdef USE_ENHANCED_INDUCTION_SORTING m_ISA[m_sourceLength - 1] = m_ISA[m_sourceLength - 2] = SORTED_BY_ENHANCED_INDUCTION; m_firstSortedPosition[Value16(m_sourceLength - 1)]++; m_firstSortedPosition[Value16(m_sourceLength - 2)]++; for (int suffixIndex = m_sourceLength - 3; suffixIndex >= 0; suffixIndex--) { unsigned short symbol = Value16(suffixIndex); m_firstSortedPosition[symbol]++; #ifdef SORT_16_BIT_SYMBOLS unsigned short valA = ENDIAN_SWAP_16(m_source[suffixIndex]); unsigned short valB = ENDIAN_SWAP_16(m_source[suffixIndex + 1]); if ((suffixIndex == m_sourceLengthMinusOne) || (valA > valB)) m_ISA[suffixIndex] = SORTED_BY_ENHANCED_INDUCTION; else AddToSuffixChain(suffixIndex, symbol); #else bool useEIS = false; if ((m_source[suffixIndex] > m_source[suffixIndex + 1]) || ((m_source[suffixIndex] < m_source[suffixIndex + 1]) && (m_source[suffixIndex] > m_source[suffixIndex + 2]))) useEIS = true; if (!useEIS) { if (m_endOfSuffixChain[symbol] == END_OF_CHAIN) { m_endOfSuffixChain[symbol] = m_startOfSuffixChain[symbol] = suffixIndex; m_newChainIds[m_numNewChains++] = ENDIAN_SWAP_16(symbol); } else { m_ISA[suffixIndex] = m_startOfSuffixChain[symbol]; m_startOfSuffixChain[symbol] = suffixIndex; } } else m_ISA[suffixIndex] = SORTED_BY_ENHANCED_INDUCTION; #endif } #else for (unsigned int suffixIndex = 0; suffixIndex < m_sourceLength; suffixIndex++) { unsigned short symbol = Value16(suffixIndex); AddToSuffixChain(suffixIndex, symbol); } #endif #ifdef USE_ENHANCED_INDUCTION_SORTING unsigned int n = 1; for (unsigned int i = 0; i < 0x10000; i++) { unsigned short p = ENDIAN_SWAP_16(i); unsigned int temp = m_firstSortedPosition[p]; if (temp) { m_firstSortedPosition[p] = n; n += temp; } } #endif MarkSuffixAsSorted(m_sourceLength, m_nextSortedSuffixValue); PushNewChainsOntoStack(true); } void MSufSort::ResolveTandemRepeatsNotSortedWithInduction() { unsigned int tandemRepeatLength = m_suffixMatchLength - 1; unsigned int startOfFinalList = END_OF_CHAIN; while (m_firstSortedTandemRepeat != END_OF_CHAIN) { unsigned int stopLoopAtIndex = startOfFinalList; m_ISA[m_lastSortedTandemRepeat] = startOfFinalList; startOfFinalList = m_firstSortedTandemRepeat; unsigned int suffixIndex = m_firstSortedTandemRepeat; m_firstSortedTandemRepeat = END_OF_CHAIN; while (suffixIndex != stopLoopAtIndex) { if ((suffixIndex >= 
tandemRepeatLength) && (m_ISA[suffixIndex - tandemRepeatLength] == suffixIndex)) { if (m_firstSortedTandemRepeat == END_OF_CHAIN) m_firstSortedTandemRepeat = m_lastSortedTandemRepeat = (suffixIndex - tandemRepeatLength); else m_lastSortedTandemRepeat = (m_ISA[m_lastSortedTandemRepeat] = (suffixIndex - tandemRepeatLength)); } suffixIndex = m_ISA[suffixIndex]; } } m_tandemRepeatDepth--; if (!m_tandemRepeatDepth) { while (startOfFinalList != END_OF_CHAIN) { unsigned int next = m_ISA[startOfFinalList]; MarkSuffixAsSorted(startOfFinalList, m_nextSortedSuffixValue); startOfFinalList = next; } } else { m_firstSortedTandemRepeat = startOfFinalList; } } unsigned int MSufSort::ISA(unsigned int index) { return (m_ISA[index] & 0x3fffffff); } int MSufSort::CompareStrings(SYMBOL_TYPE * stringA, SYMBOL_TYPE * stringB, int len) { #ifdef SORT_16_BIT_SYMBOLS while (len) { unsigned short valA = ENDIAN_SWAP_16(stringA[0]); unsigned short valB = ENDIAN_SWAP_16(stringB[0]); if (valA > valB) return 1; if (valA < valB) return -1; stringA++; stringB++; len--; } #else while (len) { if (stringA[0] > stringB[0]) return 1; if (stringA[0] < stringB[0]) return -1; stringA++; stringB++; len--; } #endif return 0; } bool MSufSort::VerifySort() { //printf("\n\nVerifying sort\n\n"); bool error = false; int progressMax = m_sourceLength; int progressValue = 0; int progressUpdateStep = progressMax / 100; int nextProgressUpdate = 1; unsigned int * suffixArray = new unsigned int[m_sourceLength]; for (unsigned int i = 0; ((!error) && (i < m_sourceLength)); i++) { if (!(m_ISA[i] & 0x80000000)) error = true; unsigned int n = (m_ISA[i] & 0x3fffffff) - 1; suffixArray[n] = i; } // all ok so far. // now compare the suffixes in lexicographically sorted order to confirm the sort was good. for (unsigned int suffixIndex = 0; ((!error) && (suffixIndex < (m_sourceLength - 1))); suffixIndex++) { if (++progressValue == nextProgressUpdate) { nextProgressUpdate += progressUpdateStep; //printf("Verify sort: %.2f%% complete%c", ((double)progressValue / progressMax) * 100, 13); } SYMBOL_TYPE * ptrA = &m_source[suffixArray[suffixIndex]]; SYMBOL_TYPE * ptrB = &m_source[suffixArray[suffixIndex + 1]]; int maxLen = (ptrA < ptrB) ? m_sourceLength - (ptrB - m_source) : m_sourceLength - (ptrA - m_source); int c = CompareStrings(ptrA, ptrB, maxLen); if (c > 0) error = true; else if ((c == 0) && (ptrB > ptrA)) error = true; } //printf(" %c", 13); delete [] suffixArray; return !error; } kernlab/src/lcp.h0000644000175100001440000000452512234152620013427 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the Suffix Array based String Kernel. * * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. 
* * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/LCP.h // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 // 11 Oct 2006 #ifndef LCP_H #define LCP_H #include "datatype.h" #include "errorcode.h" #include #include #include #include #include /** * LCP array class */ class LCP { private: /// Compacted array /* std::vector _p_array; */ /* std::vector _idx_array; */ /* std::vector _val_array; */ Byte1 *_p_array; UInt32 *_idx_array; UInt32 *_val_array; UInt32 _size; bool _is_compact; UInt32 *_beg; UInt32 *_end; UInt32 *_cache; /* typedef std::vector::const_iterator const_itr; */ /* const_itr _beg; */ /* const_itr _end; */ /* const_itr _cache; */ UInt32 _dist; public: /// Original array - 4bytes //std::vector array; UInt32 *array; /// Constructors LCP(const UInt32 &size); /// Destructors virtual ~LCP(); /// Methods /// Compact 4n bytes array into (1n+8p) bytes arrays ErrorCode compact(void); /// Retrieve lcp array value // ErrorCode lcp(const UInt32 &idx, UInt32 &value); UInt32 operator[] (const UInt32& idx); friend std::ostream& operator << (std::ostream& os, LCP& lcp); }; #endif kernlab/src/dtrpcg.c0000644000175100001440000001532714221631100014122 0ustar hornikusers#include #include #include #ifndef USE_FC_LEN_T # define USE_FC_LEN_T #endif #include extern void *xmalloc(size_t); /* LEVEL 1 BLAS */ /* extern int daxpy_(int *, double *, double *, int *, double *, int *); */ /* extern double ddot_(int *, double *, int *, double *, int *); */ /* extern double dnrm2_(int *, double *, int *); */ /* extern int dscal_(int *, double *, double *, int *); */ /* LEVEL 2 BLAS */ /* extern int dtrsv_(char *, char *, char *, int *, double *, int *, double *, int *); */ /* extern int dsymv_(char *, int *, double *, double *, int *, double *, int *, double *, double *, int *); */ /* MINPACK 2 */ extern void dtrqsol(int, double *, double *, double , double *); void dtrpcg(int n, double *A, double *g, double delta, double *L, double tol, double stol, double *w, int *iters, int *info) { /* c ********* c c Subroutine dtrpcg c c Given a dense symmetric positive semidefinite matrix A, this c subroutine uses a preconditioned conjugate gradient method to find c an approximate minimizer of the trust region subproblem c c min { q(s) : || L'*s || <= delta }. c c where q is the quadratic c c q(s) = 0.5*s'*A*s + g'*s, c c This subroutine generates the conjugate gradient iterates for c the equivalent problem c c min { Q(w) : || w || <= delta }. c c where Q is the quadratic defined by c c Q(w) = q(s), w = L'*s. c c Termination occurs if the conjugate gradient iterates leave c the trust region, a negative curvature direction is generated, c or one of the following two convergence tests is satisfied. c c Convergence in the original variables: c c || grad q(s) || <= tol c c Convergence in the scaled variables: c c || grad Q(w) || <= stol c c Note that if w = L'*s, then L*grad Q(w) = grad q(s). c c parameters: c c n is an integer variable. c On entry n is the number of variables. c On exit n is unchanged. c c A is a double precision array of dimension n*n. c On entry A specifies the matrix A. c On exit A is unchanged. c c g is a double precision array of dimension n. c On entry g must contain the vector g. c On exit g is unchanged. c c delta is a double precision variable. c On entry delta is the trust region size. c On exit delta is unchanged. 
c c L is a double precision array of dimension n*n. c On entry L need not to be specified. c On exit the lower triangular part of L contains the matrix L. c c tol is a double precision variable. c On entry tol specifies the convergence test c in the un-scaled variables. c On exit tol is unchanged c c stol is a double precision variable. c On entry stol specifies the convergence test c in the scaled variables. c On exit stol is unchanged c c w is a double precision array of dimension n. c On entry w need not be specified. c On exit w contains the final conjugate gradient iterate. c c iters is an integer variable. c On entry iters need not be specified. c On exit iters is set to the number of conjugate c gradient iterations. c c info is an integer variable. c On entry info need not be specified. c On exit info is set as follows: c c info = 1 Convergence in the original variables. c || grad q(s) || <= tol c c info = 2 Convergence in the scaled variables. c || grad Q(w) || <= stol c c info = 3 Negative curvature direction generated. c In this case || w || = delta and a direction c c of negative curvature w can be recovered by c solving L'*w = p. c c info = 4 Conjugate gradient iterates exit the c trust region. In this case || w || = delta. c c info = 5 Failure to converge within itermax(n) iterations. c c ********** */ int i, inc = 1; double one = 1, zero = 0, alpha, malpha, beta, ptq, rho; double *p, *q, *t, *r, *z, sigma, rtr, rnorm, rnorm0, tnorm; p = (double *) xmalloc(sizeof(double)*n); q = (double *) xmalloc(sizeof(double)*n); t = (double *) xmalloc(sizeof(double)*n); r = (double *) xmalloc(sizeof(double)*n); z = (double *) xmalloc(sizeof(double)*n); /* Initialize the iterate w and the residual r. Initialize the residual t of grad q to -g. Initialize the residual r of grad Q by solving L*r = -g. Note that t = L*r. */ for (i=0;i 0) alpha = rho/ptq; else alpha = 0; dtrqsol(n, w, p, delta, &sigma); /* Exit if there is negative curvature or if the iterates exit the trust region. */ if (ptq <= 0 || alpha >= sigma) { F77_CALL(daxpy)(&n, &sigma, p, &inc, w, &inc); if (ptq <= 0) *info = 3; else *info = 4; goto return0; } /* Update w and the residuals r and t. Note that t = L*r. */ malpha = -alpha; F77_CALL(daxpy)(&n, &alpha, p, &inc, w, &inc); F77_CALL(daxpy)(&n, &malpha, q, &inc, r, &inc); F77_CALL(daxpy)(&n, &malpha, z, &inc, t,&inc); /* Exit if the residual convergence test is satisfied. */ rtr = F77_CALL(ddot)(&n, r, &inc, r, &inc); rnorm = sqrt(rtr); tnorm = sqrt(F77_CALL(ddot)(&n, t, &inc, t, &inc)); if (tnorm <= tol) { *info = 1; goto return0; } if (rnorm <= stol) { *info = 2; goto return0; } /* Compute p = r + beta*p and update rho. */ beta = rtr/rho; F77_CALL(dscal)(&n, &beta, p, &inc); F77_CALL(daxpy)(&n, &one, r, &inc, p, &inc); rho = rtr; } /* iters > itermax = n */ *info = 5; return0: free(p); free(q); free(r); free(t); free(z); } kernlab/src/stringkernel.cpp0000644000175100001440000003371514656662561015742 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the Suffix Array based String Kernel. 
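//' Editor's note (added explanation, not in the original sources): the identity quoted in the
//' dtrpcg comment block above, L*grad Q(w) = grad q(s), follows from the change of variables
//' w = L'*s, i.e. s = inv(L')*w:
//'
//'     Q(w) = 0.5*w'*inv(L)*A*inv(L')*w + (inv(L)*g)'*w
//'     grad Q(w) = inv(L)*A*inv(L')*w + inv(L)*g = inv(L)*(A*s + g) = inv(L)*grad q(s)
//'
//' Multiplying on the left by L recovers grad q(s). This is why, in the code above, the residual
//' t = L*r corresponds (up to sign) to the gradient of q and its norm is tested against tol, the
//' unscaled tolerance, while r corresponds to the gradient of Q and is tested against stol.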
* * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. * * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/StringKernel.cpp // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 // 12 Jul 2006 // 10 Aug 2006 // 11 Oct 2006 #ifndef STRINGKERNEL_CPP #define STRINGKERNEL_CPP #include #include #include #include #include #include #include #include #include "stringkernel.h" StringKernel::StringKernel(): esa(0), weigher(0), val(0), lvs(0) {} /** * Construct string kernel given constructed enhanced suffix array. * * \param esa_ - ESA instance. */ StringKernel::StringKernel(ESA *esa_, int weightfn, Real param, int verb): esa(esa_), val(new Real[esa_->size + 1]), lvs(0), _verb(verb) { switch (weightfn) { case CONSTANT: weigher = new ConstantWeight(); break; case EXPDECAY: weigher = new ExpDecayWeight(param); break; case KSPECTRUM: weigher = new KSpectrumWeight(param); break; case BOUNDRANGE: weigher = new BoundedRangeWeight(param); break; default: weigher = new ConstantWeight(); //int nothing = 0; } } /** * Construct string kernel when given only text and its length. * * \param text - (IN) The text which SuffixArray and StringKernel correspond to. * \param text_length - (IN) The length of #_text#. * \param verb - (IN) Verbosity level. */ StringKernel::StringKernel(const UInt32 &size, SYMBOL *text, int weightfn, Real param, int verb): lvs(0), _verb(verb) { // Build ESA. esa = new ESA(size, text, verb); // Allocate memory space for #val# val = new Real[esa->size + 1]; // Instantiate weigher. switch (weightfn) { case CONSTANT: weigher = new ConstantWeight(); break; case EXPDECAY: weigher = new ExpDecayWeight(param); break; case KSPECTRUM: weigher = new KSpectrumWeight(param); break; case BOUNDRANGE: weigher = new BoundedRangeWeight(param); break; default: weigher = new ConstantWeight(); //int nothing = 0; } } /** * StringKernel destructor. * */ StringKernel::~StringKernel() { //' Delete objects and release allocated memory space. if (esa) { delete esa; esa = 0; } if (val) { delete [] val; val = 0; } if (lvs) { delete [] lvs; lvs = 0; } if (weigher) { delete weigher; weigher = 0; } } /** * An Iterative auxiliary function used in PrecomputeVal(). * * Note: Every lcp-interval can be represented by its first l-index. * Hence, 'val' is stored in val[] at the index := first l-index. * * Pre: val[] is initialised to 0. * * @param left Left bound of current interval * @param right Right bound of current interval */ void StringKernel::IterativeCompute(const UInt32 &left, const UInt32 &right) { //std::cout << "In IterativeCompute() " << std::endl; //' Variables queue > q; vector > childlist; pair p; UInt32 lb = 0; UInt32 rb = 0; UInt32 floor_len = 0; UInt32 x_len = 0; Real cur_val = 0.0; Real edge_weight = 0.0; //' Step 1: At root, 0-[0..size-1]. Store all non-single child-intervals onto #q#. lb = left; //' Should be equal to 0. rb = right; //' Should be equal to size-1. esa->GetChildIntervals(lb, rb, childlist); for (UInt32 jj = 0; jj < childlist.size(); jj++) q.push(childlist[jj]); //' Step 2: Do breadth-first traversal. For every interval, compute val and add //' it to all its non-singleton child-intervals' val-entries in val[]. 
//' Start with child-interval [i..j] of 0-[0..size-1]. //' assert(j != size-1) while (!q.empty()) { //' Step 2.1: Get an interval from queue, #q#. p = q.front(); q.pop(); //' step 2.2: Get the lcp of floor interval. UInt32 a = 0, b = 0; a = esa->lcptab[p.first]; //svnvish: BUGBUG // Glorious hack. We have to remove it later. // This gives the lcp of parent interval if (p.second < esa->size - 1) { b = esa->lcptab[p.second + 1]; } else { b = 0; } floor_len = (a > b) ? a : b; //' Step 2.3: Get the lcp of current interval. esa->GetLcp(p.first, p.second, x_len); //' Step 2.4: Compute val of current interval. weigher->ComputeWeight(floor_len, x_len, edge_weight); cur_val = edge_weight * (lvs[p.second + 1] - lvs[p.first]); //' Step 2.5: Add #cur_val# to val[]. UInt32 firstlIndex1 = 0; esa->childtab.l_idx(p.first, p.second, firstlIndex1); val[firstlIndex1] += cur_val; // std::cout << "p.first:"<GetChildIntervals(p.first, p.second, childlist); //' Step 2.7: (a) Add #cur_val# to child-intervals' val-entries in val[]. //' (b) Push child-interval onto #q#. for (UInt32 kk = 0; kk < childlist.size(); kk++) { //' (a) UInt32 firstlIndex2 = 0; pair tmp_p = childlist[kk]; if (esa->text[esa->suftab[tmp_p.first]] == SENTINEL) continue; esa->childtab.l_idx(tmp_p.first, tmp_p.second, firstlIndex2); // assert( val[firstlIndex2] == 0 ); val[firstlIndex2] = val[firstlIndex1]; // cur_val; //' (b) q.push(make_pair(tmp_p.first, tmp_p.second)); } } //std::cout << "Out IterativeCompute() " << std::endl; } /** * Precomputation of val(t) of string kernel. * Observation :Every internal node of a suffix tree can be represented by at * least one index of the corresponding lcp array. So, the val * of a node is stored in val[] at the index corresponding to that of * the fist representative lcp value in lcp[]. */ void StringKernel::PrecomputeVal() { //' Memory space requirement check. assert(val != 0); //' Initialise all val entries to zero! memset(val, 0, sizeof(Real)*esa->size + 1); //' Start iterative precomputation of val[] IterativeCompute(0, esa->size - 1); } /** * Compute k(text,x) by performing Chang and Lawler's matching statistics collection * algorithm on the enhanced suffix array. * * \param x - (IN) The input string which is to be evaluated together with * the text in esa. * \param x_len - (IN) The length of #x#. * \param value - (IN) The value of k(x,x'). 
*/ void StringKernel::Compute_K(SYMBOL *x, const UInt32 &x_len, Real &value) { //' Variables UInt32 floor_i = 0; UInt32 floor_j = 0; UInt32 i = 0; UInt32 j = 0; UInt32 lb = 0; UInt32 rb = 0; UInt32 matched_len = 0; UInt32 offset = 0; UInt32 floor_len = 0; UInt32 firstlIndex = 0; Real edge_weight = 0.0; //' Initialisation value = 0.0; lb = 0; rb = esa->size - 1; //' for each suffix, xprime[k..xprime_len-1], find longest match in text for (UInt32 k = 0; k < x_len; k++) { //' Step 1: Matching esa->ExactSuffixMatch(lb, rb, offset, &x[k], x_len - k, i, j, matched_len, floor_i, floor_j, floor_len); //' Step 2: Get suffix link for [floor_i..floor_j] esa->GetSuflink(floor_i, floor_j, lb, rb); assert((floor_j - floor_i) <= (rb - lb)); //' Range check //' Step 3: Compute contribution of this matched substring esa->childtab.l_idx(floor_i, floor_j, firstlIndex); assert(firstlIndex > floor_i && firstlIndex <= floor_j); assert(floor_len <= matched_len); weigher->ComputeWeight(floor_len, matched_len, edge_weight); value += val[firstlIndex] + edge_weight * (lvs[j + 1] - lvs[i]); // std::cout << "i:"<size); //' Allocate memory space for lvs[] lvs = new (nothrow) Real[esa->size + 1]; assert(lvs); //' Assign leaf weight to lvs element according to its position in text. for (UInt32 j = 0; j < esa->size; j++) { pos = esa->suftab[j]; UInt32 *p = upper_bound(clen, clen + m, pos); //' O(log n) lvs[j + 1] = leafWeight[p - clen]; } //' Compute cumulative lvs[]. To be used in matching statistics computation later. lvs[0] = 0.0; partial_sum(lvs, lvs + esa->size + 1, lvs); //chteo: [101006] delete [] clen; clen = 0; } /** * Set lvs[i] = i, for i = 0 to esa->size * Memory space for lvs[] will be allocated. */ void StringKernel::Set_Lvs() { //' Clean up previous lvs, if any. if (lvs) { delete lvs; lvs = 0; } //' Allocate memory space for lvs[] lvs = new (nothrow) Real[esa->size + 1]; //' Check if memory correctly allocated. assert(lvs != 0); //' Range := [0..esa->size] UInt32 localsize = esa->size; for (UInt32 i = 0; i <= localsize; i++) lvs[i] = i; } #endif #include #include #include extern "C" { SEXP stringtv(SEXP rtext, // text document SEXP ltext, // list or vector of text documents to compute kvalues against SEXP nltext, // number of text documents in ltext SEXP vnchar, // number of characters in text SEXP vnlchar, // characters per document in ltext SEXP stype, // type of kernel SEXP param) // parameter for kernel { // R interface for text and list of text computation. Should return a vector of computed kernel values. 
// Construct ESASK UInt32 text_size = *INTEGER(vnchar); int number_ltext = *INTEGER(nltext); unsigned int *ltext_size = (unsigned int *) malloc (sizeof(unsigned int) * number_ltext); memcpy(ltext_size, INTEGER(vnlchar), number_ltext*sizeof(int)); int weightfn = *INTEGER(stype); const char *text = CHAR(STRING_ELT(rtext,0)); Real kparam = *REAL(param); double kVal; SEXP alpha; PROTECT(alpha = Rf_allocVector(REALSXP, number_ltext)); // Check if stringlength reported from R is correct if(strlen(text)!= text_size) text_size= strlen(text); StringKernel sk(text_size, (SYMBOL*)text, (weightfn - 1), kparam, 0); sk.Set_Lvs(); sk.PrecomputeVal(); for (int i=0; i * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/ExpDecayWeight.h // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 // 12 Jul 2006 #ifndef EXPDECAYWEIGHT_H #define EXPDECAYWEIGHT_H #include "datatype.h" #include "errorcode.h" #include "iweightfactory.h" #include class ExpDecayWeight : public I_WeightFactory { public: Real lambda; /// Constructors //' NOTE: lambda shouldn't be equal to 1, othexrwise there will be //' divide-by-zero error. ExpDecayWeight(const Real &lambda_=2.0):lambda(lambda_) {} /// Destructor virtual ~ExpDecayWeight(){} /// Compute weight ErrorCode ComputeWeight(const UInt32 &floor_len, const UInt32 &x_len, Real &weight); }; #endif kernlab/src/wmsufsort.cpp0000644000175100001440000000442512234152620015254 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the Suffix Array based String Kernel. * * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. * * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/W_msufsort.cpp // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 //' Wrapper for Michael Maniscalco's MSufSort version 2.2 algorithm #ifndef W_MSUFSORT_CPP #define W_MSUFSORT_CPP #include #include #include #include "wmsufsort.h" W_msufsort::W_msufsort() { msuffixsorter = new MSufSort(); } W_msufsort::~W_msufsort() { delete msuffixsorter; } /** * Construct Suffix Array using Michael Maniscalco's algorithm * * \param _text - (IN) The text which resultant SA corresponds to. * \param _len - (IN) The length of the text. * \param _sa - (OUT) Suffix array instance. */ ErrorCode W_msufsort::ConstructSA(SYMBOL *text, const UInt32 &len, UInt32 *&array){ //' A temporary copy of text SYMBOL *text_copy = new SYMBOL[len]; //' chteo: BUGBUG //' redundant? 
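//' Editor's note: a minimal sketch of the call sequence that the stringtv entry point above uses
//' on the C++ side. This is illustrative only and not part of the package: the two strings and the
//' parameter values are invented, and the type and constant names (SYMBOL, Real, CONSTANT) are
//' assumed to be those declared in datatype.h / stringkernel.h as used elsewhere in these sources.
#include <cstring>
#include "stringkernel.h"

void string_kernel_demo()
{
    const char *text = "acgtacgt";                      // reference document
    const char *x    = "cgta";                          // document to evaluate against text
    StringKernel sk(strlen(text), (SYMBOL*)text, CONSTANT, 1.0, 0);
    sk.Set_Lvs();                                       // uniform leaf weights lvs[i] = i
    sk.PrecomputeVal();                                 // precompute val[] over the lcp-intervals
    Real k = 0.0;
    sk.Compute_K((SYMBOL*)x, strlen(x), k);             // k now holds k(text, x)
}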
assert(text_copy != NULL); memcpy(text_copy, text, sizeof(SYMBOL) * len); msuffixsorter->Sort(text_copy, len); //' Code adapted from MSufSort::verifySort() for (UInt32 i = 0; i < len; i++) { UInt32 tmp = msuffixsorter->ISA(i)-1; array[tmp] = i; } //' Deallocate the memory allocated for #text_copy# delete [] text_copy; return NOERROR; } #endif kernlab/src/msufsort.h0000644000175100001440000006500412761213650014540 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the MSufSort suffix sorting algorithm (Version 2.2). * * The Initial Developer of the Original Code is * Michael A. Maniscalco * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. * * Contributor(s): * * Michael A. Maniscalco * * ***** END LICENSE BLOCK ***** */ #ifndef MSUFSORT_H #define MSUFSORT_H //==================================================================// // // // v // // MSufSort Version 2.2 // // Author: Michael A Maniscalco // // Date: Nov. 3, 2005 // // // // Notes: // // // //==================================================================// #include "stdio.h" #include "stack.h" #include "introsort.h" #include "inductionsort.h" //==================================================================// // Test app defines: // //==================================================================// #define SHOW_PROGRESS // display progress during sort #define CHECK_SORT // verify that sorting is correct. // #define SORT_16_BIT_SYMBOLS // enable 16 bit symbols. #define USE_INDUCTION_SORTING // enable induction sorting feature. #define USE_ENHANCED_INDUCTION_SORTING // enable enhanced induction sorting feature. #define USE_TANDEM_REPEAT_SORTING // enable the tandem repeat sorting feature. //#define USE_ALT_SORT_ORDER // enable alternative sorting order #define ENDIAN_SWAP_16(value) ((value >> 8) | (value << 8)) #define SUFFIX_SORTED 0x80000000 // flag marks suffix as sorted. #define END_OF_CHAIN 0x3ffffffe // marks the end of a chain #define SORTED_BY_ENHANCED_INDUCTION 0x3fffffff // marks suffix which will be sorted by enhanced induction sort. #ifdef SORT_16_BIT_SYMBOLS #define SYMBOL_TYPE unsigned short #else #define SYMBOL_TYPE unsigned char #endif class MSufSort { public: MSufSort(); virtual ~MSufSort(); unsigned int Sort(SYMBOL_TYPE * source, unsigned int sourceLength); unsigned int GetElapsedSortTime(); unsigned int GetMemoryUsage(); unsigned int ISA(unsigned int index); bool VerifySort(); static void ReverseAltSortOrder(SYMBOL_TYPE * data, unsigned int nBytes); private: int CompareStrings(SYMBOL_TYPE * stringA, SYMBOL_TYPE * stringB, int len); bool IsTandemRepeat2(); bool IsTandemRepeat(); void PassTandemRepeat(); bool IsSortedByInduction(); bool IsSortedByEnhancedInduction(unsigned int suffixIndex); void ProcessSuffixesSortedByInduction(); // MarkSuffixAsSorted // Sets the final inverse suffix array value for a given suffix. // Also invokes the OnSortedSuffix member function. 
void MarkSuffixAsSorted(unsigned int suffixIndex, unsigned int & sortedIndex); void MarkSuffixAsSorted2(unsigned int suffixIndex, unsigned int & sortedIndex); void MarkSuffixAsSortedByEnhancedInductionSort(unsigned int suffixIndex); // PushNewChainsOntoStack: // Moves all new suffix chains onto the stack of partially sorted // suffixes. (makes them ready for further sub sorting). void PushNewChainsOntoStack(bool originalChains = false); void PushTandemBypassesOntoStack(); // OnSortedSuffix: // Event which is invoked with each sorted suffix at the time of // its sorting. virtual void OnSortedSuffix(unsigned int suffixIndex); // Initialize: // Initializes this object just before sorting begins. void Initialize(); // InitialSort: // This is the first sorting pass which makes the initial suffix // chains from the given source string. Pushes these chains onto // the stack for further sorting. void InitialSort(); // Value16: // Returns the two 8 bit symbols located // at positions N and N + 1 where N = the sourceIndex. unsigned short Value16(unsigned int sourceIndex); // ProcessChain: // Sorts the suffixes of a given chain by the next two symbols of // each suffix in the chain. This creates zero or more new suffix // chains with each sorted by two more symbols than the original // chain. Then pushes these new chains onto the chain stack for // further sorting. void ProcessNextChain(); void AddToSuffixChain(unsigned int suffixIndex, unsigned short suffixChain); void AddToSuffixChain(unsigned int firstSuffixIndex, unsigned int lastSuffixIndex, unsigned short suffixChain); void ProcessSuffixesSortedByEnhancedInduction(unsigned short suffixId); void ResolveTandemRepeatsNotSortedWithInduction(); unsigned int m_sortTime; Stack m_chainMatchLengthStack; Stack m_chainCountStack; Stack m_chainHeadStack; unsigned int m_endOfSuffixChain[0x10000]; unsigned int m_startOfSuffixChain[0x10000]; // m_source: // Address of the string to sort. SYMBOL_TYPE * m_source; // m_sourceLength: // The length of the string pointed to by m_source. unsigned int m_sourceLength; unsigned int m_sourceLengthMinusOne; // m_ISA: // The address of the working space which, when the sort is // completed, will contain the inverse suffix array for the // source string. unsigned int * m_ISA; // m_nextSortedSuffixValue: unsigned int m_nextSortedSuffixValue; // unsigned int m_numSortedSuffixes; // m_newChainIds // Array containing the valid chain numbers in m_newChain array. unsigned short m_newChainIds[0x10000]; // m_numNewChains: // The number of new suffix chain ids stored in m_numChainIds. unsigned int m_numNewChains; Stack m_suffixesSortedByInduction; unsigned int m_suffixMatchLength; unsigned int m_currentSuffixIndex; // m_firstSortedPosition: // For use with enhanced induction sorting. unsigned int m_firstSortedPosition[0x10000]; unsigned int m_firstSuffixByEnhancedInductionSort[0x10000]; unsigned int m_lastSuffixByEnhancedInductionSort[0x10000]; unsigned int m_currentSuffixChainId; #ifdef SHOW_PROGRESS // ShowProgress: // Update the progress indicator. void ShowProgress(); // m_nextProgressUpdate: // Indicates when to update the progress indicator. unsigned int m_nextProgressUpdate; // m_progressUpdateIncrement: // Indicates how many suffixes should be sorted before // incrementing the progress indicator. unsigned int m_progressUpdateIncrement; #endif // members used if alternate sorting order should be applied. 
SYMBOL_TYPE m_forwardAltSortOrder[256]; static SYMBOL_TYPE m_reverseAltSortOrder[256]; // for tandem repeat sorting bool m_hasTandemRepeatSortedByInduction; unsigned int m_firstUnsortedTandemRepeat; unsigned int m_lastUnsortedTandemRepeat; bool m_hasEvenLengthTandemRepeats; unsigned int m_tandemRepeatDepth; unsigned int m_firstSortedTandemRepeat; unsigned int m_lastSortedTandemRepeat; unsigned int m_tandemRepeatLength; }; //inline unsigned short MSufSort::Value16(unsigned int sourceIndex) //{ // return (sourceIndex < m_sourceLengthMinusOne) ? *(unsigned short *)(m_source + sourceIndex) : m_source[sourceIndex]; //} // fix by Brian Ripley inline unsigned short MSufSort::Value16(unsigned int sourceIndex) { union {unsigned short u; unsigned char b[2];} u16; u16.b[0] = m_source[sourceIndex]; u16.b[1] = (sourceIndex < m_sourceLengthMinusOne) ? m_source[sourceIndex + 1] : 0; return u16.u; } inline bool MSufSort::IsSortedByInduction() { unsigned int n = m_currentSuffixIndex + m_suffixMatchLength - 1; #ifndef USE_INDUCTION_SORTING if (n < m_sourceLengthMinusOne) return false; #endif if ((m_ISA[n] & SUFFIX_SORTED) && ((m_ISA[n] & 0x3fffffff) < m_nextSortedSuffixValue)) { InductionSortObject i(0, m_ISA[n], m_currentSuffixIndex); m_suffixesSortedByInduction.Push(i); } else if ((m_ISA[n + 1] & SUFFIX_SORTED) && ((m_ISA[n + 1] & 0x3fffffff) < m_nextSortedSuffixValue)) { InductionSortObject i(1, m_ISA[n + 1], m_currentSuffixIndex); m_suffixesSortedByInduction.Push(i); } else return false; return true; } inline bool MSufSort::IsSortedByEnhancedInduction(unsigned int suffixIndex) { if (suffixIndex > 0) if (m_ISA[suffixIndex - 1] == SORTED_BY_ENHANCED_INDUCTION) return true; return false; } inline bool MSufSort::IsTandemRepeat() { #ifndef USE_TANDEM_REPEAT_SORTING return false; #else if ((!m_tandemRepeatDepth) && (m_currentSuffixIndex + m_suffixMatchLength) == (m_ISA[m_currentSuffixIndex] + 1)) return true; #ifndef SORT_16_BIT_SYMBOLS if ((!m_tandemRepeatDepth) && ((m_currentSuffixIndex + m_suffixMatchLength) == (m_ISA[m_currentSuffixIndex]))) { m_hasEvenLengthTandemRepeats = true; return false; } #endif return false; #endif } inline void MSufSort::PassTandemRepeat() { unsigned int nextIndex; unsigned int lastIndex; // unsigned int firstIndex = m_currentSuffixIndex; while ((m_currentSuffixIndex + m_suffixMatchLength) == ((nextIndex = m_ISA[m_currentSuffixIndex]) + 1)) { lastIndex = m_currentSuffixIndex; m_currentSuffixIndex = nextIndex; } if (IsSortedByInduction()) { m_hasTandemRepeatSortedByInduction = true; m_currentSuffixIndex = m_ISA[m_currentSuffixIndex]; } else { if (m_firstUnsortedTandemRepeat == END_OF_CHAIN) m_firstUnsortedTandemRepeat = m_lastUnsortedTandemRepeat = lastIndex; else m_lastUnsortedTandemRepeat = (m_ISA[m_lastUnsortedTandemRepeat] = lastIndex); } } inline void MSufSort::PushNewChainsOntoStack(bool originalChains) { // Moves all new suffix chains onto the stack of partially sorted // suffixes. (makes them ready for further sub sorting). 
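//' Editor's note (explanatory comment only, not in the original sources): the commented-out
//' version of Value16 above read two bytes through a cast to unsigned short *; the replacement
//' marked "fix by Brian Ripley" assembles the same 16-bit value byte by byte through a union,
//' presumably to avoid the type-punned, potentially misaligned load, and it handles the final
//' position (where only one byte is available) by padding with zero instead of guarding the
//' load with a ternary.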
#ifdef SORT_16_BIT_SYMBOLS unsigned int newSuffixMatchLength = m_suffixMatchLength + 1; #else unsigned int newSuffixMatchLength = m_suffixMatchLength + 2; #endif if (m_numNewChains) { if (m_hasEvenLengthTandemRepeats) { m_chainCountStack.Push(m_numNewChains - 1); m_chainMatchLengthStack.Push(newSuffixMatchLength); m_chainCountStack.Push(1); m_chainMatchLengthStack.Push(newSuffixMatchLength - 1); } else { m_chainCountStack.Push(m_numNewChains); m_chainMatchLengthStack.Push(newSuffixMatchLength); } if (m_numNewChains > 1) IntroSort(m_newChainIds, m_numNewChains); while (m_numNewChains) { unsigned short chainId = m_newChainIds[--m_numNewChains]; chainId = ENDIAN_SWAP_16(chainId); // unsigned int n = m_startOfSuffixChain[chainId]; m_chainHeadStack.Push(m_startOfSuffixChain[chainId]); m_startOfSuffixChain[chainId] = END_OF_CHAIN; m_ISA[m_endOfSuffixChain[chainId]] = END_OF_CHAIN; } } m_hasEvenLengthTandemRepeats = false; if (m_firstUnsortedTandemRepeat != END_OF_CHAIN) { // Tandem repeats with a terminating suffix that did not get // sorted via induction have occurred (at least once). // We have a suffix chain (indicated by m_firstUnsortedTandemRepeat) // of the suffix in each tandem repeat which immediately preceded the // terminating suffix in each chain. We want to sort them relative to // each other and then process the tandem repeats. unsigned int tandemRepeatLength = m_suffixMatchLength - 1; unsigned int numChains = m_chainHeadStack.Count(); m_chainHeadStack.Push(m_firstUnsortedTandemRepeat); m_chainCountStack.Push(1); m_chainMatchLengthStack.Push((m_suffixMatchLength << 1) - 1); m_ISA[m_lastUnsortedTandemRepeat] = END_OF_CHAIN; m_firstUnsortedTandemRepeat = END_OF_CHAIN; m_tandemRepeatDepth = 1; while (m_chainHeadStack.Count() > numChains) ProcessNextChain(); m_suffixMatchLength = tandemRepeatLength + 1; ResolveTandemRepeatsNotSortedWithInduction(); m_tandemRepeatDepth = 0; } } inline void MSufSort::AddToSuffixChain(unsigned int suffixIndex, unsigned short suffixChain) { if (m_startOfSuffixChain[suffixChain] == END_OF_CHAIN) { m_endOfSuffixChain[suffixChain] = m_startOfSuffixChain[suffixChain] = suffixIndex; m_newChainIds[m_numNewChains++] = ENDIAN_SWAP_16(suffixChain); } else m_endOfSuffixChain[suffixChain] = m_ISA[m_endOfSuffixChain[suffixChain]] = suffixIndex; } inline void MSufSort::AddToSuffixChain(unsigned int firstSuffixIndex, unsigned int lastSuffixIndex, unsigned short suffixChain) { if (m_startOfSuffixChain[suffixChain] == END_OF_CHAIN) { m_startOfSuffixChain[suffixChain] = firstSuffixIndex; m_endOfSuffixChain[suffixChain] = lastSuffixIndex; m_newChainIds[m_numNewChains++] = ENDIAN_SWAP_16(suffixChain); } else { m_ISA[m_endOfSuffixChain[suffixChain]] = firstSuffixIndex; m_endOfSuffixChain[suffixChain] = lastSuffixIndex; } } inline void MSufSort::OnSortedSuffix(unsigned int suffixIndex) { // Event which is invoked with each sorted suffix at the time of // its sorting. m_numSortedSuffixes++; #ifdef SHOW_PROGRESS if (m_numSortedSuffixes >= m_nextProgressUpdate) { m_nextProgressUpdate += m_progressUpdateIncrement; ShowProgress(); } #endif } #ifdef SORT_16_BIT_SYMBOLS inline void MSufSort::MarkSuffixAsSorted(unsigned int suffixIndex, unsigned int & sortedIndex) { // Sets the final inverse suffix array value for a given suffix. // Also invokes the OnSortedSuffix member function. if (m_tandemRepeatDepth) { // we are processing a list of suffixes which were the second to last in tandem repeats // that were not terminated via induction.
These suffixes are not actually to be // marked as sorted yet. Instead, they are to be linked together in sorted order. if (m_firstSortedTandemRepeat == END_OF_CHAIN) m_firstSortedTandemRepeat = m_lastSortedTandemRepeat = suffixIndex; else m_lastSortedTandemRepeat = (m_ISA[m_lastSortedTandemRepeat] = suffixIndex); return; } m_ISA[suffixIndex] = (sortedIndex++ | SUFFIX_SORTED); #ifdef SHOW_PROGRESS OnSortedSuffix(suffixIndex); #endif #ifdef USE_ENHANCED_INDUCTION_SORTING if ((suffixIndex) && (m_ISA[suffixIndex - 1] == SORTED_BY_ENHANCED_INDUCTION)) { suffixIndex--; unsigned short symbol = Value16(suffixIndex); m_ISA[suffixIndex] = (m_firstSortedPosition[symbol]++ | SUFFIX_SORTED); #ifdef SHOW_PROGRESS OnSortedSuffix(suffixIndex); #endif if ((suffixIndex) && (m_ISA[suffixIndex - 1] == SORTED_BY_ENHANCED_INDUCTION)) { suffixIndex--; symbol = ENDIAN_SWAP_16(symbol); if (m_firstSuffixByEnhancedInductionSort[symbol] == END_OF_CHAIN) m_firstSuffixByEnhancedInductionSort[symbol] = m_lastSuffixByEnhancedInductionSort[symbol] = suffixIndex; else { m_ISA[m_lastSuffixByEnhancedInductionSort[symbol]] = suffixIndex; m_lastSuffixByEnhancedInductionSort[symbol] = suffixIndex; } } } #endif } inline void MSufSort::MarkSuffixAsSorted2(unsigned int suffixIndex, unsigned int & sortedIndex) { // Sets the final inverse suffix array value for a given suffix. // Also invokes the OnSortedSuffix member function. if (m_tandemRepeatDepth) { // we are processing a list of suffixes which we the second to last in tandem repeats // that were not terminated via induction. These suffixes are not actually to be // marked as sorted yet. Instead, they are to be linked together in sorted order. if (m_firstSortedTandemRepeat == END_OF_CHAIN) m_firstSortedTandemRepeat = m_lastSortedTandemRepeat = suffixIndex; else m_lastSortedTandemRepeat = (m_ISA[m_lastSortedTandemRepeat] = suffixIndex); return; } m_ISA[suffixIndex] = (sortedIndex++ | SUFFIX_SORTED); #ifdef SHOW_PROGRESS OnSortedSuffix(suffixIndex); #endif #ifdef USE_ENHANCED_INDUCTION_SORTING if ((suffixIndex) && (m_ISA[suffixIndex - 1] == SORTED_BY_ENHANCED_INDUCTION)) { unsigned short symbol = Value16(suffixIndex); symbol = ENDIAN_SWAP_16(symbol); suffixIndex--; if (m_firstSuffixByEnhancedInductionSort[symbol] == END_OF_CHAIN) m_firstSuffixByEnhancedInductionSort[symbol] = m_lastSuffixByEnhancedInductionSort[symbol] = suffixIndex; else { m_ISA[m_lastSuffixByEnhancedInductionSort[symbol]] = suffixIndex; m_lastSuffixByEnhancedInductionSort[symbol] = suffixIndex; } } #endif } #else inline void MSufSort::MarkSuffixAsSorted(unsigned int suffixIndex, unsigned int & sortedIndex) { // Sets the final inverse suffix array value for a given suffix. // Also invokes the OnSortedSuffix member function. if (m_tandemRepeatDepth) { // we are processing a list of suffixes which we the second to last in tandem repeats // that were not terminated via induction. These suffixes are not actually to be // marked as sorted yet. Instead, they are to be linked together in sorted order. 
if (m_firstSortedTandemRepeat == END_OF_CHAIN) m_firstSortedTandemRepeat = m_lastSortedTandemRepeat = suffixIndex; else m_lastSortedTandemRepeat = (m_ISA[m_lastSortedTandemRepeat] = suffixIndex); return; } m_ISA[suffixIndex] = (sortedIndex++ | SUFFIX_SORTED); #ifdef SHOW_PROGRESS OnSortedSuffix(suffixIndex); #endif #ifdef USE_ENHANCED_INDUCTION_SORTING if ((suffixIndex) && (m_ISA[suffixIndex - 1] == SORTED_BY_ENHANCED_INDUCTION)) { suffixIndex--; unsigned short symbol = Value16(suffixIndex); m_ISA[suffixIndex] = (m_firstSortedPosition[symbol]++ | SUFFIX_SORTED); #ifdef SHOW_PROGRESS OnSortedSuffix(suffixIndex); #endif if ((suffixIndex) && (m_ISA[suffixIndex - 1] == SORTED_BY_ENHANCED_INDUCTION)) { suffixIndex--; unsigned short symbol2 = symbol; symbol = Value16(suffixIndex); m_ISA[suffixIndex] = (m_firstSortedPosition[symbol]++ | SUFFIX_SORTED); #ifdef SHOW_PROGRESS OnSortedSuffix(suffixIndex); #endif if ((suffixIndex) && (m_ISA[suffixIndex - 1] == SORTED_BY_ENHANCED_INDUCTION)) { if (m_source[suffixIndex] < m_source[suffixIndex + 1]) symbol2 = ENDIAN_SWAP_16(symbol); else symbol2 = ENDIAN_SWAP_16(symbol2); suffixIndex--; if (m_firstSuffixByEnhancedInductionSort[symbol2] == END_OF_CHAIN) m_firstSuffixByEnhancedInductionSort[symbol2] = m_lastSuffixByEnhancedInductionSort[symbol2] = suffixIndex; else { m_ISA[m_lastSuffixByEnhancedInductionSort[symbol2]] = suffixIndex; m_lastSuffixByEnhancedInductionSort[symbol2] = suffixIndex; } } } } #endif } inline void MSufSort::MarkSuffixAsSorted2(unsigned int suffixIndex, unsigned int & sortedIndex) { // Sets the final inverse suffix array value for a given suffix. // Also invokes the OnSortedSuffix member function. if (m_tandemRepeatDepth) { // we are processing a list of suffixes which we the second to last in tandem repeats // that were not terminated via induction. These suffixes are not actually to be // marked as sorted yet. Instead, they are to be linked together in sorted order. if (m_firstSortedTandemRepeat == END_OF_CHAIN) m_firstSortedTandemRepeat = m_lastSortedTandemRepeat = suffixIndex; else m_lastSortedTandemRepeat = (m_ISA[m_lastSortedTandemRepeat] = suffixIndex); return; } m_ISA[suffixIndex] = (sortedIndex++ | SUFFIX_SORTED); #ifdef SHOW_PROGRESS OnSortedSuffix(suffixIndex); #endif #ifdef USE_ENHANCED_INDUCTION_SORTING if ((suffixIndex) && (m_ISA[suffixIndex - 1] == SORTED_BY_ENHANCED_INDUCTION)) { unsigned short symbol; if (m_source[suffixIndex] < m_source[suffixIndex + 1]) symbol = Value16(suffixIndex); else symbol = Value16(suffixIndex + 1); symbol = ENDIAN_SWAP_16(symbol); suffixIndex--; if (m_firstSuffixByEnhancedInductionSort[symbol] == END_OF_CHAIN) m_firstSuffixByEnhancedInductionSort[symbol] = m_lastSuffixByEnhancedInductionSort[symbol] = suffixIndex; else { m_ISA[m_lastSuffixByEnhancedInductionSort[symbol]] = suffixIndex; m_lastSuffixByEnhancedInductionSort[symbol] = suffixIndex; } } #endif } #endif inline void MSufSort::ProcessNextChain() { // Sorts the suffixes of a given chain by the next two symbols of // each suffix in the chain. This creates zero or more new suffix // chains with each sorted by two more symbols than the original // chain. Then pushes these new chains onto the chain stack for // further sorting. 
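// [Editorial addition, not part of the original MSufSort 2.2 source: a small worked
// example of the chain refinement performed below, assuming 8-bit symbols.] For the
// text "banana", suffixes 1 ("anana") and 3 ("ana") share the two-symbol prefix "an",
// so InitialSort() places them in one chain with a match length of two. When that
// chain is popped here, Value16() reads the next two symbols of each member ("an" for
// suffix 1, "a" plus end-of-string for suffix 3), the members are split into new
// chains keyed by those symbols, and the new chains are pushed back with the match
// length increased by two. A chain that turns out to hold only a single suffix is
// marked as sorted as soon as it is popped.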
while (--m_chainCountStack.Top() < 0) { m_chainCountStack.Pop(); m_chainMatchLengthStack.Pop(); } m_suffixMatchLength = m_chainMatchLengthStack.Top(); m_currentSuffixIndex = m_chainHeadStack.Pop(); #ifdef USE_ENHANCED_INDUCTION_SORTING if (m_chainMatchLengthStack.Count() == 1) { // one of the original buckets from InitialSort(). This is important // when enhanced induction sorting is enabled. unsigned short chainId = Value16(m_currentSuffixIndex); unsigned short temp = chainId; chainId = ENDIAN_SWAP_16(chainId); while (m_currentSuffixChainId <= chainId) ProcessSuffixesSortedByEnhancedInduction(m_currentSuffixChainId++); m_nextSortedSuffixValue = m_firstSortedPosition[temp]; } #endif if (m_ISA[m_currentSuffixIndex] == END_OF_CHAIN) MarkSuffixAsSorted(m_currentSuffixIndex, m_nextSortedSuffixValue); // only one suffix in bucket so it is sorted. else { do { if (IsTandemRepeat()) PassTandemRepeat(); else if ((m_currentSuffixIndex != END_OF_CHAIN) && (IsSortedByInduction())) m_currentSuffixIndex = m_ISA[m_currentSuffixIndex]; else { unsigned int firstSuffixIndex = m_currentSuffixIndex; unsigned int lastSuffixIndex = m_currentSuffixIndex; unsigned short targetSymbol = Value16(m_currentSuffixIndex + m_suffixMatchLength); unsigned int nextSuffix; do { nextSuffix = m_ISA[lastSuffixIndex = m_currentSuffixIndex]; if ((m_currentSuffixIndex = nextSuffix) == END_OF_CHAIN) break; else if (IsTandemRepeat()) { PassTandemRepeat(); break; } else if (IsSortedByInduction()) { m_currentSuffixIndex = m_ISA[nextSuffix]; break; } } while (Value16(m_currentSuffixIndex + m_suffixMatchLength) == targetSymbol); AddToSuffixChain(firstSuffixIndex, lastSuffixIndex, targetSymbol); } } while (m_currentSuffixIndex != END_OF_CHAIN); ProcessSuffixesSortedByInduction(); PushNewChainsOntoStack(); } } inline void MSufSort::ProcessSuffixesSortedByInduction() { unsigned int numSuffixes = m_suffixesSortedByInduction.Count(); if (numSuffixes) { InductionSortObject * objects = m_suffixesSortedByInduction.m_stack; if (numSuffixes > 1) IntroSort(objects, numSuffixes); if (m_hasTandemRepeatSortedByInduction) { // During the last pass some suffixes which were sorted via induction were also // determined to be the terminal suffix in a tandem repeat. So when we mark // the suffixes as sorted (which were sorted via induction) we also chain together // the preceding suffix in the tandem repeat (if there is one). unsigned int firstTandemRepeatIndex = END_OF_CHAIN; unsigned int lastTandemRepeatIndex = END_OF_CHAIN; unsigned int tandemRepeatLength = m_suffixMatchLength - 1; m_hasTandemRepeatSortedByInduction = false; for (unsigned int i = 0; i < numSuffixes; i++) { unsigned int suffixIndex = (objects[i].m_sortValue[1] & 0x3fffffff); if ((suffixIndex >= tandemRepeatLength) && (m_ISA[suffixIndex - tandemRepeatLength] == suffixIndex)) { // this suffix was a terminating suffix in a tandem repeat. // add the preceding suffix in the tandem repeat to the list. if (firstTandemRepeatIndex == END_OF_CHAIN) firstTandemRepeatIndex = lastTandemRepeatIndex = (suffixIndex - tandemRepeatLength); else lastTandemRepeatIndex = (m_ISA[lastTandemRepeatIndex] = (suffixIndex - tandemRepeatLength)); } MarkSuffixAsSorted(suffixIndex, m_nextSortedSuffixValue); } // now process each suffix in the tandem repeat list marking each as sorted. // build a new list for tandem repeats which preceded each in the list until there is // no preceding tandem suffix for any suffix in the list.
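// [Editorial addition, not part of the original source: an illustration of the relay
// performed by the loop below.] For a run such as "...abababab", only the right-most
// repeat suffix is sorted by induction; the pass above collected the suffix one
// period to its left. Each iteration of the loop below marks the collected suffixes
// as sorted and gathers their own predecessors (one more period to the left), so the
// run is resolved one period per pass until no suffix in the list has a preceding
// repeat suffix.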
while (firstTandemRepeatIndex != END_OF_CHAIN) { m_ISA[lastTandemRepeatIndex] = END_OF_CHAIN; unsigned int suffixIndex = firstTandemRepeatIndex; firstTandemRepeatIndex = END_OF_CHAIN; while (suffixIndex != END_OF_CHAIN) { if ((suffixIndex >= tandemRepeatLength) && (m_ISA[suffixIndex - tandemRepeatLength] == suffixIndex)) { // this suffix was a terminating suffix in a tandem repeat. // add the preceding suffix in the tandem repeat to the list. if (firstTandemRepeatIndex == END_OF_CHAIN) firstTandemRepeatIndex = lastTandemRepeatIndex = (suffixIndex - tandemRepeatLength); else lastTandemRepeatIndex = (m_ISA[lastTandemRepeatIndex] = (suffixIndex - tandemRepeatLength)); } unsigned int nextSuffix = m_ISA[suffixIndex]; MarkSuffixAsSorted(suffixIndex, m_nextSortedSuffixValue); suffixIndex = nextSuffix; } } // finished. } else { // This is the typical branch on the condition. There were no tandem repeats // encountered during the last chain that were terminated with a suffix that // was sorted via induction. In this case we just mark the suffixes as sorted // and we are done. for (unsigned int i = 0; i < numSuffixes; i++) MarkSuffixAsSorted(objects[i].m_sortValue[1] & 0x3fffffff, m_nextSortedSuffixValue); } m_suffixesSortedByInduction.Clear(); } } inline void MSufSort::ProcessSuffixesSortedByEnhancedInduction(unsigned short suffixId) { // if (m_firstSuffixByEnhancedInductionSort[suffixId] != END_OF_CHAIN) { unsigned int currentSuffixIndex = m_firstSuffixByEnhancedInductionSort[suffixId]; unsigned int lastSuffixIndex = m_lastSuffixByEnhancedInductionSort[suffixId]; m_firstSuffixByEnhancedInductionSort[suffixId] = END_OF_CHAIN; m_lastSuffixByEnhancedInductionSort[suffixId] = END_OF_CHAIN; do { unsigned short symbol = Value16(currentSuffixIndex); unsigned int nextIndex = m_ISA[currentSuffixIndex]; MarkSuffixAsSorted2(currentSuffixIndex, m_firstSortedPosition[symbol]); if (currentSuffixIndex == lastSuffixIndex) { if (m_firstSuffixByEnhancedInductionSort[suffixId] == END_OF_CHAIN) return; currentSuffixIndex = m_firstSuffixByEnhancedInductionSort[suffixId]; lastSuffixIndex = m_lastSuffixByEnhancedInductionSort[suffixId]; m_firstSuffixByEnhancedInductionSort[suffixId] = END_OF_CHAIN; m_lastSuffixByEnhancedInductionSort[suffixId] = END_OF_CHAIN; } else currentSuffixIndex = nextIndex; } while (true); } } #ifdef SHOW_PROGRESS inline void MSufSort::ShowProgress() { // Update the progress indicator. //double p = ((double)(m_numSortedSuffixes & 0x3fffffff) / m_sourceLength) * 100; // printf("Progress: %.2f%% %c", p, 13); } #endif #endif kernlab/src/Makevars.win0000644000175100001440000000006011470002335014754 0ustar hornikusersPKG_LIBS = $(LAPACK_LIBS) $(BLAS_LIBS) $(FLIBS) kernlab/src/introsort.h0000644000175100001440000001560012234152620014710 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the MSufSort suffix sorting algorithm (Version 2.2). * * The Initial Developer of the Original Code is * Michael A. Maniscalco * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. 
* * Contributor(s): * * Michael A. Maniscalco * * ***** END LICENSE BLOCK ***** */ #ifndef TERNARY_INTRO_SORT_H #define TERNARY_INTRO_SORT_H //======================================================================// // Class: IntroSort // // // // Template based implementation of Introspective sorting algorithm // // using a ternary quicksort. // // // // Author: M.A. Maniscalco // // Date: January 20, 2005 // // // //======================================================================// // *** COMPILER WARNING DISABLED *** // Disable a warning which appears in MSVC // "conversion from '__w64 int' to ''" // Just plain annoying ... Restored at end of this file. #ifdef WIN32 #pragma warning (disable : 4244) #endif #define MIN_LENGTH_FOR_QUICKSORT 32 #define MAX_DEPTH_BEFORE_HEAPSORT 128 //===================================================================== // IntroSort class declaration // Notes: Any object used with this class must implement the following // the operators: <=, >=, == //===================================================================== template void IntroSort(T * array, unsigned int count); template void Partition(T * left, unsigned int count, unsigned int depth = 0); template T SelectPivot(T value1, T value2, T value3); template void Swap(T * valueA, T * valueB); template void InsertionSort(T * array, unsigned int count); template void HeapSort(T * array, int length); template void HeapSort(T * array, int k, int N); template inline void IntroSort(T * array, unsigned int count) { // Public method used to invoke the sort. // Call quick sort partition method if there are enough // elements to warrant it or insertion sort otherwise. if (count >= MIN_LENGTH_FOR_QUICKSORT) Partition(array, count); InsertionSort(array, count); } template inline void Swap(T * valueA, T * valueB) { // do the ol' "switch-a-me-do" on two values. T temp = *valueA; *valueA = *valueB; *valueB = temp; } template inline T SelectPivot(T value1, T value2, T value3) { // middle of three method. if (value1 < value2) return ((value2 < value3) ? value2 : (value1 < value3) ? value3 : value1); return ((value1 < value3) ? value1 : (value2 < value3) ? value3 : value2); } template inline void Partition(T * left, unsigned int count, unsigned int depth) { if (++depth > MAX_DEPTH_BEFORE_HEAPSORT) { // If enough recursion has happened then we bail to heap sort since it looks // as if we are experiencing a 'worst case' for quick sort. This should not // happen very often at all. HeapSort(left, count); return; } T * right = left + count - 1; T * startingLeft = left; T * startingRight = right; T * equalLeft = left; T * equalRight = right; // select the pivot value. T pivot = SelectPivot(left[0], right[0], left[((right - left) >> 1)]); // do three way partitioning. do { while ((left < right) && (*left <= pivot)) if (*(left++) == pivot) Swap(equalLeft++, left - 1); // equal to pivot value. move to far left. while ((left < right) && (*right >= pivot)) if (*(right--) == pivot) Swap(equalRight--, right + 1); // equal to pivot value. move to far right. if (left >= right) { if (left == right) { if (*left >= pivot) left--; if (*right <= pivot) right++; } else { left--; right++; } break; // done partitioning } // left and right are ready for swaping Swap(left++, right--); } while (true); // move values that were equal to pivot from the far left into the middle. // these values are now placed in their final sorted position. 
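// [Editorial addition, not part of the original introsort.h: a sketch of the layout
// at this point.] After the scan above the array looks roughly like
//     [ == pivot | < pivot | > pivot | == pivot ]
// with the pivot-equal values parked at the two ends. The two loops below swap those
// end blocks inward, producing
//     [ < pivot | == pivot | > pivot ]
// so the middle (pivot-equal) block is already in its final position and only the
// outer blocks are partitioned further (or left for the final insertion-sort pass).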
if (equalLeft > startingLeft) while (equalLeft > startingLeft) Swap(--equalLeft, left--); // move values that were equal to pivot from the far right into the middle. // these values are now placed in their final sorted position. if (equalRight < startingRight) while (equalRight < startingRight) Swap(++equalRight, right++); // Calculate new partition sizes ... unsigned int leftSize = left - startingLeft + 1; unsigned int rightSize = startingRight - right + 1; // Partition left (less than pivot) if there are enough values to warrant it // otherwise do insertion sort on the values. if (leftSize >= MIN_LENGTH_FOR_QUICKSORT) Partition(startingLeft, leftSize, depth); // Partition right (greater than pivot) if there are enough values to warrant it // otherwise do insertion sort on the values. if (rightSize >= MIN_LENGTH_FOR_QUICKSORT) Partition(right, rightSize, depth); } template inline void InsertionSort(T * array, unsigned int count) { // A basic insertion sort. if (count < 3) { if ((count == 2) && (array[0] > array[1])) Swap(array, array + 1); return; } T * ptr2, * ptr3 = array + 1, * ptr4 = array + count; if (array[0] > array[1]) Swap(array, array + 1); while (true) { while ((++ptr3 < ptr4) && (ptr3[0] >= ptr3[-1])); if (ptr3 >= ptr4) break; if (ptr3[-2] <= ptr3[0]) { if (ptr3[-1] > ptr3[0]) Swap(ptr3, ptr3 - 1); } else { ptr2 = ptr3 - 1; T v = *ptr3; while ((ptr2 >= array) && (ptr2[0] > v)) { ptr2[1] = ptr2[0]; ptr2--; } ptr2[1] = v; } } } template inline void HeapSort(T * array, int length) { // A basic heapsort. for (int k = length >> 1; k > 0; k--) HeapSort(array, k, length); do { Swap(array, array + (--length)); HeapSort(array, 1, length); } while (length > 1); } template inline void HeapSort(T * array, int k, int N) { // A basic heapsort. T temp = array[k - 1]; int n = N >> 1; int j = (k << 1); while (k <= n) { if ((j < N) && (array[j - 1] < array[j])) j++; if (temp >= array[j - 1]) break; else { array[k - 1] = array[j - 1]; k = j; j <<= 1; } } array[k - 1] = temp; } // Restore the default warning which appears in MSVC for // warning #4244 which was disabled at top of this file. #ifdef WIN32 #pragma warning (default : 4244) #endif #endif kernlab/src/svm.cpp0000644000175100001440000025317214656662543014041 0ustar hornikusers#include #include #include #include #include #include #include #include #include #include #include "svm.h" typedef float Qfloat; typedef signed char schar; #ifndef min template inline T min(T x,T y) { return (x inline T max(T x,T y) { return (x>y)?x:y; } #endif template inline void swap(T& x, T& y) { T t=x; x=y; y=t; } template inline void clone(T*& dst, S* src, int n) { dst = new T[n]; memcpy((void *)dst,(void *)src,sizeof(T)*n); } inline double powi(double base, int times) { double tmp = base, ret = 1.0; for(int t=times; t>0; t/=2) { if(t%2==1) ret*=tmp; tmp = tmp * tmp; } return ret; } #define INF HUGE_VAL # define TAU 1e-12 #define Malloc(type,n) (type *)malloc((n)*sizeof(type)) #if 0 void info(char *fmt,...) { va_list ap; va_start(ap,fmt); //vprintf(fmt,ap); va_end(ap); } void info_flush() { fflush(stdout); } #else void info(char *fmt,...) 
{} void info_flush() {} #endif // // Kernel Cache // // l is the number of total data items // size is the cache size limit in bytes // class Cache { public: Cache(int l,long int size, int qpsize); ~Cache(); // request data [0,len) // return some position p where [p,len) need to be filled // (p >= len if nothing needs to be filled) int get_data(const int index, Qfloat **data, int len); void swap_index(int i, int j); // future_option private: int l; long int size; struct head_t { head_t *prev, *next; // a circular list Qfloat *data; int len; // data[0,len) is cached in this entry }; head_t *head; head_t lru_head; void lru_delete(head_t *h); void lru_insert(head_t *h); }; Cache::Cache(int l_,long int size_,int qpsize):l(l_),size(size_) { head = (head_t *)calloc(l,sizeof(head_t)); // initialized to 0 size /= sizeof(Qfloat); size -= l * sizeof(head_t) / sizeof(Qfloat); size = max(size, (long int) qpsize*l); // cache must be large enough for 'qpsize' columns lru_head.next = lru_head.prev = &lru_head; } Cache::~Cache() { for(head_t *h = lru_head.next; h != &lru_head; h=h->next) free(h->data); free(head); } void Cache::lru_delete(head_t *h) { // delete from current location h->prev->next = h->next; h->next->prev = h->prev; } void Cache::lru_insert(head_t *h) { // insert to last position h->next = &lru_head; h->prev = lru_head.prev; h->prev->next = h; h->next->prev = h; } int Cache::get_data(const int index, Qfloat **data, int len) { head_t *h = &head[index]; if(h->len) lru_delete(h); int more = len - h->len; if(more > 0) { // free old space while(size < more) { head_t *old = lru_head.next; lru_delete(old); free(old->data); size += old->len; old->data = 0; old->len = 0; } // allocate new space h->data = (Qfloat *)realloc(h->data,sizeof(Qfloat)*len); size -= more; swap(h->len,len); } lru_insert(h); *data = h->data; return len; } void Cache::swap_index(int i, int j) { if(i==j) return; if(head[i].len) lru_delete(&head[i]); if(head[j].len) lru_delete(&head[j]); swap(head[i].data,head[j].data); swap(head[i].len,head[j].len); if(head[i].len) lru_insert(&head[i]); if(head[j].len) lru_insert(&head[j]); if(i>j) swap(i,j); for(head_t *h = lru_head.next; h!=&lru_head; h=h->next) { if(h->len > i) { if(h->len > j) swap(h->data[i],h->data[j]); else { // give up lru_delete(h); free(h->data); size += h->len; h->data = 0; h->len = 0; } } } } // // Kernel evaluation // // the static method k_function is for doing single kernel evaluation // the constructor of Kernel prepares to calculate the l*l kernel matrix // the member function get_Q is for getting one column from the Q Matrix // class QMatrix { public: virtual Qfloat *get_Q(int column, int len) const = 0; virtual double *get_QD() const = 0; virtual void swap_index(int i, int j) const = 0; virtual ~QMatrix() {} }; class Kernel: public QMatrix{ public: Kernel(int l, svm_node * const * x, const svm_parameter& param); virtual ~Kernel(); static double k_function(const svm_node *x, const svm_node *y, const svm_parameter& param); virtual Qfloat *get_Q(int column, int len) const = 0; virtual double *get_QD() const = 0; virtual void swap_index(int i, int j) const // not so const...
{ swap(x[i],x[j]); if(x_square) swap(x_square[i],x_square[j]); } protected: double (Kernel::*kernel_function)(int i, int j) const; private: const svm_node **x; double *x_square; // svm_parameter const int kernel_type; const int degree; const double gamma; const double coef0; const double lim; const double *K; const int m; static double dot(const svm_node *px, const svm_node *py); static double anova(const svm_node *px, const svm_node *py, const double sigma, const int degree); double kernel_linear(int i, int j) const { return dot(x[i],x[j]); } double kernel_poly(int i, int j) const { return powi(gamma*dot(x[i],x[j])+coef0,degree); } double kernel_rbf(int i, int j) const { return exp(-gamma*(x_square[i]+x_square[j]-2*dot(x[i],x[j]))); } double kernel_sigmoid(int i, int j) const { return tanh(gamma*dot(x[i],x[j])+coef0); } double kernel_laplace(int i, int j) const { return exp(-gamma*sqrt(fabs(x_square[i]+x_square[j]-2*dot(x[i],x[j])))); } double kernel_bessel(int i, int j) const { double bkt = gamma*sqrt(fabs(x_square[i]+x_square[j]-2*dot(x[i],x[j]))); if (bkt < 0.000001){ return 1 ; } else { return(powi(((jn((int)degree, bkt)/powi(bkt,((int)degree)))/lim),(int) coef0)); } } double kernel_anova(int i, int j) const { return anova(x[i], x[j], gamma, degree); } double kernel_spline(int i, int j) const { double result=1.0; double min; double t1,t4; const svm_node *px = x[i], *py= x[j]; // px = x[i]; // py = x[j]; while(px->index != -1 && py->index != -1) { if(px->index == py->index) { min=((px->valuevalue)?px->value:py->value); t1 = (px->value * py->value); t4 = min*min; result*=( 1.0 + t1 + (t1*min) ) - ( ((px->value+py->value)/2.0) * t4) + ((t4 * min)/3.0); } ++px; ++py; } return result; } double kernel_R(int i, int j) const { return *(K + m*i +j); } }; Kernel::Kernel(int l, svm_node * const * x_, const svm_parameter& param) :kernel_type(param.kernel_type), degree(param.degree), gamma(param.gamma), coef0(param.coef0), lim(param.lim), K(param.K), m(param.m) { switch(kernel_type) { case LINEAR: kernel_function = &Kernel::kernel_linear; break; case POLY: kernel_function = &Kernel::kernel_poly; break; case RBF: kernel_function = &Kernel::kernel_rbf; break; case SIGMOID: kernel_function = &Kernel::kernel_sigmoid; break; case LAPLACE: kernel_function = &Kernel::kernel_laplace; break; case BESSEL: kernel_function = &Kernel::kernel_bessel; break; case ANOVA: kernel_function = &Kernel::kernel_anova; break; case SPLINE: kernel_function = &Kernel::kernel_spline; break; case R: kernel_function = &Kernel::kernel_R; break; } clone(x,x_,l); if(kernel_type == RBF || kernel_type == LAPLACE || kernel_type == BESSEL) { x_square = new double[l]; for(int i=0;iindex != -1 && py->index != -1) { if(px->index == py->index) { sum += px->value * py->value; ++px; ++py; } else { if(px->index > py->index) ++py; else ++px; } } return sum; } double Kernel::anova(const svm_node *px, const svm_node *py, const double sigma, const int degree) { double sum = 0; double tv; while(px->index != -1 && py->index != -1) { if(px->index == py->index) { tv = (px->value - py->value) * (px->value - py->value); sum += exp( - sigma * tv); ++px; ++py; } else { if(px->index > py->index) { sum += exp( - sigma * (py->value * py->value)); ++py; } else { sum += exp( - sigma * (px->value * px->value)); ++px; } } } return (powi(sum,degree)); } double Kernel::k_function(const svm_node *x, const svm_node *y, const svm_parameter& param) { switch(param.kernel_type) { case LINEAR: return dot(x,y); case POLY: return 
powi(param.gamma*dot(x,y)+param.coef0,param.degree); case RBF: { double sum = 0; while(x->index != -1 && y->index !=-1) { if(x->index == y->index) { double d = x->value - y->value; sum += d*d; ++x; ++y; } else { if(x->index > y->index) { sum += y->value * y->value; ++y; } else { sum += x->value * x->value; ++x; } } } while(x->index != -1) { sum += x->value * x->value; ++x; } while(y->index != -1) { sum += y->value * y->value; ++y; } return exp(-param.gamma*sum); } case SIGMOID: return tanh(param.gamma*dot(x,y)+param.coef0); default: return 0; /* Unreachable */ } } // Generalized SMO+SVMlight algorithm // Solves: // // min 0.5(\alpha^T Q \alpha) + p^T \alpha // // y^T \alpha = \delta // y_i = +1 or -1 // 0 <= alpha_i <= Cp for y_i = 1 // 0 <= alpha_i <= Cn for y_i = -1 // // Given: // // Q, p, y, Cp, Cn, and an initial feasible point \alpha // l is the size of vectors and matrices // eps is the stopping criterion // // solution will be put in \alpha, objective value will be put in obj // class Solver { public: Solver() {}; virtual ~Solver() {}; struct SolutionInfo { double obj; double rho; double upper_bound_p; double upper_bound_n; double r; // for Solver_NU }; void Solve(int l, const QMatrix& Q, const double *p_, const schar *y_, double *alpha_, double Cp, double Cn, double eps, SolutionInfo* si, int shrinking); protected: int active_size; schar *y; double *G; // gradient of objective function enum { LOWER_BOUND, UPPER_BOUND, FREE }; char *alpha_status; // LOWER_BOUND, UPPER_BOUND, FREE double *alpha; const QMatrix *Q; const double *QD; double eps; double Cp,Cn; double *p; int *active_set; double *G_bar; // gradient, if we treat free variables as 0 int l; bool unshrink; // XXX double get_C(int i) { return (y[i] > 0)? Cp : Cn; } void update_alpha_status(int i) { if(alpha[i] >= get_C(i)) alpha_status[i] = UPPER_BOUND; else if(alpha[i] <= 0) alpha_status[i] = LOWER_BOUND; else alpha_status[i] = FREE; } bool is_upper_bound(int i) { return alpha_status[i] == UPPER_BOUND; } bool is_lower_bound(int i) { return alpha_status[i] == LOWER_BOUND; } bool is_free(int i) { return alpha_status[i] == FREE; } void swap_index(int i, int j); void reconstruct_gradient(); virtual int select_working_set(int &i, int &j); virtual double calculate_rho(); virtual void do_shrinking(); private: bool be_shrunk(int i, double Gmax1, double Gmax2); }; void Solver::swap_index(int i, int j) { Q->swap_index(i,j); swap(y[i],y[j]); swap(G[i],G[j]); swap(alpha_status[i],alpha_status[j]); swap(alpha[i],alpha[j]); swap(p[i],p[j]); swap(active_set[i],active_set[j]); swap(G_bar[i],G_bar[j]); } void Solver::reconstruct_gradient() { // reconstruct inactive elements of G from G_bar and free variables if(active_size == l) return; int i,j; int nr_free = 0; for(j=active_size;j 2*active_size*(l-active_size)) { for(i=active_size;iget_Q(i,active_size); for(j=0;jget_Q(i,l); double alpha_i = alpha[i]; for(j=active_size;jl = l; this->Q = &Q; QD=Q.get_QD(); clone(p, p_,l); clone(y, y_,l); clone(alpha,alpha_,l); this->Cp = Cp; this->Cn = Cn; this->eps = eps; unshrink = false; // initialize alpha_status { alpha_status = new char[l]; for(int i=0;iINT_MAX/100 ? 
INT_MAX : 100*l); int counter = min(l,1000)+1; while(iter < max_iter) { // show progress and do shrinking if(--counter == 0) { counter = min(l,1000); if(shrinking) do_shrinking(); } int i,j; if(select_working_set(i,j)!=0) { // reconstruct the whole gradient reconstruct_gradient(); // reset active set size and check active_size = l; if(select_working_set(i,j)!=0) break; else counter = 1; // do shrinking next iteration } ++iter; // update alpha[i] and alpha[j], handle bounds carefully const Qfloat *Q_i = Q.get_Q(i,active_size); const Qfloat *Q_j = Q.get_Q(j,active_size); double C_i = get_C(i); double C_j = get_C(j); double old_alpha_i = alpha[i]; double old_alpha_j = alpha[j]; if(y[i]!=y[j]) { double quad_coef = QD[i]+QD[j]+2*Q_i[j]; if (quad_coef <= 0) quad_coef = TAU; double delta = (-G[i]-G[j])/quad_coef; double diff = alpha[i] - alpha[j]; alpha[i] += delta; alpha[j] += delta; if(diff > 0) { if(alpha[j] < 0) { alpha[j] = 0; alpha[i] = diff; } } else { if(alpha[i] < 0) { alpha[i] = 0; alpha[j] = -diff; } } if(diff > C_i - C_j) { if(alpha[i] > C_i) { alpha[i] = C_i; alpha[j] = C_i - diff; } } else { if(alpha[j] > C_j) { alpha[j] = C_j; alpha[i] = C_j + diff; } } } else { double quad_coef = QD[i]+QD[j]-2*Q_i[j]; if (quad_coef <= 0) quad_coef = TAU; double delta = (G[i]-G[j])/quad_coef; double sum = alpha[i] + alpha[j]; alpha[i] -= delta; alpha[j] += delta; if(sum > C_i) { if(alpha[i] > C_i) { alpha[i] = C_i; alpha[j] = sum - C_i; } } else { if(alpha[j] < 0) { alpha[j] = 0; alpha[i] = sum; } } if(sum > C_j) { if(alpha[j] > C_j) { alpha[j] = C_j; alpha[i] = sum - C_j; } } else { if(alpha[i] < 0) { alpha[i] = 0; alpha[j] = sum; } } } // update G double delta_alpha_i = alpha[i] - old_alpha_i; double delta_alpha_j = alpha[j] - old_alpha_j; for(int k=0;k= max_iter) { if(active_size < l) { // reconstruct the whole gradient to calculate objective value reconstruct_gradient(); active_size = l; } } // calculate rho si->rho = calculate_rho(); // calculate objective value { double v = 0; int i; for(i=0;iobj = v/2; } // put back the solution { for(int i=0;iupper_bound_p = Cp; si->upper_bound_n = Cn; delete[] p; delete[] y; delete[] alpha; delete[] alpha_status; delete[] active_set; delete[] G; delete[] G_bar; } // return 1 if already optimal, return 0 otherwise int Solver::select_working_set(int &out_i, int &out_j) { // return i,j such that // i: maximizes -y_i * grad(f)_i, i in I_up(\alpha) // j: minimizes the decrease of obj value // (if quadratic coefficeint <= 0, replace it with tau) // -y_j*grad(f)_j < -y_i*grad(f)_i, j in I_low(\alpha) double Gmax = -INF; double Gmax2 = -INF; int Gmax_idx = -1; int Gmin_idx = -1; double obj_diff_min = INF; for(int t=0;t= Gmax) { Gmax = -G[t]; Gmax_idx = t; } } else { if(!is_lower_bound(t)) if(G[t] >= Gmax) { Gmax = G[t]; Gmax_idx = t; } } int i = Gmax_idx; const Qfloat *Q_i = NULL; if(i != -1) // NULL Q_i not accessed: Gmax=-INF if i=-1 Q_i = Q->get_Q(i,active_size); for(int j=0;j= Gmax2) Gmax2 = G[j]; if (grad_diff > 0) { double obj_diff; double quad_coef = QD[i]+QD[j]-2.0*y[i]*Q_i[j]; if (quad_coef > 0) obj_diff = -(grad_diff*grad_diff)/quad_coef; else obj_diff = -(grad_diff*grad_diff)/TAU; if (obj_diff <= obj_diff_min) { Gmin_idx=j; obj_diff_min = obj_diff; } } } } else { if (!is_upper_bound(j)) { double grad_diff= Gmax-G[j]; if (-G[j] >= Gmax2) Gmax2 = -G[j]; if (grad_diff > 0) { double obj_diff; double quad_coef = QD[i]+QD[j]+2.0*y[i]*Q_i[j]; if (quad_coef > 0) obj_diff = -(grad_diff*grad_diff)/quad_coef; else obj_diff = -(grad_diff*grad_diff)/TAU; if 
(obj_diff <= obj_diff_min) { Gmin_idx=j; obj_diff_min = obj_diff; } } } } } if(Gmax+Gmax2 < eps) return 1; out_i = Gmax_idx; out_j = Gmin_idx; return 0; } bool Solver::be_shrunk(int i, double Gmax1, double Gmax2) { if(is_upper_bound(i)) { if(y[i]==+1) return(-G[i] > Gmax1); else return(-G[i] > Gmax2); } else if(is_lower_bound(i)) { if(y[i]==+1) return(G[i] > Gmax2); else return(G[i] > Gmax1); } else return(false); } void Solver::do_shrinking() { int i; double Gmax1 = -INF; // max { -y_i * grad(f)_i | i in I_up(\alpha) } double Gmax2 = -INF; // max { y_i * grad(f)_i | i in I_low(\alpha) } // find maximal violating pair first for(i=0;i= Gmax1) Gmax1 = -G[i]; } if(!is_lower_bound(i)) { if(G[i] >= Gmax2) Gmax2 = G[i]; } } else { if(!is_upper_bound(i)) { if(-G[i] >= Gmax2) Gmax2 = -G[i]; } if(!is_lower_bound(i)) { if(G[i] >= Gmax1) Gmax1 = G[i]; } } } if(unshrink == false && Gmax1 + Gmax2 <= eps*10) { unshrink = true; reconstruct_gradient(); active_size = l; } for(i=0;i i) { if (!be_shrunk(active_size, Gmax1, Gmax2)) { swap_index(i,active_size); break; } active_size--; } } } double Solver::calculate_rho() { double r; int nr_free = 0; double ub = INF, lb = -INF, sum_free = 0; for(int i=0;i0) r = sum_free/nr_free; else r = (ub+lb)/2; return r; } // // Solver for nu-svm classification and regression // // additional constraint: e^T \alpha = constant // class Solver_NU: public Solver { public: Solver_NU() {} void Solve(int l, const QMatrix& Q, const double *p, const schar *y, double *alpha, double Cp, double Cn, double eps, SolutionInfo* si, int shrinking) { this->si = si; Solver::Solve(l,Q,p,y,alpha,Cp,Cn,eps,si,shrinking); } private: SolutionInfo *si; int select_working_set(int &i, int &j); double calculate_rho(); bool be_shrunk(int i, double Gmax1, double Gmax2, double Gmax3, double Gmax4); void do_shrinking(); }; // return 1 if already optimal, return 0 otherwise int Solver_NU::select_working_set(int &out_i, int &out_j) { // return i,j such that y_i = y_j and // i: maximizes -y_i * grad(f)_i, i in I_up(\alpha) // j: minimizes the decrease of obj value // (if quadratic coefficeint <= 0, replace it with tau) // -y_j*grad(f)_j < -y_i*grad(f)_i, j in I_low(\alpha) double Gmaxp = -INF; double Gmaxp2 = -INF; int Gmaxp_idx = -1; double Gmaxn = -INF; double Gmaxn2 = -INF; int Gmaxn_idx = -1; int Gmin_idx = -1; double obj_diff_min = INF; for(int t=0;t= Gmaxp) { Gmaxp = -G[t]; Gmaxp_idx = t; } } else { if(!is_lower_bound(t)) if(G[t] >= Gmaxn) { Gmaxn = G[t]; Gmaxn_idx = t; } } int ip = Gmaxp_idx; int in = Gmaxn_idx; const Qfloat *Q_ip = NULL; const Qfloat *Q_in = NULL; if(ip != -1) // NULL Q_ip not accessed: Gmaxp=-INF if ip=-1 Q_ip = Q->get_Q(ip,active_size); if(in != -1) Q_in = Q->get_Q(in,active_size); for(int j=0;j= Gmaxp2) Gmaxp2 = G[j]; if (grad_diff > 0) { double obj_diff; double quad_coef = QD[ip]+QD[j]-2*Q_ip[j]; if (quad_coef > 0) obj_diff = -(grad_diff*grad_diff)/quad_coef; else obj_diff = -(grad_diff*grad_diff)/TAU; if (obj_diff <= obj_diff_min) { Gmin_idx=j; obj_diff_min = obj_diff; } } } } else { if (!is_upper_bound(j)) { double grad_diff=Gmaxn-G[j]; if (-G[j] >= Gmaxn2) Gmaxn2 = -G[j]; if (grad_diff > 0) { double obj_diff; double quad_coef = QD[in]+QD[j]-2*Q_in[j]; if (quad_coef > 0) obj_diff = -(grad_diff*grad_diff)/quad_coef; else obj_diff = -(grad_diff*grad_diff)/TAU; if (obj_diff <= obj_diff_min) { Gmin_idx=j; obj_diff_min = obj_diff; } } } } } if(max(Gmaxp+Gmaxp2,Gmaxn+Gmaxn2) < eps) return 1; if (y[Gmin_idx] == +1) out_i = Gmaxp_idx; else out_i = Gmaxn_idx; out_j = Gmin_idx; return 
0; } bool Solver_NU::be_shrunk(int i, double Gmax1, double Gmax2, double Gmax3, double Gmax4) { if(is_upper_bound(i)) { if(y[i]==+1) return(-G[i] > Gmax1); else return(-G[i] > Gmax4); } else if(is_lower_bound(i)) { if(y[i]==+1) return(G[i] > Gmax2); else return(G[i] > Gmax3); } else return(false); } void Solver_NU::do_shrinking() { double Gmax1 = -INF; // max { -y_i * grad(f)_i | y_i = +1, i in I_up(\alpha) } double Gmax2 = -INF; // max { y_i * grad(f)_i | y_i = +1, i in I_low(\alpha) } double Gmax3 = -INF; // max { -y_i * grad(f)_i | y_i = -1, i in I_up(\alpha) } double Gmax4 = -INF; // max { y_i * grad(f)_i | y_i = -1, i in I_low(\alpha) } // find maximal violating pair first int i; for(i=0;i Gmax1) Gmax1 = -G[i]; } else if(-G[i] > Gmax4) Gmax4 = -G[i]; } if(!is_lower_bound(i)) { if(y[i]==+1) { if(G[i] > Gmax2) Gmax2 = G[i]; } else if(G[i] > Gmax3) Gmax3 = G[i]; } } if(unshrink == false && max(Gmax1+Gmax2,Gmax3+Gmax4) <= eps*10) { unshrink = true; reconstruct_gradient(); active_size = l; } for(i=0;i i) { if (!be_shrunk(active_size, Gmax1, Gmax2, Gmax3, Gmax4)) { swap_index(i,active_size); break; } active_size--; } } } double Solver_NU::calculate_rho() { int nr_free1 = 0,nr_free2 = 0; double ub1 = INF, ub2 = INF; double lb1 = -INF, lb2 = -INF; double sum_free1 = 0, sum_free2 = 0; for(int i=0;i 0) r1 = sum_free1/nr_free1; else r1 = (ub1+lb1)/2; if(nr_free2 > 0) r2 = sum_free2/nr_free2; else r2 = (ub2+lb2)/2; si->r = (r1+r2)/2; return (r1-r2)/2; } /////////////////// BSVM code class Solver_SPOC { public: Solver_SPOC() {}; ~Solver_SPOC() {}; void Solve(int l, const Kernel& Q, double *alpha_, short *y_, double *C_, double eps, int shrinking, int nr_class); private: int active_size; double *G; // gradient of objective function short *y; bool *alpha_status; // free:true, bound:false double *alpha; const Kernel *Q; double eps; double *C; int *active_set; int l, nr_class; bool unshrinked; double get_C(int i, int m) { if (y[i] == m) return C[m]; return 0; } void update_alpha_status(int i, int m) { if(alpha[i*nr_class+m] >= get_C(i, m)) alpha_status[i*nr_class+m] = false; else alpha_status[i*nr_class+m] = true; } void swap_index(int i, int j); double select_working_set(int &q); void solve_sub_problem(double A, double *B, double C, double *nu); void reconstruct_gradient(); void do_shrinking(); }; void Solver_SPOC::swap_index(int i, int j) { Q->swap_index(i, j); swap(y[i], y[j]); swap(active_set[i], active_set[j]); for (int m=0;mget_Q(i,l); double alpha_i_m = alpha[i*nr_class+m]; for (int j=active_size;jl = l; this->nr_class = nr_class; this->Q = &Q; clone(y,y_,l); clone(alpha,alpha_,l*nr_class); C = C_; this->eps = eps; unshrinked = false; int i, m, q, old_q = -1; // initialize alpha_status { alpha_status = new bool[l*nr_class]; for(i=0;i 0) solve_sub_problem(A, B, C[y[q]], nu); else { i = 0; for (m=1;m B[i]) i = m; nu[i] = -C[y[q]]; } nu[y[q]] += C[y[q]]; for (m=0;m 1e-12) #endif { alpha[q*nr_class+m] = nu[m]; update_alpha_status(q, m); for (i=0;i 0) nSV++; } //info("\noptimization finished, #iter = %d, obj = %lf\n",iter, obj); // info("nSV = %d, nFREE = %d\n",nSV,nFREE); // put back the solution { for(int i=0;i vio_q) { q = i; vio_q = lb - ub; } } return vio_q; } void Solver_SPOC::do_shrinking() { int i, m; double Gm = select_working_set(i); if (Gm < eps) return; // shrink for (i=0;i= th) goto out; for (m++;m= th) goto out; --active_size; swap_index(i, active_size); --i; out: ; } // unshrink, check all variables again before final iterations if (unshrinked || Gm > 10*eps) return; unshrinked = 
true; reconstruct_gradient(); for (i=l-1;i>=active_size;i--) { double *G_i = &G[i*nr_class]; double th = G_i[y[i]] - Gm/2; for (m=0;m= th) goto out1; for (m++;m= th) goto out1; swap_index(i, active_size); ++active_size; ++i; out1: ; } } int compar(const void *a, const void *b) { if (*(double *)a > *(double *)b) return -1; else if (*(double *)a < *(double *)b) return 1; return 0; } void Solver_SPOC::solve_sub_problem(double A, double *B, double C, double *nu) { int r; double *D; clone(D, B, nr_class+1); qsort(D, nr_class, sizeof(double), compar); D[nr_class] = -INF; double phi = D[0] - A*C; for (r=0;phi<(r+1)*D[r+1];r++) phi += D[r+1]; delete[] D; phi /= (r+1); for (r=0;r 0)? Cp : Cn; } void update_alpha_status(int i) { if(alpha[i] >= get_C(i)) alpha_status[i] = UPPER_BOUND; else if(alpha[i] <= 0) alpha_status[i] = LOWER_BOUND; else alpha_status[i] = FREE; } bool is_upper_bound(int i) { return alpha_status[i] == UPPER_BOUND; } bool is_lower_bound(int i) { return alpha_status[i] == LOWER_BOUND; } bool is_free(int i) { return alpha_status[i] == FREE; } virtual void swap_index(int i, int j); virtual void reconstruct_gradient(); virtual void shrink_one(int k); virtual void unshrink_one(int k); double select_working_set(int &q); void do_shrinking(); private: double Cp, Cn; double *b; schar *y; }; void Solver_B::swap_index(int i, int j) { Q->swap_index(i,j); swap(y[i],y[j]); swap(G[i],G[j]); swap(alpha_status[i],alpha_status[j]); swap(alpha[i],alpha[j]); swap(b[i],b[j]); swap(active_set[i],active_set[j]); swap(G_bar[i],G_bar[j]); } void Solver_B::reconstruct_gradient() { // reconstruct inactive elements of G from G_bar and free variables if(active_size == l) return; int i; for(i=active_size;iget_Q(i,l); double alpha_i = alpha[i]; for(int j=active_size;jl = l; this->Q = &Q; b = b_; clone(y, y_, l); clone(alpha,alpha_,l); this->Cp = Cp; this->Cn = Cn; this->eps = eps; this->qpsize = qpsize; unshrinked = false; // initialize alpha_status { alpha_status = new char[l]; for(int i=0;i1e-12) { alpha[working_set[i]] = qp.x[i]; Qfloat *QB_i = QB[i]; for(j=0;jobj = v/2; } // juggle everything back /*{ for(int i=0;iupper_bound = new double[2]; si->upper_bound[0] = Cp; si->upper_bound[1] = Cn; // info("\noptimization finished, #iter = %d\n",iter); // put back the solution { for(int i=0;i= positive_max[j]) break; positive_max[j-1] = positive_max[j]; positive_set[j-1] = positive_set[j]; } positive_max[j-1] = v; positive_set[j-1] = i; } } for (i=0;i0) continue; } if (v > positive_max[0]) { for (j=1;j= -Gm) continue; } else continue; --active_size; shrink_one(k); --k; // look at the newcomer } // unshrink, check all variables again before final iterations if (unshrinked || Gm > eps*10) return; unshrinked = true; reconstruct_gradient(); for(k=l-1;k>=active_size;k--) { if (is_lower_bound(k)) { if (G[k] > Gm) continue; } else if (is_upper_bound(k)) { if (G[k] < -Gm) continue; } else continue; unshrink_one(k); active_size++; ++k; // look at the newcomer } } class Solver_B_linear : public Solver_B { public: Solver_B_linear() {}; ~Solver_B_linear() {}; int Solve(int l, svm_node * const * x_, double *b_, schar *y_, double *alpha_, double *w, double Cp, double Cn, double eps, SolutionInfo* si, int shrinking, int qpsize); private: double get_C(int i) { return (y[i] > 0)? 
Cp : Cn; } void swap_index(int i, int j); void reconstruct_gradient(); double dot(int i, int j); double Cp, Cn; double *b; schar *y; double *w; const svm_node **x; }; double Solver_B_linear::dot(int i, int j) { const svm_node *px = x[i], *py = x[j]; double sum = 0; while(px->index != -1 && py->index != -1) { if(px->index == py->index) { sum += px->value * py->value; ++px; ++py; } else { if(px->index > py->index) ++py; else ++px; } } return sum; } void Solver_B_linear::swap_index(int i, int j) { swap(y[i],y[j]); swap(G[i],G[j]); swap(alpha_status[i],alpha_status[j]); swap(alpha[i],alpha[j]); swap(b[i],b[j]); swap(active_set[i],active_set[j]); swap(x[i], x[j]); } void Solver_B_linear::reconstruct_gradient() { int i; for(i=active_size;iindex != -1;px++) sum += w[px->index]*px->value; sum += w[0]; G[i] = y[i]*sum + b[i]; } } int Solver_B_linear::Solve(int l, svm_node * const * x_, double *b_, schar *y_, double *alpha_, double *w, double Cp, double Cn, double eps, SolutionInfo* si, int shrinking, int qpsize) { this->l = l; clone(x, x_, l); clone(b, b_, l); clone(y, y_, l); clone(alpha,alpha_,l); this->Cp = Cp; this->Cn = Cn; this->eps = eps; this->qpsize = qpsize; this->w = w; unshrinked = false; // initialize alpha_status { alpha_status = new char[l]; for(int i=0;iindex != -1;px++) sum += w[px->index]*px->value; sum += w[0]; G[i] += y[i]*sum; } } // optimization step int iter = 0; int counter = min(l*2/qpsize,2000/qpsize)+1; while(1) { // show progress and do shrinking if(--counter == 0) { counter = min(l*2/qpsize, 2000/qpsize); if(shrinking) do_shrinking(); // info("."); } int i,j,q; if (select_working_set(q) < eps) { // reconstruct the whole gradient reconstruct_gradient(); // reset active set size and check active_size = l; // info("*");info_flush(); if (select_working_set(q) < eps) break; else counter = 1; // do shrinking next iteration } if (counter == min(l*2/qpsize, 2000/qpsize)) { bool same = true; for (i=0;i1e-12) { alpha[Bi] = qp.x[i]; update_alpha_status(Bi); double yalpha = y[Bi]*d; for (const svm_node *px = x[Bi];px->index != -1;px++) w[px->index] += yalpha*px->value; w[0] += yalpha; } } for(j=0;jindex != -1;px++) sum += w[px->index]*px->value; sum += w[0]; G[j] = y[j]*sum + b[j]; } } // calculate objective value { double v = 0; int i; for(i=0;iobj = v/2; } // juggle everything back /*{ for(int i=0;iupper_bound = new double[2]; si->upper_bound[0] = Cp; si->upper_bound[1] = Cn; // info("\noptimization finished, #iter = %d\n",iter); // put back the solution { for(int i=0;iget_Q(real_i[i],real_l); double alpha_i = alpha[i], t; int y_i = y[i], yy_i = yy[i], ub, k; t = 2*alpha_i; ub = start2[yy_i*nr_class+y_i+1]; for (j=start2[yy_i*nr_class+y_i];jl = l; this->nr_class = nr_class; this->real_l = l/(nr_class - 1); this->Q = &Q; this->lin = lin; clone(y,y_,l); clone(alpha,alpha_,l); C = C_; this->eps = eps; this->qpsize = qpsize; unshrinked = false; // initialize alpha_status { alpha_status = new char[l]; for(int i=0;i 1e-12) { alpha[Bi] = qp.x[i]; Qfloat *QB_i = QB[i]; int y_Bi = y[Bi], yy_Bi = yy[Bi], ub, k; double t = 2*d; ub = start1[yy_Bi*nr_class+y_Bi+1]; for (j=start1[yy_Bi*nr_class+y_Bi];jobj = v/4; } clone(si->upper_bound,C,nr_class); //info("\noptimization finished, #iter = %d\n",iter); // put back the solution { for(int i=0;i0;i--) swap_index(start2[i], start2[i-1]); t = s + 1; for (i=nr_class*nr_class;i>t;i--) swap_index(start1[i], start1[i-1]); t = nr_class*nr_class; for (i=s+1;i<=t;i++) start1[i]++; for (i=0;i<=s;i++) start2[i]++; } // // Q matrices for various formulations 
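// [Editorial addition, not part of the upstream LIBSVM/BSVM sources: a sketch of the
// pattern shared by the Q-matrix classes below.] Each get_Q(i, len) first asks the
// LRU Cache for column i; Cache::get_data() reports how much of the column is already
// cached, and only the missing tail is computed with the kernel function, roughly:
//     Qfloat *data;
//     int start = cache->get_data(i, &data, len);     // [0,start) is already cached
//     for (int j = start; j < len; j++)               // fill in the rest
//         data[j] = (Qfloat)((this->*kernel_function)(i, j));
//     return data;
// The concrete classes add formulation-specific tweaks (e.g. "+ 1" terms, label
// products y[i]*y[j], or the sign/index reordering used for regression).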
// class BSVC_Q: public Kernel { public: BSVC_Q(const svm_problem& prob, const svm_parameter& param, const schar *y_) :Kernel(prob.l, prob.x, param) { clone(y,y_,prob.l); cache = new Cache(prob.l,(int)(param.cache_size*(1<<20)),param.qpsize); QD = new double[1]; QD[0] = 1; } Qfloat *get_Q(int i, int len) const { Qfloat *data; int start; if((start = cache->get_data(i,&data,len)) < len) { for(int j=start;j*kernel_function)(i,j) + 1); } return data; } double *get_QD() const { return QD; } void swap_index(int i, int j) const { cache->swap_index(i,j); Kernel::swap_index(i,j); swap(y[i],y[j]); } ~BSVC_Q() { delete[] y; delete cache; delete[] QD; } private: schar *y; Cache *cache; double *QD; }; class BONE_CLASS_Q: public Kernel { public: BONE_CLASS_Q(const svm_problem& prob, const svm_parameter& param) :Kernel(prob.l, prob.x, param) { cache = new Cache(prob.l,(int)(param.cache_size*(1<<20)),param.qpsize); QD = new double[1]; QD[0] = 1; } Qfloat *get_Q(int i, int len) const { Qfloat *data; int start; if((start = cache->get_data(i,&data,len)) < len) { for(int j=start;j*kernel_function)(i,j) + 1; } return data; } double *get_QD() const { return QD; } ~BONE_CLASS_Q() { delete cache; delete[] QD; } private: Cache *cache; double *QD; }; class BSVR_Q: public Kernel { public: BSVR_Q(const svm_problem& prob, const svm_parameter& param) :Kernel(prob.l, prob.x, param) { l = prob.l; cache = new Cache(l,(int)(param.cache_size*(1<<20)),param.qpsize); QD = new double[1]; QD[0] = 1; sign = new schar[2*l]; index = new int[2*l]; for(int k=0;kget_data(real_i,&data,l) < l) { for(int j=0;j*kernel_function)(real_i,j) + 1; } // reorder and copy Qfloat *buf = buffer[next_buffer]; next_buffer = (next_buffer+1)%q; schar si = sign[i]; for(int j=0;j*kernel_function)(i,i); } Qfloat *get_Q(int i, int len) const { Qfloat *data; int start; if((start = cache->get_data(i,&data,len)) < len) { for(int j=start;j*kernel_function)(i,j)); } return data; } double *get_QD() const { return QD; } void swap_index(int i, int j) const { cache->swap_index(i,j); Kernel::swap_index(i,j); swap(y[i],y[j]); swap(QD[i],QD[j]); } ~SVC_Q() { delete[] y; delete cache; delete[] QD; } private: schar *y; Cache *cache; double *QD; }; class ONE_CLASS_Q: public Kernel { public: ONE_CLASS_Q(const svm_problem& prob, const svm_parameter& param) :Kernel(prob.l, prob.x, param) { cache = new Cache(prob.l,(long int)(param.cache_size*(1<<20)),param.qpsize); QD = new double[prob.l]; for(int i=0;i*kernel_function)(i,i); } Qfloat *get_Q(int i, int len) const { Qfloat *data; int start; if((start = cache->get_data(i,&data,len)) < len) { for(int j=start;j*kernel_function)(i,j); } return data; } double *get_QD() const { return QD; } void swap_index(int i, int j) const { cache->swap_index(i,j); Kernel::swap_index(i,j); swap(QD[i],QD[j]); } ~ONE_CLASS_Q() { delete cache; delete[] QD; } private: Cache *cache; double *QD; }; class SVR_Q: public Kernel { public: SVR_Q(const svm_problem& prob, const svm_parameter& param) :Kernel(prob.l, prob.x, param) { l = prob.l; cache = new Cache(l,(long int)(param.cache_size*(1<<20)),param.qpsize); QD = new double[2*l]; sign = new schar[2*l]; index = new int[2*l]; for(int k=0;k*kernel_function)(k,k); QD[k+l]=QD[k]; } buffer[0] = new Qfloat[2*l]; buffer[1] = new Qfloat[2*l]; next_buffer = 0; } void swap_index(int i, int j) const { swap(sign[i],sign[j]); swap(index[i],index[j]); swap(QD[i],QD[j]); } Qfloat *get_Q(int i, int len) const { Qfloat *data; int real_i = index[i]; if(cache->get_data(real_i,&data,l) < l) { for(int 
j=0;j*kernel_function)(real_i,j); } // reorder and copy Qfloat *buf = buffer[next_buffer]; next_buffer = 1 - next_buffer; schar si = sign[i]; for(int j=0;jsvm_type; if(svm_type != C_BSVC && svm_type != EPSILON_BSVR && svm_type != KBB && svm_type != SPOC) return "unknown svm type"; // kernel_type int kernel_type = param->kernel_type; if(kernel_type != LINEAR && kernel_type != POLY && kernel_type != RBF && kernel_type != SIGMOID && kernel_type != R && kernel_type != LAPLACE&& kernel_type != BESSEL&& kernel_type != ANOVA) return "unknown kernel type"; // cache_size,eps,C,nu,p,shrinking if(kernel_type != LINEAR) if(param->cache_size <= 0) return "cache_size <= 0"; if(param->eps <= 0) return "eps <= 0"; if(param->C <= 0) return "C <= 0"; if(svm_type == EPSILON_BSVR) if(param->p < 0) return "p < 0"; if(param->shrinking != 0 && param->shrinking != 1) return "shrinking != 0 and shrinking != 1"; if(svm_type == C_BSVC || svm_type == KBB || svm_type == SPOC) if(param->qpsize < 2) return "qpsize < 2"; if(kernel_type == LINEAR) if (param->Cbegin <= 0) return "Cbegin <= 0"; if(kernel_type == LINEAR) if (param->Cstep <= 1) return "Cstep <= 1"; return NULL; } const char *svm_check_parameter(const svm_problem *prob, const svm_parameter *param) { // svm_type int svm_type = param->svm_type; if(svm_type != C_SVC && svm_type != NU_SVC && svm_type != ONE_CLASS && svm_type != EPSILON_SVR && svm_type != NU_SVR) return "unknown svm type"; // kernel_type int kernel_type = param->kernel_type; if(kernel_type != LINEAR && kernel_type != POLY && kernel_type != RBF && kernel_type != SIGMOID && kernel_type != R && kernel_type != LAPLACE&& kernel_type != BESSEL&& kernel_type != ANOVA&& kernel_type != SPLINE) return "unknown kernel type"; // cache_size,eps,C,nu,p,shrinking if(param->cache_size <= 0) return "cache_size <= 0"; if(param->eps <= 0) return "eps <= 0"; if(svm_type == C_SVC || svm_type == EPSILON_SVR || svm_type == NU_SVR) if(param->C <= 0) return "C <= 0"; if(svm_type == NU_SVC || svm_type == ONE_CLASS || svm_type == NU_SVR) if(param->nu < 0 || param->nu > 1) return "nu < 0 or nu > 1"; if(svm_type == EPSILON_SVR) if(param->p < 0) return "p < 0"; if(param->shrinking != 0 && param->shrinking != 1) return "shrinking != 0 and shrinking != 1"; // check whether nu-svc is feasible if(svm_type == NU_SVC) { int l = prob->l; int max_nr_class = 16; int nr_class = 0; int *label = Malloc(int,max_nr_class); int *count = Malloc(int,max_nr_class); int i; for(i=0;iy[i]; int j; for(j=0;jnu*(n1+n2)/2 > min(n1,n2)) { free(label); free(count); return "specified nu is infeasible"; } } } } return NULL; } #include #include #include extern "C" { struct svm_node ** sparsify (double *x, int r, int c) { struct svm_node** sparse; int i, ii, count; sparse = (struct svm_node **) malloc (r * sizeof(struct svm_node *)); for (i = 0; i < r; i++) { /* determine nr. 
of non-zero elements */ for (count = ii = 0; ii < c; ii++) if (x[i * c + ii] != 0) count++; /* allocate memory for column elements */ sparse[i] = (struct svm_node *) malloc ((count + 1) * sizeof(struct svm_node)); /* set column elements */ for (count = ii = 0; ii < c; ii++) if (x[i * c + ii] != 0) { sparse[i][count].index = ii; sparse[i][count].value = x[i * c + ii]; count++; } /* set termination element */ sparse[i][count].index = -1; } return sparse; } struct svm_node ** transsparse (double *x, int r, int *rowindex, int *colindex) { struct svm_node** sparse; int i, ii, count = 0, nnz = 0; sparse = (struct svm_node **) malloc (r * sizeof(struct svm_node*)); for (i = 0; i < r; i++) { /* allocate memory for column elements */ nnz = rowindex[i+1] - rowindex[i]; sparse[i] = (struct svm_node *) malloc ((nnz + 1) * sizeof(struct svm_node)); /* set column elements */ for (ii = 0; ii < nnz; ii++) { sparse[i][ii].index = colindex[count]; sparse[i][ii].value = x[count]; count++; } /* set termination element */ sparse[i][ii].index = -1; } return sparse; } void tron_run(const svm_problem *prob, const svm_parameter* param, double *alpha, double *weighted_C, Solver_B::SolutionInfo* sii, int nr_class, int *count) { int l = prob->l; int i; double Cp = param->C; double Cn = param->C; if(param->nr_weight > 0) { Cp = param->C*param->weight[0]; Cn = param->C*param->weight[1]; } switch(param->svm_type) { case C_BSVC: { // double *alpha = new double[l]; double *minus_ones = new double[l]; schar *y = new schar[l]; for(i=0;iy[i] > 0) y[i] = +1; else y[i]=-1; } if (param->kernel_type == LINEAR) { double *w = new double[prob->n+1]; for (i=0;i<=prob->n;i++) w[i] = 0; Solver_B_linear s; int totaliter = 0; double Cpj = param->Cbegin, Cnj = param->Cbegin*Cn/Cp; while (Cpj < Cp) { totaliter += s.Solve(l, prob->x, minus_ones, y, alpha, w, Cpj, Cnj, param->eps, sii, param->shrinking, param->qpsize); if (Cpj*param->Cstep >= Cp) { for (i=0;i<=prob->n;i++) w[i] = 0; for (i=0;i= Cpj) alpha[i] = Cp; else if (y[i] == -1 && alpha[i] >= Cnj) alpha[i] = Cn; else alpha[i] *= Cp/Cpj; double yalpha = y[i]*alpha[i]; for (const svm_node *px = prob->x[i];px->index != -1;px++) w[px->index] += yalpha*px->value; w[0] += yalpha; } } else { for (i=0;iCstep; for (i=0;i<=prob->n;i++) w[i] *= param->Cstep; } Cpj *= param->Cstep; Cnj *= param->Cstep; } totaliter += s.Solve(l, prob->x, minus_ones, y, alpha, w, Cp, Cn, param->eps, sii, param->shrinking, param->qpsize); //info("\noptimization finished, #iter = %d\n",totaliter); delete[] w; } else { Solver_B s; s.Solve(l, BSVC_Q(*prob,*param,y), minus_ones, y, alpha, Cp, Cn, param->eps, sii, param->shrinking, param->qpsize); } // double sum_alpha=0; // for(i=0;iC*prob->l)); // for(i=0;ip - prob->y[i]; y[i] = 1; alpha2[i+l] = 0; linear_term[i+l] = param->p + prob->y[i]; y[i+l] = -1; } if (param->kernel_type == LINEAR) { double *w = new double[prob->n+1]; for (i=0;i<=prob->n;i++) w[i] = 0; struct svm_node **x = new svm_node*[2*l]; for (i=0;ix[i]; Solver_B_linear s; int totaliter = 0; double Cj = param->Cbegin; while (Cj < param->C) { totaliter += s.Solve(2*l, x, linear_term, y, alpha, w, Cj, Cj, param->eps, sii, param->shrinking, param->qpsize); if (Cj*param->Cstep >= param->C) { for (i=0;i<=prob->n;i++) w[i] = 0; for (i=0;i<2*l;i++) { if (alpha[i] >= Cj) alpha[i] = param->C; else alpha[i] *= param->C/Cj; double yalpha = y[i]*alpha[i]; for (const svm_node *px = x[i];px->index != -1;px++) w[px->index] += yalpha*px->value; w[0] += yalpha; } } else { for (i=0;i<2*l;i++) alpha[i] *= param->Cstep; for 
(i=0;i<=prob->n;i++) w[i] *= param->Cstep; } Cj *= param->Cstep; } totaliter += s.Solve(2*l, x, linear_term, y, alpha2, w, param->C, param->C, param->eps, sii, param->shrinking, param->qpsize); //info("\noptimization finished, #iter = %d\n",totaliter); } else { Solver_B s; s.Solve(2*l, BSVR_Q(*prob,*param), linear_term, y, alpha2, param->C, param->C, param->eps, sii, param->shrinking, param->qpsize); } double sum_alpha = 0; for(i=0;iC*l)); delete[] y; delete[] alpha2; delete[] linear_term; } break; case KBB: { Solver_B::SolutionInfo si; int i=0 , j=0 ,k=0 , ll = l*(nr_class - 1); double *alpha2 = Malloc(double, ll); short *y = new short[ll]; for (i=0;iy[q]; else q += count[j]; } Solver_MB s; s.Solve(ll, BONE_CLASS_Q(*prob,*param), -2, alpha2, y, weighted_C, 2*param->eps, &si, param->shrinking, param->qpsize, nr_class, count); //info("obj = %f, rho = %f\n",si.obj,0.0); int *start = Malloc(int,nr_class); start[0] = 0; for(i=1;iy[i]; } Solver_SPOC s; s.Solve(l, ONE_CLASS_Q(*prob, *param), alpha, y, weighted_C, param->eps, param->shrinking, nr_class); free(weighted_C); delete[] y; } break; } } SEXP tron_optim(SEXP x, SEXP r, SEXP c, SEXP y, SEXP K, SEXP colindex, SEXP rowindex, SEXP sparse, SEXP nclass, SEXP countc, SEXP kernel_type, SEXP svm_type, SEXP cost, SEXP eps, SEXP gamma, SEXP degree, SEXP coef0, SEXP Cbegin, SEXP Cstep, SEXP weightlabels, SEXP weights, SEXP nweights, SEXP weightedc, SEXP cache, SEXP epsilon, SEXP qpsize, SEXP shrinking ) { struct svm_parameter param; struct svm_problem prob; int i ,*count = NULL; double *alpha2 = NULL; SEXP alpha3 = NULL; int nr_class; const char* s; struct Solver_B::SolutionInfo si; param.svm_type = *INTEGER(svm_type); param.kernel_type = *INTEGER(kernel_type); param.degree = *INTEGER(degree); param.gamma = *REAL(gamma); param.coef0 = *REAL(coef0); param.cache_size = *REAL(cache); param.eps = *REAL(epsilon); param.C = *REAL(cost); param.Cbegin = *REAL(Cbegin); param.Cstep = *REAL(Cstep); param.K = REAL(K); param.qpsize = *INTEGER(qpsize); nr_class = *INTEGER(nclass); param.nr_weight = *INTEGER(nweights); if (param.nr_weight > 0) { param.weight = (double *) malloc (sizeof(double) * param.nr_weight); memcpy (param.weight, REAL(weights), param.nr_weight * sizeof(double)); param.weight_label = (int *) malloc (sizeof(int) * param.nr_weight); memcpy (param.weight_label, INTEGER(weightlabels), param.nr_weight * sizeof(int)); } param.p = *REAL(eps); param.shrinking = *INTEGER(shrinking); param.lim = 1/(gammafn(param.degree+1)*powi(2,param.degree)); /* set problem */ prob.l = *INTEGER(r); prob.n = *INTEGER(c); prob.y = (double *) malloc (sizeof(double) * prob.l); memcpy(prob.y, REAL(y), prob.l*sizeof(double)); if (*INTEGER(sparse) > 0) prob.x = transsparse(REAL(x), *INTEGER(r), INTEGER(rowindex), INTEGER(colindex)); else prob.x = sparsify(REAL(x), *INTEGER(r), *INTEGER(c)); s = svm_check_parameterb(&prob, ¶m); //if (s) //printf("%s",s); //else { double *weighted_C = Malloc(double, nr_class); memcpy(weighted_C, REAL(weightedc), nr_class*sizeof(double)); if(param.svm_type == 7) { alpha2 = (double *) malloc (sizeof(double) * prob.l*nr_class); } if(param.svm_type == 8) { count = Malloc(int, nr_class); memcpy(count, INTEGER(countc), nr_class*sizeof(int)); alpha2 = (double *) malloc (sizeof(double) * prob.l*(nr_class-1)); } if(param.svm_type == 5||param.svm_type==6) { alpha2 = (double *) malloc (sizeof(double) * prob.l); } tron_run(&prob, ¶m, alpha2, weighted_C , &si, nr_class, count); //} /* clean up memory */ if (param.nr_weight > 0) { free(param.weight); 
free(param.weight_label); } if(param.svm_type == 7) { PROTECT(alpha3 = Rf_allocVector(REALSXP, (nr_class*prob.l + 1))); UNPROTECT(1); for (i = 0; i < prob.l; i++) free (prob.x[i]); for (i = 0; i l; int i; switch(param->svm_type) { case C_SVC: { double Cp,Cn; double *minus_ones = new double[l]; schar *y = new schar[l]; for(i=0;iy[i] > 0) y[i] = +1; else y[i]=-1; } if(param->nr_weight > 0) { Cp = C*param->weight[0]; Cn = C*param->weight[1]; } else Cp = Cn = C; Solver s; //have to weight cost parameter for multiclass. problems s.Solve(l, SVC_Q(*prob,*param,y), minus_ones, y, alpha, Cp, Cn, param->eps, si, param->shrinking); delete[] minus_ones; delete[] y; } break; case NU_SVC: { schar *y = new schar[l]; double nu = param->nu; double sum_pos = nu*l/2; double sum_neg = nu*l/2; for(i=0;iy[i]>0) { y[i] = +1; alpha[i] = min(1.0,sum_pos); sum_pos -= alpha[i]; } else { y[i] = -1; alpha[i] = min(1.0,sum_neg); sum_neg -= alpha[i]; } double *zeros = new double[l]; for(i=0;ieps, si, param->shrinking); double r = si->r; //info("C = %f\n",1/r); for(i=0;irho /= r; si->obj /= (r*r); si->upper_bound_p = 1/r; si->upper_bound_n = 1/r; delete[] y; delete[] zeros; } break; case ONE_CLASS: { double *zeros = new double[l]; schar *ones = new schar[l]; int n = (int)(param->nu*l); // # of alpha's at upper bound // set initial alpha probably usefull for smo for(i=0;inu * l - n; for(i=n+1;ieps, si, param->shrinking); delete[] zeros; delete[] ones; } break; case EPSILON_SVR: { double *alpha2 = new double[2*l]; double *linear_term = new double[2*l]; schar *y = new schar[2*l]; for(i=0;ip - prob->y[i]; y[i] = 1; alpha2[i+l] = 0; linear_term[i+l] = param->p + prob->y[i]; y[i+l] = -1; } Solver s; s.Solve(2*l, SVR_Q(*prob,*param), linear_term, y, alpha2, param->C, param->C, param->eps, si, param->shrinking); double sum_alpha = 0; for(i=0;iC*l)); delete[] alpha2; delete[] linear_term; delete[] y; } break; case NU_SVR: { double C = param->C; double *alpha2 = new double[2*l]; double *linear_term = new double[2*l]; schar *y = new schar[2*l]; double sum = C * param->nu * l / 2; for(i=0;iy[i]; y[i] = 1; linear_term[i+l] = prob->y[i]; y[i+l] = -1; } Solver_NU s; s.Solve(2*l, SVR_Q(*prob,*param), linear_term, y, alpha2, C, C, param->eps, si, param->shrinking); //info("epsilon = %f\n",-si->r); for(i=0;i 0) { param.weight = (double *) malloc (sizeof(double) * param.nr_weight); memcpy (param.weight, REAL(weights), param.nr_weight * sizeof(double)); param.weight_label = (int *) malloc (sizeof(int) * param.nr_weight); memcpy (param.weight_label, INTEGER(weightlabels), param.nr_weight * sizeof(int)); } param.p = *REAL(eps); param.shrinking = *INTEGER(shrinking); param.lim = 1/(gammafn(param.degree+1)*powi(2,param.degree)); /* set problem */ prob.l = *INTEGER(r); prob.y = REAL(y); prob.n = *INTEGER(c); if (*INTEGER(sparse) > 0) prob.x = transsparse(REAL(x), *INTEGER(r), INTEGER(rowindex), INTEGER(colindex)); else prob.x = sparsify(REAL(x), *INTEGER(r), *INTEGER(c)); double *alpha2 = (double *) malloc (sizeof(double) * prob.l); s = svm_check_parameter(&prob, ¶m); //if (s) { //printf("%s",s); //} //else { solve_smo(&prob, ¶m, alpha2, &si, *REAL(cost), REAL(linear_term)); //} PROTECT(alpha = Rf_allocVector(REALSXP, prob.l+2)); /* clean up memory */ if (param.nr_weight > 0) { free(param.weight); free(param.weight_label); } for (i = 0; i < prob.l; i++) {free (prob.x[i]); REAL(alpha)[i] = *(alpha2+i); } free (prob.x); REAL(alpha)[prob.l] = si.rho; REAL(alpha)[prob.l+1] = si.obj; free(alpha2); UNPROTECT(1); return alpha; } } 
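// The sparsify() and transsparse() helpers above turn the dense (or column-compressed)
// matrices handed over from R into the row-wise sparse form the solvers consume: each row
// becomes an array of index/value pairs terminated by an entry whose index is -1, and every
// routine that touches a row simply walks it until that sentinel. The self-contained sketch
// below illustrates that layout with a locally defined node struct and a small dot-product
// consumer; sparse_node, sparsify_row and dot_with_dense are names chosen here for
// illustration only and are not part of the package.

#include <cstdio>
#include <vector>

// Minimal stand-in for the svm_node layout used above: a sparse entry is a
// (column index, value) pair, and a row is terminated by index == -1.
struct sparse_node { int index; double value; };

// Turn one dense row into a sentinel-terminated sparse row (zeros are dropped).
static std::vector<sparse_node> sparsify_row(const double *row, int ncol) {
    std::vector<sparse_node> out;
    for (int j = 0; j < ncol; j++)
        if (row[j] != 0.0)
            out.push_back({j, row[j]});
    out.push_back({-1, 0.0});   // termination element
    return out;
}

// A consumer walks the row until the -1 sentinel, e.g. to accumulate a dot product
// against a dense weight vector w (the linear solver walks rows the same way when
// it updates w).
static double dot_with_dense(const sparse_node *px, const double *w) {
    double s = 0.0;
    for (; px->index != -1; ++px)
        s += w[px->index] * px->value;
    return s;
}

int main() {
    const double row[] = {0.0, 2.5, 0.0, -1.0};
    const double w[]   = {1.0, 1.0, 1.0, 1.0};
    std::vector<sparse_node> sp = sparsify_row(row, 4);
    std::printf("dot = %g\n", dot_with_dense(sp.data(), w));   // prints 1.5
    return 0;
}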
kernlab/src/ctable.cpp0000644000175100001440000000661712234152620014442 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the Suffix Array based String Kernel. * * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. * * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/ChildTable.cpp // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 #ifndef CTABLE_CPP #define CTABLE_CPP #include "ctable.h" #include /** * Return the value of idx-th "up" field of child table. * val = childtab[idx -1]; * * \param idx - (IN) The index of child table. * \param val - (OUT) The value of idx-th entry in child table's "up" field. */ ErrorCode ChildTable::up(const UInt32 &idx, UInt32 &val){ if(idx == size()) { // Special case: To get the first 0-index val = (*this)[idx-1]; return NOERROR; } // svnvish: BUGBUG // Do we need to this in production code? UInt32 lcp_idx = 0, lcp_prev_idx = 0; lcp_idx = _lcptab[idx]; lcp_prev_idx = _lcptab[idx-1]; assert(lcp_prev_idx > lcp_idx); val = (*this)[idx-1]; return NOERROR; } /** * Return the value of idx-th "down" field of child table. Deprecated. * Instead use val = childtab[idx]; * * \param idx - (IN) The index of child table. * \param val - (OUT) The value of idx-th entry in child table's "down" field. */ ErrorCode ChildTable::down(const UInt32 &idx, UInt32 &val){ // For a l-interval, l-[i..j], childtab[i].down == childtab[j+1].up // If l-[i..j] is last child-interval of its parent OR 0-[0..n], // childtab[i].nextlIndex == childtab[i].down // svnvish: BUGBUG // Do we need to this in production code? // UInt32 lcp_idx = 0, lcp_nextidx = 0; // lcp_nextidx = _lcptab[(*this)[idx]]; // lcp_idx = _lcptab[idx]; // assert(lcp_nextidx > lcp_idx); // childtab[i].down := childtab[i].nextlIndex val = (*this)[idx]; return NOERROR; } /** * Return the first l-index of a given l-[i..j] interval. * * \param i - (IN) Left bound of l-[i..j] * \param j - (IN) Right bound of l-[i..j] * \param idx - (OUT) The first l-index. */ ErrorCode ChildTable::l_idx(const UInt32 &i, const UInt32 &j, UInt32 &idx){ UInt32 up = (*this)[j]; if(i < up && up <= j){ idx = up; }else { idx = (*this)[i]; } return NOERROR; } /** * Dump array elements to output stream * * \param os - (IN) Output stream. * \param ct - (IN) ChildTable object. */ std::ostream& operator << (std::ostream& os, const ChildTable& ct){ for( UInt32 i = 0; i < ct.size(); i++ ){ os << "ct[ " << i << "]: " << ct[i] << std::endl; } return os; } #endif kernlab/src/isafactory.h0000644000175100001440000000306412234152620015012 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. 
If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the Suffix Array based String Kernel. * * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. * * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/I_SAFactory.h // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 //' Interface for Enhanced Suffix Array construction algorithms #ifndef I_SAFACTORY_H #define I_SAFACTORY_H #include "datatype.h" #include "errorcode.h" class I_SAFactory { public: ///Constructor I_SAFactory(){} ///Destructor virtual ~I_SAFactory(){} ///Methods virtual ErrorCode ConstructSA(SYMBOL *text, const UInt32 &len, UInt32 *&array) = 0; }; #endif kernlab/src/dtrqsol.c0000644000175100001440000000333611304023134014325 0ustar hornikusers#include #include extern double mymax(double, double); /* LEVEL 1 BLAS */ /*extern double ddot_(int *, double *, int *, double *, int *);*/ void dtrqsol(int n, double *x, double *p, double delta, double *sigma) { /* c ********** c c Subroutine dtrqsol c c This subroutine computes the largest (non-negative) solution c of the quadratic trust region equation c c ||x + sigma*p|| = delta. c c The code is only guaranteed to produce a non-negative solution c if ||x|| <= delta, and p != 0. If the trust region equation has c no solution, sigma = 0. c c parameters: c c n is an integer variable. c On entry n is the number of variables. c On exit n is unchanged. c c x is a double precision array of dimension n. c On entry x must contain the vector x. c On exit x is unchanged. c c p is a double precision array of dimension n. c On entry p must contain the vector p. c On exit p is unchanged. c c delta is a double precision variable. c On entry delta specifies the scalar delta. c On exit delta is unchanged. c c sigma is a double precision variable. c On entry sigma need not be specified. c On exit sigma contains the non-negative solution. c c ********** */ int inc = 1; double dsq = delta*delta, ptp, ptx, rad, xtx; ptx = F77_CALL(ddot)(&n, p, &inc, x, &inc); ptp = F77_CALL(ddot)(&n, p, &inc, p, &inc); xtx = F77_CALL(ddot)(&n, x, &inc, x, &inc); /* Guard against abnormal cases. 
*/ rad = ptx*ptx + ptp*(dsq - xtx); rad = sqrt(mymax(rad, 0)); if (ptx > 0) *sigma = (dsq - xtx)/(ptx + rad); else if (rad > 0) *sigma = (rad - ptx)/ptp; else *sigma = 0; } kernlab/src/stringk.c0000644000175100001440000001101714656662522014340 0ustar hornikusers#include #include #include #include #include #include #include #include #include #include double ***cache ; double kaux (const char *u, int p, const char *v, int q, int n, double lambda) { register int j; double tmp; /* case 1: if a full substring length is processed, return*/ if (n == 0) return (1.0); /* check, if the value was already computed in a previous computation */ if (cache [n] [p] [q] != -1.0) return (cache [n] [p] [q]); /* case 2: at least one substring is to short */ if (p < n || q < n) return (0.0); /* case 3: recursion */ for (j= 0, tmp = 0; j < q; j++) { if (v [j] == u [p - 1]) tmp += kaux (u, p - 1, v, j, n - 1, lambda) * pow (lambda, (float) (q - j + 1)); } cache [n] [p] [q] = lambda * kaux (u, p - 1, v, q, n, lambda) + tmp; return (cache [n] [p] [q]); } double seqk (const char *u, int p, const char *v, int q, int n, double lambda) { register int j; double kp; /* the simple case: (at least) one string is to short */ if (p < n || q < n) return (0.0); /* the recursion: use kaux for the t'th substrings*/ for (j = 0, kp = 0.0; j < q; j++) { if (v [j] == u [p - 1]) kp += kaux (u, p - 1, v, j, n - 1, lambda) * lambda * lambda; } return (seqk (u, p - 1, v, q, n, lambda) + kp); } /* recursively computes the subsequence kernel between s1 and s2 where subsequences of exactly length n are considered */ SEXP subsequencek(SEXP s1, SEXP s2, SEXP l1, SEXP l2, SEXP nr, SEXP lambdar) { const char *u = CHAR(STRING_ELT(s1, 0)); const char *v = CHAR(STRING_ELT(s2, 0)); int p = *INTEGER(l1); int q = *INTEGER(l2); int n = *INTEGER(nr); double lambda = *REAL(lambdar); int i, j, k; SEXP ret; /* allocate memory for auxiallary cache variable */ cache = (double ***) malloc (n * sizeof (double **)); for (i = 1; i < n; i++) { cache [i] = (double **) malloc (p * sizeof (double *)); for (j = 0; j < p; j++) { cache [i] [j] = (double *) malloc (q * sizeof (double)); for (k = 0; k < q; k++) cache [i] [j] [k] = -1.0; } } PROTECT(ret = Rf_allocVector(REALSXP, 1)); /* invoke recursion */ REAL(ret)[0] = seqk (u, p, v, q, n, lambda); /* free memory */ for (i = 1; i < n; i++) { for (j = 0; j < p; j++) free (cache [i] [j]); free (cache [i]); } free (cache); UNPROTECT(1); return (ret); } /* computes the substring kernel between s1 and s2 where substrings up to length n are considered */ SEXP fullsubstringk (SEXP s1, SEXP s2, SEXP l1, SEXP l2, SEXP nr, SEXP lambdar) { const char *u = CHAR(STRING_ELT(s1, 0)); const char *v = CHAR(STRING_ELT(s2, 0)); int p = *INTEGER(l1); int q = *INTEGER(l2); int n = *INTEGER(nr); double lambda = *REAL(lambdar); register int i, j, k; double ret, tmp; SEXP retk; /* computes the substring kernel */ for (ret = 0.0, i = 0; i < p; i++) { for (j = 0; j < q; j++) if (u [i] == v [j]) { for (k = 0, tmp = lambda * lambda; /* starting condition */ (i + k < p) && (j + k < q) && (u [i + k] == v [j + k]) && (k < n); /* stop conditions */ k++, tmp *= (lambda * lambda)) /* update per iteration */ ret += tmp; } } PROTECT(retk = Rf_allocVector(REALSXP, 1)); REAL(retk)[0] = ret; UNPROTECT(1); return (retk); } /* computes the substring kernel between s1 and s2 where substrings of exactly length n are considered */ SEXP substringk (SEXP s1, SEXP s2, SEXP l1, SEXP l2, SEXP nr, SEXP lambdar) { const char *u = CHAR(STRING_ELT(s1, 0)); const char *v 
= CHAR(STRING_ELT(s2, 0)); int p = *INTEGER(l1); int q = *INTEGER(l2); int n = *INTEGER(nr); double lambda = *REAL(lambdar); SEXP retk; register int i, j, k; double ret, tmp; /* computes the substring kernel */ for (ret = 0.0, i = 0; i < p; i++) { for (j = 0; j < q; j++) { for (k = 0, tmp = lambda * lambda; /* starting condition */ (i + k < p) && (j + k < q) && (u [i + k] == v [j + k]) && (k < n); /* stop conditions */ k++, tmp *= (lambda * lambda)); /* update per iteration */ if (k == n) ret += tmp; /* update features in case of full match */ } } PROTECT(retk = Rf_allocVector(REALSXP, 1)); REAL(retk)[0] = ret; UNPROTECT(1); return (retk); } kernlab/src/stack.h0000644000175100001440000000623313333062601013754 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the MSufSort suffix sorting algorithm (Version 2.2). * * The Initial Developer of the Original Code is * Michael A. Maniscalco * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. * * Contributor(s): * * Michael A. Maniscalco * * ***** END LICENSE BLOCK ***** */ #ifndef MSUFSORT_STACK_H #define MSUFSORT_STACK_H //============================================================================================= // A quick and dirty stack class for use with the MSufSort algorithm // // Author: M.A. Maniscalco // Date: 7/30/04 // email: michael@www.michael-maniscalco.com // //============================================================================================= #include "memory.h" template class Stack { public: Stack(unsigned int initialSize, unsigned int maxExpandSize, bool preAllocate = false): m_initialSize(initialSize), m_maxExpandSize(maxExpandSize), m_preAllocate(preAllocate) { Initialize(); } virtual ~Stack(){SetSize(0);} void Push(T value); T & Pop(); T & Top(); void SetSize(unsigned int stackSize); void Initialize(); unsigned int Count(); void Clear(); T * m_stack; T * m_stackPtr; T * m_endOfStack; unsigned int m_stackSize; unsigned int m_initialSize; unsigned int m_maxExpandSize; bool m_preAllocate; }; template inline void Stack::Clear() { m_stackPtr = m_stack; } template inline unsigned int Stack::Count() { return (unsigned int)(m_stackPtr - m_stack); } template inline void Stack::Initialize() { m_stack = m_endOfStack = m_stackPtr = 0; m_stackSize = 0; if (m_preAllocate) SetSize(m_initialSize); } template inline void Stack::Push(T value) { if (m_stackPtr >= m_endOfStack) { unsigned int newSize = (m_stackSize < m_maxExpandSize) ? 
m_stackSize + m_maxExpandSize : (m_stackSize << 1); SetSize(newSize); } *(m_stackPtr++) = value; } template inline T & Stack::Pop() { return *(--m_stackPtr); } template inline T & Stack::Top() { return *(m_stackPtr - 1); } template inline void Stack::SetSize(unsigned int stackSize) { if (m_stackSize == stackSize) return; T * newStack = 0; if (stackSize) { newStack = new T[stackSize]; unsigned int bytesToCopy = (unsigned int)(m_stackPtr - m_stack) * (unsigned int)sizeof(T); if (bytesToCopy) memcpy((void *)newStack, m_stack, bytesToCopy); m_stackPtr = &newStack[m_stackPtr - m_stack]; m_endOfStack = &newStack[stackSize]; m_stackSize = stackSize; } if (m_stack) delete [] m_stack; m_stack = newStack; } #endif kernlab/src/datatype.h0000644000175100001440000000407712234152620014466 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the Suffix Array based String Kernel. * * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. * * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/DataType.h // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 // 11 Oct 2006 #ifndef DATATYPE_H #define DATATYPE_H // #define UInt32 unsigned int // #define UInt64 unsigned long long // #define Byte1 unsigned char // #define Byte2 unsigned short // #define Real double typedef unsigned int UInt32; // Seems that even using __extension__ g++ 4.6 will complain that // ISO C++ 1998 does not support 'long long' ... /* #if defined __GNUC__ && __GNUC__ >= 2 __extension__ typedef unsigned long long UInt64; #else typedef unsigned long long UInt64; #endif */ #include typedef uint64_t UInt64; typedef unsigned char Byte1; typedef unsigned short Byte2; typedef double Real; // #define SENTINEL '\n' // #define SENTINEL2 '\0' const char SENTINEL = '\n'; const char SENTINEL2 = '\0'; #ifndef UNICODE // # define SYMBOL Byte1 typedef Byte1 SYMBOL; #else // # define SYMBOL Byte2 typedef Byte2 SYMBOL; #endif #endif kernlab/src/ctable.h0000644000175100001440000000430312234152620014075 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the Suffix Array based String Kernel. * * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). 
* Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. * * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/ChildTable.h // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 #ifndef CTABLE_H #define CTABLE_H #include #include #include "datatype.h" #include "errorcode.h" #include "lcp.h" // using namespace std; /** * ChildTable represents the parent-child relationship between * the lcp-intervals of suffix array. * Reference: AboKurOhl04 */ class ChildTable : public std::vector { private: // childtab needs lcptab to differentiate between up, down, and // nextlIndex values. LCP& _lcptab; public: // Constructors ChildTable(const UInt32 &size, LCP& lcptab): std::vector(size), _lcptab(lcptab){} // Destructor virtual ~ChildTable() {} // Get first l-index of an l-[i..j] interval ErrorCode l_idx(const UInt32 &i, const UInt32 &j, UInt32 &idx); // .up field ErrorCode up(const UInt32 &idx, UInt32 &val); // .down field ErrorCode down(const UInt32 &idx, UInt32 &val); // .next field can be retrieved by accessing the array directly. friend std::ostream& operator << (std::ostream& os, const ChildTable& ct); }; #endif kernlab/src/stringkernel.h0000644000175100001440000000542612761213650015367 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the Suffix Array based String Kernel. * * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. * * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/StringKernel.h // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 // 12 Jul 2006 // 10 Aug 2006 #ifndef STRINGKERNEL_H #define STRINGKERNEL_H #include "datatype.h" #include "errorcode.h" #include "esa.h" #include "isafactory.h" #include "ilcpfactory.h" #include "iweightfactory.h" //#include "W_msufsort.h" #include "wkasailcp.h" #include "cweight.h" #include "expdecayweight.h" #include "brweight.h" #include "kspectrumweight.h" //' Types of substring weighting functions enum WeightFunction{CONSTANT, EXPDECAY, KSPECTRUM, BOUNDRANGE}; using namespace std; class StringKernel { public: /// Variables ESA *esa; I_WeightFactory *weigher; Real *val; //' val array. Storing precomputed val(t) values. Real *lvs; //' leaves array. Storing weights for leaves. 
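//' Note: PrecomputeVal() fills val[] with one precomputed contribution per lcp-interval (internal node) of the suffix array, while lvs[] holds the per-leaf weights installed by Set_Lvs(); Compute_K() combines the two while matching xprime against the ESA, so the per-node work is done once, independently of the queries.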
/// Constructors StringKernel(); //' Given contructed suffix array StringKernel(ESA *esa_, int weightfn, Real param, int verb=INFO); //' Given text, build suffix array for it StringKernel(const UInt32 &size, SYMBOL *text, int weightfn, Real param, int verb=INFO); /// Destructor virtual ~StringKernel(); //' Methods /// Precompute the contribution of each intervals (or internal nodes) void PrecomputeVal(); /// Compute Kernel matrix void Compute_K(SYMBOL *xprime, const UInt32 &xprime_len, Real &value); /// Set leaves array, lvs[] void Set_Lvs(const Real *leafWeight, const UInt32 *len, const UInt32 &m); /// Set leaves array as lvs[i]=i for i=0 to esa->length void Set_Lvs(); private: int _verb; /// An iterative auxiliary function used in PrecomputeVal() void IterativeCompute(const UInt32 &left, const UInt32 &right); }; #endif kernlab/src/init.c0000644000175100001440000000234413271617375013623 0ustar hornikusers#include #include #include // for NULL #include /* .Call calls */ extern SEXP fullsubstringk(SEXP, SEXP, SEXP, SEXP, SEXP, SEXP); extern SEXP smo_optim(SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP); extern SEXP stringtv(SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP); extern SEXP subsequencek(SEXP, SEXP, SEXP, SEXP, SEXP, SEXP); extern SEXP substringk(SEXP, SEXP, SEXP, SEXP, SEXP, SEXP); extern SEXP tron_optim(SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP, SEXP); static const R_CallMethodDef CallEntries[] = { {"fullsubstringk", (DL_FUNC) &fullsubstringk, 6}, {"smo_optim", (DL_FUNC) &smo_optim, 23}, {"stringtv", (DL_FUNC) &stringtv, 7}, {"subsequencek", (DL_FUNC) &subsequencek, 6}, {"substringk", (DL_FUNC) &substringk, 6}, {"tron_optim", (DL_FUNC) &tron_optim, 27}, {NULL, NULL, 0} }; void R_init_kernlab(DllInfo *dll) { R_registerRoutines(dll, NULL, CallEntries, NULL, NULL); R_useDynamicSymbols(dll, FALSE); } kernlab/src/ilcpfactory.h0000644000175100001440000000304512234152620015164 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the Suffix Array based String Kernel. * * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. 
* * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/I_LCPFactory.h // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 #ifndef ILCPFACTORY_H #define ILCPFACTORY_H #include "datatype.h" #include "errorcode.h" #include "lcp.h" class I_LCPFactory { public: /// Constructor I_LCPFactory(){} /// Destructor virtual ~I_LCPFactory(){} /// Methods virtual ErrorCode ComputeLCP(const SYMBOL *text, const UInt32 &length, const UInt32 *sa, LCP& lcp) = 0; }; #endif kernlab/src/wkasailcp.cpp0000644000175100001440000000452112234152620015156 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the Suffix Array based String Kernel. * * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. * * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/W_kasai_lcp.cpp // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 // 11 Oct 2006 #ifndef W_KASAI_LCP_CPP #define W_KASAI_LCP_CPP #include "wkasailcp.h" #include /** * Compute LCP array. Algorithm adapted from Manzini's SWAT2004 paper. * Modification: array indexing changed from 1-based to 0-based. * * \param text - (IN) The text which corresponds to SA. * \param len - (IN) Length of text. * \param sa - (IN) Suffix array. * \param lcp - (OUT) Computed LCP array. */ ErrorCode W_kasai_lcp::ComputeLCP(const SYMBOL *text, const UInt32 &len, const UInt32 *sa, LCP& lcp) { //chteo: [111006:0141] //std::vector isa(len); UInt32 *isa = new UInt32[len]; //' Step 1: Compute inverse suffix array for(UInt32 i=0; i0) h--; } //chteo: [111006:0141] delete [] isa; isa = 0; return NOERROR; } #endif kernlab/src/Makevars0000644000175100001440000000006011470002321014153 0ustar hornikusersPKG_LIBS = $(LAPACK_LIBS) $(BLAS_LIBS) $(FLIBS) kernlab/src/esa.cpp0000644000175100001440000007370512761213650013770 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the Suffix Array based String Kernel. * * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. 
All Rights Reserved. * * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/ESA.cpp // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 // 11 Oct 2006 #ifndef ESA_CPP #define ESA_CPP #include #include #include #include #include #include #include #include #include "esa.h" #ifdef SSARRAY #ifdef __cplusplus extern "C" { #endif #include "sarray.h" #ifdef __cplusplus } #endif #else #include "wmsufsort.h" #endif #include "wkasailcp.h" #define MIN(x,y) (((x) < (y)) ? (x):(y)) ESA::ESA(const UInt32 & size_, SYMBOL *text_, int verb): _verb(verb), size(size_), text(text_), suftab(0), lcptab(size_), childtab(size_, lcptab), suflink(0), bcktab_depth(0), bcktab_size(0), bcktab_val(0), bcktab_key4(0), coef4(0), bcktab_key8(0), coef8(0) { I_SAFactory* sa_fac = 0; I_LCPFactory* lcp_fac = 0; //' input validation assert(size > 0); // if(text[size-1] != SENTINEL) // text = (SYMBOL*)(std::string(text)+SENTINEL).c_str(); assert(text[size-1] == SENTINEL); // CW Sanity test for (unsigned int i = 0; i < size-1 ; i++) { assert(text[i] != 0); } // for (int i = 0; i < size ; i++) { // printf("%c : %i\n", text[i], (int) text[i]); // } #if SSARRAY suftab = new int[size]; for (unsigned int i = 0; i < size - 1 ; i++) { suftab[i] = text[i]; } suftab[size-1] = 0; ssarray((int*) suftab); #else //' Construct Suffix Array if(!sa_fac){ sa_fac = new W_msufsort(); } // CW Try // size = 10; // text[size-1] = 0; suftab = new UInt32[size]; sa_fac->ConstructSA(text, size, suftab); if(sa_fac) { delete sa_fac; sa_fac = NULL; } #endif //' Compute LCP array if(!lcp_fac){ lcp_fac = new W_kasai_lcp(); } // CW lcp_fac->ComputeLCP(text, size, suftab, lcptab); lcp_fac->ComputeLCP(text, size, (UInt32 *) suftab, lcptab); if(lcp_fac) { delete lcp_fac; lcp_fac = NULL; } //' Compress LCP array lcptab.compact(); //' Construct Child Table ConstructChildTable(); #ifdef SLINK //' Construct Suffix link table //' The suffix link interval, (l-1)-[p..q] of interval l-[i..j] can be retrieved //' by following method: //' Let k be the firstlIndex of l-[i..j], p = suflink[2*k], q = suflink[2*k+1]. suflink = new UInt32[2 * size + 2]; //' extra space for extra sentinel char! memset(suflink,0,sizeof(UInt32)*(2 * size +2)); ConstructSuflink(); #else //' Threshold for constructing bucket table if(size >= 1024) ConstructBcktab(); //' Otherwise, just do plain binary search to search for suffix link interval #endif } ESA::~ESA() { //if(text) { delete text; text = 0;} if(suflink) { delete [] suflink; suflink=0; } if(suftab) { delete [] suftab; suftab=0; } if(bcktab_val) { delete [] bcktab_val; bcktab_val=0; } if(bcktab_key4) { delete [] bcktab_key4; bcktab_key4=0;} if(coef4) { delete [] coef4; coef4 = 0; } if(bcktab_key8) { delete [] bcktab_key8; bcktab_key8=0;} if(coef8) { delete [] coef8; coef8 = 0; } } /// The lcp-interval structure. Used in ESA::ConstructChildTable() class lcp_interval { public: UInt32 lcp; UInt32 lb; UInt32 rb; std::vector child; /// Constructors lcp_interval(){} lcp_interval(const UInt32 &lcp_, const UInt32 lb_, const UInt32 &rb_, lcp_interval *itv) { lcp = lcp_; lb = lb_; rb = rb_; if(itv) child.push_back(itv); } /// Destructor ~lcp_interval(){ for(UInt32 i=0; i< child.size(); i++) delete child[i]; child.clear(); } }; /** * Construct 3-fields-merged child table. 
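* The three fields (up, down and nextlIndex) share a single array: for an lcp-interval l-[i..j] the first l-index is written to childtab[j] (read back as the .up value of index j+1) and to childtab[i] (its .down value), and the remaining child intervals are chained through nextlIndex entries; lcptab is consulted afterwards to tell the three cases apart (cf. ChildTable::up(), down() and l_idx() in ctable.cpp).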
*/ ErrorCode ESA::ConstructChildTable(){ // Input validation assert(text); assert(suftab); //' stack for lcp-intervals std::stack lit; //' Refer to: Abo05::Algorithm 4.5.2. lcp_interval *lastInterval = 0; lcp_interval *new_itv = 0; lit.push(new lcp_interval(0, 0, 0, 0)); //' root interval // Variables to handle 0-idx bool first = true; UInt32 prev_0idx = 0; UInt32 first0idx = 0; // Loop thru and process each index. for(UInt32 idx = 1; idx < size + 1; idx++) { UInt32 tmp_lb = idx - 1; //svnvish: BUGBUG // We just assume that the lcp of size + 1 is zero. // This simplifies the logic of the code UInt32 lcp_idx = 0; if(idx < size){ lcp_idx = lcptab[idx]; } while (lcp_idx < lit.top()->lcp){ lastInterval = lit.top(); lit.pop(); lastInterval->rb = idx - 1; // svnvish: Begin process UInt32 n_child = lastInterval->child.size(); UInt32 i = lastInterval->lb; UInt32 j = lastInterval->rb; // idx -1 ? //Step 1: Set childtab[i].down or childtab[j+1].up to first l-index UInt32 first_l_index = i+1; if(n_child && (lastInterval->child[0]->lb == i)) first_l_index = lastInterval->child[0]->rb+1; //svnvish: BUGBUG // ec = childtab.Set_Up(lastInterval->rb+1, first_l_index); // ec = childtab.Set_Down(lastInterval->lb, first_l_index); childtab[lastInterval->rb] = first_l_index; childtab[lastInterval->lb] = first_l_index; // Now we need to set the NextlIndex fields The main problem here // is that the child intervals might not be contiguous UInt32 ptr = i+1; UInt32 child_count = 0; while(ptr < j){ UInt32 first = j; UInt32 last = j; // Get next child to process if(n_child - child_count){ first = lastInterval->child[child_count]->lb; last = lastInterval->child[child_count]->rb; child_count++; } // Eat away singleton intervals while(ptr < first){ childtab[ptr] = ptr + 1; ptr++; } // Handle an child interval and make appropriate entries in // child table ptr = last + 1; if(last < j){ childtab[first] = ptr; } } //' Free lcp_intervals for(UInt32 child_cnt = 0; child_cnt < n_child; child_cnt++) { delete lastInterval->child[child_cnt]; lastInterval->child[child_cnt] = 0; } // svnvish: End process tmp_lb = lastInterval->lb; if(lcp_idx <= lit.top()->lcp) { lit.top()->child.push_back(lastInterval); lastInterval = 0; } }// while if(lcp_idx > lit.top()->lcp) { new_itv = new lcp_interval(lcp_idx, tmp_lb,0, lastInterval); lit.push(new_itv); new_itv = 0; lastInterval = 0; } // Handle the 0-indices. // 0-indices := { i | LCP[i]=0, \forall i = 0,...,n-1} if((idx < size) && (lcp_idx == 0)) { // svnvish: BUGBUG // ec = childtab.Set_NextlIndex(prev_0_index,k); childtab[prev_0idx] = idx; prev_0idx = idx; // Handle first 0-index specially // Store in childtab[(size-1)+1].up if(first){ // svnvish: BUGBUG // ec = childtab.Set_Up(size,k); CHECKERROR(ec); first0idx = idx; first = false; } } } // for childtab[size-1] = first0idx; // svnvish: All remaining elements in the stack are ignored. // chteo: Free all remaining elements in the stack. while(!lit.empty()) { lastInterval = lit.top(); delete lastInterval; lit.pop(); } assert(lit.empty()); return NOERROR; } #ifdef SLINK /** * Get suffix link interval, [sl_i..sl_j], of a given interval, [i..j]. 
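* Under SLINK the interval is read straight from the precomputed table: with k the first l-index of [i..j], sl_i = suflink[2*k] and sl_j = suflink[2*k+1] (see ConstructSuflink() below).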
* * \param i - (IN) Left bound of interval [i..j] * \param j - (IN) Right bound of interval [i..j] * \param sl_i - (OUT) Left bound of suffix link interval [sl_i..sl_j] * \param sl_j - (OUT) Right bound of suffix link interval [sl_i..sl_j] */ ErrorCode ESA::GetSuflink(const UInt32 &i, const UInt32 &j, UInt32 &sl_i, UInt32 &sl_j) { //' Input validation assert(i=0 && j= (j-i)); return NOERROR; } #elif defined(LSEARCH) /** * "Linear" Search version of GetSuflink. Suffix link intervals are not stored * explicitly but searched when needed. * * Note: Slow!!! especially in the case of long and similar texts. */ ErrorCode ESA::GetSuflink(const UInt32 &i, const UInt32 &j, UInt32 &sl_i, UInt32 &sl_j) { //' Variables SYMBOL ch; UInt32 lcp=0; UInt32 final_lcp = 0; UInt32 lb = 0, rb = size-1; //' root interval //' First suflink interval char := Second char of original interval ch = text[suftab[i]+1]; //' lcp of suffix link interval := lcp of original interval - 1 final_lcp = 0; GetLcp(i,j,final_lcp); final_lcp = (final_lcp > 0) ? final_lcp-1 : 0; //' Searching for suffix link interval sl_i = lb; sl_j = rb; while(lcp < final_lcp) { GetIntervalByChar(lb,rb,ch,lcp,sl_i, sl_j); GetLcp(sl_i, sl_j, lcp); lb = sl_i; rb = sl_j; ch = text[suftab[i]+lcp+1]; } assert(sl_j > sl_i); assert((sl_j-sl_i) >= (j-i)); return NOERROR; } #else /** * Construct bucket table. * * \param alpahabet_size - Size of alphabet set */ ErrorCode ESA::ConstructBcktab(const UInt32 &alphabet_size) { UInt32 MAX_DEPTH = 8; //' when alphabet_size is 256 UInt32 sizeof_uint4 = 4; //' 4 bytes integer UInt32 sizeof_uint8 = 8; //' 8 bytes integer UInt32 sizeof_key = sizeof_uint8; //' Step 1: Determine the bcktab_depth for(bcktab_depth = MAX_DEPTH; bcktab_depth >0; bcktab_depth--) { bcktab_size = 0; for(UInt32 i=0; i < size; i++) if(lcptab[i] < bcktab_depth) bcktab_size++; if(bcktab_depth <= 4) sizeof_key = sizeof_uint4; if(bcktab_size <= size/(sizeof_key + sizeof_uint4)) break; } //' Step 2: Allocate memory for bcktab_key and bcktab_val. //' Step 3: Precompute coefficients for computing hash values of prefixes later. //' Step 4: Collect the prefixes with lcp <= bcktab_depth and //' convert them into hash value. if(sizeof_key == sizeof_uint4) { //' (2) bcktab_key4 = new UInt32[bcktab_size]; bcktab_val = new UInt32[bcktab_size]; assert(bcktab_key4 && bcktab_val); //' (3) coef4 = new UInt32[4]; coef4[0] = 1; for(UInt32 i=1; i < 4; i++) coef4[i] = coef4[i-1]*alphabet_size; //' (4) for(UInt32 i=0, k=0; i < size; i++) { if(lcptab[i] < bcktab_depth) { UInt32 c = MIN((size-suftab[i]), bcktab_depth); hash_value4 = 0; for(UInt32 j=0; j < c; j++) hash_value4 += text[suftab[i]+j]*coef4[bcktab_depth-1-j]; bcktab_key4[k] = hash_value4; bcktab_val[k] = i; k++; } } } else { //' (2) bcktab_key8 = new UInt64[bcktab_size]; bcktab_val = new UInt32[bcktab_size]; assert(bcktab_key8 && bcktab_val); //' (3) coef8 = new UInt64[9]; coef8[0] = 1; for(UInt32 i=1; i < 9; i++) coef8[i] = coef8[i-1]*alphabet_size; //' (4) for(UInt32 i=0, k=0; i < size; i++) { if(lcptab[i] < bcktab_depth) { UInt32 c = MIN( (size-suftab[i]), bcktab_depth); hash_value8 = 0; for(UInt32 j=0; j < c; j++) hash_value8 += text[suftab[i]+j]*coef8[bcktab_depth-1-j]; bcktab_key8[k] = hash_value8; bcktab_val[k] = i; k++; } } } //' check if bcktab in ascending order // for(UInt32 ii=1; ii= 1); //' the interval [i..j] must has at least 2 suffixes. 
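//' Strategy: the suffix-link interval of an interval whose suffixes share the prefix c.w is the interval of w, so the prefix starting at text[suftab[i]+1] (length orig_lcp-1) is looked up in the suffix array: the bucket table narrows the range to [left..right] by hashing its first min(orig_lcp-1, bcktab_depth) characters, and an lcp-aware binary search (maintaining llcp/rlcp and comparing only from their minimum onwards) then locates the leftmost and rightmost suffixes carrying that prefix, which become sl_i and sl_j.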
//' Variables UInt32 left=0, mid=0, right=0, tmp_right=0; UInt32 llcp=0, mlcp=0, rlcp=0; UInt32 orig_lcp = 0; UInt32 c = 0; UInt32 offset = 0; GetLcp(i, j, orig_lcp); if(orig_lcp <= 1) { sl_i = 0; sl_j = size-1; return NOERROR; } //' Default left = 0; right = size-1; //' Make use of bcktab here. Maximum lcp value is always 1 less than bcktab_depth. //' This is because including lcp values equal to bcktab_depth will violate //' the constraint of prefix uniqueness. offset = MIN(orig_lcp-1, bcktab_depth); assert(offset>=0); if(bcktab_key4) { hash_value4 = 0; for(UInt32 cnt=0; cnt < offset; cnt++) hash_value4 += coef4[bcktab_depth-1-cnt]*text[suftab[i]+1+cnt]; //' lower bound return the exact position of of target, if found one UInt32 *p = std::lower_bound(bcktab_key4, bcktab_key4+bcktab_size, hash_value4); left = bcktab_val[p - bcktab_key4]; //' this hash value is used to find the right bound of target interval hash_value4 += coef4[bcktab_depth-offset]; //' upper bound return the smallest value > than target. UInt32 *q = std::upper_bound(p, bcktab_key4+bcktab_size, hash_value4); if(q == bcktab_key4+bcktab_size) right = size-1; else right = bcktab_val[q - bcktab_key4] - 1; } else if(bcktab_key8) { hash_value8 = 0; for(UInt32 cnt=0; cnt < offset; cnt++) hash_value8 += coef8[bcktab_depth-1-cnt]*text[suftab[i]+1+cnt]; //' lower bound return the exact position of of target, if found one UInt64 *p = std::lower_bound(bcktab_key8, bcktab_key8+bcktab_size, hash_value8); left = bcktab_val[p - bcktab_key8]; //' this hash value is used to find the right bound of target interval hash_value8 += coef8[bcktab_depth-offset]; //' upper bound return the smallest value > than target. UInt64 *q = std::upper_bound(p, bcktab_key8+bcktab_size, hash_value8); if(q == bcktab_key8+bcktab_size) right = size-1; else right = bcktab_val[q - bcktab_key8] - 1; } tmp_right = right; assert(right <= size-1); assert(right > left); offset = 0; //' Compute LEFT boundary of suflink interval Compare(left, offset, &text[suftab[i]+1+offset], orig_lcp-1-offset, llcp); llcp += offset; if(llcp < orig_lcp-1) { Compare(right, offset, &text[suftab[i]+1+offset], orig_lcp-1-offset, rlcp); rlcp += offset; c = MIN(llcp,rlcp); while(right-left > 1){ mid = (left + right)/2; Compare(mid, c, &text[suftab[i]+1+c], orig_lcp-1-c, mlcp); mlcp += c; //' if target not found yet... if(mlcp < orig_lcp-1) { if(text[suftab[mid]+mlcp] < text[suftab[i]+mlcp+1]) { left = mid; llcp = mlcp; } else { right = mid; rlcp = mlcp; } } else { //' mlcp == orig_lcp-1 assert(mlcp == orig_lcp-1); //' target found, but want to make sure it is the LEFTmost... right = mid; rlcp = mlcp; } c = MIN(llcp, rlcp); } sl_i = right; llcp = rlcp; } else { sl_i = left; } //' Compute RIGHT boundary of suflink interval right = tmp_right; left = sl_i; Compare(right, offset, &text[suftab[i]+1+offset], orig_lcp-1-offset, rlcp); rlcp += offset; if(rlcp < orig_lcp-1) { c = MIN(llcp,rlcp); while(right-left > 1){ mid = (left + right)/2; Compare(mid, c, &text[suftab[i]+1+c], orig_lcp-1-c, mlcp); mlcp += c; //' if target not found yet... if(mlcp < orig_lcp-1) { if(text[suftab[mid]+mlcp] < text[suftab[i]+mlcp+1]) { //' target is on the right half left = mid; llcp = mlcp; } else { //' target is on the left half right = mid; rlcp = mlcp; } } else { //' mlcp == orig_lcp-1 assert(mlcp == orig_lcp-1); //' target found, but want to make sure it is the RIGHTmost... 
left = mid; llcp = mlcp; } c = MIN(llcp, rlcp); } sl_j = left; } else { sl_j = right; } assert(sl_i < sl_j); return NOERROR; } #endif /** * Find suffix link interval, [p..q], for a child interval, [c_i..c_j], given its * parent interval [p_i..p_j]. * * Pre : 1. Suffix link interval for parent interval has been computed. * 2. [child_i..child_j] is not a singleton interval. * * * \param parent_i - (IN) Left bound of parent interval. * \param parent_j - (IN) Right bound of parent interval. * \param child_i - (IN) Left bound of child interval. * \param child_j - (IN) Right bound of child interval. * \param sl_i - (OUT) Left bound of suffix link interval of child interval * \param sl_j - (OUT) Right bound of suffix link interval of child interval */ ErrorCode ESA::FindSuflink(const UInt32 &parent_i, const UInt32 &parent_j, const UInt32 &child_i, const UInt32 &child_j, UInt32 &sl_i, UInt32 &sl_j) { assert(child_i != child_j); //' Variables SYMBOL ch; UInt32 tmp_i = 0; UInt32 tmp_j = 0; UInt32 lcp_child = 0; UInt32 lcp_parent = 0; UInt32 lcp_sl = 0; //' Step 1: Get suffix link interval of parent interval and its lcp value. //' 2: Get lcp values of parent and child intervals. //' Shortcut! if(parent_i ==0 && parent_j == size-1) { //' this is root interval //' (1) sl_i = 0; sl_j = size-1; lcp_sl = 0; //' (2) lcp_parent = 0; GetLcp(child_i,child_j,lcp_child); assert(lcp_child > 0); } else { //' (1) GetSuflink(parent_i,parent_j,sl_i,sl_j); GetLcp(sl_i, sl_j, lcp_sl); //' (2) GetLcp(parent_i,parent_j,lcp_parent); GetLcp(child_i,child_j,lcp_child); assert(lcp_child > 0); } //' Traversing down the subtree of [sl_i..sl_j] and looking for //' the suffix link interval of child interval. while (lcp_sl < lcp_child-1) { //' The character that we want to look for in suflink interval. ch = text[suftab[child_i]+lcp_sl+1]; tmp_i = sl_i; tmp_j = sl_j; GetIntervalByChar(tmp_i, tmp_j, ch, lcp_sl, sl_i, sl_j); assert(sl_i > q; //' The interval queue std::pair interval; //' Step 0: Push root onto queue. And define itself as its suflink. q.push(std::make_pair((unsigned int)0,size-1)); UInt32 idx = 0; childtab.l_idx(0,size-1,idx); suflink[idx+idx] = 0; //' left bound of suffix link interval suflink[idx+idx+1] = size-1; //' right bound of suffix link interval //' Step 1: Breadth first traversal. while (!q.empty()) { //' Step 1.1: Pop interval from queue. interval = q.front(); q.pop(); //' Step 1.2: For each non-singleton child-intervals, [p..q], "find" its //' suffix link interval and then "push" it onto the interval queue. UInt32 i=0,j=0, sl_i=0, sl_j=0, start_idx=interval.first; do { //' Notes: interval.first := left bound of suffix link interval //' interval.second := right bound of suffix link interval assert(interval.first>=0 && interval.second < size); GetIntervalByIndex(interval.first, interval.second, start_idx, i, j); if(j > i) { //' [i..j] is non-singleton interval FindSuflink(interval.first, interval.second, i,j, sl_i, sl_j); assert(!(sl_i == i && sl_j == j)); //' Store suflink of [i..j] UInt32 idx=0; childtab.l_idx(i, j, idx); suflink[idx+idx] = sl_i; suflink[idx+idx+1] = sl_j; //' Push suflink interval onto queue q.push(std::make_pair(i,j)); } start_idx = j+1; //' prepare to get next child interval }while(start_idx < interval.second); } return NOERROR; } /** * Get all child-intervals, including singletons. * Store all non-singleton intervals onto #q#, where interval is defined as * (i,j) where i and j are left and right bounds. * * \param lb - (IN) Left bound of current interval. 
* \param rb - (IN) Right bound of current interval. * \param q - (OUT) Storage for intervals. */ ErrorCode ESA::GetChildIntervals(const UInt32 &lb, const UInt32 &rb, std::vector > &q) { //' Variables UInt32 k=0; //' general index UInt32 i=0,j=0; //' for interval [i..j] //' Input validation assert(rb-lb >= 1); k = lb; do { assert(lb>=0 && rb 0) { if(j > i) { // chteo: saved 1 operation ;) [260906] //' Non-singleton interval q.push_back(std::make_pair(i,j)); } k = j+1; }while(k < rb); return NOERROR; } /** * Given an l-interval, l-[i..j] and a starting index, idx \in [i..j], * return the child-interval, k-[p..q], of l-[i..j] where p == idx. * * Reference: Abo05::algorithm 4.6.4 * * Pre: #start_idx# is a l-index or equal to parent_i. * * \param parent_i - (IN) Left bound of parent interval. * \param parent_j - (IN) Right bound of parent interval. * \param start_idx - (IN) Predefined left bound of child interval. * \param child_i - (OUT) Left bound of child interval. * \param child_j - (OUT) Right bound of child interval. * * Time complexity: O(|alphabet set|) */ ErrorCode ESA::GetIntervalByIndex(const UInt32 &parent_i, const UInt32 &parent_j, const UInt32 &start_idx, UInt32 &child_i, UInt32 &child_j) { //' Variables UInt32 lcp_child_i = 0; UInt32 lcp_child_j = 0; //' Input validation assert( (parent_i < parent_j) && (parent_i >= 0) && (parent_j < size) && (start_idx >= parent_i) && (start_idx < parent_j)); child_i = start_idx; //' #start_idx# is not and l-index, i.e. #start_idx == #parent_i# if(child_i == parent_i) { childtab.l_idx(parent_i,parent_j,child_j); child_j--; return NOERROR; } //' #start_idx# is a l-index // svnvish:BUGBUG child_j = childtab[child_i]; lcp_child_i = lcptab[child_i]; lcp_child_j = lcptab[child_j]; if(child_i < child_j && lcp_child_i == lcp_child_j) child_j--; else { //' child_i is the left bound of last child interval child_j = parent_j; } return NOERROR; } /** * Given an l-interval, l-[i..j] and a starting character, ch \in alphabet set, * return the child-interval, k-[p..q], of l-[i..j] such that text[sa[p]+depth] == ch. * * Reference: Abo05::algorithm 4.6.4 * * Post: Return [i..j]. If interval was found, i<=j, otherwise, i>j. * * \param parent_i - (IN) Left bound of parent interval. * \param parent_j - (IN) Right bound of parent interval. * \param ch - (IN) Starting character of left bound (suffix) of child interval. * \param depth - (IN) The position where #ch# is located in #text# * i.e. ch = text[suftab[parent_i]+depth]. * \param child_i - (OUT) Left bound of child interval. * \param child_j - (OUT) Right bound of child interval. * * Time complexity: O(|alphabet set|) */ ErrorCode ESA::GetIntervalByChar(const UInt32 &parent_i, const UInt32 &parent_j, const SYMBOL &ch, const UInt32 &depth, UInt32 &child_i, UInt32 &child_j) { //' Input validation assert(parent_i < parent_j && parent_i >= 0 && parent_j < size); //' Variables UInt32 idx = 0; UInt32 idx_next = 0; UInt32 lcp_idx = 0; UInt32 lcp_idx_next = 0; UInt32 lcp = 0; //' #depth# is actually equal to the following statement! //ec = GetLcp(parent_i, parent_j, lcp); CHECKERROR(ec); lcp = depth; //' Step 1: Check if #ch# falls in the initial range. if(text[suftab[parent_i]+lcp] > ch || text[suftab[parent_j]+lcp] < ch) { //' No child interval starts with #ch#, so, return undefined interval. child_i = 1; child_j = 0; return NOERROR; } //' Step 2: #ch# is in the initial range, but not necessarily exists in the range. 
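//' (The scan below enumerates the child intervals of [parent_i..parent_j] through the child table: the first l-index splits off the first child, successive nextlIndex entries yield the rest, and each candidate is tested by comparing the character at depth #depth# of one of its suffixes with #ch#, so the lookup costs at most one comparison per distinct branching character.)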
//' Step 2.1: Get first l-index childtab.l_idx(parent_i, parent_j, idx); assert(idx > parent_i && idx <= parent_j); if(text[suftab[idx-1]+lcp] == ch) { child_i = parent_i; child_j = idx-1; return NOERROR; } //' Step 3: Look for child interval which starts with #ch# // svnvish: BUGBUG //ec = childtab.NextlIndex(idx, idx_next); CHECKERROR(ec); idx_next = childtab[idx]; lcp_idx = lcptab[idx]; lcp_idx_next = lcptab[idx_next]; while(idx < idx_next && lcp_idx == lcp_idx_next && text[suftab[idx]+lcp] < ch) { idx = idx_next; // svnvish: BUGBUG // ec = childtab.NextlIndex(idx, idx_next); CHECKERROR(ec); idx_next = childtab[idx]; lcp_idx = lcptab[idx]; lcp_idx_next = lcptab[idx_next]; } if(text[suftab[idx]+lcp] == ch) { child_i = idx; if(idx < idx_next && lcp_idx == lcp_idx_next) child_j = idx_next - 1; else child_j = parent_j; return NOERROR; } //' Child interval starts with #ch# not found child_i = 1; child_j = 0; return NOERROR; } /** * Return the lcp value of a given interval, l-[i..j]. * * Pre: [i..j] \subseteq [0..size] * * \param i - (IN) Left bound of the interval. * \param j - (IN) Right bound of the interval. * \param val - (OUT) The lcp value of the interval. */ ErrorCode ESA::GetLcp(const UInt32 &i, const UInt32 &j, UInt32 &val) { //' Input validation assert(i < j && i >= 0 && j < size); //' Variables UInt32 up, down; //' 0-[0..size-1]. This is a shortcut! if(i == 0 && j == size) { val = 0; } else { childtab.up(j+1,up); if( (i < up) && (up <= j)) { val = lcptab[up]; } else { childtab.down(i,down); val = lcptab[down]; } } return NOERROR; } /** * Compare #pattern# string to text[suftab[#idx#]..size] and return the * length of the substring matched. * * \param idx - (IN) The index of esa. * \param depth - (IN) The start position of matching mechanism. * \param pattern - (IN) The pattern string. * \param p_len - (IN) The length of #pattern#. * \param matched_len - (OUT) The length of matched substring. */ ErrorCode ESA::Compare(const UInt32 &idx, const UInt32 &depth, SYMBOL *pattern, const UInt32 &p_len, UInt32 &matched_len) { //' Variables UInt32 min=0; min = (p_len < size-(suftab[idx]+depth)) ? p_len : size-(suftab[idx]+depth); matched_len = 0; for(UInt32 k=0; k < min; k++) { if(text[suftab[idx]+depth+k] == pattern[k]) matched_len++; else break; } return NOERROR; } /** * Find the longest matching of text and pattern. * * Note: undefinded interval := [i..j] where i>j * * Post: Return "floor" and "ceil" of longest substring of pattern that exists in text. * Otherwise, that is, no substring of pattern ever exists in text, * return the starting interval, [i..j]. * * \param i - (IN) Left bound of the starting interval. * \param j - (IN) Right bound of the starting interval. * \param offset - (IN) The number of characters between the head of suffix and the * position to start matching. * \param pattern - (IN) The pattern string to match to esa. * \param p_len - (IN) The length of #pattern# * \param lb - (OUT) The left bound of the interval containing * longest matched suffix. * \param rb - (OUT) The right bound of the interval containing * longest matched suffix. * \param matched_len - (OUT) The length of the longest matched suffix. * \param floor_lb - (OUT) Left bound of floor interval of [lb..rb]. * \param floor_rb - (OUT) Right bound of floor interval of [lb..rb]. * \param floor_len - (OUT) The lcp value of floor interval. 
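 *
 * Example (illustrative, starting from the root interval with offset 0):
 * for text "abracadabra" and pattern "abrx", the longest pattern prefix
 * occurring in the text is "abr", so matched_len == 3, [lb..rb] is the
 * interval of the two suffixes beginning with "abr", and
 * [floor_lb..floor_rb] is its parent lcp-interval (the suffixes
 * beginning with "a") with floor_len == 1.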
*/ ErrorCode ESA::ExactSuffixMatch(const UInt32 &i, const UInt32 &j, const UInt32 &offset, SYMBOL *pattern, const UInt32 p_len, UInt32 &lb, UInt32 &rb, UInt32 &matched_len, UInt32 &floor_lb, UInt32 &floor_rb, UInt32 &floor_len) { //' Input validation assert(i != j); //' Variables UInt32 min, lcp; bool queryFound = true; //' Initial setting. floor_lb = lb = i; floor_rb = rb = j; matched_len = offset; //' Step 1: Get lcp of floor/starting interval. GetLcp(floor_lb, floor_rb, lcp); floor_len = lcp; //' Step 2: Skipping #offset# characters while(lcp < matched_len) { floor_lb = lb; floor_rb = rb; floor_len = lcp; GetIntervalByChar(floor_lb, floor_rb, pattern[lcp], lcp, lb, rb); // printf("lb, rb : %i, %i\n", lb, rb); assert(lb <= rb); if(lb == rb) break; GetLcp(lb, rb, lcp); } //' Step 3: Continue matching from the point (either an interval or singleton) we stopped. while( (lb<=rb) && queryFound ) { if(lb != rb) { GetLcp(lb, rb, lcp); min = (lcp < p_len) ? lcp : p_len; while(matched_len < min) { queryFound = (text[suftab[lb]+matched_len] == pattern[matched_len]); if(queryFound) matched_len++; else return NOERROR; } assert(matched_len == min); //' Full pattern found! if(matched_len == p_len) return NOERROR; floor_lb = lb; floor_rb = rb; floor_len = lcp; GetIntervalByChar(floor_lb, floor_rb,pattern[matched_len],matched_len,lb,rb); }else { //' lb == rb, i.e. singleton interval. min = (p_len < size-suftab[lb]) ? p_len : size-suftab[lb]; while(matched_len rb) { lb = floor_lb; rb = floor_rb; } return NOERROR; } #endif kernlab/src/esa.h0000644000175100001440000001062112234152620013413 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the Suffix Array based String Kernel. * * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. * * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/ESA.h // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 #ifndef ESA_H #define ESA_H #include "datatype.h" #include "errorcode.h" #include "lcp.h" #include "ctable.h" #include "ilcpfactory.h" #include "isafactory.h" #include #include //#define SLINK // #define SSARRAY // does not yeet work correctly, CW class ESA { private: int _verb; public: UInt32 size; //' The length of #text# SYMBOL *text; //' Text corresponds to SA #ifdef SSARRAY int *suftab; //' Suffix Array #else UInt32 *suftab; //' Suffix Array #endif LCP lcptab; //' LCP array ChildTable childtab; //' Child table (fields merged) UInt32 *suflink; //' Suffix link table. 
Two fields: l,r //' --- for bucket table --- UInt32 bcktab_depth; //' Number of char defining each bucket UInt32 bcktab_size; //' size of bucket table UInt32 *bcktab_val; //' value column of bucket table UInt32 *bcktab_key4; //' 4-bytes key column of Bucket table UInt32 *coef4; UInt32 hash_value4; UInt64 *bcktab_key8; //' 8-bytes key column of Bucket table UInt64 *coef8; UInt64 hash_value8; //' --- /// Constructors ESA(const UInt32 & size_, SYMBOL *text_, int verb=INFO); /// Destructor virtual ~ESA(); /// Construct child table ErrorCode ConstructChildTable(); /// Get suffix link interval ErrorCode GetSuflink(const UInt32 &i, const UInt32 &j, UInt32 &sl_i, UInt32 &sl_j); /// Find the suffix link ErrorCode FindSuflink(const UInt32 &parent_i, const UInt32 &parent_j, const UInt32 &child_i, const UInt32 &child_j, UInt32 &sl_i, UInt32 &sl_j); /// Construct suffix link table ErrorCode ConstructSuflink(); /// Construct bucket table ErrorCode ConstructBcktab(const UInt32 &alphabet_size=256); /// Get all non-singleton child-intervals ErrorCode GetChildIntervals(const UInt32 &lb, const UInt32 &rb, std::vector > &q); /// Get intervals by index ErrorCode GetIntervalByIndex(const UInt32 &parent_i, const UInt32 &parent_j, const UInt32 &start_idx, UInt32 &child_i, UInt32 &child_j); /// Get intervals by character ErrorCode GetIntervalByChar(const UInt32 &parent_i, const UInt32 &parent_j, const SYMBOL &start_ch, const UInt32 &depth, UInt32 &child_i, UInt32 &child_j); /// Get lcp value ErrorCode GetLcp(const UInt32 &i, const UInt32 &j, UInt32 &val); /// Compare pattern to text[suftab[idx]..length]. ErrorCode Compare(const UInt32 &idx, const UInt32 &depth, SYMBOL *pattern, const UInt32 &p_len, UInt32 &matched_len); /// Find longest substring of pattern in enhanced suffix array. ErrorCode Match(const UInt32 &i, const UInt32 &j, SYMBOL *pattern, const UInt32 p_len, UInt32 &lb, UInt32 &rb, UInt32 &matched_len); /// Similar to Match() but returns also floor interval of [lb..rb] ErrorCode ExactSuffixMatch(const UInt32 &i, const UInt32 &j, const UInt32 &offset, SYMBOL *pattern, const UInt32 p_len, UInt32 &lb, UInt32 &rb, UInt32 &matched_len, UInt32 &floor_lb, UInt32 &floor_rb, UInt32 &floor_len); }; #endif kernlab/src/lcp.cpp0000644000175100001440000001271513561512465013775 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the Suffix Array based String Kernel. * * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. 
* * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/LCP.cpp // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 // 11 Oct 2006 #ifndef LCP_CPP #define LCP_CPP #include "lcp.h" // Threshold for compacting LCP[] const Real THRESHOLD = 0.3; LCP::LCP(const UInt32 &size): _p_array(0), _idx_array(0), _val_array(0), _size(size), _is_compact(false), _beg(0), _end(0), _cache(0), _dist(0), array(new UInt32[size]){ } LCP::~LCP() { if(array) {delete [] array; array = 0;} if(_p_array) {delete [] _p_array; _p_array = 0;} if(_idx_array) {delete [] _idx_array; _idx_array = 0;} if(_val_array) {delete [] _val_array; _val_array = 0;} } /** * Compact initial/original lcp array of n elements (i.e. 4n bytes) * into a n byte array with 8 bytes of secondary storage. * */ ErrorCode LCP::compact(void){ // Validate pre-conditions //assert(!array.empty() && array.size() == _size); assert(array); // Already compact. Nothing to do if (_is_compact) return NOERROR; // Count number of lcp-values >= 255. // UInt32 idx_len = std::count_if(array.begin(), array.end(), // std::bind2nd(std::greater(),254)); #ifdef _RWSTD_NO_CLASS_PARTIAL_SPEC UInt32 idx_len = 0; std::count_if(array, array + _size, std::bind2nd(std::greater(),254), idx_len); #else UInt32 idx_len = std::count_if(array, array + _size, std::bind(std::greater(), std::placeholders::_1, 254)); #endif // Compact iff idx_len/|array| > THRESHOLD if((Real)idx_len/_size > THRESHOLD) { //std::cout<< "Not compacting " << std::endl; return NOERROR; } // std::cout<< "Compacting with : " << idx_len << std::endl; // We know how much space to use // _p_array.resize(_size); // _idx_array.resize(idx_len); // _val_array.resize(idx_len); _p_array = new Byte1[_size]; _idx_array = new UInt32[idx_len]; _val_array = new UInt32[idx_len]; // Hold pointers for later. Avoids function calls // _beg = _idx_array.begin(); // _end = _idx_array.end(); // _cache = _idx_array.begin(); _beg = _idx_array; _end = _idx_array + idx_len; _cache = _idx_array; _dist = 0; for(UInt32 i=0, j=0; i<_size; i++) { if(array[i] < 255){ _p_array[i] = array[i]; }else { _p_array[i] = 255; _idx_array[j] = i; _val_array[j] = array[i]; j++; } } //array.resize(0); // array.clear(); delete [] array; array = 0; _is_compact = true; return NOERROR; } /** * Retrieve lcp array values. * * \param idx - (IN) Index of lcp array */ UInt32 LCP::operator [] (const UInt32 &idx) { // input is valid? // assert (idx >= 0 && idx < _size); if(!_is_compact){ // LCP array has not been compacted yet! return array[idx]; } if(_p_array[idx] < 255){ // Found in primary index return (UInt32) _p_array[idx]; } // svnvish: BUGBUG // Do some caching here. 
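// The caching below exploits the fact that lookups usually arrive in
// (nearly) ascending index order: advancing the cached cursor by one
// entry of _idx_array often lands on the requested index, so the
// binary search (std::lower_bound) is only the fallback path.
//
// Illustrative example (hypothetical values): suppose the original lcp
// values at positions 0..3 were {3, 310, 2, 999} and the array has been
// compacted.  Then _p_array holds {3, 255, 2, 255} at those positions,
// while _idx_array records {1, 3} and _val_array {310, 999}; a lookup
// of idx == 3 misses the byte array (_p_array[3] == 255) and resolves
// to 999 via the secondary arrays.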
// // Now search in secondary index as last resort // std::pair< const_itr, const_itr > p = equal_range(_beg, _end, idx); // return _val_array[std::distance(_beg, p.first)]; if (++_cache == _end){ _cache = _beg; _dist = 0; }else{ _dist++; } UInt32 c_idx = *(_cache); if (c_idx == idx){ return _val_array[_dist]; } // _cache = std::equal_range(_beg, _end, idx).first; _cache = std::lower_bound(_beg, _end, idx); #ifdef _RWSTD_NO_CLASS_PARTIAL_SPEC _dist = 0; std::distance(_beg, _cache, _dist); #else _dist = std::distance(_beg, _cache); #endif //std::cout << "here" << std::endl; // _cache = equal_range(_beg, _end, idx).first; // _dist = std::distance(_beg, _cache); return _val_array[_dist]; // if (c_idx > idx){ // _cache = equal_range(_beg, _cache, idx).first; // }else{ // _cache = equal_range(_cache, _end, idx).first; // } // //_cache = p.first; // _dist = std::distance(_beg, _cache); // return _val_array[_dist]; } /** * Dump array elements to output stream. * * \param os - (IN) Output stream * \param lcp - (IN) LCP object. */ std::ostream& operator << (std::ostream& os, LCP& lcp){ for( UInt32 i = 0; i < lcp._size; i++ ){ os << "lcp[ " << i << "]: " << lcp[i] << std::endl; } return os; } #endif kernlab/src/kspectrumweight.h0000644000175100001440000000326212234152620016073 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the Suffix Array based String Kernel. * * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. 
* * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/KSpectrumWeight.h // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 // 12 Jul 2006 #ifndef KSPECTRUMWEIGHT_H #define KSPECTRUMWEIGHT_H #include "datatype.h" #include "errorcode.h" #include "iweightfactory.h" #include //' K-spectrum weight class class KSpectrumWeight : public I_WeightFactory { Real k; public: /// Constructor KSpectrumWeight(const Real & k_=5.0):k(k_) {} /// Destructor virtual ~KSpectrumWeight(){} /// Compute weight ErrorCode ComputeWeight(const UInt32 &floor_len, const UInt32 &x_len, Real &weight); }; #endif kernlab/src/dprsrch.c0000644000175100001440000001050614221630630014305 0ustar hornikusers#include #include #ifndef USE_FC_LEN_T # define USE_FC_LEN_T #endif #include extern double mymin(double, double); extern double mymax(double, double); extern void *xmalloc(size_t); /* LEVEL 1 BLAS */ /*extern double ddot_(int *, double *, int *, double *, int *);*/ /*extern int daxpy_(int *, double *, double *, int *, double *, int *);*/ /* LEVEL 2 BLAS */ /*extern int dsymv_(char *, int *, double *, double *, int *, double *, int *, double *, double *, int *);*/ /* MINPACK 2 */ extern void dbreakpt(int, double *, double *, double *, double *, int *, double *, double *); extern void dgpstep(int, double *, double *, double *, double, double *, double *); void dprsrch(int n, double *x, double *xl, double *xu, double *A, double *g, double *w) { /* c ********** c c Subroutine dprsrch c c This subroutine uses a projected search to compute a step c that satisfies a sufficient decrease condition for the quadratic c c q(s) = 0.5*s'*A*s + g'*s, c c where A is a symmetric matrix and g is a vector. Given the c parameter alpha, the step is c c s[alpha] = P[x + alpha*w] - x, c c where w is the search direction and P the projection onto the c n-dimensional interval [xl,xu]. The final step s = s[alpha] c satisfies the sufficient decrease condition c c q(s) <= mu_0*(g'*s), c c where mu_0 is a constant in (0,1). c c The search direction w must be a descent direction for the c quadratic q at x such that the quadratic is decreasing c in the ray x + alpha*w for 0 <= alpha <= 1. c c parameters: c c n is an integer variable. c On entry n is the number of variables. c On exit n is unchanged. c c x is a double precision array of dimension n. c On entry x specifies the vector x. c On exit x is set to the final point P[x + alpha*w]. c c xl is a double precision array of dimension n. c On entry xl is the vector of lower bounds. c On exit xl is unchanged. c c xu is a double precision array of dimension n. c On entry xu is the vector of upper bounds. c On exit xu is unchanged. c c A is a double precision array of dimension n*n. c On entry A specifies the matrix A c On exit A is unchanged. c c g is a double precision array of dimension n. c On entry g specifies the vector g. c On exit g is unchanged. c c w is a double prevision array of dimension n. c On entry w specifies the search direction. c On exit w is the step s[alpha]. c c ********** */ double one = 1, zero = 0; /* Constant that defines sufficient decrease. */ /* Interpolation factor. 
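   Each unsuccessful trial step below multiplies alpha by interpf (0.5),
   so the projected step s[alpha] = P[x + alpha*w] - x shrinks
   geometrically until the sufficient decrease test q(s) <= mu_0*(g'*s)
   holds or alpha drops to the smallest break-point brptmin.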
*/ double mu0 = 0.01, interpf = 0.5; double *wa1 = (double *) xmalloc(sizeof(double)*n); double *wa2 = (double *) xmalloc(sizeof(double)*n); /* Set the initial alpha = 1 because the quadratic function is decreasing in the ray x + alpha*w for 0 <= alpha <= 1 */ double alpha = 1, brptmin, brptmax, gts, q; int search = 1, nbrpt, nsteps = 0, i, inc = 1; /* Find the smallest break-point on the ray x + alpha*w. */ dbreakpt(n, x, xl, xu, w, &nbrpt, &brptmin, &brptmax); /* Reduce alpha until the sufficient decrease condition is satisfied or x + alpha*w is feasible. */ while (search && alpha > brptmin) { /* Calculate P[x + alpha*w] - x and check the sufficient decrease condition. */ nsteps++; dgpstep(n, x, xl, xu, alpha, w, wa1); F77_CALL(dsymv)("U", &n, &one, A, &n, wa1, &inc, &zero, wa2, &inc FCONE); gts = F77_CALL(ddot)(&n, g, &inc, wa1, &inc); q = 0.5*F77_CALL(ddot)(&n, wa1, &inc, wa2, &inc) + gts; if (q <= mu0*gts) search = 0; else /* This is a crude interpolation procedure that will be replaced in future versions of the code. */ alpha *= interpf; } /* Force at least one more constraint to be added to the active set if alpha < brptmin and the full step is not successful. There is sufficient decrease because the quadratic function is decreasing in the ray x + alpha*w for 0 <= alpha <= 1. */ if (alpha < 1 && alpha < brptmin) alpha = brptmin; /* Compute the final iterate and step. */ dgpstep(n, x, xl, xu, alpha, w, wa1); F77_CALL(daxpy)(&n, &alpha, w, &inc, x, &inc); for (i=0;i #include void *xmalloc(size_t size) { void *ptr = (void *) malloc(size); return ptr; } double mymax(double a, double b) { if (a > b) return a; return b; } double mymin(double a, double b) { if (a < b) return a; return b; } double sign(double a, double b) { if (b >= 0) return fabs(a); return -fabs(a); } kernlab/src/errorcode.h0000644000175100001440000000374312234152620014636 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the Suffix Array based String Kernel. * * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. 
* * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/ErrorCode.cpp // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 #ifndef _ERRORCODE_H_ #define _ERRORCODE_H_ #include "datatype.h" #include // Verbosity level enum verbosity {QUIET, INFO, DEBUG1}; #define ErrorCode UInt32 /** * for general use */ #define NOERROR 0 #define GENERAL_ERROR 1 #define MEM_ALLOC_FAILED 2 #define INVALID_PARAM 3 #define ARRAY_EMPTY 4 #define OPERATION_FAILED 5 /** * SuffixArray */ #define MATCH_NOT_FOUND 101 #define PARTIAL_MATCH 102 /** * LCP */ #define LCP_COMPACT_FAILED 201 #define CHECKERROR(i) { \ if((i) != NOERROR) { \ exit(EXIT_FAILURE); \ } \ } // #define MESSAGE(msg) { std::cout<<(msg)< * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/W_msufsort.h // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 // 13 Jul 2007 : use MSufSort v3.1 instead of v2.2 // Wrapper for Michael Maniscalco's MSufSort version 3.1 algorithm #ifndef W_MSUFSORT_H #define W_MSUFSORT_H #include "datatype.h" #include "isafactory.h" #include "msufsort.h" class W_msufsort : public I_SAFactory { public: ///Variables //'Declaration of object POINTERS, no initialization needed. //'If Declaration of objects, initialize them in member initialization list. MSufSort *msuffixsorter; ///Constructor W_msufsort(); ///Destructor virtual ~W_msufsort(); ///Methods ErrorCode ConstructSA(SYMBOL *text, const UInt32 &len, UInt32 *&array); }; #endif kernlab/src/dprecond.c0000644000175100001440000000207614221630477014452 0ustar hornikusers#include #include #include #ifndef USE_FC_LEN_T # define USE_FC_LEN_T #endif #include /* LAPACK */ /* extern int dpotf2_(char *, int *, double *, int *, int *); */ double dcholfact(int n, double *A, double *L) { /* if A is p.d. , A = L*L' if A is p.s.d. , A + lambda*I = L*L'; */ int indef, i; static double lambda = 1e-3/512/512; memcpy(L, A, sizeof(double)*n*n); F77_CALL(dpotf2)("L", &n, L, &n, &indef FCONE); if (indef != 0) { memcpy(L, A, sizeof(double)*n*n); for (i=0;i #ifndef USE_FC_LEN_T # define USE_FC_LEN_T #endif #include extern void *xmalloc(size_t); /* LEVEL 1 BLAS */ /* extern double ddot_(int *, double *, int *, double *, int *); extern double dnrm2_(int *, double *, int *); */ /* LEVEL 2 BLAS */ /* extern int dsymv_(char *, int *, double *, double *, int *, double *, int *, double *, double *, int *); */ /* MINPACK 2 */ extern void dbreakpt(int, double *, double *, double *, double *, int *, double *, double *); extern void dgpstep(int, double *, double *, double *, double, double *, double *); void dcauchy(int n, double *x, double *xl, double *xu, double *A, double *g, double delta, double *alpha, double *s, double *wa) { /* c ********** c c Subroutine dcauchy c c This subroutine computes a Cauchy step that satisfies a trust c region constraint and a sufficient decrease condition. c c The Cauchy step is computed for the quadratic c c q(s) = 0.5*s'*A*s + g'*s, c c where A is a symmetric matrix , and g is a vector. Given a c parameter alpha, the Cauchy step is c c s[alpha] = P[x - alpha*g] - x, c c with P the projection onto the n-dimensional interval [xl,xu]. 
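c
c       For illustration (hypothetical one-dimensional data): with x = 0,
c       xl = -1, xu = 1, g = 2 and alpha = 0.25, we get
c       P[x - alpha*g] = P[-0.5] = -0.5 and hence s[alpha] = -0.5.
c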
c The Cauchy step satisfies the trust region constraint and the c sufficient decrease condition c c || s || <= delta, q(s) <= mu_0*(g'*s), c c where mu_0 is a constant in (0,1). c c parameters: c c n is an integer variable. c On entry n is the number of variables. c On exit n is unchanged. c c x is a double precision array of dimension n. c On entry x specifies the vector x. c On exit x is unchanged. c c xl is a double precision array of dimension n. c On entry xl is the vector of lower bounds. c On exit xl is unchanged. c c xu is a double precision array of dimension n. c On entry xu is the vector of upper bounds. c On exit xu is unchanged. c c A is a double precision array of dimension n*n. c On entry A specifies the matrix A. c On exit A is unchanged. c c g is a double precision array of dimension n. c On entry g specifies the gradient g. c On exit g is unchanged. c c delta is a double precision variable. c On entry delta is the trust region size. c On exit delta is unchanged. c c alpha is a double precision variable. c On entry alpha is the current estimate of the step. c On exit alpha defines the Cauchy step s[alpha]. c c s is a double precision array of dimension n. c On entry s need not be specified. c On exit s is the Cauchy step s[alpha]. c c ********** */ double one = 1, zero = 0; /* Constant that defines sufficient decrease. Interpolation and extrapolation factors. */ double mu0 = 0.01, interpf = 0.1, extrapf = 10; int search, interp, nbrpt, nsteps = 1, i, inc = 1; double alphas, brptmax, brptmin, gts, q; /* FIXME KH 2019-11-09: double *wa = (double *) xmalloc(sizeof(double)*n); */ /* Find the minimal and maximal break-point on x - alpha*g. */ for (i=0;i delta) interp = 1; else { F77_CALL(dsymv)("U", &n, &one, A, &n, s, &inc, &zero, wa, &inc FCONE); gts = F77_CALL(ddot)(&n, g, &inc, s, &inc); q = 0.5*F77_CALL(ddot)(&n, s, &inc, wa, &inc) + gts; interp = q >= mu0*gts ? 1 : 0; } /* Either interpolate or extrapolate to find a successful step. */ if (interp) { /* Reduce alpha until a successful step is found. */ search = 1; while (search) { /* This is a crude interpolation procedure that will be replaced in future versions of the code. */ nsteps++; (*alpha) *= interpf; dgpstep(n, x, xl, xu, -(*alpha), g, s); if (F77_CALL(dnrm2)(&n, s, &inc) <= delta) { F77_CALL(dsymv)("U", &n, &one, A, &n, s, &inc, &zero, wa, &inc FCONE); gts = F77_CALL(ddot)(&n, g, &inc, s, &inc); q = 0.5 * F77_CALL(ddot)(&n, s, &inc, wa, &inc) + gts; search = q > mu0*gts ? 1 : 0; } } } else { search = 1; alphas = *alpha; /* Increase alpha until a successful step is found. */ while (search && (*alpha) <= brptmax) { /* This is a crude extrapolation procedure that will be replaced in future versions of the code. */ nsteps++; alphas = *alpha; (*alpha) *= extrapf; dgpstep(n, x, xl, xu, -(*alpha), g, s); if (F77_CALL(dnrm2)(&n, s, &inc) <= delta) { F77_CALL(dsymv)("U", &n, &one, A, &n, s, &inc, &zero, wa, &inc FCONE); gts = F77_CALL(ddot)(&n, g, &inc, s, &inc); q = 0.5 * F77_CALL(ddot)(&n, s, &inc, wa, &inc) + gts; search = q < mu0*gts ? 
1 : 0; } else search = 0; } *alpha = alphas; dgpstep(n, x, xl, xu, -(*alpha), g, s); } /* FIXME KH 2019-11-09: free(wa); */ } kernlab/src/dtron.c0000644000175100001440000001714714221630764014006 0ustar hornikusers#include #include #include #include #ifndef USE_FC_LEN_T # define USE_FC_LEN_T #endif #include extern void *xmalloc(size_t); extern double mymin(double, double); extern double mymax(double, double); extern int ufv(int, double *, double *); extern int ugrad(int, double *, double *); extern int uhes(int, double *, double **); /* LEVEL 1 BLAS */ /*extern double dnrm2_(int *, double *, int *);*/ /*extern double ddot_(int *, double *, int *, double *, int *);*/ /* LEVEL 2 BLAS */ /*extern int dsymv_(char *, int *, double *, double *, int *, double *, int *, double *, double *, int *);*/ /* MINPACK 2 */ extern double dgpnrm(int, double *, double *, double *, double *); extern void dcauchy(int, double *, double *, double *, double *, double *, double, double *, double *, double *); extern void dspcg(int, double *, double *, double *, double *, double *, double, double, double *, int *); void dtron(int n, double *x, double *xl, double *xu, double gtol, double frtol, double fatol, double fmin, int maxfev, double cgtol) { /* c ********* c c Subroutine dtron c c The optimization problem of BSVM is a bound-constrained quadratic c optimization problem and its Hessian matrix is positive semidefinite. c We modified the optimization solver TRON by Chih-Jen Lin and c Jorge More' into this version which is suitable for this c special case. c c This subroutine implements a trust region Newton method for the c solution of large bound-constrained quadratic optimization problems c c min { f(x)=0.5*x'*A*x + g0'*x : xl <= x <= xu } c c where the Hessian matrix A is dense and positive semidefinite. The c user must define functions which evaluate the function, gradient, c and the Hessian matrix. c c The user must choose an initial approximation x to the minimizer, c lower bounds, upper bounds, quadratic terms, linear terms, and c constants about termination criterion. c c parameters: c c n is an integer variable. c On entry n is the number of variables. c On exit n is unchanged. c c x is a double precision array of dimension n. c On entry x specifies the vector x. c On exit x is the final minimizer. c c xl is a double precision array of dimension n. c On entry xl is the vector of lower bounds. c On exit xl is unchanged. c c xu is a double precision array of dimension n. c On entry xu is the vector of upper bounds. c On exit xu is unchanged. c c gtol is a double precision variable. c On entry gtol specifies the relative error of the projected c gradient. c On exit gtol is unchanged. c c frtol is a double precision variable. c On entry frtol specifies the relative error desired in the c function. Convergence occurs if the estimate of the c relative error between f(x) and f(xsol), where xsol c is a local minimizer, is less than frtol. c On exit frtol is unchanged. c c fatol is a double precision variable. c On entry fatol specifies the absolute error desired in the c function. Convergence occurs if the estimate of the c absolute error between f(x) and f(xsol), where xsol c is a local minimizer, is less than fatol. c On exit fatol is unchanged. c c fmin is a double precision variable. c On entry fmin specifies a lower bound for the function. c The subroutine exits with a warning if f < fmin. c On exit fmin is unchanged. c c maxfev is an integer variable. 
c On entry maxfev specifies the limit of function evaluations. c On exit maxfev is unchanged. c c cgtol is a double precision variable. c On entry gqttol specifies the convergence criteria for c subproblems. c On exit gqttol is unchanged. c c ********** */ /* Parameters for updating the iterates. */ double eta0 = 1e-4, eta1 = 0.25, eta2 = 0.75; /* Parameters for updating the trust region size delta. */ double sigma1 = 0.25, sigma2 = 0.5, sigma3 = 4; double p5 = 0.5, one = 1; double gnorm, gnorm0, delta, snorm; double alphac = 1, alpha, f, fc, prered, actred, gs; int search = 1, iter = 1, info, inc = 1; double *xc = (double *) xmalloc(sizeof(double)*n); double *s = (double *) xmalloc(sizeof(double)*n); double *wa = (double *) xmalloc(sizeof(double)*n); double *g = (double *) xmalloc(sizeof(double)*n); double *A = NULL; uhes(n, x, &A); ugrad(n, x, g); ufv(n, x, &f); gnorm0 = F77_CALL(dnrm2)(&n, g, &inc); delta = 1000*gnorm0; gnorm = dgpnrm(n, x, xl, xu, g); if (gnorm <= gtol*gnorm0) { /* //printf("CONVERGENCE: GTOL TEST SATISFIED\n"); */ search = 0; } while (search) { /* Save the best function value and the best x. */ fc = f; memcpy(xc, x, sizeof(double)*n); /* Compute the Cauchy step and store in s. */ dcauchy(n, x, xl, xu, A, g, delta, &alphac, s, wa); /* Compute the projected Newton step. */ dspcg(n, x, xl, xu, A, g, delta, cgtol, s, &info); if (ufv(n, x, &f) > maxfev) { /* //printf("ERROR: NFEV > MAXFEV\n"); */ search = 0; continue; } /* Compute the predicted reduction. */ memcpy(wa, g, sizeof(double)*n); F77_CALL(dsymv)("U", &n, &p5, A, &n, s, &inc, &one, wa, &inc FCONE); prered = -F77_CALL(ddot)(&n, s, &inc, wa, &inc); /* Compute the actual reduction. */ actred = fc - f; /* On the first iteration, adjust the initial step bound. */ snorm = F77_CALL(dnrm2)(&n, s, &inc); if (iter == 1) delta = mymin(delta, snorm); /* Compute prediction alpha*snorm of the step. */ gs = F77_CALL(ddot)(&n, g, &inc, s, &inc); if (f - fc - gs <= 0) alpha = sigma3; else alpha = mymax(sigma1, -0.5*(gs/(f - fc - gs))); /* Update the trust region bound according to the ratio of actual to predicted reduction. */ if (actred < eta0*prered) /* Reduce delta. Step is not successful. */ delta = mymin(mymax(alpha, sigma1)*snorm, sigma2*delta); else { if (actred < eta1*prered) /* Reduce delta. Step is not sufficiently successful. */ delta = mymax(sigma1*delta, mymin(alpha*snorm, sigma2*delta)); else if (actred < eta2*prered) /* The ratio of actual to predicted reduction is in the interval (eta1,eta2). We are allowed to either increase or decrease delta. */ delta = mymax(sigma1*delta, mymin(alpha*snorm, sigma3*delta)); else /* The ratio of actual to predicted reduction exceeds eta2. Do not decrease delta. */ delta = mymax(delta, mymin(alpha*snorm, sigma3*delta)); } /* Update the iterate. */ if (actred > eta0*prered) { /* Successful iterate. */ iter++; /* uhes(n, x, &A); */ ugrad(n, x, g); gnorm = dgpnrm(n, x, xl, xu, g); if (gnorm <= gtol*gnorm0) { /* //printf("CONVERGENCE: GTOL = %g TEST SATISFIED\n", gnorm/gnorm0); */ search = 0; continue; } } else { /* Unsuccessful iterate. */ memcpy(x, xc, sizeof(double)*n); f = fc; } /* Test for convergence */ if (f < fmin) { //printf("WARNING: F .LT. 
FMIN\n"); search = 0; /* warning */ continue; } if (fabs(actred) <= fatol && prered <= fatol) { //printf("CONVERGENCE: FATOL TEST SATISFIED\n"); search = 0; continue; } if (fabs(actred) <= frtol*fabs(f) && prered <= frtol*fabs(f)) { /* //printf("CONVERGENCE: FRTOL TEST SATISFIED\n"); */ search = 0; continue; } } free(g); free(xc); free(s); free(wa); } kernlab/src/dgpnrm.c0000644000175100001440000000217211304023134014121 0ustar hornikusers#include double dgpnrm(int n, double *x, double *xl, double *xu, double *g) { /* c ********** c c Function dgpnrm c c This function computes the infinite norm of the c projected gradient at x. c c parameters: c c n is an integer variable. c On entry n is the number of variables. c On exit n is unchanged. c c x is a double precision array of dimension n. c On entry x specifies the vector x. c On exit x is unchanged. c c xl is a double precision array of dimension n. c On entry xl is the vector of lower bounds. c On exit xl is unchanged. c c xu is a double precision array of dimension n. c On entry xu is the vector of upper bounds. c On exit xu is unchanged. c c g is a double precision array of dimension n. c On entry g specifies the gradient g. c On exit g is unchanged. c c ********** */ int i; double norm = 0; for (i=0;i= 0 && x[i] == xl[i]))) if (fabs(g[i]) > norm) norm = fabs(g[i]); return norm; } kernlab/src/solvebqp.c0000644000175100001440000000325614221631132014475 0ustar hornikusers#include #include #ifndef USE_FC_LEN_T # define USE_FC_LEN_T #endif #include /* LEVEL 1 BLAS */ /*extern double ddot_(int *, double *, int *, double *, int *); */ /* LEVEL 2 BLAS */ /*extern int dsymv_(char *, int *, double *, double *, int *, double *, int *, double *, double *, int *);*/ /* MINPACK 2 */ extern void dtron(int, double *, double *, double *, double, double, double, double, int, double); struct BQP { double eps; int n; double *x, *C, *Q, *p; }; int nfev, inc = 1; double one = 1, zero = 0, *A, *g0; int uhes(int n, double *x, double **H) { *H = A; return 0; } int ugrad(int n, double *x, double *g) { /* evaluate the gradient g = A*x + g0 */ memcpy(g, g0, sizeof(double)*n); F77_CALL(dsymv)("U", &n, &one, A, &n, x, &inc, &one, g, &inc FCONE); return 0; } int ufv(int n, double *x, double *f) { /* evaluate the function value f(x) = 0.5*x'*A*x + g0'*x */ double *t = (double *) malloc(sizeof(double)*n); F77_CALL(dsymv)("U", &n, &one, A, &n, x, &inc, &zero, t, &inc FCONE); *f = F77_CALL(ddot)(&n, x, &inc, g0, &inc) + 0.5 * F77_CALL(ddot)(&n, x, &inc, t, &inc); free(t); return ++nfev; } void solvebqp(struct BQP *qp) { /* driver for positive semidefinite quadratic programing version of tron */ int i, n, maxfev; double *x, *xl, *xu; double frtol, fatol, fmin, gtol, cgtol; n = qp->n; maxfev = 1000; /* ? */ nfev = 0; x = qp->x; xu = qp->C; A = qp->Q; g0 = qp->p; xl = (double *) malloc(sizeof(double)*n); for (i=0;ieps; dtron(n, x, xl, xu, gtol, frtol, fatol, fmin, maxfev, cgtol); free(xl); } kernlab/src/wkasailcp.h0000644000175100001440000000337712234152620014633 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. 
* * The Original Code is the Suffix Array based String Kernel. * * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. * * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/W_kasai_lcp.h // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 #ifndef W_KASAI_LCP_H #define W_KASAI_LCP_H #include "datatype.h" #include "errorcode.h" #include "ilcpfactory.h" #include "lcp.h" /** * Kasai et al's LCP array computation algorithm is * is slightly faster than Manzini's algorithm. However, * it needs inverse suffix array which costs extra memory. */ class W_kasai_lcp : public I_LCPFactory { public: /// Constructor W_kasai_lcp(){} /// Desctructor virtual ~W_kasai_lcp(){} /// Compute LCP array. ErrorCode ComputeLCP(const SYMBOL *text, const UInt32 &len, const UInt32 *sa, LCP& lcp); }; #endif kernlab/src/dgpstep.c0000644000175100001440000000275111304023134014303 0ustar hornikusersvoid dgpstep(int n, double *x, double *xl, double *xu, double alpha, double *w, double *s) { /* c ********** c c Subroutine dgpstep c c This subroutine computes the gradient projection step c c s = P[x + alpha*w] - x, c c where P is the projection on the n-dimensional interval [xl,xu]. c c parameters: c c n is an integer variable. c On entry n is the number of variables. c On exit n is unchanged. c c x is a double precision array of dimension n. c On entry x specifies the vector x. c On exit x is unchanged. c c xl is a double precision array of dimension n. c On entry xl is the vector of lower bounds. c On exit xl is unchanged. c c xu is a double precision array of dimension n. c On entry xu is the vector of upper bounds. c On exit xu is unchanged. c c alpha is a double precision variable. c On entry alpha specifies the scalar alpha. c On exit alpha is unchanged. c c w is a double precision array of dimension n. c On entry w specifies the vector w. c On exit w is unchanged. c c s is a double precision array of dimension n. c On entry s need not be specified. c On exit s contains the gradient projection step. c c ********** */ int i; for (i=0;i xu[i]) s[i] = xu[i] - x[i]; else s[i] = alpha*w[i]; } kernlab/src/brweight.h0000644000175100001440000000325412234152620014462 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the Suffix Array based String Kernel. * * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. 
* * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/BoundedRangeWeight.h // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 // 12 Jul 2006 #ifndef BRWEIGHT_H #define BRWEIGHT_H #include "datatype.h" #include "errorcode.h" #include "iweightfactory.h" #include //' Bounded Range weight class class BoundedRangeWeight : public I_WeightFactory { Real n; public: /// Constructor BoundedRangeWeight(const Real &n_=1): n(n_){} /// Destructor virtual ~BoundedRangeWeight(){} /// Compute weight ErrorCode ComputeWeight(const UInt32 &floor_len, const UInt32 &x_len, Real &weight); }; #endif kernlab/src/brweight.cpp0000644000175100001440000000435112233654617015030 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the Suffix Array based String Kernel. * * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. * * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/BoundedRangeWeight.cpp // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 // 12 Jul 2006 #ifndef BRWEIGHT_CPP #define BRWEIGHT_CPP #include "brweight.h" #include #define MIN(x,y) (((x) < (y)) ? (x) : (y)) #define MAX(x,y) (((x) > (y)) ? (x) : (y)) /** * Bounded Range weight function. * W(y,t) := max(0,min(tau,n)-gamma) * * \param floor_len - (IN) Length of floor interval of matched substring. * (cf. gamma in VisSmo02). * \param x_len - (IN) Length of the matched substring. * (cf. tau in visSmo02). * \param weight - (OUT) The weight value. * */ ErrorCode BoundedRangeWeight::ComputeWeight(const UInt32 &floor_len, const UInt32 &x_len, Real &weight) { //' Input validation assert(x_len >= floor_len); //' x_len == floor_len when the substring found ends on an interval. 
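//' Illustrative example (hypothetical numbers): with bound n = 3, a
//' matched substring of length x_len = 5 whose floor interval has lcp
//' floor_len = 2 receives weight max(0, min(5,3) - 2) = 1.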
Real tau = (Real)x_len; Real gamma = (Real)floor_len; weight = MAX(0,MIN(tau,n)-gamma); // std::cout << "floor_len:"<= (InductionSortObject & object); bool operator > (InductionSortObject & object); bool operator < (InductionSortObject & object); unsigned int m_sortValue[2]; }; inline bool InductionSortObject::operator <= (InductionSortObject & object) { if (m_sortValue[0] < object.m_sortValue[0]) return true; else if (m_sortValue[0] == object.m_sortValue[0]) return (m_sortValue[1] <= object.m_sortValue[1]); return false; } inline bool InductionSortObject::operator == (InductionSortObject & object) { return ((m_sortValue[0] == object.m_sortValue[0]) && (m_sortValue[1] == object.m_sortValue[1])); } inline bool InductionSortObject::operator >= (InductionSortObject & object) { if (m_sortValue[0] > object.m_sortValue[0]) return true; else if (m_sortValue[0] == object.m_sortValue[0]) return (m_sortValue[1] >= object.m_sortValue[1]); return false; } inline InductionSortObject & InductionSortObject::operator = (InductionSortObject & object) { m_sortValue[0] = object.m_sortValue[0]; m_sortValue[1] = object.m_sortValue[1]; return *this; } inline bool InductionSortObject::operator > (InductionSortObject & object) { if (m_sortValue[0] > object.m_sortValue[0]) return true; else if (m_sortValue[0] == object.m_sortValue[0]) return (m_sortValue[1] > object.m_sortValue[1]); return false; } inline bool InductionSortObject::operator < (InductionSortObject & object) { if (m_sortValue[0] < object.m_sortValue[0]) return true; else if (m_sortValue[0] == object.m_sortValue[0]) return (m_sortValue[1] < object.m_sortValue[1]); return false; } #endif kernlab/src/dbreakpt.c0000644000175100001440000000417111304023134014427 0ustar hornikusersextern double mymin(double, double); extern double mymax(double, double); void dbreakpt(int n, double *x, double *xl, double *xu, double *w, int *nbrpt, double *brptmin, double *brptmax) { /* c ********** c c Subroutine dbreakpt c c This subroutine computes the number of break-points, and c the minimal and maximal break-points of the projection of c x + alpha*w on the n-dimensional interval [xl,xu]. c c parameters: c c n is an integer variable. c On entry n is the number of variables. c On exit n is unchanged. c c x is a double precision array of dimension n. c On entry x specifies the vector x. c On exit x is unchanged. c c xl is a double precision array of dimension n. c On entry xl is the vector of lower bounds. c On exit xl is unchanged. c c xu is a double precision array of dimension n. c On entry xu is the vector of upper bounds. c On exit xu is unchanged. c c w is a double precision array of dimension n. c On entry w specifies the vector w. c On exit w is unchanged. c c nbrpt is an integer variable. c On entry nbrpt need not be specified. c On exit nbrpt is the number of break points. c c brptmin is a double precision variable c On entry brptmin need not be specified. c On exit brptmin is minimal break-point. c c brptmax is a double precision variable c On entry brptmax need not be specified. c On exit brptmax is maximal break-point. 
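c
c       For illustration (hypothetical data): in one dimension with x = 0,
c       xl = -1, xu = 2 and w = 1, the ray x + alpha*w leaves [xl,xu] at
c       alpha = 2, so nbrpt = 1 and brptmin = brptmax = 2.
c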
c c ********** */ int i; double brpt; *nbrpt = 0; for (i=0;i 0) { (*nbrpt)++; brpt = (xu[i] - x[i])/w[i]; if (*nbrpt == 1) *brptmin = *brptmax = brpt; else { *brptmin = mymin(brpt, *brptmin); *brptmax = mymax(brpt, *brptmax); } } else if (x[i] > xl[i] && w[i] < 0) { (*nbrpt)++; brpt = (xl[i] - x[i])/w[i]; if (*nbrpt == 1) *brptmin = *brptmax = brpt; else { *brptmin = mymin(brpt, *brptmin); *brptmax = mymax(brpt, *brptmax); } } if (*nbrpt == 0) *brptmin = *brptmax = 0; } kernlab/src/svm.h0000644000175100001440000000235611304023134013450 0ustar hornikusers#ifndef _LIBSVM_H #define _LIBSVM_H #ifdef __cplusplus extern "C" { #endif struct svm_node { int index; double value; }; struct svm_problem { int l, n; double *y; struct svm_node **x; }; enum { C_SVC, NU_SVC, ONE_CLASS, EPSILON_SVR, NU_SVR, C_BSVC, EPSILON_BSVR, SPOC, KBB }; /* svm_type */ enum { LINEAR, POLY, RBF, SIGMOID, R, LAPLACE, BESSEL, ANOVA, SPLINE }; /* kernel_type */ struct svm_parameter { int svm_type; int kernel_type; int degree; /* for poly */ double gamma; /* for poly/rbf/sigmoid */ double coef0; /* for poly/sigmoid */ /* these are for training only */ double cache_size; /* in MB */ double eps; /* stopping criteria */ double C; /* for C_SVC, EPSILON_SVR and NU_SVR */ int nr_weight; /* for C_SVC */ int *weight_label; /* for C_SVC */ double* weight; /* for C_SVC */ double nu; /* for NU_SVC, ONE_CLASS, and NU_SVR */ double p; /* for EPSILON_SVR */ int shrinking; /* use the shrinking heuristics */ int qpsize; double Cbegin, Cstep; /* for linear kernel */ double lim; /* for bessel kernel */ double *K; /* pointer to kernel matrix */ int m; }; struct BQP { double eps; int n; double *x, *C, *Q, *p; }; #ifdef __cplusplus } #endif #endif /* _LIBSVM_H */ kernlab/src/cweight.cpp0000644000175100001440000000427412234152620014637 0ustar hornikusers/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 2.0 * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is the Suffix Array based String Kernel. * * The Initial Developer of the Original Code is * Statistical Machine Learning Program (SML), National ICT Australia (NICTA). * Portions created by the Initial Developer are Copyright (C) 2006 * the Initial Developer. All Rights Reserved. * * Contributor(s): * * Choon Hui Teo * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/ConstantWeight.cpp // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 // 12 Jul 2006 // 12 Oct 2006 #ifndef CWEIGHT_CPP #define CWEIGHT_CPP #include "cweight.h" #include /** * Constant weight function. Computes number of common substrings. Every * matched substring is of same weight (i.e. 1) * W(y,t) := tau - gamma * * \param floor_len - (IN) Length of floor interval of matched substring. * (cf. gamma in VisSmo02). * \param x_len - (IN) Length of the matched substring. * (cf. tau in visSmo02). * \param weight - (OUT) The weight value. 
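 *
 * Example (illustrative): a matched substring of length x_len = 7 whose
 * floor interval has lcp floor_len = 4 contributes weight 7 - 4 = 3,
 * one unit for each matched prefix of length 5, 6 and 7.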
* */ ErrorCode ConstantWeight::ComputeWeight(const UInt32 &floor_len, const UInt32 &x_len, Real &weight) { //' Input validation assert(x_len >= floor_len); //' x_len == floor_len when the substring found ends on an interval. weight = (x_len - floor_len); // std::cout << "floor_len : " << floor_len // << " x_len : " << x_len // << " weight : " << weight << std::endl; return NOERROR; } #endif kernlab/src/dspcg.c0000644000175100001440000001631214221631215013741 0ustar hornikusers#include #ifndef USE_FC_LEN_T # define USE_FC_LEN_T #endif #include extern void *xmalloc(size_t); extern double mymin(double, double); extern double mymax(double, double); /* LEVEL 1 BLAS */ /*extern double dnrm2_(int *, double *, int *);*/ /* LEVEL 2 BLAS */ /*extern int dsymv_(char *, int *, double *, double *, int *, double *, int *, double *, double *, int *);*/ /*extern void dtrsv_(char *, char *, char *, int *, double *, int *, double *, int *);*/ /* MINPACK 2 */ extern void dprsrch(int, double *, double *, double *, double *, double *, double *); extern double dprecond(int, double *, double *); extern void dtrpcg(int, double*, double *, double, double *, double, double, double *, int *, int *); void dspcg(int n, double *x, double *xl, double *xu, double *A, double *g, double delta, double rtol, double *s, int *info) { /* c ********* c c Subroutine dspcg c c This subroutine generates a sequence of approximate minimizers c for the subproblem c c min { q(x) : xl <= x <= xu }. c c The quadratic is defined by c c q(x[0]+s) = 0.5*s'*A*s + g'*s, c c where x[0] is a base point provided by the user, A is a symmetric c positive semidefinite dense matrix, and g is a vector. c c At each stage we have an approximate minimizer x[k], and generate c a direction p[k] by solving the subproblem c c min { q(x[k]+p) : || p || <= delta, s(fixed) = 0 }, c c where fixed is the set of variables fixed at x[k], delta is the c trust region bound. c c B = A(free:free), c c where free is the set of free variables at x[k]. Given p[k], c the next minimizer x[k+1] is generated by a projected search. c c The starting point for this subroutine is x[1] = x[0] + s, where c x[0] is a base point and s is the Cauchy step. c c The subroutine converges when the step s satisfies c c || (g + A*s)[free] || <= rtol*|| g[free] || c c In this case the final x is an approximate minimizer in the c face defined by the free variables. c c The subroutine terminates when the trust region bound does c not allow further progress, that is, || L'*p[k] || = delta. c In this case the final x satisfies q(x) < q(x[k]). c c parameters: c c n is an integer variable. c On entry n is the number of variables. c On exit n is unchanged. c c x is a double precision array of dimension n. c On entry x specifies the vector x. c On exit x is the final minimizer. c c xl is a double precision array of dimension n. c On entry xl is the vector of lower bounds. c On exit xl is unchanged. c c xu is a double precision array of dimension n. c On entry xu is the vector of upper bounds. c On exit xu is unchanged. c c A is a double precision array of dimension n*n. c On entry A specifies the matrix A. c On exit A is unchanged. c c g is a double precision array of dimension n. c On entry g must contain the vector g. c On exit g is unchanged. c c delta is a double precision variable. c On entry delta is the trust region size. c On exit delta is unchanged. c c rtol is a double precision variable. c On entry rtol specifies the accuracy of the final minimizer. c On exit rtol is unchanged. 
c c s is a double precision array of dimension n. c On entry s is the Cauchy step. c On exit s contain the final step. c c info is an integer variable. c On entry info need not be specified. c On exit info is set as follows: c c info = 1 Convergence. The final step s satisfies c || (g + A*s)[free] || <= rtol*|| g[free] ||, c and the final x is an approximate minimizer c in the face defined by the free variables. c c info = 2 Termination. The trust region bound does c not allow further progress. */ int i, j, nfaces, nfree, inc = 1, infotr, iters = 0, itertr; double gfnorm, gfnormf, stol = 1e-16, alpha; double one = 1, zero = 0; double *B = (double *) xmalloc(sizeof(double)*n*n); double *L = (double *) xmalloc(sizeof(double)*n*n); double *w = (double *) xmalloc(sizeof(double)*n); double *wa = (double *) xmalloc(sizeof(double)*n); double *wxl = (double *) xmalloc(sizeof(double)*n); double *wxu = (double *) xmalloc(sizeof(double)*n); int *indfree = (int *) xmalloc(sizeof(int)*n); double *gfree = (double *) xmalloc(sizeof(double)*n); /* Compute A*(x[1] - x[0]) and store in w. */ F77_CALL(dsymv)("U", &n, &one, A, &n, s, &inc, &zero, w, &inc FCONE); /* Compute the Cauchy point. */ for (j=0;j * S V N Vishwanathan * * ***** END LICENSE BLOCK ***** */ // File : sask/Code/ConstantWeight.h // // Authors : Choon Hui Teo (ChoonHui.Teo@rsise.anu.edu.au) // S V N Vishwanathan (SVN.Vishwanathan@nicta.com.au) // // Created : 09 Feb 2006 // // Updated : 24 Apr 2006 // 12 Jul 2006 // 12 Oct 2006 #ifndef CWEIGHT_H #define CWEIGHT_H #include "datatype.h" #include "errorcode.h" #include "iweightfactory.h" #include //' Constant weight class class ConstantWeight : public I_WeightFactory { public: /// Constructor ConstantWeight(){} /// Destructor virtual ~ConstantWeight(){} /// Compute weight ErrorCode ComputeWeight(const UInt32 &floor_len, const UInt32 &x_len, Real &weight); }; #endif kernlab/NAMESPACE0000644000175100001440000000532313271617320013132 0ustar hornikusersuseDynLib("kernlab", .registration = TRUE) import("methods") importFrom("stats", "coef", "delete.response", "fitted", "kmeans", "median", "model.extract", "model.matrix", "na.action", "na.omit", "predict", "quantile", "rnorm", "runif", "sd", "terms", "var") importFrom("graphics", "axis", "filled.contour", "plot", "points", "title") importFrom("grDevices", "hcl") export( ## kernel functions "rbfdot", "laplacedot", "besseldot", "polydot", "tanhdot", "vanilladot", "anovadot", "splinedot", "stringdot", "kernelMatrix", "kernelMult", "kernelPol", "kernelFast", "as.kernelMatrix", ## High level functions "kmmd", "kpca", "kcca", "kha", "specc", "kkmeans", "ksvm", "rvm", "gausspr", "ranking", "csi", "lssvm", "kqr", ## Utility functions "ipop", "inchol", "couple", "sigest", ## Accessor functions ## VM "type", "prior", "alpha", "alphaindex", "kernelf", "kpar", "param", "scaling", "xmatrix", "ymatrix", "lev", "kcall", "error", "cross", "SVindex", "nSV", "RVindex", "prob.model", "b", "obj", ## kpca "rotated", "eig", "pcv", ## ipop "primal", "dual", "how", ## kcca "kcor", "xcoef", "ycoef", ## "xvar", ## "yvar", ## specc "size", "centers", "withinss", ## rvm "mlike", "nvar", ## ranking "convergence", "edgegraph", ## onlearn "onlearn", "inlearn", "buffer", "rho", ## kfa "kfa", ## inc.chol "pivots", "diagresidues", "maxresiduals", ## csi "R", "Q", "truegain", "predgain", ## kmmd "H0", "AsympH0", "Radbound", "Asymbound", "mmdstats" ) exportMethods("coef", "fitted", "plot", "predict", "show") exportClasses("ksvm", "kmmd", "rvm", "ipop", "gausspr", "lssvm", "kpca", 
"kha", "kcca", "kernel", "rbfkernel", "laplacekernel", "besselkernel", "tanhkernel", "polykernel","fourierkernel", "vanillakernel", "anovakernel", "splinekernel", "stringkernel", "specc", "ranking", "inchol", "onlearn", "kfa", "csi","kqr", "kernelMatrix","kfunction") kernlab/inst/0000755000175100001440000000000014366206771012677 5ustar hornikuserskernlab/inst/CITATION0000644000175100001440000000147214366206771014040 0ustar hornikusersbibentry("Manual", other = unlist(citation(auto = meta), recursive = FALSE)) bibentry("Article", title = "kernlab -- An {S4} Package for Kernel Methods in {R}", author = c(person("Alexandros", "Karatzoglou"), person("Alex", "Smola"), person("Kurt", "Hornik", email = "Kurt.Hornik@R-project.org", comment = c(ORCID = "0000-0003-4198-9911")), person("Achim", "Zeileis", email = "Achim.Zeileis@R-project.org", comment = c(ORCID = "0000-0003-0918-3766"))), journal = "Journal of Statistical Software", year = "2004", volume = "11", number = "9", pages = "1--20", doi = "10.18637/jss.v011.i09" ) kernlab/inst/doc/0000755000175100001440000000000014656670132013442 5ustar hornikuserskernlab/inst/doc/kernlab.pdf0000644000175100001440000125114514656670132015564 0ustar hornikusers%PDF-1.5 %¿÷¢þ 1 0 obj << /Type /ObjStm /Length 4842 /Filter /FlateDecode /N 78 /First 657 >> stream xœÝ\YsÜ6¶~¿¿‚OwœšAì`*NJÞb'’ìHrlg&•¢º)‰voi²%;ùíóì]R»ÓºñÜ’(nÀÁ98û! ‘d‰L8w‰Jxf²D'\“˜„+•'6áy–%.Üò$O„¶Ô.¹rè•HQ‡De=d¢8ÏÑ3Q&T¨Ü‰„›D ‰÷6ÑʤK´‘€“':W*YbDn1Fb´p‘›c0™Ø …J¬Ôƒ'ÖEaë”I„MÏD"\âtFH%Î •%9õ“À˜_)’\⥔ ÐÖ †Î@¤p@!Ü2cÐÈeãIP—åÀE‚ÎÑRe¸9‰¸P Œ—'ÊO -\pP®hRd†Id¡ibYXÌÍ›È f—Kh¥D_ˆ‘hV] ÈÒ‰yÃ…3"Ñ€¬2 £‰C‚¦Ô«¨ +ä5±Àa¶0W\g˜.L ×ƒ× bˆ=&Ã@Öc@6nÁ}n„Á@6 Ó‚_n,µd›e˜@¶Ý!˜bH‚dk<«pá0¥$).á1©hÈN ^4äò³ Dzûÿù曄–MÑ-š"²ä8a/Gå`¿ÓTÃAò/H¨ö¬j’_ñjÒôªAY'4ÿüUqQ»eÂ^×åô­\CÈ}›ÓO#¼Œ!zËäÛoý¨û“ær8N¾9/ÏϳLqÐÙÁɲgº/q(Ü œÑÆÊ,Ãe¦ÎÂ{ÿœÎÔ¦mŒp¨=`‰N쳟žiÀ3ÝØ¯`MÛŸ…þ4Á÷Ï\„+"nÐUsö-(—ÍÖ“¢)“O¾™€†BÜ€½0ÿÌÄ?²ì_Åv ùÁAqZ¾M®«æ2¹ÄÜŒÇå9^ÿX~ºŽ»u;'æ,âÛŽ§ž„‹Ç[ÜLÄËÏ•œÑ@מ†,-î-=ÖD˜2‰ï§ðyxçáç3Æàa ßV-òÄ÷‰<ó8ˆÚv~œî|;»mSƹXÓÆã׉t¶t´s¶¦=ñ\ª¹{þA~ïbÝ«ñ°;é”àÝ÷¯’ï/‡uSwÆÕ¨áJ3™r´9™œ½/;Mò×§UÓ+odd6'[tç„´\B”®³9fDá÷‚®ã$˜Eæ©[GÝ&8ÙlÒÚɉÚúx84ä*Ø$´5 Æÿå0þl²xæþ,D|o‚±:{ïÏy{úcêý¶ÝŸ"T Áy„slÅáÂó€KèÄ#h:ÿÚZŸ`ZÛ3 ¢‚ÇN\¶­M9ÀKˆ@°^‡e·* ?¢9 ¬s’ T<…3ú• Ñ¡µuÇe=œŒ;‹zú±ùþ¤!Ù‚‹†à.šUÈUç¤l›½zò È–ÉÎ~ûí<Þ+hGâd˜ˆ8?2L²Œ³bâìÙ8[6ΞñÛYÏ:ž#Ïlœëâ9‹N‚»/:î"<á¹ÏEx®åO„ç"¼<ÂË#¼<ÂË#¼(#<ðò/ Ï#¼|Êð9Ú]²*1ÇBÅqKÊ@† F…ˆz\…T˜(í* ©Âl¨E(*@QŠÎÖLˆÛå„h>7Zl1Šºô½ÙÑ«·G¿<ýçãÃGˆwÙÓAgØ­I”F„ãºy|YŒ‰r?à“2`øÓ@/;(bƒÞ7~<•·ƒ‡ÁÞTÝæÒsÚ"¼¤SD»É±øc5ñÁÂ."TO…ˬ´þޏ–ëp'…àNú~ Ñ×xéPV`Q+EÑ*1†„µó¸àž0÷÷ªéÚzÞ¾§çŠnŒ"óÙ3—Îþ=ÁFû_§Zwˆõp/²ž—ÕÅeDn1¹ÄÚlŸ=fOØ÷ì9{Á~d‡ìˆ½d¯Ø1;a§ìgV°3Öa]V2ß™•ƒnQ_²sv^]•ì²Ä.Ø%CèsYXÅÞ³¬ÇúlÀˆÙ ñwÄUÃ.ûYÍêò mëê#kXs9.KÖ\Ù„]±kö‘}b°?Êñž—䂨#eÏzÅE 7åÿ(HüÜ‘›SHìi÷¯ŸU½ŒãùLªŠ~¹,›/ÆVýÁ)kJÌp1lJÀõÍÛ›ÐÃß…Ë`gúU jfqêa?µ;U¯»õ!n룭¼» îæži‹æGÌÁü,Z„Í¿ÁüD!:ÙO΋^]¶­fn}Å$½{N_,Ú$·l“¸X5Jjk£¤}ÞðW~d©­?C‰8ŸYü;ßrÁ³+*‡á AžÄzƒ¸™ÞIGzK%2<§ÒXnBªæù‰±ˆÒ‰ëc}z—ûXuÅp…̉Žöš°iïft'È$S Š*SS:”àT„yÅSúCEA™PýÞyÓ&¦Ã­XØÇyKâo7´$3·ÿ&:þ›mB¬‰Z%_rÿ¤”ÍÎ^·êf³E}“ükΩ\Ò–ôm.ˆ ú¶$îó ·i¾Yå Ïwy|ó+êõÃá“Óƒ·ïädA½ ƒýA]ÍÌEÁrUÑÌ’¢Ánèü½k]væëœ|ûcHÊéðËR_ä-ÎÎ"Z£B0AŽwÁ#A "æeÆ›vo‰çùïä4ÖÓÚ¸ Èükk×r_,s‰›š[~S´×²{8î–ã˜oy›ÿØßðpƒ”2µùÙÛw¿€4Ë‘âK-Óœ†Lz=jûXQRÖñ=D–*ؑ۔¾®R¥Èjä©4šÚOeà |h±ô‰èJVªC’ë4:$™:–¨(Æ’O¬øÄ‚O¬ó˜Å(&–Š ˜%)b"–(b…"(b}"–'²YZü¥l6”c"‰Û&¯ûÏ^¾Ø§áä`ÑA¶©ÄLoÕŠ€´‹ôU¬(îÍ?V¯¿ÊŸóàþŒ‹J|CJ:3Ù­!p¶6öŽv)øÍbÛìæTô#ñܧ ¯`" ï}‚ß)gi'¹™yÿ\Ê®äEbÖ‡lYx—çr™«ë ²$ ›ZåîÎåªì=ôÃéÉOÞ^½ØØ}èÕ8­µ„3÷±©Þæ(îþiÝÃçq¹ƒþ‰[}åyÀt ÛO\S.Ïé¦,¤ äV'0åÚË7ŽN)¦·›zül•e+¡5ßÔå+eü‘ÁUÏlˆ6ùFGhí>Ãúl$ ’,‹x¬kA^Þ,}^ú*ã Ä“”·î Çåmµb1 ×õÒ·™ðVûÅ—+ÎpåÃð’Þè ‰y‹Þž´ñûMIãÚªñFæÉ®1O9Œ!­8^kàèS“ $ã<¥•:Ræ)­-6š§Â™{6ëBãBÞè©…J3+!ó*¥Éj½¡‘ Î@ý­¸µÞs8lZ’+S… ê™Ò—ê¿5nÁRe¦¸‰,K5­7þÜö}ô$a¯_L„•Áþу˦}ÍØõõuÚ©«24“´ìNÒæšýÙyß«¬WÕWý¯| 
š`",Ð]P—¦§£BÆ!RÕtJq”áÚÈ…Eò[»TGY—¢ƒtl&Ö;þr`¸}| EÐÁ.p:Úzªõø—wÎmtYíä¶€ÿÒýªæh :ÇÜ›q÷ÅVïͶ_}lý—Áœô—yáË>š7dÜ“©0¬?|zÛñÂ| 77°3Zõºó—¾{|nùŠ%kfvÞ]hý‹Š½Hýpïñ¿¨;ÿ¢„ËlF³wn,ŠDñn{°›mZLF¸U¡¬Ò´CÒ·ü÷ÆÃÌjƒ&’9½K;ÄÞ1TçH¨fðÀÆŽ' Í3-¼83++›˜ùÊ(u°,"Úß·tq(¶¤´Á’:HÁbý,=“ä’~8HêžžB§JŽdðÈLÖ~Ýg·\o½qXQÌWû|L<ŒR¥D©(5SŽ絫grFÂi gm|ÁâuLÞªÜÔF2ÿH³ù9{rQSY OÜ¥JTpøÞ–˜›„"íPlnüîdø{Ø&f¨Ç—T”‘ßæðø8X£rSƒu"­>»õ~+Ø¶Š…÷ˆ¡­7Uå»Ý}©„Zä3=h]¬ÆýRÄ™ç/ÑTÁèÂ?_´~­ë˜-9k4˹¤¤UË”ªÈ¹ÚÌÌm^øù©ƒ ûë¹âÈÓ’ûgíÕã{ƒ‘2XœÊït‹Œ•£ÈãXb¦Q}lwƒ—Ã86æ”ê4i<ç««Ë÷¥/M ˆò\¿†‹ÀlŽ¼Ì§0´|{VbIÈŽuž© ĸ&\ª9ž_Ð|ª8.×î]íœÅ‚·)x"$;HŸÛkèí/ñ¶°‰¼ñ1tÀ?è8ò¦‰h'2Á,Í«Òzæ.úײ¨(93ª¼¿ìX gû‡!ßççd¢"&7U€·`[œ‡û›Ü¸ž;½àöšJÄTg©s;;Ù³ûøUgÝÖ~ìÙä¯~ëä©¢æâsONÖ7Õ¢SL“ìvÂompXã[£1ö—Ý©Ù\•¶9çSļøÔ×/8,.2Ç£ÞGSÐ6f;Š=P·;{ñ»ñöuF£ëŸú.°ún~äubÝÍy­!ˆ‘¥D$Åä'gíâa&áéÄDŽÙy> stream xœ½}[³]Çmæ;'?âTž6SæÉêÕ½.ÄS5S•¹%SãØšÊTÉ~8¦$Ê6Ž,J±ä_?ø>\½¹E;ž)>ð  ïh4ë÷wË}¹[ðÏþýøb¹{óâ÷/ ±wößëÇ»ÿüÉ‹¿ýù±Üõû¾¯ûÝ'_¼Ðå®ôåþÔ±÷½nwŸ<¾øôòŸÞ¾ž½”¶]>ùj¹¯½·V/ß}ž{o—‡¯^½.½\>ɾ,µ_¾ìzö²—§w/_ÕZïÛR/ÿ” *É~”ãòð-ðËq®ÛåIÎÓUàl}¿<}7$ýDYžW.÷ÛY׺‹‚üýèÇå(u¥_Áyí§ ŸPl-Û¾]Èk½?Ë–Õûîf]¾UÚ~ùoOÄö¶ìýòÕ ýÍPçwÀý~¿úäH»·3µ{]×ûr4iûO>{q)õå'¿’"-?h^µµÞ½ªE:¦Ð}zù/_mR²Ÿ«´v¹_–¾KSÆ_ßÞÀýò¾‹¿¾Ž¿~}s£ðÛeùò%*TÊ}ß6ÑëŸ_|ò7“f·}å?—Ë?òÕ@þeTûôF‰§øëñ×/â¯ïâ¯Ç[U½ÜüëþÅ95Xü¹Á}üµÝø+ÿi`:k ýŠýúQéý.þú<þúìGºúc9þæÆ¯onµø‡þdHíÓný|«2ßÜà8~}›[9þüë¥Ç ù"5ÔûctÌà¿þ°Š_ߨô77Õy{£EßÝ8aŒ„ZÎûV»®QŸ¦Ò£“·d—ø}¹ñ—ŒÒuüüF½¹Ñøon4óÃöùò¦fŸÄï?¿þwüõ׫Ý+oŒWºîõ÷Öã?g¢¼¿nýÈÓöW?2Ëo¨óýŸ>{>VïWN ®Úøjåy¿ÿà 1cdý]üuÆ_û-†c–}ý#w{rÝh¼ÑÊ_Ü Ì[Ï­1øúËòáV›æÌûƒ}LçÒ²ÿþšòîîv7 ÞooTìæòñá!õÿIÌOnµóÄßßhžAwËø÷LÑUõÆøýíɳ\·Ü?~òâ_ÄXÜÄÈ+ÛÚ½XŠmß×û½Þ•ÚÖûøï›Ïïþõî«ÒõhÛÝÄâÛíî·rDø¯/Êyœ÷u½ký€­z÷˜1û½¬rbW9V4iÆûz<ï{€¯…¾·û£&‚ÞïÛ‘ÊL ,a˜½‰©{78ìbQ¯C„‚(`*øï®¡—ŸëðúÅ/^¬{ínƒµ¾R¯„êýnÝ[[_à&Ì›ÀÛr¿”€_K‰m“SA¢ùR£Á!`Ê@ Ǭ;j48¬²gô$Ca”p-œÂµts=X·m)÷m\ßîå¼ð80Mþ/ÛÀû=ÁMz]ÀBæ‹Ø­÷gM+Ù›”pÌÙï·õ.8ÔS:ï2 F U"\kg0×B{MÆÐiLö•½æ9ž6´ù¹cˆ³”0ÙûŠ¡d Ú³Ÿ÷k Öc©÷KÅVR 02ˆNr4G»_¶$AaˆpœÂUts%´be¿/hïµÊQ’ L¿—Ãݺ¯Ë}—¡Ñp¬W €C.†@Kõhâ0e°„a¤¯äð98”õþH"‚Þu°ß]E/>WÂæÙq¿ jÃÁY§™!d`¡ Ö ßMfÆÁYwʱ7`βÆ9Ûyl‰CÀÁ†‘¹SÑÎAæÊ^’ …9ËL §0%ÁT Ô«tévù»ÂkÇú10"mƒ¿¤ï÷}ÜïWa²,+´rX¤.Ëy¿m‰¢ÔûZƒÃ€UJf½?Ï»ÁAVœ’D|ýbè@DRšÅ¯+¡¶H]Ñ2ZmŠ9FÆkCãHï74§LÔ‚%I ßu ˜ã„#P¬ùƒƒÃ*ƒ%³r1û‰Ù7d(Œ®…S„ÞÆa®‡ÖMQ™Û&¿õκ9¦À¤CswyÛ¡3¢ ˜sF4Ù…þlÅ (w+‚ÒÈ·VP¶!¬kÁÚ`6‚ ŠP×8Ìêk•'·ìð+¸?f ÇľU-´IŸs“!Þ榴aú Ù&YCc % €cê®k®3¨¬‡…9#M §pÃ\ Ýź®>›-'k60'—l`zTlìñõ ˜cDÖî3Q`OÚƒ) ³÷ )Œ4ÍÚ“…Q•0ŠÐÑ8\Õ‚5+2Ø;¦©l-ûóÀtÞ"K¸´S;Kã^X*÷D‡Enimš ÀÚ”8H  WÄ!»öo/~ô £,Œ¦BP˜ŠÁa®„vÙ¹Üïh 1Ðv®ö麤m§ÈÅÐèºäm½ÀdsV þe`ØÀç`à J° BWÄ(~pðÿCWÄ¡€ý[ñ¹¶".°Söõ@C<„´ÐJ•ÄÌ<h 4²Q øµîå²­ ™Èµ%«ˆ×/¦è*ãäÿ3‰ ÈV0ìwWÙJO5Юª çì.ÿ³R±ÃxcËí­ÀÞYe—çŠl0š²íœÔA±._Á!àE͇@È⎩âåŪ(5IP\§pÁT«Vc÷îbúŸ]ëeY´vê4RöcƒÝ&R­å¦šÖ¸N!†¬¢àà°Ê` ÃÈ‹/8ÈF^×$CaÖÌ´p ×Ò9Ìõ`ÝZ߬&4|%F³ô³l,Ðfþ†ôë9[‚ ìª¸3pXE°À•/fƃ–ü$€ mzÓ@*ké«*Øie§}i=¬beÝEä‚5Mà+…*a.lgV… t±;‡)‚Q±ß °§Ö$Ba¶ƒ)ᡵq˜jAñûFe•Ñýk ö‚V¸³±ê¾Ã-‡˜G²<8,öÛÑybÒ~[M¦0„l¨b,²Þ˲8D(Œ®„S¸’Îaª… ÄÊ#Ð!$­è@4ŒaÑ–Ã>§ò!çÖM6‰³ÌÒiž…ìc8í9‡€U{À1Ýà vÕÙ’ …_¿ZÅÐ[9\ÕCG£ìÛ8œÐC·0ÇH›sG‡å¼ñ˜c¢ š¢É´0†F0pXE°€aóÁ@–# r×À~w½ô\í21-q:GoS9i ±Ë*Z gq´”lT¥Œ¶lð”%ŠvÜ«w@¹“Ü0¢;¬‡(¾W8@†…QÂUp ×Ï9Ì5ÐZÁ0®7ˆ†9±ãÉ‚ÖdZâÄzb×Â’à ;¾% 9¼ÊŠ<8l2PÂ1b#lXÔŒÃqVf&Ã`”0-‚Âõvs=X·*g³ƒ@vS=«FÚ‡*¦Î†ºI[o;¸T5)¶³‘ƒ@Ö*1r‚AÀ*‚áuóiüV†_¿:ÁÐZ\Õœ8r¶ƒq+ ¥ªÇ14¿æiçT€Ò,ûî }2ôÖøï‡ìBkÅL,`Qs­wƒÁJë}HP˜NSÁ)\EãpU í2ÙÔ1„ÏBšÇ„YIµVxó¬O€ÇsÑœ2eeÑßá.L¥ Tö wâîFi©­Ôf°W\¾S„ÆÆa®ÖJÎD‡­6Mkå1sÐUØ6.8NZ®§µªÿR“…˜ yA‡•5É #ÿoTȊçµ'î £„ w WÎ9Ìê³J+¶SY!» =€ ÌÎåMFüžr|(Üî×cgÓ,rW91l{¢8j^ìà˜SöhøãœÃ){4V8—a0J˜AaZ‡¹¬Û"-tpnÈçº10;mÍEZŠ^'ÙŽv€j9 /“˜¥& Œ£’L,aé.˜ÈÁa%~ÈP%L 'pÁ\ í5LÔ^ŒÕ•«ýÀô­˜6=rêÃ)X†¹ÃhQmOb ¬5q˜2PÂ18  ÍN5ÉP%\ §p-Ã\­ÛQ0ZÛ!M®ÎŽ9p"åè1ÐŽn#ŽÙæø:ÓÏXx[*p±‰éÕ…Åq–;w…9M¾S¸~^|®::dwn!Qì±9u–,z+Ð¥•áB^—ƒöªÁ+[AQx¹28¼ÚÐ Œ|´·sv¯5ÉP%\ £-ÃU=´n0Áeeê°²¸,v¸`v®€7,æå(4½„‡å8h'øïXj*î° @ÇÈlÃj w¯!Aa”pœÂUts%Ì;ÅÅFh¤Õ9eˆ[žÀ\}–-’¾£EÅ*Lß×ÀAѸFS½YŽXé •fä¡0J¸NZ‡©º2öUçbÔh¦llò¥³¿î<±Ëz€ºpÐ ‚ÂA V(˜Š±3€¢ ĺh*8ÁPšå¯ê õ’*ÃI‰§Ñ—˜‚ 
^wz5{ÑEnÙV:¹ †Té¨ÖÅ®‹ž3pP% @`ž¸v&œ¸]„ÁÜ"T‰ ­Ã\ =Ao¼¸ÝÊÉ[ÈÇ„Áú±è![”Àr:xòè£àk=rï}ü~ðfywø´‹Ï„¡Ëi0Å»îI‚Â(á*8E(mæJèíѦo­;}sÂÞ}WríZ`õLŒ»£Þ‰à8y ^uh Låþ *~HP\§pÁ\ ­Ùª·ç¹£…¦ÃX/½ê}êÙi •®‡s‡!‹Õ‘(ôx T ,à˜n¶Áà y4D(Œ®„S„ÖÆa®kvîÍÖʆ)ø˜0»öô¹«+…q;`ø SPgŒÿ~n¶à[q‡U ¦qv ²ÊÂ?$(Œ®‚S¸ŠÎa®„VL/Z‘¾“ ŒnEË­aîT² m4©}'+8íѶße|Ðôõâû.30º ºO ¾“ œÂUts%¬b—†8n”¢s̆ý„\ºjø’DnSÓMajº¨yçŽÍÁÀAJ ½"ps\îFqܯÔ$@a«Up WÑ9Ì•Ðu±ê™§«Sò1cè¸ئæJ¯J˫Ԁ±l5=iŦ«3ppQË9p“œw£8¯‚ wìw×Ï‹Ï50k±£H£²«µh¾=€åÖÐV^ÎpÇ’å0m?l…˜­%Ÿîÿ Ì¡wÕÁáлêq¸ÿ'´0ŠÐÒ8\ÕÃz¬j/â^{Õs ù³Á`ƒò¤ zVÙ_V¬ÿ.¸¥Ò6wë÷æØ„¼4Ì‘-±WØ:ŒòÂõss X«}£í¿!`¡s ŒLéæ]c6è Ý`Æ|9,b÷cå@PˆMKž1pP%°€ape|Ü 7&C„Â(áJ8…ëèæZXÍóéÑC÷˜0ê£+Ý¢óXz»l[hL¹æõs 1Ïé°v« ÖÍ1Œa0:C> u­à¬>§×)¶î4rðˆ90ꄟ ›Ã"\duø¤óÅ`ú|hª ¨i.½˜]É‚jÓ“ …Qµp ×Ò9Ìõ`wá–½²1º:=÷½2|w \Û°,èt¸dŒ-«Þð8…ÈÙÖÄÁa•ÁŽÁÒ8ì\‡…QÀ•0Š¡6\ÕB÷0éAYžÎ9 G`ožÊÙ+=‘²Å8ÑH`çÈRu¬ö[—•,•s°ZøÉ@ð–v”]yM;X+Ì8M“ì¡«r¸Ò]'–ì/Øîvåö80ä‰^¸ì=y÷u ƒbD#"n¦ßq“p¦â›p ÷¨`°«z!Á`”0‚"”6s%¬b×Ç~ «˜c:fªp]9ôw®sëAC`·eÑ»ÿ†øÑž \ÕÕ30Lc•¬xe”Éà^-êdw WÎ9ÌêÛ¬ªô#Ú`ÙuV9†‡8O½Æ^å‚`sÈóqP ^e0pQßc ¤71H½8¢A0†]€Á¯_„ N*‡«JÄÝ BY6Æ<&ÄNßn5VúÚÔŠkeÀ¼Z9éð ø˜Úà0àÅüXË«ãPÕ2ª»HC £p%Á\ =|¼‰’)¾£äcÂÈy¼0°j(ô¦S¶w^9Ì DÞ;F`s9‡« –p 5Ä>’‚ J4ì÷PšÅ¯+¡« B¨ œÌMCÐn<#¸Uá+*¸ Á|2èÅa£÷2(ö¸%«®lŽi¸ÿàÇÞ“ …Qµp ÕØŠ'õ9å ‘²H#7^@FÎܸÿ>àyÙ3žIà“;ƒq§µ¶ô3üå5wXÜ1 C\µxÛöܹËwŠÐØŠÏ5Ð^’…NãO¦ÀH{ÁÔÂqˆ{ÈéǧÝÞlzQp¥·‰¢IË'ªÒ+BνX:£8âf‚vœëuüîúyñ¹ ›óÂywXØM`Õˆ¨™ƒ¡î žFÕœt8;̨³ÎûÆ pM T ‚3ĪA7^iâOñ¦€ý[ñ¹ÑW¸_ù|x_)†cgr\á¯2ñµ¯Ù fÛëÒM—âàðêoA£!bƒdõ$Cas-P §p-Ã\=H«Ãûß]%ƒÀg¸Ö ‹ÆÅ8¼²pó nÛ¾j`µStgƒƒ*áõ‹„)ŒÕ ¢=í.BazóM §­Ã\ ݸd6bxãtÔéêzªÖ*ó³Â…ëì2?Ë0/‹n:Å®s$8L,aY/ñÎ%8l~ïl2æ=¶iᮥs˜ë¡›Weȥƪs™ŒŒõëUØ‘ W?Šz×-`l-ÂÙÅÖ91‚ƒÃ*ƒ%³ª³Ñ9¬<Ñ «EU -œ"ô^á™ê¡ë=ÂúÐçfg¯Àtz¾Vx‘ºßé¸ó:Í`¬Èº& Š“¡vƒƒÃ*%sÐw68ô•  £„kᮥs˜ë¡®®wH+W8ÕÂåñ¶L ¦º]úÅÖ4bÜ9lOy¦éüàÐ:d4{h3´p ×Ò9Ìõкaá—rê­óÀ¬ËÂ.ãƸò ]øbë rñªëLXÞ÷ÄÁa•ÁŽá“ÁaaÄÊ¡0J¸Nz‡¹‹¾1 ÿÀFú80'n,µØÃLDŠÜh±è}¿Ëp:Ri½¢Á>ŽÒ‡;û#"Œ]~P„ÆÆa®…ñ:ŽeŽMCŒ £Û¨À»½O8‚‚¨ŸºÌ8 .·ƒwõ5qpXe0ÖÈ1‹ž¯œÚýL2F ×Â)Boã0×C·5D}Ф¬úÐ4axm%FC‰Ûè'ì'½ óÄT°Z ŠNÅàpÕWJ#烃/½Œ=;.@è!Ÿ¿¹r^pVßqvíôÂï1cø²n]Å̦Ó|eÀÄ kíQ5+æÕ hvÖu»ë10‹¾. Ý€!bqÏcèண3˜ka!ÛzµÀE«!ÛŠ(Ë¢^ðC/®Šßc N¦ŒFŽŠˆõ „Ù”ÄÁ`ÁÉxrråÄŠ®C‚Á¼¢V‚•6Sì)Õ¡ïü0ô˜û_ÑÀíKeÜ&:Á0ØÞRm[¢@”MM^Õå<0§FÄ9‡“`MÀyêÓ7“ÏßB9+x¥¾y]åËÁåä1cØ<6Kc³p‰âeê:`žò ±‚¡ 5qxÑÅm`vÝ‚¢Ø$CaúL §p-Ã\]äaá™m¨Ï¨†á¶+Æ4=¯‹÷eL#|Ë`;¿Ó{ëx£»'{H˜Ó}ʾŽ2D(ȘZÕÁ~­øU%Ô >ª5†?V†ê |Þ¯êÆ8aož4Y¦MÛiÌEozà ö׊i;_U‡FņŒ-aJ8ëè æZè劬><&|jü˜1~4D2v¶_c\*‚)`¬0.>3³&Š §ùÁÀAJ ½"ö“êQ|×ÇŽ!`÷ç¡‚S¸ŠÎa®„Î3œØ0>õùcÂÔ†¥»àõ2‚ãÉò}Ó…9ŠúŠœó«&« –p _h j?d(Œ®…S„ÞÆa®‡®öxžÌÜ ÕŒàáɨàñ#¸" ¹rñVw´Á<¹l–ãA)I]‡W3‚#óÙ)œƒ,åL2æaZ8…ki®êaO#:MóT>fÌ¡±ü…/*›ù:e?]4FWá×úD¤´DQÍcá.ö8:0=«ƒÃQ’€CŸQ¸|þæÊyÁY}­’Þü6\mŸ´}³k¤mj=‘h¤­°ã&cðk}¨Äó‚StM Vöˆˆ˜ÖïFñÆË…!@a» Ná*²ø\ÛÈôeûŠ‚»'XQÌÁ-ÙÕÅÕWÚú4ÁaK±²¯‰¢3(fp¸Ø-z`Ž]cœÃ¡ËB†Ât¢›NáZ:‡¹æèº=L{ò˜1;¬†‚°Fn¨gQ—áÂ|Ûé6¶qž—{º®CÀ‹®£ƒ-v»v&W2æ$6-Œ"´4WõШKÄÝ!ò³`›{L¾lðÐÅöO¡<ÌJ&›³M SJ8KîbœƒØ–GM2f`§iᦤ3˜*¡ãqÑëΠ¹Ç„QSE` ýYaÚ¬¡„Ë`ëºqbåúààðnOõ†®êÁa;uùv ³ÏL §½Ã\Ôí_ §º.f Òµíwµž|¾Î`H¶N†t Õf¤QâÃ%ºóVC}f¢@T>@! îpÙ7ˆ3¨Vøª°`yIT ¯K¦ L¢ºÒ4ë>¨>”v…~¿÷.žÓÂîØoÅʇXŒh¦…}µÜ·—¯*†ëÚ­H‹º#Ìæ"3L¤h‘Ÿôõ¸ Pd¯ŠœUf =†•Ú¡ yƒjÕía2ÕVpo¿'*ÇLT>ÿš© 3QmGGpúÝ´L$Iïld4ð+´tA‚Môr Mjåµã•NóVÆs•­X+ãy߬•ñ˜~©Ï´ò3™ÁnOš²‹MOwú4iþŒÑ³2 @7lˆ6z¾y‰«úUt}øêÓŸM::š¢â*K.%úTdÛXdýÕ¨!Ô—FA¦©?#ýYÝÄYFr²ÇŒaÝ+îÞö=Ò™Ud8Gz³×RŒ3S0%Èà°¥'˜ýÀwpЄfCÆnkC §p-Ã\¬† þ.4d¤@K>oh0j}c¬Ê¨Ÿ¿–­Û”1 ³²ƒAÀ–l`dÁÇ}Yp(΂¡0J˜Nà::ƒ¹¬.Û˜VÍ ¦!JHJ•δ)Ìd#t]ÔÄ6XÄâÕPk‰b¥ýŽôd£'àà€¼_¸†qK ×Â)BoãpUí5éÁ¶¦hÓéȘWŸŒ8%õPMÚWÓÝ(º¾i {†²9‘88à[Aô®ƒýî*zñ¹Z1ä’éÉFN8—Ò½Ê<^Ì:¡«Ñ0ÄÊ Ø{¢¨Ì9888ìÊF6Ž.cPhé Å,¿¡„S¸’Æ`®…O´”ÍÁfIcâs&¶Zy̨¸èhsŽ‘rPlüpØ.M7•œcÆ—! 
ZýM>£f^*ijÔ]Ž™G¹)Ïfã#éÊ}ÁÖfÝ-q£Á²Ná–¤™¢ëƒªà`°g$K†!oC†Â,aZ8Eèmæz°‹VœòJÊz60217«Ì^Ž„Î0%L6é0G†¾ß ŠU=·ÁÁaÏH–0|`38ìºþ‡ …Qµ0Š¡·r¸ª‡¿¢Kjd=±ª¹LîÛ†XxÀ+ƒÇ ´Y‚Ã]T]˜‚Þ™,a =Î`¶3DÈÖS‚ ´6s-´fˆYÏÉϦÓùÅïÈÆµóÞº"Ñ0çJ×QN¡ŽÁ `sö ÷¥Á`eb˜!bµÔ1¡ƒ¸ŠV~®ƒŽFY÷ñx"RŸ% Ó¥4œôõxÐ!-p×R˜#Eo œ¢‡[ˆèÀl epK›DÄ~ç:Øï®¢¿ª—+w˜ãjÑøj§ÐG¦Á!`OO60…ayƒ %ÉP˜ aZEhi®ê¡u+|))Ðâd˜ÂªÑ|P&ûyÕêfÌÚŠ|ek¢h¼ÅÖôd±®ºx;ƒÂìNCD±üO¦ÿìêyñI­6”‘šì1a¾xødœÌ~ô{ŠÜ”j uÜ4 ›ý.«‡¬wØó’ ÌÆ;ïÁ©Œ{’ 0+e*8…«èæJ˜E¿j®8Ï}60bB®´•õ…vp! ¯r&5 ØçKöäd#§(ŒÜà€°µ$‚ ìy×Á~w­øU%´b]R{ö³Xù¸DdòuèvT¾˜yM¦–GúYìΞJhYɼÐð×¢‹}çÅ^“ÙF1”Õâ³ò\ក¶‘ôl vÎþÚ}[w¬2ïlDƒeñ…“ƒ—âÔ4^ÁÁaËH6Á:ƒR¾d £„+ᮤs˜ja#pѺzÒ³Ùh‘|Ðï|ìlªtlIÍçÇßV1e˜mÎ ìyȆ¡[£xå•ÿà^-(`wŠPW9\©¯«EåýÑÈu60;ßX5Fòâ™4“,ÈÁ·’sKY-׉Qà–`M ö\dÓ˜ù à]J’@ë‘j`?»~Vx®€öSÑwŠ‘él`NBfæ‘Ã;¯ ¸3G†ÁhKä‘ÚEk–ûL8è™ÈfgÆÈÁŽÍ$aÓ\CûÝôâsX­E_òTg ‚í{±17#\…@ÁAÕà×ÚxµöÌÉ98iÈÑ×»àÀòsÈ0XJ¸Nz‡«zhÝŽS“´yª³A@Ârò!Ȇ3cݵ¢ë0ÔÄ[Ÿ>(Jaša-€§ Y${»Åqép& £V®‚S¸~Æáªz89›æìõ4g Ã[xyšå<â GóÆ; ;H&õY2Åiù½ƒ–ƒ,alƒ¦Y"<ÓP"(\Gç0ÕBûkÝìi°å9˜•ù=ÛR=íÓê6„R0%¡ÂhQÍ-5(6ÍÎÁaÏD60 3+\ÎÁÄs£„i®¥s˜ë¡uC†»~7² \ÒBK)–3€ç… c݆\œZ¢À[øš88ì)ɦ2«Íà°2ZgÈP%\ §p-Ã\î͸)X”öl` ß WDÃÃôê…A]µïúhÈ`Ù9‘D¤ÖDqj\uppØS’@R‡à€v´$Ã`”0-‚´ s=X·ƒóFÖ³àÛ”ŠçO'ÝH|”_q)Çð¤fô+vN†Åf°qØR’ Œüb·•öÝQÍÞZ8…)é ¦JhŸ-j»DÒ³‘Ý çÝ æEçTûÚô©·°áÚ«&ýжj8‡=!ÙÀ<HΙD½ë`¿»Š^|®„VìðGR–ñ,aèW ÌÎOÑÖUÐ`ƆÐDÁëàà°å$›0®ÁAº*‹(Ü©“öûPZ‹O•PÇY4I‹'=<‚7hÌUÂØzZÄÁk! N¼L̇eö„d³Òö¤gè±p ÃÇáZ8…ki®ê¡u“Ãêe©bfgäcÅÇ,Bšr¿bD0Oˆ'å—-©j./§8õ¸öL.ÓèH*ß~  £„kᮥs˜ë¡uCÀ@$%{Lˆ‘|qÂl?öŠ<”ª°ù™Ö5Qha0pÐR’ „š±£üÊýjHXmG:8…«è¦:èÂØ‹E€XÖ³Ñw!õÐ4È Ì1k`š1ÒSa,[}·4gNqèB ö¬d Ãç僃ú‡ŒÅž7 -‚ÂõvS=lÑ_iGGæ³)ôÁTä™á¬ K«òít bõePÈ¡ó¬‰Ãž–,0™ZAap€ñë2 æ>A%‚Àt s-ô5e>ÄÓÊ€ÏßËl¶âT¼óNØar‘ ¥$Šƒoh‡=1YÂ,Ô,8û d(Œ®…S„ÞÆa®‡Þ#a°¦Ügؘp¢îí°ôRŒà¨»>Ñs7<‡šzAq[@ƒÃž˜l`ê¢%œÃÊ´NCÆj‰Ÿ†NaJ:ƒ©Z¯…y^Fê³é|ŽXÏœH¼L­pÜáte0…jêè ØÖû\Þ@ÏJÙ¶N[q„ÇÂmí FÕ \Ag0WÕÂgêHKö˜0Âe—í¸g±{b'ÙvÝî fx©٠œBÎ%Û‘88ìiÉÖ×q78ègI† …Qµp ×Ò9ÌõÐiÖ5YÐØÏ³3]aÝø^pëmXñU3Ä C¬,ÅL4éõ´±i Ží,0¸Ÿ¼ *ƒ<†…QÀt0‚PQË_ÕÁêµE¢eFmŒœI¢Ï´˜¤¶S*LOY/ýÎYÔÃÒÞ‡=/ÙÀÀÿNv+Wì±ú&:Aèh ®j¡5+ÍN$–úl`Vn7ïlÞʋÊX€eÀX´0Á¦ñ¡ÁÀaÏMQ\9•ìg\Z»ƒQÀt Ó1̵0K_ÝÒ‘þ,a6l„áã,³“ð٦ݸê#© Ð7ƒƒÃ~êɘm¿KLõ$£ùÙ ´®·r˜êa½¶kzPO60Hg†NXu#aF€ªmzhÆ$N°1¢i0pØó“ âp¬×š} «©‹P½f:8ëè æZ°fÕò[E ´Ù™â¦"xp?m•>¨Û©^8…EnÝ;/r‚â8í´eöeÓtI8ýoI†Â(áZ8…kéæzXÝï«" ÚÀt¦ç®•~X:T°ÈÀ®³j–=Û)Mœ öDes0ÅQp8Þ… ƒY5*®£3˜kA/ÏfÀÈ…–0\ÆÞØ×ËÉŒ†È쌿–+cÅiyÉŒCÀö"q`ä|PîƒU‚ÜU°ß]C+=W¶ê&G´éêžÁ‹€Ó²Öa_Õ%à0Zsч4N¸»¥ {’²Ùy98ˆMÕ’‚B:Øï®¢¿ª„Ú §ÝE´i Æ­Ú U³›Áå°u^ÁˆÍ¦k¤ë Øa {²²„aÖêÁA¿T"ªùý†NZƒ¹Úeê›ùЃüf0(ªØ‚¼=Ð×>µê‡F¬Œõˆ”iä`p¤+˜“Ÿ– ¸õåŠc2 æ°P-œ"ô6WõÐõcYæ”hƒ°c) ¿Å{VŽ$Þ +LE>f ŠÊ*=kÙÀÀ÷{7Êë†<$(lã}É®¢1˜ë`ÓLÃ6#/ÚÀèeˆó¡¶0)ô`®°d¤Ôƒ³× ñPƒ€=kY`6ð ­á""x#tÓÑ\ÕÂBü˜6u$K†ßVô7ߎo´ lÇÐd…~§>´A±ó²88ìÕ¡ÿcp°©2æqδpŠÐÒ8ÌõÐ5doòïiÄF±Z"ÈÍ´"mÒ¶lǘ3œ–ùÛË;왾†ö^0h¼ÿFSÁ Bg+?×{>´¥TbØõ’žŸp¶çPø&WK X6X£škÌ)Û˜ŒƒÃžë+aVî}ÁywŽ$Ca”p-œÂµ6S%´¿J×å׳‰ ÌÉRU­#<þÐÇ O$•—WA€…·%{º¯„áº18ì¢!BA“L'­Á\ ½w?4¾+2Š%Œ&^‘tU¿2r !rk÷î‡f ~A,1pøðX–Àà09ÈÈžå$½©®¢•Ÿê =†‡w9§XÂðmy…ɃôÂ9gßm ˜`¢r¼ [¾¯©ü6ëàP™¬sÈP˜}fZ…iéæz¨wX¬º…w;–S,aNž¡_bPŃH~ÃUaú±Ëèe§Þ‡€-ãWÂð½^âÐè$2š¹Q‡NáZ‡¹zçÙè¯Yņ¹$o|kÎ[à†Y̬2»ßW¥§ri6ØŽñíHpÀ—Ä!`Kõ50­é9Ç9èÇu‡ …í¤D-œÂµts=´nû”êë1aVf3”Ã5® /ZàQ)‹Y¸ˆ¢á絜‘ekâà°§úJ~HgpXØ9d(Lljiá¡·qدR–ñÍ•NËH'6aøLþ@ÂŒÒà”¥lORüÃàJÑ<…˜2hSF± ižŠ«É>Ø7’ Ná*:‡©º¥‰AªNHË(6a°fæÎNfÐ%¬yË,Á.1ú­ˆAÁ/œ%k¼lÆàÿÁAo?‡ŒÝÞ -…kéR=tK;˜E~dKF· ÜhÀÎå5’`—agúõdœõ(°¥ü˜Ï:ƒÁ¾ëuÛ1ÞΙôøÙ´óÒ³þêü^7Íæå)ůg*rÌR3†×µ{À rYtø;E5ǵ•в}%   ?¡3D(Œ¦C¸†Æ`®ƒ…CÓ½ ł߿X£ÚƒˆÎhS :ÌKY * œëÄ!`Í÷5ÈáÁðge°œºÈ¹ƒyS¬J…+é¦Z؃9Mò)ÅFŸ»­~ÊSóññùÓIyÆ>Îk·FQµ‚Á`À«¿s ,–ínp8ip…‚|¢*øÏ¦ •¾ª‚9 øìk¤K+xfgªl™-}Ó÷ßk ˜‡~Ä ‚Sã‚AÀ—;0;` Ö6‡ 0ý¦ƒS¸ŽÎ`®….õüÚÈ)6¼mü’/[›ge¾ø=·€y„_, ›RÈâ¤G ã0`K¤•0j ‡S¿2æ;ã#¬T¥0%•Á\ µ„ñ¢}M9ÅFÇQ;˜YC.ð`)CO ß ëgà ?tDc®÷Q¼©Ã8(L¦BP˜ŠÁaª„ÞQ¬]mO)–0'îf±þ0¦±Añ$ÜÆB«ú8Ö)Ú¡ŸUu[ƯA>Üb81ýá> £„kᮥs˜ë¡ÓlÓt»‘Ul`ð9U„4ÍáS7=öoú=qƒé)c,á Ø›6±1pÐó}%Œ†ª<§:’…Q`J…kmæZØáÅ’xN±„aF0 øE3†ÉÁ¢ð.8rŠ FcœG“‘Rl€–í+æþâš.lð„bC§pÃ\ 
}W±ð«ª#¡XÂhØk)‹Øõ«uëƒuƒxo)¿‚B-¸ÁÁaËú50Ýúƒò]¬I†Â,aZ8…kéæzhÝôªpdK~–Šp§¿Ù®îæ ÛͬEÄS«‰Bø/%qØrM˜¥ß%ü4ò¡A¶Iû}(Íâs%b;ë9¹X´6Ü97Mv®šZ„y´~mÉH¶DÅ71p°x*Cè"8ŠE.à°4œC§pÃ\ p©vûi™ÅFCŒ0 V~Šê„ÏÈ÷æ Ÿô3ýÞéŠ÷â^<‹cd“]ÛÝ`€¬2-IPØæ1U0ŠPÑ8\UBCe ÎóŠ%LÕÀH{E`ßœ­`|ó¡0c]qƒ¢kÊ„àà°¥ý˜…_<øÒe(̦EP˜–ÁaªGD[á R‹ ÌÁ² +ÏimÊ/8L÷‡~<;(4%óàà°§ýJ}M¶®·².Caö´iaCoåpU¿Dj1F 9?WÃyÆQÊ-ö>ÕÀŒ,^_Š[rÞ°IT§Þ&ªÀ ªVª¦cz&Q]éšµÿweÃ=5r <“\¬ž^r1OYà ‘StÝ )#}k¢YÅH‘SŠ9EdSŠ”NÌ)4›˜þì©Ä®”òŸÿiİ¿Ÿúšë/šFìGlÙü¨pfDÞž¿`1¼a[Àøã³ˆ!·ÅÆïÊ|t±Û*Éz€h1Ú“B}Á#BY#L—ÿò瞣íýòþ)-pyóÝËWX÷{Ù7xé² þsiäÙ9Ë¥þÝËWL§ÑË' Æ·¶Úqù2•$9vþãòµt£¬ÆËY.O*Glðvùê¥`Û²ÖË·@.­´~yçRÎËÓWú÷v®F![}!8s¥gÞªúG¯ÀÂaÒ[«—/‚ö[UGNh—‡o@±÷S–ŒàqîZW~e—vFÁ¾.Bþ;¨¹.eæü™ë&òú%^_Ê.v\^'’§ÔxŸ…"¿ÅÑOiƒ ¥œ}oåòFõëgÏu},®Ëùl „8mÙ]fˆÔ„ÌŽ²¢='Šy~Ë命&ÒÃʲÕs+>hkìG9&•¿å૲ƒ×CÆÔ'ŸÉ0úÁÚvÖòéflˆˆˆžÛþ»¨Ò×é¯6Ÿ³…Åìßµnø°Û¦•¨ ¨/Oï„d_Ž£­¹¯ß‚ä\êqÆ„ÝôGÀ²ÊõrßiÔì•UíÒúÈ¡SkUdbžÒ0O¿Q–‹,ü6„ËÒó¾ùªály¶ËÏtˆ×í#нÃb‡¾Ú.x‰ðÛ}¯õ’‹)ñq®šqE+ÈÊø viÅ/çQfÅÞÝ·c?tÀ´E,½-‰‚Çr\ }¶Í"ô;£èëåáë1#nwÌCðk—_–]ÎGÛåÍ´¾¶´ì[™Z쿃bé§œm;ç‡,X«”Íãíà$YÏ´1ßž¾ð*ÏWŠb4ùâ‡qŸÞ(;ÑöÙÉÇ;Ï2ÊžÒ<üJ' å«Ï–Äë*)G$.v®<‡rY÷˜Ôر¦ÑþušEo97]rÉNæá?¦ιÕewªºAž_÷áó[lO³Qœ×¶¡´«täwi§í9i–)´ñeðí¹ÛÕN8dóÝëÀ·Ë›4Ãÿm6L©Ñ/mSÑÚléZÑ/oÏ–¬×H&Švã{šD¿SŠsÙѹÑu7gÑS÷"’%wÝ « ‡TOŸ›(µH3«šÚQ/ùDYþ€v:™W÷ƒöãj¦&2>»¹‹èb²-}‰ãM´î³ö¹²k׋xp~p²mÍËx¶Ó8öû~µx3Iår>£þmy¡P½äù÷¤ÍR×ÔTy:c3ßF<}¦<·)þ…&BŒçû¡¨ gUãYÈÉŠÚŒ#ö5Ìõ¥&d8ó¨‘J"Ãh¢À¶Ž'eK^9¾B{!ß]æ›úá­:Ëæ›y= Ó뽩{­Î›ŒÿjÌÉyxÅúü ž"Ñ…ùLâ¶ÉÞ:÷çÜÒŠFëþíÏ÷’Ïn›Lè]þÆÙE‰§ƒž½rrjNòX–š7ßÜu@Ãfþ|66m=ÏÏlXÙQ³”iãûLep#¿>ik=iÊyGFg6à&SÔuz£[WñU«ù “&Ó“.T½Ÿï­ùqÄ{*´Xÿëå¯ÒtöÖ—ýi)Ÿ&iªNžƒï>8QNŽ®aE¤L‰Kkw¸å¬§£ó3¤Tÿ¾u©™O˜WØgÐ7†žlÿEªp7ùå…[·X×û ¥‡XÏ:lÿW>…n)“A6Ù+¥¾Ð#Q?ŽzkBà.xÃÜ&­^†Vºz«maÅ[*þªð ¥M„ò‹gŸ2üTÚ|€Åf»Õc|£vzÌÍ?·$\‘½ßX§Þ;èøJR²ÝBc»ã Êõ@©xM­vÁ•"Û‘Ovbœ^L‘_Ä0m/o¨ §i²¥t {‘?×g¶ý'.6‹œÈh‚ャeš6iÍýš­+ÿŽçüZ&ºpS‰9t½'R‡©mÇ@úÖ-¤„óîY¯X:=>ëÑ2Ù}“ÝbI¹ÛÍ–ZE†ñ¶œ\7>yþ‘+ËjÃ}¸@ª÷&/'¿Iþ Éâ5T&«hšWXZ‰è´…CÛ­ÄÔNÃ4ÉÇ“ÏÇÉ÷3]w ^”ì§­»n;9O8˜†ô%ýßúq.!äí?+µ]ùy¹ãÊ’ ƒ~2n^y‘?Ÿ=æô9WéªÄþO6oP tËóný}2íƒ4»kG+g+L‰e9ÚÁ|¢vt ±åGÕ™ŽòÑÎJ°Hÿ\ÙW(VZfû` 龿ý«Ñ­Óì¼aw< ay<¾¶ãÀ.ƒ"lçlÊ›¡»NÕü .¨V”¹v L}[ºœ†öl><ãÁ}J¾¥o•I—†ËL¾b’›é5–ª6[bgg ­Ól—Lg ?v¥&ý}šßM!5�äk¸ý0¿‡ëÁZM˧ž£{Æ šÎßiM|U²l;ýjÙ}Ƥzïp˜¿:úÊVŽƒŽ$cñë0?˜¾åxöÐÇó¼Z'B¾ Èƒúæù2»QÕq·µÓ´—®ùsÏÃ_lZ²Ÿpׄ°]~K%ÛÑ®|dùˆ‚A„ˆŒ:³1 79!¤Å& ~˜ëB®ŽÀ4¢Ô™Ã+¿W.»|ã= $×{ÔcÙY7œNp$]ÖËÿr«GîU]–b5MG£étîÞF‡Œù·ƒâsî£;βny-»úÕeÖžÒ^™‡zPðÝI;ËÂN#2fþ¹œÇ½§š¬´r¸Ùzgþ“eÅOj°×ÊRkÏ2>·¥iU×e+ÚwÉeÝn™Ú1ûG¿ûäŸ_|ò7ŸêiNìòc]mA¹¶ùÌœ^—Ì×Da®=½,2ŠÄ4Ýü¯·ñ×çñ×CüõM*qˈEFç ›Ù@ýj(›O8i;Is…wÈ´\O¡+S:ÛïM´¨Ã?ðžå¾ÕM÷Råܘüd2Ý9nUZ¦"®êß·Üÿ_°ï´Qêõ†W49£<¥Õ8{×n„½œ´G³lÓ}R¶;¦RQ 8™Ú —ÊYœÛ¸çìÚ|ÎÄ…ñµmû¡Uݱ–ÙpZe Äq˜Šâ:÷™ œ,fÛqÖºMæ§Þíânè¯nûމܞŸ²WŽ-ä%_×KF›Çr;&OZ´^¾yý‹4‚ìTj#5ÊÒŸ¿=ò™½l{ž,ÙhÕZì½L—2Ù8æ|Úig¼gîì´à/³‹ ñXp¶‚“›#oˆºðÈw=‡‘d|YÓžúù{åx$È6l²˜»_ß¼ ùÌe\{νZꔈKmÆŠ0>Msœåî›çƒYnFŽp˜Wù¿"Ó¢…üÓ8ÕM†Öõ¼3·Ú­ÆHçE³. 
v9†ßšOyŠ=9|}Ïôí¿^+Ò+®½ÒŠ”þLÈîÏÜá>œÀDN?)ÎèC±A7››ö6·æ/_ŽåâxÅ{è…¸¡}Sito\{´ÜXn'Fºƒ„ ¥/§N$ëéi´;ƒg¢ nßLçIòÙ˜F³ïŸGµcò¦YùÃ|ºxåuŸ§‘™˜2Ñ£†ëV.OðY!„ºÌ'¶§y˜“¦ùµªÙ§é˜z}¯ªÎc4µLÛ’·T6vuZÖc¿Þgûu¾ßâG¥ì¬vùÙËO~{½hÒkÒ—>°6"üö”cÔ3Û ®Û뙑9ã»agçæ[m!é…Ë3÷_)0ç»›gÕ$›A#ø,…0ùý˜4©ÜCþ“•ÈuQïÇKåzûEr$£è½o×½qй÷c7ŠLºX¦‹’çÌ}¤ß’!zÛQªîå™x´8ôÄòõu#Žxo®4 ß¶·g=t^*÷Î÷cñJ÷ø™âöýw¦HóÞæ!¢Ä·+gÔðþL[¬Ét«ù‘™f‡í+Svíæþy ¹½ kŽìÞ ŸÎ‹ÅûõþØqI5z¹x»,•ÿòÒ1Àù÷³›·XÚj”ü;,ïýòwÉ’úé8KÞ[ à6ÿ8ÅJ{äÛ.Yâ¶m§1 ûžïÓð Œ~óâßQ“Ùæ"¾7ÕdЋc9T.²Xý`S@dß]OzszYö¼H9‘Õ´ýlˆ¹ÑŠ0V„çJ}D—õ§};äÍKÈ’‰¯,ý×ÛGMô OB>šg[שZ—åýÖÈÅWkëuK$ç4^wÁÍ5jßáq{_Æp©H|9‰.ë~¥È+<™‘sˆŒÅ.r|¢ÿbHçðÝ8´d§Nx2÷iWÔõNŽÇeBÛêhÅdýðÁ¥cÇwŸÚ¶J||kËÍøw›ç š¡†Ã®æ!²±d;ÅL—®úÝý_q>œÅæ§±ÛzvÊÎaÆ“=gVîÓ84¾¯«º2.7íúOJ /ÅÍ+Ñ 0Vyå’vÛgî?oÛà)ÚþIâÂÔR°<)¶iÇzïä@»&í¦Ùž.`Fµ""ãð03~qïj×÷½9DÿÚ+d-‹÷îrˈ¥}{Ÿ›WZ<²—!qñù£Õãôk³Mrk-@jZØËSù_}÷óÏÝ/G8ÀÇí—È£‰Êç='¼Ìd}°„—›àË»U C¤(È™¼óU"2-50zi5oÊîÄÁ÷ZÁÆQƒ×A†‘¹ >xÒ¤»˜9GIÑÚÞs{în±Heäw¯V<ùXlãû?×»Ã, yñG)…•ëÄK¾ n˜XM¡P77ê7CXZi‡|zÎ?7ø¥@àïG­¯.ÕôQƒˆ!3ϱÌÞ“0ˆÞik·å™¨-¹!¹×©- É(|W´ãòÌE _Œ±Ò÷'î“õ%«_önO%N¾^ºŽÖqßÛûo¡n·7,‹ÉÕ¬œa2gO\ösûà D}.åêaů‡ÝžÚ=¢:jrÒ,l|ä•ìÞ mÏF?Þ<ÃùÌ ¯tèÂŽ0‰Û±%ï_{çOQçv…4_¦˜yÚž ]²; Ght/M~´ì·öÈ 8k½ý|ãjæZEnÌÜhÌþ›Ã9O×°é÷çзMýkOÐû×NÒaþ¢u\òÇá6Š®›¢¯ÓÕõÃ<üiº}?š-ÝñΑ[ÑéshË37µÓúíÊ”m_{5þÁg`endstream endobj 331 0 obj << /Filter /FlateDecode /Length 6164 >> stream xœí=Ms%·qÉ•ö%ß_Ù—a¤7Á÷‡b©œTRqœŠ­­ÊAÒá-É宖䣖\K»þóé0@ƒ™Ç'r7VÙ¥ÃBC|4ýÝ ¼ï6lä†ÿ¥Ï®OØæò仾nÒ?g×›~vòPnãGo„Ù<{qGðg«íè¥Þ<»>¸:}ö-öU´/#ú?;?ùjx}*ì(—ÃÅ)%—ÆÊáÍé–>J;ÜÀgÅöv¸‚&wÒz9ì°gLAïçØ6ŒIÿͳßÁr–‘å¤Ö|ZîO§[)Åè½þé¦'È8¡FÃáK÷%ç,÷ƒ:í¬Âå脵SïÿÁM)&γSaFí¤ƒj=roì°»¼@0ä(ýð"L¬¬öoâG;ý°Sg¼ hñBšˆãlÝd€.ð«ô^)  ÛsÜÿ…˜³VT=îqf¦¥ÕÃË<Åþt 3ÎÄpž‘2ÜÅɤãë‚ù€58 ßE—pü*£íØÙz[‰+²ÙJÀ²´|³•jt–É8(-ÆÝpCA4¡ð´ÅUÚ»bÌ7ìÎ#¼†Ùá:ì˜1îu˜Npf¡{‚Ý9¥U í¡7ÉŒ÷ð1¬"†÷aEã¡9ß~˜âr"&AazIOTÁÁŽéàË·ØÓú²"mz̯ !¼/„@tÚ5WÃ|`˜Qg¸fÐ|]Îc “ų1¼bSYúÍÈ—3OŽSxçÓœmFo¥Ã!vÔF1³ÙòQy °8©>pÀîÅe<8 Þ! R8¿©†ÕRP¥-`r¢¶§[-àpŒþñtË­sÀº(*+¾Gƒ¸“¹L°ß '8ç‡Ï®>_Ç•ŽsÍ?IÒŒÂê°‹ _Ã/"á9£º¸óaô±¸³Þ>yöñ¸óüáÈÑ ï"p»®ž¼ «yo¹è¬G…ÚàX4qOÐT)jz=zÝÁÆç« pÐJ[ئE•Gÿ¢Âÿ>;ù= Ó ïNäèÝæ{PÎÿv"­a¯7 BÔ¨ìæ¾8èÊ_®N¾T¡2{Y(3Šá‹¢©:{²¼FÃ$è÷]0¼`žG]uYç<`ií¸¬dvºÜ¨êt§›o}Va2¡#oƒ"s82¶&|©†Ì¸­#XŸ6SäËê@›ýQÀÉu ¹JÓ o,îM–M½ðý&ì[F  I¾q¨‘€ÇçbáY á,h÷[Æ*p?(l&åË÷×kOìsŽ2ÌÉ‚, €Äe†öäàÄÀÎÀ·0Ö€~àÑ9€¸Í‹9ïY¤ÔØ“T]Jçg6Y4à ^iU‹F´6Î"Ñ«@ÚÑH 2vbB°Õ°‹ö’00îm™9²,wè¡PÉî âáeé›:€Û2œ«mO¼-‰éGlýˆ%Fà=zLãwq ÆùþE{>~µé|Ã¥Å~o„ X}Œ‚Å•¹tÞd¼íŠ4!¨¢($2æŽ|Ž–¸-ý¼‘öoDœcBì÷è7YpŒ`9h¡ì‡=; ¤&&Ó”]Pà>(ÀʨßðÏå°„~Š©êp¨aWLb؃èH€>|V†ì)t#@ÿ™=ù"„;Åül¼º¤-h›8úg< šB¹ÛÃÓy˜È0 ‘‹Ô×á¦a 09uïf̰r‡v„doOsgêsìo˹GvU åÔÝ}–€a™D;„ÞÅ‘®‹d¾1~æš‚G¼ë—ÒÛ.ÓîŸà®sÑ@øsD뼪âú÷EN "Ï´s”´jΙ@Ûß4@D"N½T$0ÍvUf£Ûb` ¶¯h(.Q‘9î2Ä€öô{" #:’ppd6 L0c6ê´.¢vÔë(XëÎL¤¦Àà(â@nµrÕ~ÑÕÕ •ÍQL_‰A“Ƈ‚ÍsU(õð#¦P¢2½Žæ›`Ì«t. N]{øyÅ=!„‹øY:] ò|´»J ¢=S®¡Æ8À¼‚0¹ÌßIÍPú}í@Ø*Y)~Šk8˜†ô]PY±QÂTƒþ<|õ Ì)íÏð¦%‡³ŸN¸õTÄGYH}~é+û(a¢ðU!‘2"Êc ³Ù ?»š¢±)ˆý[òý® #ˆøhªS»¤½¯Ê.wÄr'<òž€˜°Â:çµfÞNû­y§„‡ ¥M3¦ Fm0Äc]§”‡%–IB˜ÌBåáLmül‚ª|—Eªë j~²˜ø€!.§–q>!3´ïHû¾¸ GľÀÙåè7UËý%£úI£‰G`´“”®äüèDŽ™}==ÛK66‘‰9 œ•ÝúIg.M ¬ÈNj‡•ñÄì¯CA¾ ¡¥fµá¤Fòð  èQÜRZËÝ`‡qe¹›Ú;šÒÒ™b}ùëë<. 
½+³èrOͲWYWtóÄŒ¹‹Ó¡"Mª¨ Ú ñ±¨¡³»+ß©é}DLJ‘åóî*jl4–ç–b´HÌà - oIMœ´$hl[[+ª³b~]RÏfw^R5¯šóH$c$J1Ü<¢§oKEmmµRÕ‰'­#áŒuæ©ûÒø¡£k-²æìÐ ²œèùàI¹:B¬˜¨àM Õ‚ÁÈä@’f²­ƒGC¼Ë¸ ã©yÂݶ€XhÚnmE ^7µÝ/Š‹NI>¦?ýÌŠML1ï¬ñóÙÁ^Ÿ,Z]È5¼ñˆ»M"¾èb,98kfìd›ã2Z!êIDênZR×Þv!gBh¯‹‡¾­R¨€@>¡«mŒ”­/&–Ãå<¬˜€’3=Óœ€ÖûŽƒê‘Cê‚”»\öJ$dã±rI޹-DçÂ"¸ûCfÜçðS4Œkï7$ÁŽ?«£NFÚr§Ffmmž¶Ñá6æ¤ØÆ¼‰”2ìtŒÁ5-¥Ÿ„1MÔj#ÄBˆ7fö¨¹6›…œÍ|Ž‹C;h2 …­aú›U˜°Eª1þ.5 z¿i¬„<Ü ¬Ñ€¶F ÕY L¸¡­†c†Mc>’Å;-÷¤o™Ã:êÆ• ÉõjY°µÐêõ< ÁÛÛ-Xa‚¬Wü¦ÅLçª-×oÛÎ5PSÖüE5©KÐŽ,Q/p_'ûeFÎs—Î|Ü’­‘£s(bPày°4AMTËÝŒ XõÂc~Ócžs–ß”¬Ÿßl<î'Mo:¬ž›¥ âCÄåÌ™o2ÕJ‚¨ "&´u²”‡#f>§5ÛFÚ¢CêVÓ«µ˜õ 42Cþ&"žý×Û¹Æ(öT5RS˜Öv}¤ È\³õ_㤟•ÆÕÙŸÜÑ*äH¬‰hê¤âApFËIK0TMe~GÔ䢥¢MÜdq÷\>h° LqD™À¦ÖØRëÕXÒç QÒ_‰‰†*½G:«åií -Ä¢[pniv'ûRÔ¡ÈkWÜŸö¨Q`Û4Œ¸$¤e.(2¦ V¡×&ôÙ2>Ð60Qèkš$ŸÂÞM+dõQgáFhœÕ/Z®M~ºªhƒÍ²!‘lß¿"örŒ xø_Óø4W9Ì;‰G óîH¶ªDPW •A#T •]  ²†RÅëFÐäÿ—µu"›ÁÙ"¤¬›dbZeÅïIãú‰ìݬj•æ¦ÚdÚeœŽñÚz]Øež#I ‚6f"÷gÄ“ÉP·µø½”]b­+³ (ŸËâP€7N+ž"±€ô6*îP— Çóo=AºÛÈéïoɹa”$øƒ)Ã(7Dbà•ã}¹i¹²ð›þ¨‹0/V±„Ú`…ÃD•8aµÐº:Í…Öϼ>eã M**»S nƒ@Jät‹ûuh×¥êìpvgq.#’WÖ ÅG[âp²…Q&I‘ xì1MoðXÚR„U[Ø©ÑHulJõ‘)MΊKò ”fÍöMuU<=QÕñÓùáýc¼¹Âš„ŽÆ,§$d}[oSçesGéþ“{Åë¼Gæ^ëm պ㳄xû»Òâ´Ö¤ztÁû±jòS%ý¹}ØnýiM¹åƒÍ@úiÛÓä ÿŸìi{°Êã0K|`{ô>kíi1«sdøJ‡žê^µªMg.è«ó*K–=Æš™7Xh4µ®rë"·v¹õ†Œèm³£D˜N‰Fš÷¸)ÐÒŠ0¢fg·LxbâøÂͤrIÁÜbÊpRZ!Û¤G˜ƒ~}ÖàÆ§û"&¤_ê»Eyé…| ¸çŽm5é¨~ú¦ÃdY§6‘¾Iá¥ö˵‰8 ¬9»ûH­RÝ6£·5æßC•'½92§ÌÚpia›ë÷¤¨íSÔè<˜åÔvºJŘ +'úgsÇ«l$UøóHf!zV€¢…jäm’H[:Ü”YK¤†N¶½~uSª$÷ù¾ä{ rÚU´1_Ûš}~h“Í9&ó¸=šcq ›’l"Ò»¯^Âé§ §qÎR¥ˆ~[’[uf=sÏ‹„~¿b¤§%ž„Âüß•©Kšˆ2ÝB¨;I Èp/(ñâywŠŸ%‚çDggøáô¦ªAǤÿ-¡ÕÛÂL¤Ür?ÝÓt6–8èpw®ªIÈ ·Ø'weˆ¡¾ÈMë¤ÔíaV<3í51§ÞI¢þy°;Xˆ´)žF̽SÒŒŒ®’éV8@áM.Ø>?…ë ­ìÁƒ uNßf5Eù¯æ4èÆ){[*tصIªì›{V8…á•^ÙÇ)X“˜¬jìë<Ê„¨¥Íô÷¥éwvêŽç‡íÃ?ŠUàÕæûØö¢*¥©ox*¤(è0’z=²ÂBùün&ƒŽ ÷oû1áÊuüûŸÁu:æÂʘëÆšj™îQDŠá …þt/)x;Q PÍêEa𔦺-œ¶»«Xu"U9JxI¸©K+¡õœ‰3…ˆP¥óDfN CîÒÔ¡ ­|&|qIø/{‚©éO’â,Ê&ù9m»åq®§,XBäÐûoWô¯fæå&fà£s9îÔ„r³—¸¢VÈÓr ÖÍ™€ˆÈ¥kŸ’AÛŠŠ–/\j Y¼ OÀ¯Á ©à¶¼3u`,8¢TîªGã ódZo®Ó#:˜XÑ]‘“ñÏ´E­Ø<ºG¯,GK‘¼Šƒ :3ý(CU¥`ÑÑp¬ImÔ"—d<ÂÍ~ËÅòM•8Ô³¨l-^¯ˆo:`Û3S3cá6òÖÞ]ì\•‘–×4"çG+½± Ýclz´Q†Mê‹Ã×9Ò@oÖÓ×Rd˜¡‘tĺËXo+()À:Ñ@ +×E–äþÈ’<ëú¾ Œ™yü¾¯d*;¿M)o8WŽtíßéßÍ^®ˆI‹öÂ|Õ;áMµ…O³ga «¡?ipÇ O5 ó ½xWÜ• Ï«§Fï8nTÙ¢T¶Š1pa©ø=VÙd-Þ¹)EX\Öõˆ£–RL™Ž/°¶J`ÆdøU†þWÓG¼O9ß\oë÷ä¯ypùXHõ]ùóùê<»Òñ®³ô}Àp6Íg»ù¡ ÿu´-ËvžçVïÄ _¹y½ ù›Î„?ô¦yÓYoßùëuo°Ï-Vþ|š›ŸvZb½ãO`WŸä–ìî4ø¨Ž£Ÿ3±qwÓÐL™þØ·ÖŽ„ŒÞ £'ÂÙÙ:" Ùßö:ò.TnéõSð*ÛǬw€29íX›?TLõ8·|»ï|+ÇuÕùëûž8{œ¾íLS$Ï·iÎüˆ’ûÞÜ]}œ"ëPBa©r‚¯;ö¦¹è€ðyný²3¶`éEG*ï –æ³ôà»]Ýðçø ï:«ÄÜu†\®Êײ\‘wc_`wdÛ§pÞu6zÑY¯ 鬳å]gO½Ý½è|;ëÌÒcÁ?ý²ÝñaÝ×Úë,q×ñºƒ¸›tïg¹ÕÓ`Ÿ®þ5íô!rîQâ†2áñFØmçÜz¨¿/Cv½eò—=xz„rM:®[}/:@¾éÜ«¾x\•™çí‰5Jê“#œ§T:;ëMXè¯X_u]ú}ÓùönuloDàí%^ùÖ3ðzò©GQ=¢^_wf€4„µ.Ù»Q¦gbw&¤ÿ';·s[OWƒïŒWqꇇҩ2A°ñ æø†œ0ö—Ñ j'£¨R;E"·äOY“ì¡ýÓ  ¹¦àHå–^ÀQ’¿~ü³ÿ8?k“[–žõŸèýø„§îrËÿ¨SOQRµ‰RV„Ü >,'£JŽéeÆ3˜Tï`½\ïLyˆ,&‘„7œ•Ǻ„NåQ¡Ú*pgœÖù¡X°-Òƒ©®CõÊ…yÈæ3Už“›Òg–s3e,ôˆ¯kq¯ñ÷œ^•)nÊ—½1w£2:Gg¿,išürÃôòØZ1My6<®e ¬yU/ÇŽi†óð&UOÙn—ˆQøæªDø‚øka0 2ßùSaÜú‘}yx¤+üÐÀ_b –óá¬~WÒ‰´ê€Ô“ 0Î…ýœr†›áããñ¦N|’‡·›äMÈ‘0=ÀÊ)Ùó‘û” ¡¥Ùg¸ñŠ)3.š÷ÚRV-\¡O¨_Èô.¾X¿ÿ'Íûê1¶>»(y9ºò%ÉGØ€€«›æ¥Þ‡>2X²;ÏùT¦ø¢…°¢zÑ¢¡ÊâäGæÂª~_bŸÂ<žUm'N|‰OtìcNUkSÿÆX,Þ‹ tÊÇñVÕõ/ý×//ð&©x£¸Ò"¼WþDÏxf0ÛŠD §ûï$×>L[ôMN®Wa6½, KW¿²¤Åxû y)õŒ)q&}§”fÄÈ⇫œ+5}0«¸ßIk3·$+šg§ Jg¡JŸ.?sVØ”.G,¬„H‰é©ÖrÜ”·v¥–ÊQ$¤ÿCzŒ”«ªì¡ªð+OýŸg å Yþþäÿð@ endstream endobj 332 0 obj << /Filter /FlateDecode /Length 8490 >> stream xœÝ=M\Çq9 ´‘S‚\7>Íšñëïn>ÈNd$¶[&– ¹Ë5e’KiIQJù멪þªê×ov–¤ ÐA÷ýú£º¾¿Þ×ËA],ø_ùÿÓ—–‹›G_?Rôô¢üïéË‹Ÿ?~ôã/l¼H‡äµ¿xüìQ~C]¤å"¸pHÆ]<~ùh§üåã¯p¬åc•>(`üã«GØýùR‡ƒ^”Ù]_.£Œf÷Íå~9DxhÂî<¶Ê»v/à§Š&$³;âµ,F?Áß~YLúããå–3îSu¹ÿ¾Ü£)ÅÝg¯Æ÷4{OÛƒWð¤¼÷{Ü\ *íìådeQ‡PGÿe û|z©ýÁEá¤ÎTòaw¼¹Æm˜ƒI»g4± vwûM~è᤿¢“FŸ,Þׯg°ø-@ëUÛÐ5>5)YkBt¼¨Âî78`‰!h1â μ8ÜîOmŠÛË=lxQ‹Þ]5 ìîòd&ªÝóy‚ÜFš‚M¸~ÛÀö)ÂQò`@¥É9£÷Àl‚ºØ{ˆa1ù-s€åôâƒ÷»p¸Üþ4@ϱ,*lY”V6ƒ%ÀñZ,qYÐÉ«%U°€ìëz¾ˆð$(Gxœh£*,#ž"ðÓ¢ZlŸ£"ââm‡¾¢½ÒÀxû’m^E|@,¸å 
åÍZ83¬#L‚‡xW>Fû»käӦ¨•ö?a2Ù÷ôÓ/Öâ@IðÐåãjµ˜|Ëõq¹"AÙñ`¬Mýèñ? bÊÄ¥ÐhÀ÷ô¨Hõ´Lì},‚õ P_ 0\­Ë€ÎXÎàå:¼T—K ˜A¦ @4Â+êð‚ÁñÔî˜!£üý¢#z „pLj"ƒ#ªEácÜ“Uq÷åŽhE'mëÁa׿À÷ô²$vŸ}y™—ŒÉs °IŽù'Œ¯g‰î÷œâwïðWˆÚ*EÌÔŽçƒxÛIóE^6D[š6ÉX±GbTÞù]ÞkRʺÝ×$lë¼, hO‘Eác _ì»Nåe;p ‡@„ÒÉ€@h øXÐBÅTö®tÐ~ŽB7xwÃk @Oäà-À‰·3|b @|q=, K9,üAñˆ“¸ø§¸F¾«»Bò„O$¿L€1O:&fDTÆ"] Ƹ¯PÚw´!+ßw!Ö)×'€[ì±\¡¨\à Žƒ““Ó5£É,Xá¦ô-ÜJ*ÚÛÂTÑ2…‚3lð‘ÏÁpã í;Á€ ùS‡ ãeåÕ@Àç“ìºËQ$‰Ão ëÂË&2 wsËÐ?D C> ç ühÏji±WEñ¤-Y'%ÇŒžÎmø=ܾʛ÷ løYýøþÄðó:°E˜TæØ%õmåÀÑ›|'y·Ï;–½" 4^#â6 dT×ñ}¿‡µC4ÆINÓñþežCÇ(—j¾n"#8(d´—w$µR €„©QC3²½õ( ¬dšø„ CKŽåüî2AÙT;»¼šƒÓ *Yi›Íœn•Mõ†Š *#ŠÖTø:ÑmF´Ç ½óY­³š¯Ã04Ë·v‚0²tdÐtN!BÁÛ"t¥­à>Û£Ó»¼ ¤ü¤ ÔïóÜã Ñ{c2 Vräü‹Êd#½yY1H¾qÞÕˆ»ŒÎ*ÜÒ v¡ XÌ*ÅSäEû1k~F¼ã˜Á¾ n%e>ÆÉo£è$E_0æÝÅÐÕHå’ ^wŽÿq,Kzoƒ§À‡ó{))Èœ9Á̃ÅU²‚^„TÆ`Úíyv‹5 l( ¹Fy`TËà9(åQ! ÿéñ£ß=ʆ+¬¢ºøæV«vVV9G¦+|mOñø~qa>®¬€é)ç/PpÛ‰®p÷?hóânú³?ìn™Ú-ÞÛ×}Ró’Øî>…³‚ øÏÌN&`h2\0³%‰Õf»Ê²ñ7Ó᦮ Ø6Ù4.à“²jØüÛ¶‰L00Œ—&*a°`ëñ’ÝÐoÅ*Ø0òVP߯ùcÑÃð^¢’hú ×劬| U!Â#Û#×Á£k j†¤·L޻ЏtƒÜ!©xüFÂLÝ|IïL´ä..•›,ÔE„ˆ$õŒ=žˆÈ¹hl]¿Ò>Ð8ðPÏòo…R¶±ƒàßÁ–|#Ëœ¥/OºOçŸ\U{ÕÙ9g«lj.š¿ïðÌŠ°F³Ò8Êà—eŽAR-–LÉÎ ž«.pçbñ&ωJ£JäùkO»œfØ Äƒag(CŸ²3Ü2‚®,®xÅ‚Ëð‚-š3$5Þ²©Bð®@$ŪRÉï+¿" È¢šCs)’(ψ`WB®\Çÿ²9bRÌ–áÖÎZ‚Tjh€]9/Ó<97û¯®V æ2Nœÿ„Õ«#‰aŽV‡&£TΞ÷mp׊d/em½éNYÏHÄÿÝI*SQí6ÊÿEŸcæðÖäågt&g#À°p~Àˆ¤Íâò+Š€àT±i¿€« ß=‚ñâ(ÿ~Ò!„ o@TÄ‹—Ìâ àp}ðâÑï7f‰—ÍŽÝ>áŒ,Kg³Æü›|¹Ú%WqÚ‹h –†ùQlUÄãFÎj18››@êA çW†E=AHÔ ÁW…{&0Ä€WÒ-bô³|ã]j .POuùqøA/­v%Ö±€Â r\ˆWÎ\i”®üÎã‚@€ûøŽp#ÓÜÄEÐ63šðI‹-~Ÿ)1RÔ¢Étnj~ËF€ÂÁŽS`¤è„ŸÌ›çÝú»áϳžª´â|€{DÞv²pÐj™BÅì·Û'sÛkp>F1¹D°VÔ!x?è“›ßI`þÌ„ú¸µj‹o‹Ï#=¹"ôá`Ü’³‹ÌagÈ«‹Q0¢máV½1z܆ñ“] V3¯cQ[49®¶ít$79¡|_¢—oÛá#׌P Õ·rÞö©PÕpnW¬Å‡ÆK?lÝH74o…«X$‰ÁF£ÉRÑ}-é¾l†7ùiLöžˆ‰ ueùµ©$43Rk˜OF½˜Š;ÓS°8Ýäs2ç(ÑaÆ!€?é¦UIÓoNZܳWJØow˜š"'8æöÌú6äªL½xŽ ·ýÞ4mÓ*å“çScЯâ-W¬„·œ CÝY…>`昹ïV³¯óp … ,Ö¡›ÜC°#´šWÀæŸî¿Yí óŸ5üd3³ƒ »øDÛà¹ïÅPÕÂé<€”ëN¯ócòIvoiÔõ¥J#ÙŽ^ì.êƒubàU¡rA¡¨ÃuFк…N¼ûûœ)$Ù%˜Ð ôG `¼€\µÂÙÔR˜2›ÖˆÜï‚ò±Ù.”J  [ý‘ÂCÎóÔ"tŸkE.ÜÏî:;äÈô¶ã,w^–,Ž`Ó(ÒýZ¤³G¦‹œ±¡â¾‰nE«Va?fÐp.ÃÔAÔÂŽÝëŒÔŽ¡ŽJ|èÌÉ.‚à´]yÈ= %r—[ë1c0Sd#>Ý¿¸ýÁ^ëœcðGä$(Ê]#,Å瘃/[D.|˜E”§Ž',¢*ãî ïj*’›3™àY@´álrKKGøÜöãò x72RGªðî?'$f„v¨¹À§BÕqï1Ñð Jo˜hÊ0‰xT@NK+\š 1ÐáïúÂÄÙ2Ná¯úä”Ý%ÙS5»@É44%Ü›ÎV×êƒ,é@aFIP\SZjÈ.=š{´}Òâ«ÙæÏä0IÎÀ[`é©Ò¹Ï$ ÛæØh-“F „aÂtάczëJ!fÚóö• .Ûqøc~”Á)×í¸ö F‘*¯ú$s³h ‘ŵ8¢TØ, ƹÙÙ:ælóÞT”Æ™É9^™ ÈJ/öâžëfì°f,pwiÍ*tÅò”P”ÔÆHííz 3¬"g4€bÁp,•F³b.ù3ƒá†\¦',šR<'Ó vW­ª»ŸÜó×yA#“ôXöSà^å±¾†õ§Q¢žÈtÜŠÈ8™_B°:Gód2B å&ÚŸ»]½•‚‡Gxo‚æµÊq)ÍeÑÍuÉÛ@3µÑO—óߊð–ÀصfgÜ2ôiP79¢YŽ<ÕzZ2“sí¹X‚^EËãÖÎב)•5- Ús-d1î·ƒ¼sÒ8íõ ¥S½Š_ N”QÅ·xùàŒžù&æ*f®FÒnÆÕàz±Äë+Gɨr£“q©Y'\Èÿlw>Löø”ÞGö´SœÏÎÛ++v.65œæ§ã%óƒíÌûý!’lŒùGwžÙ°‹\]¹‰†ÙOHJóÒŒ=ؘYx-¼òçK…yÅ`%½n¿žæ_µ½œ¬«¢Q3•×$ƒYO'삨î³ʈ!Ë» xÕý\dp«ÁtÀÛ'Èò ^ôÜÓ%ü©!2q³ân’óÆÍ< `êd~²"–Í$_ öœ0PdåPL‡Îþ…CcxÓ¡±ºn.Ÿæ¾ä޶|¤|±Io[•í÷U¹¶¤¸ÛóV¤1L=• `&¨Çrª 9¸}I€EÉÌxîâk3ŒFs["[ø“›ÒÂLïÚVEëè“W ­€¦{uÖþMûuÝ~]µ_Ï'"àÍLXÔ šA2-ydøy&™¸øZék3õc†§ûÛÒ×𪃳9º‰3(Á‹oóST¥„:Mú•IR9ée®y]Ä»| Õ¹ê …}e—”I¹)z—߃5%¡Hu‹.%»Íg ø6)¸†ŒˆÌ@îNÉ¡Uª9eÐŽÄÍ+ÒWÅW¥ÈÞrBÝ(ÎÊ)œ*ꀛít=²”]cÊäÒ ¥y {üÈ%ìUÍ5¾ŽpKÖö—Zþ†-ƒ ª(º! ’JeÏWî5€ï¡] éEéûXP®«•ùØåÿÞ%-gGLm`¶›¼±½¡¬µ(íÌ>ÇhiâhPgÙ|ü¦W]‰@ÒVæÛ8Žž¸ÅoïólŸÇbjÖ —S¢ÏŽð\ËZ·þûmã‡ló)c&ˆ:!aERuwcÒ{‹ýÖtüh7¸Ò4×|]‚»æ3SCÖ òóç3ÉÇ 7ç¢r¼ž&®ûàçstÉ,èAË*÷V£ÖÖ³#çC"+qQtdõ–9­cÔý|¼Z<»ë¥7¹M‡\ÃxÊâúœéiÌÞ`):²<ž;ÏöV›…G2zcL¤l¿íŽ ­Z? 
ß]tåR$·£‰›²ÂOf—•É_tvÀ î»Nøl¡7ˆJE"BZ$×[ão—N¨Êðw;‘ ¹ÏÀq4pJyÝ6îH9— P.ÁÀ Ö¤ÇDWÂIoÁ9€Ÿç=£v`²Ø(c~Û!Áâ>ÍÃãbOèÙtFn Ýp‹oÓ¥TÁ”©ÏÙŽ{O»¢t\Â6Jã:÷*ý åds»ý[?ø4 ìPÕŸŽß ·1œ„²‚Œt€ñ‡d&CÛMA©yчýN þèéFPS:>·= æ`AV¶7R:„Uê?·7²æ•¢–¯º¸ºšSÊ÷9(.jMì5$Féö[./fé¿/ÁæÝÐÇnò"èéZ½»L•íº·Šu›œ–ío·Fxåb­v©0nµ'¹x3©±Š¢pþUž òã]}1ätAO邌]Ȉo¯WâÉ~Úiƒ•׌ŠÙ$ǬáÐÃ(¼—ÍìÕP¼JÛÞ± çX6Ká[—C¯8ñF¾;Q:¬6(Âχ9§bv@¤±•蓃1Çû“ÏŠž>j(‡Üãì¿gs³U^8¦š‘?Oª.·Lé㑈¤IõxÅçiñF–Z¥;Ï%xÕ㬗Y¿4á-±?m^öUªvYb#éq«Ô»ŸéÛ®mh€9 AG g7o5sûvAõd¯¼Ì8¾ÏÙÌe"s°[/p]@{s×BäqÔ0Éà«`éÜ`º3.¦5B’ê^t‘4IÓøk3³N§ŽEL‹^&;½J¶hUTò|„ã—T½ñ¡5ÊÇ¥8ç‚ÿnÕD™$±Æüø•ø+l^Næî1Ë6:iùüu梊ÜXSð¾*[¦:¯_ð¢¦n†¸ ØëL¿Ë,´ä™ûežÄ ÝMØ3(ÃqH^«»÷V*_Õ†Õ¹>¢fWIzÛN[ås–rŽ82 ûšén¼Xe…1¬’]¨¤Nvòe 6Z ÷òxåuG®#k³BÎåLÐRI³Ñ‘­*”„ÆÙäÎBû(š¢µÙ%,¡4‰ÀÍE¿¡~Ý©¤ß¿]qéa ¹bxOÎG ¤ò¡+Àª²]O##×"æoÎS…ãÒz%†hW&%‚Ó¸º¾6ïüÀ$g&Pl/›Vzð)¶"XYÛ=çÛyifk_ç Ãê¨íô<˜Úcp5†õý’ŽƒÜÑ•pϦçýpg颪`C÷Ÿ:O°“ZK€c¿ê˜¾¡«r›‹ÅטEšiSƒÜ êʽöD®Þ9›Ý¨¥ÔвK¬ì>E7óè(îê$qLâúðFGÀg@´_ &’®}ŽºmßFiñfl¾TŸ×|‰VFPzù€®E˜[bƒ–0&wöTHnìf˜ïzâmkaD‰å¨úŸl†QƒEŒµÊÞ+†Z>è 9Ë„#ƒÂu[öïV^ºHÍ™ 7ÇÕ¼ƒk¨ MI E@+’0ߘßÛ0©¸ÑSd&ÉäÛ–€Àr˜‡»j’†±œŒRr½Älit(˜ð·W̹‡ïÍ`a×ÞØÁtÕÒˆíãÉ A¯Ã¤M•hw˜\¼.'œñÔ9npÝÔ5—°.¸j9l·Ýr¦mê.F/Á¼jmélm%æá,™ –úÙõüç†#C¢b!›Ïåþ Ï[OµáQÖ3sË´»,ºbÞ‡tÿ}YtêŠù‡.&B™~ÅÔålC¼‰Pk¤‰ï?âv±pZ§b"°“Ìì3¨È;Dðe&-"‚zD¼,’²¼„="0C¸ÄY:ùÔ¾ŒT&Š’,0Ñš­ ³Ç±G€IÔI}uŽÛ4uåÏ“7qv>0É(I[ö“X`Èn6ÔbÝ‚B„”eQ?àXï)¿‘¸Á"¥­_{äan®\0#ƒÑµlù^IkæAÇÞîT–Ú³Q(8BËR—ô üºcCF'Þ×g¨WÂÄð(ý«¶,›å@“z]K¡1€w£Kdè,díNկƵ‡úiJ«™Çò}½]É8©T€€ÞÖ5¥ªiÑ oˆ†)÷ KôVŠÓ†¯hdòù}\,öA¡ÂÁj$ÿ Ë—-º[{™V»j€îäùKÖ'®Kþ¾?^žø®›·•’)¹ÙïR/7¥ãÁy`ÆÇšÜåçaX£AÍ~0Ö°º5ÖŒœÝ&ä¢kVôx?gÿ@ [söá¿êÒõoÆÝI€Âµ2€\¦¼‡öÔÞ:G&Œ ‰– ïaálËA¸pÀ¯,Mbe²g%¿†ElÅüºÆtl‘iͲ4dûä¤> Ô¸ :CïÕÂïlÕª½qº-®Ø[oýÕiÑm q6ÚŒLõ±c:ð‡xLGçŒÆ)ÿ/aúÇn.2tUÂî"IK4ÿCý÷Xæ?šDÒù ¹j1ÜÒ Å„¾è·õE¸7ì~ )c”9$ŸtÍqCGüšiƒoûèu±WþÖ„B·è7E)ãnÞUxáŒÏbô)èËc›_¹\î§ÝSÂUÎQ,%’?½jJ¯«Mp,Ð’ÕÁ¨}a✷À;HY6ÏGCóý=èH²Á oÃ/?5ØÎÁ¢Âýûoò†±Ää–Ç7RXÍÄ‘·6^püÁêæ&àɺuû›•x\ËÅ aÝÌF›Â»Žtóä!™Hk'¥†èl=õvy%¾‡éƒAKž3wœ¥0œöW`É\jØð ÞÖÐô1°ü ‚@Í™ÙÓ:7hw»Yb+®rß<Æ”ÀúÇxËœöÑ»K·08ö[6|ÿ§ …½Æ¬=ßi~ãÉ÷%*9 H6>2U#ÞŒMȪËfÖ¼Êq'Zˆmöøç¬¾ü‘ãª, r—¡nÕ&ÐëU±YS»Heq÷Õg Zúw™mU[ZýV"5z·‘n™s½9·a²\‘¿d’8/±;M`˜Ô°Ø±6‚ç_çí*üɼÍÒ"Pº;Š.¨t£`ˆ$‹ß– C_Ò:í☌¯µ•w6MzäM®š+òtl@Ùx`M-âh{€6‡–t84Ù+ü5Åf/ž7á^û·F/¼ðm÷Sñô¿§=’;Ï{ç(vJH¢þµê”ðìt…5½!õ<§yÖ/p’µ›ZP²"\4\“JïÕ/õ‘8Ùä* þ [é¶ÌurÝÑt#2ßPŒø\rŸl»½ûìÉEWk²¥žuVq2Ñ%`fÓÕüÉÙzáƒF™É1Ï*6[¦Çئ…JÄz)1ä¿~»îéšK²Lë$^\ÎL‡Ï鉔Ɔâ4ŸžðôY%òô¬ŠZL Ø÷£ÍSµ£¥þk»x´8×Ç"PÚÆ½µœMÎÙ2fy —YVÆœ:¯úÄ£ß;wì=Ý\GcO ×êþæ:¶~¢¹}EklÖ±îPxèÄ1«ß›%!Æ]ì[Å8U™±zñô‘ëÅ«âŽuÚ_ß"oÀ®ª}Æ|õ¡sC¥eüCçô=u W…ÚˆkqªîоmžQ8˜Ý,W^’*¤‡I?ö¼Ïžg]À†œgIy¸¡TÚÚjLæÕÏ(Rw¦&§ü‚©Ƕx”_$Ï–ª"—7÷ÍÖ‡á;z÷6Ùa©g›Õ´01}iäÃ>vŽ»³ ¢r7/+]}î_Hò[ lš-»hã3b÷ô*Ïe ì´ÜßÀ¸ËŸò`¬S˜—$0·ßÑŒ±ý3Þ(ÙLQ쮨-s£A ã1¹¯?~OÓ>ðƒ ƒŒæj¾+·a05Œh/Š™ÊæVŸx.•Ùl¢û9û^7”ŠÑH¡Þh0+EÅvù†’kwÿ|IŸÞZ µ~³øÙ£h†oÃCÓSõ²÷/±^vžmC `þ>º( }àÄׂˀ³’OYRôvã6ýÒa»/é¹0|Øh^Ü>‹Ã0u¢iêzoÆ{Ó'÷f±ö, yrô‘ÛÜúm/Ò«p£¬ŸáŽÊãÆWã\±øDŸëÝdœy~ÍþÈ,êì 2 ÑEE1wf‹‰šXŽý¦—ÃÐjã6O ›Uü±y·™JÎ…ø—Š(Äõ»Gÿ Ι÷…endstream endobj 333 0 obj << /Filter /FlateDecode /Length 7956 >> stream xœÅ]K$Çq|Æ‚†>Œo=6§\ùÎ4 2!K6%Y$ˆÒ¡¹»®8;=ÜYŠ»<ø·;"#YUÝ=KÒ¶Q“•Ïx}‘QŸ_Γºœñ_ùÿÙ«‹ùòöâó •Ÿ^–ÿž½ºü›‹ûu˜/Ó”¼ö—7¾ 7Ô¥JóáQpaJÆ]Þ¼ºøx÷£»+è3)eÝîÅÕõ<™”¬5»·ø8FŸìn…õœÔî96ñólÒî5<Õ1)v‡Ç«kcÌdg³ûˆ½HM|Pa·ƒÏçµÛ}ÅÆ9ÜÒ¢M~wø¢ôu‡Γ‹FÌ)ì~ƒoÅ Òîö¬S„‡|M+çÝnŸûÒSTŽOï‹Õµ¼¡¶i÷ÓC~šììÓî¾·}Ù§ó> )BºùoØwÙ¾­',ìýÍó‹JW7¹¸¶Ú\^;Å0|þñîvbçyv°¯®t˜|‚‰~‚]‚992ÂoœH]kÙÑ4ãß¡mœg¥ôîS5ÊÀ«jwøsý×mR‚­û^´j¶Aá‘e¡G»ûë•s°0Ø‚rnÎ&]†6!™ÝžO‰ÅFÍÏä±ôC™Obž“ y<“¬³¢»gWÚO^Eϳô»Û—}Ca>°zŽO’Ö¬“¶ÔkÞ ö÷Wð0ÌÎ}·Ë:£C3.¹mq´ŠÀ+êBÃÌŸõõ¿ÎN•áK¾cƒÐ‰Àô‚ç í'’OGŸ£â¼Á))a×`O€GŒàEjéö®§«kkÌÞíþ [”M¸…³‹6Òšì ”Lˈõš.îZ©)9h‘eCa“þ\È0„@\ç—vþô嵆öÆizû]ÞÎYYM¯[ú)ñàÆè”‡'›¬²ôŠÊÃÌ3è si5ÙKñÊFy µåSƒþ`˜rj_öm,ƒg“Š“nmJ*᦬õï¦Yi¥äœ2$Øtµ6ŠŸfëçtjœaé!€¬<±teýréi²ÞÆòÊï39$'¡¾ÂO‰À8Gb™ ß&úÙ?èÇžJôN=•è7Ï}›èÅŸIô›Ã<èÅ‘\k'å€\t„Ve„™«ÊR‚õ™4H˜ ÷Èô,)O †`UU³÷1[5 
-Òˆ°ª´;P×@a»ï1»ˆ7,†çÔÂ+ÏßëúÀÇ~QQhˆ%¢<ÐwÜZeš½½Öææ³±WÌ™U.´ÀZsxšhž¤âd¤P\«“K“ÑQ} ÕxœyS{ªy {ŒF4Ãû·_D'ÆÍÖl* a~5µA ?Aá ná,°™±¹œÏùÿçX¢Ž—"¨ºX˜ÖLÒÚÎÚ (#oW ‚¢I>tÏÃèsF¨Îm—]£}ҦígÇÜæ#¯P>H±ä5Ó…¤•9 …^} Ç.ÚÝwqÎYw÷ïÇ1†öÄy£¨_·²#ìžNáÒ /V8ú‹b2Ã|è PaemÁhfïìþ]z£^aßœÔ<Û`ÖUX‚#«?)zµ|rµñÂúóâúê| e¦ƒÕSpX;üýÈà#Ï?·Æ{&2Öð(:›üF$ã ËoÑ F—Ç9xÔ=Z½ê"x¢'æ»ã@ƒ‰Jw™Õw#€˜ m N Ç]ÕóK>èo×¹Ôyù î%5Îê4/ã8ß2/Ÿç¸¼\_y_^~‚«óòüX•—Ïöcu^>Æ–¥´všçórRïÃËOñ€½/ó³e4qmIA¹ kƒl LBëÀNkà}é,ªÓ*ô!Ìb@eh‡búŒBª%‚iH[CMX ÀÇ|¢°cˆrïXà·Û°5d~<ñö¢Š:ƒÌÀÈcÀ¿ ° r²Æ/2šUBjƒ£`*6béæj!×|ÊÉe¿(ýޏ’J¦µ½øÁÏÁÓaah &Æý΄C%t \gçˆKÉV’6‡Ïl¢4íÂ=RPú ja„#€¯uE¬­°´B¥eÇ|Ýr ùƒð’0öu0ÑÏ bœwÐ¨ì¬ØÓc`z^i¡°Å4¶Ð-™qó!#Nøíj =gômȾÎuF†ƒáwŒn–Ýðí¼¢>  ÖuQÁpo‡ª·ô‘J“5¿l)ö ôÊèÂ.`»6&êÎÚ¶õJqÎãŽeæ¢a4`yÒ…¶œŸL 8ôc}]HiØRÙ÷Å:õ2`޸П×üø¶¤Wý;ç•ËÇaÃñ1¸«ûš Qì’|BeÒ'Ĉ¾»‡ˆÌQ8˜-Úå® ïw,oý²ûxë{rɈð?þéê:x<_½»a§Å⬷b+h‘Ù5gqÝÅ;X'3PLnù‹DÆ ±µ›´ëˆuÉ»îí[¤#:IØÑÓÅU×L~jÿ5SêÁ/"/ \韥u:¸ àa—{ΟbàÅý¸L¸ñ‹q ôL5»v QS~`I΢ßÓT Ó¼ÒLÁ´Ùç)¡ß‡ŠÚ "Ø´_Mø)ò£;&ÆnKÇQa∾Ç#LÜ•ŒHžjòC*ò>žmD*/`‚\˜[$ÑqÞmc}Ÿ„´ËûSh›ë ]CŽÆFå]ôLšØs)·âÈ´‚ý÷Ïr¦¦~J^Z—üRŸ£»™ùi7ôCKKdôÖ<ÓóÒ¨¤R\ÐF ÕlðŸF±‚e®å‚²€ºÐ uQÛƒwæZÐqøHER ×.ãPÜ]'c™Á”^ÞÞS`·úƤŸV¿?bžÑ¶:!¸:í³ØÌ½x¯qî]¡`mžÉÖ <4RÛ=ï»>„³X%VÐÙUSƒG8ºøÄ ¥áù3   À&B©z x”NDôV¢8¢R‰/ëZÒüÀ>c ùa±‡JþQ5¼”q+g¹-ÈšÐÙÝ0³/\¨=Ó%Œ¨2$ =šãîóX`i»˜qy…H3‚È/¾&}ƒobö–R`³(6߬UÀ ;&Œ~x`½”æZ›bé$`lŠÃ¥m)Ù w_å³µ³¥cÖ Lª* 7aMÞc€Ü^Þüìâæ_>®Y“Y™!uc"cECEpíóF‚qr,ZD04˜Å A[ä‹z€iöpÏ~”?…%ŸSc¯ÛºÈ¬a_´žõå£`8¤œÉͰq›ÈXÏÄ–R'ÌÙ?®ZÚ Š>Ro  ŽHØ0g+Aˆb6êõì,‘’L¬ƒ^&ãAÌÙ’ø2“òÍ”â[{xÕMã:UÌ^·áyø³J^t] sQÅ4ÁΕy'\‚ äDáÊw†£x× ä^˜öe­[á^nZHÚˆƒF¾qHmFf[?gÞíš=ù¿@úüÁJè¸ám“ü ö˜ fjÒÉ‘i°IND[u¦±}û”q@Ÿ÷maù{Òùˆ™þÞ¿iŽ¿B&›duÆYR#à@Æeºèž“sËxàº9O?ïÚ†m9¸Ú%’úw ônŸtªØ`ýBÀŠÈ{CˆÜSÝhQðÔ• ý/:ÕöÁzú|÷Š˜¤|Cጅ%“w±drZ ;²6‹|Á óÙ€€.¥±j~óa Aê–ÖW“¬®ëì7Ñ-QpÎjÙå†P”d´~AM…f®Iy h p(ýL s ׎?ᎄQDèРé˜Ý8Ä8z\|m½b°……Yü¡[ràÏëŽõ 'mmNbƒ ËaVÃYP.[T)JÍæ.Õ>ëˆy-¶`p0”s«ô¼>>[ ®xãô³è×é-6=üµˆCФ?y«UÃÇÿØéj͸‘Á¹ƒÒg6ØbºÄ“DÜËõæº{XÝ0a½4jÓ¯LÓxáÈkãNÌbzšÉ=!Þ¢[gÐ2­ç?î¹oò"ñfÈ hL]:8ó¨t#F´&œ-*1)ïƒ1ê nO“jû¿’ð™6â‹ ¸U œ‚ª÷›â> D¼ÂïÒåoÅ%äk$t„paé¹Ç¨FõœÓ®¸ß.Kf—9wË?ëVÇÒåYMÈž©˜§d–ibû‡¨šf«7IoÈC – ¨ÈP8²@ ¥õî”´4„Ö“eXû›ÔÉ»•g™üþÖŒn ðFnÃæaVeÓÄìtžK{€çù#é„3Ã€Ò éô͹NH3ñ+ïï– Òsm0Ǽ+÷—ÍKýÐ~س5ϵæ¨{ jpóëhó›pµ+9$²U¶ldƒH¼©UöŽîG_¥q0üœ;PÒg¾ž"ø³Cû㯺{Râ|0mö>æû¾Þåàöfª7üÝ$-]uvn¸¸yM½ùÔ\„‘?M‘òžó,crLÿîÙիŬÁ!lޝŸV ±yi|Dw…Þ‹Y ÔÖ­÷ €$SÐYd‘V Gj#}Dƒ–:ˆ21[T­uWå–jPp»`‘h2ÂD:IJº ØÖ¯#ºGzñÈEìB^¸6Dͤ¿s¿qYåYïn+ñý–÷Âyy¤,ÚζM0´­Ò„èªF.´S®æ|í®NaκŒ£@¸‰„qÓßHu÷ö#3X>˜2‚yξީûa~‡åF‰ÉAçËbâãvVîâû_G5.ò¨`ó^i5SÒpê«)‘™«@íc=bU¼ü˜ì'€õàßti@ÞLÎ^¾º0èCÒíÁÝÅo¶ËhˆY0N4¹Gg[RvŽßžt“Ê&5Ì$ßž]ßy{ŠõÓçàD†úÇ»Ÿ^]G =̉¹¹?ÙÎ éo†f{Tø“.ÓVLË€Vh‡@ 48f24ø£ãÉó “±ïÓñbÆB°1Í~“êEÇøË趯¥™}Þ!¯Ì ¹îFI Ö…Ëòý†Ð ëiò§¯°D³§lÙGÉRy=y“#\ˆ`—ø2Ž^™( ŠúÎMnAë’`‘Å83gØM¡b£eÝÿÙh9BœòÛ· æòzí¶²†zínÒ˜yQÃÍ"¼Û<··ËÀkfÛ!rÈ2ÒÿA a nç‰9²»–¾Åfú½.=žuËjÕ‘½>ôF8œ]OZõ⯅šûÍ×wY6éäѲ¼O·‘3Âz}¶h±~—Êô\ÑbÏ¿a&ÏoÞm¥9 ®K(Á`µ V›Áàë Í‘ÚÙ‚uèŽÙU.‰åâŃYÀ—à‡q•¦}NÈ^gÅ #ôÛIh@-«û+~oaÝw•Ü1ÆÉySr`jtYNšBÔ¥lUëœìäˆõ†Æ8q üØ–œ æþîªÅ&åÀæœÞ´µ9Ê3V•=~µqð»@c'l•7Wëxï0ÆBRçŒëpã=á.zXY¦U@Á,å,?(Ãö <±ZeLˆ´=Z¤†ˆxÝb—³À‰@˜]àÜѰq¸‰ŸÃÁæÞ-ö’&ñ¶¿É¥«oqà‘\¶-ìÔ¤B§ÄDP´,ïÀÂYá1ÅÂúå3ê ¿ïÅ‘;Ç×u‡¶Â¿À¹`>N°yXc4*A‰ë×KhÕÔD!ƒ½…Ôr'¥¹ñ¹î(ÿŸn¬â[ªíñ[&âFÐ/FûH ’´l“a&rž{ }ñN¾ÛáˆÕcâ–„8’x_‹Š9Ò^.›hYÅ‚uÑ-Q¬YÌ5'ÜCQýÀ£H(¶Ó4ð(QG0.©ÑeÐIg#Uy_é¶fI0Kk= ~WÉÉáçÖÇËQšQBÊàŒÉ‹Â‹ ìh^ÐAâs·ÜÈwÅâX÷o ïOíí±BXÉ‚‰Ý¬þõ˜…rË3ø^—žE Ké|GÉŒø8–|C0[ÆöfœËÛǶˀ¾g¹!‹ÅH²EýÑÚ<–‚ÚY C½å˜Žr®d„yÖà=/Ëðt Ìë íŽRô“RIÄúÙ^¬ñ¦ž„3™LMÆ÷8úq&S˜=šÒÙŒfÓÚ­³­Ù¨€|Ê)¾?îrèm‡ä=íÐa!.epÜ‚X¯Rð€¡Uùžë¶YŽ-ÒV±¼}k`Ÿ¸¹Ç2÷’壙èÅ.ÌCÐ\÷K Ê}¨DÕIPS_ë¯[ú`á•=ó¢²KRïúLs¨?3Œã-ºÑÇŠjeãA3aÉŒÌTåÜ‘ °(¹Z5J-š‚w\8$Ügž‘Qôõ=³Pž]9¼róõéj2«¼ñz6&çç³çwÍ0xé Ž†tbéîE!°ÙäšqxyE—"aµ‹ƒìO¨aXŠvt‡¸Øvù¥•6[ç+Yy—  WoúFŒqᯠQþN+Éõ®®舘K)ôy0¼|Ê¢rü—WäÅÔJ¼x<ÇO9@݈ýæÓ%b„]ô—@@G%ìù;Âõ>my¢n?¥›?Aê XIñkçB 鉩ä\…×<͆YÅL°Gš â“=ýD€øDTþ–L€H†ê¤è’ˆlÿ»R ìZã>FtÊ÷–§t)èšâ¡ºþ¡À 5wÚWr×è:åÌͳíÆ†¾Pi›6Pš¼3½TÊω 
7KbÅ»N¾%7îØ_b®èŒ±Ê„R@°¶V§S§¿û§Ó•ÞNèl…7šÒÞ«‹:‡˜ ´™É•ÝQ9|£º0"¯P"ò`ËF}E 4ú†Æt[y–ßmd±iðËbšÒö;£Í³ i4’Ïb·Á/[Â>§RŸÇ‹¹Pѱ¾¥Ĥà°dfÆq#=–”x™›!ƒÏxšVÆÒ­‘j7e^S~ÌÎls_IŠ@±GE·»eªõ”¯öš°áî…¥µÅC³²bå«,tëŸüÃÈÙƒ'ºlm³¿9e̳íP©<ÁPº¼ Ÿ[õˆ:%;úû~$Ù%=‡«q3¯I^îbuWÙ,V.÷`bòv€c±¥Ù08dÐJ³[Ij²¾ŠÉ Z‰‚!¬Ŷ Ù7ÐýŸ‘È h5 C£çL?R½•y¦‰yªû…Èœ×(èþŸn£Ž²è¥ 7+¨­ìÏ=û$ÐsZ)`ï#[×­æõYºàšuŸ·.nx‹Ä-ú•½Ã-•{Wo·‡yíc1c:B^M+‰3¥鱉ÃV2§ç›2mØÿ‡5–éiY)wœôvùß>’2`ô²þoY1L-n¶¸uaò­‹õŠç[5Ï®ñNZåªQÐ;‘7‚bLïeq£ÜÖ*XÕÕë¡Èå§ €=œ`Èç¥cÅǶؒ×Ón$ñððx©ƒôŽ:޲ñg]P²ÎXÕI3¨’¹tQÎ6Éf3e›T†;j€mºª™ ?;®úSb&Èàk2Gb„øwtß·bÊâQ˜Eƒç¹‘íý𮄹|y+ŸŒÛbwÆ"<»yz- Jæi\Ö"vÇ“ƒÿËvëº,¯·ø÷ ˜ã}ÐŽ²‹ÁsѰn; ÝJè(D³*Ò]¨3 °XoM5îÀnÔNË/YœÌ|Ád_VNfø†Ù»Î¬+ägÍdl¿¬òóŽÉ׌ë®õíX&BžÄ^ }çh¿U+ø…»m†œ“dz¸¸@ÎÀî³bƒ.LåZ Ca+ ´†•%n®Úm÷!0ŒbdÚÎËIäN‚±›üÏ— l1á]ˆ}GQÃŒL5|d]òÉZ3:äB>ð:¸DÌ‘¯‘å ñPä}!Ò$ .pË’G.W#@(âN[Ä2TO–vS]€øV]Ù½š3~`cÈõ‘*:€P£¥\½ÊY6ïœå¯÷u3V~_Ej®Ò„%|Åô&«Æ6Äóqœ™:Ð,,Ò Ï6óŠÏðõPvàá ™80 AFcýÖ6 sø¢ZQ°m¦À¢Ó l<„Y׉­] aæ9)äÛ‚µLD^K÷ì±ÙÚv˜íž×VøÞ™ ªW»#Ͳ°kIké²LÌÏXëóŒÛ¢¬ £¸-J…̸Uó‰d…eQ:ß¿‚Áú³¼¶sœœo)å˜Ö‡›ã‡¬×ºÙ¸°T’¬L¤Ò…Ë+â‹£€åi#É8ÊjÞ³çë2s`yž`Eá}™„ƒ`™_‰Âºh¢/ÁÒsù±Ò-ælN‡þF²ŽÕñ)»³•ĺþáÛñn¶¼X«VîòÝü­—hïWJBN+Ï>]¹€{·v×:DÂê„Çvë#.yíJÉÌÉÙ•6EQtHº.D¹G€‡…íè;·Ê§…‡øÚ¬‰00 ÙZ8…ÎíìrÈÕ¿Àð2v+dq=ƒPµÐmäjÀ~=VýVÚwp¨ ËŽŸåÏÄëöOð•‚a{ªB§ùÑôÀ…èx <ON­u¦¸}9ÈÎâ%ÉYSX9ZñÔ#Yì87X«Ý&Oö4ÖBDïjgþ<×&¤ýŠêÛŽ—á6K ¶Œ"öé?¦Äóõ8 ÆyLt“å}¦kÉ”,Mº“œÝaxç^É¢Ô£Ø])ó58 ó*‡2o‘|O±Æ]ÃO6Ƴj×uŽÝž`]œÅYÝó–ÞDrq|æ­ò"`›„£rÕâ’À@VgGÇFqßÑ5‘ë¿ f=Ý®ž\¿ÉÍröýÈ{}mìoË‹&m‚äÓ‹ù“cluzŠ¥õ:v\(UWÙäxNV¹)Ö·¬÷‘ÉJg¢¶VwØÆŠvoû›ÂgùHÏ­^0 m ÷`;¼:Àã¬ÜS|XC.QPYßVÎr»Jls¶,µ<¨Q W,&ÁSný$uFŽ­ÿêâÿ6Lâendstream endobj 334 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 3759 >> stream xœWy\Sgº>1’sD *ž 6qqwZ{;ж^•V\+Å PdUÂÙžäMÂbØ‚²†A((*ŠÖV§ÚÑqÆí:.c/µ¶ãÒúú9÷w¿·?ævî½9ù%ù#ß»>ï󼟈>Œ‰Dvž«|<¬?~Dt¿¼9˜ibp^ôö˜Ä1è1ú®Ó‘QŒõå¾(2Ê3Zë¿lWB@âö¤ÀUAÁ!Þ¡a;v® —G̘9kò”© 3YÍx3nÌgŒ³†YËLaÖ1ë™iÌÆ—YÌld<™MÌÇÌ,æf ãżÇ,e–1Ë™ÌÌJfó)óãÈ813‚±g&Ó™áŒ^ä*Ú-úaX̰ñ1ž?ü’Ý;D$ég·p3¸¦(·Ÿcß;òí‘Ù#9}cɵŽvމNcœVzs”÷¨àQ9£¾=yt† r, n¶^U¢–A±0spŸ[¦Ú“) ÎÉI!áϾuIß¶@ÃÉÙv½ÚàÔhúÕG&ZXm²*Á +ÐFÞK%'Èôü ­ r]#Ö€B*g»õÇ¡ºA7UÖCÉì*P282×ñD)A{¼nç(œ‹`gI5Áá—1äò8çnüKùŸÿ|üìe›VKI ë§„eÔâa]¥ö2MßP¾ ‡kY¸ Qâ‚Ë6Â,ض$5ŠÓHœ¯<`É$¥œ½`ÈÞ$#Uÿ£¡eJè‘9 ÅÊašIÔx‹…DÜÁ㸉OˆqvOÆ’ÑO¦ :?|ˆNR’@|ù`Š;¨èÔôÑLÛà”©£éØQc+œ‚ŽðªµZ_€ø,fk”Ÿ_BpŽƒÉñ–Á™Ñ©GB®Y,<þ‹·øô»GÖ“¹äâO¶ã<2×â¼ÇøŽ(CjdgjdYdqXäÜïɲ>Ü͸¼ïæ?-$¿¯“éÒ´¹eÀU@Aµ ÓØ½`HÏV©2s¤´¼d½g˜„7-É cž^ÆšÛ㜷à2üßrúdU'pWϼG"ñzžpýXYZè ’ó^¯hÙù5p8‡}ãpÌÔ¿ûuã·Ëœ÷ŸeÉka/æl“= ûW…=ø<·QËm¬¹,Üð)¿åš!ŒfnODÓÉ8âüh Žè€0=-м™Ч‡ÈV²Ä‘Ì€4âÝ‹m¯ÃUú:\…‰têœ-8·Jôóe|«AŒW7ÞB&áÊ(V3'ÌmšŠ‹¹CY2üF\ßùç.JOE­cÉCC—ÂÝ}Rlb)Ê•¦TÚìÎW$"ŒÇùüì+<çævâ_Y2ÝÚÐ+ š-#ÓXX=a Ù‡à8ôÖ6žáœµs%8¥ÄÎÂzeõÊŸŸ±ÀQ€ePdáø{bLò­ºÒëR“E²]åÛèãÛmù›ØëºôPÙ³6T>_#—tè/@}.@‡ cØùêÒVë\(«…·,¢*A-Æ¥ˆ¼ªP]E\}+TÒÂnSÇi¶BDB¨Í0{˜r®1Ý º¦ìQÀ{tE{ñm¼áR}ì@é-ga#Tþ †(k£t¶Sê½»Aužr")p¡$ôÜíàgâÁÑx‡¯ù²#¿Úz6P¥† ©8ä±KS ¡ÎQeÿŽè]ÜÑL;§½k½EWOcôSm§’!§1†h†*MIªAY*Pç*AãAâ]f`YŽ‘òJ¾«ù -7?š[ õ ;†ÎÑW—î@žüìR®Ï0‚òKò‹á~—Çd~F¾Ò®FÐçsV¦6%ÀPSãwLcº®nÀù—/5ŒsÞ¬Äû—ø‚€ÆÐ.àLg×eýQkY%l …#Rôd)9·øw„ìßlÜÜœOVÈ«“ëÍ•Uõ][ 42sýC#pÇN¿/ÛÉ:×3ëÔ s—†{†Åo€@ÎãAôy)õqÚO–7p™ßðÓFlݲµåè×§þˆ3 )Ý´qލ\(ãiŒç³  ¥îêý={,--u_ZË©Š¦‰Söz^f£º,‹–9%S™›³âC—~Ì6¨uPä ¥%¥µÖÁj…ÆÚV:ñ èÔ=Þ ï¯X¿,>fãjõ. 
ZÝHµºš¡YÓ[P¿Mç á°%g½"0>|»|=øCp]l[j5-@%—¹–·l=uâ•¢Œžò8xm ‹Ž•QI:ÿ\RlÓöl µÊF€©â¯B…ê-1.ÇŸùœr(WßÕD›Ô š ³—Ué†Jui4Î~ƺÄxǯôZ®1\¦ÒíÑ6ì®>±".&)%Ú¿3äøºNŸ¬“â2a~Iý¾Žs:W ®’CÚë³Ó£†PÈe^F^–;Étq¶(°õÛF›m Œð¿+/!"9EUB}}õ^ËWŸ´¿OÆNOFþÉTºæÈšñ C©(Qª“s¤qKÖ(|[áq§à»×Ÿ);¡Ž¡*òË¢ç 3É"´ÞúJy­ÆîÙy6D­\ “KÎèÏÓ¹(„ËpÖÖ§8všêPöéJ®Hk-äCÉ.Ȇì$÷ìœ ‰ÎdUitÑ”Ð?”×J®è”!²g¹l4lžDÕ줮\ÛKµÕ¨91•IpÄ,šY­5ÿ` Òò3KÁµ ùîǾiƒMÞt‘BApE« «vï’ûû%R ¬St*ºÔ_P .ì«?TÓÜaéƒN8´Ë\šHäô_ò¹…þ}Q9t?•ðdôßÝq䟺»ª÷Q$¬`qúKž¼ GÁ–?¬ض4Õë"‘Ä×@õã5÷éŠæ0s"EÆý4ѱ÷imQnaf†*;O#Û1i¶z7l†í渖½š3ÐÉiëxκWÑ'áL€–ŒåqhEI„vþ&Æ>›D;öÛÞî®âbu^±4#®1œ¢2ÉdÞWYß¼íGÞ“¥„ýP~üå_l x™u\M÷ïÖþÑÇ¢ÓO}W:?©“®9ðÌ ìYºÈÂD3FEÑPÆ=ÊãgÃ5f]ËÞ:™±ÊllîL‹‹ƒiïÉzȾ4ó|øÏœºèðÉŠâ¯oˆ® 6 І¤^Œ+—ó¯SGůœýÿß-–¦0ùz·pÄv+R:Ñ™1×YD§aW ŸòÄÍÊ9C]«_˜ïÖUk?‡ :½C»<¯à Ìù¡^hç›bë"äq1‘‘µ1–¦ºº&)E!%>áÊñ ~d>¦æÂH«ùw­æÿbËTÆFæMöR) 4VÚ\ PH ®*:2)!zÃé¨S×Îþá?¥Îƒ3†[]DÄÆFDÔÅ6 ¹L¦á“6Q›Kîa±Åº zðÿ`% $Þ®ARŽU8’|#«÷åi3(Þ)wÀî Ò ¹¤UÃl+ ýjµeÄNc‹•Š ^k¨Ê/Ö™aPU‘åăk i£¤ ¢§ÅtàûO;÷¹Î¤í„0iº_ëÜCö܉d\EüžtHuÍP¥Ëm•­Ó´)^ãPewB¬6ô™€ë\~¸Õsð‚žÊgÝkB¨¤)´ë†xݨ)ͦ{KbJZfgëå ¨Ó:òb\ùkpêðWÓûúåfðÁëØPë{œs¯í‹ÌÕ£gNî úä×€ö”ÿ«?¿†Ê;а³mW]`©x¦€¨õé_¶Îù°iÃ.?Îù†æÉÿåò‹íd]5.DF„ËMTl+xºyü»g^.Õ? —[˜]Trã Ú£h KŒŽŽ®Œ>`2µ­÷qÕW¢4‹±Wñ_ó*ÖZRjê.2bô=Á7Æš(þb"_áÛ…wž»[EÝ…·øsŠÎ€–ËÊò %RN«Õ§ƒœÌeŸnö§l£RA®-˜â[FºpM$ºê_|ªDÏŠñ;’ÉCJŸSDx´Â¥€ž€¿kBoü-¾•Ÿ¯ÓCW”›Ÿþž'­“nžëd$xˆä$±ÿ‚8 ,.®¨ ¿HöÊ2¦SÓÇ0Ÿ‡ÿHÇaPìN³þD/gê,U®NU”-;½‡‘·´™¨ ›É$òµ*/†ªÒe—Þ»„¢Ò#ß×=tà¥öÃçT9Œ`˜ÿHyœendstream endobj 335 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 330 >> stream xœ?ÀþCMSS9‹uùù`‹ ‹ ’÷Êž¹RS434O»õøˆù-‹ ÷ºÌ÷“Æìè÷ßâøF÷Ùñ§ÓÓ‹Ýòûßû#ûýJè÷Ï÷4÷RûÏëüRø÷“÷'÷ÏSCJNMûøÎuÕøåÒÄà÷²à÷ˆø8X—X³‹ÅÄÂÅæÓ½v_¿›âY¤[£0‹û"1('rc­]¹OÌ|´™‡É|“‰¹}½_‹MIRM2:?ª¾L{4íN惺‹÷éñ÷÷.Õ@ø•‹ ÷&Ìø&Ÿ÷ÆÙø÷;ïÌ'ø:*û–ü:J÷£û;ßû¤÷|È奄ò÷B‹¾ü  7Ÿ [>•jendstream endobj 336 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 275 >> stream xœ÷þCMR5‹‹øáù.‹ ‹ ’÷“Á¼¸12RÃøˆù<‹·ø–··Ÿ÷Æçø"ù©ˆ‹jDKû‹w‹z_œ¢‹ÌŽÅ¤üˆl‹€!Y_Âä‹Å‹Å‹ä‹Â‡½ #·Y!‹–ªù<‹ë÷á÷ Ö·æ÷ ÷¦öøá÷UaˆxM{ƒ…7‹|‹ûXͼÕÃdzåÉßÅ‹õ÷û Òû#û(=)W·‚™¦«´¯qŸlާ¸Å©Î‹ìÜQ)7QK>Jûˆûc‚ЋЄmøe 7Ÿ ´¡rBendstream endobj 337 0 obj << /Filter /FlateDecode /Length 24775 >> stream xœµ½]“.Inæðe‡Â@7¾êQð´*?«Êa:´$Ú/,r" ./Žzgg—ÜžÙÝYŠ”~½óy™ùNŸáR”cb÷4ð"dUV& û|¼¦çÿÙ¿oïOÇó·O¿}JÄ>Û?oïÏòõÓ¿þ‹óx¾_ïžûó׿xR‹ôœîãõ¨³¯wiÏ_¿?ýÕËÿùë¯Ï;¥Ú^¾ùêÓñZî»Öòò@_W¿ëËçï¾:wzù9Húq”ûåw›¯;õóåû¾úTJy­Gyù÷KC‘ô3/Ÿüq^¹½ü·EÎ÷ßJ«Þýåû¿›’þH,¯ ×v•\úP¿Ÿ÷ùò—hué~yç|_ù=šåÔz{ùL^ùõJmUïï>ìËïE{¿üßß{×£ß/ßMÚ_MuþØó¾¿¿þúÏÆs¯×òÜKίé¬ãÙýó§—Ô¾úúoIO~Ò|ª¹<*i¼˜”@÷W/ÿÇWŸÚhy_y<íôzw2þúý¸Ÿ½ÄŸ?Ä_¿‰¿~ýîƒÆ¿þ íϾú JéõnmèõçO_ÿ«M³IúæÈôò¿òÓD~¤Ð7Këÿõ?¼74µyû@‡ï>x¶ß| d2üãø+?>²‡çô›ûþyÿöù7<ã_~ЗE‡Ù­2ù,/uþþ¯>ý¶>”‡çóo¿~úã+hcöúÓ×øj_kz³Áý:¾Œ|^õµ×ç\ïúÚÎçß}óüŸ¿ã\×^Ò®ëùïÇ„6†æóß ìŸ>•ëº_ÇGVKê¯Çùü¾`òÀ\Ïe̯×9àRÇ„0DüöTî40i¡g4 î %xô×TŸ£a"î6Y<È]¸S„rÆáAý·§?yzÊW믹=gPÆ´4ú41­@Ã1©Ö×:šÝ=áñåë¬c øm´¸6̘ÝòëÊÀ@I½!ÆœšÏçÙ<·×Ô‚ÑÀUp WÑ9ì`ÏJ=Ç”šŸ[.÷k»ñ¶S¦<øz-uÀc•:ó€¯ ÃáñHë}iyR´ñ(ÇÄ–ŒÑ"0™˜É!gÓ‚!õp ×Ò9ìýx{ú˧2Fçë®c¯ú}bÊ10ƒKéçëAøæÐ*ç€sÀCîXÆÆ0_(n>ÜÉÁ`“޹x‡|Ž/O£…i®·sØû¾år¤×ÖŸ[/ý¨‘9‰ÉиVí«^pÚà1`Êxv½,å„üÉÁaÉ@ Ç´:ìžÉ ¶×{‘@äÇŠ˜Zë½ í¨x¿£ïùu¬Äï æ¦–¥¥üÚ»Þ@<¦ÀvŒÑ5Ì…• –“ÃŽ/ c‹AîÐ3¹ià¿»‚Þzï‡bmcØb`¥aAq(& tÏ* xØ÷hµÜæ'°aÆGr¾ökáà°d …cŽòz§çàÇå§Ü7-L‹ p-ÃÞõ­_˜1‡^íõºØ7ÇTb†ÜŽ9· 5'ÀÉ£cÆÖoíh³•1åœpÌm7˜x«1ߟma*˜OkÃLœÃ®5?ª»^ã…Ö $Æ^ ã¡tü;„%Œ¬|÷ #þRÓ¹P`Öî ‡%-ƒYz|$Á³ø"‚ èÛŠ ½õÖ›â+Wîñ„op~_0ãý'¼Ÿ–_O|1cjÂŒßèåG=v-eiì°ØsØfÌÿÚÚׄï<Ø ä 3ñNàêyû½öAåâ²ybn.¨CL…~ãÛæz;´Ø`ê90y¡DI ‡/[0'æä’Jçœ2 F‹¾a¦–Îaï‡a=Æýž0qn VŠkc©b*kçÙ¸´×Œ™ÈÁ1Jê0œRZ*ç¢ÉÀa‰@ ÇTb&‡± iŠˆ®ƒ¸ŽÎ`ï… ©±ÖŒ­î°®FwÇòù¾`nbrŽGZ둱(›bh܂۵arÂ{/– ´p ¶üðRÆ@ £…i®¥sØûá¶Ô…'ÑêØ¦•*[Ê0ÃjÉ—¦) ¸±nÖS³Øx4›a´`ÆRSñþ‚AÀÁÅÈ01Áaâ æ°§Aà:ƒ‡^èk“äÉ~ ;sDb|¡ã1 ¨öׂ)bÌvW ˜ŸÁ†Bž[´wü9å" Ö6oŸXåS‚`´0œÀt[Ø©s¬Ï7Pc>*œBsò3/çø `BÃVáàüd öãáãõßç§h°°aÆŒWÎçÉ`Ìpy• x´Œ"T4ÐW6> |åí‚ÄíJ`ü9øþ/¼ü_¯ oÛ`|Þc}„c]‚– ´0Ì9ÌÔ:öÎ*æ6eÌ9gÄ–Áa²3à ‚t˜eüÊs3¦ÖÆü;m|³ã©61 
¨¶00PH/D‡]8[;ëFð z×À)\Cc°wÁûUh×a]¦¦~&Ëp=‡ásnìU9œc½„ym0õL4ȃb¬˜W^88,la˜1™b6påæD2 fߤEP¸–Îaï‡:wŒ8ìÆñ¡txÞÌØgÅ}l¿;,Ïa×Ô×ðMÃ6ux>® 36ŠLKZæsàèTp8/Žéa0Z˜AaZ‡½6ïç¬gXé2y_0ãéfìNÇû¸ñ]e¥b¨ÜÉ9Lè€:)FËã\88,fª3Ö_ÎÛÎa¼Ÿœ‚ѵp ×Ò9ìý°™ÿâæ§^Iû̉Án”}éÜ<Õ›s-šæäß¹_ бˆ^máàð¡cN7C϶ 1‚9ýo˜©¥sØûA;ë‹7ppU,k“Ošï˜}OÀcõhp©ŒùR †ÓeÌàgY(Æ ?6cÁ!`É` çü@Æáºù¦C†Á£…ká¡¥qxè‡%沂©`üxr#˜›˜R4˶6rÿ8fYì[ ÆCÕ\?)’¿zqX2ØÂ;NžƒCvÔ"B %ÒÁ~­ùC'äÕ¹}GMÏí}Áäd+ÜÄŽ]¬ulƒ©fáÆ7(вNKZ8æHÖ1qÀ£XDdÇVÄTÑš?tÂÜUÂÁÓΔz_0ãÃ…ïiŒmì%Æ:tÁò݇ÁozÅ©,ãñæ68( öü‰IÄLc2ç£t‚é¬2%œÂut{/Ô³.JŸm¢¥˜ñaófÈ9^ÏK®'0¬5¦ç™çï÷Eó/š;,l`˜Êíôd0žÌ°¸¦Áhá*8…«èöNh§Òza˜¿/˜NÌø6î]3–šŒfc>ßDË“â:.jæ– ¶0Ì0Ÿ*g%ã0füÜ‚1ƒ¸NáZ‡‡~p©ÇEOI¯û¬÷Ó‰©X_ÇT›œ¯ãcßà·§±‡¨ØLLбÄäÉÀÁf{»@TÚ³yá6k (¶íš*E¨h:!û8 ¢Â%ÛàCy_c‚ç3ÁHŸ­Ü6ipk=àñDÓà§÷jý næ· DmܵƒÒ±Mž"£…+ᮤsØza³~“ƒ¢Éuü¾`nwyc¯2À›þŒúZæbË‚SÀ:K[æt·qoùº‚9‰H 'pÁÞ Î!i˜“ÃH©©vmA‡ØsIc¾icGžzå3.5¡i þ±§žcð÷ÙÞ ²'µàñð±ŒÆcÿˆýe°Œ®€S¸~Îaë€YŽp/·«ïL´®sž00 ¬h¬#}ŒãËl4¬;ÓªÛ0´û`Ÿ‡%ƒ¶’ap^ó<à¸æ\DæriJ8…+i ö^ðuå1ÿާÑ:¼ èX ÆŠ—1…2é:Χ Æ Éýu%(•VP0p˜@oˆaÑd¶ömØœ¿M€Áhpl˜PÑì}à„Ÿ“¦Ö2V¿¾Ô‰I8‰}ðEïæ°†i–³¦jã£Îcuémþ>–©hí°ø£aò0$0±8ƒ|UàC‚Áh! ‚À {4Ç a—<ï &SòØ;Ý4ÃÐŹi;¢;lsÁŒÞèÜpKZ8§´×spÀÔÓï)Ãàѵ ×Ò8<ôÃ Æ ©gæÑðûD ííÆžÃNŽ æøçsR`Ú'‡€%‚æ¢a Çód€±k!˜öʆ Õ~ï‚¿²ôÊo0›ìø0šÏ›¦w?e˜åë¦-m0æÍ}_Pì'úäp ãÏ1UÆ_pŸÞ*‚ $¸ö»iè­·È'7&K,ilQùäS“9$tp8@‡Eåp6Š1¿Ã ?¤…ƒÃ’ŽÉÄLø*Û"C0Z¸NáZ:‡½~¶Ù¡P½plª£MCžºÑ—[qzsòùŒ•ž+†Á<ÙÜ0VV¥`à0%ðdÓ8šÅÙ§3“(ýËyƒÍŸ¼¸ŠÎ`ëƒÄ1á$~çK L%ña<ßïpÛ°® Ɖµâ÷n Š7wXÐÀ1¹a[6Œï¥µE‚`´(fªèöNhSåyB­ ÿ¾/œB³¤£ù>Œ÷[L¢PíBüçªP€hí°ø£c*NX²ë  \'pÁÞu«èüú<9}½/˜ëÄ´U°O€ÿå¼ ´ÜN«ó`G9 F0S‡O? L·õÄ4†L ‚ù(L§pÁÞ ‰8“„ úhؾ/˜D»¼¤Aãx)aE¤kƒaÇݧœÒF‘¯…ƒ’=…è8czŽÖ] Uð7üÏ :ƒ½ öH:D샻5171¡åR«ÌÐ YÐ#cÌ%/MFyppX2ÐÂ1CáaŒ^Ø*.Ã`”H‹ p-ÃÞmÆîñcƒÛ¶pÿû¾`ªvÄ÷°lOLbM¡h÷ͳ‡á¶=6L9‡œÃ„‹íº'f‰CÖä´ëÉwÝ¡…S¸–âðØu®å¼ÒصžMÌP '‰˜`[ß¡nÖRÏÁ½pÆšcV<ÛÂÁaÉ@ Ç(bnrHŒy›2’EÅM-œÂµt{?ìÄâ°|)ï æ&fÀ4V¬ÙèÏ®÷€±f: 'ÅpÞ ‡%ãíibNÎl“ü3×"C0Z¸NáZ:‡½6,G£±ÝíÇXÁiè'"…‰Gé¾yD ïØ–;ŒSé5)Æk/prp˜"ÐÀp6>Ïöð5ž‹Áhà:8…ëh ¶>襥Rl»Èùø}ÁÜ<Ó.°“:ç’“Ž°Ô6SXO?A1æ,Dã‡%ãíibÎË&=q€é ¨Ë0˜ûï 3µt{?4ýŸ§6õìz¼/˜ñï…Mù•df_'þcºæ«p˜Óy’©nyŒ||öÎ!`É` Ã4b&Ä1-"¢g®ƒýî*Zó‡NØ[/þb¾å[îÃTm‡ayv†_ܰŒ ,O|ÑûM Ee¤ëäà°d°…a²6ÄÁaÈÆqbÈL'ˆiᮥsØû·Vá BDÓ9ö7ýŒÁa¢,:ÃAÇ;HŠÊ@0høm´¸@:)n˜NKZ8¦rç=9 ùê‹ ÁháZ8…kéö~èÅÝX^/ù'+‡d`j¡¬<Ìñ à0ò†F .Îa¡–´ˆÕdà°þE Ç §•ÁA¬]ÿ¹k@ÄTP­»`º ·ÁÔhC¦q©(ÇÍmí5ñéØà7yðú¹PèÈ 8,æc»,æF|pà >¢!˜GÀfjiúÁiäÀƒÊc5ш Â^Ï®@Ì£o0¤žt”M ™Å“ƒÃÕC5Sh²1CFöPÍÐÂ)LIg°uBýJ·&ò‹‡Pï F'ŒåÈ Žð-&ˆ÷ B+=“ Ñ#28,TÓ0ãËdרް¡tÑÀup×Ñì½Pφ p1à°‰?01§Ä¶3Æ´~Ô PË“ ñ3ÂõÊÒÜa `¤€ab‚Ã9Üeüõ7ˆMºÿèÊyÛ]}ÛUxÂÚ>¶è·vÕŽÐÐxã»öDá)báK€Ü~Þøî'Áxöµ, –Û“ˆ™ƶŸN$!ûOÓÁ \Gg°÷ GDÍáÀê(ð½/˜¤°H<(„=y¸ÿ…X¿-!àJ¦àMaoá– ´0Lº ö1Á! 
Ã>Oz%ìwWÑ›?tÂzOQ’çBa©ßVÀSѯ}ŒîjΩ^~SÚ‚)8ÍÓÞ8,TÔ0¥1Ž#8Œ/ '8!C0]dR \Gg°÷ÂÏ*2÷­wLÇœ–ƒ‘tº_–)2x ÐPêò{‘=Ú;Ü=O$0˜ëÚs0h–’WШ¸†Þ~ïƒYú7t®Wˆ›ÁÎçÿ+ÜnìðÌí¤ü}A‘¹u– NsÂtx²1Ç~Ëñæ2 †±oZ&´4ý°ˆö›‡Ú°^Ó­và à!íçë­ù»Kñ~Óf\® £¸:C½½ƒÀ}²aÆøEP_0ã÷è‹Á i—Nà:ƒ½:xo´a[Âá㈣ÈÙL£‚^Þ¯:ÃMÔZÀ<?yâ7=“ƒÃ’Á@ÃLlš:;e:;µp ×Ò9ìý°9FNÉWÙfKOgŒ·b«t—‹ÁÙß”¸À—àæ KÄÌ+#˜˜ÉUmŠˆ®ƒ¸ŽÎ`ï_[‚Ï‹q¦ëû‚9‰ËIS,å°Ë† ˜á][`o5Æ'üt“ƒ’zCŒþ'=›#ö´,ãÜýÞ0SEãðÐ ½3ähaÓ×ÇS¡?1ãÁ8¿9Ž„:|Rȶ ŸÂ{v­ˆŒøÊ+/í–4pL"& æjò'j—o?»zÞxï€æ|:êx"T-&50„ë;¼–0Bxé[ir³ ÆZÃ]Å$ÈÜEƒ€%âíib&F§‘ÈÝ5% B£ÁC/,¥³)çß‚aô>1çAL¾ª­C¢¯b è*‡³`~Õv»FQ²œØÎÁ`“A—a.z/‚C?›ò…L†ÁhaZ8EèmúÁÏìB„¿öK§¹ƒcæ†%otŽå|õ FfÛ…àâ†}2pX"˜ßjY0V ã^¦ÁhÐ6ÌÔÑì½Ðù{•.½Êµ:0‰ ¢9c‹…}|æsɹo0&xrÊBOO^88,haø·@` :‚çÎ)Â`4h&”t{/l@bÞ–õÄh?œ/wžh¿<´Ú`<ÒÂ-û¤p‡sX\@¿êí‡'í—§ ßQO-œÂµt{?4 Ç nnÓ*Ú1g±(ØLWS¿¨Ï›åþ̾m˜¡9‚’Àž 1^Vóh^©Þ ˜3œÂUt{'d‡ ã ×û‚É *É'â7Âá5ü¤…ÐW¯Û°!*ö€³½Ãîc›˜C¡gÎÓàQC„hPWÄÔÐÛï}ÝxòéTøi‹lbÇœÄ 3;ãß8%Ö)·ƒ<“9˜Šo˜+eÊLX"ØÂ0Ãz/4¶¯×!¦¦éண<öÂÖ4œ³uÅþ2fsb°Ùå S8‚ÛØ ãô&ßÛµ`˜QÓÂÁaɰ¬ b°_¶eÍÔ‚yjaZEhiúá~b"+¬õ£ËO, S‹àÏd;àÖ4ýèð{L¯ï†¡o¾¦…ƒÁ&-sÑû°ñ¤K»l0yÅ<ºFáz;‡½ HÅ!Ý*&Üû‚Á'ļ‘MÁÊ_qú:AÄ—Þ+¢Âø;Ël°$Œi<ÆRk|O×ä.Ü]¼¸zl¼k¯IŸÆqRHLR ‚cÏ2Ò)9é0P;à/¶-ÆIP C¡ž ‡%‚ˆa 1“Cfèö”!˜3ˆ”p×Ñì½à ¬~lÝ®°«‰IÄäv2)©Áº+²27˜y›&„ôãà7â –cFјœÁuq† C¹aBÇ`°÷ÂuÍÁpÇÊô\GœDdDb4òOå8ñ–€ñ<NÅAaSopX"ÐÂ1ãÝà%;ƒJÿÜ!•p ÓQí÷.ЇÚ33 ¡ÐáÀÜ J)xÝ…éxÊeÆë^`XâM!A8ß´ppX2ÐÂ1'Ï6ƒÃÙ×2 F‹²a¦–Îa²¬!?ãÑôÎ “‰Ïëå]è°Ì0Ý&Èwv(Ö`¼Ñ07K[æ û58´›îWa ß™t×Ñ<ô‚¯­uE<^cZ9éq Ì©ƒv*fòºt2Ð.EUŒ˜¯{Ô~(.Ó9,x ŽiÄLc¢€£-d† ×Â)\Kç°÷C3~Qª_½Ú¥ÒÓå2gi°‡ûÇuc$1ƒ? à(K3‡Å˜§{†» $ DsÔi“¹@4(+b*æíwÕµ<_c{§òDÉÒ Sth\.œ (جä%>ìºaÊÕ¸&Ïæ›´pŒNƒCÖ1sÈ0˜~ò²üì{ó½–OQtšŸpØÆUÌ1ù`Ö€u’Œp´«!Ûáf„ŠÁL¨Ð±NPtFôN› fTæbôfpHc1‡?ÏeÌŒ i®·sØûaöâøØ0pqˆª³¥À…÷ÌU樕gG\'Œ¯eÎs½SS•®@dÆ–æë´Bhtæk|¬n0Ø*¥±ð™Q܈çê“AÀÉR&æhòn‡ËS†Át‡Q‰ 0½ýÖŠU®=ì »Æ¢cêÍ¡„†å°N¬|a÷W€o2Q®å÷‹§ÃÑÜAñ7ó˜|k YûœxÖàür¨›Nàúyû½4ë-$»¡`V§„@Õ+”}QwC¬B¡…Çý³W…ÓrPŒ•å® ƒ%Ý2ÄD´G¹‰ãœ f¿¨B¸ÊÖ~ë:…IŒE4ºÌŒ‰9m«Qäû©Xšð\ ¸J‡˜~SîÄlèp·MâÄ4m1¢y•g5¸ 愱a¦rÎaW_Öü°«qÊvȤ|_0È…Œ@cÂ4¼J?o­2‚1_7¼ ¸Æ;98|y]Àœ¼à€È˲ÈLûß´p ×Ò9ìýð*BLÆcš¸¬ùÀ\2ÆQŸG™ê2Öa]ö¹ñ»ôLg'eaàðéÖ|`ºŒñàÐd¬»ˆæÖ|èண3Ø{¡]s¶|yø y*˜a'ãüµeV¦ G#]¹å€!WI½“¢±VÅäà°dPSÃàüÃ9ä“éÂ!C0Z¸NáZ:‡½*k…bYx€ˆ1f߃óBÔÖʬWÑéT@‚yîChʬžä.#¯ÅümypÇ¥7ªÖåfʰ7ÕhL§íŒÁƒþz_‡ò3P'JñÝ9•¥Û’ÖøÂ[ÏÌæuŠ"¿7-‰“’À÷+Ò~á6ñæU'5! 
úQN¨à®¢sØ;¡‘3)zRb¹ž0© Õ´TÎ(¢ñ‘§°%–¥:)6–{0Pì-¯Œ­H“AåqØ!Ø2˨„SPAo¾(oGê•]¯¸hÙ D¡'0£ +¡îaó`Uº;ë±9Ì@ˆ “ïKNÏàà0EÐ;mˆ¤H o¯ç)A0àM§pÁÖ¯«£XT&Ã$Å*¦f8vÎ.• 6pßY†¹`†–åç[´ÑÜa `D¥arVh ›'.b“»`ÆÉ™|§pý¼ùÞ›àB7l¢e;†™„Ò,STù’9è[ؾ*4Š1PÎÙÞ lã):­…Sþ‚í£¢Ná :‡½ šqp+õVžÃÀ$:µr-V4³`CF)žžÆ,Õ¬ ¨St¦?MK+y ƒú§8°sH†gÐeÌêVÔ)LËà°÷CG|cÁi|ŒOU³{`nb2}çPõT$ úC“pû-4³¶ºk¢èÚ¤ÓÎ,ƒx@©ÄRÊ ´È¨EˆŒ4ƒm|,˜1‚”Òæí”z3ÌAL0HãÍeŠ0ØB^\Cg°÷Akq*bÒ™þ¾`Æbg~æIZùÍEÓ)Q+ƒ9 0#9ªÒ—½½ƒÝÒÑ'¦ÑÕí+O‚i”˜ Ná:ƒ½V_‡/'lXhE VƒÍí¶`²EHL_Vu3(ÒŸ'ƒÎ˜SD÷²›¡„S¸’Îaë…ÝfF4]¸=TgŽöÐukA•%uðʜưzÔ2-¿µÛ¬;kë°¸“Ü0ºMš§âžÈÌf?»fÞv×]ýAyFÆý{œs``¼c™8oUÌ cn×3‚›ÉŸA -Õ#‡€tv B†±(8‡óR³Ë0˜Gù&´ {?¬oôÙU$FE}3 j—º#¨Ó—#s»7Ø4å]F±k2p°¹KÛU.íhŽu}áOðíi*`¿»~Öü¡ŠcQ‘?–:rä…²u9›0fow‘6«H£’Ë eOÖµo]·¿e”½e¡Üîþ$”Æ]0¹VZÏ8L0QXçr2ÈLÓž"C„+ᮤsØza[²Kù­é´;•&æ"¦]úÑÕÙp‰×aU5t§RåiWž¿§Ûªr¨yÀ§Ýw41]1|Á 1`Jhv§ÒTÁ(BEãðÐ çËtbõ§ŽÓ¸÷caXè9S5ßÃX`Ø9™¯LŠ|+oÊ98,la˜Lrkž” 3ã`ÃLóùüØå/&ÍŠŽ9b²Ý\‘€ÂìÁl³­à7e,1—Ë)ªÍàÎÁ`“áéDÀŒo#Ë9`Â8¯)Ã`¦¿H‹ p½ÃÞ U<ö‚ò" êsÄu+´·É¬D«ÛæGø: ~SbOZA˜S'PCô[!›Ö¾³*Ú Ø¢©‚S¸ŠÎ`ëƒÄK7@6\š«LÃôƒ˜Œ§€ðÕžºîp–#Ûa>ÏÊ’_A›ýž6‰†A4¾qh8:¿§ ƒ¹ýV©0õ”;Ï)°½Ã®É9,oO“`:9$»wÆe† ×Â)LIg°uBgIF‚R‡E`:¢,{éFðŠø\^´®[Éæ¹Lé èJã KZ8F7ÿMEÛ©!˜[kÓÂ)LIg°uB‡‡dP†L÷:•Ê*k«UÛÔªøŸk¬þV—ßj¶ruÖÖ`ãÎãÃ\ŠÄfs$F×d¬ `Õ7IŽŸ]Wo»ëîy¼®â89GÓb¬°Ï&›:_ç­Ë}’]Ò”¯‹N“IrÛ &©!B\·¢ ¬yUíÀP½–`¨®´sØ;¡b …•ãð2yÂàÍ´e8nY?¦9o‘Þ`œ~ ¦)/ýR½Nçð!rËñ<ÛfîÜ'÷ì~‚ï®›/º«3p²²Ã¶7 ÄÍ‚P,ªÀ±~t»Òš#Êa¼7 ë8`§k&|iã0çÅ;­­=®'¿Ï)Áà·§Ð!(\Gc°õÁj_TCª*«ù>×mëGb-¥zŸZ^½ÿ¹ü$nMýw¼™¾4wøô;sÓugN0@¬N[$fE8SÁ)LCg°õ@Â`4žºœ10'/–eÙÐdz¥SeES€ þeÇ$¸$68Ü=L#0ζɡæ¨\:AF›Nà::ƒ½ªä|ÓÒuUF *‹ö”4ì6ÌIH®8Yþxƒy ®²µAQUå688L¬{aÞ4Û'F•N ‚Y¿vÃLÁÖ93zâ=.ÕvÇPÖ{¶°?ïà¡/,-î†kÃd”¤8WWÛ»NŒòæ'åÉOÙöÇS §0%ÁÖ ³ ;s'úÑ,#gbÿ]]Ö‰’X—wú:̵a ÒŽV6Ë– „7æVÜ4KÈ™*8…«èöN˜¯Fe„àUS×™öcFäíÍ,Ô¢ÓµààAéÌùEæ6L:8kè ö>ØYùÅ’”µ¤äG°ŽÉIG°7ÓÏ*‚§uÀÚéZ7˜ÇŽ&£>RŸ ”ÐâH*feÍq}$ëHÌ3^©à¡¢qxè„­Ô P! SÎ !:¢"dß7»ë†ž–a}ó¬ÑaÚ÷†EçR_8vVØ.™ÿŸ¾jfYž¦‹pøíÉ•˜¦´µßºàñ–V³D^à÷ãõ³Íu’äXfØf¼e]~®2s¢¹Ã`㊘S/Tͽv¶sÌHË 3õóæ{, ñÞDZ—bi’÷Ó‰)üXPÏ®[‚Ë3ƒ@iCA1¦à3/–Œ·§‰©ªôPriA +bªèÍ÷N¨žì¡œî¦ªTï ¦qXåœT¬©.U†€ ®æuª2l…|.U‰çÁÁa¯…51E†RpÀ'š‚ѵp ×Ò9ìý°·vÊñS1eê2hÇ ÞÁyiË{©ÚŒíà›®Q/iÀÚ®e2X"pžà˜FÌäP+ú"B„ëண3Ø{¡ Ò1p± ¶¢ºy,0§6–GfòÈÅpÍ f€' &OŠ+ES…¦(ja§ŽitåL•! 
SFõ°ÓÐÂ)\Kç°÷Ãoèâg=•ÿõ¾`ª2ÂwŸŠaAÎjq·>Üp=¼Qê¨Ks‡#å,0YU¾‚AR:XH̹ÔTp WÑ9ì°Û1² â,äú> V]öÆ£z‚-¬£Ù†ù×6 oj½ïÉÀ@ x{šˆKWÜz{ä©Ã-î ~{ ‚Âuv[ì|/ó«ãÕU-é̇/U[ºÕtêǹÂaf‚5†¨E§a798,<4L•ƒ$8è~ª)Ão°šZ8…kéö~(ûôæ U/tÏp`p×>œ±bÐB+¬àUpÔ¿)’\§@Rɹpp8Í‹n £šfÎ¥9tQ®DÌ¤Õ 3•ƒ‡^è^è[ñS+Z«'Á¶·.6÷œ5xy+Ò}î€-ï^èK_a*+ƒ2©Û"AðÛÓÔÁ(BGcðÐ ;ÇÄœyªà\cÔT`3ͲҼîŒuwtliÙAöiãä—æõ`°Dð°Ï0¥ên ç€òDS!žô™ö³+è­÷.Ì$MìzîçÆÀ${™upùÄ©ôÉ€,A…nügÔ )Kc‡ŸƒÈÒfù™`€LL{&À@çò»©í÷X¯ny`ªª>1ּܺLH¯¨A„Ìô°ô€ù‰m˜B˽ppX2ø‰¦1¸er¨¼ªqÊÌoÌ´0ŠÐÒ8<ôÃÒ=”S‚Œƒ¬K¯ƒë5°ˆ2Ь¢>„=^=`f{lÖ½ï` $XqRb²j\U‚i ˜Ná::‡½êÙÉjä('v«c†hD ðVq½ƒ½RisƒÙ+Õ u „ƒÂ.÷æ‹ÿÛÓĨ¸Þä€ ÓÊ"C0»u/?›zÞzS_S}aÀ_Å©(‹&æ²LþÊ:w8U-\q*@ 暤º @ݹ0pX"±b˜®ÛÚƒNƒîE†`ûˆËBà::ƒ½—Ø(Á|vKsLVÉÂr2tbÌ?Y™`C'fØ ·;…îÀ›– ¶0ÌQ”M&̺g*˜d8Ìì2j1)\Kç°÷ÃüUAbãÛoQëÍ07sa ãГrUr»ˆæ0=qˆT DsÑ•k ¾,ÑubN–ttøö[ ¦‡K:…ëè ö^¨ŠÝÍﯢdf–Ñ1UÅ,àDÌ&>›Âü‹Îcƒ™±±a ®ï\X¼\†#p܇'áÍ«eÿdÕ2BýúYó‡°W¥CBW9÷‰9$La¦ƒU˜¾À4„s:E““%8l2ØÂ08MGn‹qÀE<ì¨É0˜ObÃL½ÃÞö éĈ@íg²‚T¹x}iá" þd1ÞÂGRÀo(r¼aX¸ՅƒÃ§Ý 21˜É¡ñªŽ)£ÙÕS §p-ÃÞõíºT½ôÊ6åæ&¦ l<ÃíÍM·nEtxÈeíús¡ÈÜ ‡€%-sÒr X80¯» ƒÑ7ÓÂ)BKãðÐë›®ŒE€URejÇœªåN#îÒ‹4azÀ웆 Ù1²1X"ØÀ07,zûÊ›4§Á옩`¡¢Ú?ôAýBuPÄGÖÛû嘦Ã"dôÜ\pN]ym°‰½îIê±Ç59,è˜cTçhr(É7‚!ãÜ0SKç°÷ƒ}c½Û®•¯ÐŸ¬}]ð B”†¦M¾Bƒ¡i·¸0§8Ët7KZ8¦394F§O‚ѵp ×Ò9ìý°rb¼Ü¸ñÞТrb†)JÁú_­R…‚ÅT”Ìà7ÝH¥-ŠQ\*tΞ ˜T­â˜qÀ7¶ˆ Èh1ÏþÓﮢ7ß;a7B׬ÃóñÄY±db`4a|5ÆvTìiq2B¼j ØÞÁ‚°.o K[æäñCp@t–A—a0Z˜AáZ:‡½›~`·Öê¯-0ªó—qB×gXGÆÙ{Àoº6¡Ý“KîyOG^Q`Ø19d¾–‘»‡é›ö»«hÍ:¡×Æ2&Ä(2§¼uÇ n•5”ª*8Œé—NŠ¢2Cï¦ Ø ƒ4ƒÃ’A¢a:1“Ãx]¥/2s#hZ8…kéö~ø-å4Þ0㪠¼#…€¬c3ö0aÃØÁÏ%`^R^uèì…vÈäàðaW1s#Äγ8f —a0ï)—†q%ÁC'di5&6ä[ó`00¼0C^lƒëpŒT>äâ èÒÝ8AqÓÏí 4hà3VœRºñÙ¸ƒé£¤Aà:;ƒ½ö´¦²˜CÖÔ˜rk; æÂ§ã5Á,Y¾bOÁ—*ÌxàV¯T&|ZíÀ‰é]¯Ì9¨àÀ”Ѭ8áÔÂ)\Kqxì‹ÝƒëÙä*X²'¦C=P¨(NtE¶:ÌÁÅwRTfÕNKZ8çÏ“ÁÁ˜ð)â°¨ñ©„S¸’Æ`ï…¹ ¬,•UÎ1é–Óà²ÂRÈ3´{U˜^€® ;FÑÇgÒ®É!`É@‹ÀºSÆ{g¼©Ëp˜fÑû0ßÁÖm±1]sÃŒ:Øzmޱµ}ªåÝ {\ŒgzóŽ÷ €[ŸõÂCÀ’+·ƒŒ‡ò<9Øê2šEPN-œÂµt{?ìn¤ËlBsLÌILÁþ.q*§s `{©ÅÁÜ7/÷BÑ›¦nçàp·­ýÄŒ…A#ÎA1[SF5÷ÁÔÂ)\Kç°÷Ã^¬WÈŠã àðšP­‡é‘‚H¨Á9¹Áüvx|4)“ä'‡%-S²ºsÈÜ|‡‚\^LûÝ4ôÖ[4‰ÔSÅeùˆsRøÌ•éámñ‰/GÇs˜ ®Ï$¸x=œnéç œ6Ö4ºÎyÌÞKzP¸²l½hή¤C9€yüÀ¤¤‰i¼H½¦¤¼ÂÜhWQGcñ™ÀëWŠªDÅàà°d …c 1“CæUêS†`´Hfjéö~è5Ùý؈ ¹OM†Éº|ä{¨ÈÃsWÑNœ2tƒýžÆ´{³yÀÀÃ0C£‚ÂW®:%lC*…«èöN¨cåµÔ¼²ë}Á\ÄTTXC©×Â[À*¢IRspyœþ{VÕo° ct•ÂdÐX6gJÌgg*E¨h:aéV'V® ‡²x„ñ'¼·÷\jÂñ†¯0·M†W_uá Øe¼=MÌÅí±s¨Há1mƒßž\‹Iáz;‡½6^Üë4ë°ìÖÄà*ã­¼C½jP—0ßÛ†©+ª÷ä°dðÅ&W­rÎÁ|!C0_œia¡¥qx臂äPâllj.«oˆ1¾PЬãƒï«G´.L³\FèÐÕaå7··“ƒÃ—•˜“˜É¡³xé”Ñ­¼éÔÂ)LIg°u †óB—®—f\)ƒ˜ºcØÜÙ.¥9‹Š(å°=PÌvN2J¥N› N#†¹xINp@xS½¦ ƒ9H‹ p½ÃÞˇVÉ+DY'CTn 33ìX,ü°²@Y à7]kŒâÀA‘-uÆ!`Š`Ý C ãûy¶OÜXN ‚yTl:Eè({ìÝUe®Ô‰9y­ì:¾ˆ¹Í“Òó«± *§À•$×dà`7'çÄ´¬ËbAU¥·QÍ‘:•0ŠÐÑ8<ôBi›ªH0̺ÃÑs)ˈ Á‹î¯‚ÛDD” )L%ƒ€%‚‘o†éÄL7¾§ Ë5Œ t4½°x²rëßÞÍߘ1¤ál@@e±]ö€Ä7 Mû†)È2 K×98Ü»? 
Ç4Þr49ÔK}mÌHCÓÂ)\Kç°÷Ãk›fËB;R61¸Š¥J{zµ$Å‹¥MW¥MEš„g{‡%- ƒÚÍÏ“êÓ]S‚@–6]SAk¾÷À>´ÌK¾[O y_™Zq£«Ë/qTí›­Ž@ h¬°>98œ‡9Ñ—ñrq2 ÛnW]ßsÇAÁÖ¹¾uG/°=Íce˜“˜zèÖ^±K£M·ö8Ì=ņ©‡îšî–à21°š©˜qP0þ”Qoï™k!ÌÔRû!ÓQÕOÚ©Šï ¦ñÜ£*ÚpБçŽA _ë«ÄﶉæK8¦3¨ªÂ” ˜ÂTp WÑ9ìÐKëŒôl§r©ßÌML–Ðu°ŠQ+ªø&¿$ò C— na1–s CGAp8O: \„ô”K‡ ­ÁÞ ¥«âl9)¼*dSF£37±¡¬®Lf"dÞ£¹âw… ÍÖ‚?ÓÎ s±Ò€3`¨z©!Áa…BƒIà*;ƒ½ G–x¿};LO WÙñÿ²»³á|–ƽ_öe:ŸVþ6(Ý+ÁÀA_B'F¥ñ'ƒZUÛÂEfiSÂ)\Gç°÷Âò:‡Ë#áî…ãsªì;nöévÚ=4-©Û)tÖ)̰g6LæÝB}aàp÷ºñiªúªUñJ̲]RÂBGcðÐ s'¹pêävL-vê\ìòee¢@óó̎ןNŠñ¯.t6KÏ‹ “y…Éä€$мÈÌ“í 3µt{?dû‡ŽPì;E&‘Ë©¾BfRŽq§©Â`¸fl;CŒ‚ƒÁ&ƒÕ; 31rwÃ<µEÌX„ :;ƒ­ -Àe—Pó쌘‹åi ri°?À Ë©ñœ»ƒüV¿r/Í–vË0—Míb¸iHÌHSÁ(BEãðÐ š„ݪ¯ü¾`TX£ÀZG\«ÃQP±>4ƒy Æ#Π@Ñßk2p°yEGT”‰æøðîE€`pœÂUt{'Ô±“¥ú¡Œ ÷Ó”#t^Í*U*Œ8¼+`*ºap!úl.@Üßž ,ô¹Ï†åP®ë½Á\ ~{r œ 4]P·-îŽ:JLUÝSÂ›ìÆºÛQè…>˜ ãà›rÑÇSšÚ¸LKZ Y¸0Ê9xQC‚ ·§ÐÀ6£õÞ…IŒÝêË«*K똬 jqG\=»K ˜^'NÅ“¢²äôäà°d0èÁ0ª29<`™2ótØ´p ×Ò9ìý°„³‹e Ú‰4l…Ã9F%8x© öïg‘Šu»qƒa¤ Ët°ÃwKÆÛÓÄ •XÊ8ô›•±B†ÁoO¡…S„–Æá¡Vʰr«Zp\ÎB†wd]ÏŽ±Ð.XpØÅ¡$øMõ.L94ú'‡›®<˜ˆz¼ZÍN¶W½Å)¡XÁÅ©ƒS˜ŠÖ~í€B?plP\Ø¡[ ƒÉûf`#{9á DÅ”0dj€8¦ØÑ„sØd°…a®fe_ų·ÂG$Ã`˜H‹ p½Åá±ZÉ*K Ó¡¥i10'ohc™›n5D ŽE§úß<]Ö¥Ðþã•Ãƶw Q˜FÌdP.jì«ÅŽNáþ³+çÍwõÍ|ðrd¦w(Æ*0‰Õ2Œ„Ìç¢k3ña?m)ªÎç5©¿3pø0o|`pÝ7ª^;‡:×ë¢é¦c0Ø{1-EºUOKÇrÄM´Žç%ó“f}Án ÒÐ[0´åü5K„YŠÄ ç9 Ý©V.Â`³¡DP˜ŽÖ~낇F°’E?pk "# ‹S¼çŽ‘›ÕŽ6˜‘Mébk>™sÐÒEYY°?Úµ.óÏ9 ~{š²Âus›òv<–²]ÊÕuä"€ž-œÃ¡ N¢Ê¡» Z‘#^i¿€¹ÏÞÜ``Þ:ÝÚEL0ÀcGÉt—`ðÛS¨®´sØ;á{LV+ZS¼‰ÉÕj|£f/HÌúb͇¹Ç´æ7ëšLK7™†94“‡†Óh.— ‡­Äüµ¸ŽÎ`ï…9ßNösF OŒâ| Ânp@`qÀ…7ï-ÂôÃOD¹¬î”·Øcx'FQ¾Á@AÀ!À£„§NàZû‡>xY5Vè¥}Ìõ‰Q}¡Ì<#,X¸Ã´£ðÙ³¬O¦&E¹eÅ9‡%ƒ- “’]ck`h,"²´ÚŠ˜*zó½2‡µsÑVÈëï æÌºTÉ‘°©¯Ã.­.˜À~Sbä‚ɸ²¥ó‚ƒÃ’Á±e˜¦ Ëà l™)Ãói¦NáZ:‡½:ªmÌ0ë(<Èb¼SnÝ2Ó9!ulÙe=l—iô¼Pब.– »Lƒ˜tëžçtq_Èl—iP §p-ÃÞÝ-t2–Ï#)ÏÑ1ɬ¼Ý†;B•ΨÀŸ`zs6 oÔ¹êä°dØ­=Äàvæ9‡ÜY—³¹|þæÊYÃõ•f€ÕõXÀø}Á\§&qõ6ª{*l|l·tÓ¶ÁoO5ë´1(=ÁÛ<ŒCÀ’ŽAaµhnEC@³£û©‚S¸Š}œ;ýnÊ¿ê¸oûÒxŽáJ¹c³ðbðV†]f+a^ÒÄXÅIOÑloøƒÜíV«·FÍ·²ðŒ®S¸‚Îaï‚üÚùV Ô]Õ­ÀàªlÙ:í̜ż J.0“KOV™ Õ/ šúß…¸T>Ý›_°•Ë`0=çf*íöNXÍÓN÷\' ž:È;«{RgžŒ7]|‹ò¨“@µI­µU×&;¨Zϳmf‚H0ˆ.Ý \5g°h®úÏ·1µT˜Êù¾`*3É3«pÂùŒ+-³*§Ã¬Í\äLvŠ›u9'‡%ƒÿ “‰™°ÕÏ‹ Á,øoZ8…kéö~Xl˜?ªOñ¾`äH8ða3ù>1DF±Ô[œ9‚ ƒÂâ³ÕÜq·¨%:6eó{sÕ¿˜3ÇT0ŠÐÏ8<ô`ÖÇÇ%pvõìõñ‰ÉÄxeúñÚ«îRM<ëãO ¨žK†ÕÇ'æàZp„Pï)ÃàY?(BKãðÐŽGT—k<®¬*Æ512ÒkÎr.áì¯cdŸr“¸"2"éR·æŠ?è “nb¼}ºX4ßùzS L¿h¿÷@åñ‰ÒJÿÐû‚ÁÝ^·ŠÛ'ö6.ËÅÒP F˜Ý­rÃNQuN– h꘢ ±àé…š2[A}jᮥsØû¡ÄjèU%¥½/¥­e¼oÆN+­-ã}—0wI]ñ×NQ¸ MK+Úæ$&8T%º… ƒYs^Z8Ehiú¡O ®£äÈÖ¹À$brÁ%­ $Ñ…%³’ˆÃŒ¨Ü0>°²00PxGØdó‚xµÆ6j;ƒAo…iè ö.èbl«aq"C[•"S.]—uf äQˆ|m0ó3å8,XHÎ^)"0vMpÀÌÑ‚yʱa¦–Îaï‡íÄPa„—fûØ3žbó°CåMÔ…÷œ̶µ Ç`Þ0…7ô] ‡%-s(^È9ÀØomÊ0˜ÎGia˜©¥8<öëÄݪ×rWåî&¡¾2ª¢ŸL-i_<%EÙœ;`Ö|SD¥SÀ Öóäà°ÉÀ¶Ž¹TËÃ9 d9:.Ã`V‰“Aáz;‡½„Ùè'ª¥¹£40ò=–ÙådÒ_ë=a«å#>(ÆãU&ÞÜáæ¾ÒÀ ÊnyžŠœ¡!£¸»T*øÏ®Ÿ7ß{`ŽªÒU(¾Ÿ¶Ë ÌÉŒ¼œ•Éܑ؋=dnÌ{zªÒâÌ:6qK¯ê1Lc¨ÉäPoÝì2Ó»µa¦–Îaï‡ÍþŒ¾ÅX•s@ˆ~Dd×Ô[N ìü³¢yæÜ¿a†Þ.09vœû sñÓ9ðÚÚC†Ãœû5>µ6{'½‚²EE—:' 3ÌÍ[Vâlw—dÆŽje0C¾˜Î• õdàpÔ_ÌÉ9Þ$•B )Ê/¸Aá::ƒ½ŽVƒ€þþ¬Û17f cš&=þˆ˜¶¢ Ã?7Ì€é098,la˜réRsçP}8eF ×Â)\Kç°÷ê®'·7tëVuÝ0—B|šÎ­îù䆯ÝÊü¦ê¯ †Q^Ì#ïNCjUiÔ hª¹ nž|˜ªäûà€ Q_dF S \Gg°÷=K˜QàI>O»`bP¨ïì YÀT-$a,wÀ»a2 ¼ppø´ý£’ý“C£‰2e4»`j!ÌÔRûa3?“φITÅ21…Ág¼Õ‰)¾Híºê¾æ†‘Ê,Œë– ‹e&&39Àº_D|{š:è÷PÑš?t‚_ZBª€ &­ã‰Q¬Z‚U€K¸êˆL0j9ö<)*J¶MJè Qˆ˜Í3¯™²] 2Up WÑ9ìàÿýSzþ³ñ¿¿y:žÿô‰§&-«bCQM9­ŽŽùµnü| š˜Æ‰T©Ò÷J˜IURáÇ¤š˜…ª,À°PfR=êºj?©Žçoñ˜Žgügÿ¼½?ÿÉ×Oÿú/Æü÷ŒRãÁ~ý‹ñTH‚-ËXXÔýu ±ç¯ßŸþêåÓ`øÕ§¢TÏ—OcbüêSE¶ÌU^üÄ›;ðwó¿Û Úüõ×öôo¿ï*Á°CZ;")xºé˜oÂØ @ݨP­'a6*dOò2ÌJÕpËP§rÌFÅŠ÷Je˜•ÊuT«ö؃ÆCþ„§|ÁY8j* ;p{Ò(wú@‘ {И“›=hýýásöaÁRM,Usó˜åwßüÁŠÆˆ¸‘?‡Ì¶ñqŽQH=øÍ¯~÷öý®/ŸýÃ_ýQúkj1»‡e‡åWQhäúR³ü×SùÿðE͆9ŽÄœa?,ŠÁEÒƒ÷_ÏÃà¿û w˵ß/¿âŸå¼ËË·÷æ}§Þ^~7ðyü•®—oƳÆE/õÃóCl<èÿ ´ã¡ßõ|ù5xŒÍwm/÷ŽçÆó}ùã /¿Øã¾Žœop <…Œ}Zï«ßo!“×5XŒfc¿¥—_ŽßQåïîSµß5lâac¼ü=þ¸Žt¾|?¯ºü†ÜÇ–}è󾮫¯ª|f.>þLÀq½ü<ÔúŒþ¤ã¼r{ùü±”oÅxì­B5Q Õæ#ûÕ|d[ÏøÆC¸ñ¾ÿõ_¤|,ï43пŒ÷úõÏ1N¾ÂàóÍè“ÿõMüõ¶ü%fÛøø„$’|=*H"ƒž,ÿ3žêù¼ü×ñªaÑuë×÷ïƘ½ž>hêÃÛFßp·ò‘^Vj¡³Þ0zÙŸõàaF­c`¼Oø…ÇS]y¿1|Û 
[binary payload omitted: FlateDecode-compressed PDF stream objects (page content streams and embedded Type1C font programs, objects 338-359) — apparently the package's PDF vignette; not representable as text]
tQ64‹køØÖÜÞ×Ý5ñÕöiksŒq朣ÐÎG¹ ãM쬽€²†F‰„uÑB,™ó"@f9æÆÜ)VôÁá㺗`˺žÚ|€U½è žßU?€ÏÇ4Ð †lÐ糉¤Q±@…¡¯lGGXû›²_ÂG‹\eèníN·°PºEcÒ—a>¨£<©’z`S"‚58#rá5¨ìP<£SÀcÎÈ׌‘ùóvòã46¦¦øžsåQF9²ÐßæêrwI½k-|”7äUɈš Ǻc×_%GÁ›5 "z¡À€­~·¦Ñü9Î×þ0NááȬ”¡Œ¾€\å ‰¼Òë#WµLYwÕ!•îRR41 ler–\ç‘SD¸f:ûPÔ³Ü-Læóúô¿K}MQdÜ.u·ï§«ú!ÉáõÚ™I)ª4KžÞvÙÿ<‰¤ÉÏ¡" È[-Å šZÏóJH¡ñ™ê{Ã8»ÍÚÀŒ™Wµ¿Ãöœ%Õ‹°sòâ-3”8º´à-1ËB%`Çó‡f^×ÛøKMïàûÒ”(‡©RßíøVxÝãô÷W¢wÆY¥±¬/"js[mí\o!ëH!íwÈ ¾zå_ò+¾xý_÷fÞ]‰7~Ì,A¢QÎB¤êÙ4°•`H½ÀëSÄQ ci…aT¿2 †›ó&ÂnæM¤ÈøŠÜeB–‚‘FªúÎ !{Q5_W3/u"¾AôßöbÚþûêVôÈ^³½ØTUæ#ðp= Í==cü½¶ýUÅC;ûáë”± BöHª±+ðÞ(cÆÙW%ÌÃa:ªêâë[†gá+P;€ÚAmKµ¹- 0gtqè—†:ÔF‡°Ø%é¸a­óluˆOs¾HÂOj>Ò!†ÄÞ­̪ٙ*£å•§7þ¡Ú©Mß*Üõ‡N>+ÀòÆÏ0ÂñTÒ{šÍÀHhS]q ø)4*:JëMh4ÛIa‚ œIïÿ¸Ô ï:ÎYڼö G¢p¾¥vC•€åÅÜ{K‚’îÉg#vu[ÊÀ; ½€¢©Þ‡^L…›kÈ™k €Ñ¤ SײÊ|0ŒÄDéM§bÓ©º¸ïóðÑAÂÆˆDe8<ØA5Ò1ehÁášÇíÂTDÞª ¾ö€d5šmm׬ˆ+®ÐIiN5ZÅjNÖÔ7î;ÎÒC¨Å&|Í<¶·ÝÉß& b•¯,«¤š"Ö\LX ®ë:6Û±w _Ô8s•jM× 3« Kõ†\²mÂ;Ï'¯téóÖিÀ‰­— bä!~j‘ÙHHšŽT¯;(bÈ÷W©ñEyð1=A 0³“?6Ò’F¦H~¾ÚK=a†x(U4ÃMP§z¨îË“V´ Ú^&> \4xXDñå—Ô8ÃÉ<à-}üCzËî-KF„žäɵÉ;±m45/ˆoЫTÊ5Ç{¨(µ¦*›Óàx4Ê_1haMÒÛ°f®i{ó6Ps9ŸKUF-òÕ RæÞó°ÿ¿ÃUð@שë¶Hcmjc]¿¬¸X}-¡4ý N7˜ ×Tn‰?·U>™«o+0Àù6:÷¾…ö B÷gºêašg[án?Y'˜}-Ø)F óka˜]èár2àk^µøÂó!¯®´|Þô›öCý£W¬ ÉîC蟚î>.Á»L ˜üñUeò~êƒÉrJÉ)ˆâxÚè8Ž©ˆ«èà£Ð ij²&ŒITß´peÖo|,Û`Åñ†ÎÉè!U|…i±®›µár–iéó’¨üÿÀSÿØ`¼ ,Ûá©1qÖÛ©Saè5œKÐØ­œ§|”“Ajk®¨Í’OÙñ˜ÆšTS†Ùn»Ž,‘þ2W&A¿õ€áñyEæréR”Êy£×ÅÊ3[8rÇAç+dëk‡¨An¯ ­ÿ$¹[Ö K2ÇɰX‘‚¥|f ‹{™kþðþ(ÙŽ›âi;X*dÿît$ –ñÆû&‹ÈÈBé-BäöÌRùׇŸÍjw}wõ¤»ï.Ôîß/ðŒëdí¶ H5ï^‚;„©WžÜ\|þÀš±àAQB—Xj­±dloÒ™]‘‚ƒAüg˜j°˜ŠÃ©¾Híý©æa²Vô¹æjïM€4ko3Š!–þãHM“Í]ø¡»+ñÎ?·HU‹½.záÛðÅþ—Ü¡±&*Nì—qbúí,òË!3ù¡K)8ù_&B4úG랥•Ùàd°]`‘&ØÔL§ÉIÜb…°œ)Ó Ì%h§üÎ0h¯>Ô“Šê]ä0XAß*„M=zÆú¥I²½‹*l'›Jáùi€F)ò£Ú¨¦5(\¥òg?¢-O¸ÿ£À8A°©˜mœ²\eå¦îŠ€¸©TÌ&Xq\d¼m8 DGI®ð·ù×lF+à®í =rômêñ–¾&Fš™¯$yðšn`ÍzhGsmÒ-{6àœ¼ªyEyúÑàiJËëiÆ»òëmùÕ5=xiT­U…áft"Dq%í´¡ìÍãŠ|¯6I{`ÄÞ ëÙã6‚£=%»¦ßã+Ž“ên#b•%[E·nþ«Êf"Õ* Û#¡o„Jâ–€Ù¡gt¶™fz¦¤x­±£–gÙþú²œeãSby8ÞúvTn‘îœât6§ñ!É+C¥É1ÿvàv›b¿CÉ¢îV|SeÿM·Aï¸>ÛSŒ)püÍOÝSc2ÙP(+“ Å“8QNKsõû*Õ£ƒÛ ~¹»_éɃ°ß×á(§´gxXqô5ðatµÈ“r¡QõÆ æWeç-‚§wäE#†[¸S©Wº§Æ6a&m2DªŸrq,Ž¿Ð‚¹û[’’ 3YJ1N^•†xÏÄ㌠ûá)A†Ö:îJ(|p++wÅoª7V—¶±t=@ð]jKY:º¦]«Ä t--æ©R¤scfék.èÙjW›zÁˆu‚ŒÞ”ºK·uqÀsTw¯?ÇeœW‚³)àbšvÐxìS”6enö‹ï¨m h0 ô;È*r#”¨ÆK=¢¦’þŸ#©•J%`!¯ä˲dþ½%Ë+›ò- Þ"Uå 1ê:ú–*spwÇ‘?Š aY¯ˆÖ¾ŸFóhê…²²3Ó€ YÌ’…¬¼Ò\v0bJ©ö|~@\9TO?-rSý¶'G^É»ÑÉç£W‰Žj{ M0¢Õ*4 X•¶ëV˜3­§ñqò¾EܳoDWúˆH¨r<Ó¼”iQ”0ØÞ®˜¯ûï[wéµR¶Ên{Çç^Ùa˜×; ÌwOäG¨¾Næ{Ísq÷8t“üÅÐT—ËCrϬÍ1ùÓX¬`Û» 7lO¥ô¥hRƪô)%¶c=ÑÚÏR}I»‘®ÇâÏÅmå¢û+„‡P¾K³Ó Þo¶®ëîæ¬Iþ€n»5Zh”žŸÐDO¢Ì³fËš4{”Eï÷OðEØ{ؼ|ÎVDˆÞ–ª›L×ZÇÌ}yéî³(]zª'§K 'ºöSœPþØýÚ«îeÂÌ“~Âé¯Ê¨,»=!#|d™wÒK˜–¥(Ôï“wÂ2ÈŠnzª†]¡$n:»ª p_Ùî ³øÔ\!ŽB_UÆCÉÊ!×M_‚ý ”×6ÁürâkqàsTÀÀô¯±u­èäØà³ŽÏ㻕{€YRYý×Ö™ª¬S6 yk)Øß^^õ*æf°Û³*­øÝe´] !M¨ÀKЧ® üÁ hH`¸Æa'(ƲÜs‰< ¥®×j–ÅÚAYfà-¬’Ù[|j›øþ›žÆ^ë»rÌ–U(m…Jg ˜«|LOc{ÄŽË#N áqȵä⇮DÞê™åSùí‡ôçô)”¼}£×[ ‚"œçäw8ŽÌ[yV[s§[^§&0–,¨dÅ,Û_AT˜=C#.îây¿&½Â8ˆD(¬Õšm±P9’ìŽwvîaT°‘6˜Òn2‚5~¦šnß^TÞí¦N®ÓkJÚ´q<ÁŽ—ÃìmlÏ{cg ØØ/˜„©~Ÿ… ˜¤ŸIbSoëL¨ Zý?i  6Ÿm[ÇoÎÒ\è Lž­HÀ0o#´sï®T›Gœ¼çíœÒ‡ä6‡M»È#v‚*ˆòuô[œ Ý”ãÜ{;‰úäÍÍó¯<µˆ wæûè9L­­pU*óÃP5¹ìUÞ˜-9T7}p…ô3Ø— â>­5Ë_3èß Pˆ2À®°ó_ÕæÜTÜÏiŠã¨!É¥vzx=Þ*˜.y»1¢õ©5~ù3ãlôÍÕuòÐi@P¸/ÍŽ+ÿ£º¦þ‰¦yiïðÀ*„:†]ÔAœòºsâ`:Ü ÖqžÃ28,gçУ OžÎj¥ø_µ³k’ 3u%Vôž8Gç3šÅ‡£™|Ÿ‘/3PbÏò‹AÀÞâÞj@‚´®ë{-t‘¦1¼ñ”)æÞ¢žŠñˆ;ÑܲCCüë‡Ôñ€ IpßöeêÎPâFV/¸ôýæüú ªsä€<Û<¦€n²Ê}Â\Ôî—žr‰/zEH¥¬ŽW$}W~:°Õ˜ŽQ›tþÓêŽe>ú´äëÉï¨ô#–ƒêûéÍǧ•%úžbËí-Y4 7¥ë¨‡liéz½ú¢: ì“hR>¨sÎåý—µÝ婯N¥Õ6¨Ø~‡äiY^}Õ;¬²1]úÂBãF¨ÇoðÈ=/HkjÕIâG?EÛðc'HÊÛÓX3|ìäUZrß$Åaj¾Õ›Iq#LÔÈ×Ô4ʲrºLµ¸Q©]ÄÏÒ}X6ýúàÔ}ï¥ˆÞ ·#‚Ÿ‰¸:x†>_Ž|äÏs™ú»:7ºô[> stream xœcd`ab`ddätö vŠ04q$ºK*þL`íæaîæa™ô#Pè„àAþ= ÀlÂÀ`Ì` ÖËÀÂÏ(ÈXò£ƒïgP÷‚ïÛ./}Èøì»Ê÷ùßå™”¿"zjÖ«Ýß8¾GþÎ펓ûÝ–^ß&ÿç{ú´ú r»Ù6N±NþûìïÌìß-~ožY<­{a·ä1¶ùsºWÊÿ1ûþUô{éw1öï b=jCóËóýŠ0þXô‰ñÄwÁïMß™¿/úá+Z:ÁwQ÷Žz×Þ[½GnvMOW÷Äî }}³f=’èÜÝÛÝ×=¹«§h†Ëº€ ÝF¿õË~û»lˆW.¿/|WãÍÊÃu 
Û–Om\PÖÊ¡kÚ‘eî´vG¤\ýôÎîöîÖî¶ŽŽš‰ÎæîÎîŽî²Ïé:Ï•ì\ÚÍñ]ëå¹ïúßY,î«/ºp00$^þ°ŸÏøýÔw.æMßg‰6×µTr±gOªÞ"÷½T‰ ÌX €/[ÙWÍ™´yCÝÔ ùÄÊl릧É9±Ut×t×—püúc#úÝäû®)«&ÌáèùýŠOŽ‹Å|>'@(έendstream endobj 346 0 obj << /Filter /FlateDecode /Length 243 >> stream xœ]‘=nÃ0 FwŸB7°Äø¸$K†EÛ Èxˆ,8ÎÐÛ—¤š¢èð<‰$ÀOõér¾¤e7õÛ¶†Úͼ¤¸Ñ}}lÌD×%UL\Âþcz†›ÏU}zñùó+“áš‹¿úÕÑ+WšÂéž} Í§+UGkñ8ÏXQŠÿž\S:¦ùO©àYìÔ‹lu *]#Ú¢ÂÚŠTX=+ð˜Â(“> stream xœ¥”mlSUÇï]×ö‚³ ±º"Üâ>àS$ "( $޽A×u/´+Ðn+í½»}ïÓ÷—ÛŽµ´c+3,P0’1>H4QÄê_bLŒý¤äÜîô´Ícüdnnî“óœœßÿÿ<Ϲ4ÕØ@Ñ4­ÜõæC›[ªá:Àá¥×Ê‚šdÐÔ_«xõIÔñÚ¾ m\M­ {ÕÏnlÛþú[í½Zs_¯Qkè5kuýˆQß?2¬=n9ö÷šyh`pDk6³Ö"½Q?¬7 ˜õÚ½YbTk4ê-–‡Áð¨n¢¨U†Á­CtÿÑj}«ú©µí¥}´ŸPfª‹ê¦z¨"u:H‡è0¡VT#µ—ÊRŸÒzý9ýGƒW&—m”Ee¿7ö4Þ–·K>•äÃáÂÒ<šKèù’ ½\>¢ž†œÕ £ìƒVå8Œåó›fÑ;K/Ldlã샗HÆF23gX•Ô¼|H2ËÐ7å.uT„DQHò‚Ëcs±xWå°Üïö¹Á >ðG‹MÏ!²ᣧ§@#&“bB‘û¥>¹ ÙKô­’ìdWÇf Ÿ7‡žQNÃYÂ?9Îâÿ$áŸ~„«Š?N©£i‚CõÄ4á]ÒwÖãAà _åóÚ]éû+½üàÑðIAÌDR©«Bï¡C÷èîÉÐAV-³&“Åb2ÍZŠÅÙÙ"[/šÔ?+ ËÆ/dh^º¯†ÒùÛˆŽGƒ‘ÄÑ ü†–öW,>©„>0莚ôþÑ€'àõ’LÆ ¸ã];ÂUþƒ;” ÂþÊðþ 0bÄÙäöG:æNßô%‰¥yõ+áº3k˜+˜RÇßž›¿3Dòr #MÜRîQçjM´ÙØmJ[­U¹Ü²ôÁ‚4T¨K7âjÐØ‡iW]Ëüõç_YD¤_¹Òû—Š—C3Áº5¨‰ÚéœçoøÄº&"ýÌu^Ö]Ü“Fà¯ä>óÞ0dÚÃBÝ!‘Þ°Ï cÅÉwÛFºámFUÎ% Ò›DùÉ*“4è¢w¡NGÚr‚frRà]!¿HÚÕ…»1yyÞï—†O ©D(”N³¨“<:¬ âžhÒ‰¤‚ˆ/ÊBx Ú|è&Ú‡ö5Gcg2ÑX0D,ÄIw’§×Ëó,¡WGí; ÉÊȰesdØl`ç*—vX­9Èrª¥‰Ú<.­”•¿$›j÷:O÷ìqî‡ãHC" å"ì¹ß>ùɯ§#áÄ!îûƒ‡³øñE=ÚáKÂv`œpø“û¯½Á凋ƒ˜ûã [’ãTåMºáÔô\•IþCÎE­AÊ»h=0¨Eñ0[x'^‹ŸÆm¸ ‘/ÚÉ¡ÖGò- ¼°ò®­©Ö¯+¡MéR¾îùEôµz*‹CRÞØx¼²€2ͧâ^Ò0ÆáxGÂ#z8iC%Á*œà®]•ªÇ +]ÂÝu‘c0ÁUæcË5ûj«ô“ZŒ&¢®Õß#xý^¶²ˆ§ìNŸ œb®)·´ˆ3ÍWõö2ÕŸBÒ³'¸JäR¬býØÒÄÞ‚ôcž–þ” êt<‘™âc‚ËUƒÌâ«—Ë 8â¾´K:‡KÍiW"Àdâ©”èŒ8â\%€.ÈË èÂÿ;AÅ®lÜšoZAQRì"endstream endobj 348 0 obj << /Filter /FlateDecode /Length 509 >> stream xœ]“AŽœ0D÷œ‚´ûÿ©åÍd3‹DQ’ €1#C#¦g‘Û§ªHg‘E!»êYÿòòúåu[ïíåûq+?ë½]Öm>êÇíó(µêÛº5]ßÎk¹ÿuz–÷qo./_Çý×ï½¶XP—ÓßëåÇà^uçOå6×},õ··Ú\CÈ×eÉMÝæÿ>¥tþ1-¥S>5L]†-ùÔ0õ´5Ÿ¦H»äSÃ’`».Køê´}–`ŸhS–`'ZÏìLû”%ØJûœ¥0”@‹DR•v³BÏžP°…y©€Õ´ÈKÁrq ªïEä¥Bˆì‘—‚Õ×!K°<7Æ,Á²~D –õ£e ÖhQŽ‚%ˆr,iDäÊI#"oTæÈÌqÎ,áDĪUÈ)X\ê5¡ B"«„øI+$ÄOªX!!BRŒÄ Œ)ØgZ@M‹gs5€1Á1Â1ìjÚÙ¸³aWÓÎÆ $L4Œ4 $L4Œ4 ‡˜2d8Äté 0Ñ0Ò00Ñ0Ò00Ñ0Ò00Ñ0ÒpTwÕwÖwt…t†tt…t†tÜžë7èÈëÊìÌì¸.ו9¯ÌÁÉÅÊÉÊßUÁYÁßUOŽÖc†8eœ×Çx¶åó8êv×Pkh9«ëVÿÍý~ÛùW 5"µendstream endobj 349 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 7174 >> stream xœYy|SU¾OÄ+Bq™`Šz/Ƞ肌 VDd•½´¥{éÞ´Mºd_nò˾6mÓ4Ý›ÒJ¡lR²Šl¸Œ8вŒÛ¨ã;©§Žï¤-ÎŒƒófÞ]>YNÎùý~ßí„Ë>ŒÃårïŠZ¾|éŒéáÙŽÓhê‹£x0j¸ýá‰Ý÷£îûÐÒ1èÞ{9£¸\þÈ1÷ ÇMz|ÊŒ™óâÒ³Sâ¶%ŠâÓEqäoz\ƶ„¸ìÔì¸\Qj\zBjRRf~^jrFœ(…<-NMÌÎKMÏÊœÁápf/ÊÜðÂÆ¨ìM/¾”ûrÞbÑ’ü¸eâm¯Jâ—½–¸"iåêÔ5iÛÓ3æÍ}nÁ=£"îD>ÿàC÷ Çp8rVpæp&rær~ËYřǙÄyŒ³†ó8g-g2çuΜuœ œ8S89Qœ©œMœ9Ó8›9Oq^æLç,æÌàÌä¼Âù=gçUÎrÎkœÑœ1œG8 WÇe¹z® \#×Ä5s-\+ׯµsž%%â çÔp×KäÝÅ{ø–cG8ùðñ]j*šúãÝ #ÇÞCßóö(ïè9Ë"úÇHï{ï¹ûž½ÿÁûÏ> ~àËß,ÿÍçvl»„ÏFÞY0.c\ç¸ (㡟¾ôȪGBt,ý5#a>¿`|Ä7b#ú”DÑÁÐK~.ZŒîÄ/I(ae%€˜ÒXµº‚æFØml 5ÑVUn¥Q͈Ly¨ÍíðþýFˆ,|S? 
*c=[Xª€B¹LônØcÞM^xš­x¿ünH—ƒ½‰e^ë¡÷ƒ¥"‹A§Ö((,î_*@ÑÕ|uDDè y 45ÀE?œ(?ûv\À–hD ¥$^e¹ÇçðÛi£É³ßVøLh ØÖK¥£Éh)ö\ÿ3PÝΤ´å›ðoŠc¥È 9µ¥-eÿ­ô˜Ã¥Vl‰–.\œ¶fPEjWM³Ö`ªzºÐ08F½Û5cÓ2Q­=º- Ñ NPHŠSKW‚ŽR¸Àä26ºò¢ÿ@;P~·4­€•K×3}ç!ØÇ rC’ÐS‚Ùh3ëÐ[À ¶Êª.‹Ëè2B%å‘KoƒX6ra¹97\ŒTÌe'lŠ|'ƒF✌ ¥òMë¶o`#¥ƒ=1€¢Œn†æ tÀ.¶ývM] £žQÓMÐåž²oGøÙ—Á*‹Üh}$¨ÄE :¹Ai€"Jæ ÝHgëàä`gì¨ò(@Å,1êÍE 7— é¢ç.üÝÓÒØT»³ö„¹Ì訢Œà’ÒƦC$š“v/…Bµ•µ§!¨‡,(•KUJ*¢Ï@ìlí"ùëMúý{E‚^õ)yxBœàcP=F¢‘_^ZÕû¤›ù®ê¯—áÏÔå¥gð(ʾ°ÊTúJƧw@ :ËʨJlœBFg›É¼"‘€­zeÌÌg¦à¯Á×¥ Qadà3˜¬¾ó¨EˆVñ?¯Z;ùé'ñH&"ôîP¿n…Î ¼Þææ­ç¬‘îðQ}”CkQÑ9ÉfC¤˜·U,w¬ö&}A‘(G§ep öãaèÆ &ÒêÚƒàòºÈœ‡»œ/±YÿÐeéP—-Ê\Òå1øC#k– l<~F+‘®5°ó ”X5f°Ip‰mj’ß­±^ ¦"¦À¤1ìÎ6kuš ´VÍ')úÖü`ßÔ ÷ë¼>cß2YoW²ƒ\N¿´@龂y‰óp>Î!;ŸCF‹¦~ò‡Î}=ÌÍ+¯ƒ™r)@¯ÑêUzZ™)zñåU/¯yµh=P3&FÓU£Y_£»™“oú¨oŽ/ÀT›Éj´1çPÃÔËÃsESt%ŠíbÃr Jþ›"B燼'çŒû}{TàÞÖ–³Ú ¦¦¢¥²½ì$¡ÃI˜ž¥K׈Ç 2àÙïhcÇ\îÚi)´Ö¸v’ùÈÌks ©*ZüB Ž’­3èâç½ÔzEwÇÑÆ÷ëÏ3žf«—Ø«îí§$áIÊAâ0UXéºOvvìÊYaØž'ÏÔ”0I“ –Ã&jÊùÜ7϶w*ÉÄÇ í—û–óÄ7gÐÊ[¼Ð6tC` ¸:†>]&QªiV½¯Qç¨3ôе¢ué @̓˕-Ö]ûÑÆÝló‚ŸêNëØô‹­4ÜÜlÜJ®‘³!g]J qà[²›5Úƒ/)}ÝàQ:/òBw‡"’ŽÌ†­DhîÆ÷âÇñcS,}ÏÈrñãˆkK;ðk)zE²rPÙl½?`r¸÷2î.“˹³Ú MY “W’ŸÏ1ó´ЗŬÝVœÔÆÒ#»µÝÇwwtõ–w‘=AK òÙc´…& f]XóŒ>¯Û¢ÐCÂ[çwYšÍްÑ÷QÖ®¢ó!—̓dH´ÊjÙm«ÐgQ ’ ‘FÛ?÷?@—ñYGäÜ©2i~ µôפþÁr›¹ï0êö²¿ºÝ0—3ÞÃe=e‡Ñˆ" <Ùa5̈5(åò¡× ]æN²Â›làg«&%³Í(u–´¶?'”}ŠâŠÚt½}ƒ‡>AG¶úšÀ µP-ÍÒæBÞ€bZ|‡WzSW¥¬Éèܳ¯»2š¸ rÃcæ0›œ&¦õû³u­@Uµ)W¥-IÂã™âå)q1 !zSPVg TUhÈw”däæl>T¸÷lû›™ö$µ'œêÛcHP¥^§Ö3iãçm*¡¸¥•ÄD2 ¡E#†’º¸ûO ù%Rõ-„ƒè•>‡V䘤ÒåLj…Z™ª- çà÷euÅ9¥gùe’XÆ¢ýöúºóó¢·e¦gÐ… ¶˜œîƒou—×ÕÞ±lÈg¦-‰N&KMk¸\U§É=74D#Cß’5»oÕì LZ& æ°i/˜3nOÌÿÍ?ªP|Øo’Àù>JZÂŽÁDâ7µÅj:õ©ÌÇ‹ÂnëNÆ/$Åq®k]ÿ@~¡¢Ð}K8ÛØxDQ¬)QÒz­*]'!D’­ÉÞJ,±®$ö¥¥@mïíØWyªõS±›|¶ÿNtÖp«³™4×Q Ûs9j1?A²¶POŸÎ8¾«µ²±‘–®—mŠ_—“š˜M²Õ\´«‚x ¯Çåt•Š f[¢´âM÷&úöÛëhD#‰/ʨõZg€»û2Ê¼É Ý+(&ï.--.ÑšÊóÉ«šj;néx?„#ñDxb·ÿ‹ÖSLðØ‘=G vJ½ÙTÄ[;÷ÃÓCòx¨ç€ ûÄÏÈÄýÜþ·…óC§Ù=ä§ñÈ¿qéø~›P_´}.1jRƒDicL ´™I8‚öAÓ5„95³Ú(÷C ØV³=êùÿÈ1ÉŒ©‘n°¹­öÐä¾Ñäa<±ÿÂ/­\#œ#¡k7ìaw]çüÌ–ðh$Êßv'àx›8·«‚7{›êÚƒ`<ÕÓÞþ盄8 ²ôjV¯ÔÒ› Šó då%•âš"PÏGO+К5N§Éj31Uûz¨†*uª³ÄMìsrìì(qñŒÙÂÅKRV“® `ÞdrÛèš¶æ†r4—².³²ÀWÜÔW½è«Îª"9AÍ2¢W–‰Ó $¶{jY‰¥€ p1(Y’Wö'6tEàêô?0SV±* i§§TàÓþá¦ÅÁ3¸ ˆ{eõ%ï_%Ä9¡*•Ï`‘†ã*ag=M¨.Œ´ÁEjä±:œ`ïÐ"—Ò(gV™TðµHªZ4 ·*©ÿc¡½È¬w‚À<€¶D2´Ù4/ÀÝw UîࡨX@ü¶éPõ…fëqðR{w¿ŒyøwOâ¨IÇŸûòW†u¾ò£1X~’±»Ð”oô¿!3u"þ=l#tcίï{¶Ž{èfÈò)¯OZ"@ÏL@üNÇÙ¸—â¹høcè·(å‘p©¢ûõ¿(˜üÒ‘p¯=ö.Iüç¿ûòðû@}øÙüDT6ž>a6¿6gål )%´Qu-”D“>õ¸=7й¼ØÕoÛÕ"æY~5â[ª½mžHgUey5XÈDTIbtŠ(¡D>YU}S`ç¾èšm©¯J&gКV‘‡ï§^¾ž*æ,ÿù~FòdÑôõ©#1FìÛgv¶C9U#ñˆs2ó·îÍÛ}¤óøNÝrº6ÚÉtŒ%¦vg=:MdòM‰Zçh=Š˜ïÐðw—^Æ2øÝ‡Ðÿ^\}$ê¾À?ç_íÉ~9¡ð™„dfãÖªâi&Ìù²½ž:Lä éƒVÂ]÷„~ØjÍ®*³ÓØBF‰ºˆx09µøå„&'gÝöä­Ãù(Q*øKÝsþB}<ã ~Æ;žÚÿÌNÿ|Átqð‚é_ðYäîá_÷­I)Q¥kDŒ:K™…æ QPö§¶¿x‰d”zµZOç.Š-•¦ì ÚÞ°´0&/Ù†Þ]ÌvQhð>ý”ûÁ‰=ÄUwôøK+DÙÙ9ÙboQmMsM3½°”ÀÓD´¢ÁáÿÙOMü5)PèŒ*g+¦Šc9Ò4( ¶´¦¸Õ&9ÿ~Ñ+y5íµ C½6oÍIBt}ùÁ¾é\´ø*¹Àë‹ ‰Zƒ¡TFo^³}Ý+„À£ð]xôÜžè7J˜ÿÉêP)9‘3æL}bæнµÖZ£6ZŒÆ°ÿTµZ=©6­Ó­Áb’¼5VÍj4z=ôÁc{V+í% ýab"ѽçS[K¼Œ¨¦ÄòtM¾-Ù¥4e;S› ‡úãúóÕ“±xøv4´”—Ñ\i!*÷Á™ë“y ŠÒ(ó4/4”±ª´2O”!Ê(ô×44Ô7Ð8úÇÙñÂÍ«×ÓTg³p‚ŸªWeçf$Çwæ¸p³ç«:úFh‰^0G‹kÛ8iõP¥Fc•ƒ®;¹ïä ¼>]ÜàmZD¨’pÖ‰Ï<î íy»—ªóß¹'š-Ä“úßÑ ØH‘_ë,+wzl´ÕÙ–ÛêZÙ‹a)¨AÇ*ô2}ø¢~egüáæ†ŠŽ]·×}äŸl}á ¢ 8b©»½ÒRc©`,•„D\ðÖ«M³ÜA_KKMíž}{‚X “Á£'ÞFc2€Á¤ßÌ*@6`A ËìjíØš­šE§&&Ä'–ù|Cètq÷‘ íþ׉)¢åŒJUøD÷÷³¬\¯†âÈÕ{Söï¨ó·WÒ¾žÞ_„ñÊx 2 Å[k´Z«˜ˆ† †›¾1ÿßï4rÕYª & S… u…ÊTÐF†ß´Õµ8hïÑô˜s—¹¬B\ ö¸S“WÇa~á:F™§#vþNß”¬ÛþâÚúäp¡›cÔ[ñ]Ï?¿!f]m(c[³js«·[salZV$Ò‹kÞzû¢+EºòN]å?_t¡füz5zþ{.Zà¡£àl´ÂܬlÎO™Åé¢Í&#e4J¥¬HKcHü_]©µº? D“Uà{îÇßóÐÚAS^mff^^ffm^SSmm‰À¾„êP«Ÿ{5„x}³CZÇf·‚›r+rVj)Ýÿöʵ*-È"AæÐºµ!ò€°Lm"î—r;n·Ú*³1ýdñ‹åXà¶:¬àÊ*·ö¿¼B©ì(¹R)WZ5^5 `c?ù)Uk5 Œ”;•n3qÍedo ¶xÓýoóB9H.@3¦\„Á°vbá7§/Õœü]¶nGz7P=Á#v™¸–ɦdR *Þ«­&Eö0·ÐÈjDco28䤒†[¸e%µäyÁÌ ädêVdtŸ9X8-o0ûßy³Š¤ö¯?8? 
V™Â0>…×U£gè§€7€¬îOÃÆ\åœæý4lÁOœÐYÁ•µ§æ®Ê*ÎI¡×Ư}R¼ÏüqŒÐ ³já°`2š/ &aÕŠ½IïÀ;p¥­{ï}-gà2\J:œ\×»ãØ)¸Dá%x®@ª•ëóˆ/4(¬&W»ÞMì Ê´º­ÀK‰QëÓ³]…¦   ÖÓCÖa³: ÜØìÙ™lM7F›Vå=Ÿ•ðú’äÕ@-Lì½H`H°tÙ_B3‰Eö¶¥¯ gZ) ôÍhávúºáëó²Ä-« Ú›Üf²Í¤Õ}ÓüÜž’‡úP·`÷£ðÔ QE¾ € ðÉnlf½‰µ(ÜšrµdžΠz_/Û$ €\åЙĄÖ-þ× qnÉ4FyeN·ËA>çmy5ú©—‹>=ÈC¾ÐÝ4‚Ÿ†'+ca Ñ£lGª+†,”¿Úç·Iãe2/Á[ÄŠôØßt¦Ÿ_Ö­µ fð5~>$BÌ–”„œhB–aAt˜,}ðÝÞ°=:ÆßŽG¿ŽGÇMW•óª)q…ª¬ÙÑØá¡½ûÀTvˆ8Ô†üÆÐ¢ÀÙkÔq;ßíºÔWx¡xtC 8µíè|x ^)ž?ÿÙgÕÓ Z¾ShEÑÌi_ã1xR4›Uê(hÞQÝ\Ó–ÅôÞËm$£¼sa=$h6Ìe°,ÀÃà[WC¿ûÓyì`Ë) ׄ‡F-Ãùx™RBD;E¡¨sç ۢΦ_Ãz]‰:‹tPî2Øh/ÿ‚qw}û9¢1 èapÜb©DºL½ŒØø)x8æ' ÞÈ ¿õrwžBdËIù‡ª)a°˜G"Å|ÏÌã/]ð"£Ñ´J8¾—ÁYøo‚Mñí—š=¶gºÎŸð£õ‘ciú–Â9©˜„•¯ç$„ *ù€wZÐ;n×YÔz–Úbg%ˆ™„< ·aÉÔ&Ì»Ž& )¨‰h|?"¨<ãÿ óbÃÁŽ£„âÚ–­Þ¤Ú7™QnÈ£ f%F¥,NÜK dAÓÍ.[oý—¤XK‰á ¥‡9*üêõskßhüà“æÚ#<4 `nTþTH†Í Y&¶%Í©»’…Fo’¡UC&%’d¤Ç¤Õ:ä´Æb0ê€RéÕ:YOòÑS]½$ùäV¦ÔšZ&µ: öµÂ­¾Œ&è‚Ý»>ù¨«CÚL¸Šj> ÁºXS‚$dILN½nTÁÑöw¯tøòkH^¥êÎ@°>Ƙ ÍmÃ:õv¨¯•0uØQk­j D³…¶£ŽŸ±x,€$_Üþl =šäun"-ŸVÅ*´1 &Nî¤æãzA-@§màÉðU¹N¡‹(Aâ -x‚´`{M r÷_á¡7P Â•^Yøb¿öOG{¯äWV¥äÁ>‹Ç=ާâaWžûþ­·:Ýfƒ[©'®Lɤ&%µjc×µ6ÇÎð7¡ôÈá³ý£îæpþ§• Bendstream endobj 350 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 2801 >> stream xœ¥VyTSW1ÉË“E©ís°Ó¾ç~\°ukë2UAkÕª¨TÀ«„%a 1(_Ø"[“’ (ÕÚq%µn¨µv¬Tm‹ÖÓ¡§½é¹ÎéÜ’t™9gÎpøƒÇ½÷û¾ßò}÷ (ÑJ ЋW­_?×ö·'Z[~Y!Ö¸ÀE."í+ÃÊG ×^è~Ú²ýŒ‰’-Ž^'WÄ'lOL TKCBÃ""wz¼6vÜÛ“&S”7µ†O­¥&Pë¨÷©õ”åKm 6R^Ô&ÊZBùSïPK©w©eÔrjµ’ZE­¦\©á”Q5”r¢Æ’Âȇœú@0Z <â=¤A¸H¨‰DÛE½â0ñ÷tÝ#Y&ie\˜Ø¡3‡Ö:QNNœƒ+?vqwùÌÕÍ5Èõ„µr˜µ,èœEð]ZÓ%´z¡ãìS¿ãs6+Ã9svÅn!"ƒˆp”ñàŽ­ö/ô&­3C}C4¨ø­È“þþVc{».n ‡ã"$)Qd—Ù¶kŠÃ.šä˰ ËÁÇ]HOò…¡H¹M{ŠÅX¼%´&Zo³e`ŠPìÝ–ÌÉõ[Lk€Á#±¿‚—áeh~¹#ñƒ¯zGÃŧc×±YiÀ̘û3&Õ{¿òž»éù¼Øƒ°˜f¨ÒÛáX òxΦKˆBq,ë:€®ÛÙ’ô0Hø¤@Îoü€EÛ•Q¡|Sj~Ôo¸{fpäÎɚĞ;×~áʹwgÏ\±ÌkIÐÝÓ\ŸV…EðE*êž³"VÕ¢œ7>SÄÓƒ_(FùA³Ñì7Ð<ŠÇ¢gm¬UñH‚èÓøe‚žY‹]F;€è´Žt¡„ô VŽÕC•T ‘\Ä¡ ãB=ÁÀ®ßLBôõét<òwPq­«%°e¶?í‰zÙõ~oÏ_üÞgÝ®YnÞj[àÝ_<ê°ÐB”­d¸|Q\®Ô¼§˜*c…Y¿ëøœu+ã|üù:ICza‡EvH_àŽùØb¡Ý7×-¨È"´zZÇô‰.ÏØµg¬÷%Á°X5]æÛW’ÎŽ7gô&×dÞI…MLdTèÔÕÆs ÜCf± )ÈøéD×—é´æýÜÉг` *Ϋþ¦¥=².®Œ<›?­l{ÁòbhfjêÍå†)4\ŽÌäè ›7z 4µ«µÂ:„E³ì†Ç³îÒýL˸ùs$ÉÑaÓÿVÑ㮇^»å|Wc ¤%'dEdsYþŠ0`<¡£z0ú`gZG±}œý¦¥ô[ v…-€Ú:UJþ¸žGO»s»±5ÿP—·Ó€¹õû~4¢^‹ Ú-´."å×ÒeýËÉIoÆÇÄÙt¦)ù“ôCª3™åRLUòÚ4ij@f,“MoBÇÄ'»ÒªhrºÌüY|_\IÔÕ¹Eç«îj £¡se« Þ¹©G ìJÐaú9aùÆ"8Ó…„Õ¨‰ýÜûs0!D¹2Y³£¡µý‡ÃÈMÃÛ;ãŠe‘·Òe&&ÆÆ²ós0ùe¯-ýV\=HÒÀJ á·•Fb<;ãâ@:9fÀFȹ?Ž¢ø;ˆù3☸߭ÙrÚ„Æ ëA‹ ­«_è 0C~Îþ*™z(9LÓJ¢gjAJY&ŸŸEd?<Å]öFZXFQaOmÂ阫>ˆw/#䔿k‹¡Œ!µ&ÙÎ&AKÔ…ûÈÙZB9zÿè®ÉÊÛÙ-ÿ›*J•›¥Õ»g5Æ%#KJŒ¬ÛvújÇÅûµü`‹_ëB‘ß ­R$`ªBEj (ÔÜ•rû;À¼¹ôJ÷©ºŸÑKºlÈã Yšl:7Ì´»“Á`º9°çjü×YxØ÷“Ðp4®îi™]€ö.TGàoFÿbu¥Dâb¦Y ~@‡©ÓCyI"È›mÍÍÕ 4 ¶àö”TPCÚ(âk‡~¡ ÷æ/Hz:‘ Æ…’þÂ;-hˆEp¸ïš ÍúéŸ4«­ÐVù’A%œ:¿€‡?œ‚¨ãmMM|$(q¿éLO4’õÙ2oÑÒ5Ÿu?°\¾qþä®?Û‰\‰ƒ?GEäú›‡Ö²†J8‚˜1‹‡yL &qûqz ½|üaµA‚A¡æãÖ½i°Þ:œÐÃh:Yý——ZþWáèfí"æ9CÈ…¸t™d…¶I6Ôa’ =A߸ 9ÄåF“Àèktõç‚jÁ¼m¯ó›¦Ú'ÛÜ1ÑÞÛ¨ÖÞµûL˜Ø‰æ]8Õ‰|;Ûd«¿(D!—ØtH…ôÜdÒ9i6 •B)äAI~¡IU§®æ»®_׫Íʾ²¤,G§)ˆ1¦ëÈ¥¢?PmRÔùÊ·î’ò²aÅ¡ÀLóôš%-8Ç«“wÇ@ l7Êr²Þݾo¨˜Y?y£шŸÏÆá•Åù! »q[[GG[Û©SmÛ||¶mÛÈkðö!ÀœÀw¶Mœìu´­¹þè©<þùkɦôŸ>˜Ð{œÁÏÿÿ›©O›;ÿ 5‘Æb=ËšåU2™\.“UÉÍæª*3‡½ñHûcÌŠ<Ì!m‡]†±è6ÝWä æ¢9u^H¦“–EÎH$nt˜NÓí3¾Æxœ=í¹Ü.óGÏþ¢Owx,ܤoß åLGØS-èxäbI¾yló~Þ8øMûìC#Ÿ)ðH‡ïn‡úFò#8uѶ_¡u+ú˜­$š¹Û6íà²òOøéž“Y›dö„÷‡«„FwHBî}ØY¡Š9 ãW(¶.ôÛ™¾7c7¨uA¼©êÞÙîO¸3ÿ¨xטaœ“(Iæ2”¢þ å.‡endstream endobj 351 0 obj << /Filter /FlateDecode /Length 183 >> stream xœ]A E÷œ‚”N›è¢aS7.4F½…¡aQ ”.¼½ejqñæÏü©úóéì]æÕ-ýÀÌ­ó&á–¤‘8:ÏjàÆéüQtêIEVõŸ¯ˆ|-@»é«š°º7‡–žêͤƒÁ9*IùY'„쬕 ½ùûj7Ã`* 5HÖ5ëµ¢! 
’Ç"Qª¥æ{›2§$Þr½¤„>ÓZ»¤u¿›Ç‹‹¯°7É(\Úendstream endobj 352 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 329 >> stream xœcd`ab`dddsö Ž4±$ºíùéÅÚÍÃÜÍÃÒýý—ÐcÁ[üW˜9xÅSsRsSóJ Š2sSsKJ2“SKò XŒ ì–1¶3v0v2°Ši1L`8ÁÈð£ƒï—À÷²=Œß~Z0ÿ¬ù^&:{q÷â%%ÝUò^°Uw-êž#Ï÷sªýžïA¨t/óOF•Él³¾«~çýÎðÝ ›ã»=\æ·3Ûo¿ß:¿Õ~{üöù®÷[ù»üwg$y{¶ßÝ¿:k¾«rð}¿»eÁÂùŒ»¿÷3ßö#GtòœžÉÝ“8Vå/ÎI­Ï©n“û-ñǵ«³½®»K²~UãäI=½³&Éýнà'ËÆV{˜dÿ”ý~aÇ_u¶š"¨?¾ýó‰­º»d Pr–‘Íœùendstream endobj 353 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 5319 >> stream xœ•X XSgÖ¾1æöªÔý*XçÞ*SlÝ­[mÕÖµî»Uª(Š *›ÈBBB ÉIBaKØAAŠ[»¨Uc[‹¨­ÚÚÚEkíj;c¿Ûç³ÏÿIÐ03ÎtFîr¾³¼ç=êÞ‰D=æ,]½dÍš ã}ð8Søã÷EcˆBÄÒÝ6tèºþHÛõês²/åû7#qvÒœäy)ó÷¦¦¥gl‘nÍŒY*Û¶}NJظU;WïZ³{m|ÂÈ £FG;nü„§'fOš<å™áÓŸ>cæ“OQÔr*œZAý•ZIEP«©Ôj-õµŽZOͦ"©9ÔËÔ\j,µšGͧ^¤&P ¨…ÔDj5‰ZLM¦–PS¨¥Ô2ª5 ¡¥fR½©>T_ªÕŸ@ ¤6S,5ˆL…QbjÕzŒú ÅQ<ÕƒêIÍ$“KêÿDsEGºMïö‰ø9ñ™îÛº_–l“¼IÇÒ_?²ý‘o™æt„WzFö<ÒëÉ^ù! C>{ÔúèÝÞõ}ø>…}Ÿèëì{½_iÿ©ýeý¯`DÈP9àòÀaÓ~Ï&³—ÍtlðÌÁÍ¡½BuasÂ2ÃÞ"²lHÉ›‚§·à/j÷Š„±çÅBw/»™–ï݉µP£1ti ìkØ ~½m²4Z¡"ìÊÆ}/®Ûœž´ƒ“ÉÆ÷7›–'î¤z(æÛèõø¬äå.–žîj©M´¶:ëªß8~Ô Ì‘Bibž!۠ᾤ{Ñ&âÎòób4Y@ì£úÙ{ Éî.F54êûÉo_xcÞ^äâjÙÞb>|ùܓ旱¨ý¹}yFäÆ9üô|mEéôú[Ë3ò%k÷§ñ¢+^ÑÍÔÔ.ÌÈ΢î3/ŽÁýpî‹yÌ|ºàÔõF½P8‡•8Š]°Ù{é·ëŸ_ûðµ5sÇÌ Ÿà3¢ò …^Ñ$}W…+¬±ÀhãÊ)ÉisUyœV£’éòvî‹.Û ‹‡áexæ®`š˜þ%êãujJÕRHÕðÉXF‹ý˜©ØÙVÆ—Á½¼q·àñ—;Øåû/B ¨žEѺ Ѓ†I¬[£©´”„ÇTäßçÞn½,FËáÇè´K‚L~¿`Éêýáoï‚üq¾0ö×'A6E?À1êçEÙ^±°@˜Ân÷¼r‚·©[f¬Ÿv3ë‡,þHÞ9Íèá9YtAÅéÜzW$€>Wý’>[9ò™SY^n¶î¯ä^ÛvÜQÏ·þðú+u •ünO’e†3Ò¾Êû˜ªú£ˆ¾nŒ—9ƒV½í4Éi=PPSø:˜G^yjªFž—Ïœk÷þZ)B/¬¶ 4‹¦«€'Ò%ŸuAI‚>KžÏe“ŒÆÿ½ «òe$»Ê°gßKzï× ÷¥4=G“¢ã/ÌÕe³ö»¬`2—ñ÷!šô¤RèÇ’Ä¡à!4•ŽYùhM–bÅIcÕCò~€þ?‰GáÉ’MÁj!&œdÈàï¢>çqI\úýÎìá+ ï^6KG É’@ôy‘¤“­‹…S>@šH›ü+¬ž¹§ËÏ"°Ò…ùˆÁå†7LÜ¿çö·ðÍÿ’«±õâ½¢òâ®@ôg¢G->~ôиÎMXš_ &M}ØZ’†‡?YK†‘YE\5„.zªZ±uɽÌ`±YJÀθԕ‰²ª¨­ I‡®ýòD=÷€¥îžG–¯Å‚‰ØuIJšN“¥æR·G§mæÙ gÎwœFýçl„|o4õF`dw¨´©è`ûXØ€N£‡?yaÉFØñÛnÎ_bAA²»]P²örk±¥˜1Ò3î%JdÁrí~ÍÒùk‚BRd õ´ ö‚"i*žšº1q¥"Ž1Ðù'¢Q¡Î¦®A0ËïcÅÃû˜øáf¯ I#mŽw,)Üà|Þº³`u—9½žPz ¸üýÚ‰ÄbbG!Ô0ÕÅáéDÈÕiâ ju$™Ÿ‰ `m4ž¶sdLÜcð»ÊœÜl†íõ€‹Ddò™ì–}U…G 3ù83G {´œ>ôyÑ ®¢»Bî<é¸òâNÉxrVÅŽ†åFDO~‡ãîϾõÆQwS+oÐòAÍøBåþÖp¤†]¹éËHôÍ××/U¼˜®Ó&änàÓæ½¸:;~è4Ð+ºq5¢Ñj¶ÑŸ ¡£›çâPÜ÷&?%_MG4è½µ~(äke9üž­¤[€Y:³õ'¾ø [ÝÑqê0?Â3{• 7ä=hi2íD¨Ý7Núw'=è·ÑQPim©o» æèf(ÆÕèMäÒñ_ÄÂá=¶ZêIŠOMJÙãN¯m¨©iàð˜{&÷ï’cA¸Ž ßA—¹(I<¼‹Ö§è péΉ…¶\Z¤¥6ŸèI[ÛÖØ·ùÈèI#×U'U¦óõɵš«Ò÷³B_Wzba;³lùœ‰8~p™,`㪥¹Zò\N§NÁOC¾_PVØ_©àÇžS½EÆû%¢¾y?®%»˜O®“„×H-ñ6)Þ¹·Z™Óœº‚$ÆI‰YÞ\csx Ô+4Òâ¹ÌwÄÂÖ‡&fæ½_YmRö®´=±1»«39ÛÁj68ø†½Å™²øì˜ù·à ñ‡lÜaV`wfêé LI{K˜ð:‹ÿ©dŒÞKÚU«z1Ô¯ óš]d"âx¿ÉÅ P?ÄVwYIfÑ%u¤vɤvQÔ‹èsTÈ’yÿˆäP…xlpï£ñr<ê]ô]×r/ Ââ¨ÿ¬—ºœ56xV4mrš=…¯ßú(ô6'IïòTÏ š’;ñ’ï›Ûbá’ˆ–€ ?ÛêJz«ðŠ©óç,‚°È­G>*›ÉÉŸFw%ÿéßÂò0ÜíÒü/‘ø&¢~(í2Ðû'ù§ù¢ÃCº´Áµ.Z¥op ¢NŠ‘Ú˦ì}°¿ùL…$ÞÒD§°¶êƒÃÍ-À8J ±jC>™2[°Sò°ç‹._t^²+”2dž½°zkŒJÍ)î.JØ™“œª«Kó­^ŸTTA=ï›.éAš¹ñçÇßoº…!•¢¿ Ó|Š(k¹ª˜JE¡B¯×¨8ÜkQêm*$øµ„ç”dæ¤hã´\&î;úýl»–ä5,M–‘©°æçñ?­À½V*52¤‡©-Ò*‡­ÀhâPŸö4Ìä+Vµa²âWQ•­ÞÂ-¶cF»ý\è÷‹‹Õ°@˜§¬²¼TmU[øg|Šöïïá$¯Ã~”Eã»”w¯ ÚxÌü±8”Yi¿x@ º”&ÂWškn4£ͬÝÖˆÑ÷¨šE#hg ´´Äù¶£¡t5n´R7…•)ÊÓ´‰9z.’à´2ââZÀÉ£¡ô^<œ¨ èÃd%™îFKƒÃäg Þz¢~"´ £ýF6iÙ…L£1H5Á(‹2ܯ¡îWy5©mSôžØÝ±µéM…&0š¹`tã~œ5ZBŒô-èÒ#4ý.Áã»ÀoMXöùôªîý&Œ ñ$²`X‚¾ûàp°¿ÝhgSëâvîLØžàI>ÐÜÜøŠß÷ï©•¢7…*±ðþï}X‹­À¦"«T*ÍSæäq÷Öü±(O™¯Ü°L—¬²¢Ðé°rþöôv»Xh²ˆ]ŒDOà¡äkÞ‚CÏ ûý=†Ö Ívá¯Ø…Û®ÿŠ–üŠÿêbëä xäX¼4"0„ÞÞ_*EÂhߘd£DíJ—å©4:Nž5kŽFnH×C£²Êj[ÛÊ=Üü{S$£»´ª9¸wMé¤Õ|ߺ‰œb¡ÚÌ"æú‰Ïá æ»ç/a8|:î¾,¥Ræ©sWÕ84N…«:tì•÷€ùðÃÝ/ŒÀ=¦ÈŒÊñKr•6aˆÝ¢Ë ¬Õ&‡µ ß,R¤ÁF=‡•C¦3¾ÏÝDmD£œGo³ßî_5Ö̲‰¦·Søs‰“´Xm†+m¬ÏuwÇwèñ¿Ýýù­¤ãË÷qß¾]öÔ2§×¶ãxÔÚ¹ëãšòŠªª+œZ¹ÍF0g%oKLÞžÈïHNÐïÔ¯Ð) $7~‰‚…ˆÐltO,4 —ÙâìŠÌ­.OÍÉ•[w§B.hl¹Ž¬ YÅNØ;R”¹»ÓRåÉ(Š3ËV«¹€+)zµ¥š°UëPV¤–ËÈÚ×TZ]ÖXYKÂï\3(¯ÐƒÔl$šÁváÄÙôYA¢¯Î„%ÑiŠLüê=*4'[«€¬ÎN:S’…]DËA«éz¡˜5Ò+ñϜҙÖ&/šLvûoP5I«É7+ó»(r7Jƒ&.Æ¢Ž‘ÛU¥f³ÝνþÆYK#0Mk׌3{ü–ÈÒ£ Þas”v-WT%n€u9?_ÿ-„úÝAo/ºž¼Y—¶!rÔzM#8ÿyÈÉ‹Ðwì4àï·¾'¸ .¥F«ÎƒT&Õ•Yç©-~õLä¾¹ø 
oSç3¯5Ì/,oBñè/ÞŽ²"Þ ¸ðvï|—§*ó#ÑÛˆô€ô oGY C~Þ¦¾ùéi3úA|5 þ"þ/áíè +blE¤In>Ÿ«qˆ§F„BÆï ޲"&Àólï¼—5ÿ®   «G$F<ö=öÕ0áOÿùbü\ßÅ]9Hr‘½{ï;B⮫.GŒÞŠÃñ‡Ïüù¹ò~t¡tŒ."ÂCO¨èr%!ÂnüIÚIM¬\KÜó1—R#£FgÅáx{oâ§û2q£Ù¾LÔHEƇmdß"*ÅÌŒì Õ˜dÆÏãöc)ÑU–KÛ.Ïþæ*± £ˆýðšžXXG$:#øÀžo5?Ó­‘8ì>ÛÉÅæçL**ÞG§"Ù‘Àõ¾ÕG9|9w=Õå… œªH…Âßè{j_}Ä)KpÜžNz½~ÄõMPnGTP}»Ësû7ßqR”ëéb<Û7A¹„øX=B(Oð6 ep,ßMR!MÆô74ßÀ‰Í6p¢FÓ œ¸Ñl'jôç8íçõ³ItFðC}î>®o'Z…ë nkhÍ£æõÝÜhVßM¦õÝÜhVßM¦õÝÜHÅï‡æ¦›¹hýK£Yý°‹HÏ™¦õÃ.⊙Žv,¢ùߥNAêšê(»…ªºaU=$téX‹<þ¶"è0D²Ó›OÆL‹@=ï°è:ƨHãΟG&íÒ†ƒ–DÙEÑ á,$z‡£ÊMÇhë<ÊÛJˆf„ãs_(ãpŸ˜¤pŸ11£Y,w{ܤÑ,€P€n‡rÆü(˜E¯¼¼¹Í@0+" {‡r¸/:Èió£`wÎkuã Ë…羈‡r´/:Èjxã·±,ê{¯”d$Ʋ"Bá¹/â¡í‹r"ÆP0‹;ßOÆ‚YáøÜñP÷E9c ˜År˧—Ì Á,„P|î= *á¾è!'bb,’K‰Ø¶~á¢pÜ[ð“C{ÑQìNÁÐÞ,VÉâ#Ì‹°ç4HŒp2:ÝÁY`8ÞÞ`I|䇄ëþ.Àe#o3Gp/’¡wÆ¡¾aDFÄG.¶WVëñ-[jŠfip³7|®¹·ò‰µâáÀÛØÄ‹ƒ$D³è³R";>[;È ÜËïäå3ÂüÁâ¤ÄÚJIHõ»ë¼yuòûEÄ•åJPlG˜Ý›{ÏÛ<¥EÀõÀ@(!cñ†,!¥ö`ï]¼Dm'b)j;ñ³ê™ 4BB( bÏ·š>|Îk™¹Ñ¬–™Mk™¹‘ŠŽ™¦;'¸¨ãK£ÙÞ .z6gÒq«Q£yå)7šUžR£iå)7šUžR£Yå©‹"Ûsl™ŽÛÅ¢2eó%z—O¶þ»BZ|¨ ùöùhäo°Ð\–õ”^–˶KP!åßþ3ql Zôý‘oÉ#¿†#‹ÜhVÂJ¦%¬ÜhVÂJ¦%¬´ÞÔ!°q#™›Æ“©Xdj.ßÖl®Çª‘XU>uÛ¦E.Õ…Ü« 7bŠ‘±ß¡ÖÔþò‡=ºö;XJ35¨_¡†>»ºÀ½ìd¼êAHhªØ=áÂ.œ§zK8ÏY„!6«;b­ ¸\ŒÝ…P²0ÒjzƒØ))”fæ©ÐW¾cçßß[x’”?0PkÑÇèÛH¼í‰ÅwD±½×ŠÁ‰xM‚‚$l»zí6#!æ ã ƒ‚n˜ ºÂjóà`ï|}ê›2b^¡0Þ{ÔT‚‚ÑAVØ©s¦³Ó'ƼÒÃx_DM%(d5¼ð» `ï›Ï¸»²ƒˆWDVóîiÔT‚‚ÑAVÄØ žõ帯B^³{‘J0º£#ŠÞJ„Á§<|“xÝ[xTy" #ñy |™û¶6hD$@ÿÅ“½å%¼KÅ7Aqå§ŒKÿP„;ێüNFô78 A‘޼¤Yõ…„ØBÑÀ·P²Ž-Æ‘[ ~4+îûjWÙŽþ«—‰½M4¨8^’Šã"ìFÀ0Ô…[ 1‘±çCÑᎀx—©èC£ ŽŠCñö^¤O ú\lkúŒ2-èsÁ›¹µé7©Ñ´jͪ֨Ѵjͪ֨‘ŽË ™¦¯ãF³×Q£ÙÇë¤Ñ¬ž¨7š×q£Y=5šÖq£Y=5šÕq#—Atí±ÅFarãå*ßiCÏÐ*Øqngy¸QÁB$´q~Žòì×J`þ´P@ÈE_v;ó…ú¼ò·Ÿâ ÚCOée¸\»„Ut¤MöªÇ–ÅÐùZñ0ò;`æ$ˬ¡FšÌ ™ThÖˆÃ@À\Þ¶”:y}’Õ{©å ¥¿Ë¥¸å\îÆïà:j@$4ŠÚ6jˆÄ‰ÎÏU—qæEYOŸ€‚Ð1¡Ú¤³íAH1„Z¿ƒÅ4U…ú5öù,è±§8ðC?eÏzÕ¤Ï@&ý ®ƒ*EŒrVgœ³pYѽcdô8(·ÌH¨„B:ïÂ$BÄ8BäDL LÂYMÜpä!Ø@FÏ|×lFFB° ?y&ÑæçhŒÓ0æGÁê|µ{* ØŠŸŒ£1vðÐANØÎÜiý.ÖÀÁ’×ÐG^¢3az«`nÏ=:ç%þñ]äã$¯Q)vDÐÅÒóÕâ0žWöŒ¤v&ù<弇¨Ø3^K/[Ý4„ çODtL°7 )°ŒÝ-DQH!"!yßIÀ%ŽÜ‰a†s=êÇL¾3¸˜ °Ø‘BotDÑ; „yNSçƒ0>H2 ¸Øq'$ŒÅ¯{ÿ<#ÆÞ Ì¦p2€·¼„oT⫌Ȗüx~] ñóî(öŸ=Ñ7ÁÞ›ønt´‹’r§ÜLAàÀ ËLGpxÁ!Dè„?‰òX© q±½v1§L•4!£Foáxƒoâ9ú4^^KôiÒF¢Oãç>}š0IôiÖˆ£Os}š0IôiÂ$ѧ “DŸf8ú41'ѧ “DŸÆTôilNEŸ&L}š5âèÓÄœDŸ&L}Âuõ©‹ÃH³mß\¬ Ǽ\ðgΤb,w½QêKG$¢r{ù›F@üƵRqŸ?Ûo ù[OqEí¾§ô²\¶]‚*:"þ8ó¯z=bu WTá{r/ÇeÛ%¨¢!jã¼ózÊ5Â~~Ä[HP…ïÉ‹,ÇeÛ%¨‚Þ¾î쟲dDëÒ\Q…ï)½,—m— ŠŽ¨èØñ”ýì&zÞB‚*|Ï{k\¶]‚* ±5„ìTÛá-¦©*ÔoVPæ"kÓË‘ ~Mýaáy¥|Õ¦}™–Šu…hX]ý .Ã:øÕUðËZqNâuü‚¹OüêFƒ áPW ‡…hÕ»0ü"ü²"&&Ñ;~á‘ÇàW7zò{ ±€P¸êU˜ ~¿¿¬†1? 
Y”÷Æ·/ã=œz‹J +:£#ŠÝ*Ù›FîD|$#ƒûâ.ï (CÈh€ÑYq8ÞÞ0j×Þy”^Q ˆK@8~ôE\SÂQqäVÄ›…¡ÎÏ'_kŒ… G‘&M;|ì"+bl†Žûçú*2#a¤¡7ìH±7z£`dodñ‹ÌÝ[ñ·#(r¢ë„àø‚¯â`¼½Qx­E°¾ ŠØˆÎØ:<IˆÐ[ B:ó/{7¯õÅðéOØq'(ŒÅ4Ã;¢ìYMcÀñdo9 CßXO$¨ìnï‡Æh#ãÀlô}ô!Âï-Žë¿œ”å²óG/=PxÈTC¡_‚!OX½57ŠñåÕÊgÕoõÅŽZMÜ B`qŒ„½•ða2'5A®n//É~>Eú«DCÀpxÑWq0ÞÜ‹ôYéµ™–Žq£Yé5š–Žq£Yé5š–Žq£Yé5š–ŽQ£ié7š•ŽQ£ié7š•ŽõFóÒ1n4+£FÓÒ1nÔ‚w2ë‘Yw¬ûÞß-×çuáhžÌrA†ÓÜ’)ƒŠÌÓϧ9µKPX^q}ôׇ©“ë®%HmˆÐ;{׆­Ž„”æ_yù=É>Z܇‰xÖR„Þ´ú=X6ݨ‹Í6êâÐél£._3M7êrÑǹ¹éF]..÷Òh¶Q—‹XÍ™¦uQ£éF]ÜH•†8¨ämá@Hyªü”ò¤üôÿ©O Œp )D™|HÍ4T"F *9«3ÎIL‚JÞ0ôQžþ7³µ*ù˜Ï› ј$€ÄÓ; Cƒ“ˆ‹\bïé+=B@l‡áÄÞÃñaŽk{JUíWqœ3||;BXqŒ0»S0¶7ȱöÈ¥í=aBF³4ø ŒÆÛElXÆõéLf$DlB§a%šTBwÆü(`sågç{û.`Ã!§aüÊŽzƒ¡wæ]„£"|À¡hÓkt=…±øãîß@%D4š^ØÉÎp–Wðêcßxa¬=ßÔsr@€ˆ¡ûü©ˆž öFŽri<ö¯D6r5u‡Áâ(;¢è„q¨ …gŒÖ„ÈØàF•ÞŒ€ŽÚ½ 0º ŒÇ„7Êeeý.ZcCK3Gpœ*’¡· Æö@dÆ(M€Š­mwß½¼Ùý=×ÍRï*0ooô<’KåFóUôÆD•&nàU#!BîŒÌY•ôÈÅöòÊf± BôÆ{\Fãíž2óEkyÔÖT1>cJ37p€*‘a7öÿ$„et&Àt••ÿ®7®¢>~O-4ï— œ¡ÇÏöVHBb™¿z…*#¼Ëó˜o#À_—¡OP¾7ø&~¶5šnãÅfÛxQ£é6^Üh¶5šnãE¦Ûxq£Ù6^Ôhº7šmãE‘˜é6^ܨÅüä"§øX–]òTùžŠù02œPŠI™¦À=ŸMÔ.!U×§†©cÄ‘‘ñÙ±a­„°oñí—¬â£C±J5=ÔÕ‘`ˆØó­æÓì›M׺yy%7š•WR£iy%7šmIF¦[’q£{²%5šnIF‹èé–dÜh¶%5šnIÆfdÔhZAÆfdQû³ ²ëÙ~¶¥Ùî¼:¯“˜Ší…è_Õý fšöuFì³Vgœ“øä¸‚ÌGÞF±–®¬¼ªt€XR@8êõ®^ÑȤѬˆ‰0+îû232ze—ífä pŽ;½ “(yV¢XVØ#Ϋv~ËáÈÓÈv Ø„»U0²7 ˉøHÖ Þy x [v;Žã‹¾Š£q憹rÝùöXQpÒèMÉà Ÿ6LNžæ²–çÒµw!* C©_pX6Žœ¤nùÚ¼[D›AcûÀWÖ\žqÞFÆ/F×½V ¢u5‰ØWÆëÁ {°7?©JHBbÙ@~28Nƒ„­-y¿aËž |oïUü¤ä‡µž§\ñ)®ªc´ð…1Ú‰ øÒ$àˈ¢7þ$(l¥&Äuç[Z©¼ï1aÅÎÈlNxïDù„ˆ`rT´R’[¾[µKå¨!ã8ºå‚É÷´– ɽóCÆ)U­jŒL`é”Û1L¾ ·LHnAVyË^ €‘É,]‚r;"‚É×ô– ɽ׳¬ý2º‡[.eO¸ûËÝS‚Gaåâ>Q®ÆW‚±– ‹%„ùƒÅ×”J-årji$ÖöLPZGb† bÏ·š†„%§BÖ8§"Àþóis{Í ÑÝdY|Þe¼7dÚ“zFï­çåîÜhVîN¦åîÜhVîN¦åîÜhVîN¦åîp™–»s£Y¹;5š–»s£Y¹;5š–»s£Y¹;gEþ¬Üý>êNeòiæ¯ É•8«tr&_â¯Ý$g:"„ÆäØÂ$»Äb»ö¾—-!²å'MÑ îê½…7±³Š{ŸÀù~÷W’qžyùh”Æ”@(y2N‹¹Ì‹6 ¹+bbeÆŒ¸á 1l~_åBðUZ@g+&Tâ£H%>r+`h.*·:¤êæîr‘2@RÀÖ´ÇáEOÅÁx{Ãö½•OÕ|“0ÉŠ‰Tâ#ޱ#ŠÝxɸh¼Qš™ËsÛøÄëõ6 p~zGÅ¡8kÃ@<}Çä=Ò/HN"ޱ!šÞJø“¼Å½×ýâ¿ ×›4Îo™'¨ŒÔçï±D4¢^$¸_@8Ñ]^ÂØ9Æ *ËÏ$ç ãÿ„Œw…ÞÎèˆ2àMN¢èùb·îߥL¾bæ•üˆƒ'µy™pìQ¨¹ÿc3t—ð²\7Î ÊòrnÃIBÆÛ@ïGgtDð&g§×hIP/8o7ÿcrÀì GdéÔï ŽžóžoR6g1óƒ$@â;"ìÆþØÜ(` „¦nkÿ}ÿABZ€áà‚ŸÂH¼¹a *OÈkÑÕé1õÓc/¨ÔG#!ŠÝ(ø“ôˆ“š Y÷HU¸Òz4 àœ(¿#Šß[œ„­”å>K¹ÁÔ#ãX¶å"üÞâLJ”Û œy­jx YAÈh@–=aùÎÞ›øé{./1}œ¿Wàì/fï¸X±ò‚Š’×{t<~PÌV¡Ó •6'\ŽüÍç*–dåGËRö,ã\OxC6PZMï ¾Îwõ:P{\¥"5†¦ýÛè÷jÈTKˆð;ƒ_È—@úH,ÇÑGRMÇuuÄ¡#"æ|«™¹å¼Ö½Ø@ ™‘áýÔ’iƒBæéßráŽÂò:¨nmÎñìóž¬ð,Ý@lC”orrÓ3R–[ïÌ(hnßÇò-Y‚b;¢è­½Wñ*¼?TÚÃûã¤î™*òªëÓä4[Wƒp?WÖ–^+2oPE[ëùËÜhö²5š¾ìÁf/{P£éËÜhö²5š¾ìA¡’éËÜhö²5š¾ìÁf/{P£éËÜhö²íG/{ø°þ¨~Ä‘‹[³ 4ˆÈ㘺ž|‘Êk°úûmŸ³ùGQ{+.!26¸¬}ƒFä= â!D )º ÁÙx¬2àÌÄèð³ÔÒ?ýRCÌ øÿ› ÉÐ$[ÀÓ; Cƒ“ô‹\dïÊ×Çý6HŒî›§Ñ4ÞÃñ_Äw£ÃSBËÏvþÜßÅÍM<¦s¬žs„»06ƒâVhTl-ß@nãO8·¯VĹí†â;ÏZŸ„¾‰››€þÄ ’ˆCd„É€yþÀF¬­Ð„¸ØÞ³–x± (ª­w«Aó3¸ ŒÆÛÅcŸ’B¼¿ `ƒp;ô„ŠÝÇQ2ÂôN‹AM¶Z"#ƒËRž55Îq¿?K½·Àpœ½ÑzñÉ«³ó»Ð² „Ïü Qõ8HF˜Þ*˜Ûƒk]%>p‘¹u»ËÛ†‰f½_ œ¥ÞW`0ÞÞèA¡ÜcJû7¡fŸ9BÂéq”Œ0½“0¹Ÿ}•úHö”WÖ7^&`œðˆÎê;ÂôÞÞ$ÜóÔïñ}q6¡ð‰'8ª†ÍB/wÙ7挤‘è+c/AÇ8/;í/«\zö…²™!¼ã£'aÁääÄ) É-ëñó€¡lÙ?ZK— ÜŽˆ`òuyr™3?ëɶ[ò ÑXkk£áó©…ÕêÅ‚«‡ñõ‰ 8VGHˆ°[ηJàzò-ô~ÌKÔɽ‡<´ÂôÁà$6k•$¤¶¬ŸÅ¼cÐ{ùW"€~Ë– Üްø>y%"Ê}Öòò¦~Ç wòoDý–,Aµ~oð]¾¼Ä–ìJÙùJ½d@ÜH¾&KP+!L ~!Ÿ_‰ˆjŸ§x`–Ƚ¦aKPnG„?X|€$µ&r)©5ü­šž J#Äg†"ö|«ihN%rº9•Èadã%ç¶ÖÏfÆ #ã;€!Ó™,ÐÏ/Ü*·…õuÊÈ2¹ùZ:,¶#b ˜|»ùªÒDî½ÍÈ®k‚Ò €ˆAß*!&BÖòqêvÓÜ®ŸeC ³×^b2ePÈý[ÚLR9QÙSÞ¿PÒĤr VK†µv„é½½ÙV I3©”ß?äèž É"@enb.˜ãØsí¯r9˵<.ÚÏ8—#øÊD$—3a’\΄Ir9³FœË™˜“\ΘIårf8—36§r9&ÉåL˜$—3a’\ΤQÏåLlI.gBûbÅ60P¾>kù±e{wÙó%)„“#1]ÒƒI+ ᤄ‰·;ãDI*#hîùô ˆFç_æ²ê× b"ç&R9Á#çD#ŠÞJøƒd…“š—Ø»ŽM$$ìfTh€Þ[q8Ñà›øfþp$ûzÊw¨¿ŠÔ› ÂDg#X>g#QìVÁØ^Ôn•&À%ö¶Ã¹Dêí q|ÞWq4ÑÞ0Ž}Öpü7z›B˜9‚Òq„hv«à%eaÅ[© ’=Oý^V Ë32ü”lpN”/ˆð{‹ã—ž|·Ëëï¢õ!‹0p†d$¼%öha}F”=«i yËšKÀ7QÀÐ3Ö êÊW™üË¡FÆk ®®D x““UðQ®´_&tªbêÉ{x%¬vÍ÷èZs!ˆ´¢^$X‡Yƒ ¸'JxyœÑΉÊÊunEù·%þe9ïGg"¼ÉÉéµZè̶òÍã ˜PŸ €ã±ä ¨æ†OÆçYj6¿J˜ŒÅÌ”ücd@‘[ãÉ0^¢µG²üÌrm(#àp<Þ9Q¿ bÀ›œQ¬–:³ã³—Ï$i$æŽÈÒ' ?èÎ¥ÎÃor*a1q§>ânk`-ʶ"$*µj0+àö‚¿(ç–(eÀYÇp­’„ÄÞu±r!Cä¶´ 
"üÞâ»|É`Dµy’®Ê „ Ðoéˈ2àM¾@¥0¢Üþ+iŸ"A0t ÈeDð&_ ²QnyßvCi$ÁPt ÈD x“ï,F”[Èo˜*ðY4C—€\F”oòu*³åæç[§1Bªcö6l ¨D 8‹ïòù(öXÊ¿(Qâ_ÿAPÍ–€VB4¿·ø&_§uÆj7µq,Ôj»& M,Ÿ ë&ä Sm¾w­§×„Ì|MAIV'jÍëÏý†©›Õ K‡Ô2¢ x“¯K*ê‰z§TìÛJ ŽÀÒ% W1àM~1I( õrBiüèiº& ‘5¯Ò¯þûZJ\ $i%˜çOŸ4rðóŠ3øšü¢„ÒšÏPyÍ3fn@¾Êk5d­ zgïõIQ%™‚Ôã,7O•R i§É“®aKQªBï êð?µŸî]&&{—q£ÙÞeÒh²w7ší]&&{—q£ÙÞe¸œí]&&{—q£ÙÞeÒh²w7ší]&&{—q£áÞeÒB½4¤™|{”³ºËŠJ>¬sV1¿'¨ÎO{Ñ©óS„(Fculc’dS‚Y7x–²ƒÓ ôJޤŽ1ƒ N‰Cð6ÞÏ>ÂíY_£|ÿä¹¼êe–˜¤Œ&RUþ©RåŸz+áû•—š W¹k=(aÄÈ0¹‰¼åwDð&G;¹y) Êeþ«ÍlF@ŽÈ¼Üƒdé–ï â$AvÎUÖßäˆlòjâ•‹ƒ$DØ­‚±½¨Ý*M«ä<˜ "d|b£o¢ú†h~oq”ÎòJTËü7-Ì Iõa4Ëž°zgo~Ø?åë=ߥkLriæITÅ1vD±/¹,+Ý*Mˆ‹Ì’+æÃ&hxÁSq,ÞÜpi\NËòe%&}~ RdG½•ðb=„hõ‘l?êW~cg™ìˆ¢{¢~BÄ€79|vÈZNófÉ8¹b’>SgH)½kÝZɹFÄÞiÞ­™ °Î²ætðÑŽñºV÷âLÌãÌ ½ßƒ: è½ÁÙi5JT{|Ê äh`Ëé_Ýk>H/!bÀ›Æ+êjuù2y£SJSoH~*ŽÄ^c;¢ jMsÎ]Ö^ÂÎq Æ®1žHHغýœ J¹­»°»¼ë£'~oqvj”忥ÒñÀüMfÁXºõvDð&ßG òYQユ¼ H¹|¥KP/!bÀ›üb’ЊzËŽ£Ê¹„¥KPoG”oò}*£õfä†"›ÐÂ0l Êíˆ2à,¾ëW ­ ¶l´ ‘Ëgô߆>Š%Dø½Å/ôK>+ÊÝîr]D"›ÏÂ0t êíˆ2àM¾@e´¢ÞµFúb–ȧ´ð¯ØÐ%¨—1àM~1ÉjE½Ë^C± ud³ZøJjèÔÛmÀ›|NuõRªk¨ÖvMP!!11è[%ÈÔ‘å³ý4×ìu“”ÃчÁ+Í¥Ì).Ïþ–ÌSÉ¥(l­J ˜½rb-VKˆð&_׳:á4ÔË §ñÚÕtMP[Gb‚!lзJ‰²AuFÜKuȨt`øÈc©”9¡rÜo 5IóQk-—–¤NLüLžd J€Ð{ƒ6‹ÔÚOw±“F“]ì¸Ñl;i4ÙÅŽÍv±“F“]ì¸Ñl;ßÍv±“F“]ì¸Ñl;i4ÙÅŽÍv±“F“]ì¸Ñp;i1ÙÅŽ©,Ò°þ¶‹·6ˆ]ŸK}»…7JCYœUѳXee–<Ÿmæ‡Åh­þQæÆÊKˆ¬\íHÔ‹ûS Nˆ’aÎ`e’ò°æ’¨ ´òtB@žÇnWáÓH²±ñ&x¶%œOÛŒf|·G¨¯6a2‘*Ù¥d_QôVÂ8C¢öFjB\l¯} T#>Sá^gB ÞŠÃ ßÄ7£ê”•¨Ãø´½¬ËÆßäL®b,Bå=º|•÷`DØ­‚±½¨Ý*Mˆ‹ì-å£çFÈ èÍÏÐø‚¯âh‚½áÂ#ßù×ïr&[1ñƒd>âv#à%9‚VwZ{àz>%„Òè“ v8Þ3Q;!ŠÞœ,ù¬’„Ô²‹òÈŒ„˜½ùâeOH~°7\_ŸšLxЃTÂÀœ—ˆc$DØýsèÉQ)TÏV‹ÁA¨f.ìh¼c‚t½78y”´JRËöÛÍSM×_e‡ã±ô é‡Ï©ç]?¿ýMìÜÄù§®à¤A&!ŠÞJ˜tê­ÖÉîú!@I‡™7¢àž¨Ÿ1àMNâVK‚zŸ½8EÒQjÁÀÒ%¤—eÀ›|Šü½Ë§îí"é0µàF`éÒË&¿Gþ£Þåsäö(”S vŽ.a½ Áäû$ôõÖ§…FÓQnÁÀÐ%¬·!b ˜|Dÿ£ÞYÜ.M‡Ù?C—Fzc ˜übýzËíó‚Ñt”^ð#0t ëmˆ2àM¾@Eÿ£Þ;_’oMÇéÿK6t éeD x“_Œ@¢ÿQïu”GaGé55t éíˆ6àM¾Ž@g€Þ§,¨@ß}7/ ]BzQœÉ/F ò#½”w/(LÏ„¤²l'Ÿ€½„·Q(?"QYl9v¥@¸‚½× g€Ôû,ßç‘}œÖpj Ý@mGÄ€7ùºØÕ™ˆ¨·¬õ˜?°_ì‹[K—^B”oò}*ß1ÖKùŽñ£ºéš6BbÚÀÊœk” %ãÙ®ly1±ß&á3ƒQÇ¥Í —gw‘ÈÜzþ~ 7š½C¦ïÇp£Ùû1Ôhú~ 7š½C¦ïÇP‰#p&^åª<Âð÷ þèôÅhì²å'¨Þˆ±x>ŸHU±ø>H‹'Dè­„?ˆ×;© råËÇçÁs÷ÊPôN”ßmÀ›¾³á¤$(—ùoÊ–âåö 4 KŸ°|gpt#\ö+ü*`®ù3GHR ²#ŠÝ*ÛÝĵöȵo%jænÿ.4žè›¨žá÷‡Ñ}§$Aµ]²EÄ€¹þp Ž%OP¼37\wç‰vÝßÌM ╈c$DØ­‚—ÄzÄÑâ#ÙöÔØC™û_TpMßawæfOeS;Š÷JÊÔÄÿû_ÿOlDÀQv½j›õS>i¨FŒèVÛ/©ºQt›k½µ‚$ˆjµ•B­‰Ý¦¬µ&Ft«ÓšPm¼W”ŸT«Ï¯ÿQÎÃçWù¿þÏ?Ó¯ÿø¿þÃÿ½–€Y çþãwv~mðk9ËýåüµæÅý~<¿þ‘þú/ÿòoë¿þÛþ”ú„ë_þmù×Ë?Áýg9þå“ÿsÏÏÁ÷þ/KýÏ£´Xå?7ùÏ]þóø×ÿúÿô×ÿñË^^(Ù·µ86ûïeÊg—¯w^þã¿ý• ] r)(ÐY êÀú+ëbi&¬ec¦²3XþÑmÓŸÚZ^ñÎ ¢íS6ê+k]AJŽ-σ#ÏÎ|éÙ>ÏšOy>Î ¥‹ÿ™;”×’Õ ßÅ·GÐq3Q{²–W!…!{8?α‰vX:jÀª;Å?Ë/výä‡í|ê—çÓ¦ ©cùø©ß·/ø–YÖl¶îÖŽ³Ý5 SH>¾ËeSø¸Ú¨=:r×wµ™a¹®2ÙF?.=º nA*‰ÁŽ£-ŸØOÙ*)»|?êØyê XÏöeñ|M\ö|˜×†×ÁÇÅì]ÓÒ¢œ­]ðq5Q{t$ÿˆ3 Wvzþ—môãÒ£‰à¤‘ì(êÈ>ëÕòLùJV_¹PÈ]Ï}ù¤õZ^ŽÉç¼°|òO{=ùøŸåŸ)‹j‘•žC1ðñÒ…2R®e¿˜`ÍÂ?—˜èÇ¥CÁ-Hd'°£h#Ëçy/[ÃåÛêQÏ™ ÷O‘¹%d±• D¾U®å51u\¬®Õš´ØŠ5îOG•¿Šl@ÛÅ{7ܧð÷ãÒ¡+à$ìê°öüÿ©_éëA²SËÍh¿ûÜ;®òëšo$ú8ÛݡÀiqô)O |ÜlÔ BG¶úÝCaØÖòt$6Úq±A*¨©$;Ž:¶c­ÏZÛÞ~ÄI!Û§ìºy|ÊKXå™fÍÇûG¥åµ‘EµÈcºÅ@ÇÍFéAH»TÃò)cí¸ô Ô‚TƒGÛ–tå6ç'}¯«FòYþ•ïòÏW«…cÿÔŒ-.Í“Ü"OÃÏÎýåpí2)û…Õ`­ÑG1ÑŽÿùkè Xa'pc¨ã:ïû§Ü)òcVM?0°oõòzæ•ËQë×€Ö+¯jÔq6z}êëíÒbÙ˃3ðq3Qz’u>¥â€ÖŠ‹vœ{°ŠÞ‚Dv;ˆ~;+kҼவNI€|U¾Ëõ;¯]>å8ûä¸ËñSúÒq½5e).-ò/á¼7¥#kA„¡,E”‰zXÚ“†þw’Ü{›ÔA]ËU(y¼¶‚Q…ÔâäUv£Ì!?G9åg#ÇÅùÍIÒ"O†kU |\m”)EÏò‹¶ò¢Å-6úqéÑUp‹®’ì8êØ–öÍÏ|ñ,_ÿI Xêë¿ùø®ÞÝJÚ(û'/õq6»ä+Á¹©eYw):®&j‡” øö‹û¯åw²ˆ…~\:t Ü¢k$3†6¬½º ¯îêÍ;i¤îZ­Ék¢zê„«Òƒk/ˆ0œuÀb£—¤‚ZJb°ãh¿±¼;ûµú¨×DFò”9ê*©%¶’úXËbí)OetX~ùFTÔ ÿ¤«‰€›‰r#$¯ëÏ’òí¹þ,»‰vX×a]5 D`GÑîÒyŠïk)—ÙÛ×É÷ŒrÉ)˽¿­½óá£ËÅ»DN/ù{^»Ýªw?lô¥yÊ}9ß»¸÷žDÛ*ôý¸tèö¹E×Ç ví¶^ÕK¾Þ¶‹=#KÝ3mÝʱ,ÇKï*Çí’Õ‹û÷S#ëV~3—b ãf£ô`d­³•>O­Ÿbý¸ôè*¸ëî vml×Yîkù²s%Uš'ðv¯ÕQå;ÛåÞx?%®Øë½óT@¾¹.õLQw>®üå0PÎw¿ó¿Ë*üý¸è¸+nfm=õ©ïxm%zP?ÆËH^z$?ùÕïÁlå^Q2KÛÛ·Ö'Ç£Ìqþû¶”—œ¤{?îêÒ¨#wT1Ár7ד…~\zt Ü‚DƒD»p”Lh=kY8',]ÍÿÔ¢®ïZ /ý‚ÃH‰ZÔG†ÆP&ïMlôãúüÖTp RI vm>æ9½—Û@žê-PÅÈu´ ey 
ª7’¥†¡Ž¼ò¬¾v\fKþ/…äã³®b™Ž›Úƒ¥žifhë+±ÑŽKRA-Xwg°ãh«ÅüDÿÔºé|Îëb‘€<-ÊÉ.«»ò)Ýb¾\öÜY—ÕDB¹‹r‹üo݆踚¨ñÊEPúgÙ%ÂËÚq_a*D‰nf íÑåüÔ…ÛqöÚdAÊ5¼<$´o²ÊåÚôqy¬¸?åâ!-ò ©—6b ãf£öèH×É å*~+í¸>윕Ä`ÇÑÆ–í·öS®ÄI#_ŠÎòðxmÅÞ~¿eys|\…Ö½o¥ÅSôBл‰Òƒ‘µÆíˆa¿kÍÛèÇ¥GÁ Xu'°£¨#+ËË»<Ķ lRÈÑ. µÄº¬Û{`ù¸~Ù…Ž³ÙRÜ¡ºÂ©^b ãf£ô`äS/ŒÌ°íõ²Ã6ÚqéA*z Ñýég3޾ i£òx»´EHòUv)k‰(¹ºbøô‡•v\W!›Fꚢ>œt:¬úª¥Ç]WJÜ??Ô싲Ўë"¤kè-Xbg°cèAü¶ÐYZ¤2i¤Fð?mÍ}´³þÙ®¶&oÇ5‚¿×K8·(o \Ô>}V1Rž³J\œº·¸§hÇ54Þ%P ÒG vЩ¯ål½X}iÑ’»>¹—ôÁgkuÜ:eVq‹¼r{! Ãf¡vèÈZLBPžVe¢÷ˆŽBD#1ØQ´ë}¾dÖ»ÆScáIìÚÏw‘»d@–vIÚó}¦Ü‡úq¿~—Ä·(ïÀŠ¡wµ!KHİ”ô˜h‡õ‚ß4ÐßYtïnÑ_Ÿ)â­ý¾x~jøæ,g½óuà©¿¯M×ß˧Æq¸EyT:™@×c / ·úûêýKiÓ¢,´ã1:5";ƒC‹ ´"ü´û³WMÁng-¯פ»>‹/ú¸èÌ˶²@å÷^ÃÌÀÇŸŸþ´ß»G2Á]^: í°´ïè鷺›!”QåûâÙc7K»j(¤>NäeÒÒc5õq#ßúøŸ¹G^Û=»j±-=šÓøxi¿kA>õ‘„>O}aý¸ôè*¸©$;Ž 8J¶'?ðÔD}ÒÈÕÖÓåf_î¾ûÖÖÓå¡NŽë ¼n+-Ê’ãT |\mÔAGÖºP†¥Ý,ØÆB·VA-H%1ØqôŒKûüJ¹ÑµØ¢ gKg<õ%Š|»­_¿Ë²>ú¸æ\®Ýã}ÅM rüéaFÊ»rv†r[•v\s.›FDegpãè˪SݶVŒ–’aå ©¾R²´g‹–ËQ¹õãºJº5RúÏ) |Ülüó/Aöƒc†ýSCjl£×µ[WA-H%1Øq´'˜|½-Ÿ'ÊÿÑ3€Œœ-¿vÜW«ºÉ?îšGÌ·zu\|úYj–Zœù,,—0ðq³Q{²ô’•ÎgV ¯‘v\Ÿy.(ÝÁ£]Kò|­qæñž{½–RÏågþ”ýVJ¶z¡8õqù™—L7ÿy¯ŸéåÞtØèkëŽì5˜Ð{—ª£S‘·ãzz4"꨻Õ߯ú-{S ÏÚ5„‘’-Õò,—gÁµ×§Â|Üâ(ý¸^öš âw‹Ì07õºOÈRÞc†¥VÛ±‰zX¯û]Cÿ;‹îÝí ÚD®5gÍ_Q‹ãS_¼f>n6jM!uHÂPrg—²ÑŽë lÕˆÒÝì8ÚØ–Z†´^;%ØÉsµ,‘JÕAÝQ8ßâKЫT”â©~\ìæ»‡BJnGu¯;åÖûa}‘Žeñª˜ëa­0è¦ûßYkïnµ·+|y"?q–[c½y}PIó²²üR‹/öúð}·t];îÏ´ YK4§Lqf ãf¢^­;Òî¿ÂPê]6e£×@WA-ºH"0ƒèÏ`µ±Þjù´B®ú<ÿÉ+Ÿ­ß¶R´”Ÿ¶ëcØ¡‘¼”¯aàãj£>‰u$ßSëƒ1¬ÚÀÚØÉ~ý‰£ŽVþ?kuí^÷CØó³,Ùjym}ˆÉÓ¢¿$¸•»ú}#Bþ.=ïž’{®»縔}jah½nçž÷º¶7ž ©=Ëê;ŸÔ½<6ÕÂÓ­0eÉO?k+y˜Mµcyö,Ÿ*ÒjrÉã½ÊÆJ¤«š,;U*BjÏ­Ýö'¯Ájq_½‹/eÓ¨|'¼ë—oKVf¤õlo ìŸëjA ­,³÷²ÅiÔRmæ[JÝኑÚóºëDÚKÒ®®s¶ò–Q~Û‹íZ횯Ÿp¤öÜóI-ÚÊoq­åÌGMôìë³µogYKß)²#µç™ï­eËÙì½:Ê3?ÔwÚË'VꇛϽՉ2ÒNÉV+÷âöz»ÈC:›ÅÜî¨Ëw–·E!MkÛ¼b¿nêYM8¶~.÷³~S> òr-oÛ×èêÛ…•)WnµkcùÝž‡BÚ4ÏN.!í’eZÚ¤½T³õЙæŸVNÀHíY—í&zµ„DöÒŠVK ¥²<°n iÃle$½û¤ìó÷•Lû”i½]Òz–«V}1ÿþiÓ§<†•3WÊ]îÖs+OÁtÏÖÒ^rJMl~ü*µ< ï.¶D\ïM!ÍAÏÒ¾tš }îöŠÁÝÎn¾1ÖM8¶­ÔÁ\ i3öh{Í– z­xÜJÔ¥È?JÜh­S6_¢´³YÞc-/ß-^÷ã(«ÚÊ©«Sý^j•ž ý×¼Õ{á“Ý}í×\Ó›Û³Öšâžü{+k,Fjϲ?B¹Ú•_ÿÓÞ¶¸ïzµ«Oxõ7Râ»·µßsêxò=¦en¶'Oì³N¼­L¨ò¡ïZ?Ò´– Eä}¶+VÉ’•«ÌÞ£Ei©bÙÒ~Ι­ü—kiï]m% °–Ÿó¾¶×e¶e¯ùAJÏõ¹ê¤tl¹çnWe_Úës´+#MêRWSùrVß»)b·ºßÏuéóu)é[!íB°~ê/§ìc´´ßÈÖRHO¾D´Ëëùi§Œ4›÷ñÿ—võðA|Tî _êi,´Êû!³KšÆâ<¹bÂö,Þ'Ù::0šÐû¡í9Ò4@ÏVæg±}ƒ!٘к˜«-c“û–·iÚ„Ô÷¼cÂt :h:ìmCÀ*¡Bl §Á¤é˜f†4; ë=—8w¹ $š¶bËNë­Ïpo£ƒBT^ßq͈úüCÆ;‰;FSÐʈ]ê¼b_›mÃÄù¨nFz°òæ›óîÐÜ,ß8 £DgºQ¸‚‚!2îéb§ÝqtÌ!u 6¤Œé mƒ‰˶ÁpSíaÆ$¤FF¬c'à°š`†)Ò¦N¡HÛzÅõÑ™ðΣ¸–‚’µs=@·C›8!9ÒÑ” Ѿx8¸ ¯bRHJ/Ñb:…§vÐ1IÁ¼BT0ooC0j8^¹t ×rhÔô¡œLRÏöéúKpsìô*(ž4nöÉÃVh³¯£DŒÍ€h:¬6È©cté˜,6Œak\ôoQøJÊVm‡—ÐèšáóÐyÑ:z+æ¢ìðíÀë£ÌJ´õqÔ·Ã_h)9tÀvÌ¥wùbVšæwåd¬ò°>´Š†š up²p³NPÀÍ¡wÓTÌÌ=¥cþO–ï%-`b¬7ß$žj`,ÂLÿûI.êmÁùvÊÅ€03Hoà]”äâÍÛ×·ç’).÷///Ï÷Ÿ?>=Þ>?<}9+~ý϶L›Ë%•¹±pòʶµóË /ÏöµÔå#æx,ᅵîpu_cÙ–¿ ˆCÿ;ÝáuíªKüétGr,ËÏ@ã1澜QѶÙRŽåéñ„Â1Ôå3ÐG­¹,O_N¸Ä0ã&iÒÛú<Öt2ÖåeC°µz1AIú¨öû¢Ò[ÝŒž±RMëòÅmôruör]—7ÜÛ×Þȱü9—…€¦±‡x¶ÚŽë­J²Ÿ/_¡qQf‚›T´ü}ÂkãZöåþí´._¾ÙÊ ªA™ër!së¾.O­J=–{±Îâ=г‡1ÿO“ÊþÑ¥kc¶×¼<Žfï5fk°QªÁÍ‘“Âèw? 
kernlab/inst/doc/kernlab.R0000644000175100001440000001050514656670132015204 0ustar hornikusers### R code from vignette source 'kernlab.Rnw'

###################################################
### code chunk number 1: preliminaries
###################################################
library(kernlab)
options(width = 70)


###################################################
### code chunk number 2: rbf1
###################################################
## create a RBF kernel function with sigma hyper-parameter 0.05
rbf <- rbfdot(sigma = 0.05)
rbf
## create two random feature vectors
x <- rnorm(10)
y <- rnorm(10)
## compute dot product between x,y
rbf(x, y)


###################################################
### code chunk number 3: kernelMatrix
###################################################
## create a polynomial kernel function of degree 2
poly <- polydot(degree = 2)
## create artificial data set
x <- matrix(rnorm(60), 6, 10)
y <- matrix(rnorm(40), 4, 10)
## compute kernel matrix
kx <- kernelMatrix(poly, x)
kxy <- kernelMatrix(poly, x, y)


###################################################
### code chunk number 4: ksvm
###################################################
## simple example using the promotergene data set
data(promotergene)
## create test and training set
tindex <- sample(1:dim(promotergene)[1], 5)
genetrain <- promotergene[-tindex, ]
genetest <- promotergene[tindex, ]
## train a support vector machine
gene <- ksvm(Class~., data=genetrain, kernel="rbfdot",
             kpar="automatic", C=60, cross=3, prob.model=TRUE)
gene
predict(gene, genetest)
predict(gene, genetest, type="probabilities")
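
## Added sketch (not generated from kernlab.Rnw): one possible way to
## summarise the model fitted in the chunk above. It assumes the 'Class'
## column of promotergene holds the true labels, and uses the cross()
## accessor for the stored 3-fold cross-validation error.
table(predict(gene, genetest), genetest$Class)
cross(gene)
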
###################################################
### code chunk number 5: kernlab.Rnw:629-635
###################################################
set.seed(123)
x <- rbind(matrix(rnorm(120),,2), matrix(rnorm(120,mean=3),,2))
y <- matrix(c(rep(1,60), rep(-1,60)))
svp <- ksvm(x, y, type="C-svc")
plot(svp, data=x)


###################################################
### code chunk number 6: rvm
###################################################
x <- seq(-20, 20, 0.5)
y <- sin(x)/x + rnorm(81, sd = 0.03)
y[41] <- 1


###################################################
### code chunk number 7: rvm2
###################################################
rvmm <- rvm(x, y, kernel="rbfdot", kpar=list(sigma=0.1))
rvmm
ytest <- predict(rvmm, x)


###################################################
### code chunk number 8: kernlab.Rnw:686-689
###################################################
plot(x, y, cex=0.5)
lines(x, ytest, col = "red")
points(x[RVindex(rvmm)], y[RVindex(rvmm)], pch=21)


###################################################
### code chunk number 9: ranking
###################################################
data(spirals)
ran <- spirals[rowSums(abs(spirals) < 0.55) == 2,]
ranked <- ranking(ran, 54, kernel = "rbfdot",
                  kpar = list(sigma = 100), edgegraph = TRUE)
ranked[54, 2] <- max(ranked[-54, 2])
c <- 1:86
op <- par(mfrow = c(1, 2), pty="s")
plot(ran)
plot(ran, cex=c[ranked[,3]]/40)


###################################################
### code chunk number 10: onlearn
###################################################
## create toy data set
x <- rbind(matrix(rnorm(90),,2), matrix(rnorm(90)+3,,2))
y <- matrix(c(rep(1,45), rep(-1,45)),,1)
## initialize onlearn object
on <- inlearn(2, kernel="rbfdot", kpar=list(sigma=0.2), type="classification")
ind <- sample(1:90, 90)
## learn one data point at the time
for(i in ind)
  on <- onlearn(on, x[i,], y[i], nu=0.03, lambda=0.1)
sign(predict(on, x))


###################################################
### code chunk number 11: kernlab.Rnw:894-897
###################################################
data(spirals)
sc <- specc(spirals, centers=2)
plot(spirals, pch=(23 - 2*sc))


###################################################
### code chunk number 12: kpca
###################################################
data(spam)
train <- sample(1:dim(spam)[1], 400)
kpc <- kpca(~., data=spam[train,-58], kernel="rbfdot",
            kpar=list(sigma=0.001), features=2)
kpcv <- pcv(kpc)
plot(rotated(kpc), col=as.integer(spam[train,58]),
     xlab="1st Principal Component", ylab="2nd Principal Component")


###################################################
### code chunk number 13: kfa
###################################################
data(promotergene)
f <- kfa(~., data=promotergene, features=2, kernel="rbfdot",
         kpar=list(sigma=0.013))
plot(predict(f,promotergene), col=as.numeric(promotergene[,1]),
     xlab="1st Feature", ylab="2nd Feature")
kernlab/inst/doc/kernlab.Rnw0000644000175100001440000014230512055335060015542 0ustar hornikusers\documentclass{A}
\usepackage{amsfonts,thumbpdf,alltt}
\newenvironment{smallverbatim}{\small\verbatim}{\endverbatim}
\newenvironment{smallexample}{\begin{alltt}\small}{\end{alltt}}
\SweaveOpts{engine=R,eps=FALSE}
%\VignetteIndexEntry{kernlab - An S4 Package for Kernel Methods in R}
%\VignetteDepends{kernlab}
%\VignetteKeywords{kernel methods, support vector machines, quadratic programming, ranking, clustering, S4, R}
%\VignettePackage{kernlab}

<<preliminaries>>=
library(kernlab)
options(width = 70)
@

\title{\pkg{kernlab} -- An \proglang{S4} Package for Kernel Methods in \proglang{R}}
\Plaintitle{kernlab - An S4 Package for Kernel Methods in R}
\author{Alexandros
Karatzoglou\\Technische Universit\"at Wien \And Alex Smola\\Australian National University, NICTA \And Kurt Hornik\\Wirtschaftsuniversit\"at Wien } \Plainauthor{Alexandros Karatzoglou, Alex Smola, Kurt Hornik} \Abstract{ \pkg{kernlab} is an extensible package for kernel-based machine learning methods in \proglang{R}. It takes advantage of \proglang{R}'s new \proglang{S4} object model and provides a framework for creating and using kernel-based algorithms. The package contains dot product primitives (kernels), implementations of support vector machines and the relevance vector machine, Gaussian processes, a ranking algorithm, kernel PCA, kernel CCA, kernel feature analysis, online kernel methods and a spectral clustering algorithm. Moreover it provides a general purpose quadratic programming solver, and an incomplete Cholesky decomposition method. } \Keywords{kernel methods, support vector machines, quadratic programming, ranking, clustering, \proglang{S4}, \proglang{R}} \Plainkeywords{kernel methods, support vector machines, quadratic programming, ranking, clustering, S4, R} \begin{document} \section{Introduction} Machine learning is all about extracting structure from data, but it is often difficult to solve problems like classification, regression and clustering in the space in which the underlying observations have been made. Kernel-based learning methods use an implicit mapping of the input data into a high dimensional feature space defined by a kernel function, i.e., a function returning the inner product $ \langle \Phi(x),\Phi(y) \rangle$ between the images of two data points $x, y$ in the feature space. The learning then takes place in the feature space, provided the learning algorithm can be entirely rewritten so that the data points only appear inside dot products with other points. This is often referred to as the ``kernel trick'' \citep{kernlab:Schoelkopf+Smola:2002}. More precisely, if a projection $\Phi: X \rightarrow H$ is used, the dot product $\langle\Phi(x),\Phi(y)\rangle$ can be represented by a kernel function~$k$ \begin{equation} \label{eq:kernel} k(x,y)= \langle \Phi(x),\Phi(y) \rangle, \end{equation} which is computationally simpler than explicitly projecting $x$ and $y$ into the feature space~$H$. One interesting property of kernel-based systems is that, once a valid kernel function has been selected, one can practically work in spaces of any dimension without paying any computational cost, since feature mapping is never effectively performed. In fact, one does not even need to know which features are being used. Another advantage is the that one can design and use a kernel for a particular problem that could be applied directly to the data without the need for a feature extraction process. This is particularly important in problems where a lot of structure of the data is lost by the feature extraction process (e.g., text processing). The inherent modularity of kernel-based learning methods allows one to use any valid kernel on a kernel-based algorithm. \subsection{Software review} The most prominent kernel based learning algorithm is without doubt the support vector machine (SVM), so the existence of many support vector machine packages comes as little surprise. 
Most of the existing SVM software is written in \proglang{C} or \proglang{C++}, e.g.\ the award winning \pkg{libsvm}\footnote{\url{http://www.csie.ntu.edu.tw/~cjlin/libsvm/}} \citep{kernlab:Chang+Lin:2001}, \pkg{SVMlight}\footnote{\url{http://svmlight.joachims.org}} \citep{kernlab:joachim:1999}, \pkg{SVMTorch}\footnote{\url{http://www.torch.ch}}, Royal Holloway Support Vector Machines\footnote{\url{http://svm.dcs.rhbnc.ac.uk}}, \pkg{mySVM}\footnote{\url{http://www-ai.cs.uni-dortmund.de/SOFTWARE/MYSVM/index.eng.html}}, and \pkg{M-SVM}\footnote{\url{http://www.loria.fr/~guermeur/}} with many packages providing interfaces to \proglang{MATLAB} (such as \pkg{libsvm}), and even some native \proglang{MATLAB} toolboxes\footnote{ \url{http://www.isis.ecs.soton.ac.uk/resources/svminfo/}}\,\footnote{ \url{http://asi.insa-rouen.fr/~arakotom/toolbox/index}}\,\footnote{ \url{http://www.cis.tugraz.at/igi/aschwaig/software.html}}. Putting SVM specific software aside and considering the abundance of other kernel-based algorithms published nowadays, there is little software available implementing a wider range of kernel methods with some exceptions like the \pkg{Spider}\footnote{\url{http://www.kyb.tuebingen.mpg.de/bs/people/spider/}} software which provides a \proglang{MATLAB} interface to various \proglang{C}/\proglang{C++} SVM libraries and \proglang{MATLAB} implementations of various kernel-based algorithms, \pkg{Torch} \footnote{\url{http://www.torch.ch}} which also includes more traditional machine learning algorithms, and the occasional \proglang{MATLAB} or \proglang{C} program found on a personal web page where an author includes code from a published paper. \subsection[R software]{\proglang{R} software} The \proglang{R} package \pkg{e1071} offers an interface to the award winning \pkg{libsvm} \citep{kernlab:Chang+Lin:2001}, a very efficient SVM implementation. \pkg{libsvm} provides a robust and fast SVM implementation and produces state of the art results on most classification and regression problems \citep{kernlab:Meyer+Leisch+Hornik:2003}. The \proglang{R} interface provided in \pkg{e1071} adds all standard \proglang{R} functionality like object orientation and formula interfaces to \pkg{libsvm}. Another SVM related \proglang{R} package which was made recently available is \pkg{klaR} \citep{kernlab:Roever:2004} which includes an interface to \pkg{SVMlight}, a popular SVM implementation along with other classification tools like Regularized Discriminant Analysis. However, most of the \pkg{libsvm} and \pkg{klaR} SVM code is in \proglang{C++}. Therefore, if one would like to extend or enhance the code with e.g.\ new kernels or different optimizers, one would have to modify the core \proglang{C++} code. \section[kernlab]{\pkg{kernlab}} \pkg{kernlab} aims to provide the \proglang{R} user with basic kernel functionality (e.g., like computing a kernel matrix using a particular kernel), along with some utility functions commonly used in kernel-based methods like a quadratic programming solver, and modern kernel-based algorithms based on the functionality that the package provides. Taking advantage of the inherent modularity of kernel-based methods, \pkg{kernlab} aims to allow the user to switch between kernels on an existing algorithm and even create and use own kernel functions for the kernel methods provided in the package. 
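
As a minimal sketch of this flexibility (the particular function and constants below are
arbitrary, purely illustrative choices), a user-defined kernel is simply an \proglang{R} function
of two vectors returning a scalar which, following the pattern used in the package documentation,
can be assigned the class \code{"kernel"} so that it can be passed to the kernel methods described
later:
\begin{verbatim}
## a user-defined kernel: an ordinary R function of two vectors
k <- function(x, y) (sum(x * y) + 1) * exp(-0.001 * sum((x - y)^2))
class(k) <- "kernel"   # register it as a kernel object

x <- rnorm(10); y <- rnorm(10)
k(x, y)
\end{verbatim}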
\subsection[S4 objects]{\proglang{S4} objects} \pkg{kernlab} uses \proglang{R}'s new object model described in ``Programming with Data'' \citep{kernlab:Chambers:1998} which is known as the \proglang{S4} class system and is implemented in the \pkg{methods} package. In contrast with the older \proglang{S3} model for objects in \proglang{R}, classes, slots, and methods relationships must be declared explicitly when using the \proglang{S4} system. The number and types of slots in an instance of a class have to be established at the time the class is defined. The objects from the class are validated against this definition and have to comply to it at any time. \proglang{S4} also requires formal declarations of methods, unlike the informal system of using function names to identify a certain method in \proglang{S3}. An \proglang{S4} method is declared by a call to \code{setMethod} along with the name and a ``signature'' of the arguments. The signature is used to identify the classes of one or more arguments of the method. Generic functions can be declared using the \code{setGeneric} function. Although such formal declarations require package authors to be more disciplined than when using the informal \proglang{S3} classes, they provide assurance that each object in a class has the required slots and that the names and classes of data in the slots are consistent. An example of a class used in \pkg{kernlab} is shown below. Typically, in a return object we want to include information on the result of the method along with additional information and parameters. Usually \pkg{kernlab}'s classes include slots for the kernel function used and the results and additional useful information. \begin{smallexample} setClass("specc", representation("vector", # the vector containing the cluster centers="matrix", # the cluster centers size="vector", # size of each cluster kernelf="function", # kernel function used withinss = "vector"), # within cluster sum of squares prototype = structure(.Data = vector(), centers = matrix(), size = matrix(), kernelf = ls, withinss = vector())) \end{smallexample} Accessor and assignment function are defined and used to access the content of each slot which can be also accessed with the \verb|@| operator. \subsection{Namespace} Namespaces were introduced in \proglang{R} 1.7.0 and provide a means for packages to control the way global variables and methods are being made available. Due to the number of assignment and accessor function involved, a namespace is used to control the methods which are being made visible outside the package. Since \proglang{S4} methods are being used, the \pkg{kernlab} namespace also imports methods and variables from the \pkg{methods} package. \subsection{Data} The \pkg{kernlab} package also includes data set which will be used to illustrate the methods included in the package. The \code{spam} data set \citep{kernlab:Hastie:2001} set collected at Hewlett-Packard Labs contains data on 2788 and 1813 e-mails classified as non-spam and spam, respectively. The 57 variables of each data vector indicate the frequency of certain words and characters in the e-mail. Another data set included in \pkg{kernlab}, the \code{income} data set \citep{kernlab:Hastie:2001}, is taken by a marketing survey in the San Francisco Bay concerning the income of shopping mall customers. It consists of 14 demographic attributes (nominal and ordinal variables) including the income and 8993 observations. 
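
For instance (an illustrative sketch only), these bundled data sets can be loaded and inspected in
the usual way:
\begin{verbatim}
data(spam)
dim(spam)     # 4601 e-mails, 57 frequency variables plus the class label
data(income)
dim(income)   # 8993 survey observations and 14 demographic attributes
\end{verbatim}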
The \code{ticdata} data set \citep{kernlab:Putten:2000} was used in the 2000 Coil Challenge and contains information on customers of an insurance company. The data consists of 86 variables and includes product usage data and socio-demographic data derived from zip area codes. The data was collected to answer the following question: Can you predict who would be interested in buying a caravan insurance policy and give an explanation why? The \code{promotergene} is a data set of E. Coli promoter gene sequences (DNA) with 106 observations and 58 variables available at the UCI Machine Learning repository. Promoters have a region where a protein (RNA polymerase) must make contact and the helical DNA sequence must have a valid conformation so that the two pieces of the contact region spatially align. The data contains DNA sequences of promoters and non-promoters. The \code{spirals} data set was created by the \code{mlbench.spirals} function in the \pkg{mlbench} package \citep{kernlab:Leisch+Dimitriadou}. This two-dimensional data set with 300 data points consists of two spirals where Gaussian noise is added to each data point. \subsection{Kernels} A kernel function~$k$ calculates the inner product of two vectors $x$, $x'$ in a given feature mapping $\Phi: X \rightarrow H$. The notion of a kernel is obviously central in the making of any kernel-based algorithm and consequently also in any software package containing kernel-based methods. Kernels in \pkg{kernlab} are \proglang{S4} objects of class \code{kernel} extending the \code{function} class with one additional slot containing a list with the kernel hyper-parameters. Package \pkg{kernlab} includes 7 different kernel classes which all contain the class \code{kernel} and are used to implement the existing kernels. These classes are used in the function dispatch mechanism of the kernel utility functions described below. Existing kernel functions are initialized by ``creator'' functions. All kernel functions take two feature vectors as parameters and return the scalar dot product of the vectors. An example of the functionality of a kernel in \pkg{kernlab}: <>= ## create a RBF kernel function with sigma hyper-parameter 0.05 rbf <- rbfdot(sigma = 0.05) rbf ## create two random feature vectors x <- rnorm(10) y <- rnorm(10) ## compute dot product between x,y rbf(x, y) @ The package includes implementations of the following kernels: \begin{itemize} \item the linear \code{vanilladot} kernel implements the simplest of all kernel functions \begin{equation} k(x,x') = \langle x, x' \rangle \end{equation} which is useful specially when dealing with large sparse data vectors~$x$ as is usually the case in text categorization. \item the Gaussian radial basis function \code{rbfdot} \begin{equation} k(x,x') = \exp(-\sigma \|x - x'\|^2) \end{equation} which is a general purpose kernel and is typically used when no further prior knowledge is available about the data. \item the polynomial kernel \code{polydot} \begin{equation} k(x, x') = \left( \mathrm{scale} \cdot \langle x, x' \rangle + \mathrm{offset} \right)^\mathrm{degree}. \end{equation} which is used in classification of images. \item the hyperbolic tangent kernel \code{tanhdot} \begin{equation} k(x, x') = \tanh \left( \mathrm{scale} \cdot \langle x, x' \rangle + \mathrm{offset} \right) \end{equation} which is mainly used as a proxy for neural networks. 
\item the Bessel function of the first kind kernel \code{besseldot} \begin{equation} k(x, x') = \frac{\mathrm{Bessel}_{(\nu+1)}^n(\sigma \|x - x'\|)} {(\|x-x'\|)^{-n(\nu+1)}}. \end{equation} is a general purpose kernel and is typically used when no further prior knowledge is available and mainly popular in the Gaussian process community. \item the Laplace radial basis kernel \code{laplacedot} \begin{equation} k(x, x') = \exp(-\sigma \|x - x'\|) \end{equation} which is a general purpose kernel and is typically used when no further prior knowledge is available. \item the ANOVA radial basis kernel \code{anovadot} performs well in multidimensional regression problems \begin{equation} k(x, x') = \left(\sum_{k=1}^{n}\exp(-\sigma(x^k-{x'}^k)^2)\right)^{d} \end{equation} where $x^k$ is the $k$th component of $x$. \end{itemize} \subsection{Kernel utility methods} The package also includes methods for computing commonly used kernel expressions (e.g., the Gram matrix). These methods are written in such a way that they take functions (i.e., kernels) and matrices (i.e., vectors of patterns) as arguments. These can be either the kernel functions already included in \pkg{kernlab} or any other function implementing a valid dot product (taking two vector arguments and returning a scalar). In case one of the already implemented kernels is used, the function calls a vectorized implementation of the corresponding function. Moreover, in the case of symmetric matrices (e.g., the dot product matrix of a Support Vector Machine) they only require one argument rather than having to pass the same matrix twice (for rows and columns). The computations for the kernels already available in the package are vectorized whenever possible which guarantees good performance and acceptable memory requirements. Users can define their own kernel by creating a function which takes two vectors as arguments (the data points) and returns a scalar (the dot product). This function can then be based as an argument to the kernel utility methods. For a user defined kernel the dispatch mechanism calls a generic method implementation which calculates the expression by passing the kernel function through a pair of \code{for} loops. The kernel methods included are: \begin{description} \item[\code{kernelMatrix}] This is the most commonly used function. It computes $k(x, x')$, i.e., it computes the matrix $K$ where $K_{ij} = k(x_i, x_j)$ and $x$ is a \emph{row} vector. In particular, \begin{verbatim} K <- kernelMatrix(kernel, x) \end{verbatim} computes the matrix $K_{ij} = k(x_i, x_j)$ where the $x_i$ are the columns of $X$ and \begin{verbatim} K <- kernelMatrix(kernel, x1, x2) \end{verbatim} computes the matrix $K_{ij} = k(x1_i, x2_j)$. \item[\code{kernelFast}] This method is different to \code{kernelMatrix} for \code{rbfdot}, \code{besseldot}, and the \code{laplacedot} kernel, which are all RBF kernels. It is identical to \code{kernelMatrix}, except that it also requires the squared norm of the first argument as additional input. It is mainly used in kernel algorithms, where columns of the kernel matrix are computed per invocation. In these cases, evaluating the norm of each column-entry as it is done on a \code{kernelMatrix} invocation on an RBF kernel, over and over again would cause significant computational overhead. Its invocation is via \begin{verbatim} K = kernelFast(kernel, x1, x2, a) \end{verbatim} Here $a$ is a vector containing the squared norms of $x1$. \item[\code{kernelMult}] is a convenient way of computing kernel expansions. 
It returns the vector $f = (f(x_1), \dots, f(x_m))$ where \begin{equation} f(x_i) = \sum_{j=1}^{m} k(x_i, x_j) \alpha_j, \mbox{~hence~} f = K \alpha. \end{equation} The need for such a function arises from the fact that $K$ may sometimes be larger than the memory available. Therefore, it is convenient to compute $K$ only in stripes and discard the latter after the corresponding part of $K \alpha$ has been computed. The parameter \code{blocksize} determines the number of rows in the stripes. In particular, \begin{verbatim} f <- kernelMult(kernel, x, alpha) \end{verbatim} computes $f_i = \sum_{j=1}^m k(x_i, x_j) \alpha_j$ and \begin{verbatim} f <- kernelMult(kernel, x1, x2, alpha) \end{verbatim} computes $f_i = \sum_{j=1}^m k(x1_i, x2_j) \alpha_j$. \item[\code{kernelPol}] is a method very similar to \code{kernelMatrix} with the only difference that rather than computing $K_{ij} = k(x_i, x_j)$ it computes $K_{ij} = y_i y_j k(x_i, x_j)$. This means that \begin{verbatim} K <- kernelPol(kernel, x, y) \end{verbatim} computes the matrix $K_{ij} = y_i y_j k(x_i, x_j)$ where the $x_i$ are the columns of $x$ and $y_i$ are elements of the vector~$y$. Moreover, \begin{verbatim} K <- kernelPol(kernel, x1, x2, y1, y2) \end{verbatim} computes the matrix $K_{ij} = y1_i y2_j k(x1_i, x2_j)$. Both \code{x1} and \code{x2} may be matrices and \code{y1} and \code{y2} vectors. \end{description} An example using these functions : <>= ## create a RBF kernel function with sigma hyper-parameter 0.05 poly <- polydot(degree=2) ## create artificial data set x <- matrix(rnorm(60), 6, 10) y <- matrix(rnorm(40), 4, 10) ## compute kernel matrix kx <- kernelMatrix(poly, x) kxy <- kernelMatrix(poly, x, y) @ \section{Kernel methods} Providing a solid base for creating kernel-based methods is part of what we are trying to achieve with this package, the other being to provide a wider range of kernel-based methods in \proglang{R}. In the rest of the paper we present the kernel-based methods available in \pkg{kernlab}. All the methods in \pkg{kernlab} can be used with any of the kernels included in the package as well as with any valid user-defined kernel. User defined kernel functions can be passed to existing kernel-methods in the \code{kernel} argument. \subsection{Support vector machine} Support vector machines \citep{kernlab:Vapnik:1998} have gained prominence in the field of machine learning and pattern classification and regression. The solutions to classification and regression problems sought by kernel-based algorithms such as the SVM are linear functions in the feature space: \begin{equation} f(x) = w^\top \Phi(x) \end{equation} for some weight vector $w \in F$. The kernel trick can be exploited in this whenever the weight vector~$w$ can be expressed as a linear combination of the training points, $w = \sum_{i=1}^{n} \alpha_i \Phi(x_i)$, implying that $f$ can be written as \begin{equation} f(x) = \sum_{i=1}^{n}\alpha_i k(x_i, x) \end{equation} A very important issue that arises is that of choosing a kernel~$k$ for a given learning task. Intuitively, we wish to choose a kernel that induces the ``right'' metric in the space. Support Vector Machines choose a function $f$ that is linear in the feature space by optimizing some criterion over the sample. 
In the case of the 2-norm Soft Margin classification the optimization problem takes the form:
\begin{eqnarray}
 \nonumber
 \mathrm{minimize} && t(w,\xi) = \frac{1}{2}{\|w\|}^2+\frac{C}{m}\sum_{i=1}^{m}\xi_i \\
 \mbox{subject to~} && y_i ( \langle x_i , w \rangle +b ) \geq 1- \xi_i \qquad (i=1,\dots,m)\\
 \nonumber
 && \xi_i \ge 0 \qquad (i=1,\dots, m)
\end{eqnarray}
Based on similar methodology, SVMs deal with the problem of novelty detection (or one-class
classification) and regression.

\pkg{kernlab}'s implementation of support vector machines, \code{ksvm}, is based on the optimizers
found in \pkg{bsvm}\footnote{\url{http://www.csie.ntu.edu.tw/~cjlin/bsvm}} \citep{kernlab:Hsu:2002}
and \pkg{libsvm} \citep{kernlab:Chang+Lin:2001} which includes a very efficient version of
Sequential Minimal Optimization (SMO). SMO decomposes the SVM Quadratic Problem (QP) without using
any numerical QP optimization steps. Instead, it solves the smallest possible optimization
problem, involving two elements of $\alpha_i$, because they must obey one linear equality
constraint. At every step, SMO chooses two $\alpha_i$ to jointly optimize, finds the optimal
values for these $\alpha_i$ analytically, thus avoiding numerical QP optimization, and updates the
SVM to reflect the new optimal values.

The SVM implementations available in \code{ksvm} include the C-SVM classification algorithm along
with the $\nu$-SVM classification formulation which is equivalent to the former but has a more
natural ($\nu$) model parameter taking values in $[0,1]$ and is proportional to the fraction of
support vectors found in the data set and the training error.

For classification problems which include more than two classes (multi-class) a one-against-one or
pairwise classification method \citep{kernlab:Knerr:1990, kernlab:Kressel:1999} is used. This
method constructs ${k \choose 2}$ classifiers where each one is trained on data from two classes.
Prediction is done by voting where each classifier gives a prediction and the class which is
predicted more often wins (``Max Wins''). This method has been shown to produce robust results
when used with SVMs \citep{kernlab:Hsu2:2002}. Furthermore, the \code{ksvm} implementation
provides the ability to produce class probabilities as output instead of class labels. This is
done by an improved implementation \citep{kernlab:Lin:2001} of Platt's a posteriori probabilities
\citep{kernlab:Platt:2000} where a sigmoid function
\begin{equation}
 P(y=1\mid f) = \frac{1}{1+ e^{Af+B}}
\end{equation}
is fitted on the decision values~$f$ of the binary SVM classifiers; $A$ and $B$ are estimated by
minimizing the negative log-likelihood function. To extend the class probabilities to the
multi-class case, the class probability output of each binary classifier is combined by the
\code{couple} method which implements methods for combining class probabilities proposed in
\citep{kernlab:Wu:2003}.

In order to create a similar probability output for regression, following
\cite{kernlab:Weng:2004}, we suppose that the SVM is trained on data from the model
\begin{equation}
 y_i = f(x_i) + \delta_i
\end{equation}
where $f(x_i)$ is the underlying function and $\delta_i$ is independent and identically
distributed random noise. Given test data $x$, the distribution of $y$ given $x$ allows one to
draw probabilistic inferences about $y$, e.g., one can construct a predictive interval
$\Phi = \Phi(x)$ such that $y \in \Phi$ with a certain probability. If $\hat{f}$ is the estimated
(predicted) function of the SVM on new data then $\eta = \eta(x) = y - \hat{f}(x)$ is the
prediction error and $y \in \Phi$ is equivalent to $\eta \in \Phi$. Empirical observation shows
that the distribution of the residuals $\eta$ can be modeled both by a Gaussian and a Laplacian
distribution with zero mean. In this implementation the Laplacian with zero mean is used:
\begin{equation}
 p(z) = \frac{1}{2\sigma}e^{-\frac{|z|}{\sigma}}
\end{equation}
Assuming that the $\eta$ are independent, the scale parameter $\sigma$ is estimated by maximizing
the likelihood. The data for the estimation is produced by a three-fold cross-validation. For the
Laplace distribution the maximum likelihood estimate is:
\begin{equation}
 \sigma = \frac{\sum_{i=1}^m|\eta_i|}{m}
\end{equation}

Another approach for multi-class classification supported by the \code{ksvm} function is the one
proposed in \cite{kernlab:Crammer:2000}. This algorithm works by solving a single optimization
problem including the data from all classes:
\begin{eqnarray}
 \nonumber
 \mathrm{minimize} && t(w_n,\xi) = \frac{1}{2}\sum_{n=1}^k{\|w_n\|}^2+\frac{C}{m}\sum_{i=1}^{m}\xi_i \\
 \mbox{subject to~} && \langle x_i , w_{y_i} \rangle - \langle x_i , w_{n} \rangle \geq b_i^n - \xi_i \qquad (i=1,\dots,m) \\
 \mbox{where} && b_i^n = 1 - \delta_{y_i,n}
\end{eqnarray}
where the decision function is
\begin{equation}
 \mathrm{argmax}_{n=1,\dots,k} \langle x_i , w_{n} \rangle
\end{equation}
This optimization problem is solved by a decomposition method proposed in \cite{kernlab:Hsu:2002}
where optimal working sets are found (that is, sets of $\alpha_i$ values which have a high
probability of being non-zero). The QP sub-problems are then solved by a modified version of the
\pkg{TRON}\footnote{\url{http://www-unix.mcs.anl.gov/~more/tron/}} \citep{kernlab:more:1999}
optimization software.

One-class classification or novelty detection \citep{kernlab:Williamson:1999, kernlab:Tax:1999},
where essentially an SVM detects outliers in a data set, is another algorithm supported by
\code{ksvm}. SVM novelty detection works by creating a spherical decision boundary around a set of
data points by a set of support vectors describing the sphere's boundary. The $\nu$ parameter is
used to control the volume of the sphere and consequently the number of outliers found. Again, the
value of $\nu$ represents the fraction of outliers found. Furthermore, $\epsilon$-SVM
\citep{kernlab:Vapnik2:1995} and $\nu$-SVM \citep{kernlab:Smola1:2000} regression are also
available.

The problem of model selection is partially addressed by an empirical observation for the popular
Gaussian RBF kernel \citep{kernlab:Caputo:2002}, where the optimal values of the $\sigma$
hyper-parameter are shown to lie between the 0.1 and 0.9 quantile of the $\|x - x'\|$ statistics.
The \code{sigest} function uses a sample of the training set to estimate these quantiles and
returns a vector containing their values. Pretty much any value within this interval leads to good
performance. An example for the \code{ksvm} function is shown below.
<>= ## simple example using the promotergene data set data(promotergene) ## create test and training set tindex <- sample(1:dim(promotergene)[1],5) genetrain <- promotergene[-tindex, ] genetest <- promotergene[tindex,] ## train a support vector machine gene <- ksvm(Class~.,data=genetrain,kernel="rbfdot",kpar="automatic",C=60,cross=3,prob.model=TRUE) gene predict(gene, genetest) predict(gene, genetest, type="probabilities") @ \begin{figure} \centering <>= set.seed(123) x <- rbind(matrix(rnorm(120),,2),matrix(rnorm(120,mean=3),,2)) y <- matrix(c(rep(1,60),rep(-1,60))) svp <- ksvm(x,y,type="C-svc") plot(svp,data=x) @ \caption{A contour plot of the SVM decision values for a toy binary classification problem using the \code{plot} function} \label{fig:ksvm Plot} \end{figure} \subsection{Relevance vector machine} The relevance vector machine \citep{kernlab:Tipping:2001} is a probabilistic sparse kernel model identical in functional form to the SVM making predictions based on a function of the form \begin{equation} y(x) = \sum_{n=1}^{N} \alpha_n K(\mathbf{x},\mathbf{x}_n) + a_0 \end{equation} where $\alpha_n$ are the model ``weights'' and $K(\cdotp,\cdotp)$ is a kernel function. It adopts a Bayesian approach to learning, by introducing a prior over the weights $\alpha$ \begin{equation} p(\alpha, \beta) = \prod_{i=1}^m N(\beta_i \mid 0 , a_i^{-1}) \mathrm{Gamma}(\beta_i\mid \beta_\beta , \alpha_\beta) \end{equation} governed by a set of hyper-parameters $\beta$, one associated with each weight, whose most probable values are iteratively estimated for the data. Sparsity is achieved because in practice the posterior distribution in many of the weights is sharply peaked around zero. Furthermore, unlike the SVM classifier, the non-zero weights in the RVM are not associated with examples close to the decision boundary, but rather appear to represent ``prototypical'' examples. These examples are termed \emph{relevance vectors}. \pkg{kernlab} currently has an implementation of the RVM based on a type~II maximum likelihood method which can be used for regression. The functions returns an \proglang{S4} object containing the model parameters along with indexes for the relevance vectors and the kernel function and hyper-parameters used. <>= x <- seq(-20, 20, 0.5) y <- sin(x)/x + rnorm(81, sd = 0.03) y[41] <- 1 @ <>= rvmm <- rvm(x, y,kernel="rbfdot",kpar=list(sigma=0.1)) rvmm ytest <- predict(rvmm, x) @ \begin{figure} \centering <>= plot(x, y, cex=0.5) lines(x, ytest, col = "red") points(x[RVindex(rvmm)],y[RVindex(rvmm)],pch=21) @ \caption{Relevance vector regression on data points created by the $sinc(x)$ function, relevance vectors are shown circled.} \label{fig:RVM sigmoid} \end{figure} \subsection{Gaussian processes} Gaussian processes \citep{kernlab:Williams:1995} are based on the ``prior'' assumption that adjacent observations should convey information about each other. In particular, it is assumed that the observed variables are normal, and that the coupling between them takes place by means of the covariance matrix of a normal distribution. Using the kernel matrix as the covariance matrix is a convenient way of extending Bayesian modeling of linear estimators to nonlinear situations. Furthermore it represents the counterpart of the ``kernel trick'' in methods minimizing the regularized risk. For regression estimation we assume that rather than observing $t(x_i)$ we observe $y_i = t(x_i) + \xi_i$ where $\xi_i$ is assumed to be independent Gaussian distributed noise with zero mean. 
The posterior distribution is given by \begin{equation} p(\mathbf{y}\mid \mathbf{t}) = \left[ \prod_ip(y_i - t(x_i)) \right] \frac{1}{\sqrt{(2\pi)^m \det(K)}} \exp \left(\frac{1}{2}\mathbf{t}^T K^{-1} \mathbf{t} \right) \end{equation} and after substituting $\mathbf{t} = K\mathbf{\alpha}$ and taking logarithms \begin{equation} \ln{p(\mathbf{\alpha} \mid \mathbf{y})} = - \frac{1}{2\sigma^2}\| \mathbf{y} - K \mathbf{\alpha} \|^2 -\frac{1}{2}\mathbf{\alpha}^T K \mathbf{\alpha} +c \end{equation} and maximizing $\ln{p(\mathbf{\alpha} \mid \mathbf{y})}$ for $\mathbf{\alpha}$ to obtain the maximum a posteriori approximation yields \begin{equation} \mathbf{\alpha} = (K + \sigma^2\mathbf{1})^{-1} \mathbf{y} \end{equation} Knowing $\mathbf{\alpha}$ allows for prediction of $y$ at a new location $x$ through $y = K(x,x_i){\mathbf{\alpha}}$. In similar fashion Gaussian processes can be used for classification. \code{gausspr} is the function in \pkg{kernlab} implementing Gaussian processes for classification and regression. \subsection{Ranking} The success of Google has vividly demonstrated the value of a good ranking algorithm in real world problems. \pkg{kernlab} includes a ranking algorithm based on work published in \citep{kernlab:Zhou:2003}. This algorithm exploits the geometric structure of the data in contrast to the more naive approach which uses the Euclidean distances or inner products of the data. Since real world data are usually highly structured, this algorithm should perform better than a simpler approach based on a Euclidean distance measure. First, a weighted network is defined on the data and an authoritative score is assigned to every point. The query points act as source nodes that continually pump their scores to the remaining points via the weighted network, and the remaining points further spread the score to their neighbors. The spreading process is repeated until convergence and the points are ranked according to the scores they received. Suppose we are given a set of data points $X = {x_1, \dots, x_{s}, x_{s+1}, \dots, x_{m}}$ in $\mathbf{R}^n$ where the first $s$ points are the query points and the rest are the points to be ranked. The algorithm works by connecting the two nearest points iteratively until a connected graph $G = (X, E)$ is obtained where $E$ is the set of edges. The affinity matrix $K$ defined e.g.\ by $K_{ij} = \exp(-\sigma\|x_i - x_j \|^2)$ if there is an edge $e(i,j) \in E$ and $0$ for the rest and diagonal elements. The matrix is normalized as $L = D^{-1/2}KD^{-1/2}$ where $D_{ii} = \sum_{j=1}^m K_{ij}$, and \begin{equation} f(t+1) = \alpha Lf(t) + (1 - \alpha)y \end{equation} is iterated until convergence, where $\alpha$ is a parameter in $[0,1)$. The points are then ranked according to their final scores $f_{i}(t_f)$. \pkg{kernlab} includes an \proglang{S4} method implementing the ranking algorithm. The algorithm can be used both with an edge-graph where the structure of the data is taken into account, and without which is equivalent to ranking the data by their distance in the projected space. \begin{figure} \centering <>= data(spirals) ran <- spirals[rowSums(abs(spirals) < 0.55) == 2,] ranked <- ranking(ran, 54, kernel = "rbfdot", kpar = list(sigma = 100), edgegraph = TRUE) ranked[54, 2] <- max(ranked[-54, 2]) c<-1:86 op <- par(mfrow = c(1, 2),pty="s") plot(ran) plot(ran, cex=c[ranked[,3]]/40) @ \caption{The points on the left are ranked according to their similarity to the upper most left point. Points with a higher rank appear bigger. 
Instead of ranking the points on simple Euclidean distance, the structure of the data is recognized and all points on the upper structure are given a higher rank, although they are further away in distance than points in the lower structure.}
\label{fig:Ranking}
\end{figure}

\subsection{Online learning with kernels}

The \code{onlearn} function in \pkg{kernlab} implements the online kernel algorithms for classification, novelty detection and regression described in \citep{kernlab:Kivinen:2004}. In batch learning, it is typically assumed that all the examples are immediately available and are drawn independently from some distribution $P$. One natural measure of quality for some $f$ in that case is the expected risk
\begin{equation}
R[f,P] := E_{(x,y)\sim P}[l(f(x),y)]
\end{equation}
Since $P$ is usually unknown, a standard approach is to instead minimize the empirical risk on a training sample $S = ((x_1,y_1), \dots, (x_m,y_m))$
\begin{equation}
R_{emp}[f,S] := \frac{1}{m}\sum_{t=1}^m l(f(x_t),y_t)
\end{equation}
Minimizing $R_{emp}[f]$ may lead to overfitting (complex functions that fit well on the training data but do not generalize to unseen data). One way to avoid this is to penalize complex functions by instead minimizing the regularized risk
\begin{equation}
R_{reg}[f,S] := R_{reg,\lambda}[f,S] := R_{emp}[f] + \frac{\lambda}{2}\|f\|_{H}^2
\end{equation}
where $\lambda > 0$ and $\|f\|_{H} = {\langle f,f \rangle}_{H}^{\frac{1}{2}}$ does indeed measure the complexity of $f$ in a sensible way. The constant $\lambda$ needs to be chosen appropriately for each problem. Since in online learning one is interested in dealing with one example at a time, the definition of an instantaneous regularized risk on a single example is needed
\begin{equation}
R_{inst}[f,x,y] := R_{inst,\lambda}[f,x,y] := R_{reg,\lambda}[f,((x,y))]
\end{equation}
The implemented algorithms are classical stochastic gradient descent algorithms performing gradient descent on the instantaneous risk. The general form of the update rule is:
\begin{equation}
f_{t+1} = f_t - \eta_t \partial_f R_{inst,\lambda}[f,x_t,y_t]|_{f=f_t}
\end{equation}
where $f_t \in H$, $\partial_f$ is shorthand for $\partial / \partial f$ (the gradient with respect to $f$) and $\eta_t > 0$ is the learning rate. Due to the learning taking place in a \textit{reproducing kernel Hilbert space} $H$, the kernel $k$ used has the property $\langle f,k(x,\cdotp)\rangle_H = f(x)$ and therefore
\begin{equation}
\partial_f l(f(x_t),y_t) = l'(f(x_t),y_t)k(x_t,\cdotp)
\end{equation}
where $l'(z,y) := \partial_z l(z,y)$. Since $\partial_f\|f\|_H^2 = 2f$ the update becomes
\begin{equation}
f_{t+1} := (1 - \eta_t\lambda)f_t -\eta_t l'(f_t(x_t),y_t)k(x_t,\cdotp)
\end{equation}
The \code{onlearn} function implements the online learning algorithm for regression, classification and novelty detection. The online nature of the algorithm requires a different approach to the use of the function. An object is used to store the state of the algorithm at each iteration $t$; this object is passed to the function as an argument and is returned at each iteration $t+1$ containing the model parameter state at this step. An empty object of class \code{onlearn} is initialized using the \code{inlearn} function.
<<>>=
## create toy data set
x <- rbind(matrix(rnorm(90),,2),matrix(rnorm(90)+3,,2))
y <- matrix(c(rep(1,45),rep(-1,45)),,1)
## initialize onlearn object
on <- inlearn(2,kernel="rbfdot",kpar=list(sigma=0.2),type="classification")
ind <- sample(1:90,90)
## learn one data point at a time
for(i in ind) on <- onlearn(on,x[i,],y[i],nu=0.03,lambda=0.1)
sign(predict(on,x))
@

\subsection{Spectral clustering}

Spectral clustering \citep{kernlab:Ng:2001} is a recently emerged, promising alternative to common clustering algorithms. In this method one uses the top eigenvectors of a matrix created by some similarity measure to cluster the data. Similarly to the ranking algorithm, an affinity matrix is created from the data as
\begin{equation}
K_{ij}=\exp(-\sigma\|x_i - x_j \|^2)
\end{equation}
and normalized as $L = D^{-1/2}KD^{-1/2}$ where $D_{ii} = \sum_{j=1}^m K_{ij}$. Then the top $k$ eigenvectors (where $k$ is the number of clusters to be found) of the affinity matrix are used to form an $m \times k$ matrix $Y$ where each row is normalized again to unit length. Treating each row of this matrix as a data point, \code{kmeans} is finally used to cluster the points.

\pkg{kernlab} includes an \proglang{S4} method called \code{specc} implementing this algorithm which can be used through a formula interface or a matrix interface. The \proglang{S4} object returned by the method extends the class ``vector'' and contains the assigned cluster for each point along with information on the centers, the size and the within-cluster sum of squares of each cluster. In case a Gaussian RBF kernel is being used, a model selection process can be used to determine the optimal value of the $\sigma$ hyper-parameter. For a good value of $\sigma$ the values of $Y$ tend to cluster tightly, and it turns out that the within-cluster sum of squares is a good indicator for the ``quality'' of the sigma parameter found. We then iterate through the sigma values to find an optimal value for $\sigma$.

\begin{figure}
\centering
<<>>=
data(spirals)
sc <- specc(spirals, centers=2)
plot(spirals, pch=(23 - 2*sc))
@
\caption{Clustering the two spirals data set with \code{specc}}
\label{fig:Spectral Clustering}
\end{figure}

\subsection{Kernel principal components analysis}

Principal component analysis (PCA) is a powerful technique for extracting structure from possibly high-dimensional datasets. PCA is an orthogonal transformation of the coordinate system in which we describe the data. The new coordinates by which we represent the data are called principal components. Kernel PCA \citep{kernlab:Schoelkopf:1998} performs a nonlinear transformation of the coordinate system by finding principal components which are nonlinearly related to the input variables. Given a set of centered observations $x_k$, $k=1,\dots,M$, $x_k \in \mathbf{R}^N$, PCA diagonalizes the covariance matrix $C = \frac{1}{M}\sum_{j=1}^Mx_jx_{j}^T$ by solving the eigenvalue problem $\lambda\mathbf{v}=C\mathbf{v}$. The same computation can be done in a dot product space $F$ which is related to the input space by a possibly nonlinear map $\Phi:\mathbf{R}^N \rightarrow F$, $x \mapsto \mathbf{X}$. Assuming that we deal with centered data and use the covariance matrix in $F$,
\begin{equation}
\hat{C}=\frac{1}{M}\sum_{j=1}^M \Phi(x_j)\Phi(x_j)^T
\end{equation}
the kernel principal components are then computed by taking the eigenvectors of the centered kernel matrix $K_{ij} = \langle \Phi(x_i),\Phi(x_j) \rangle$.
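The following chunk is a minimal illustrative sketch of this computation done ``by hand'' with \pkg{kernlab}'s kernel utilities; it is not executed when the vignette is built, and the data subset, kernel width and the normalization of the eigenvectors (dividing by the square root of the corresponding eigenvalue) are assumptions made for the example rather than a description of the internals of \code{kpca}:
<<eval=FALSE>>=
## kernel PCA "by hand" (illustrative sketch)
data(spirals)
x <- spirals[1:100, ]
m <- nrow(x)
K <- kernelMatrix(rbfdot(sigma = 5), x)   # kernel matrix K_ij = k(x_i, x_j)
## center the kernel matrix in feature space
One <- matrix(1/m, m, m)
Kc <- K - One %*% K - K %*% One + One %*% K %*% One
## eigendecomposition of the centered kernel matrix
res <- eigen(Kc, symmetric = TRUE)
## expansion coefficients and projections on the first two components
pcv <- t(t(res$vectors[, 1:2]) / sqrt(res$values[1:2]))
proj <- Kc %*% pcv
@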
\code{kpca}, the function implementing KPCA in \pkg{kernlab}, can be used both with a formula and a matrix interface, and returns an \proglang{S4} object of class \code{kpca} containing the principal components, the corresponding eigenvalues, along with the projection of the training data on the new coordinate system. Furthermore, the \code{predict} function can be used to embed new data points into the new coordinate system.

\begin{figure}
\centering
<<>>=
data(spam)
train <- sample(1:dim(spam)[1],400)
kpc <- kpca(~.,data=spam[train,-58],kernel="rbfdot",kpar=list(sigma=0.001),features=2)
kpcv <- pcv(kpc)
plot(rotated(kpc),col=as.integer(spam[train,58]),xlab="1st Principal Component",ylab="2nd Principal Component")
@
\caption{Projection of the spam data on two kernel principal components using an RBF kernel}
\label{fig:KPCA}
\end{figure}

\subsection{Kernel feature analysis}

Whilst KPCA leads to very good results, there are nevertheless some issues to be addressed. First, the computational complexity of the standard version of KPCA is high: the algorithm scales as $O(m^3)$. Secondly, the resulting feature extractors are given as a dense expansion in terms of the training patterns. Sparse solutions are often achieved in supervised learning settings by using an $l_1$ penalty on the expansion coefficients. An algorithm can be derived using the same approach in feature extraction, requiring only $n$ basis functions to compute the first $n$ features. Kernel feature analysis \citep{kernlab:Olvi:2000} is computationally simple and scales approximately one order of magnitude better on large data sets than standard KPCA. Choosing $\Omega [f] = \sum_{i=1}^m |\alpha_i |$ this yields
\begin{equation}
F_{LP} = \{ \mathbf{w} \vert \mathbf{w} = \sum_{i=1}^m \alpha_i \Phi(x_i) \;\mathrm{with}\; \sum_{i=1}^m |\alpha_i | \leq 1 \}
\end{equation}
This setting leads to the first ``principal vector'' in the $l_1$ context
\begin{equation}
\mathbf{\nu}^1 = \mathrm{argmax}_{\mathbf{\nu} \in F_{LP}} \frac{1}{m} \sum_{i=1}^m \langle \mathbf{\nu},\mathbf{\Phi}(x_i) - \frac{1}{m}\sum_{j=1}^m\mathbf{\Phi}(x_j) \rangle^2
\end{equation}
Subsequent ``principal vectors'' can be defined by enforcing optimality with respect to the remaining orthogonal subspaces. Due to the $l_1$ constraint the solution has the favorable property of being sparse in terms of the coefficients $\alpha_i$.

The function \code{kfa} in \pkg{kernlab} implements Kernel Feature Analysis by using a projection pursuit technique on a sample of the data. Results are then returned in an \proglang{S4} object.

\begin{figure}
\centering
<<>>=
data(promotergene)
f <- kfa(~.,data=promotergene,features=2,kernel="rbfdot",kpar=list(sigma=0.013))
plot(predict(f,promotergene),col=as.numeric(promotergene[,1]),xlab="1st Feature",ylab="2nd Feature")
@
\caption{Projection of the promoter gene data on two features using an RBF kernel}
\label{fig:KFA}
\end{figure}

\subsection{Kernel canonical correlation analysis}

Canonical correlation analysis (CCA) is concerned with describing the linear relations between variables. If we have two data sets $x_1$ and $x_2$, then the classical CCA attempts to find linear combinations of the variables which give the maximum correlation between the combinations. I.e., if
\begin{eqnarray*}
&& y_1 = \mathbf{w_1}\mathbf{x_1} = \sum_j w_{1j} x_{1j} \\
&& y_2 = \mathbf{w_2}\mathbf{x_2} = \sum_j w_{2j} x_{2j}
\end{eqnarray*}
one wishes to find those values of $\mathbf{w_1}$ and $\mathbf{w_2}$ which maximize the correlation between $y_1$ and $y_2$.
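As a point of reference, the chunk below is a minimal sketch of classical (linear) CCA on toy data using the base \proglang{R} function \code{cancor}; it is not executed when the vignette is built, and the toy data generation is purely illustrative. The kernel extension discussed next replaces the raw variables by their feature space images.
<<eval=FALSE>>=
## classical linear CCA on toy data (illustrative sketch)
set.seed(1)
x1 <- matrix(rnorm(60), 20, 3)
x2 <- x1 %*% matrix(rnorm(9), 3, 3) + 0.1 * matrix(rnorm(60), 20, 3)
cc <- cancor(x1, x2)
## first pair of canonical variates y1 = w1'x1 and y2 = w2'x2
y1 <- scale(x1, center = cc$xcenter, scale = FALSE) %*% cc$xcoef[, 1]
y2 <- scale(x2, center = cc$ycenter, scale = FALSE) %*% cc$ycoef[, 1]
cor(y1, y2)   # equals the first canonical correlation cc$cor[1]
@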
Similar to the KPCA algorithm, CCA can be extended and used in a dot product space~$F$ which is related to the input space by a possibly nonlinear map $\Phi:\mathbf{R}^N \rightarrow F$, $x \mapsto \mathbf{X}$ as
\begin{eqnarray*}
&& y_1 = \mathbf{w_1}\mathbf{\Phi(x_1)} = \sum_j w_{1j} \Phi(x_{1j}) \\
&& y_2 = \mathbf{w_2}\mathbf{\Phi(x_2)} = \sum_j w_{2j} \Phi(x_{2j})
\end{eqnarray*}
Following \citep{kernlab:kuss:2003}, the \pkg{kernlab} implementation of KCCA projects the data vectors on a new coordinate system using KPCA and uses linear CCA to retrieve the correlation coefficients. The \code{kcca} method in \pkg{kernlab} returns an \proglang{S4} object containing the correlation coefficients for each data set and the corresponding correlation along with the kernel used.

\subsection{Interior point code quadratic optimizer}

In many kernel based algorithms, learning implies the minimization of some risk function. Typically we have to deal with quadratic or general convex problems for support vector machines of the type
\begin{equation}
\begin{array}{ll}
\mathrm{minimize} & f(x) \\
\mbox{subject to~} & c_i(x) \leq 0 \mbox{~for all~} i \in [n].
\end{array}
\end{equation}
$f$ and $c_i$ are convex functions and $n \in \mathbf{N}$. \pkg{kernlab} provides the \proglang{S4} method \code{ipop} implementing an optimizer of the interior point family \citep{kernlab:Vanderbei:1999} which solves the quadratic programming problem
\begin{equation}
\begin{array}{ll}
\mathrm{minimize} & c^\top x+\frac{1}{2}x^\top H x \\
\mbox{subject to~} & b \leq Ax \leq b + r\\
& l \leq x \leq u \\
\end{array}
\end{equation}
This optimizer can be used in regression, classification, and novelty detection in SVMs.

\subsection{Incomplete Cholesky decomposition}

When dealing with kernel based algorithms, calculating a full kernel matrix should be avoided since it is already an $O(N^2)$ operation. Fortunately, the fact that kernel matrices are positive semidefinite is a strong constraint and good approximations can be found with small computational cost. The Cholesky decomposition factorizes a positive semidefinite $N \times N$ matrix $K$ as $K=ZZ^T$, where $Z$ is an upper triangular $N \times N$ matrix. Exploiting the fact that kernel matrices are usually of low rank, an \emph{incomplete Cholesky decomposition} \citep{kernlab:Wright:1999} finds a matrix $\tilde{Z}$ of size $N \times M$ where $M\ll N$ such that the norm of $K-\tilde{Z}\tilde{Z}^T$ is smaller than a given tolerance $\theta$. The main difference between the incomplete Cholesky decomposition and the standard Cholesky decomposition is that pivots which are below a certain threshold are simply skipped. If $L$ is the number of skipped pivots, we obtain a $\tilde{Z}$ with only $M = N - L$ columns. The algorithm works by picking a column from $K$ to be added by maximizing a lower bound on the reduction of the error of the approximation. \pkg{kernlab} has an implementation of an incomplete Cholesky factorization called \code{inchol} which computes the decomposed matrix $\tilde{Z}$ from the original data for any given kernel without the need to compute a full kernel matrix beforehand. This has the advantage that no full kernel matrix has to be stored in memory.

\section{Conclusions}

In this paper we described \pkg{kernlab}, a flexible and extensible kernel methods package for \proglang{R} with existing modern kernel algorithms along with tools for constructing new kernel based algorithms.
It provides a unified framework for using and creating kernel-based algorithms in \proglang{R} while using all of \proglang{R}'s modern facilities, like \proglang{S4} classes and namespaces. Our aim for the future is to extend the package and add more kernel-based methods as well as kernel relevant tools. Sources and binaries for the latest version of \pkg{kernlab} are available at CRAN\footnote{\url{http://CRAN.R-project.org}} under the GNU General Public License.

A shorter version of this introduction to the \proglang{R} package \pkg{kernlab} is published as \cite{kernlab:Karatzoglou+Smola+Hornik:2004} in the \emph{Journal of Statistical Software}.

\bibliography{jss}
\end{document}
kernlab/inst/COPYRIGHTS0000644000175100001440000000056312376021447014313 0ustar hornikusersCOPYRIGHT STATUS
----------------
The R code in this package is Copyright (C) 2002 Alexandros Karatzoglou
the C++ code in src/ is Copyright (C) 2002 Alexandros Karatzoglou and Chi-Jen Lin
the fast string kernel code is Copyright (C) Choon Hui Theo, SVN Vishwanathan and Alexandros Karatzoglou
MSufSort Version 2.2 is Copyright (C) 2005 Michael A Maniscalo
kernlab/build/0000755000175100001440000000000014656670132013017 5ustar hornikusers
kernlab/build/vignette.rds0000644000175100001440000000045514656670132015362 0ustar hornikusers
kernlab/build/partial.rdb0000644000175100001440000000007514656670070015147 0ustar hornikusers
kernlab/man/0000755000175100001440000000000014656670022012471 5ustar hornikusers
kernlab/man/kpca.Rd0000644000175100001440000001203014366214206013676 0ustar hornikusers\name{kpca}
\alias{kpca}
\alias{kpca,formula-method}
\alias{kpca,matrix-method}
\alias{kpca,kernelMatrix-method}
\alias{kpca,list-method}
\alias{predict,kpca-method}
\title{Kernel Principal Components Analysis}
\description{ Kernel Principal Components Analysis is a nonlinear form of principal component analysis.}
\usage{
\S4method{kpca}{formula}(x, data = NULL, na.action, ...)
\S4method{kpca}{matrix}(x, kernel = "rbfdot", kpar = list(sigma = 0.1), features = 0, th = 1e-4, na.action = na.omit, ...)
\S4method{kpca}{kernelMatrix}(x, features = 0, th = 1e-4, ...)
\S4method{kpca}{list}(x, kernel = "stringdot", kpar = list(length = 4, lambda = 0.5), features = 0, th = 1e-4, na.action = na.omit, ...)
}
\arguments{
\item{x}{the data matrix indexed by row or a formula describing the model, or a kernel Matrix of class \code{kernelMatrix}, or a list of character vectors}
\item{data}{an optional data frame containing the variables in the model (when using a formula).}
\item{kernel}{the kernel function used in training and predicting. This parameter can be set to any function, of class kernel, which computes a dot product between two vector arguments.
kernlab provides the most popular kernel functions which can be used by setting the kernel parameter to the following strings: \itemize{ \item \code{rbfdot} Radial Basis kernel function "Gaussian" \item \code{polydot} Polynomial kernel function \item \code{vanilladot} Linear kernel function \item \code{tanhdot} Hyperbolic tangent kernel function \item \code{laplacedot} Laplacian kernel function \item \code{besseldot} Bessel kernel function \item \code{anovadot} ANOVA RBF kernel function \item \code{splinedot} Spline kernel } The kernel parameter can also be set to a user defined function of class kernel by passing the function name as an argument. } \item{kpar}{the list of hyper-parameters (kernel parameters). This is a list which contains the parameters to be used with the kernel function. Valid parameters for existing kernels are : \itemize{ \item \code{sigma} inverse kernel width for the Radial Basis kernel function "rbfdot" and the Laplacian kernel "laplacedot". \item \code{degree, scale, offset} for the Polynomial kernel "polydot" \item \code{scale, offset} for the Hyperbolic tangent kernel function "tanhdot" \item \code{sigma, order, degree} for the Bessel kernel "besseldot". \item \code{sigma, degree} for the ANOVA kernel "anovadot". } Hyper-parameters for user defined kernels can be passed through the kpar parameter as well.} \item{features}{Number of features (principal components) to return. (default: 0 , all)} \item{th}{the value of the eigenvalue under which principal components are ignored (only valid when features = 0). (default : 0.0001) } \item{na.action}{A function to specify the action to be taken if \code{NA}s are found. The default action is \code{na.omit}, which leads to rejection of cases with missing values on any required variable. An alternative is \code{na.fail}, which causes an error if \code{NA} cases are found. (NOTE: If given, this argument must be named.)} \item{\dots}{ additional parameters} } \details{Using kernel functions one can efficiently compute principal components in high-dimensional feature spaces, related to input space by some non-linear map.\cr The data can be passed to the \code{kpca} function in a \code{matrix} or a \code{data.frame}, in addition \code{kpca} also supports input in the form of a kernel matrix of class \code{kernelMatrix} or as a list of character vectors where a string kernel has to be used. } \value{ An S4 object containing the principal component vectors along with the corresponding eigenvalues. \item{pcv}{a matrix containing the principal component vectors (column wise)} \item{eig}{The corresponding eigenvalues} \item{rotated}{The original data projected (rotated) on the principal components} \item{xmatrix}{The original data matrix} all the slots of the object can be accessed by accessor functions. } \note{The predict function can be used to embed new data on the new space} \references{ Schoelkopf B., A. Smola, K.-R. Mueller :\cr \emph{Nonlinear component analysis as a kernel eigenvalue problem}\cr Neural Computation 10, 1299-1319\cr \doi{10.1162/089976698300017467}. 
} \author{Alexandros Karatzoglou \cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{\code{\link{kcca}}, \code{pca}} \examples{ # another example using the iris data(iris) test <- sample(1:150,20) kpc <- kpca(~.,data=iris[-test,-5],kernel="rbfdot", kpar=list(sigma=0.2),features=2) #print the principal component vectors pcv(kpc) #plot the data projection on the components plot(rotated(kpc),col=as.integer(iris[-test,5]), xlab="1st Principal Component",ylab="2nd Principal Component") #embed remaining points emb <- predict(kpc,iris[test,-5]) points(emb,col=as.integer(iris[test,5])) } \keyword{cluster} kernlab/man/kpca-class.Rd0000644000175100001440000000455712117363140015002 0ustar hornikusers\name{kpca-class} \docType{class} \alias{kpca-class} \alias{rotated} \alias{eig,kpca-method} \alias{kcall,kpca-method} \alias{kernelf,kpca-method} \alias{pcv,kpca-method} \alias{rotated,kpca-method} \alias{xmatrix,kpca-method} \title{Class "kpca"} \description{ The Kernel Principal Components Analysis class} \section{Objects of class "kpca"}{ Objects can be created by calls of the form \code{new("kpca", ...)}. or by calling the \code{kpca} function. } \section{Slots}{ \describe{ \item{\code{pcv}:}{Object of class \code{"matrix"} containing the principal component vectors } \item{\code{eig}:}{Object of class \code{"vector"} containing the corresponding eigenvalues} \item{\code{rotated}:}{Object of class \code{"matrix"} containing the projection of the data on the principal components} \item{\code{kernelf}:}{Object of class \code{"function"} containing the kernel function used} \item{\code{kpar}:}{Object of class \code{"list"} containing the kernel parameters used } \item{\code{xmatrix}:}{Object of class \code{"matrix"} containing the data matrix used } \item{\code{kcall}:}{Object of class \code{"ANY"} containing the function call } \item{\code{n.action}:}{Object of class \code{"ANY"} containing the action performed on NA } } } \section{Methods}{ \describe{ \item{eig}{\code{signature(object = "kpca")}: returns the eigenvalues } \item{kcall}{\code{signature(object = "kpca")}: returns the performed call} \item{kernelf}{\code{signature(object = "kpca")}: returns the used kernel function} \item{pcv}{\code{signature(object = "kpca")}: returns the principal component vectors } \item{predict}{\code{signature(object = "kpca")}: embeds new data } \item{rotated}{\code{signature(object = "kpca")}: returns the projected data} \item{xmatrix}{\code{signature(object = "kpca")}: returns the used data matrix } } } \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{ \code{\link{ksvm-class}}, \code{\link{kcca-class}} } \examples{ # another example using the iris data(iris) test <- sample(1:50,20) kpc <- kpca(~.,data=iris[-test,-5],kernel="rbfdot", kpar=list(sigma=0.2),features=2) #print the principal component vectors pcv(kpc) rotated(kpc) kernelf(kpc) eig(kpc) } \keyword{classes} kernlab/man/kcca-class.Rd0000644000175100001440000000345511304023134014752 0ustar hornikusers\name{kcca-class} \docType{class} \alias{kcca-class} \alias{kcor} \alias{xcoef} \alias{ycoef} %%\alias{yvar} %%\alias{xvar} \alias{kcor,kcca-method} \alias{xcoef,kcca-method} \alias{xvar,kcca-method} \alias{ycoef,kcca-method} \alias{yvar,kcca-method} \title{Class "kcca"} \description{The "kcca" class } \section{Objects from the Class}{ Objects can be created by calls of the form \code{new("kcca", ...)}. or by the calling the \code{kcca} function. 
} \section{Slots}{ \describe{ \item{\code{kcor}:}{Object of class \code{"vector"} describing the correlations} \item{\code{xcoef}:}{Object of class \code{"matrix"} estimated coefficients for the \code{x} variables} \item{\code{ycoef}:}{Object of class \code{"matrix"} estimated coefficients for the \code{y} variables } %% \item{\code{xvar}:}{Object of class \code{"matrix"} holds the %% canonical variates for \code{x}} %% \item{\code{yvar}:}{Object of class \code{"matrix"} holds the %% canonical variates for \code{y}} } } \section{Methods}{ \describe{ \item{kcor}{\code{signature(object = "kcca")}: returns the correlations} \item{xcoef}{\code{signature(object = "kcca")}: returns the estimated coefficients for the \code{x} variables} \item{ycoef}{\code{signature(object = "kcca")}: returns the estimated coefficients for the \code{y} variables } %% \item{xvar}{\code{signature(object = "kcca")}: returns the canonical %% variates for \code{x}} %% \item{yvar}{\code{signature(object = "kcca")}: returns the canonical %% variates for \code{y}} } } \author{Alexandros Karatzoglou \cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{ \code{\link{kcca}}, \code{\link{kpca-class}} } \examples{ ## dummy data x <- matrix(rnorm(30),15) y <- matrix(rnorm(30),15) kcca(x,y,ncomps=2) } \keyword{classes} kernlab/man/musk.Rd0000644000175100001440000000257011304023134013722 0ustar hornikusers\name{musk} \alias{musk} \docType{data} \title{Musk data set} \description{ This dataset describes a set of 92 molecules of which 47 are judged by human experts to be musks and the remaining 45 molecules are judged to be non-musks. } \usage{data(musk)} \format{ A data frame with 476 observations on the following 167 variables. Variables 1-162 are "distance features" along rays. The distances are measured in hundredths of Angstroms. The distances may be negative or positive, since they are actually measured relative to an origin placed along each ray. The origin was defined by a "consensus musk" surface that is no longer used. Hence, any experiments with the data should treat these feature values as lying on an arbitrary continuous scale. In particular, the algorithm should not make any use of the zero point or the sign of each feature value. Variable 163 is the distance of the oxygen atom in the molecule to a designated point in 3-space. This is also called OXY-DIS. Variable 164 is the X-displacement from the designated point. Variable 165 is the Y-displacement from the designated point. Variable 166 is the Z-displacement from the designated point. Class: 0 for non-musk, and 1 for musk } \source{ UCI Machine Learning data repository \cr } \examples{ data(musk) muskm <- ksvm(Class~.,data=musk,kernel="rbfdot",C=1000) muskm } \keyword{datasets} kernlab/man/kcca.Rd0000644000175100001440000000724113561515565013671 0ustar hornikusers\name{kcca} \alias{kcca} \alias{kcca,matrix-method} \title{Kernel Canonical Correlation Analysis} \description{ Computes the canonical correlation analysis in feature space. } \usage{ \S4method{kcca}{matrix}(x, y, kernel="rbfdot", kpar=list(sigma=0.1), gamma = 0.1, ncomps = 10, ...) } %- maybe also 'usage' for other objects documented here. \arguments{ \item{x}{a matrix containing data index by row} \item{y}{a matrix containing data index by row} \item{kernel}{the kernel function used in training and predicting. This parameter can be set to any function, of class kernel, which computes a inner product in feature space between two vector arguments. 
kernlab provides the most popular kernel functions which can be used by setting the kernel parameter to the following strings: \itemize{ \item \code{rbfdot} Radial Basis kernel function "Gaussian" \item \code{polydot} Polynomial kernel function \item \code{vanilladot} Linear kernel function \item \code{tanhdot} Hyperbolic tangent kernel function \item \code{laplacedot} Laplacian kernel function \item \code{besseldot} Bessel kernel function \item \code{anovadot} ANOVA RBF kernel function \item \code{splinedot} Spline kernel } The kernel parameter can also be set to a user defined function of class kernel by passing the function name as an argument. } \item{kpar}{the list of hyper-parameters (kernel parameters). This is a list which contains the parameters to be used with the kernel function. Valid parameters for existing kernels are : \itemize{ \item \code{sigma} inverse kernel width for the Radial Basis kernel function "rbfdot" and the Laplacian kernel "laplacedot". \item \code{degree, scale, offset} for the Polynomial kernel "polydot" \item \code{scale, offset} for the Hyperbolic tangent kernel function "tanhdot" \item \code{sigma, order, degree} for the Bessel kernel "besseldot". \item \code{sigma, degree} for the ANOVA kernel "anovadot". } Hyper-parameters for user defined kernels can be passed through the kpar parameter as well.} \item{gamma}{regularization parameter (default : 0.1)} \item{ncomps}{number of canonical components (default : 10) } \item{\dots}{additional parameters for the \code{kpca} function} } \details{ The kernel version of canonical correlation analysis. Kernel Canonical Correlation Analysis (KCCA) is a non-linear extension of CCA. Given two random variables, KCCA aims at extracting the information which is shared by the two random variables. More precisely given \eqn{x} and \eqn{y} the purpose of KCCA is to provide nonlinear mappings \eqn{f(x)} and \eqn{g(y)} such that their correlation is maximized. } \value{ An S4 object containing the following slots: \item{kcor}{Correlation coefficients in feature space} \item{xcoef}{estimated coefficients for the \code{x} variables in the feature space} \item{ycoef}{estimated coefficients for the \code{y} variables in the feature space} %% \item{xvar}{The canonical variates for \code{x}} %% \item{yvar}{The canonical variates for \code{y}} } \references{ Malte Kuss, Thore Graepel \cr \emph{The Geometry Of Kernel Canonical Correlation Analysis}\cr \url{https://www.microsoft.com/en-us/research/publication/the-geometry-of-kernel-canonical-correlation-analysis/}} \author{ Alexandros Karatzoglou \cr \email{alexandros.karatzoglou@ci.tuwien.ac.at} } \seealso{\code{\link{cancor}}, \code{\link{kpca}}, \code{\link{kfa}}, \code{\link{kha}}} \examples{ ## dummy data x <- matrix(rnorm(30),15) y <- matrix(rnorm(30),15) kcca(x,y,ncomps=2) } \keyword{multivariate} kernlab/man/lssvm.Rd0000644000175100001440000002010212117365064014114 0ustar hornikusers\name{lssvm} \docType{methods} \alias{lssvm} \alias{lssvm-methods} \alias{lssvm,formula-method} \alias{lssvm,vector-method} \alias{lssvm,matrix-method} \alias{lssvm,list-method} \alias{lssvm,kernelMatrix-method} \alias{show,lssvm-method} \alias{coef,lssvm-method} \alias{predict,lssvm-method} \title{Least Squares Support Vector Machine} \description{ The \code{lssvm} function is an implementation of the Least Squares SVM. \code{lssvm} includes a reduced version of Least Squares SVM using a decomposition of the kernel matrix which is calculated by the \code{csi} function. 
} \usage{ \S4method{lssvm}{formula}(x, data=NULL, ..., subset, na.action = na.omit, scaled = TRUE) \S4method{lssvm}{vector}(x, ...) \S4method{lssvm}{matrix}(x, y, scaled = TRUE, kernel = "rbfdot", kpar = "automatic", type = NULL, tau = 0.01, reduced = TRUE, tol = 0.0001, rank = floor(dim(x)[1]/3), delta = 40, cross = 0, fit = TRUE, ..., subset, na.action = na.omit) \S4method{lssvm}{kernelMatrix}(x, y, type = NULL, tau = 0.01, tol = 0.0001, rank = floor(dim(x)[1]/3), delta = 40, cross = 0, fit = TRUE, ...) \S4method{lssvm}{list}(x, y, scaled = TRUE, kernel = "stringdot", kpar = list(length=4, lambda = 0.5), type = NULL, tau = 0.01, reduced = TRUE, tol = 0.0001, rank = floor(dim(x)[1]/3), delta = 40, cross = 0, fit = TRUE, ..., subset) } \arguments{ \item{x}{a symbolic description of the model to be fit, a matrix or vector containing the training data when a formula interface is not used or a \code{kernelMatrix} or a list of character vectors.} \item{data}{an optional data frame containing the variables in the model. By default the variables are taken from the environment which `lssvm' is called from.} \item{y}{a response vector with one label for each row/component of \code{x}. Can be either a factor (for classification tasks) or a numeric vector (for classification or regression - currently nor supported -).} \item{scaled}{A logical vector indicating the variables to be scaled. If \code{scaled} is of length 1, the value is recycled as many times as needed and all non-binary variables are scaled. Per default, data are scaled internally to zero mean and unit variance. The center and scale values are returned and used for later predictions.} \item{type}{Type of problem. Either "classification" or "regression". Depending on whether \code{y} is a factor or not, the default setting for \code{type} is "classification" or "regression" respectively, but can be overwritten by setting an explicit value. (regression is currently not supported)\cr} \item{kernel}{the kernel function used in training and predicting. This parameter can be set to any function, of class kernel, which computes a dot product between two vector arguments. kernlab provides the most popular kernel functions which can be used by setting the kernel parameter to the following strings: \itemize{ \item \code{rbfdot} Radial Basis kernel "Gaussian" \item \code{polydot} Polynomial kernel \item \code{vanilladot} Linear kernel \item \code{tanhdot} Hyperbolic tangent kernel \item \code{laplacedot} Laplacian kernel \item \code{besseldot} Bessel kernel \item \code{anovadot} ANOVA RBF kernel \item \code{splinedot} Spline kernel \item \code{stringdot} String kernel } Setting the kernel parameter to "matrix" treats \code{x} as a kernel matrix calling the \code{kernelMatrix} interface.\cr The kernel parameter can also be set to a user defined function of class kernel by passing the function name as an argument. } \item{kpar}{ the list of hyper-parameters (kernel parameters). This is a list which contains the parameters to be used with the kernel function. For valid parameters for existing kernels are : \itemize{ \item \code{sigma} inverse kernel width for the Radial Basis kernel function "rbfdot" and the Laplacian kernel "laplacedot". \item \code{degree, scale, offset} for the Polynomial kernel "polydot" \item \code{scale, offset} for the Hyperbolic tangent kernel function "tanhdot" \item \code{sigma, order, degree} for the Bessel kernel "besseldot". \item \code{sigma, degree} for the ANOVA kernel "anovadot". 
\item \code{length, lambda, normalized} for the "stringdot" kernel where length is the length of the strings considered, lambda the decay factor and normalized a logical parameter determining if the kernel evaluations should be normalized. } Hyper-parameters for user defined kernels can be passed through the kpar parameter as well.\cr \code{kpar} can also be set to the string "automatic" which uses the heuristics in \code{\link{sigest}} to calculate a good \code{sigma} value for the Gaussian RBF or Laplace kernel, from the data. (default = "automatic"). } \item{tau}{the regularization parameter (default 0.01) } \item{reduced}{if set to \code{FALSE} the full linear problem of the lssvm is solved, when \code{TRUE} a reduced method using \code{csi} is used.} \item{rank}{the maximal rank of the decomposed kernel matrix, see \code{csi}} \item{delta}{number of columns of cholesky performed in advance, see \code{csi} (default 40)} \item{tol}{tolerance of termination criterion for the \code{csi} function, lower tolerance leads to more precise approximation but may increase the training time and the decomposed matrix size (default: 0.0001)} \item{fit}{indicates whether the fitted values should be computed and included in the model or not (default: 'TRUE')} \item{cross}{if a integer value k>0 is specified, a k-fold cross validation on the training data is performed to assess the quality of the model: the Mean Squared Error for regression} \item{subset}{An index vector specifying the cases to be used in the training sample. (NOTE: If given, this argument must be named.)} \item{na.action}{A function to specify the action to be taken if \code{NA}s are found. The default action is \code{na.omit}, which leads to rejection of cases with missing values on any required variable. An alternative is \code{na.fail}, which causes an error if \code{NA} cases are found. (NOTE: If given, this argument must be named.)} \item{\dots}{ additional parameters} } \details{Least Squares Support Vector Machines are reformulation to the standard SVMs that lead to solving linear KKT systems. The algorithm is based on the minimization of a classical penalized least-squares cost function. The current implementation approximates the kernel matrix by an incomplete Cholesky factorization obtained by the \code{\link{csi}} function, thus the solution is an approximation to the exact solution of the lssvm optimization problem. The quality of the solution depends on the approximation and can be influenced by the "rank" , "delta", and "tol" parameters. } \value{ An S4 object of class \code{"lssvm"} containing the fitted model, Accessor functions can be used to access the slots of the object (see examples) which include: \item{alpha}{the parameters of the \code{"lssvm"}} \item{coef}{the model coefficients (identical to alpha)} \item{b}{the model offset.} \item{xmatrix}{the training data used by the model} } \references{ J. A. K. Suykens and J. Vandewalle\cr \emph{Least Squares Support Vector Machine Classifiers}\cr Neural Processing Letters vol. 
9, issue 3, June 1999\cr } \author{Alexandros Karatzoglou \cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{\code{\link{ksvm}}, \code{\link{gausspr}}, \code{\link{csi}} } \examples{ ## simple example data(iris) lir <- lssvm(Species~.,data=iris) lir lirr <- lssvm(Species~.,data= iris, reduced = FALSE) lirr ## Using the kernelMatrix interface iris <- unique(iris) rbf <- rbfdot(0.5) k <- kernelMatrix(rbf, as.matrix(iris[,-5])) klir <- lssvm(k, iris[, 5]) klir pre <- predict(klir, k) } \keyword{classif} \keyword{nonlinear} \keyword{methods} kernlab/man/as.kernelMatrix.Rd0000644000175100001440000000230411304023134016005 0ustar hornikusers\name{as.kernelMatrix} \docType{methods} \alias{kernelMatrix-class} \alias{as.kernelMatrix} \alias{as.kernelMatrix-methods} \alias{as.kernelMatrix,matrix-method} \title{Assing kernelMatrix class to matrix objects} \description{\code{as.kernelMatrix} in package \pkg{kernlab} can be used to coerce the kernelMatrix class to matrix objects representing a kernel matrix. These matrices can then be used with the kernelMatrix interfaces which most of the functions in \pkg{kernlab} support.} \usage{ \S4method{as.kernelMatrix}{matrix}(x, center = FALSE) } \arguments{ \item{x}{matrix to be assigned the \code{kernelMatrix} class } \item{center}{center the kernel matrix in feature space (default: FALSE) } } \author{ Alexandros Karatzoglou \cr \email{alexandros.karatzoglou@ci.tuwien.ac.at} } \seealso{\code{\link{kernelMatrix}}, \code{\link{dots}}} \keyword{methods} \examples{ ## Create toy data x <- rbind(matrix(rnorm(10),,2),matrix(rnorm(10,mean=3),,2)) y <- matrix(c(rep(1,5),rep(-1,5))) ### Use as.kernelMatrix to label the cov. matrix as a kernel matrix ### which is eq. to using a linear kernel K <- as.kernelMatrix(crossprod(t(x))) K svp2 <- ksvm(K, y, type="C-svc") svp2 } kernlab/man/csi-class.Rd0000644000175100001440000000545411304023134014630 0ustar hornikusers\name{csi-class} \docType{class} \alias{csi-class} \alias{Q} \alias{R} \alias{predgain} \alias{truegain} \alias{diagresidues,csi-method} \alias{maxresiduals,csi-method} \alias{pivots,csi-method} \alias{predgain,csi-method} \alias{truegain,csi-method} \alias{Q,csi-method} \alias{R,csi-method} \title{Class "csi"} \description{The reduced Cholesky decomposition object} \section{Objects from the Class}{Objects can be created by calls of the form \code{new("csi", ...)}. or by calling the \code{csi} function.} \section{Slots}{ \describe{ \item{\code{.Data}:}{Object of class \code{"matrix"} contains the decomposed matrix} \item{\code{pivots}:}{Object of class \code{"vector"} contains the pivots performed} \item{\code{diagresidues}:}{Object of class \code{"vector"} contains the diagonial residues} \item{\code{maxresiduals}:}{Object of class \code{"vector"} contains the maximum residues} \item{predgain}{Object of class \code{"vector"} contains the predicted gain before adding each column} \item{truegain}{Object of class \code{"vector"} contains the actual gain after adding each column} \item{Q}{Object of class \code{"matrix"} contains Q from the QR decomposition of the kernel matrix} \item{R}{Object of class \code{"matrix"} contains R from the QR decomposition of the kernel matrix} } } \section{Extends}{ Class \code{"matrix"}, directly. 
} \section{Methods}{ \describe{ \item{diagresidues}{\code{signature(object = "csi")}: returns the diagonial residues} \item{maxresiduals}{\code{signature(object = "csi")}: returns the maximum residues} \item{pivots}{\code{signature(object = "csi")}: returns the pivots performed} \item{predgain}{\code{signature(object = "csi")}: returns the predicted gain before adding each column} \item{truegain}{\code{signature(object = "csi")}: returns the actual gain after adding each column} \item{Q}{\code{signature(object = "csi")}: returns Q from the QR decomposition of the kernel matrix} \item{R}{\code{signature(object = "csi")}: returns R from the QR decomposition of the kernel matrix} } } \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{ \code{\link{csi}}, \code{\link{inchol-class}}} \examples{ data(iris) ## create multidimensional y matrix yind <- t(matrix(1:3,3,150)) ymat <- matrix(0, 150, 3) ymat[yind==as.integer(iris[,5])] <- 1 datamatrix <- as.matrix(iris[,-5]) # initialize kernel function rbf <- rbfdot(sigma=0.1) rbf Z <- csi(datamatrix,ymat, kernel=rbf, rank = 30) dim(Z) pivots(Z) # calculate kernel matrix K <- crossprod(t(Z)) # difference between approximated and real kernel matrix (K - kernelMatrix(kernel=rbf, datamatrix))[6,] } \keyword{classes} kernlab/man/lssvm-class.Rd0000644000175100001440000001040611304023134015207 0ustar hornikusers\name{lssvm-class} \docType{class} \alias{lssvm-class} \alias{alpha,lssvm-method} \alias{b,lssvm-method} \alias{cross,lssvm-method} \alias{error,lssvm-method} \alias{kcall,lssvm-method} \alias{kernelf,lssvm-method} \alias{kpar,lssvm-method} \alias{param,lssvm-method} \alias{lev,lssvm-method} \alias{type,lssvm-method} \alias{alphaindex,lssvm-method} \alias{xmatrix,lssvm-method} \alias{ymatrix,lssvm-method} \alias{scaling,lssvm-method} \alias{nSV,lssvm-method} \title{Class "lssvm"} \description{The Gaussian Processes object } \section{Objects from the Class}{ Objects can be created by calls of the form \code{new("lssvm", ...)}. 
or by calling the \code{lssvm} function } \section{Slots}{ \describe{ \item{\code{kernelf}:}{Object of class \code{"kfunction"} contains the kernel function used} \item{\code{kpar}:}{Object of class \code{"list"} contains the kernel parameter used } \item{\code{param}:}{Object of class \code{"list"} contains the regularization parameter used.} \item{\code{kcall}:}{Object of class \code{"call"} contains the used function call } \item{\code{type}:}{Object of class \code{"character"} contains type of problem } \item{\code{coef}:}{Object of class \code{"ANY"} contains the model parameter } \item{\code{terms}:}{Object of class \code{"ANY"} contains the terms representation of the symbolic model used (when using a formula)} \item{\code{xmatrix}:}{Object of class \code{"matrix"} containing the data matrix used } \item{\code{ymatrix}:}{Object of class \code{"output"} containing the response matrix} \item{\code{fitted}:}{Object of class \code{"output"} containing the fitted values } \item{\code{b}:}{Object of class \code{"numeric"} containing the offset } \item{\code{lev}:}{Object of class \code{"vector"} containing the levels of the response (in case of classification) } \item{\code{scaling}:}{Object of class \code{"ANY"} containing the scaling information performed on the data} \item{\code{nclass}:}{Object of class \code{"numeric"} containing the number of classes (in case of classification) } \item{\code{alpha}:}{Object of class \code{"listI"} containing the computes alpha values } \item{\code{alphaindex}}{Object of class \code{"list"} containing the indexes for the alphas in various classes (in multi-class problems).} \item{\code{error}:}{Object of class \code{"numeric"} containing the training error} \item{\code{cross}:}{Object of class \code{"numeric"} containing the cross validation error} \item{\code{n.action}:}{Object of class \code{"ANY"} containing the action performed in NA } \item{\code{nSV}:}{Object of class \code{"numeric"} containing the number of model parameters } } } \section{Methods}{ \describe{ \item{alpha}{\code{signature(object = "lssvm")}: returns the alpha vector} \item{cross}{\code{signature(object = "lssvm")}: returns the cross validation error } \item{error}{\code{signature(object = "lssvm")}: returns the training error } \item{fitted}{\code{signature(object = "vm")}: returns the fitted values } \item{kcall}{\code{signature(object = "lssvm")}: returns the call performed} \item{kernelf}{\code{signature(object = "lssvm")}: returns the kernel function used} \item{kpar}{\code{signature(object = "lssvm")}: returns the kernel parameter used} \item{param}{\code{signature(object = "lssvm")}: returns the regularization parameter used} \item{lev}{\code{signature(object = "lssvm")}: returns the response levels (in classification) } \item{type}{\code{signature(object = "lssvm")}: returns the type of problem} \item{scaling}{\code{signature(object = "ksvm")}: returns the scaling values } \item{xmatrix}{\code{signature(object = "lssvm")}: returns the data matrix used} \item{ymatrix}{\code{signature(object = "lssvm")}: returns the response matrix used} } } \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{ \code{\link{lssvm}}, \code{\link{ksvm-class}} } \examples{ # train model data(iris) test <- lssvm(Species~.,data=iris,var=2) test alpha(test) error(test) lev(test) } \keyword{classes} kernlab/man/csi.Rd0000644000175100001440000001231012560414652013530 0ustar hornikusers\name{csi} \docType{methods} \alias{csi} \alias{csi-methods} 
\alias{csi,matrix-method} \title{Cholesky decomposition with Side Information} \description{ The \code{csi} function in \pkg{kernlab} is an implementation of an incomplete Cholesky decomposition algorithm which exploits side information (e.g., classification labels, regression responses) to compute a low rank decomposition of a kernel matrix from the data. } \usage{ \S4method{csi}{matrix}(x, y, kernel="rbfdot", kpar=list(sigma=0.1), rank, centering = TRUE, kappa = 0.99 ,delta = 40 ,tol = 1e-5) } \arguments{ \item{x}{The data matrix indexed by row} \item{y}{the classification labels or regression responses. In classification y is a \eqn{m \times n} matrix where \eqn{m} the number of data and \eqn{n} the number of classes \eqn{y} and \eqn{y_i} is 1 if the corresponding x belongs to class i.} \item{kernel}{the kernel function used in training and predicting. This parameter can be set to any function, of class \code{kernel}, which computes the inner product in feature space between two vector arguments. kernlab provides the most popular kernel functions which can be used by setting the kernel parameter to the following strings: \itemize{ \item \code{rbfdot} Radial Basis kernel function "Gaussian" \item \code{polydot} Polynomial kernel function \item \code{vanilladot} Linear kernel function \item \code{tanhdot} Hyperbolic tangent kernel function \item \code{laplacedot} Laplacian kernel function \item \code{besseldot} Bessel kernel function \item \code{anovadot} ANOVA RBF kernel function \item \code{splinedot} Spline kernel \item \code{stringdot} String kernel } The kernel parameter can also be set to a user defined function of class kernel by passing the function name as an argument. } \item{kpar}{the list of hyper-parameters (kernel parameters). This is a list which contains the parameters to be used with the kernel function. Valid parameters for existing kernels are : \itemize{ \item \code{sigma} inverse kernel width for the Radial Basis kernel function "rbfdot" and the Laplacian kernel "laplacedot". \item \code{degree, scale, offset} for the Polynomial kernel "polydot" \item \code{scale, offset} for the Hyperbolic tangent kernel function "tanhdot" \item \code{sigma, order, degree} for the Bessel kernel "besseldot". \item \code{sigma, degree} for the ANOVA kernel "anovadot". } Hyper-parameters for user defined kernels can be passed through the kpar parameter as well. } \item{rank}{maximal rank of the computed kernel matrix} \item{centering}{if \code{TRUE} centering is performed (default: TRUE)} \item{kappa}{trade-off between approximation of K and prediction of Y (default: 0.99)} \item{delta}{number of columns of cholesky performed in advance (default: 40)} \item{tol}{minimum gain at each iteration (default: 1e-4)} } \details{An incomplete cholesky decomposition calculates \eqn{Z} where \eqn{K= ZZ'} \eqn{K} being the kernel matrix. Since the rank of a kernel matrix is usually low, \eqn{Z} tends to be smaller then the complete kernel matrix. The decomposed matrix can be used to create memory efficient kernel-based algorithms without the need to compute and store a complete kernel matrix in memory. \cr \code{csi} uses the class labels, or regression responses to compute a more appropriate approximation for the problem at hand considering the additional information from the response variable. } \value{ An S4 object of class "csi" which is an extension of the class "matrix". 
The object is the decomposed kernel matrix along with the slots : \item{pivots}{Indices on which pivots where done} \item{diagresidues}{Residuals left on the diagonal} \item{maxresiduals}{Residuals picked for pivoting} \item{predgain}{predicted gain before adding each column} \item{truegain}{actual gain after adding each column} \item{Q}{QR decomposition of the kernel matrix} \item{R}{QR decomposition of the kernel matrix} slots can be accessed either by \code{object@slot} or by accessor functions with the same name (e.g., \code{pivots(object))}} \references{ Francis R. Bach, Michael I. Jordan\cr \emph{Predictive low-rank decomposition for kernel methods.}\cr Proceedings of the Twenty-second International Conference on Machine Learning (ICML) 2005\cr \url{http://www.di.ens.fr/~fbach/bach_jordan_csi.pdf} } \author{Alexandros Karatzoglou (based on Matlab code by Francis Bach)\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{\code{\link{inchol}}, \code{\link{chol}}, \code{\link{csi-class}}} \examples{ data(iris) ## create multidimensional y matrix yind <- t(matrix(1:3,3,150)) ymat <- matrix(0, 150, 3) ymat[yind==as.integer(iris[,5])] <- 1 datamatrix <- as.matrix(iris[,-5]) # initialize kernel function rbf <- rbfdot(sigma=0.1) rbf Z <- csi(datamatrix,ymat, kernel=rbf, rank = 30) dim(Z) pivots(Z) # calculate kernel matrix K <- crossprod(t(Z)) # difference between approximated and real kernel matrix (K - kernelMatrix(kernel=rbf, datamatrix))[6,] } \keyword{methods} \keyword{algebra} \keyword{array} kernlab/man/promotergene.Rd0000644000175100001440000000271714656670036015502 0ustar hornikusers\name{promotergene} \alias{promotergene} \docType{data} \title{E. coli promoter gene sequences (DNA)} \description{ Promoters have a region where a protein (RNA polymerase) must make contact and the helical DNA sequence must have a valid conformation so that the two pieces of the contact region spatially align. The data contains DNA sequences of promoters and non-promoters. } \usage{data(promotergene)} \format{ A data frame with 106 observations and 58 variables. The first variable \code{Class} is a factor with levels \code{+} for a promoter gene and \code{-} for a non-promoter gene. The remaining 57 variables \code{V2 to V58} are factors describing the sequence. The DNA bases are coded as follows: \code{a} adenine \code{c} cytosine \code{g} guanine \code{t} thymine } \source{ \doi{10.24432/C5S01D} } \references{ Towell, G., Shavlik, J. and Noordewier, M. \cr \emph{Refinement of Approximate Domain Theories by Knowledge-Based Artificial Neural Networks.} \cr In Proceedings of the Eighth National Conference on Artificial Intelligence (AAAI-90) } \examples{ data(promotergene) ## Create classification model using Gaussian Processes prom <- gausspr(Class~.,data=promotergene,kernel="rbfdot", kpar=list(sigma=0.02),cross=4) prom ## Create model using Support Vector Machines promsv <- ksvm(Class~.,data=promotergene,kernel="laplacedot", kpar="automatic",C=60,cross=4) promsv } \keyword{datasets} kernlab/man/ksvm.Rd0000644000175100001440000003646514366221170013750 0ustar hornikusers\name{ksvm} \alias{ksvm} \alias{ksvm,formula-method} \alias{ksvm,vector-method} \alias{ksvm,matrix-method} \alias{ksvm,kernelMatrix-method} \alias{ksvm,list-method} \alias{show,ksvm-method} \alias{coef,ksvm-method} \title{Support Vector Machines} \description{ Support Vector Machines are an excellent tool for classification, novelty detection, and regression. 
\code{ksvm} supports the well known C-svc, nu-svc, (classification) one-class-svc (novelty) eps-svr, nu-svr (regression) formulations along with native multi-class classification formulations and the bound-constraint SVM formulations.\cr \code{ksvm} also supports class-probabilities output and confidence intervals for regression. } \usage{ \S4method{ksvm}{formula}(x, data = NULL, ..., subset, na.action = na.omit, scaled = TRUE) \S4method{ksvm}{vector}(x, ...) \S4method{ksvm}{matrix}(x, y = NULL, scaled = TRUE, type = NULL, kernel ="rbfdot", kpar = "automatic", C = 1, nu = 0.2, epsilon = 0.1, prob.model = FALSE, class.weights = NULL, cross = 0, fit = TRUE, cache = 40, tol = 0.001, shrinking = TRUE, ..., subset, na.action = na.omit) \S4method{ksvm}{kernelMatrix}(x, y = NULL, type = NULL, C = 1, nu = 0.2, epsilon = 0.1, prob.model = FALSE, class.weights = NULL, cross = 0, fit = TRUE, cache = 40, tol = 0.001, shrinking = TRUE, ...) \S4method{ksvm}{list}(x, y = NULL, type = NULL, kernel = "stringdot", kpar = list(length = 4, lambda = 0.5), C = 1, nu = 0.2, epsilon = 0.1, prob.model = FALSE, class.weights = NULL, cross = 0, fit = TRUE, cache = 40, tol = 0.001, shrinking = TRUE, ..., na.action = na.omit) } \arguments{ \item{x}{a symbolic description of the model to be fit. When not using a formula x can be a matrix or vector containing the training data or a kernel matrix of class \code{kernelMatrix} of the training data or a list of character vectors (for use with the string kernel). Note, that the intercept is always excluded, whether given in the formula or not.} \item{data}{an optional data frame containing the training data, when using a formula. By default the data is taken from the environment which `ksvm' is called from.} \item{y}{a response vector with one label for each row/component of \code{x}. Can be either a factor (for classification tasks) or a numeric vector (for regression).} \item{scaled}{A logical vector indicating the variables to be scaled. If \code{scaled} is of length 1, the value is recycled as many times as needed and all non-binary variables are scaled. Per default, data are scaled internally (both \code{x} and \code{y} variables) to zero mean and unit variance. The center and scale values are returned and used for later predictions.} \item{type}{\code{ksvm} can be used for classification , for regression, or for novelty detection. Depending on whether \code{y} is a factor or not, the default setting for \code{type} is \code{C-svc} or \code{eps-svr}, respectively, but can be overwritten by setting an explicit value.\cr Valid options are: \itemize{ \item \code{C-svc} C classification \item \code{nu-svc} nu classification \item \code{C-bsvc} bound-constraint svm classification \item \code{spoc-svc} Crammer, Singer native multi-class \item \code{kbb-svc} Weston, Watkins native multi-class \item \code{one-svc} novelty detection \item \code{eps-svr} epsilon regression \item \code{nu-svr} nu regression \item \code{eps-bsvr} bound-constraint svm regression } } \item{kernel}{the kernel function used in training and predicting. This parameter can be set to any function, of class kernel, which computes the inner product in feature space between two vector arguments (see \code{\link{kernels}}). 
\cr kernlab provides the most popular kernel functions which can be used by setting the kernel parameter to the following strings: \itemize{ \item \code{rbfdot} Radial Basis kernel "Gaussian" \item \code{polydot} Polynomial kernel \item \code{vanilladot} Linear kernel \item \code{tanhdot} Hyperbolic tangent kernel \item \code{laplacedot} Laplacian kernel \item \code{besseldot} Bessel kernel \item \code{anovadot} ANOVA RBF kernel \item \code{splinedot} Spline kernel \item \code{stringdot} String kernel } Setting the kernel parameter to "matrix" treats \code{x} as a kernel matrix calling the \code{kernelMatrix} interface.\cr The kernel parameter can also be set to a user defined function of class kernel by passing the function name as an argument. } \item{kpar}{the list of hyper-parameters (kernel parameters). This is a list which contains the parameters to be used with the kernel function. For valid parameters for existing kernels are : \itemize{ \item \code{sigma} inverse kernel width for the Radial Basis kernel function "rbfdot" and the Laplacian kernel "laplacedot". \item \code{degree, scale, offset} for the Polynomial kernel "polydot" \item \code{scale, offset} for the Hyperbolic tangent kernel function "tanhdot" \item \code{sigma, order, degree} for the Bessel kernel "besseldot". \item \code{sigma, degree} for the ANOVA kernel "anovadot". \item \code{length, lambda, normalized} for the "stringdot" kernel where length is the length of the strings considered, lambda the decay factor and normalized a logical parameter determining if the kernel evaluations should be normalized. } Hyper-parameters for user defined kernels can be passed through the kpar parameter as well. In the case of a Radial Basis kernel function (Gaussian) kpar can also be set to the string "automatic" which uses the heuristics in \code{\link{sigest}} to calculate a good \code{sigma} value for the Gaussian RBF or Laplace kernel, from the data. (default = "automatic").} \item{C}{cost of constraints violation (default: 1) this is the `C'-constant of the regularization term in the Lagrange formulation.} \item{nu}{parameter needed for \code{nu-svc}, \code{one-svc}, and \code{nu-svr}. The \code{nu} parameter sets the upper bound on the training error and the lower bound on the fraction of data points to become Support Vectors (default: 0.2).} \item{epsilon}{epsilon in the insensitive-loss function used for \code{eps-svr}, \code{nu-svr} and \code{eps-bsvm} (default: 0.1)} \item{prob.model}{if set to \code{TRUE} builds a model for calculating class probabilities or in case of regression, calculates the scaling parameter of the Laplacian distribution fitted on the residuals. Fitting is done on output data created by performing a 3-fold cross-validation on the training data. For details see references. (default: \code{FALSE})} \item{class.weights}{a named vector of weights for the different classes, used for asymmetric class sizes. Not all factor levels have to be supplied (default weight: 1). 
All components have to be named.} \item{cache}{cache memory in MB (default 40)} \item{tol}{tolerance of termination criterion (default: 0.001)} \item{shrinking}{option whether to use the shrinking-heuristics (default: \code{TRUE})} \item{cross}{if an integer value k>0 is specified, a k-fold cross validation on the training data is performed to assess the quality of the model: the accuracy rate for classification and the Mean Squared Error for regression} \item{fit}{indicates whether the fitted values should be computed and included in the model or not (default: \code{TRUE})} \item{\dots}{additional parameters for the low level fitting function} \item{subset}{An index vector specifying the cases to be used in the training sample. (NOTE: If given, this argument must be named.)} \item{na.action}{A function to specify the action to be taken if \code{NA}s are found. The default action is \code{na.omit}, which leads to rejection of cases with missing values on any required variable. An alternative is \code{na.fail}, which causes an error if \code{NA} cases are found. (NOTE: If given, this argument must be named.)} } \value{ An S4 object of class \code{"ksvm"} containing the fitted model. Accessor functions can be used to access the slots of the object (see examples) which include: \item{alpha}{The resulting support vectors (alpha vector), possibly scaled.} \item{alphaindex}{The index of the resulting support vectors in the data matrix. Note that this index refers to the pre-processed data (after the possible effect of \code{na.omit} and \code{subset})} \item{coef}{The corresponding coefficients times the training labels.} \item{b}{The negative intercept.} \item{nSV}{The number of Support Vectors} \item{obj}{The value of the objective function. In case of one-against-one classification this is a vector of values} \item{error}{Training error} \item{cross}{Cross validation error (when cross > 0)} \item{prob.model}{Contains the width of the Laplacian fitted on the residuals in case of regression, or the parameters of the sigmoid fitted on the decision values in case of classification.} } \details{ \code{ksvm} uses John Platt's SMO algorithm for solving the SVM QP problem in most SVM formulations. On the \code{spoc-svc}, \code{kbb-svc}, \code{C-bsvc} and \code{eps-bsvr} formulations a chunking algorithm based on the TRON QP solver is used. \cr For multiclass-classification with \eqn{k} classes, \eqn{k > 2}, \code{ksvm} uses the `one-against-one'-approach, in which \eqn{k(k-1)/2} binary classifiers are trained; the appropriate class is found by a voting scheme. The \code{spoc-svc} and the \code{kbb-svc} formulations deal with the multiclass-classification problems by solving a single quadratic problem involving all the classes.\cr If the predictor variables include factors, the formula interface must be used to get a correct model matrix. \cr In classification, when \code{prob.model} is \code{TRUE}, a 3-fold cross validation is performed on the data and a sigmoid function is fitted on the resulting decision values \eqn{f}.
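The sketch below is purely illustrative of this calibration idea (it is not the internal \code{ksvm} code, and the variables \code{f}, \code{y} and \code{platt} are made up for the example): fitting the sigmoid \eqn{P(y=1|f) = 1/(1+exp(a f + b))} amounts to a logistic regression of the held-out labels on the decision values.
\preformatted{
## toy decision values f and 0/1 labels y, standing in for the
## cross-validated SVM outputs
set.seed(1)
f <- c(rnorm(50, mean = -1), rnorm(50, mean = 1))
y <- rep(c(0, 1), each = 50)
## logistic fit of y on f plays the role of the sigmoid
platt <- glm(y ~ f, family = binomial())
head(predict(platt, type = "response"))  # calibrated class probabilities
}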
The data can be passed to the \code{ksvm} function in a \code{matrix} or a \code{data.frame}; in addition, \code{ksvm} also supports input in the form of a kernel matrix of class \code{kernelMatrix} or as a list of character vectors where a string kernel has to be used.\cr The \code{plot} function for binary classification \code{ksvm} objects displays a contour plot of the decision values with the corresponding support vectors highlighted.\cr The predict function can return class probabilities for classification problems by setting the \code{type} parameter to "probabilities". \cr The problem of model selection is partially addressed by an empirical observation for the RBF kernels (Gaussian, Laplace) where the optimal values of the \eqn{sigma} width parameter are shown to lie between the 0.1 and 0.9 quantiles of the \eqn{\|x - x'\|} statistics. When using an RBF kernel and setting \code{kpar} to "automatic", \code{ksvm} uses the \code{sigest} function to estimate the quantiles and uses the median of the values. } \note{Data is scaled internally by default, usually yielding better results.} \references{ \itemize{ \item Chang Chih-Chung, Lin Chih-Jen\cr \emph{LIBSVM: a library for Support Vector Machines}\cr \url{https://www.csie.ntu.edu.tw/~cjlin/libsvm/} \item Chih-Wei Hsu, Chih-Jen Lin\cr \emph{BSVM} \url{https://www.csie.ntu.edu.tw/~cjlin/bsvm/} \item J. Platt\cr \emph{Probabilistic outputs for support vector machines and comparison to regularized likelihood methods} \cr Advances in Large Margin Classifiers, A. Smola, P. Bartlett, B. Schoelkopf and D. Schuurmans, Eds. Cambridge, MA: MIT Press, 2000. \item H.-T. Lin, C.-J. Lin and R. C. Weng\cr \emph{A note on Platt's probabilistic outputs for support vector machines}\cr \url{https://www.csie.ntu.edu.tw/~htlin/paper/doc/plattprob.pdf} \item C.-W. Hsu and C.-J. Lin \cr \emph{A comparison of methods for multi-class support vector machines}\cr IEEE Transactions on Neural Networks, 13(2002) 415-425.\cr \url{https://www.csie.ntu.edu.tw/~cjlin/papers/multisvm.pdf} \item K. Crammer, Y. Singer\cr \emph{On the learnability and design of output codes for multiclass problems}\cr Computational Learning Theory, 35-46, 2000.\cr \url{http://www.learningtheory.org/colt2000/papers/CrammerSinger.pdf} \item J. Weston, C. Watkins\cr \emph{Multi-class support vector machines}. Technical Report CSD-TR-98-04, Royal Holloway, University of London, Department of Computer Science.
} } \author{ Alexandros Karatzoglou (SMO optimizers in C++ by Chih-Chung Chang & Chih-Jen Lin)\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at} } \seealso{\code{\link{predict.ksvm}}, \code{\link{ksvm-class}}, \code{\link{couple}} } \keyword{methods} \keyword{regression} \keyword{nonlinear} \keyword{classif} \keyword{neural} \examples{ ## simple example using the spam data set data(spam) ## create test and training set index <- sample(1:dim(spam)[1]) spamtrain <- spam[index[1:floor(dim(spam)[1]/2)], ] spamtest <- spam[index[((ceiling(dim(spam)[1]/2)) + 1):dim(spam)[1]], ] ## train a support vector machine filter <- ksvm(type~.,data=spamtrain,kernel="rbfdot", kpar=list(sigma=0.05),C=5,cross=3) filter ## predict mail type on the test set mailtype <- predict(filter,spamtest[,-58]) ## Check results table(mailtype,spamtest[,58]) ## Another example with the famous iris data data(iris) ## Create a kernel function using the build in rbfdot function rbf <- rbfdot(sigma=0.1) rbf ## train a bound constraint support vector machine irismodel <- ksvm(Species~.,data=iris,type="C-bsvc", kernel=rbf,C=10,prob.model=TRUE) irismodel ## get fitted values fitted(irismodel) ## Test on the training set with probabilities as output predict(irismodel, iris[,-5], type="probabilities") ## Demo of the plot function x <- rbind(matrix(rnorm(120),,2),matrix(rnorm(120,mean=3),,2)) y <- matrix(c(rep(1,60),rep(-1,60))) svp <- ksvm(x,y,type="C-svc") plot(svp,data=x) ### Use kernelMatrix K <- as.kernelMatrix(crossprod(t(x))) svp2 <- ksvm(K, y, type="C-svc") svp2 # test data xtest <- rbind(matrix(rnorm(20),,2),matrix(rnorm(20,mean=3),,2)) # test kernel matrix i.e. inner/kernel product of test data with # Support Vectors Ktest <- as.kernelMatrix(crossprod(t(xtest),t(x[SVindex(svp2), ]))) predict(svp2, Ktest) #### Use custom kernel k <- function(x,y) {(sum(x*y) +1)*exp(-0.001*sum((x-y)^2))} class(k) <- "kernel" data(promotergene) ## train svm using custom kernel gene <- ksvm(Class~.,data=promotergene[c(1:20, 80:100),],kernel=k, C=5,cross=5) gene #### Use text with string kernels data(reuters) is(reuters) tsv <- ksvm(reuters,rlabels,kernel="stringdot", kpar=list(length=5),cross=3,C=10) tsv ## regression # create data x <- seq(-20,20,0.1) y <- sin(x)/x + rnorm(401,sd=0.03) # train support vector machine regm <- ksvm(x,y,epsilon=0.01,kpar=list(sigma=16),cross=3) plot(x,y,type="l") lines(x,predict(regm,x),col="red") } kernlab/man/rvm.Rd0000644000175100001440000001565514221633644013575 0ustar hornikusers\name{rvm} \alias{rvm} \alias{rvm-methods} \alias{rvm,formula-method} \alias{rvm,list-method} \alias{rvm,vector-method} \alias{rvm,kernelMatrix-method} \alias{rvm,matrix-method} \alias{show,rvm-method} \alias{predict,rvm-method} \alias{coef,rvm-method} \title{Relevance Vector Machine} \description{ The Relevance Vector Machine is a Bayesian model for regression and classification of identical functional form to the support vector machine. The \code{rvm} function currently supports only regression. } \usage{ \S4method{rvm}{formula}(x, data=NULL, ..., subset, na.action = na.omit) \S4method{rvm}{vector}(x, ...) \S4method{rvm}{matrix}(x, y, type="regression", kernel="rbfdot", kpar="automatic", alpha= ncol(as.matrix(x)), var=0.1, var.fix=FALSE, iterations=100, verbosity = 0, tol = .Machine$double.eps, minmaxdiff = 1e-3, cross = 0, fit = TRUE, ... 
, subset, na.action = na.omit) \S4method{rvm}{list}(x, y, type = "regression", kernel = "stringdot", kpar = list(length = 4, lambda = 0.5), alpha = 5, var = 0.1, var.fix = FALSE, iterations = 100, verbosity = 0, tol = .Machine$double.eps, minmaxdiff = 1e-3, cross = 0, fit = TRUE, ..., subset, na.action = na.omit) } \arguments{ \item{x}{a symbolic description of the model to be fit. When not using a formula x can be a matrix or vector containing the training data or a kernel matrix of class \code{kernelMatrix} of the training data or a list of character vectors (for use with the string kernel). Note, that the intercept is always excluded, whether given in the formula or not.} \item{data}{an optional data frame containing the variables in the model. By default the variables are taken from the environment which `rvm' is called from.} \item{y}{a response vector with one label for each row/component of \code{x}. Can be either a factor (for classification tasks) or a numeric vector (for regression).} \item{type}{\code{rvm} can only be used for regression at the moment.} \item{kernel}{the kernel function used in training and predicting. This parameter can be set to any function, of class kernel, which computes a dot product between two vector arguments. kernlab provides the most popular kernel functions which can be used by setting the kernel parameter to the following strings: \itemize{ \item \code{rbfdot} Radial Basis kernel "Gaussian" \item \code{polydot} Polynomial kernel \item \code{vanilladot} Linear kernel \item \code{tanhdot} Hyperbolic tangent kernel \item \code{laplacedot} Laplacian kernel \item \code{besseldot} Bessel kernel \item \code{anovadot} ANOVA RBF kernel \item \code{splinedot} Spline kernel \item \code{stringdot} String kernel } The kernel parameter can also be set to a user defined function of class kernel by passing the function name as an argument. } \item{kpar}{the list of hyper-parameters (kernel parameters). This is a list which contains the parameters to be used with the kernel function. For valid parameters for existing kernels are : \itemize{ \item \code{sigma} inverse kernel width for the Radial Basis kernel function "rbfdot" and the Laplacian kernel "laplacedot". \item \code{degree, scale, offset} for the Polynomial kernel "polydot" \item \code{scale, offset} for the Hyperbolic tangent kernel function "tanhdot" \item \code{sigma, order, degree} for the Bessel kernel "besseldot". \item \code{sigma, degree} for the ANOVA kernel "anovadot". \item \code{length, lambda, normalized} for the "stringdot" kernel where length is the length of the strings considered, lambda the decay factor and normalized a logical parameter determining if the kernel evaluations should be normalized. } Hyper-parameters for user defined kernels can be passed through the kpar parameter as well. In the case of a Radial Basis kernel function (Gaussian) kpar can also be set to the string "automatic" which uses the heuristics in \code{\link{sigest}} to calculate a good \code{sigma} value for the Gaussian RBF or Laplace kernel, from the data. (default = "automatic").} \item{alpha}{The initial alpha vector. Can be either a vector of length equal to the number of data points or a single number.} \item{var}{the initial noise variance} \item{var.fix}{Keep noise variance fix during iterations (default: FALSE)} \item{iterations}{Number of iterations allowed (default: 100)} \item{tol}{tolerance of termination criterion} \item{minmaxdiff}{termination criteria. 
Stop when max difference is equal to this parameter (default: 1e-3) } \item{verbosity}{print information on algorithm convergence (default = FALSE)} \item{fit}{indicates whether the fitted values should be computed and included in the model or not (default: TRUE)} \item{cross}{if an integer value k>0 is specified, a k-fold cross validation on the training data is performed to assess the quality of the model: the Mean Squared Error for regression} \item{subset}{An index vector specifying the cases to be used in the training sample. (NOTE: If given, this argument must be named.)} \item{na.action}{A function to specify the action to be taken if \code{NA}s are found. The default action is \code{na.omit}, which leads to rejection of cases with missing values on any required variable. An alternative is \code{na.fail}, which causes an error if \code{NA} cases are found. (NOTE: If given, this argument must be named.)} \item{\dots}{ additional parameters} } \details{The Relevance Vector Machine typically leads to sparser models than the SVM. It also performs better in many cases (especially in regression). } \value{ An S4 object of class "rvm" containing the fitted model. Accessor functions can be used to access the slots of the object which include: \item{alpha}{The resulting relevance vectors} \item{alphaindex}{ The index of the resulting relevance vectors in the data matrix} \item{nRV}{Number of relevance vectors} \item{RVindex}{The indexes of the relevance vectors} \item{error}{Training error (if \code{fit = TRUE})} ... } \references{ Tipping, M. E.\cr \emph{Sparse Bayesian learning and the relevance vector machine}\cr Journal of Machine Learning Research 1, 211-244\cr \url{https://www.jmlr.org/papers/volume1/tipping01a/tipping01a.pdf} } \author{ Alexandros Karatzoglou \cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{ \code{\link{ksvm}}} \examples{ # create data x <- seq(-20,20,0.1) y <- sin(x)/x + rnorm(401,sd=0.05) # train relevance vector machine foo <- rvm(x, y) foo # print relevance vectors alpha(foo) RVindex(foo) # predict and plot ytest <- predict(foo, x) plot(x, y, type ="l") lines(x, ytest, col="red") } \keyword{regression} \keyword{nonlinear} kernlab/man/ipop.Rd0000644000175100001440000000532014221633756013730 0ustar hornikusers\name{ipop} \alias{ipop} \alias{ipop,ANY,matrix-method} \title{Quadratic Programming Solver} \description{ ipop solves the quadratic programming problem:\cr \eqn{\min(c'*x + 1/2 * x' * H * x)}\cr subject to: \cr \eqn{b <= A * x <= b + r}\cr \eqn{l <= x <= u} } \usage{ ipop(c, H, A, b, l, u, r, sigf = 7, maxiter = 40, margin = 0.05, bound = 10, verb = 0) } \arguments{ \item{c}{Vector or one column matrix appearing in the quadratic function} \item{H}{square matrix appearing in the quadratic function, or the decomposed form \eqn{Z} of the \eqn{H} matrix where \eqn{Z} is a \eqn{n x m} matrix with \eqn{n > m} and \eqn{ZZ' = H}.} \item{A}{Matrix defining the constraints under which we minimize the quadratic function} \item{b}{Vector or one column matrix defining the constraints} \item{l}{Lower bound vector or one column matrix} \item{u}{Upper bound vector or one column matrix} \item{r}{Vector or one column matrix defining the constraints} \item{sigf}{Precision (default: 7 significant figures)} \item{maxiter}{Maximum number of iterations} \item{margin}{how close we get to the constraints} \item{bound}{Clipping bound for the variables} \item{verb}{Display convergence information during runtime} } \details{ ipop uses an interior point method to solve the quadratic
programming problem. \cr The \eqn{H} matrix can also be provided in the decomposed form \eqn{Z} where \eqn{ZZ' = H} in that case the Sherman Morrison Woodbury formula is used internally. } \value{ An S4 object with the following slots \item{primal}{Vector containing the primal solution of the quadratic problem} \item{dual}{The dual solution of the problem} \item{how}{Character string describing the type of convergence} all slots can be accessed through accessor functions (see example) } \references{ R. J. Vanderbei\cr \emph{LOQO: An interior point code for quadratic programming}\cr Optimization Methods and Software 11, 451-484, 1999 \cr \url{https://vanderbei.princeton.edu/ps/loqo5.pdf} } \author{Alexandros Karatzoglou (based on Matlab code by Alex Smola) \cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{\code{solve.QP}, \code{\link{inchol}}, \code{\link{csi}}} \examples{ ## solve the Support Vector Machine optimization problem data(spam) ## sample a scaled part (500 points) of the spam data set m <- 500 set <- sample(1:dim(spam)[1],m) x <- scale(as.matrix(spam[,-58]))[set,] y <- as.integer(spam[set,58]) y[y==2] <- -1 ##set C parameter and kernel C <- 5 rbf <- rbfdot(sigma = 0.1) ## create H matrix etc. H <- kernelPol(rbf,x,,y) c <- matrix(rep(-1,m)) A <- t(y) b <- 0 l <- matrix(rep(0,m)) u <- matrix(rep(C,m)) r <- 0 sv <- ipop(c,H,A,b,l,u,r) sv dual(sv) } \keyword{optimize} kernlab/man/spirals.Rd0000644000175100001440000000054311304023134014416 0ustar hornikusers\name{spirals} \alias{spirals} \title{Spirals Dataset} \description{A toy data set representing two spirals with Gaussian noise. The data was created with the \code{mlbench.spirals} function in \code{mlbench}. } \usage{data(spirals)} \format{ A matrix with 300 observations and 2 variables. } \examples{ data(spirals) plot(spirals) } \keyword{datasets} kernlab/man/inlearn.Rd0000644000175100001440000000600712117362575014414 0ustar hornikusers\name{inlearn} \alias{inlearn} \alias{inlearn,numeric-method} \title{Onlearn object initialization} \description{ Online Kernel Algorithm object \code{onlearn} initialization function. } \usage{ \S4method{inlearn}{numeric}(d, kernel = "rbfdot", kpar = list(sigma = 0.1), type = "novelty", buffersize = 1000) } \arguments{ \item{d}{the dimensionality of the data to be learned} \item{kernel}{the kernel function used in training and predicting. This parameter can be set to any function, of class kernel, which computes a dot product between two vector arguments. kernlab provides the most popular kernel functions which can be used by setting the kernel parameter to the following strings: \itemize{ \item \code{rbfdot} Radial Basis kernel function "Gaussian" \item \code{polydot} Polynomial kernel function \item \code{vanilladot} Linear kernel function \item \code{tanhdot} Hyperbolic tangent kernel function \item \code{laplacedot} Laplacian kernel function \item \code{besseldot} Bessel kernel function \item \code{anovadot} ANOVA RBF kernel function } The kernel parameter can also be set to a user defined function of class kernel by passing the function name as an argument. } \item{kpar}{the list of hyper-parameters (kernel parameters). This is a list which contains the parameters to be used with the kernel function. For valid parameters for existing kernels are : \itemize{ \item \code{sigma} inverse kernel width for the Radial Basis kernel function "rbfdot" and the Laplacian kernel "laplacedot". 
\item \code{degree, scale, offset} for the Polynomial kernel "polydot" \item \code{scale, offset} for the Hyperbolic tangent kernel function "tanhdot" \item \code{sigma, order, degree} for the Bessel kernel "besseldot". \item \code{sigma, degree} for the ANOVA kernel "anovadot". } Hyper-parameters for user defined kernels can be passed through the \code{kpar} parameter as well.} \item{type}{the type of problem to be learned by the online algorithm: \code{classification}, \code{regression}, \code{novelty}} \item{buffersize}{the size of the buffer to be used} } \details{ The \code{inlearn} function is used to initialize a blank \code{onlearn} object. } \value{ The function returns an \code{S4} object of class \code{onlearn} that can be used by the \code{onlearn} function. } \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{ \code{\link{onlearn}}, \code{\link{onlearn-class}} } \examples{ ## create toy data set x <- rbind(matrix(rnorm(100),,2),matrix(rnorm(100)+3,,2)) y <- matrix(c(rep(1,50),rep(-1,50)),,1) ## initialize onlearn object on <- inlearn(2, kernel = "rbfdot", kpar = list(sigma = 0.2), type = "classification") ## learn one data point at a time for(i in sample(1:100,100)) on <- onlearn(on,x[i,],y[i],nu=0.03,lambda=0.1) sign(predict(on,x)) } \keyword{classif} \keyword{neural} \keyword{regression} \keyword{ts} kernlab/man/couple.Rd0000644000175100001440000000372214366220445014251 0ustar hornikusers\name{couple} \alias{couple} \title{Probabilities Coupling function} \description{ \code{couple} is used to link class-probability estimates produced by pairwise coupling in multi-class classification problems. } \usage{ couple(probin, coupler = "minpair") } \arguments{ \item{probin}{ The pairwise coupled class-probability estimates} \item{coupler}{The type of coupler to use. Currently \code{minpair}, \code{pkpd} and \code{vote} are supported (see reference for more details). If \code{vote} is selected the returned value is a primitive estimate based on the given votes.} } \details{ As binary classification problems are much easier to solve, many techniques exist to decompose multi-class classification problems into many binary classification problems (voting, error codes, etc.). Pairwise coupling (one against one) constructs a rule for discriminating between every pair of classes and then selecting the class with the most winning two-class decisions. By using Platt's probabilities output for SVM one can get a class probability for each of the \eqn{k(k-1)/2} models created in the pairwise classification. The couple method implements various techniques to combine these probabilities. } \value{ A matrix with the resulting probability estimates. } \references{ Ting-Fan Wu, Chih-Jen Lin, Ruby C.
Weng\cr \emph{Probability Estimates for Multi-class Classification by Pairwise Coupling}\cr Neural Information Processing Symposium 2003 \cr \url{https://papers.neurips.cc/paper/2454-probability-estimates-for-multi-class-classification-by-pairwise-coupling.pdf} } \author{Alexandros Karatzoglou \cr \email{alexandros.karatzoglou@ci.tuwien.ac.at} } \seealso{ \code{\link{predict.ksvm}}, \code{\link{ksvm}}} \examples{ ## create artificial pairwise probabilities pairs <- matrix(c(0.82,0.12,0.76,0.1,0.9,0.05),2) couple(pairs) couple(pairs, coupler="pkpd") couple(pairs, coupler ="vote") } \keyword{classif} kernlab/man/kmmd.Rd0000644000175100001440000001224114366220452013704 0ustar hornikusers\name{kmmd} \alias{kmmd} \alias{kmmd,matrix-method} \alias{kmmd,list-method} \alias{kmmd,kernelMatrix-method} \alias{show,kmmd-method} \alias{H0} \alias{Asymbound} \alias{Radbound} \alias{mmdstats} \alias{AsympH0} \title{Kernel Maximum Mean Discrepancy.} \description{The Kernel Maximum Mean Discrepancy \code{kmmd} performs a non-parametric distribution test.} \usage{ \S4method{kmmd}{matrix}(x, y, kernel="rbfdot",kpar="automatic", alpha = 0.05, asymptotic = FALSE, replace = TRUE, ntimes = 150, frac = 1, ...) \S4method{kmmd}{kernelMatrix}(x, y, Kxy, alpha = 0.05, asymptotic = FALSE, replace = TRUE, ntimes = 100, frac = 1, ...) \S4method{kmmd}{list}(x, y, kernel="stringdot", kpar = list(type = "spectrum", length = 4), alpha = 0.05, asymptotic = FALSE, replace = TRUE, ntimes = 150, frac = 1, ...) } \arguments{ \item{x}{data values, in a \code{matrix}, \code{list}, or \code{kernelMatrix}} \item{y}{data values, in a \code{matrix}, \code{list}, or \code{kernelMatrix}} \item{Kxy}{\code{kernelMatrix} between \eqn{x} and \eqn{y} values (only for the kernelMatrix interface)} \item{kernel}{the kernel function used in training and predicting. This parameter can be set to any function, of class kernel, which computes a dot product between two vector arguments. \code{kernlab} provides the most popular kernel functions which can be used by setting the kernel parameter to the following strings: \itemize{ \item \code{rbfdot} Radial Basis kernel function "Gaussian" \item \code{polydot} Polynomial kernel function \item \code{vanilladot} Linear kernel function \item \code{tanhdot} Hyperbolic tangent kernel function \item \code{laplacedot} Laplacian kernel function \item \code{besseldot} Bessel kernel function \item \code{anovadot} ANOVA RBF kernel function \item \code{splinedot} Spline kernel \item \code{stringdot} String kernel } The kernel parameter can also be set to a user defined function of class kernel by passing the function name as an argument. } \item{kpar}{the list of hyper-parameters (kernel parameters). This is a list which contains the parameters to be used with the kernel function. Valid parameters for existing kernels are: \itemize{ \item \code{sigma} inverse kernel width for the Radial Basis kernel function "rbfdot" and the Laplacian kernel "laplacedot". \item \code{degree, scale, offset} for the Polynomial kernel "polydot" \item \code{scale, offset} for the Hyperbolic tangent kernel function "tanhdot" \item \code{sigma, order, degree} for the Bessel kernel "besseldot". \item \code{sigma, degree} for the ANOVA kernel "anovadot". \item \code{length, lambda, normalized} for the "stringdot" kernel where length is the length of the strings considered, lambda the decay factor and normalized a logical parameter determining if the kernel evaluations should be normalized.
} Hyper-parameters for user defined kernels can be passed through the \code{kpar} parameter as well. In the case of a Radial Basis kernel function (Gaussian) kpar can also be set to the string "automatic" which uses the heuristics in 'sigest' to calculate a good 'sigma' value for the Gaussian RBF or Laplace kernel, from the data. (default = "automatic"). } \item{alpha}{the confidence level of the test (default: 0.05)} \item{asymptotic}{calculate the bounds asymptotically (suitable for smaller datasets) (default: FALSE)} \item{replace}{use replace when sampling for computing the asymptotic bounds (default : TRUE)} \item{ntimes}{number of times repeating the sampling procedure (default : 150)} \item{frac}{fraction of points to sample (frac : 1) } \item{\dots}{additional parameters.} } \details{\code{kmmd} calculates the kernel maximum mean discrepancy for samples from two distributions and conducts a test as to whether the samples are from different distributions with level \code{alpha}. } \value{ An S4 object of class \code{kmmd} containing the results of whether the H0 hypothesis is rejected or not. H0 being that the samples \eqn{x} and \eqn{y} come from the same distribution. The object contains the following slots : \item{\code{H0}}{is H0 rejected (logical)} \item{\code{AsympH0}}{is H0 rejected according to the asymptotic bound (logical)} \item{\code{kernelf}}{the kernel function used.} \item{\code{mmdstats}}{the test statistics (vector of two)} \item{\code{Radbound}}{the Rademacher bound} \item{\code{Asymbound}}{the asymptotic bound} see \code{kmmd-class} for more details. } \references{Gretton, A., K. Borgwardt, M. Rasch, B. Schoelkopf and A. Smola\cr \emph{A Kernel Method for the Two-Sample-Problem}\cr Neural Information Processing Systems 2006, Vancouver \cr \url{https://papers.neurips.cc/paper/3110-a-kernel-method-for-the-two-sample-problem.pdf} } \author{Alexandros Karatzoglou \cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{\code{ksvm}} \examples{ # create data x <- matrix(runif(300),100) y <- matrix(runif(300)+1,100) mmdo <- kmmd(x, y) mmdo } \keyword{htest} \keyword{nonlinear} \keyword{nonparametric} kernlab/man/gausspr-class.Rd0000644000175100001440000001041212055335061015535 0ustar hornikusers\name{gausspr-class} \docType{class} \alias{gausspr-class} \alias{alpha,gausspr-method} \alias{cross,gausspr-method} \alias{error,gausspr-method} \alias{kcall,gausspr-method} \alias{kernelf,gausspr-method} \alias{kpar,gausspr-method} \alias{lev,gausspr-method} \alias{type,gausspr-method} \alias{alphaindex,gausspr-method} \alias{xmatrix,gausspr-method} \alias{ymatrix,gausspr-method} \alias{scaling,gausspr-method} \title{Class "gausspr"} \description{The Gaussian Processes object class} \section{Objects from the Class}{ Objects can be created by calls of the form \code{new("gausspr", ...)}. 
or by calling the \code{gausspr} function } \section{Slots}{ \describe{ \item{\code{tol}:}{Object of class \code{"numeric"} contains tolerance of termination criteria} \item{\code{kernelf}:}{Object of class \code{"kfunction"} contains the kernel function used} \item{\code{kpar}:}{Object of class \code{"list"} contains the kernel parameter used } \item{\code{kcall}:}{Object of class \code{"list"} contains the used function call } \item{\code{type}:}{Object of class \code{"character"} contains type of problem } \item{\code{terms}:}{Object of class \code{"ANY"} contains the terms representation of the symbolic model used (when using a formula)} \item{\code{xmatrix}:}{Object of class \code{"input"} containing the data matrix used } \item{\code{ymatrix}:}{Object of class \code{"output"} containing the response matrix} \item{\code{fitted}:}{Object of class \code{"output"} containing the fitted values } \item{\code{lev}:}{Object of class \code{"vector"} containing the levels of the response (in case of classification) } \item{\code{nclass}:}{Object of class \code{"numeric"} containing the number of classes (in case of classification) } \item{\code{alpha}:}{Object of class \code{"listI"} containing the computes alpha values } \item{\code{alphaindex}}{Object of class \code{"list"} containing the indexes for the alphas in various classes (in multi-class problems).} \item{\code{sol}}{Object of class \code{"matrix"} containing the solution to the Gaussian Process formulation, it is used to compute the variance in regression problems.} \item{\code{scaling}}{Object of class \code{"ANY"} containing the scaling coefficients of the data (when case \code{scaled = TRUE} is used).} \item{\code{nvar}:}{Object of class \code{"numeric"} containing the computed variance} \item{\code{error}:}{Object of class \code{"numeric"} containing the training error} \item{\code{cross}:}{Object of class \code{"numeric"} containing the cross validation error} \item{\code{n.action}:}{Object of class \code{"ANY"} containing the action performed in NA } } } \section{Methods}{ \describe{ \item{alpha}{\code{signature(object = "gausspr")}: returns the alpha vector} \item{cross}{\code{signature(object = "gausspr")}: returns the cross validation error } \item{error}{\code{signature(object = "gausspr")}: returns the training error } \item{fitted}{\code{signature(object = "vm")}: returns the fitted values } \item{kcall}{\code{signature(object = "gausspr")}: returns the call performed} \item{kernelf}{\code{signature(object = "gausspr")}: returns the kernel function used} \item{kpar}{\code{signature(object = "gausspr")}: returns the kernel parameter used} \item{lev}{\code{signature(object = "gausspr")}: returns the response levels (in classification) } \item{type}{\code{signature(object = "gausspr")}: returns the type of problem} \item{xmatrix}{\code{signature(object = "gausspr")}: returns the data matrix used} \item{ymatrix}{\code{signature(object = "gausspr")}: returns the response matrix used} \item{scaling}{\code{signature(object = "gausspr")}: returns the scaling coefficients of the data (when \code{scaled = TRUE} is used)} } } \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{ \code{\link{gausspr}}, \code{\link{ksvm-class}}, \code{\link{vm-class}} } \examples{ # train model data(iris) test <- gausspr(Species~.,data=iris,var=2) test alpha(test) error(test) lev(test) } \keyword{classes} kernlab/man/income.Rd0000644000175100001440000000370611304023134014217 0ustar hornikusers\name{income} 
\alias{income} \title{Income Data} \description{ Customer Income Data from a marketing survey. } \usage{data(income)} \format{ A data frame with 14 categorical variables (8993 observations). Explanation of the variable names: \tabular{rllll}{ \tab 1 \tab \code{INCOME} \tab annual income of household \tab \cr \tab \tab \tab (Personal income if single) \tab ordinal\cr \tab 2 \tab \code{SEX} \tab sex \tab nominal\cr \tab 3 \tab \code{MARITAL.STATUS} \tab marital status \tab nominal\cr \tab 4 \tab \code{AGE} \tab age \tab ordinal\cr \tab 5 \tab \code{EDUCATION} \tab educational grade \tab ordinal\cr \tab 6 \tab \code{OCCUPATION} \tab type of work \tab nominal \cr \tab 7 \tab \code{AREA} \tab how long the interviewed person has lived\tab \cr \tab \tab \tab in the San Francisco/Oakland/San Jose area \tab ordinal\cr \tab 8 \tab \code{DUAL.INCOMES} \tab dual incomes (if married) \tab nominal\cr \tab 9 \tab \code{HOUSEHOLD.SIZE} \tab persons living in the household \tab ordinal\cr \tab 10 \tab \code{UNDER18} \tab persons in household under 18 \tab ordinal\cr \tab 11 \tab \code{HOUSEHOLDER} \tab householder status \tab nominal\cr \tab 12 \tab \code{HOME.TYPE} \tab type of home \tab nominal\cr \tab 13 \tab \code{ETHNIC.CLASS} \tab ethnic classification \tab nominal\cr \tab 14 \tab \code{LANGUAGE} \tab language most often spoken at home \tab nominal\cr } } \details{ A total of N=9409 questionnaires containing 502 questions were filled out by shopping mall customers in the San Francisco Bay area. The dataset is an extract from this survey. It consists of 14 demographic attributes. The dataset is a mixture of nominal and ordinal variables with a lot of missing data. The goal is to predict the Anual Income of Household from the other 13 demographics attributes. } \source{ Impact Resources, Inc., Columbus, OH (1987). } \keyword{datasets} kernlab/man/gausspr.Rd0000644000175100001440000001661514221634017014445 0ustar hornikusers\name{gausspr} \alias{gausspr} \alias{gausspr,formula-method} \alias{gausspr,vector-method} \alias{gausspr,matrix-method} \alias{coef,gausspr-method} \alias{show,gausspr-method} %- Also NEED an '\alias' for EACH other topic documented here. \title{ Gaussian processes for regression and classification} \description{ \code{gausspr} is an implementation of Gaussian processes for classification and regression. } \usage{ \S4method{gausspr}{formula}(x, data=NULL, ..., subset, na.action = na.omit, scaled = TRUE) \S4method{gausspr}{vector}(x,...) \S4method{gausspr}{matrix}(x, y, scaled = TRUE, type= NULL, kernel="rbfdot", kpar="automatic", var=1, variance.model = FALSE, tol=0.0005, cross=0, fit=TRUE, ... , subset, na.action = na.omit) } %- maybe also 'usage' for other objects documented here. \arguments{ \item{x}{a symbolic description of the model to be fit or a matrix or vector when a formula interface is not used. When not using a formula x is a matrix or vector containing the variables in the model} \item{data}{an optional data frame containing the variables in the model. By default the variables are taken from the environment which `gausspr' is called from.} \item{y}{a response vector with one label for each row/component of \code{x}. Can be either a factor (for classification tasks) or a numeric vector (for regression).} \item{type}{Type of problem. Either "classification" or "regression". 
Depending on whether \code{y} is a factor or not, the default setting for \code{type} is \code{classification} or \code{regression}, respectively, but can be overwritten by setting an explicit value.\cr} \item{scaled}{A logical vector indicating the variables to be scaled. If \code{scaled} is of length 1, the value is recycled as many times as needed and all non-binary variables are scaled. Per default, data are scaled internally (both \code{x} and \code{y} variables) to zero mean and unit variance. The center and scale values are returned and used for later predictions.} \item{kernel}{the kernel function used in training and predicting. This parameter can be set to any function, of class kernel, which computes a dot product between two vector arguments. kernlab provides the most popular kernel functions which can be used by setting the kernel parameter to the following strings: \itemize{ \item \code{rbfdot} Radial Basis kernel function "Gaussian" \item \code{polydot} Polynomial kernel function \item \code{vanilladot} Linear kernel function \item \code{tanhdot} Hyperbolic tangent kernel function \item \code{laplacedot} Laplacian kernel function \item \code{besseldot} Bessel kernel function \item \code{anovadot} ANOVA RBF kernel function \item \code{splinedot} Spline kernel } The kernel parameter can also be set to a user defined function of class kernel by passing the function name as an argument. } \item{kpar}{the list of hyper-parameters (kernel parameters). This is a list which contains the parameters to be used with the kernel function. Valid parameters for existing kernels are : \itemize{ \item \code{sigma} inverse kernel width for the Radial Basis kernel function "rbfdot" and the Laplacian kernel "laplacedot". \item \code{degree, scale, offset} for the Polynomial kernel "polydot" \item \code{scale, offset} for the Hyperbolic tangent kernel function "tanhdot" \item \code{sigma, order, degree} for the Bessel kernel "besseldot". \item \code{sigma, degree} for the ANOVA kernel "anovadot". } Hyper-parameters for user defined kernels can be passed through the kpar parameter as well.} \item{var}{the initial noise variance, (only for regression) (default : 0.001)} \item{variance.model}{build model for variance or standard deviation estimation (only for regression) (default : FALSE)} \item{tol}{tolerance of termination criterion (default: 0.001)} \item{fit}{indicates whether the fitted values should be computed and included in the model or not (default: 'TRUE')} \item{cross}{if a integer value k>0 is specified, a k-fold cross validation on the training data is performed to assess the quality of the model: the Mean Squared Error for regression} \item{subset}{An index vector specifying the cases to be used in the training sample. (NOTE: If given, this argument must be named.)} \item{na.action}{A function to specify the action to be taken if \code{NA}s are found. The default action is \code{na.omit}, which leads to rejection of cases with missing values on any required variable. An alternative is \code{na.fail}, which causes an error if \code{NA} cases are found. (NOTE: If given, this argument must be named.)} \item{\dots}{ additional parameters} } \details{ A Gaussian process is specified by a mean and a covariance function. The mean is a function of \eqn{x} (which is often the zero function), and the covariance is a function \eqn{C(x,x')} which expresses the expected covariance between the value of the function \eqn{y} at the points \eqn{x} and \eqn{x'}. 
The actual function \eqn{y(x)} in any data modeling problem is assumed to be a single sample from this Gaussian distribution. Laplace approximation is used for the parameter estimation in gaussian processes for classification.\cr The predict function can return class probabilities for classification problems by setting the \code{type} parameter to "probabilities". For the regression setting the \code{type} parameter to "variance" or "sdeviation" returns the estimated variance or standard deviation at each predicted point. } \value{ An S4 object of class "gausspr" containing the fitted model along with information. Accessor functions can be used to access the slots of the object which include : \item{alpha}{The resulting model parameters} \item{error}{Training error (if fit == TRUE)} } \references{ C. K. I. Williams and D. Barber \cr Bayesian classification with Gaussian processes. \cr IEEE Transactions on Pattern Analysis and Machine Intelligence, 20(12):1342-1351, 1998\cr \url{https://homepages.inf.ed.ac.uk/ckiw/postscript/pami_final.ps.gz} } \author{Alexandros Karatzoglou \cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{\code{\link{predict.gausspr}}, \code{\link{rvm}}, \code{\link{ksvm}}, \code{\link{gausspr-class}}, \code{\link{lssvm}} } \examples{ # train model data(iris) test <- gausspr(Species~.,data=iris,var=2) test alpha(test) # predict on the training set predict(test,iris[,-5]) # class probabilities predict(test, iris[,-5], type="probabilities") # create regression data x <- seq(-20,20,0.1) y <- sin(x)/x + rnorm(401,sd=0.03) # regression with gaussian processes foo <- gausspr(x, y) foo # predict and plot ytest <- predict(foo, x) plot(x, y, type ="l") lines(x, ytest, col="red") #predict and variance x = c(-4, -3, -2, -1, 0, 0.5, 1, 2) y = c(-2, 0, -0.5,1, 2, 1, 0, -1) plot(x,y) foo2 <- gausspr(x, y, variance.model = TRUE) xtest <- seq(-4,2,0.2) lines(xtest, predict(foo2, xtest)) lines(xtest, predict(foo2, xtest)+2*predict(foo2,xtest, type="sdeviation"), col="red") lines(xtest, predict(foo2, xtest)-2*predict(foo2,xtest, type="sdeviation"), col="red") } \keyword{classif} \keyword{regression} \keyword{nonlinear} \keyword{methods} kernlab/man/inchol-class.Rd0000644000175100001440000000315211304023134015317 0ustar hornikusers\name{inchol-class} \docType{class} \alias{inchol-class} \alias{diagresidues} \alias{maxresiduals} \alias{pivots} \alias{diagresidues,inchol-method} \alias{maxresiduals,inchol-method} \alias{pivots,inchol-method} \title{Class "inchol" } \description{The reduced Cholesky decomposition object} \section{Objects from the Class}{Objects can be created by calls of the form \code{new("inchol", ...)}. or by calling the \code{inchol} function.} \section{Slots}{ \describe{ \item{\code{.Data}:}{Object of class \code{"matrix"} contains the decomposed matrix} \item{\code{pivots}:}{Object of class \code{"vector"} contains the pivots performed} \item{\code{diagresidues}:}{Object of class \code{"vector"} contains the diagonial residues} \item{\code{maxresiduals}:}{Object of class \code{"vector"} contains the maximum residues} } } \section{Extends}{ Class \code{"matrix"}, directly. 
} \section{Methods}{ \describe{ \item{diagresidues}{\code{signature(object = "inchol")}: returns the diagonial residues} \item{maxresiduals}{\code{signature(object = "inchol")}: returns the maximum residues} \item{pivots}{\code{signature(object = "inchol")}: returns the pivots performed} } } \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{ \code{\link{inchol}}, \code{\link{csi-class}}, \code{\link{csi}}} \examples{ data(iris) datamatrix <- as.matrix(iris[,-5]) # initialize kernel function rbf <- rbfdot(sigma=0.1) rbf Z <- inchol(datamatrix,kernel=rbf) dim(Z) pivots(Z) diagresidues(Z) maxresiduals(Z) } \keyword{classes} kernlab/man/kernel-class.Rd0000644000175100001440000000422311304023134015323 0ustar hornikusers\name{kernel-class} \docType{class} \alias{rbfkernel-class} \alias{polykernel-class} \alias{vanillakernel-class} \alias{tanhkernel-class} \alias{anovakernel-class} \alias{besselkernel-class} \alias{laplacekernel-class} \alias{splinekernel-class} \alias{stringkernel-class} \alias{fourierkernel-class} \alias{kfunction-class} \alias{kernel-class} \alias{kpar,kernel-method} \title{Class "kernel" "rbfkernel" "polykernel", "tanhkernel", "vanillakernel"} \description{ The built-in kernel classes in \pkg{kernlab}} \section{Objects from the Class}{ Objects can be created by calls of the form \code{new("rbfkernel")}, \code{new{"polykernel"}}, \code{new{"tanhkernel"}}, \code{new{"vanillakernel"}}, \code{new{"anovakernel"}}, \code{new{"besselkernel"}}, \code{new{"laplacekernel"}}, \code{new{"splinekernel"}}, \code{new{"stringkernel"}} or by calling the \code{rbfdot}, \code{polydot}, \code{tanhdot}, \code{vanilladot}, \code{anovadot}, \code{besseldot}, \code{laplacedot}, \code{splinedot}, \code{stringdot} functions etc.. } \section{Slots}{ \describe{ \item{\code{.Data}:}{Object of class \code{"function"} containing the kernel function } \item{\code{kpar}:}{Object of class \code{"list"} containing the kernel parameters } } } \section{Extends}{ Class \code{"kernel"}, directly. Class \code{"function"}, by class \code{"kernel"}. 
} \section{Methods}{ \describe{ \item{kernelMatrix}{\code{signature(kernel = "rbfkernel", x = "matrix")}: computes the kernel matrix} \item{kernelMult}{\code{signature(kernel = "rbfkernel", x = "matrix")}: computes the quadratic kernel expression} \item{kernelPol}{\code{signature(kernel = "rbfkernel", x = "matrix")}: computes the kernel expansion} \item{kernelFast}{\code{signature(kernel = "rbfkernel", x = "matrix"),,a}: computes parts or the full kernel matrix, mainly used in kernel algorithms where columns of the kernel matrix are computed per invocation } } } \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at} } \seealso{ \code{\link{dots}} } \examples{ rbfkernel <- rbfdot(sigma = 0.1) rbfkernel is(rbfkernel) kpar(rbfkernel) } \keyword{classes} kernlab/man/onlearn-class.Rd0000644000175100001440000000672412117365114015523 0ustar hornikusers\name{onlearn-class} \docType{class} \alias{onlearn-class} \alias{alpha,onlearn-method} \alias{b,onlearn-method} \alias{buffer,onlearn-method} \alias{fit,onlearn-method} \alias{kernelf,onlearn-method} \alias{kpar,onlearn-method} \alias{predict,onlearn-method} \alias{rho,onlearn-method} \alias{rho} \alias{show,onlearn-method} \alias{type,onlearn-method} \alias{xmatrix,onlearn-method} \alias{buffer} \title{Class "onlearn"} \description{ The class of objects used by the Kernel-based Online learning algorithms} \section{Objects from the Class}{ Objects can be created by calls of the form \code{new("onlearn", ...)}. or by calls to the function \code{inlearn}. } \section{Slots}{ \describe{ \item{\code{kernelf}:}{Object of class \code{"function"} containing the used kernel function} \item{\code{buffer}:}{Object of class \code{"numeric"} containing the size of the buffer} \item{\code{kpar}:}{Object of class \code{"list"} containing the hyperparameters of the kernel function.} \item{\code{xmatrix}:}{Object of class \code{"matrix"} containing the data points (similar to support vectors) } \item{\code{fit}:}{Object of class \code{"numeric"} containing the decision function value of the last data point} \item{\code{onstart}:}{Object of class \code{"numeric"} used for indexing } \item{\code{onstop}:}{Object of class \code{"numeric"} used for indexing} \item{\code{alpha}:}{Object of class \code{"ANY"} containing the model parameters} \item{\code{rho}:}{Object of class \code{"numeric"} containing model parameter} \item{\code{b}:}{Object of class \code{"numeric"} containing the offset} \item{\code{pattern}:}{Object of class \code{"factor"} used for dealing with factors} \item{\code{type}:}{Object of class \code{"character"} containing the problem type (classification, regression, or novelty } } } \section{Methods}{ \describe{ \item{alpha}{\code{signature(object = "onlearn")}: returns the model parameters} \item{b}{\code{signature(object = "onlearn")}: returns the offset } \item{buffer}{\code{signature(object = "onlearn")}: returns the buffer size} \item{fit}{\code{signature(object = "onlearn")}: returns the last decision function value} \item{kernelf}{\code{signature(object = "onlearn")}: return the kernel function used} \item{kpar}{\code{signature(object = "onlearn")}: returns the hyper-parameters used} \item{onlearn}{\code{signature(obj = "onlearn")}: the learning function} \item{predict}{\code{signature(object = "onlearn")}: the predict function} \item{rho}{\code{signature(object = "onlearn")}: returns model parameter} \item{show}{\code{signature(object = "onlearn")}: show function} \item{type}{\code{signature(object = "onlearn")}: 
returns the type of problem} \item{xmatrix}{\code{signature(object = "onlearn")}: returns the stored data points} } } \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{ \code{\link{onlearn}}, \code{\link{inlearn}} } \examples{ ## create toy data set x <- rbind(matrix(rnorm(100),,2),matrix(rnorm(100)+3,,2)) y <- matrix(c(rep(1,50),rep(-1,50)),,1) ## initialize onlearn object on <- inlearn(2,kernel="rbfdot",kpar=list(sigma=0.2), type="classification") ## learn one data point at the time for(i in sample(1:100,100)) on <- onlearn(on,x[i,],y[i],nu=0.03,lambda=0.1) sign(predict(on,x)) } \keyword{classes} kernlab/man/kha.Rd0000644000175100001440000001161514221633615013522 0ustar hornikusers\name{kha} \alias{kha} \alias{kha,formula-method} \alias{kha,matrix-method} \alias{predict,kha-method} \encoding{latin1} \title{Kernel Principal Components Analysis} \description{ Kernel Hebbian Algorithm is a nonlinear iterative algorithm for principal component analysis.} \usage{ \S4method{kha}{formula}(x, data = NULL, na.action, ...) \S4method{kha}{matrix}(x, kernel = "rbfdot", kpar = list(sigma = 0.1), features = 5, eta = 0.005, th = 1e-4, maxiter = 10000, verbose = FALSE, na.action = na.omit, ...) } \arguments{ \item{x}{ The data matrix indexed by row or a formula describing the model. Note, that an intercept is always included, whether given in the formula or not.} \item{data}{an optional data frame containing the variables in the model (when using a formula).} \item{kernel}{the kernel function used in training and predicting. This parameter can be set to any function, of class kernel, which computes the inner product in feature space between two vector arguments (see \code{\link{kernels}}). \pkg{kernlab} provides the most popular kernel functions which can be used by setting the kernel parameter to the following strings: \itemize{ \item \code{rbfdot} Radial Basis kernel function "Gaussian" \item \code{polydot} Polynomial kernel function \item \code{vanilladot} Linear kernel function \item \code{tanhdot} Hyperbolic tangent kernel function \item \code{laplacedot} Laplacian kernel function \item \code{besseldot} Bessel kernel function \item \code{anovadot} ANOVA RBF kernel function \item \code{splinedot} Spline kernel } The kernel parameter can also be set to a user defined function of class kernel by passing the function name as an argument. } \item{kpar}{the list of hyper-parameters (kernel parameters). This is a list which contains the parameters to be used with the kernel function. Valid parameters for existing kernels are : \itemize{ \item \code{sigma} inverse kernel width for the Radial Basis kernel function "rbfdot" and the Laplacian kernel "laplacedot". \item \code{degree, scale, offset} for the Polynomial kernel "polydot" \item \code{scale, offset} for the Hyperbolic tangent kernel function "tanhdot" \item \code{sigma, order, degree} for the Bessel kernel "besseldot". \item \code{sigma, degree} for the ANOVA kernel "anovadot". } Hyper-parameters for user defined kernels can be passed through the kpar parameter as well.} \item{features}{Number of features (principal components) to return. (default: 5)} \item{eta}{The hebbian learning rate (default : 0.005)} \item{th}{the smallest value of the convergence step (default : 0.0001) } \item{maxiter}{the maximum number of iterations.} \item{verbose}{print convergence every 100 iterations. (default : FALSE)} \item{na.action}{A function to specify the action to be taken if \code{NA}s are found. 
The default action is \code{na.omit}, which leads to rejection of cases with missing values on any required variable. An alternative is \code{na.fail}, which causes an error if \code{NA} cases are found. (NOTE: If given, this argument must be named.)} \item{\dots}{ additional parameters} } \details{The original form of KPCA can only be used on small data sets since it requires the estimation of the eigenvectors of a full kernel matrix. The Kernel Hebbian Algorithm iteratively estimates the Kernel Principal Components with only linear order memory complexity. (see ref. for more details) } \value{ An S4 object containing the principal component vectors along with the corresponding normalization values. \item{pcv}{a matrix containing the principal component vectors (column wise)} \item{eig}{The normalization values} \item{xmatrix}{The original data matrix} all the slots of the object can be accessed by accessor functions. } \note{The predict function can be used to embed new data on the new space} \references{Kwang In Kim, M.O. Franz and B. Schölkopf\cr \emph{Kernel Hebbian Algorithm for Iterative Kernel Principal Component Analysis}\cr Max-Planck-Institut für biologische Kybernetik, Tübingen (109)\cr \url{https://is.mpg.de/fileadmin/user_upload/files/publications/pdf2302.pdf} } \author{Alexandros Karatzoglou \cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{\code{\link{kpca}}, \code{\link{kfa}}, \code{\link{kcca}}, \code{pca}} \examples{ # another example using the iris data(iris) test <- sample(1:150,70) kpc <- kha(~.,data=iris[-test,-5],kernel="rbfdot", kpar=list(sigma=0.2),features=2, eta=0.001, maxiter=65) #print the principal component vectors pcv(kpc) #plot the data projection on the components plot(predict(kpc,iris[,-5]),col=as.integer(iris[,5]), xlab="1st Principal Component",ylab="2nd Principal Component") } \keyword{cluster} kernlab/man/prc-class.Rd0000644000175100001440000000353311304023134014632 0ustar hornikusers\name{prc-class} \docType{class} \alias{prc-class} \alias{eig} \alias{pcv} \alias{eig,prc-method} \alias{kcall,prc-method} \alias{kernelf,prc-method} \alias{pcv,prc-method} \alias{xmatrix,prc-method} \title{Class "prc"} \description{Principal Components Class} \section{Objects of class "prc"}{Objects from the class cannot be created directly but only contained in other classes.} \section{Slots}{ \describe{ \item{\code{pcv}:}{Object of class \code{"matrix"} containing the principal component vectors } \item{\code{eig}:}{Object of class \code{"vector"} containing the corresponding eigenvalues} \item{\code{kernelf}:}{Object of class \code{"kfunction"} containing the kernel function used} \item{\code{kpar}:}{Object of class \code{"list"} containing the kernel parameters used } \item{\code{xmatrix}:}{Object of class \code{"input"} containing the data matrix used } \item{\code{kcall}:}{Object of class \code{"ANY"} containing the function call } \item{\code{n.action}:}{Object of class \code{"ANY"} containing the action performed on NA } } } \section{Methods}{ \describe{ \item{eig}{\code{signature(object = "prc")}: returns the eigenvalues } \item{kcall}{\code{signature(object = "prc")}: returns the performed call} \item{kernelf}{\code{signature(object = "prc")}: returns the used kernel function} \item{pcv}{\code{signature(object = "prc")}: returns the principal component vectors } \item{predict}{\code{signature(object = "prc")}: embeds new data } \item{xmatrix}{\code{signature(object = "prc")}: returns the used data matrix } } } \author{Alexandros Karatzoglou\cr 
\email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{ \code{\link{kpca-class}},\code{\link{kha-class}}, \code{\link{kfa-class}} } \keyword{classes} kernlab/man/kqr.Rd0000644000175100001440000002055314221633732013555 0ustar hornikusers\name{kqr} \alias{kqr} \alias{kqr,formula-method} \alias{kqr,vector-method} \alias{kqr,matrix-method} \alias{kqr,list-method} \alias{kqr,kernelMatrix-method} \alias{coef,kqr-method} \alias{show,kqr-method} \title{Kernel Quantile Regression.} \description{The Kernel Quantile Regression algorithm \code{kqr} performs non-parametric Quantile Regression.} \usage{ \S4method{kqr}{formula}(x, data=NULL, ..., subset, na.action = na.omit, scaled = TRUE) \S4method{kqr}{vector}(x,...) \S4method{kqr}{matrix}(x, y, scaled = TRUE, tau = 0.5, C = 0.1, kernel = "rbfdot", kpar = "automatic", reduced = FALSE, rank = dim(x)[1]/6, fit = TRUE, cross = 0, na.action = na.omit) \S4method{kqr}{kernelMatrix}(x, y, tau = 0.5, C = 0.1, fit = TRUE, cross = 0) \S4method{kqr}{list}(x, y, tau = 0.5, C = 0.1, kernel = "stringdot", kpar= list(length=4, C=0.5), fit = TRUE, cross = 0) } \arguments{ \item{x}{the data or a symbolic description of the model to be fit. When not using a formula x can be a matrix or vector containing the training data or a kernel matrix of class \code{kernelMatrix} of the training data or a list of character vectors (for use with the string kernel). Note, that the intercept is always excluded, whether given in the formula or not.} \item{data}{an optional data frame containing the variables in the model. By default the variables are taken from the environment which \code{kqr} is called from.} \item{y}{a numeric vector or a column matrix containing the response.} \item{scaled}{A logical vector indicating the variables to be scaled. If \code{scaled} is of length 1, the value is recycled as many times as needed and all non-binary variables are scaled. Per default, data are scaled internally (both \code{x} and \code{y} variables) to zero mean and unit variance. The center and scale values are returned and used for later predictions. (default: TRUE)} \item{tau}{the quantile to be estimated; this is generally a number strictly between 0 and 1. For 0.5 the median is calculated. (default: 0.5)} \item{C}{the cost regularization parameter. This parameter controls the smoothness of the fitted function, essentially higher values for C lead to less smooth functions. (default: 0.1)} \item{kernel}{the kernel function used in training and predicting. This parameter can be set to any function, of class kernel, which computes a dot product between two vector arguments. \code{kernlab} provides the most popular kernel functions which can be used by setting the kernel parameter to the following strings: \itemize{ \item \code{rbfdot} Radial Basis kernel function "Gaussian" \item \code{polydot} Polynomial kernel function \item \code{vanilladot} Linear kernel function \item \code{tanhdot} Hyperbolic tangent kernel function \item \code{laplacedot} Laplacian kernel function \item \code{besseldot} Bessel kernel function \item \code{anovadot} ANOVA RBF kernel function \item \code{splinedot} Spline kernel \item \code{stringdot} String kernel } The kernel parameter can also be set to a user defined function of class kernel by passing the function name as an argument. } \item{kpar}{the list of hyper-parameters (kernel parameters). This is a list which contains the parameters to be used with the kernel function.
Valid parameters for existing kernels are : \itemize{ \item \code{sigma} inverse kernel width for the Radial Basis kernel function "rbfdot" and the Laplacian kernel "laplacedot". \item \code{degree, scale, offset} for the Polynomial kernel "polydot" \item \code{scale, offset} for the Hyperbolic tangent kernel function "tanhdot" \item \code{sigma, order, degree} for the Bessel kernel "besseldot". \item \code{sigma, degree} for the ANOVA kernel "anovadot". \item \code{length, lambda, normalized} for the "stringdot" kernel where length is the length of the strings considered, lambda the decay factor and normalized a logical parameter determining if the kernel evaluations should be normalized. } Hyper-parameters for user defined kernels can be passed through the \code{kpar} parameter as well. In the case of a Radial Basis kernel function (Gaussian), kpar can also be set to the string "automatic" which uses the heuristics in 'sigest' to calculate a good 'sigma' value for the Gaussian RBF or Laplace kernel, from the data. (default = "automatic"). } \item{reduced}{use an incomplete Cholesky decomposition to calculate a decomposed form \eqn{Z} of the kernel matrix \eqn{K} (where \eqn{K = ZZ'}) and perform the calculations with \eqn{Z}. This might be useful when using \code{kqr} with large datasets since normally an \eqn{n \times n} kernel matrix would be computed. Setting \code{reduced} to \code{TRUE} makes use of \code{csi} to compute a decomposed form instead and thus only an \eqn{n \times m} matrix, where \eqn{m < n} and \eqn{n} is the sample size, is stored in memory (default: FALSE)} \item{rank}{the rank m of the decomposed matrix calculated when using an incomplete Cholesky decomposition. This parameter is only taken into account when \code{reduced} is \code{TRUE} (default: dim(x)[1]/6)} \item{fit}{indicates whether the fitted values should be computed and included in the model or not (default: 'TRUE')} \item{cross}{if an integer value k>0 is specified, a k-fold cross validation on the training data is performed to assess the quality of the model: the average pinball loss for the estimated quantile is computed} \item{subset}{An index vector specifying the cases to be used in the training sample. (NOTE: If given, this argument must be named.)} \item{na.action}{A function to specify the action to be taken if \code{NA}s are found. The default action is \code{na.omit}, which leads to rejection of cases with missing values on any required variable. An alternative is \code{na.fail}, which causes an error if \code{NA} cases are found. (NOTE: If given, this argument must be named.)} \item{\dots}{additional parameters.} } \details{In quantile regression a function is fitted to the data so that it satisfies the property that a proportion \eqn{tau} of the data \eqn{y} lies below the estimate. While the error bars of many regression problems can be viewed as such estimates, quantile regression estimates this quantity directly. Kernel quantile regression is similar to nu-Support Vector Regression in that it minimizes a regularized loss function in RKHS. The difference between nu-SVR and kernel quantile regression is in the type of loss function used, which in the case of quantile regression is the pinball loss (see reference for details). Minimizing the regularized loss boils down to a quadratic problem which is solved using an interior point QP solver \code{ipop} implemented in \code{kernlab}.
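As an illustration of the loss that is minimized (a sketch for clarity, not code from the package), the pinball loss of a residual \eqn{u = y - f(x)} at quantile \eqn{tau} equals \eqn{tau u} for \eqn{u \ge 0} and \eqn{(tau - 1) u} otherwise; a minimal R version of this hypothetical helper would be
\preformatted{
## hypothetical helper, not part of kernlab: the pinball (quantile) loss
pinball <- function(u, tau) ifelse(u >= 0, tau * u, (tau - 1) * u)
pinball(c(-1, 0.5, 2), tau = 0.9)
}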
} \value{ An S4 object of class \code{kqr} containing the fitted model along with further information. Accessor functions can be used to access the slots of the object, which include: \item{alpha}{The resulting model parameters which can also be accessed by \code{coef}.} \item{kernelf}{the kernel function used.} \item{error}{Training error (if fit == TRUE)} see \code{kqr-class} for more details. } \references{Ichiro Takeuchi, Quoc V. Le, Timothy D. Sears, Alexander J. Smola\cr \emph{Nonparametric Quantile Estimation}\cr Journal of Machine Learning Research 7, 2006, 1231-1264 \cr \url{https://www.jmlr.org/papers/volume7/takeuchi06a/takeuchi06a.pdf} } \author{Alexandros Karatzoglou \cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{\code{\link{predict.kqr}}, \code{\link{kqr-class}}, \code{\link{ipop}}, \code{\link{rvm}}, \code{\link{ksvm}}} \examples{ # create data x <- sort(runif(300)) y <- sin(pi*x) + rnorm(300,0,sd=exp(sin(2*pi*x))) # first calculate the median qrm <- kqr(x, y, tau = 0.5, C=0.15) # predict and plot plot(x, y) ytest <- predict(qrm, x) lines(x, ytest, col="blue") # calculate 0.9 quantile qrm <- kqr(x, y, tau = 0.9, kernel = "rbfdot", kpar= list(sigma=10), C=0.15) ytest <- predict(qrm, x) lines(x, ytest, col="red") # calculate 0.1 quantile qrm <- kqr(x, y, tau = 0.1,C=0.15) ytest <- predict(qrm, x) lines(x, ytest, col="green") # print first 10 model coefficients coef(qrm)[1:10] } \keyword{regression} \keyword{nonlinear} \keyword{methods} kernlab/man/ranking-class.Rd0000644000175100001440000000261612117365252015515 0ustar hornikusers\name{ranking-class} \docType{class} \alias{ranking-class} \alias{edgegraph} \alias{convergence} \alias{convergence,ranking-method} \alias{edgegraph,ranking-method} \alias{show,ranking-method} \title{Class "ranking"} \description{Objects of the class \code{"ranking"} are created from the \code{ranking} function and extend the class \code{matrix}} \section{Objects from the Class}{ Objects can be created by calls of the form \code{new("ranking", ...)}. } \section{Slots}{ \describe{ \item{\code{.Data}:}{Object of class \code{"matrix"} containing the data ranking and scores} \item{\code{convergence}:}{Object of class \code{"matrix"} containing the convergence matrix} \item{\code{edgegraph}:}{Object of class \code{"matrix"} containing the edgegraph} } } \section{Extends}{ Class \code{"matrix"}, directly. } \section{Methods}{ \describe{ \item{show}{\code{signature(object = "ranking")}: displays the ranking score matrix} } } \author{Alexandros Karatzoglou \cr \email{alexandros.karatzoglou@ci.tuwien.ac.at} } \seealso{ \code{\link{ranking}} } \examples{ data(spirals) ## create data set to be ranked ran<-spirals[rowSums(abs(spirals)<0.55)==2,] ## rank points according to "relevance" to point 54 (up left) ranked<-ranking(ran,54,kernel="rbfdot", kpar=list(sigma=100),edgegraph=TRUE) ranked edgegraph(ranked)[1:10,1:10] } \keyword{classes} kernlab/man/reuters.Rd0000644000175100001440000000111711304023134014430 0ustar hornikusers\name{reuters} \alias{reuters} \alias{rlabels} \title{Reuters Text Data} \description{A small sample from the Reuters news data set.} \usage{data(reuters)} \format{ A list of 40 text documents along with the labels. \code{reuters} contains the text documents and \code{rlabels} the labels in a vector. } \details{ This dataset contains a list of 40 text documents along with the labels. The data consist of 20 documents from the \code{acq} category and 20 documents from the \code{crude} category.
The labels are stored in \code{rlabels}. } \source{Reuters} \keyword{datasets} kernlab/man/kmmd-class.Rd0000644000175100001440000000415311304023134014775 0ustar hornikusers\name{kmmd-class} \docType{class} \alias{kmmd-class} \alias{kernelf,kmmd-method} \alias{H0,kmmd-method} \alias{AsympH0,kmmd-method} \alias{Radbound,kmmd-method} \alias{Asymbound,kmmd-method} \alias{mmdstats,kmmd-method} \title{Class "kmmd"} \description{The Kernel Maximum Mean Discrepancy object class} \section{Objects from the Class}{ Objects can be created by calls of the form \code{new("kmmd", ...)} or by calling the \code{kmmd} function. } \section{Slots}{ \describe{ \item{\code{kernelf}:}{Object of class \code{"kfunction"} containing the kernel function used} \item{\code{xmatrix}:}{Object of class \code{"kernelMatrix"} containing the data used } \item{\code{H0}:}{Object of class \code{"logical"} indicating whether H0 is rejected} \item{\code{AsympH0}:}{Object of class \code{"logical"} indicating whether H0 is rejected according to the asymptotic bound} \item{\code{mmdstats}:}{Object of class \code{"vector"} containing the test statistics (vector of two)} \item{\code{Radbound}:}{Object of class \code{"numeric"} containing the Rademacher bound} \item{\code{Asymbound}:}{Object of class \code{"numeric"} containing the asymptotic bound} } } \section{Methods}{ \describe{ \item{kernelf}{\code{signature(object = "kmmd")}: returns the kernel function used} \item{H0}{\code{signature(object = "kmmd")}: returns whether H0 is rejected} \item{AsympH0}{\code{signature(object = "kmmd")}: returns whether H0 is rejected according to the asymptotic bound} \item{mmdstats}{\code{signature(object = "kmmd")}: returns the values of the mmd statistics} \item{Radbound}{\code{signature(object = "kmmd")}: returns the value of the Rademacher bound} \item{Asymbound}{\code{signature(object = "kmmd")}: returns the value of the asymptotic bound} } } \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{ \code{\link{kmmd}} } \examples{ # create data x <- matrix(runif(300),100) y <- matrix(runif(300)+1,100) mmdo <- kmmd(x, y) H0(mmdo) } \keyword{classes} kernlab/man/onlearn.Rd0000644000175100001440000000467214221634662014425 0ustar hornikusers\name{onlearn} \alias{onlearn} \alias{onlearn,onlearn-method} \title{Kernel Online Learning algorithms} \description{ Online Kernel-based Learning algorithms for classification, novelty detection, and regression. } \usage{ \S4method{onlearn}{onlearn}(obj, x, y = NULL, nu = 0.2, lambda = 1e-04) } \arguments{ \item{obj}{an object of class \code{onlearn} created by the initialization function \code{inlearn} containing the kernel to be used during learning and the parameters of the learned model} \item{x}{vector or matrix containing the data. Factors have to be numerically coded. If \code{x} is a matrix the code is run internally one sample at a time.} \item{y}{the class label in case of classification. Only binary classification is supported and class labels have to be -1 or +1. } \item{nu}{a parameter which, similarly to the \code{nu} parameter in SVM, bounds the training error.} \item{lambda}{the learning rate} } \details{ The online algorithms are based on a simple stochastic gradient descent method in feature space. The state of the algorithm is stored in an object of class \code{onlearn} and has to be passed to the function at each iteration.
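Schematically (a sketch of the calling pattern only; see the examples below for a complete run), the updated object returned by each call has to be fed back into the next call:
\preformatted{
## sketch only: re-assign the returned object at every step
## on <- inlearn(d, kernel = "rbfdot", kpar = list(sigma = 0.2),
##               type = "classification")
## for (i in seq_len(nrow(x))) on <- onlearn(on, x[i, ], y[i])
}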
} \value{ The function returns an \code{S4} object of class \code{onlearn} containing the model parameters and the last fitted value which can be retrieved by the accessor method \code{fit}. The value returned in the classification and novelty detection problem is the decision function value phi. The accessor methods \code{alpha} returns the model parameters. } \references{ Kivinen J. Smola A.J. Williamson R.C. \cr \emph{Online Learning with Kernels}\cr IEEE Transactions on Signal Processing vol. 52, Issue 8, 2004\cr \url{https://alex.smola.org/papers/2004/KivSmoWil04.pdf}} \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{\code{\link{inlearn}}} \examples{ ## create toy data set x <- rbind(matrix(rnorm(100),,2),matrix(rnorm(100)+3,,2)) y <- matrix(c(rep(1,50),rep(-1,50)),,1) ## initialize onlearn object on <- inlearn(2,kernel="rbfdot",kpar=list(sigma=0.2), type="classification") ind <- sample(1:100,100) ## learn one data point at the time for(i in ind) on <- onlearn(on,x[i,],y[i],nu=0.03,lambda=0.1) ## or learn all the data on <- onlearn(on,x[ind,],y[ind],nu=0.03,lambda=0.1) sign(predict(on,x)) } \keyword{classif} \keyword{neural} \keyword{regression} \keyword{ts} kernlab/man/ranking.Rd0000644000175100001440000001253414366220457014417 0ustar hornikusers\name{ranking} \alias{ranking} \alias{ranking,matrix-method} \alias{ranking,list-method} \alias{ranking,kernelMatrix-method} \title{Ranking} \description{ A universal ranking algorithm which assigns importance/ranking to data points given a query. } \usage{ \S4method{ranking}{matrix}(x, y, kernel ="rbfdot", kpar = list(sigma = 1), scale = FALSE, alpha = 0.99, iterations = 600, edgegraph = FALSE, convergence = FALSE ,...) \S4method{ranking}{kernelMatrix}(x, y, alpha = 0.99, iterations = 600, convergence = FALSE,...) \S4method{ranking}{list}(x, y, kernel = "stringdot", kpar = list(length = 4, lambda = 0.5), alpha = 0.99, iterations = 600, convergence = FALSE, ...) } \arguments{ \item{x}{a matrix containing the data to be ranked, or the kernel matrix of data to be ranked or a list of character vectors} \item{y}{The index of the query point in the data matrix or a vector of length equal to the rows of the data matrix having a one at the index of the query points index and zero at all the other points.} \item{kernel}{the kernel function used in training and predicting. This parameter can be set to any function, of class kernel, which computes a dot product between two vector arguments. kernlab provides the most popular kernel functions which can be used by setting the kernel parameter to the following strings: \itemize{ \item \code{rbfdot} Radial Basis kernel function "Gaussian" \item \code{polydot} Polynomial kernel function \item \code{vanilladot} Linear kernel function \item \code{tanhdot} Hyperbolic tangent kernel function \item \code{laplacedot} Laplacian kernel function \item \code{besseldot} Bessel kernel function \item \code{anovadot} ANOVA RBF kernel function \item \code{splinedot} Spline kernel } The kernel parameter can also be set to a user defined function of class kernel by passing the function name as an argument. } \item{kpar}{the list of hyper-parameters (kernel parameters). This is a list which contains the parameters to be used with the kernel function. For valid parameters for existing kernels are : \itemize{ \item \code{sigma} inverse kernel width for the Radial Basis kernel function "rbfdot" and the Laplacian kernel "laplacedot". 
\item \code{degree, scale, offset} for the Polynomial kernel "polydot" \item \code{scale, offset} for the Hyperbolic tangent kernel function "tanhdot" \item \code{sigma, order, degree} for the Bessel kernel "besseldot". \item \code{sigma, degree} for the ANOVA kernel "anovadot". } Hyper-parameters for user defined kernels can be passed through the kpar parameter as well.} \item{scale}{If TRUE the data matrix columns are scaled to zero mean and unit variance.} \item{alpha}{ The \code{alpha} parameter takes values between 0 and 1 and is used to control the authoritative scores received from the unlabeled points. For 0 no global structure is found the algorithm ranks the points similarly to the original distance metric.} \item{iterations}{Maximum number of iterations} \item{edgegraph}{Construct edgegraph (only supported with the RBF kernel)} \item{convergence}{Include convergence matrix in results} \item{\dots}{Additional arguments} } \details{ A simple universal ranking algorithm which exploits the intrinsic global geometric structure of the data. In many real world applications this should be superior to a local method in which the data are simply ranked by pairwise Euclidean distances. Firstly a weighted network is defined on the data and an authoritative score is assigned to each query. The query points act as source nodes that continually pump their authoritative scores to the remaining points via the weighted network and the remaining points further spread the scores they received to their neighbors. This spreading process is repeated until convergence and the points are ranked according to their score at the end of the iterations. } \value{ An S4 object of class \code{ranking} which extends the \code{matrix} class. The first column of the returned matrix contains the original index of the points in the data matrix the second column contains the final score received by each point and the third column the ranking of the point. The object contains the following slots : \item{edgegraph}{Containing the edgegraph of the data points. } \item{convergence}{Containing the convergence matrix} } \references{ D. Zhou, J. Weston, A. Gretton, O. Bousquet, B. Schoelkopf \cr \emph{Ranking on Data Manifolds}\cr Advances in Neural Information Processing Systems 16.\cr MIT Press Cambridge Mass. 2004 \cr \url{https://papers.neurips.cc/paper/2447-ranking-on-data-manifolds.pdf} } \author{Alexandros Karatzoglou \cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{ \code{\link{ranking-class}}, \code{\link{specc}} } \examples{ data(spirals) ## create data from spirals ran <- spirals[rowSums(abs(spirals) < 0.55) == 2,] ## rank points according to similarity to the most upper left point ranked <- ranking(ran, 54, kernel = "rbfdot", kpar = list(sigma = 100), edgegraph = TRUE) ranked[54, 2] <- max(ranked[-54, 2]) c<-1:86 op <- par(mfrow = c(1, 2),pty="s") plot(ran) plot(ran, cex=c[ranked[,3]]/40) } \keyword{cluster} \keyword{classif} kernlab/man/specc-class.Rd0000644000175100001440000000315311304023134015141 0ustar hornikusers\name{specc-class} \docType{class} \alias{specc-class} \alias{centers} \alias{size} \alias{withinss} \alias{centers,specc-method} \alias{withinss,specc-method} \alias{size,specc-method} \alias{kernelf,specc-method} \title{Class "specc"} \description{ The Spectral Clustering Class} \section{Objects from the Class}{ Objects can be created by calls of the form \code{new("specc", ...)}. or by calling the function \code{specc}. 
} \section{Slots}{ \describe{ \item{\code{.Data}:}{Object of class \code{"vector"} containing the cluster assignments} \item{\code{centers}:}{Object of class \code{"matrix"} containing the cluster centers} \item{\code{size}:}{Object of class \code{"vector"} containing the number of points in each cluster} \item{\code{withinss}:}{Object of class \code{"vector"} containing the within-cluster sum of squares for each cluster} \item{\code{kernelf}}{Object of class \code{kernel} containing the used kernel function.} } } \section{Methods}{ \describe{ \item{centers}{\code{signature(object = "specc")}: returns the cluster centers} \item{withinss}{\code{signature(object = "specc")}: returns the within-cluster sum of squares for each cluster} \item{size}{\code{signature(object = "specc")}: returns the number of points in each cluster } } } \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{ \code{\link{specc}}, \code{\link{kpca-class}} } \examples{ ## Cluster the spirals data set. data(spirals) sc <- specc(spirals, centers=2) centers(sc) size(sc) } \keyword{classes} kernlab/man/ksvm-class.Rd0000644000175100001440000001532112117364353015042 0ustar hornikusers\name{ksvm-class} \docType{class} \alias{ksvm-class} \alias{SVindex} \alias{alphaindex} \alias{prob.model} \alias{scaling} \alias{prior} \alias{show} \alias{param} \alias{b} \alias{obj} \alias{nSV} \alias{coef,vm-method} \alias{SVindex,ksvm-method} \alias{alpha,ksvm-method} \alias{alphaindex,ksvm-method} \alias{cross,ksvm-method} \alias{error,ksvm-method} \alias{param,ksvm-method} \alias{fitted,ksvm-method} \alias{prior,ksvm-method} \alias{prob.model,ksvm-method} \alias{kernelf,ksvm-method} \alias{kpar,ksvm-method} \alias{lev,ksvm-method} \alias{kcall,ksvm-method} \alias{scaling,ksvm-method} \alias{type,ksvm-method} \alias{xmatrix,ksvm-method} \alias{ymatrix,ksvm-method} \alias{b,ksvm-method} \alias{obj,ksvm-method} \alias{nSV,ksvm-method} \title{Class "ksvm" } \description{An S4 class containing the output (model) of the \code{ksvm} Support Vector Machines function } \section{Objects from the Class}{ Objects can be created by calls of the form \code{new("ksvm", ...)} or by calls to the \code{ksvm} function. } \section{Slots}{ \describe{ \item{\code{type}:}{Object of class \code{"character"} containing the support vector machine type ("C-svc", "nu-svc", "C-bsvc", "spoc-svc", "one-svc", "eps-svr", "nu-svr", "eps-bsvr")} \item{\code{param}:}{Object of class \code{"list"} containing the Support Vector Machine parameters (C, nu, epsilon)} \item{\code{kernelf}:}{Object of class \code{"function"} containing the kernel function} \item{\code{kpar}:}{Object of class \code{"list"} containing the kernel function parameters (hyperparameters)} \item{\code{kcall}:}{Object of class \code{"ANY"} containing the \code{ksvm} function call} \item{\code{scaling}:}{Object of class \code{"ANY"} containing the scaling information performed on the data} \item{\code{terms}:}{Object of class \code{"ANY"} containing the terms representation of the symbolic model used (when using a formula)} \item{\code{xmatrix}:}{Object of class \code{"input"} (\code{"list"} for multiclass problems or \code{"matrix"} for binary classification and regression problems) containing the support vectors calculated from the data matrix used during computations (possibly scaled and without NA). 
In the case of multi-class classification each list entry contains the support vectors from each binary classification problem from the one-against-one method.} \item{\code{ymatrix}:}{Object of class \code{"output"} the response \code{"matrix"} or \code{"factor"} or \code{"vector"} or \code{"logical"}} \item{\code{fitted}:}{Object of class \code{"output"} with the fitted values, predictions using the training set.} \item{\code{lev}:}{Object of class \code{"vector"} with the levels of the response (in the case of classification)} \item{\code{prob.model}:}{Object of class \code{"list"} with the class prob. model} \item{\code{prior}:}{Object of class \code{"list"} with the prior of the training set} \item{\code{nclass}:}{Object of class \code{"numeric"} containing the number of classes (in the case of classification)} \item{\code{alpha}:}{Object of class \code{"listI"} containing the resulting alpha vector (\code{"list"} or \code{"matrix"} in case of multiclass classification) (support vectors)} \item{\code{coef}:}{Object of class \code{"ANY"} containing the resulting coefficients} \item{\code{alphaindex}:}{Object of class \code{"list"} containing the indexes of the non-zero alphas (support vectors)} \item{\code{b}:}{Object of class \code{"numeric"} containing the resulting offset } \item{\code{SVindex}:}{Object of class \code{"vector"} containing the indexes of the support vectors} \item{\code{nSV}:}{Object of class \code{"numeric"} containing the number of support vectors } \item{\code{obj}:}{Object of class \code{vector} containing the value of the objective function. When using one-against-one in multiclass classification this is a vector.} \item{\code{error}:}{Object of class \code{"numeric"} containing the training error} \item{\code{cross}:}{Object of class \code{"numeric"} containing the cross-validation error } \item{\code{n.action}:}{Object of class \code{"ANY"} containing the action performed for NA } } } \section{Methods}{ \describe{ \item{SVindex}{\code{signature(object = "ksvm")}: returns the indexes of the support vectors} \item{alpha}{\code{signature(object = "ksvm")}: returns the complete alpha vector (with zero values)} \item{alphaindex}{\code{signature(object = "ksvm")}: returns the indexes of non-zero alphas (support vectors)} \item{cross}{\code{signature(object = "ksvm")}: returns the cross-validation error } \item{error}{\code{signature(object = "ksvm")}: returns the training error } \item{obj}{\code{signature(object = "ksvm")}: returns the value of the objective function} \item{fitted}{\code{signature(object = "vm")}: returns the fitted values (predict on training set) } \item{kernelf}{\code{signature(object = "ksvm")}: returns the kernel function} \item{kpar}{\code{signature(object = "ksvm")}: returns the kernel parameters (hyperparameters)} \item{lev}{\code{signature(object = "ksvm")}: returns the levels in case of classification } \item{prob.model}{\code{signature(object="ksvm")}: returns class prob.
model values} \item{param}{\code{signature(object="ksvm")}: returns the parameters of the SVM in a list (C, epsilon, nu etc.)} \item{prior}{\code{signature(object="ksvm")}: returns the prior of the training set} \item{kcall}{\code{signature(object="ksvm")}: returns the \code{ksvm} function call} \item{scaling}{\code{signature(object = "ksvm")}: returns the scaling values } \item{show}{\code{signature(object = "ksvm")}: prints the object information} \item{type}{\code{signature(object = "ksvm")}: returns the problem type} \item{xmatrix}{\code{signature(object = "ksvm")}: returns the data matrix used} \item{ymatrix}{\code{signature(object = "ksvm")}: returns the response vector} } } \author{Alexandros Karatzoglou \cr \email{alexandros.karatzolgou@ci.tuwien.ac.at}} \seealso{ \code{\link{ksvm}}, \code{\link{rvm-class}}, \code{\link{gausspr-class}} } \examples{ ## simple example using the promotergene data set data(promotergene) ## train a support vector machine gene <- ksvm(Class~.,data=promotergene,kernel="rbfdot", kpar=list(sigma=0.015),C=50,cross=4) gene # the kernel function kernelf(gene) # the alpha values alpha(gene) # the coefficients coef(gene) # the fitted values fitted(gene) # the cross validation error cross(gene) } \keyword{classes} kernlab/man/vm-class.Rd0000644000175100001440000000732511304023134014473 0ustar hornikusers\name{vm-class} \docType{class} \alias{vm-class} \alias{cross} \alias{alpha} \alias{error} \alias{type} \alias{kernelf} \alias{xmatrix} \alias{ymatrix} \alias{lev} \alias{kcall} \alias{alpha,vm-method} \alias{cross,vm-method} \alias{error,vm-method} \alias{fitted,vm-method} \alias{kernelf,vm-method} \alias{kpar,vm-method} \alias{lev,vm-method} \alias{kcall,vm-method} \alias{type,vm-method} \alias{xmatrix,vm-method} \alias{ymatrix,vm-method} \title{Class "vm" } \description{An S4 VIRTUAL class used as a base for the various vector machine classes in \pkg{kernlab}} \section{Objects from the Class}{ Objects from the class cannot be created directly but only contained in other classes. 
} \section{Slots}{ \describe{ \item{\code{alpha}:}{Object of class \code{"listI"} containing the resulting alpha vector (list in case of multiclass classification) (support vectors)} \item{\code{type}:}{Object of class \code{"character"} containing the vector machine type e.g., ("C-svc", "nu-svc", "C-bsvc", "spoc-svc", "one-svc", "eps-svr", "nu-svr", "eps-bsvr")} \item{\code{kernelf}:}{Object of class \code{"function"} containing the kernel function} \item{\code{kpar}:}{Object of class \code{"list"} containing the kernel function parameters (hyperparameters)} \item{\code{kcall}:}{Object of class \code{"call"} containing the function call} \item{\code{terms}:}{Object of class \code{"ANY"} containing the terms representation of the symbolic model used (when using a formula)} \item{\code{xmatrix}:}{Object of class \code{"input"} the data matrix used during computations (support vectors) (possibly scaled and without NA)} \item{\code{ymatrix}:}{Object of class \code{"output"} the response matrix/vector } \item{\code{fitted}:}{Object of class \code{"output"} with the fitted values, predictions using the training set.} \item{\code{lev}:}{Object of class \code{"vector"} with the levels of the response (in the case of classification)} \item{\code{nclass}:}{Object of class \code{"numeric"} containing the number of classes (in the case of classification)} \item{\code{error}:}{Object of class \code{"vector"} containing the training error} \item{\code{cross}:}{Object of class \code{"vector"} containing the cross-validation error } \item{\code{n.action}:}{Object of class \code{"ANY"} containing the action performed for NA } } } \section{Methods}{ \describe{ \item{alpha}{\code{signature(object = "vm")}: returns the complete alpha vector (wit zero values)} \item{cross}{\code{signature(object = "vm")}: returns the cross-validation error } \item{error}{\code{signature(object = "vm")}: returns the training error } \item{fitted}{\code{signature(object = "vm")}: returns the fitted values (predict on training set) } \item{kernelf}{\code{signature(object = "vm")}: returns the kernel function} \item{kpar}{\code{signature(object = "vm")}: returns the kernel parameters (hyperparameters)} \item{lev}{\code{signature(object = "vm")}: returns the levels in case of classification } \item{kcall}{\code{signature(object="vm")}: returns the function call} \item{type}{\code{signature(object = "vm")}: returns the problem type} \item{xmatrix}{\code{signature(object = "vm")}: returns the data matrix used(support vectors)} \item{ymatrix}{\code{signature(object = "vm")}: returns the response vector} } } \author{Alexandros Karatzoglou \cr \email{alexandros.karatzolgou@ci.tuwien.ac.at}} \seealso{ \code{\link{ksvm-class}}, \code{\link{rvm-class}}, \code{\link{gausspr-class}} } \keyword{classes} kernlab/man/predict.ksvm.Rd0000644000175100001440000000513214366217302015366 0ustar hornikusers\name{predict.ksvm} \alias{predict.ksvm} \alias{predict,ksvm-method} \title{predict method for support vector object} \description{Prediction of test data using support vector machines} \usage{ \S4method{predict}{ksvm}(object, newdata, type = "response", coupler = "minpair") } \arguments{ \item{object}{an S4 object of class \code{ksvm} created by the \code{ksvm} function} \item{newdata}{a data frame or matrix containing new data} \item{type}{one of \code{response}, \code{probabilities} ,\code{votes}, \code{decision} indicating the type of output: predicted values, matrix of class probabilities, matrix of vote counts, or matrix of decision values.} 
\item{coupler}{Coupling method used in the multiclass case, can be one of \code{minpair} or \code{pkpd} (see reference for more details).} } \value{ If \code{type(object)} is \code{C-svc}, \code{nu-svc}, \code{C-bsvc} or \code{spoc-svc}, the vector returned depends on the argument \code{type}: \item{response}{predicted classes (the classes with majority vote).} \item{probabilities}{matrix of class probabilities (one column for each class and one row for each input).} \item{votes}{matrix of vote counts (one column for each class and one row for each new input).} If \code{type(object)} is \code{eps-svr}, \code{eps-bsvr} or \code{nu-svr}, a vector of predicted values is returned. If \code{type(object)} is \code{one-classification}, a vector of logical values is returned. } \references{ \itemize{ \item T.F. Wu, C.J. Lin, R.C. Weng. \cr \emph{Probability estimates for Multi-class Classification by Pairwise Coupling}\cr \url{https://www.csie.ntu.edu.tw/~cjlin/papers/svmprob/svmprob.pdf} \item H.T. Lin, C.J. Lin, R.C. Weng (2007), A note on Platt's probabilistic outputs for support vector machines. \emph{Machine Learning}, \bold{68}, 267--276. \doi{10.1007/s10994-007-5018-6}. } } \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \keyword{methods} \keyword{regression} \keyword{classif} \examples{ ## example using the promotergene data set data(promotergene) ## create test and training set ind <- sample(1:dim(promotergene)[1],20) genetrain <- promotergene[-ind, ] genetest <- promotergene[ind, ] ## train a support vector machine gene <- ksvm(Class~.,data=genetrain,kernel="rbfdot", kpar=list(sigma=0.015),C=70,cross=4,prob.model=TRUE) gene ## predict gene type probabilities on the test set genetype <- predict(gene,genetest,type="probabilities") genetype } kernlab/man/kfa.Rd0000644000175100001440000001115012117362655013517 0ustar hornikusers\name{kfa} \alias{kfa} \alias{kfa,formula-method} \alias{kfa,matrix-method} \alias{show,kfa-method} \alias{coef,kfa-method} \title{Kernel Feature Analysis} \description{ Kernel Feature Analysis is an algorithm for extracting structure from possibly high-dimensional data sets. Similar to \code{kpca}, a new basis for the data is found. The data can then be projected on the new basis. } \usage{ \S4method{kfa}{formula}(x, data = NULL, na.action = na.omit, ...) \S4method{kfa}{matrix}(x, kernel = "rbfdot", kpar = list(sigma = 0.1), features = 0, subset = 59, normalize = TRUE, na.action = na.omit) } \arguments{ \item{x}{ The data matrix indexed by row or a formula describing the model. Note that an intercept is always included, whether given in the formula or not.} \item{data}{an optional data frame containing the variables in the model (when using a formula).} \item{kernel}{the kernel function used in training and predicting. This parameter can be set to any function, of class kernel, which computes an inner product in feature space between two vector arguments.
\pkg{kernlab} provides the most popular kernel functions which can be used by setting the kernel parameter to the following strings: \itemize{ \item \code{rbfdot} Radial Basis kernel function "Gaussian" \item \code{polydot} Polynomial kernel function \item \code{vanilladot} Linear kernel function \item \code{tanhdot} Hyperbolic tangent kernel function \item \code{laplacedot} Laplacian kernel function \item \code{besseldot} Bessel kernel function \item \code{anovadot} ANOVA RBF kernel function \item \code{splinedot} Spline kernel } The kernel parameter can also be set to a user defined function of class kernel by passing the function name as an argument. } \item{kpar}{the list of hyper-parameters (kernel parameters). This is a list which contains the parameters to be used with the kernel function. Valid parameters for existing kernels are : \itemize{ \item \code{sigma} inverse kernel width for the Radial Basis kernel function "rbfdot" and the Laplacian kernel "laplacedot". \item \code{degree, scale, offset} for the Polynomial kernel "polydot" \item \code{scale, offset} for the Hyperbolic tangent kernel function "tanhdot" \item \code{sigma, order, degree} for the Bessel kernel "besseldot". \item \code{sigma, degree} for the ANOVA kernel "anovadot". } Hyper-parameters for user defined kernels can be passed through the kpar parameter as well.} \item{features}{Number of features (principal components) to return. (default: 0 , all)} \item{subset}{the number of features sampled (used) from the data set} \item{normalize}{normalize the feature selected (default: TRUE)} \item{na.action}{A function to specify the action to be taken if \code{NA}s are found. The default action is \code{na.omit}, which leads to rejection of cases with missing values on any required variable. An alternative is \code{na.fail}, which causes an error if \code{NA} cases are found. (NOTE: If given, this argument must be named.)} \item{\dots}{ additional parameters} } \details{ Kernel Feature analysis is similar to Kernel PCA, but instead of extracting eigenvectors of the training dataset in feature space, it approximates the eigenvectors by selecting training patterns which are good basis vectors for the training set. It works by choosing a fixed size subset of the data set and scaling it to unit length (under the kernel). It then chooses the features that maximize the value of the inner product (kernel function) with the rest of the patterns. } \value{ \code{kfa} returns an object of class \code{kfa} containing the features selected by the algorithm. \item{xmatrix}{contains the features selected} \item{alpha}{contains the sparse alpha vector} The \code{predict} function can be used to embed new data points into to the selected feature base. } \references{Alex J. Smola, Olvi L. 
Mangasarian and Bernhard Schoelkopf\cr \emph{Sparse Kernel Feature Analysis}\cr Data Mining Institute Technical Report 99-04, October 1999\cr \url{ftp://ftp.cs.wisc.edu/pub/dmi/tech-reports/99-04.ps} } \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{\code{\link{kpca}}, \code{\link{kfa-class}}} \examples{ data(promotergene) f <- kfa(~.,data=promotergene,features=2,kernel="rbfdot", kpar=list(sigma=0.01)) plot(predict(f,promotergene),col=as.numeric(promotergene[,1])) } \keyword{cluster} kernlab/man/predict.kqr.Rd0000644000175100001440000000214112117365174015203 0ustar hornikusers\name{predict.kqr} \alias{predict.kqr} \alias{predict,kqr-method} \title{Predict method for kernel Quantile Regression object} \description{Prediction of test data for kernel quantile regression} \usage{ \S4method{predict}{kqr}(object, newdata) } \arguments{ \item{object}{an S4 object of class \code{kqr} created by the \code{kqr} function} \item{newdata}{a data frame, matrix, or kernelMatrix containing new data} } \value{The value of the quantile given by the computed \code{kqr} model in a vector of length equal to the number of rows of \code{newdata}. } \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \keyword{methods} \keyword{regression} \examples{ # create data x <- sort(runif(300)) y <- sin(pi*x) + rnorm(300,0,sd=exp(sin(2*pi*x))) # first calculate the median qrm <- kqr(x, y, tau = 0.5, C=0.15) # predict and plot plot(x, y) ytest <- predict(qrm, x) lines(x, ytest, col="blue") # calculate 0.9 quantile qrm <- kqr(x, y, tau = 0.9, kernel = "rbfdot", kpar= list(sigma=10), C=0.15) ytest <- predict(qrm, x) lines(x, ytest, col="red") } kernlab/man/dots.Rd0000644000175100001440000001005711304023134013713 0ustar hornikusers\name{dots} \alias{dots} \alias{kernels} \alias{rbfdot} \alias{polydot} \alias{tanhdot} \alias{vanilladot} \alias{laplacedot} \alias{besseldot} \alias{anovadot} \alias{fourierdot} \alias{splinedot} \alias{kpar} \alias{kfunction} \alias{show,kernel-method} \title{Kernel Functions} \description{ The kernel generating functions provided in kernlab. \cr The Gaussian RBF kernel \eqn{k(x,x') = \exp(-\sigma \|x - x'\|^2)} \cr The Polynomial kernel \eqn{k(x,x') = (scale <x, x'> + offset)^{degree}}\cr The Linear kernel \eqn{k(x,x') = <x, x'>}\cr The Hyperbolic tangent kernel \eqn{k(x, x') = \tanh(scale <x, x'> + offset)}\cr The Laplacian kernel \eqn{k(x,x') = \exp(-\sigma \|x - x'\|)} \cr The Bessel kernel \eqn{k(x,x') = (- Bessel_{(\nu+1)}^n \sigma \|x - x'\|^2)} \cr The ANOVA RBF kernel \eqn{k(x,x') = \sum_{1\leq i_1 \ldots < i_D \leq N} \prod_{d=1}^D k(x_{id}, {x'}_{id})} where k(x,x) is a Gaussian RBF kernel. \cr The Spline kernel \eqn{ \prod_{d=1}^D 1 + x_i x_j + x_i x_j min(x_i, x_j) - \frac{x_i + x_j}{2} min(x_i,x_j)^2 + \frac{min(x_i,x_j)^3}{3}} \cr The String kernels (see \code{stringdot}). } \usage{ rbfdot(sigma = 1) polydot(degree = 1, scale = 1, offset = 1) tanhdot(scale = 1, offset = 1) vanilladot() laplacedot(sigma = 1) besseldot(sigma = 1, order = 1, degree = 1) anovadot(sigma = 1, degree = 1) splinedot() } \arguments{ \item{sigma}{The inverse kernel width used by the Gaussian, the Laplacian, the Bessel and the ANOVA kernel } \item{degree}{The degree of the polynomial, bessel or ANOVA kernel function.
This has to be an positive integer.} \item{scale}{The scaling parameter of the polynomial and tangent kernel is a convenient way of normalizing patterns without the need to modify the data itself} \item{offset}{The offset used in a polynomial or hyperbolic tangent kernel} \item{order}{The order of the Bessel function to be used as a kernel} } \details{ The kernel generating functions are used to initialize a kernel function which calculates the dot (inner) product between two feature vectors in a Hilbert Space. These functions can be passed as a \code{kernel} argument on almost all functions in \pkg{kernlab}(e.g., \code{ksvm}, \code{kpca} etc). Although using one of the existing kernel functions as a \code{kernel} argument in various functions in \pkg{kernlab} has the advantage that optimized code is used to calculate various kernel expressions, any other function implementing a dot product of class \code{kernel} can also be used as a kernel argument. This allows the user to use, test and develop special kernels for a given data set or algorithm. For details on the string kernels see \code{stringdot}. } \value{ Return an S4 object of class \code{kernel} which extents the \code{function} class. The resulting function implements the given kernel calculating the inner (dot) product between two vectors. \item{kpar}{a list containing the kernel parameters (hyperparameters) used.} The kernel parameters can be accessed by the \code{kpar} function. } \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \note{If the offset in the Polynomial kernel is set to $0$, we obtain homogeneous polynomial kernels, for positive values, we have inhomogeneous kernels. Note that for negative values the kernel does not satisfy Mercer's condition and thus the optimizers may fail. \cr In the Hyperbolic tangent kernel if the offset is negative the likelihood of obtaining a kernel matrix that is not positive definite is much higher (since then even some diagonal elements may be negative), hence if this kernel has to be used, the offset should always be positive. Note, however, that this is no guarantee that the kernel will be positive. } \seealso{\code{stringdot}, \code{\link{kernelMatrix} }, \code{\link{kernelMult}}, \code{\link{kernelPol}}} \examples{ rbfkernel <- rbfdot(sigma = 0.1) rbfkernel kpar(rbfkernel) ## create two vectors x <- rnorm(10) y <- rnorm(10) ## calculate dot product rbfkernel(x,y) } \keyword{symbolmath} kernlab/man/predict.gausspr.Rd0000644000175100001440000000417014221634075016073 0ustar hornikusers\name{predict.gausspr} \alias{predict.gausspr} \alias{predict,gausspr-method} \title{predict method for Gaussian Processes object} \description{Prediction of test data using Gaussian Processes} \usage{ \S4method{predict}{gausspr}(object, newdata, type = "response", coupler = "minpair") } \arguments{ \item{object}{an S4 object of class \code{gausspr} created by the \code{gausspr} function} \item{newdata}{a data frame or matrix containing new data} \item{type}{one of \code{response}, \code{probabilities} indicating the type of output: predicted values or matrix of class probabilities} \item{coupler}{Coupling method used in the multiclass case, can be one of \code{minpair} or \code{pkpd} (see reference for more details).} } \value{ \item{response}{predicted classes (the classes with majority vote) or the response value in regression.} \item{probabilities}{matrix of class probabilities (one column for each class and one row for each input).} } \references{ \itemize{ \item C. K. 
I. Williams and D. Barber \cr Bayesian classification with Gaussian processes. \cr IEEE Transactions on Pattern Analysis and Machine Intelligence, 20(12):1342-1351, 1998\cr \url{https://homepages.inf.ed.ac.uk/ckiw/postscript/pami_final.ps.gz} \item T.F. Wu, C.J. Lin, R.C. Weng. \cr \emph{Probability estimates for Multi-class Classification by Pairwise Coupling}\cr \url{https://www.csie.ntu.edu.tw/~cjlin/papers/svmprob/svmprob.pdf} } } \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \keyword{methods} \keyword{regression} \keyword{classif} \examples{ ## example using the promotergene data set data(promotergene) ## create test and training set ind <- sample(1:dim(promotergene)[1],20) genetrain <- promotergene[-ind, ] genetest <- promotergene[ind, ] ## train a Gaussian process classifier gene <- gausspr(Class~.,data=genetrain,kernel="rbfdot", kpar=list(sigma=0.015)) gene ## predict gene type probabilities on the test set genetype <- predict(gene,genetest,type="probabilities") genetype } kernlab/man/kkmeans.Rd0000644000175100001440000001345214250171226014406 0ustar hornikusers\name{kkmeans} \alias{kkmeans} \alias{kkmeans,matrix-method} \alias{kkmeans,formula-method} \alias{kkmeans,list-method} \alias{kkmeans,kernelMatrix-method} \title{Kernel k-means} \description{ A weighted kernel version of the famous k-means algorithm. } \usage{ \S4method{kkmeans}{formula}(x, data = NULL, na.action = na.omit, ...) \S4method{kkmeans}{matrix}(x, centers, kernel = "rbfdot", kpar = "automatic", alg="kkmeans", p=1, na.action = na.omit, ...) \S4method{kkmeans}{kernelMatrix}(x, centers, ...) \S4method{kkmeans}{list}(x, centers, kernel = "stringdot", kpar = list(length=4, lambda=0.5), alg ="kkmeans", p = 1, na.action = na.omit, ...) } \arguments{ \item{x}{the matrix of data to be clustered, or a symbolic description of the model to be fit, or a kernel matrix of class \code{kernelMatrix}, or a list of character vectors.} \item{data}{an optional data frame containing the variables in the model. By default the variables are taken from the environment which `kkmeans' is called from.} \item{centers}{Either the number of clusters or a matrix of initial cluster centers. If the former, a random initial partitioning is used.} \item{kernel}{the kernel function used in training and predicting. This parameter can be set to any function, of class kernel, which computes an inner product in feature space between two vector arguments (see \code{\link{kernels}}). \pkg{kernlab} provides the most popular kernel functions which can be used by setting the kernel parameter to the following strings: \itemize{ \item \code{rbfdot} Radial Basis kernel "Gaussian" \item \code{polydot} Polynomial kernel \item \code{vanilladot} Linear kernel \item \code{tanhdot} Hyperbolic tangent kernel \item \code{laplacedot} Laplacian kernel \item \code{besseldot} Bessel kernel \item \code{anovadot} ANOVA RBF kernel \item \code{splinedot} Spline kernel \item \code{stringdot} String kernel } Setting the kernel parameter to "matrix" treats \code{x} as a kernel matrix calling the \code{kernelMatrix} interface.\cr The kernel parameter can also be set to a user defined function of class kernel by passing the function name as an argument. } \item{kpar}{a character string or the list of hyper-parameters (kernel parameters).
The default character string \code{"automatic"} uses a heuristic to determine a suitable value for the width parameter of the RBF kernel.\cr A list can also be used containing the parameters to be used with the kernel function. Valid parameters for existing kernels are : \itemize{ \item \code{sigma} inverse kernel width for the Radial Basis kernel function "rbfdot" and the Laplacian kernel "laplacedot". \item \code{degree, scale, offset} for the Polynomial kernel "polydot" \item \code{scale, offset} for the Hyperbolic tangent kernel function "tanhdot" \item \code{sigma, order, degree} for the Bessel kernel "besseldot". \item \code{sigma, degree} for the ANOVA kernel "anovadot". \item \code{length, lambda, normalized} for the "stringdot" kernel where length is the length of the strings considered, lambda the decay factor and normalized a logical parameter determining if the kernel evaluations should be normalized. } Hyper-parameters for user defined kernels can be passed through the kpar parameter as well.} \item{alg}{the algorithm to use. Options currently include \code{kkmeans} and \code{kerninghan}. } \item{p}{a parameter used to keep the affinity matrix positive semidefinite} \item{na.action}{The action to perform on NA} \item{\dots}{additional parameters} } \details{ \code{kernel k-means} uses the 'kernel trick' (i.e. implicitly projecting all data into a non-linear feature space with the use of a kernel) in order to deal with one of the major drawbacks of \code{k-means}, namely that it cannot capture clusters that are not linearly separable in input space. \cr The algorithm is implemented using the triangle inequality to avoid unnecessary and computationally expensive distance calculations. This leads to a significant speedup, particularly on large data sets with a high number of clusters. \cr With a particular choice of weights this algorithm becomes equivalent to Kernighan-Lin, and the norm-cut graph partitioning algorithms. \cr The function also supports input in the form of a kernel matrix or a list of characters for text clustering.\cr The data can be passed to the \code{kkmeans} function in a \code{matrix} or a \code{data.frame}; in addition, \code{kkmeans} also supports input in the form of a kernel matrix of class \code{kernelMatrix} or as a list of character vectors where a string kernel has to be used. } \value{ An S4 object of class \code{specc} which extends the class \code{vector} containing integers indicating the cluster to which each point is allocated. The following slots contain useful information: \item{centers}{A matrix of cluster centers.} \item{size}{The number of points in each cluster} \item{withinss}{The within-cluster sum of squares for each cluster} \item{kernelf}{The kernel function used} } \references{ Inderjit Dhillon, Yuqiang Guan, Brian Kulis\cr A Unified view of Kernel k-means, Spectral Clustering and Graph Partitioning\cr UTCS Technical Report\cr \url{https://people.bu.edu/bkulis/pubs/spectral_techreport.pdf} } \author{ Alexandros Karatzoglou \cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{\code{\link{specc}}, \code{\link{kpca}}, \code{\link{kcca}} } \examples{ ## Cluster the iris data set.
data(iris) sc <- kkmeans(as.matrix(iris[,-5]), centers=3) sc centers(sc) size(sc) withinss(sc) } \keyword{cluster} kernlab/man/plot.Rd0000644000175100001440000000216511304023134013721 0ustar hornikusers\name{plot} \alias{plot.ksvm} \alias{plot,ksvm,missing-method} \alias{plot,ksvm-method} \title{plot method for support vector object} \description{Plot a binary classification support vector machine object. The \code{plot} function returns a contour plot of the decision values. } \usage{ \S4method{plot}{ksvm}(object, data=NULL, grid = 50, slice = list()) } \arguments{ \item{object}{a \code{ksvm} classification object created by the \code{ksvm} function} \item{data}{a data frame or matrix containing data to be plotted} \item{grid}{granularity for the contour plot.} \item{slice}{a list of named numeric values for the dimensions held constant (only needed if more than two variables are used). Dimensions not specified are fixed at 0. } } \seealso{\code{\link{ksvm}}} \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \keyword{methods} \keyword{regression} \keyword{classif} \examples{ ## Demo of the plot function x <- rbind(matrix(rnorm(120),,2),matrix(rnorm(120,mean=3),,2)) y <- matrix(c(rep(1,60),rep(-1,60))) svp <- ksvm(x,y,type="C-svc") plot(svp,data=x) } kernlab/man/inchol.Rd0000644000175100001440000001025114221633672014231 0ustar hornikusers\name{inchol} \alias{inchol} \alias{inchol,matrix-method} %- Also NEED an '\alias' for EACH other topic documented here. \title{Incomplete Cholesky decomposition} \description{ \code{inchol} computes the incomplete Cholesky decomposition of the kernel matrix from a data matrix. } \usage{ inchol(x, kernel="rbfdot", kpar=list(sigma=0.1), tol = 0.001, maxiter = dim(x)[1], blocksize = 50, verbose = 0) } %- maybe also 'usage' for other objects documented here. \arguments{ \item{x}{The data matrix indexed by row} \item{kernel}{the kernel function used in training and predicting. This parameter can be set to any function, of class \code{kernel}, which computes the inner product in feature space between two vector arguments. kernlab provides the most popular kernel functions which can be used by setting the kernel parameter to the following strings: \itemize{ \item \code{rbfdot} Radial Basis kernel function "Gaussian" \item \code{polydot} Polynomial kernel function \item \code{vanilladot} Linear kernel function \item \code{tanhdot} Hyperbolic tangent kernel function \item \code{laplacedot} Laplacian kernel function \item \code{besseldot} Bessel kernel function \item \code{anovadot} ANOVA RBF kernel function \item \code{splinedot} Spline kernel } The kernel parameter can also be set to a user defined function of class kernel by passing the function name as an argument. } \item{kpar}{the list of hyper-parameters (kernel parameters). This is a list which contains the parameters to be used with the kernel function. Valid parameters for existing kernels are : \itemize{ \item \code{sigma} inverse kernel width for the Radial Basis kernel function "rbfdot" and the Laplacian kernel "laplacedot". \item \code{degree, scale, offset} for the Polynomial kernel "polydot" \item \code{scale, offset} for the Hyperbolic tangent kernel function "tanhdot" \item \code{sigma, order, degree} for the Bessel kernel "besseldot". \item \code{sigma, degree} for the ANOVA kernel "anovadot". } Hyper-parameters for user defined kernels can be passed through the kpar parameter as well. 
} \item{tol}{algorithm stops when remaining pivots bring less accuracy than \code{tol} (default: 0.001)} \item{maxiter}{maximum number of iterations and columns in \eqn{Z}} \item{blocksize}{add this many columns to matrix per iteration} \item{verbose}{print info on algorithm convergence} } \details{An incomplete Cholesky decomposition calculates \eqn{Z} where \eqn{K = ZZ'}, \eqn{K} being the kernel matrix. Since the rank of a kernel matrix is usually low, \eqn{Z} tends to be smaller than the complete kernel matrix. The decomposed matrix can be used to create memory-efficient kernel-based algorithms without the need to compute and store a complete kernel matrix in memory.} \value{ An S4 object of class "inchol" which is an extension of the class "matrix". The object is the decomposed kernel matrix along with the slots: \item{pivots}{Indices on which pivots were done} \item{diagresidues}{Residuals left on the diagonal} \item{maxresiduals}{Residuals picked for pivoting} The slots can be accessed either by \code{object@slot} or by accessor functions with the same name (e.g., \code{pivots(object)})} \references{ Francis R. Bach, Michael I. Jordan\cr \emph{Kernel Independent Component Analysis}\cr Journal of Machine Learning Research 3, 1-48\cr \url{https://www.jmlr.org/papers/volume3/bach02a/bach02a.pdf} } \author{Alexandros Karatzoglou (based on Matlab code by S.V.N. (Vishy) Vishwanathan and Alex Smola)\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{\code{\link{csi}}, \code{\link{inchol-class}}, \code{\link{chol}}} \examples{ data(iris) datamatrix <- as.matrix(iris[,-5]) # initialize kernel function rbf <- rbfdot(sigma=0.1) rbf Z <- inchol(datamatrix,kernel=rbf) dim(Z) pivots(Z) # calculate kernel matrix K <- crossprod(t(Z)) # difference between approximated and real kernel matrix (K - kernelMatrix(kernel=rbf, datamatrix))[6,] } \keyword{methods} \keyword{algebra} \keyword{array} kernlab/man/kha-class.Rd0000644000175100001440000000450312117362716014626 0ustar hornikusers\name{kha-class} \docType{class} \alias{kha-class} \alias{eig,kha-method} \alias{kcall,kha-method} \alias{kernelf,kha-method} \alias{pcv,kha-method} \alias{xmatrix,kha-method} \alias{eskm,kha-method} \title{Class "kha"} \description{ The Kernel Hebbian Algorithm class} \section{Objects of class "kha"}{ Objects can be created by calls of the form \code{new("kha", ...)} or by calling the \code{kha} function.
} \section{Slots}{ \describe{ \item{\code{pcv}:}{Object of class \code{"matrix"} containing the principal component vectors } \item{\code{eig}:}{Object of class \code{"vector"} containing the corresponding normalization values} \item{\code{eskm}:}{Object of class \code{"vector"} containing the kernel sum} \item{\code{kernelf}:}{Object of class \code{"kfunction"} containing the kernel function used} \item{\code{kpar}:}{Object of class \code{"list"} containing the kernel parameters used } \item{\code{xmatrix}:}{Object of class \code{"matrix"} containing the data matrix used } \item{\code{kcall}:}{Object of class \code{"ANY"} containing the function call } \item{\code{n.action}:}{Object of class \code{"ANY"} containing the action performed on NA } } } \section{Methods}{ \describe{ \item{eig}{\code{signature(object = "kha")}: returns the normalization values } \item{kcall}{\code{signature(object = "kha")}: returns the performed call} \item{kernelf}{\code{signature(object = "kha")}: returns the used kernel function} \item{pcv}{\code{signature(object = "kha")}: returns the principal component vectors } \item{eskm}{\code{signature(object = "kha")}: returns the kernel sum} \item{predict}{\code{signature(object = "kha")}: embeds new data } \item{xmatrix}{\code{signature(object = "kha")}: returns the used data matrix } } } \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{ \code{\link{kha}}, \code{\link{ksvm-class}}, \code{\link{kcca-class}} } \examples{ # another example using the iris data(iris) test <- sample(1:50,20) kpc <- kha(~.,data=iris[-test,-5], kernel="rbfdot", kpar=list(sigma=0.2),features=2, eta=0.001, maxiter=65) #print the principal component vectors pcv(kpc) kernelf(kpc) eig(kpc) } \keyword{classes} kernlab/man/kfa-class.Rd0000644000175100001440000000371511304023134014611 0ustar hornikusers\name{kfa-class} \docType{class} \alias{kfa-class} \alias{alpha,kfa-method} \alias{alphaindex,kfa-method} \alias{kcall,kfa-method} \alias{kernelf,kfa-method} \alias{predict,kfa-method} \alias{xmatrix,kfa-method} \title{Class "kfa"} \description{The class of the object returned by the Kernel Feature Analysis \code{kfa} function} \section{Objects from the Class}{ Objects can be created by calls of the form \code{new("kfa", ...)} or by calling the \code{kfa} method. The objects contain the features along with the alpha values. } \section{Slots}{ \describe{ \item{\code{alpha}:}{Object of class \code{"matrix"} containing the alpha values } \item{\code{alphaindex}:}{Object of class \code{"vector"} containing the indexes of the selected feature} \item{\code{kernelf}:}{Object of class \code{"kfunction"} containing the kernel function used} \item{\code{xmatrix}:}{Object of class \code{"matrix"} containing the selected features} \item{\code{kcall}:}{Object of class \code{"call"} containing the \code{kfa} function call} \item{\code{terms}:}{Object of class \code{"ANY"} containing the formula terms} } } \section{Methods}{ \describe{ \item{alpha}{\code{signature(object = "kfa")}: returns the alpha values } \item{alphaindex}{\code{signature(object = "kfa")}: returns the index of the selected features} \item{kcall}{\code{signature(object = "kfa")}: returns the function call } \item{kernelf}{\code{signature(object = "kfa")}: returns the kernel function used } \item{predict}{\code{signature(object = "kfa")}: used to embed more data points to the feature base} \item{xmatrix}{\code{signature(object = "kfa")}: returns the selected features. 
} } } \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{\code{\link{kfa}}, \code{\link{kpca-class}} } \examples{ data(promotergene) f <- kfa(~.,data=promotergene) } \keyword{classes} kernlab/man/ipop-class.Rd0000644000175100001440000000320714656667250015045 0ustar hornikusers\name{ipop-class} \docType{class} \alias{ipop-class} \alias{primal,ipop-method} \alias{dual,ipop-method} \alias{how,ipop-method} \alias{primal} \alias{dual} \alias{how} \title{Class "ipop"} \description{The quadratic problem solver class} \section{Objects from the Class}{ Objects can be created by calls of the form \code{new("ipop", ...)}. or by calling the \code{ipop} function. } \section{Slots}{ \describe{ \item{\code{primal}:}{Object of class \code{"vector"} the primal solution of the problem} \item{\code{dual}:}{Object of class \code{"numeric"} the dual of the problem} \item{\code{how}:}{Object of class \code{"character"} convergence information} } } \section{Methods}{ \describe{ \item{primal}{\code{signature(object = "ipop")}: Return the primal of the problem} \item{dual}{\code{signature(object = "ipop")}: Return the dual of the problem} \item{how}{\code{signature(object = "ipop")}: Return information on convergence} } } \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{ \code{\link{ipop}} } \examples{ ## solve the Support Vector Machine optimization problem data(spam) ## sample a scaled part (300 points) of the spam data set m <- 300 set <- sample(1:dim(spam)[1],m) x <- scale(as.matrix(spam[,-58]))[set,] y <- as.integer(spam[set,58]) y[y==2] <- -1 ##set C parameter and kernel C <- 5 rbf <- rbfdot(sigma = 0.1) ## create H matrix etc. H <- kernelPol(rbf,x,,y) c <- matrix(rep(-1,m)) A <- t(y) b <- 0 l <- matrix(rep(0,m)) u <- matrix(rep(C,m)) r <- 0 sv <- ipop(c,H,A,b,l,u,r) primal(sv) dual(sv) how(sv) } \keyword{classes} kernlab/man/kernelMatrix.Rd0000644000175100001440000001254111304023134015407 0ustar hornikusers\name{kernelMatrix} \alias{kernelMatrix} \alias{kernelMult} \alias{kernelPol} \alias{kernelFast} \alias{kernelPol,kernel-method} \alias{kernelMatrix,kernel-method} \alias{kernelMult,kernel-method} \alias{kernelFast,kernel-method} \alias{kernelMatrix,rbfkernel-method} \alias{kernelMatrix,polykernel-method} \alias{kernelMatrix,vanillakernel-method} \alias{kernelMatrix,tanhkernel-method} \alias{kernelMatrix,laplacekernel-method} \alias{kernelMatrix,anovakernel-method} \alias{kernelMatrix,splinekernel-method} \alias{kernelMatrix,besselkernel-method} \alias{kernelMatrix,stringkernel-method} \alias{kernelMult,rbfkernel,ANY-method} \alias{kernelMult,splinekernel,ANY-method} \alias{kernelMult,polykernel,ANY-method} \alias{kernelMult,tanhkernel,ANY-method} \alias{kernelMult,laplacekernel,ANY-method} \alias{kernelMult,besselkernel,ANY-method} \alias{kernelMult,anovakernel,ANY-method} \alias{kernelMult,vanillakernel,ANY-method} \alias{kernelMult,character,kernelMatrix-method} \alias{kernelMult,stringkernel,ANY-method} \alias{kernelPol,rbfkernel-method} \alias{kernelPol,splinekernel-method} \alias{kernelPol,polykernel-method} \alias{kernelPol,tanhkernel-method} \alias{kernelPol,vanillakernel-method} \alias{kernelPol,anovakernel-method} \alias{kernelPol,besselkernel-method} \alias{kernelPol,laplacekernel-method} \alias{kernelPol,stringkernel-method} \alias{kernelFast,rbfkernel-method} \alias{kernelFast,splinekernel-method} \alias{kernelFast,polykernel-method} \alias{kernelFast,tanhkernel-method} \alias{kernelFast,vanillakernel-method} 
\alias{kernelFast,anovakernel-method} \alias{kernelFast,besselkernel-method} \alias{kernelFast,laplacekernel-method} \alias{kernelFast,stringkernel-method} \alias{kernelFast,splinekernel-method} \title{Kernel Matrix functions} \description{ \code{kernelMatrix} calculates the kernel matrix \eqn{K_{ij} = k(x_i,x_j)} or \eqn{K_{ij} = k(x_i,y_j)}.\cr \code{kernelPol} computes the quadratic kernel expression \eqn{H_{ij} = z_i z_j k(x_i,x_j)} or \eqn{H_{ij} = z_i k_j k(x_i,y_j)}.\cr \code{kernelMult} calculates the kernel expansion \eqn{f(x_j) = \sum_{i=1}^m z_i k(x_i,x_j)}.\cr \code{kernelFast} computes the kernel matrix, identical to \code{kernelMatrix}, except that it also requires the squared norm of the first argument as additional input, which is useful in iterative kernel matrix calculations. } \usage{ \S4method{kernelMatrix}{kernel}(kernel, x, y = NULL) \S4method{kernelPol}{kernel}(kernel, x, y = NULL, z, k = NULL) \S4method{kernelMult}{kernel}(kernel, x, y = NULL, z, blocksize = 256) \S4method{kernelFast}{kernel}(kernel, x, y, a) } \arguments{ \item{kernel}{the kernel function to be used to calculate the kernel matrix. This has to be a function of class \code{kernel}, i.e., a function generated either by one of the built-in kernel generating functions (e.g., \code{rbfdot} etc.) or a user defined function of class \code{kernel} taking two vector arguments and returning a scalar.} \item{x}{a data matrix to be used to calculate the kernel matrix, or a list of character vectors when a \code{stringkernel} is used} \item{y}{second data matrix to calculate the kernel matrix, or a list of character vectors when a \code{stringkernel} is used} \item{z}{a suitable vector or matrix} \item{k}{a suitable vector or matrix} \item{a}{the squared norm of \code{x}, e.g., \code{rowSums(x^2)}} \item{blocksize}{the kernel expansion computations are done block-wise to avoid storing the kernel matrix in memory. \code{blocksize} defines the size of the computational blocks.} } \details{ Common functions used during kernel-based computations.\cr The \code{kernel} parameter can be set to any function, of class kernel, which computes the inner product in feature space between two vector arguments. \pkg{kernlab} provides the most popular kernel functions which can be initialized by using the following functions: \itemize{ \item \code{rbfdot} Radial Basis kernel function \item \code{polydot} Polynomial kernel function \item \code{vanilladot} Linear kernel function \item \code{tanhdot} Hyperbolic tangent kernel function \item \code{laplacedot} Laplacian kernel function \item \code{besseldot} Bessel kernel function \item \code{anovadot} ANOVA RBF kernel function \item \code{splinedot} the Spline kernel } (see example.) \code{kernelFast} is mainly used in situations where columns of the kernel matrix are computed per invocation. In these cases, evaluating the norm of each row-entry over and over again would cause significant computational overhead. } \value{ \code{kernelMatrix} returns a symmetric positive semi-definite matrix.\cr \code{kernelPol} returns a matrix.\cr \code{kernelMult} usually returns a one-column matrix.
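As a hedged illustration of the \code{kernelFast} interface described above (a sketch, not taken from the package's own example set): supplying the squared row norms of the first argument should reproduce the result of \code{kernelMatrix}.

## hedged sketch: kernelFast should agree with kernelMatrix when 'a' holds
## the squared row norms of the first argument
library(kernlab)
x <- matrix(rnorm(50), 10, 5)
rbf <- rbfdot(sigma = 0.05)
K1 <- kernelMatrix(rbf, x)
K2 <- kernelFast(rbf, x, x, rowSums(x^2))
max(abs(K1 - K2))   # expected to be numerically zero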
} \author{Alexandros Karatzoglou \cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{\code{\link{rbfdot}}, \code{\link{polydot}}, \code{\link{tanhdot}}, \code{\link{vanilladot}}} \examples{ ## use the spam data data(spam) dt <- as.matrix(spam[c(10:20,3000:3010),-58]) ## initialize kernel function rbf <- rbfdot(sigma = 0.05) rbf ## calculate kernel matrix kernelMatrix(rbf, dt) yt <- as.matrix(as.integer(spam[c(10:20,3000:3010),58])) yt[yt==2] <- -1 ## calculate the quadratic kernel expression kernelPol(rbf, dt, ,yt) ## calculate the kernel expansion kernelMult(rbf, dt, ,yt) } \keyword{algebra} \keyword{array} kernlab/man/ticdata.Rd0000644000175100001440000002013411304023134014350 0ustar hornikusers\name{ticdata} \alias{ticdata} \title{The Insurance Company Data} \description{ This data set used in the CoIL 2000 Challenge contains information on customers of an insurance company. The data consists of 86 variables and includes product usage data and socio-demographic data derived from zip area codes. The data was collected to answer the following question: Can you predict who would be interested in buying a caravan insurance policy and give an explanation why ? } \usage{data(ticdata)} \format{ ticdata: Dataset to train and validate prediction models and build a description (9822 customer records). Each record consists of 86 attributes, containing sociodemographic data (attribute 1-43) and product ownership (attributes 44-86). The sociodemographic data is derived from zip codes. All customers living in areas with the same zip code have the same sociodemographic attributes. Attribute 86, \code{CARAVAN:Number of mobile home policies}, is the target variable. Data Format \tabular{rlll}{ \tab 1 \tab \code{STYPE} \tab Customer Subtype\cr \tab 2 \tab \code{MAANTHUI} \tab Number of houses 1 - 10\cr \tab 3 \tab \code{MGEMOMV} \tab Avg size household 1 - 6\cr \tab 4 \tab \code{MGEMLEEF} \tab Average age\cr \tab 5 \tab \code{MOSHOOFD} \tab Customer main type\cr \tab 6 \tab \code{MGODRK} \tab Roman catholic \cr \tab 7 \tab \code{MGODPR} \tab Protestant ... 
\cr \tab 8 \tab \code{MGODOV} \tab Other religion \cr \tab 9 \tab \code{MGODGE} \tab No religion \cr \tab 10 \tab \code{MRELGE} \tab Married \cr \tab 11 \tab \code{MRELSA} \tab Living together \cr \tab 12 \tab \code{MRELOV} \tab Other relation \cr \tab 13 \tab \code{MFALLEEN} \tab Singles \cr \tab 14 \tab \code{MFGEKIND} \tab Household without children \cr \tab 15 \tab \code{MFWEKIND} \tab Household with children \cr \tab 16 \tab \code{MOPLHOOG} \tab High level education \cr \tab 17 \tab \code{MOPLMIDD} \tab Medium level education \cr \tab 18 \tab \code{MOPLLAAG} \tab Lower level education \cr \tab 19 \tab \code{MBERHOOG} \tab High status \cr \tab 20 \tab \code{MBERZELF} \tab Entrepreneur \cr \tab 21 \tab \code{MBERBOER} \tab Farmer \cr \tab 22 \tab \code{MBERMIDD} \tab Middle management \cr \tab 23 \tab \code{MBERARBG} \tab Skilled labourers \cr \tab 24 \tab \code{MBERARBO} \tab Unskilled labourers \cr \tab 25 \tab \code{MSKA} \tab Social class A \cr \tab 26 \tab \code{MSKB1} \tab Social class B1 \cr \tab 27 \tab \code{MSKB2} \tab Social class B2 \cr \tab 28 \tab \code{MSKC} \tab Social class C \cr \tab 29 \tab \code{MSKD} \tab Social class D \cr \tab 30 \tab \code{MHHUUR} \tab Rented house \cr \tab 31 \tab \code{MHKOOP} \tab Home owners \cr \tab 32 \tab \code{MAUT1} \tab 1 car \cr \tab 33 \tab \code{MAUT2} \tab 2 cars \cr \tab 34 \tab \code{MAUT0} \tab No car \cr \tab 35 \tab \code{MZFONDS} \tab National Health Service \cr \tab 36 \tab \code{MZPART} \tab Private health insurance \cr \tab 37 \tab \code{MINKM30} \tab Income >30.000 \cr \tab 38 \tab \code{MINK3045} \tab Income 30-45.000 \cr \tab 39 \tab \code{MINK4575} \tab Income 45-75.000 \cr \tab 40 \tab \code{MINK7512} \tab Income 75-122.000 \cr \tab 41 \tab \code{MINK123M} \tab Income <123.000 \cr \tab 42 \tab \code{MINKGEM} \tab Average income \cr \tab 43 \tab \code{MKOOPKLA} \tab Purchasing power class \cr \tab 44 \tab \code{PWAPART} \tab Contribution private third party insurance \cr \tab 45 \tab \code{PWABEDR} \tab Contribution third party insurance (firms) \cr \tab 46 \tab \code{PWALAND} \tab Contribution third party insurance (agriculture) \cr \tab 47 \tab \code{PPERSAUT} \tab Contribution car policies \cr \tab 48 \tab \code{PBESAUT} \tab Contribution delivery van policies \cr \tab 49 \tab \code{PMOTSCO} \tab Contribution motorcycle/scooter policies \cr \tab 50 \tab \code{PVRAAUT} \tab Contribution lorry policies \cr \tab 51 \tab \code{PAANHANG} \tab Contribution trailer policies \cr \tab 52 \tab \code{PTRACTOR} \tab Contribution tractor policies \cr \tab 53 \tab \code{PWERKT} \tab Contribution agricultural machines policies \cr \tab 54 \tab \code{PBROM} \tab Contribution moped policies \cr \tab 55 \tab \code{PLEVEN} \tab Contribution life insurances \cr \tab 56 \tab \code{PPERSONG} \tab Contribution private accident insurance policies \cr \tab 57 \tab \code{PGEZONG} \tab Contribution family accidents insurance policies \cr \tab 58 \tab \code{PWAOREG} \tab Contribution disability insurance policies \cr \tab 59 \tab \code{PBRAND} \tab Contribution fire policies \cr \tab 60 \tab \code{PZEILPL} \tab Contribution surfboard policies \cr \tab 61 \tab \code{PPLEZIER} \tab Contribution boat policies \cr \tab 62 \tab \code{PFIETS} \tab Contribution bicycle policies \cr \tab 63 \tab \code{PINBOED} \tab Contribution property insurance policies \cr \tab 64 \tab \code{PBYSTAND} \tab Contribution social security insurance policies \cr \tab 65 \tab \code{AWAPART} \tab Number of private third party insurance 1 - 12 \cr \tab 66 \tab 
\code{AWABEDR} \tab Number of third party insurance (firms) ... \cr \tab 67 \tab \code{AWALAND} \tab Number of third party insurance (agriculture) \cr \tab 68 \tab \code{APERSAUT} \tab Number of car policies \cr \tab 69 \tab \code{ABESAUT} \tab Number of delivery van policies \cr \tab 70 \tab \code{AMOTSCO} \tab Number of motorcycle/scooter policies \cr \tab 71 \tab \code{AVRAAUT} \tab Number of lorry policies \cr \tab 72 \tab \code{AAANHANG} \tab Number of trailer policies \cr \tab 73 \tab \code{ATRACTOR} \tab Number of tractor policies \cr \tab 74 \tab \code{AWERKT} \tab Number of agricultural machines policies \cr \tab 75 \tab \code{ABROM} \tab Number of moped policies \cr \tab 76 \tab \code{ALEVEN} \tab Number of life insurances \cr \tab 77 \tab \code{APERSONG} \tab Number of private accident insurance policies \cr \tab 78 \tab \code{AGEZONG} \tab Number of family accidents insurance policies \cr \tab 79 \tab \code{AWAOREG} \tab Number of disability insurance policies \cr \tab 80 \tab \code{ABRAND} \tab Number of fire policies \cr \tab 81 \tab \code{AZEILPL} \tab Number of surfboard policies \cr \tab 82 \tab \code{APLEZIER} \tab Number of boat policies \cr \tab 83 \tab \code{AFIETS} \tab Number of bicycle policies \cr \tab 84 \tab \code{AINBOED} \tab Number of property insurance policies \cr \tab 85 \tab \code{ABYSTAND} \tab Number of social security insurance policies \cr \tab 86 \tab \code{CARAVAN} \tab Number of mobile home policies 0 - 1 \cr } Note: All the variables starting with M are zipcode variables. They give information on the distribution of that variable, e.g., Rented house, in the zipcode area of the customer. } \details{ Information about the insurance company customers consists of 86 variables and includes product usage data and socio-demographic data derived from zip area codes. The data was supplied by the Dutch data mining company Sentient Machine Research and is based on a real world business problem. The training set contains over 5000 descriptions of customers, including the information of whether or not they have a caravan insurance policy. The test set contains 4000 customers. The test and data set are merged in the ticdata set. More information about the data set and the CoIL 2000 Challenge along with publications based on the data set can be found at \url{http://www.liacs.nl/~putten/library/cc2000/}. } \source{ \itemize{ \item UCI KDD Archive:\url{http://kdd.ics.uci.edu} \item Donor: Sentient Machine Research \cr Peter van der Putten \cr Sentient Machine Research \cr Baarsjesweg 224 \cr 1058 AA Amsterdam \cr The Netherlands \cr +31 20 6186927 \cr pvdputten@hotmail.com, putten@liacs.nl } } \references{Peter van der Putten, Michel de Ruiter, Maarten van Someren \emph{CoIL Challenge 2000 Tasks and Results: Predicting and Explaining Caravan Policy Ownership}\cr \url{http://www.liacs.nl/~putten/library/cc2000/}} \keyword{datasets} kernlab/man/stringdot.Rd0000644000175100001440000000631111304023134014755 0ustar hornikusers\name{stringdot} \alias{stringdot} \title{String Kernel Functions} \description{ String kernels. } \usage{ stringdot(length = 4, lambda = 1.1, type = "spectrum", normalized = TRUE) } \arguments{ \item{length}{The length of the substrings considered} \item{lambda}{The decay factor} \item{type}{Type of string kernel, currently the following kernels are supported : \cr \code{spectrum} the kernel considers only matching substring of exactly length \eqn{n} (also know as string kernel). Each such matching substring is given a constant weight. 
The length parameter in this kernel has to be \eqn{length > 1}.\cr \code{boundrange} this kernel considers only matching substrings of length less than or equal to a given number N. This type of string kernel requires a length parameter \eqn{length > 1}.\cr \code{constant} The kernel considers all matching substrings and assigns a constant weight (e.g., 1) to each of them. This \code{constant} kernel does not require any additional parameter.\cr \code{exponential} Exponential Decay kernel where the substring weight decays as the matching substring gets longer. The kernel requires a decay factor \eqn{\lambda > 1}.\cr \code{string} essentially identical to the spectrum kernel, only computed in a more conventional way.\cr \code{fullstring} essentially identical to the boundrange kernel, only computed in a more conventional way. \cr } \item{normalized}{normalize string kernel values (default: \code{TRUE})} } \details{ The kernel generating functions are used to initialize a kernel function which calculates the dot (inner) product between two feature vectors in a Hilbert Space. These functions or their generating names can be passed as a \code{kernel} argument to almost all functions in \pkg{kernlab} (e.g., \code{ksvm}, \code{kpca} etc.). The string kernels calculate similarities between two strings (e.g., texts or sequences) by matching the common substrings in the strings. Different types of string kernels exist and are mainly distinguished by how the matching is performed, i.e., some string kernels count the exact matches of \eqn{n} characters (spectrum kernel) between the strings, while others allow gaps (mismatch kernel), etc. } \value{ Returns an S4 object of class \code{stringkernel} which extends the \code{function} class. The resulting function implements the given kernel, calculating the inner (dot) product between two character vectors. \item{kpar}{a list containing the kernel parameters (hyperparameters) used.} The kernel parameters can be accessed by the \code{kpar} function. } \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \note{ The \code{spectrum} and \code{boundrange} kernels are faster and more efficient implementations of the \code{string} and \code{fullstring} kernels, which will still be included in \code{kernlab} for the next two versions. } \seealso{ \code{\link{dots} }, \code{\link{kernelMatrix} }, \code{\link{kernelMult}}, \code{\link{kernelPol}}} \examples{ sk <- stringdot(type="string", length=5) sk } \keyword{symbolmath} kernlab/man/spam.Rd0000644000175100001440000000341414656670027013727 0ustar hornikusers\name{spam} \alias{spam} \title{Spam E-mail Database} \description{A data set collected at Hewlett-Packard Labs that classifies 4601 e-mails as spam or non-spam. In addition to this class label there are 57 variables indicating the frequency of certain words and characters in the e-mail.} \usage{data(spam)} \format{A data frame with 4601 observations and 58 variables. The first 48 variables contain the frequency of the variable name (e.g., business) in the e-mail. If the variable name starts with num (e.g., num650) then it indicates the frequency of the corresponding number (e.g., 650). The variables 49-54 indicate the frequency of the characters `;', `(', `[', `!', `$', and `#'. The variables 55-57 contain the average, longest and total run-length of capital letters. Variable 58 indicates the type of the mail and is either \code{"nonspam"} or \code{"spam"}, i.e.
unsolicited commercial e-mail.} \details{ The data set contains 2788 e-mails classified as \code{"nonspam"} and 1813 classified as \code{"spam"}. The ``spam'' concept is diverse: advertisements for products/web sites, make money fast schemes, chain letters, pornography... This collection of spam e-mails came from the collectors' postmaster and individuals who had filed spam. The collection of non-spam e-mails came from filed work and personal e-mails, and hence the word 'george' and the area code '650' are indicators of non-spam. These are useful when constructing a personalized spam filter. One would either have to blind such non-spam indicators or get a very wide collection of non-spam to generate a general purpose spam filter. } \source{ \doi{10.24432/C53G6X} } \references{ T. Hastie, R. Tibshirani, J.H. Friedman. \emph{The Elements of Statistical Learning}. Springer, 2001. } \keyword{datasets} kernlab/man/rvm-class.Rd0000644000175100001440000001100211304023134014640 0ustar hornikusers\name{rvm-class} \docType{class} \alias{rvm-class} \alias{RVindex} \alias{mlike} \alias{nvar} \alias{RVindex,rvm-method} \alias{alpha,rvm-method} \alias{cross,rvm-method} \alias{error,rvm-method} \alias{kcall,rvm-method} \alias{kernelf,rvm-method} \alias{kpar,rvm-method} \alias{lev,rvm-method} \alias{mlike,rvm-method} \alias{nvar,rvm-method} \alias{type,rvm-method} \alias{xmatrix,rvm-method} \alias{ymatrix,rvm-method} \title{Class "rvm"} \description{Relevance Vector Machine Class} \section{Objects from the Class}{ Objects can be created by calls of the form \code{new("rvm", ...)} or by calling the \code{rvm} function. } \section{Slots}{ \describe{ \item{\code{tol}:}{Object of class \code{"numeric"} contains the tolerance of the termination criterion used.} \item{\code{kernelf}:}{Object of class \code{"kfunction"} contains the kernel function used } \item{\code{kpar}:}{Object of class \code{"list"} contains the kernel hyperparameters used} \item{\code{kcall}:}{Object of class \code{"call"} contains the function call} \item{\code{type}:}{Object of class \code{"character"} contains the type of problem} \item{\code{terms}:}{Object of class \code{"ANY"} containing the terms representation of the symbolic model used (when using a formula interface)} \item{\code{xmatrix}:}{Object of class \code{"matrix"} contains the data matrix used during computation} \item{\code{ymatrix}:}{Object of class \code{"output"} contains the response matrix} \item{\code{fitted}:}{Object of class \code{"output"} with the fitted values (predict on the training set).} \item{\code{lev}:}{Object of class \code{"vector"} contains the levels of the response (in classification)} \item{\code{nclass}:}{Object of class \code{"numeric"} contains the number of classes (in classification)} \item{\code{alpha}:}{Object of class \code{"listI"} containing the resulting alpha vector} \item{\code{coef}:}{Object of class \code{"ANY"} containing the resulting model parameters} \item{\code{nvar}:}{Object of class \code{"numeric"} containing the calculated variance (in case of regression)} \item{\code{mlike}:}{Object of class \code{"numeric"} containing the computed maximum likelihood} \item{\code{RVindex}:}{Object of class \code{"vector"} containing the indices of the resulting relevance vectors } \item{\code{nRV}:}{Object of class \code{"numeric"} containing the number of relevance vectors} \item{\code{cross}:}{Object of class \code{"numeric"} containing the resulting cross validation error } \item{\code{error}:}{Object of class \code{"numeric"} containing the training
error} \item{\code{n.action}:}{Object of class \code{"ANY"} containing the action performed on NA} } } \section{Methods}{ \describe{ \item{RVindex}{\code{signature(object = "rvm")}: returns the indices of the relevance vectors } \item{alpha}{\code{signature(object = "rvm")}: returns the resulting alpha vector} \item{cross}{\code{signature(object = "rvm")}: returns the resulting cross validation error} \item{error}{\code{signature(object = "rvm")}: returns the training error } \item{fitted}{\code{signature(object = "vm")}: returns the fitted values } \item{kcall}{\code{signature(object = "rvm")}: returns the function call } \item{kernelf}{\code{signature(object = "rvm")}: returns the used kernel function } \item{kpar}{\code{signature(object = "rvm")}: returns the parameters of the kernel function} \item{lev}{\code{signature(object = "rvm")}: returns the levels of the response (in classification)} \item{mlike}{\code{signature(object = "rvm")}: returns the estimated maximum likelihood} \item{nvar}{\code{signature(object = "rvm")}: returns the calculated variance (in regression)} \item{type}{\code{signature(object = "rvm")}: returns the type of problem} \item{xmatrix}{\code{signature(object = "rvm")}: returns the data matrix used during computation} \item{ymatrix}{\code{signature(object = "rvm")}: returns the used response } } } \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{ \code{\link{rvm}}, \code{\link{ksvm-class}} } \examples{ # create data x <- seq(-20,20,0.1) y <- sin(x)/x + rnorm(401,sd=0.05) # train relevance vector machine foo <- rvm(x, y) foo alpha(foo) RVindex(foo) fitted(foo) kernelf(foo) nvar(foo) ## show slots slotNames(foo) } \keyword{classes} kernlab/man/sigest.Rd0000644000175100001440000000631712117366220014255 0ustar hornikusers\name{sigest} \alias{sigest} \alias{sigest,formula-method} \alias{sigest,matrix-method} \title{Hyperparameter estimation for the Gaussian Radial Basis kernel} \description{ Estimates a range of values for the "sigma" inverse width parameter in the Gaussian Radial Basis kernel for use with Support Vector Machines. The estimation is based on the data to be used. } \usage{ \S4method{sigest}{formula}(x, data=NULL, frac = 0.5, na.action = na.omit, scaled = TRUE) \S4method{sigest}{matrix}(x, frac = 0.5, scaled = TRUE, na.action = na.omit) } \arguments{ \item{x}{a symbolic description of the model upon which the estimation is based. When not using a formula, x is a matrix or vector containing the data} \item{data}{an optional data frame containing the variables in the model. By default the variables are taken from the environment which `sigest' is called from.} \item{frac}{Fraction of data to use for estimation. By default half of the data is used to estimate the range of the sigma hyperparameter.} \item{scaled}{A logical vector indicating the variables to be scaled. If \code{scaled} is of length 1, the value is recycled as many times as needed and all non-binary variables are scaled. Per default, data are scaled internally to zero mean and unit variance (since this is the default action in \code{ksvm} as well). The center and scale values are returned and used for later predictions. } \item{na.action}{A function to specify the action to be taken if \code{NA}s are found. The default action is \code{na.omit}, which leads to rejection of cases with missing values on any required variable. An alternative is \code{na.fail}, which causes an error if \code{NA} cases are found.
(NOTE: If given, this argument must be named.)} } \details{ \code{sigest} estimates the range of values for the sigma parameter which would return good results when used with a Support Vector Machine (\code{ksvm}). The estimation is based upon the 0.1 and 0.9 quantiles of \eqn{\|x - x'\|^2}. Basically any value in between those two bounds will produce good results. } \value{ Returns a vector of length 3 defining the range (0.1 quantile, median and 0.9 quantile) of the sigma hyperparameter. } \references{ B. Caputo, K. Sim, F. Furesjo, A. Smola, \cr \emph{Appearance-based object recognition using SVMs: which kernel should I use?}\cr Proc. of the NIPS workshop on Statistical methods for computational experiments in visual processing and computer vision, Whistler, 2002. } \author{Alexandros Karatzoglou \cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{\code{\link{ksvm}}} \examples{ ## estimate good sigma values for promotergene data(promotergene) srange <- sigest(Class~.,data = promotergene) srange s <- srange[2] s ## create test and training set ind <- sample(1:dim(promotergene)[1],20) genetrain <- promotergene[-ind, ] genetest <- promotergene[ind, ] ## train a support vector machine gene <- ksvm(Class~.,data=genetrain,kernel="rbfdot", kpar=list(sigma = s),C=50,cross=3) gene ## predict gene type on the test set promoter <- predict(gene,genetest[,-1]) ## Check results table(promoter,genetest[,1]) } \keyword{classif} \keyword{regression} kernlab/man/kqr-class.Rd0000644000175100001440000001051412117363316014654 0ustar hornikusers\name{kqr-class} \docType{class} \alias{kqr-class} \alias{alpha,kqr-method} \alias{cross,kqr-method} \alias{error,kqr-method} \alias{kcall,kqr-method} \alias{kernelf,kqr-method} \alias{kpar,kqr-method} \alias{param,kqr-method} \alias{alphaindex,kqr-method} \alias{b,kqr-method} \alias{xmatrix,kqr-method} \alias{ymatrix,kqr-method} \alias{scaling,kqr-method} \title{Class "kqr"} \description{The Kernel Quantile Regression object class} \section{Objects from the Class}{ Objects can be created by calls of the form \code{new("kqr", ...)}
or by calling the \code{kqr} function. } \section{Slots}{ \describe{ \item{\code{kernelf}:}{Object of class \code{"kfunction"} contains the kernel function used} \item{\code{kpar}:}{Object of class \code{"list"} contains the kernel parameters used } \item{\code{coef}:}{Object of class \code{"ANY"} containing the model parameters} \item{\code{param}:}{Object of class \code{"list"} contains the cost parameter C and the tau parameter used } \item{\code{kcall}:}{Object of class \code{"list"} contains the function call used } \item{\code{terms}:}{Object of class \code{"ANY"} contains the terms representation of the symbolic model used (when using a formula)} \item{\code{xmatrix}:}{Object of class \code{"input"} containing the data matrix used } \item{\code{ymatrix}:}{Object of class \code{"output"} containing the response matrix} \item{\code{fitted}:}{Object of class \code{"output"} containing the fitted values } \item{\code{alpha}:}{Object of class \code{"listI"} containing the computed alpha values } \item{\code{b}:}{Object of class \code{"numeric"} containing the offset of the model.} \item{\code{scaling}}{Object of class \code{"ANY"} containing the scaling coefficients of the data (when \code{scaled = TRUE} is used).} \item{\code{error}:}{Object of class \code{"numeric"} containing the training error} \item{\code{cross}:}{Object of class \code{"numeric"} containing the cross validation error} \item{\code{n.action}:}{Object of class \code{"ANY"} containing the action performed on NA } \item{\code{nclass}:}{Inherited from class \code{vm}, not used in kqr} \item{\code{lev}:}{Inherited from class \code{vm}, not used in kqr} \item{\code{type}:}{Inherited from class \code{vm}, not used in kqr} } } \section{Methods}{ \describe{ \item{coef}{\code{signature(object = "kqr")}: returns the coefficients (alpha) of the model} \item{alpha}{\code{signature(object = "kqr")}: returns the alpha vector (identical to \code{coef})} \item{b}{\code{signature(object = "kqr")}: returns the offset beta of the model.} \item{cross}{\code{signature(object = "kqr")}: returns the cross validation error } \item{error}{\code{signature(object = "kqr")}: returns the training error } \item{fitted}{\code{signature(object = "vm")}: returns the fitted values } \item{kcall}{\code{signature(object = "kqr")}: returns the call performed} \item{kernelf}{\code{signature(object = "kqr")}: returns the kernel function used} \item{kpar}{\code{signature(object = "kqr")}: returns the kernel parameters used} \item{param}{\code{signature(object = "kqr")}: returns the cost regularization parameter C and tau used} \item{xmatrix}{\code{signature(object = "kqr")}: returns the data matrix used} \item{ymatrix}{\code{signature(object = "kqr")}: returns the response matrix used} \item{scaling}{\code{signature(object = "kqr")}: returns the scaling coefficients of the data (when \code{scaled = TRUE} is used)} } } \author{Alexandros Karatzoglou\cr \email{alexandros.karatzoglou@ci.tuwien.ac.at}} \seealso{ \code{\link{kqr}}, \code{\link{vm-class}}, \code{\link{ksvm-class}} } \examples{ # create data x <- sort(runif(300)) y <- sin(pi*x) + rnorm(300,0,sd=exp(sin(2*pi*x))) # first calculate the median qrm <- kqr(x, y, tau = 0.5, C=0.15) # predict and plot plot(x, y) ytest <- predict(qrm, x) lines(x, ytest, col="blue") # calculate 0.9 quantile qrm <- kqr(x, y, tau = 0.9, kernel = "rbfdot", kpar = list(sigma = 10), C = 0.15) ytest <- predict(qrm, x) lines(x, ytest, col="red") # print model coefficients and other information coef(qrm) b(qrm) error(qrm)
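## (hedged addition, not part of the original example) empirical check of the
## tau = 0.9 fit: roughly 90 percent of the responses should lie on or below
## the fitted quantile curve
mean(y <= ytest)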
kernelf(qrm) } \keyword{classes} kernlab/man/specc.Rd0000644000175100001440000001421314366220464014055 0ustar hornikusers\name{specc} \alias{specc} \alias{specc,matrix-method} \alias{specc,formula-method} \alias{specc,list-method} \alias{specc,kernelMatrix-method} \alias{show,specc-method} \title{Spectral Clustering} \description{ A spectral clustering algorithm. Clustering is performed by embedding the data into the subspace of the eigenvectors of an affinity matrix. } \usage{ \S4method{specc}{formula}(x, data = NULL, na.action = na.omit, ...) \S4method{specc}{matrix}(x, centers, kernel = "rbfdot", kpar = "automatic", nystrom.red = FALSE, nystrom.sample = dim(x)[1]/6, iterations = 200, mod.sample = 0.75, na.action = na.omit, ...) \S4method{specc}{kernelMatrix}(x, centers, nystrom.red = FALSE, iterations = 200, ...) \S4method{specc}{list}(x, centers, kernel = "stringdot", kpar = list(length=4, lambda=0.5), nystrom.red = FALSE, nystrom.sample = length(x)/6, iterations = 200, mod.sample = 0.75, na.action = na.omit, ...) } \arguments{ \item{x}{the matrix of data to be clustered, or a symbolic description of the model to be fit, or a kernel Matrix of class \code{kernelMatrix}, or a list of character vectors.} \item{data}{an optional data frame containing the variables in the model. By default the variables are taken from the environment which `specc' is called from.} \item{centers}{Either the number of clusters or a set of initial cluster centers. If the first, a random set of rows in the eigenvectors matrix are chosen as the initial centers.} \item{kernel}{the kernel function used in computing the affinity matrix. This parameter can be set to any function, of class kernel, which computes a dot product between two vector arguments. kernlab provides the most popular kernel functions which can be used by setting the kernel parameter to the following strings: \itemize{ \item \code{rbfdot} Radial Basis kernel function "Gaussian" \item \code{polydot} Polynomial kernel function \item \code{vanilladot} Linear kernel function \item \code{tanhdot} Hyperbolic tangent kernel function \item \code{laplacedot} Laplacian kernel function \item \code{besseldot} Bessel kernel function \item \code{anovadot} ANOVA RBF kernel function \item \code{splinedot} Spline kernel \item \code{stringdot} String kernel } The kernel parameter can also be set to a user defined function of class kernel by passing the function name as an argument. } \item{kpar}{a character string or the list of hyper-parameters (kernel parameters). The default character string \code{"automatic"} uses a heuristic to determine a suitable value for the width parameter of the RBF kernel. The second option \code{"local"} (local scaling) uses a more advanced heuristic and sets a width parameter for every point in the data set. This is particularly useful when the data incorporates multiple scales. A list can also be used containing the parameters to be used with the kernel function. Valid parameters for existing kernels are : \itemize{ \item \code{sigma} inverse kernel width for the Radial Basis kernel function "rbfdot" and the Laplacian kernel "laplacedot". \item \code{degree, scale, offset} for the Polynomial kernel "polydot" \item \code{scale, offset} for the Hyperbolic tangent kernel function "tanhdot" \item \code{sigma, order, degree} for the Bessel kernel "besseldot". \item \code{sigma, degree} for the ANOVA kernel "anovadot". 
\item \code{length, lambda, normalized} for the "stringdot" kernel where length is the length of the strings considered, lambda the decay factor and normalized a logical parameter determining if the kernel evaluations should be normalized. } Hyper-parameters for user defined kernels can be passed through the kpar parameter as well.} \item{nystrom.red}{use nystrom method to calculate eigenvectors. When \code{TRUE} a sample of the dataset is used to calculate the eigenvalues, thus only a \eqn{n x m} matrix where \eqn{n} the sample size is stored in memory (default: \code{FALSE}} \item{nystrom.sample}{number of data points to use for estimating the eigenvalues when using the nystrom method. (default : dim(x)[1]/6)} \item{mod.sample}{proportion of data to use when estimating sigma (default: 0.75)} \item{iterations}{the maximum number of iterations allowed. } \item{na.action}{the action to perform on NA} \item{\dots}{additional parameters} } \details{ Spectral clustering works by embedding the data points of the partitioning problem into the subspace of the \eqn{k} largest eigenvectors of a normalized affinity/kernel matrix. Using a simple clustering method like \code{kmeans} on the embedded points usually leads to good performance. It can be shown that spectral clustering methods boil down to graph partitioning.\cr The data can be passed to the \code{specc} function in a \code{matrix} or a \code{data.frame}, in addition \code{specc} also supports input in the form of a kernel matrix of class \code{kernelMatrix} or as a list of character vectors where a string kernel has to be used.} \value{ An S4 object of class \code{specc} which extends the class \code{vector} containing integers indicating the cluster to which each point is allocated. The following slots contain useful information \item{centers}{A matrix of cluster centers.} \item{size}{The number of point in each cluster} \item{withinss}{The within-cluster sum of squares for each cluster} \item{kernelf}{The kernel function used} } \references{ Andrew Y. Ng, Michael I. Jordan, Yair Weiss\cr \emph{On Spectral Clustering: Analysis and an Algorithm}\cr Neural Information Processing Symposium 2001\cr \url{https://papers.neurips.cc/paper/2092-on-spectral-clustering-analysis-and-an-algorithm.pdf} } \author{Alexandros Karatzoglou \cr \email{alexandros.karatzoglou@ci.tuwien.ac.at} } \seealso{\code{\link{kkmeans}}, \code{\link{kpca}}, \code{\link{kcca}} } \examples{ ## Cluster the spirals data set. data(spirals) sc <- specc(spirals, centers=2) sc centers(sc) size(sc) withinss(sc) plot(spirals, col=sc) } \keyword{cluster} kernlab/DESCRIPTION0000644000175100001440000000272614656675315013445 0ustar hornikusersPackage: kernlab Version: 0.9-33 Title: Kernel-Based Machine Learning Lab Authors@R: c(person("Alexandros", "Karatzoglou", role = c("aut", "cre"), email = "alexandros.karatzoglou@gmail.com"), person("Alex", "Smola", role = "aut"), person("Kurt", "Hornik", role = "aut", email = "Kurt.Hornik@R-project.org", comment = c(ORCID = "0000-0003-4198-9911")), person("National ICT Australia (NICTA)", role = "cph"), person(c("Michael", "A."), "Maniscalco", role = c("ctb", "cph")), person(c("Choon", "Hui"), "Teo", role = "ctb")) Description: Kernel-based machine learning methods for classification, regression, clustering, novelty detection, quantile regression and dimensionality reduction. Among other methods 'kernlab' includes Support Vector Machines, Spectral Clustering, Kernel PCA, Gaussian Processes and a QP solver. 
Depends: R (>= 2.10) Imports: methods, stats, grDevices, graphics LazyLoad: Yes License: GPL-2 NeedsCompilation: yes Packaged: 2024-08-13 14:40:27 UTC; hornik Author: Alexandros Karatzoglou [aut, cre], Alex Smola [aut], Kurt Hornik [aut] (<https://orcid.org/0000-0003-4198-9911>), National ICT Australia (NICTA) [cph], Michael A. Maniscalco [ctb, cph], Choon Hui Teo [ctb] Maintainer: Alexandros Karatzoglou <alexandros.karatzoglou@gmail.com> Repository: CRAN Date/Publication: 2024-08-13 15:25:01 UTC
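As a hedged quick-start sketch (an illustration only, not part of the packaged sources): the methods listed in the Description above can be exercised directly from R once the package is installed; the subset size, kernel parameters and seed below are arbitrary choices.

## hedged quick-start: train an SVM classifier on the bundled spam data
library(kernlab)
data(spam)
set.seed(1)                              # arbitrary seed for reproducibility
idx  <- sample(nrow(spam), 500)          # small random training subset
filt <- ksvm(type ~ ., data = spam[idx, ], kernel = "rbfdot",
             kpar = list(sigma = 0.05), C = 5, cross = 3)
filt                                     # shows training and cross-validation error
predict(filt, spam[-idx, ][1:10, ])      # classify a few held-out messages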