amap/0000755000176200001440000000000014705445647011210 5ustar liggesusersamap/tests/0000755000176200001440000000000014705246411012336 5ustar liggesusersamap/tests/amap-test.R0000644000176200001440000000340214705246411014353 0ustar liggesusers library(amap) set.seed(1234) data(USArrests) METHODS <- c("euclidean", "maximum", "manhattan", "canberra", "binary","pearson","correlation","spearman","kendall", "abspearson","abscorrelation") METHODSLINKS <- c("ward", "single", "complete", "average", "mcquitty", "median", "centroid","centroid2","ward.D2") for (mymethod in METHODS) { d = Dist(USArrests, method = mymethod) k = Kmeans(USArrests, centers = 4, method = mymethod) print(k) for (mylink in METHODSLINKS) { cat(mylink) cat(mymethod) hc <- hcluster(USArrests,link = mylink, method = mymethod, nbproc=4) print(hc) } } COMMONDIST <- c("euclidean", "maximum", "manhattan", "canberra", "binary") COMMONLINKS <- c( "single", "complete", "average", "mcquitty", "median", "centroid","ward.D2") for (mymethod in COMMONDIST) { d = dist(USArrests,method = mymethod) d2 = Dist(USArrests,method = mymethod) cat("test",mymethod) stopifnot(floor(d * 1000) == floor(d2*1000)) } d = dist(USArrests) for(mylink in COMMONLINKS){ cat("test",mylink) h = hclust(d, method = mylink) hc = hcluster(USArrests,link = mylink) stopifnot(h$order == hc$order) stopifnot(floor(h$height * 1000) == floor(hc$height*1000)) } hc <- hcluster(USArrests, nbproc=1) print(hc) KERNELS = c("gaussien", "quartic", "triweight", "epanechikov" , "cosinus", "uniform") for(myKernel in KERNELS) { myacp = acprob(USArrests, kernel = myKernel); print(myacp) } d <-2 * matrix(c(9, 8, 5, 7, 7, 2 , 8, 9, 2, 5, 1, 7 , 5, 2, 9, 8, 7, 1 , 7, 5, 8, 9, 3, 2 , 7, 1, 7, 3, 9, 6 , 2, 7, 1, 2, 6, 9),ncol=6,byrow=TRUE) - 9 pop(d) amap/tests/amap-test.Rout.save0000644000176200001440000012472314705252641016054 0ustar liggesusers R Under development (unstable) (2024-10-18 r87246) -- "Unsuffered Consequences" Copyright (C) 2024 The R Foundation for Statistical Computing Platform: x86_64-pc-linux-gnu R is free software and comes with ABSOLUTELY NO WARRANTY. You are welcome to redistribute it under certain conditions. Type 'license()' or 'licence()' for distribution details. R is a collaborative project with many contributors. Type 'contributors()' for more information and 'citation()' on how to cite R or R packages in publications. Type 'demo()' for some demos, 'help()' for on-line help, or 'help.start()' for an HTML browser interface to help. Type 'q()' to quit R. 
> > library(amap) > > set.seed(1234) > > data(USArrests) > > METHODS <- c("euclidean", "maximum", "manhattan", "canberra", + "binary","pearson","correlation","spearman","kendall", + "abspearson","abscorrelation") > METHODSLINKS <- c("ward", "single", "complete", "average", "mcquitty", + "median", "centroid","centroid2","ward.D2") > > > > for (mymethod in METHODS) { + d = Dist(USArrests, method = mymethod) + + k = Kmeans(USArrests, centers = 4, method = mymethod) + print(k) + for (mylink in METHODSLINKS) + { + cat(mylink) + cat(mymethod) + hc <- hcluster(USArrests,link = mylink, method = mymethod, nbproc=4) + print(hc) + } + } K-means clustering with 4 clusters of sizes 10, 20, 6, 14 Cluster means: Murder Assault UrbanPop Rape 1 11.540000 253.1000 70.30000 29.26000 2 4.270000 87.5500 59.75000 14.39000 3 12.266667 305.0000 65.00000 26.90000 4 8.214286 173.2857 70.64286 22.84286 Clustering vector: Alabama Alaska Arizona Arkansas California 1 1 3 4 1 Colorado Connecticut Delaware Florida Georgia 4 2 1 3 4 Hawaii Idaho Illinois Indiana Iowa 2 2 1 2 2 Kansas Kentucky Louisiana Maine Maryland 2 2 1 2 3 Massachusetts Michigan Minnesota Mississippi Missouri 4 1 2 1 4 Montana Nebraska Nevada New Hampshire New Jersey 2 2 1 2 4 New Mexico New York North Carolina North Dakota Ohio 3 1 3 2 2 Oklahoma Oregon Pennsylvania Rhode Island South Carolina 4 4 2 4 3 South Dakota Tennessee Texas Utah Vermont 2 4 4 2 2 Virginia Washington West Virginia Wisconsin Wyoming 4 4 2 2 4 Within cluster sum of squares by cluster: [1] 257.4792 1248.4420 988.9111 318.6684 Available components: [1] "cluster" "centers" "withinss" "size" wardeuclidean Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : ward Distance : euclidean Number of objects: 50 singleeuclidean Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : single Distance : euclidean Number of objects: 50 completeeuclidean Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : complete Distance : euclidean Number of objects: 50 averageeuclidean Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : average Distance : euclidean Number of objects: 50 mcquittyeuclidean Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : mcquitty Distance : euclidean Number of objects: 50 medianeuclidean Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : median Distance : euclidean Number of objects: 50 centroideuclidean Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : centroid Distance : euclidean Number of objects: 50 centroid2euclidean Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : centroid2 Distance : euclidean Number of objects: 50 ward.D2euclidean Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : ward.D2 Distance : euclidean Number of objects: 50 K-means clustering with 4 clusters of sizes 20, 14, 4, 12 Cluster means: Murder Assault UrbanPop Rape 1 4.270000 87.5500 59.75000 14.39000 2 8.214286 173.2857 70.64286 22.84286 3 11.950000 316.5000 68.00000 26.70000 4 11.766667 257.9167 68.41667 28.93333 Clustering vector: Alabama Alaska Arizona Arkansas California 4 4 3 2 4 Colorado Connecticut Delaware Florida Georgia 2 1 4 3 2 Hawaii Idaho Illinois Indiana Iowa 1 1 4 1 1 Kansas Kentucky Louisiana Maine Maryland 1 1 4 1 3 
Massachusetts Michigan Minnesota Mississippi Missouri 2 4 1 4 2 Montana Nebraska Nevada New Hampshire New Jersey 1 1 4 1 2 New Mexico New York North Carolina North Dakota Ohio 4 4 3 1 1 Oklahoma Oregon Pennsylvania Rhode Island South Carolina 2 2 1 2 4 South Dakota Tennessee Texas Utah Vermont 1 2 2 1 1 Virginia Washington West Virginia Wisconsin Wyoming 2 2 1 1 2 Within cluster sum of squares by cluster: [1] 1193.7025 150.9388 529.0000 444.5069 Available components: [1] "cluster" "centers" "withinss" "size" wardmaximum Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : ward Distance : maximum Number of objects: 50 singlemaximum Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : single Distance : maximum Number of objects: 50 completemaximum Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : complete Distance : maximum Number of objects: 50 averagemaximum Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : average Distance : maximum Number of objects: 50 mcquittymaximum Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : mcquitty Distance : maximum Number of objects: 50 medianmaximum Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : median Distance : maximum Number of objects: 50 centroidmaximum Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : centroid Distance : maximum Number of objects: 50 centroid2maximum Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : centroid2 Distance : maximum Number of objects: 50 ward.D2maximum Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : ward.D2 Distance : maximum Number of objects: 50 K-means clustering with 4 clusters of sizes 10, 14, 16, 10 Cluster means: Murder Assault UrbanPop Rape 1 5.590000 112.4000 65.60000 17.27000 2 8.214286 173.2857 70.64286 22.84286 3 11.812500 272.5625 68.31250 28.37500 4 2.950000 62.7000 53.90000 11.51000 Clustering vector: Alabama Alaska Arizona Arkansas California 3 3 3 2 3 Colorado Connecticut Delaware Florida Georgia 2 1 3 3 2 Hawaii Idaho Illinois Indiana Iowa 4 1 3 1 4 Kansas Kentucky Louisiana Maine Maryland 1 1 3 4 3 Massachusetts Michigan Minnesota Mississippi Missouri 2 3 4 3 2 Montana Nebraska Nevada New Hampshire New Jersey 1 1 3 4 2 New Mexico New York North Carolina North Dakota Ohio 3 3 3 4 1 Oklahoma Oregon Pennsylvania Rhode Island South Carolina 2 2 1 2 3 South Dakota Tennessee Texas Utah Vermont 4 2 2 1 4 Virginia Washington West Virginia Wisconsin Wyoming 2 2 4 4 2 Within cluster sum of squares by cluster: [1] 901.2004 997.6573 1239.9202 522.5796 Available components: [1] "cluster" "centers" "withinss" "size" wardmanhattan Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : ward Distance : manhattan Number of objects: 50 singlemanhattan Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : single Distance : manhattan Number of objects: 50 completemanhattan Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : complete Distance : manhattan Number of objects: 50 averagemanhattan Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : average Distance : manhattan Number of objects: 
50 mcquittymanhattan Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : mcquitty Distance : manhattan Number of objects: 50 medianmanhattan Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : median Distance : manhattan Number of objects: 50 centroidmanhattan Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : centroid Distance : manhattan Number of objects: 50 centroid2manhattan Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : centroid2 Distance : manhattan Number of objects: 50 ward.D2manhattan Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : ward.D2 Distance : manhattan Number of objects: 50 K-means clustering with 4 clusters of sizes 12, 9, 18, 11 Cluster means: Murder Assault UrbanPop Rape 1 2.791667 82.08333 56.16667 10.70833 2 14.077778 251.11111 58.55556 22.78889 3 6.255556 136.00000 68.61111 19.43333 4 10.600000 258.63636 76.45455 34.38182 Clustering vector: Alabama Alaska Arizona Arkansas California 2 4 4 3 4 Colorado Connecticut Delaware Florida Georgia 4 1 3 4 2 Hawaii Idaho Illinois Indiana Iowa 3 1 4 3 1 Kansas Kentucky Louisiana Maine Maryland 3 3 2 1 2 Massachusetts Michigan Minnesota Mississippi Missouri 3 4 1 2 4 Montana Nebraska Nevada New Hampshire New Jersey 3 3 4 1 3 New Mexico New York North Carolina North Dakota Ohio 4 4 2 1 3 Oklahoma Oregon Pennsylvania Rhode Island South Carolina 3 3 3 1 2 South Dakota Tennessee Texas Utah Vermont 1 2 2 3 1 Virginia Washington West Virginia Wisconsin Wyoming 3 3 1 1 3 Within cluster sum of squares by cluster: [1] 0.11262798 0.13928224 0.09135594 0.05188101 Available components: [1] "cluster" "centers" "withinss" "size" wardcanberra Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : ward Distance : canberra Number of objects: 50 singlecanberra Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : single Distance : canberra Number of objects: 50 completecanberra Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : complete Distance : canberra Number of objects: 50 averagecanberra Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : average Distance : canberra Number of objects: 50 mcquittycanberra Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : mcquitty Distance : canberra Number of objects: 50 mediancanberra Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : median Distance : canberra Number of objects: 50 centroidcanberra Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : centroid Distance : canberra Number of objects: 50 centroid2canberra Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : centroid2 Distance : canberra Number of objects: 50 ward.D2canberra Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : ward.D2 Distance : canberra Number of objects: 50 K-means clustering with 4 clusters of sizes 50, 0, 0, 0 Cluster means: Murder Assault UrbanPop Rape 1 7.788 170.76 65.54 21.232 2 NaN NaN NaN NaN 3 NaN NaN NaN NaN 4 NaN NaN NaN NaN Clustering vector: Alabama Alaska Arizona Arkansas California 1 1 1 1 1 Colorado Connecticut Delaware Florida Georgia 1 1 1 
1 1 Hawaii Idaho Illinois Indiana Iowa 1 1 1 1 1 Kansas Kentucky Louisiana Maine Maryland 1 1 1 1 1 Massachusetts Michigan Minnesota Mississippi Missouri 1 1 1 1 1 Montana Nebraska Nevada New Hampshire New Jersey 1 1 1 1 1 New Mexico New York North Carolina North Dakota Ohio 1 1 1 1 1 Oklahoma Oregon Pennsylvania Rhode Island South Carolina 1 1 1 1 1 South Dakota Tennessee Texas Utah Vermont 1 1 1 1 1 Virginia Washington West Virginia Wisconsin Wyoming 1 1 1 1 1 Within cluster sum of squares by cluster: [1] 0 0 0 0 Available components: [1] "cluster" "centers" "withinss" "size" wardbinary Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : ward Distance : binary Number of objects: 50 singlebinary Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : single Distance : binary Number of objects: 50 completebinary Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : complete Distance : binary Number of objects: 50 averagebinary Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : average Distance : binary Number of objects: 50 mcquittybinary Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : mcquitty Distance : binary Number of objects: 50 medianbinary Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : median Distance : binary Number of objects: 50 centroidbinary Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : centroid Distance : binary Number of objects: 50 centroid2binary Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : centroid2 Distance : binary Number of objects: 50 ward.D2binary Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : ward.D2 Distance : binary Number of objects: 50 K-means clustering with 4 clusters of sizes 18, 17, 3, 12 Cluster means: Murder Assault UrbanPop Rape 1 7.461111 170.27778 68.33333 23.23333 2 4.082353 91.41176 66.23529 14.93529 3 14.500000 291.66667 45.66667 18.56667 4 11.850000 253.66667 65.33333 27.81667 Clustering vector: Alabama Alaska Arizona Arkansas California 4 4 4 4 1 Colorado Connecticut Delaware Florida Georgia 1 2 4 4 4 Hawaii Idaho Illinois Indiana Iowa 2 1 1 2 2 Kansas Kentucky Louisiana Maine Maryland 2 1 4 2 4 Massachusetts Michigan Minnesota Mississippi Missouri 2 4 2 3 1 Montana Nebraska Nevada New Hampshire New Jersey 1 2 1 2 2 New Mexico New York North Carolina North Dakota Ohio 4 1 3 2 2 Oklahoma Oregon Pennsylvania Rhode Island South Carolina 1 1 2 1 3 South Dakota Tennessee Texas Utah Vermont 1 4 1 2 2 Virginia Washington West Virginia Wisconsin Wyoming 1 1 1 2 1 Within cluster sum of squares by cluster: [1] 8.690482e-07 1.216409e-03 6.242583e-08 4.003559e-06 Available components: [1] "cluster" "centers" "withinss" "size" wardpearson Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : ward Distance : pearson Number of objects: 50 singlepearson Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : single Distance : pearson Number of objects: 50 completepearson Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : complete Distance : pearson Number of objects: 50 averagepearson Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) 
Cluster method : average Distance : pearson Number of objects: 50 mcquittypearson Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : mcquitty Distance : pearson Number of objects: 50 medianpearson Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : median Distance : pearson Number of objects: 50 centroidpearson Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : centroid Distance : pearson Number of objects: 50 centroid2pearson Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : centroid2 Distance : pearson Number of objects: 50 ward.D2pearson Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : ward.D2 Distance : pearson Number of objects: 50 K-means clustering with 4 clusters of sizes 14, 19, 11, 6 Cluster means: Murder Assault UrbanPop Rape 1 9.414286 218.92857 74.57143 28.82143 2 5.026316 115.78947 64.47368 15.96842 3 13.309091 267.63636 57.81818 25.51818 4 2.616667 54.83333 62.00000 12.33333 Clustering vector: Alabama Alaska Arizona Arkansas California 3 3 1 3 1 Colorado Connecticut Delaware Florida Georgia 1 2 1 3 3 Hawaii Idaho Illinois Indiana Iowa 4 2 1 2 4 Kansas Kentucky Louisiana Maine Maryland 2 2 3 2 3 Massachusetts Michigan Minnesota Mississippi Missouri 2 1 4 3 1 Montana Nebraska Nevada New Hampshire New Jersey 2 2 1 4 2 New Mexico New York North Carolina North Dakota Ohio 3 1 3 4 2 Oklahoma Oregon Pennsylvania Rhode Island South Carolina 2 1 2 2 3 South Dakota Tennessee Texas Utah Vermont 2 1 1 2 2 Virginia Washington West Virginia Wisconsin Wyoming 1 2 2 4 1 Within cluster sum of squares by cluster: [1] 3.390500e-06 2.347051e-05 9.996126e-07 8.814207e-06 Available components: [1] "cluster" "centers" "withinss" "size" wardcorrelation Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : ward Distance : correlation Number of objects: 50 singlecorrelation Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : single Distance : correlation Number of objects: 50 completecorrelation Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : complete Distance : correlation Number of objects: 50 averagecorrelation Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : average Distance : correlation Number of objects: 50 mcquittycorrelation Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : mcquitty Distance : correlation Number of objects: 50 mediancorrelation Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : median Distance : correlation Number of objects: 50 centroidcorrelation Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : centroid Distance : correlation Number of objects: 50 centroid2correlation Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : centroid2 Distance : correlation Number of objects: 50 ward.D2correlation Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : ward.D2 Distance : correlation Number of objects: 50 K-means clustering with 4 clusters of sizes 50, 0, 0, 0 Cluster means: Murder Assault UrbanPop Rape 1 7.788 170.76 65.54 21.232 2 NaN NaN NaN NaN 3 NaN NaN NaN NaN 4 NaN NaN NaN NaN Clustering vector: 
Alabama Alaska Arizona Arkansas California 1 1 1 1 1 Colorado Connecticut Delaware Florida Georgia 1 1 1 1 1 Hawaii Idaho Illinois Indiana Iowa 1 1 1 1 1 Kansas Kentucky Louisiana Maine Maryland 1 1 1 1 1 Massachusetts Michigan Minnesota Mississippi Missouri 1 1 1 1 1 Montana Nebraska Nevada New Hampshire New Jersey 1 1 1 1 1 New Mexico New York North Carolina North Dakota Ohio 1 1 1 1 1 Oklahoma Oregon Pennsylvania Rhode Island South Carolina 1 1 1 1 1 South Dakota Tennessee Texas Utah Vermont 1 1 1 1 1 Virginia Washington West Virginia Wisconsin Wyoming 1 1 1 1 1 Within cluster sum of squares by cluster: [1] 0 0 0 0 Available components: [1] "cluster" "centers" "withinss" "size" wardspearman Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : ward Distance : spearman Number of objects: 50 singlespearman Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : single Distance : spearman Number of objects: 50 completespearman Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : complete Distance : spearman Number of objects: 50 averagespearman Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : average Distance : spearman Number of objects: 50 mcquittyspearman Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : mcquitty Distance : spearman Number of objects: 50 medianspearman Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : median Distance : spearman Number of objects: 50 centroidspearman Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : centroid Distance : spearman Number of objects: 50 centroid2spearman Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : centroid2 Distance : spearman Number of objects: 50 ward.D2spearman Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : ward.D2 Distance : spearman Number of objects: 50 K-means clustering with 4 clusters of sizes 50, 0, 0, 0 Cluster means: Murder Assault UrbanPop Rape 1 7.788 170.76 65.54 21.232 2 NaN NaN NaN NaN 3 NaN NaN NaN NaN 4 NaN NaN NaN NaN Clustering vector: Alabama Alaska Arizona Arkansas California 1 1 1 1 1 Colorado Connecticut Delaware Florida Georgia 1 1 1 1 1 Hawaii Idaho Illinois Indiana Iowa 1 1 1 1 1 Kansas Kentucky Louisiana Maine Maryland 1 1 1 1 1 Massachusetts Michigan Minnesota Mississippi Missouri 1 1 1 1 1 Montana Nebraska Nevada New Hampshire New Jersey 1 1 1 1 1 New Mexico New York North Carolina North Dakota Ohio 1 1 1 1 1 Oklahoma Oregon Pennsylvania Rhode Island South Carolina 1 1 1 1 1 South Dakota Tennessee Texas Utah Vermont 1 1 1 1 1 Virginia Washington West Virginia Wisconsin Wyoming 1 1 1 1 1 Within cluster sum of squares by cluster: [1] 0 0 0 0 Available components: [1] "cluster" "centers" "withinss" "size" wardkendall Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : ward Distance : kendall Number of objects: 50 singlekendall Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : single Distance : kendall Number of objects: 50 completekendall Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : complete Distance : kendall Number of objects: 50 averagekendall Call: hcluster(x = USArrests, method = mymethod, link = 
mylink, nbproc = 4) Cluster method : average Distance : kendall Number of objects: 50 mcquittykendall Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : mcquitty Distance : kendall Number of objects: 50 mediankendall Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : median Distance : kendall Number of objects: 50 centroidkendall Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : centroid Distance : kendall Number of objects: 50 centroid2kendall Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : centroid2 Distance : kendall Number of objects: 50 ward.D2kendall Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : ward.D2 Distance : kendall Number of objects: 50 K-means clustering with 4 clusters of sizes 6, 12, 13, 19 Cluster means: Murder Assault UrbanPop Rape 1 2.616667 54.83333 62.00000 12.33333 2 4.791667 109.25000 66.58333 16.05833 3 6.753846 149.84615 64.92308 20.66923 4 12.021053 260.52632 66.42105 27.69474 Clustering vector: Alabama Alaska Arizona Arkansas California 4 4 4 4 4 Colorado Connecticut Delaware Florida Georgia 3 2 4 4 4 Hawaii Idaho Illinois Indiana Iowa 1 3 4 2 1 Kansas Kentucky Louisiana Maine Maryland 2 3 4 2 4 Massachusetts Michigan Minnesota Mississippi Missouri 2 4 1 4 3 Montana Nebraska Nevada New Hampshire New Jersey 3 2 4 1 2 New Mexico New York North Carolina North Dakota Ohio 4 4 4 1 2 Oklahoma Oregon Pennsylvania Rhode Island South Carolina 3 3 2 3 4 South Dakota Tennessee Texas Utah Vermont 2 4 3 2 2 Virginia Washington West Virginia Wisconsin Wyoming 3 3 3 1 3 Within cluster sum of squares by cluster: [1] 1.765606e-06 9.227279e-06 3.860314e-06 5.013077e-06 Available components: [1] "cluster" "centers" "withinss" "size" wardabspearson Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : ward Distance : abspearson Number of objects: 50 singleabspearson Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : single Distance : abspearson Number of objects: 50 completeabspearson Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : complete Distance : abspearson Number of objects: 50 averageabspearson Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : average Distance : abspearson Number of objects: 50 mcquittyabspearson Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : mcquitty Distance : abspearson Number of objects: 50 medianabspearson Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : median Distance : abspearson Number of objects: 50 centroidabspearson Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : centroid Distance : abspearson Number of objects: 50 centroid2abspearson Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : centroid2 Distance : abspearson Number of objects: 50 ward.D2abspearson Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : ward.D2 Distance : abspearson Number of objects: 50 K-means clustering with 4 clusters of sizes 20, 20, 6, 4 Cluster means: Murder Assault UrbanPop Rape 1 5.020000 117.95000 64.60 16.63500 2 10.990000 235.60000 71.40 27.73500 3 2.616667 54.83333 62.00 
12.33333 4 13.375000 284.50000 46.25 25.05000 Clustering vector: Alabama Alaska Arizona Arkansas California 2 4 2 2 2 Colorado Connecticut Delaware Florida Georgia 2 1 2 2 2 Hawaii Idaho Illinois Indiana Iowa 3 1 2 1 3 Kansas Kentucky Louisiana Maine Maryland 1 1 2 1 2 Massachusetts Michigan Minnesota Mississippi Missouri 1 2 3 4 2 Montana Nebraska Nevada New Hampshire New Jersey 1 1 2 3 1 New Mexico New York North Carolina North Dakota Ohio 2 2 4 3 1 Oklahoma Oregon Pennsylvania Rhode Island South Carolina 1 1 1 1 4 South Dakota Tennessee Texas Utah Vermont 1 2 2 1 1 Virginia Washington West Virginia Wisconsin Wyoming 2 1 1 3 2 Within cluster sum of squares by cluster: [1] 1.718668e-05 1.726487e-05 8.814207e-06 1.552004e-08 Available components: [1] "cluster" "centers" "withinss" "size" wardabscorrelation Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : ward Distance : abscorrelation Number of objects: 50 singleabscorrelation Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : single Distance : abscorrelation Number of objects: 50 completeabscorrelation Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : complete Distance : abscorrelation Number of objects: 50 averageabscorrelation Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : average Distance : abscorrelation Number of objects: 50 mcquittyabscorrelation Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : mcquitty Distance : abscorrelation Number of objects: 50 medianabscorrelation Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : median Distance : abscorrelation Number of objects: 50 centroidabscorrelation Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : centroid Distance : abscorrelation Number of objects: 50 centroid2abscorrelation Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : centroid2 Distance : abscorrelation Number of objects: 50 ward.D2abscorrelation Call: hcluster(x = USArrests, method = mymethod, link = mylink, nbproc = 4) Cluster method : ward.D2 Distance : abscorrelation Number of objects: 50 Warning messages: 1: did not converge in 10 iterations 2: empty cluster: try a better set of initial centers 3: empty cluster: try a better set of initial centers 4: empty cluster: try a better set of initial centers 5: did not converge in 10 iterations > > COMMONDIST <- c("euclidean", "maximum", "manhattan", "canberra", + "binary") > COMMONLINKS <- c( "single", "complete", "average", "mcquitty", + "median", "centroid","ward.D2") > > for (mymethod in COMMONDIST) { + d = dist(USArrests,method = mymethod) + d2 = Dist(USArrests,method = mymethod) + cat("test",mymethod) + stopifnot(floor(d * 1000) == floor(d2*1000)) + } test euclideantest maximumtest manhattantest canberratest binary> d = dist(USArrests) > for(mylink in COMMONLINKS){ + cat("test",mylink) + h = hclust(d, method = mylink) + hc = hcluster(USArrests,link = mylink) + stopifnot(h$order == hc$order) + stopifnot(floor(h$height * 1000) == floor(hc$height*1000)) + } test singletest completetest averagetest mcquittytest mediantest centroidtest ward.D2> > hc <- hcluster(USArrests, nbproc=1) > print(hc) Call: hcluster(x = USArrests, nbproc = 1) Cluster method : complete Distance : euclidean Number of objects: 50 > > > > > > > KERNELS = 
c("gaussien", "quartic", "triweight", "epanechikov" , + "cosinus", "uniform") > > for(myKernel in KERNELS) { + myacp = acprob(USArrests, kernel = myKernel); + print(myacp) + } Standard deviations: Comp 1 Comp 2 Comp 3 Comp 4 1.5533005 1.0238885 0.5964794 0.4279277 Eigen values: [1] 3.2817172 1.0106413 0.4610868 0.3944977 Standard deviations: Comp 1 Comp 2 Comp 3 Comp 4 1.5533005 1.0238885 0.5964794 0.4279277 Eigen values: [1] 3.2817172 1.0106413 0.4610868 0.3944977 Standard deviations: Comp 1 Comp 2 Comp 3 Comp 4 1.5533005 1.0238885 0.5964794 0.4279277 Eigen values: [1] 3.2817172 1.0106413 0.4610868 0.3944977 Standard deviations: Comp 1 Comp 2 Comp 3 Comp 4 1.5533005 1.0238885 0.5964794 0.4279277 Eigen values: [1] 3.2817172 1.0106413 0.4610868 0.3944977 Standard deviations: Comp 1 Comp 2 Comp 3 Comp 4 1.5533005 1.0238885 0.5964794 0.4279277 Eigen values: [1] 3.2817172 1.0106413 0.4610868 0.3944977 Standard deviations: Comp 1 Comp 2 Comp 3 Comp 4 1.5533005 1.0238885 0.5964794 0.4279277 Eigen values: [1] 3.2817172 1.0106413 0.4610868 0.3944977 > > > > d <-2 * matrix(c(9, 8, 5, 7, 7, 2 + , 8, 9, 2, 5, 1, 7 + , 5, 2, 9, 8, 7, 1 + , 7, 5, 8, 9, 3, 2 + , 7, 1, 7, 3, 9, 6 + , 2, 7, 1, 2, 6, 9),ncol=6,byrow=TRUE) - 9 > > pop(d) Upper bound (half cost) : 39 Final partition (half cost) : 25 Number of classes : 2 Forward move count : 124 Backward move count : 124 Constraints evaluations count : 248 Number of local optima : 2 Individual class 1 1 1 2 2 2 3 3 1 4 4 1 5 5 1 6 6 2 > > > > > > > proc.time() user system elapsed 0.214 0.153 0.310 amap/INDEX0000644000176200001440000000130713573775110011774 0ustar liggesusersacp Principal component analysis acpgen Generalised principal component analysis acprob Robust principal component analysis afc Correspondance factorial analysis. burt Compute burt table from a factor dataframe. diss Compute a dissimilarity matrix Dist Distance Matrix Computation hcluster Hierarchical Clustering Kmeans K-Means Clustering lubisch Dataset Lubischew plot.acp Graphics for Principal component Analysis pop Optimal Partition (classification). 
varrob Robust variance amap/MD50000644000176200001440000000615014705445647011522 0ustar liggesusers6425782242cf3e5a5186b98d2d4b5a99 *Changes ef9fd839132915745c21e162ff3fd2ae *DESCRIPTION 9b87025f779cbf59c4e611807d010a61 *INDEX da0fdd33fcfc8e9eb6b6989d369a0b9e *NAMESPACE 57c0c20655ec771b183c3d07eeaf7d1d *R/Kmeans.R 0e4a6fb503a0b193302823e0d6cf6039 *R/acp.R cecc24888d4ff760b46111ed6a0a3fa7 *R/acprob.R 96c149c0e68331643ff09c695c7c931e *R/burt.R 840308485d4b9e57ff7807872333f374 *R/diss.R 81e76ff58e0445d57ea446ed81e30284 *R/dist.R 544f160395d17813a870dbaaefabd77a *R/hcluster.R e4aa724df4d3b29253d77c03e293fe97 *R/pop.R 8a9f723061e683f36295df485ea418ef *build/vignette.rds 364a43413a10df0ffe981f35aeddf37f *cleanup 120b6938b77c8c83bc1db2397f25a4b6 *data/lubisch.tab.gz de314ddf28c269b98ad9ff281ed6f860 *demo/00Index ffec123e2067afe88ddc15000edd6b43 *demo/amap.R e0891bc3830d1d85ac186263199e8a55 *exec/amap.php edac769262ab42d201162cc0d6763a7f *inst/doc/amap.R 1f6194916110fe560cbdd55fc1b13ae2 *inst/doc/amap.Rnw 61277c12f44b384a82f2f4d1aa4845bd *inst/doc/amap.pdf 1f6194916110fe560cbdd55fc1b13ae2 *inst/doc/vignettes/amap.Rnw b83d1692381d779844bebf1efe5baa71 *inst/doc/vignettes/amap.bib fd3bdd9a56b153e377e4e5fc174a4e74 *inst/po/fr/LC_MESSAGES/R-amap.mo fd3bdd9a56b153e377e4e5fc174a4e74 *inst/po/po/fr/LC_MESSAGES/R-amap.mo 2bdfe04f448a6e40b56c52b16bad6640 *man/Kmeans.Rd 55be4ea146d4b6c51a24b7ad8ae1d3e1 *man/acp.Rd 8109de851720ec7e7522e0b0bb0a6dd6 *man/acpgen.Rd 2a01ad1055086a3e0e5893ede2fb3ecf *man/acprob.Rd 6ecbc9c9dd34aa56ef036d1c763c3b2c *man/afc.Rd 2b90d07a26ba121b889dcf626c4c8448 *man/burt.Rd 2d9bc5468cedc3cd71e6c58cdb33f2a2 *man/diss.Rd ee59491deb0e22db9ba5ee708c526de0 *man/dist.Rd 4d99e7504606d44fa8dd4d146d664fbd *man/hcluster.Rd 30f468cb29bf1a593ff62bae61869b63 *man/lubisch.Rd 92f1f0cb76b1a8f87ef1f8c3df08e9d1 *man/plot.acp.Rd bf8a862477275a5dd74f95a838688400 *man/pop.Rd 56f9bc4e2dafcb8a7034abd91fcab9a1 *man/varrob.Rd 66c5107ea07d7c2b3ab59d157981d765 *po/R-amap.pot 84e38e6af2183dfef78f10fc4a27f4a2 *po/R-fr.po 1d80eada7a6d4ca459e23dd400949c8d *src/Makevars.in 0c5458498382c5874124768037f816cd *src/acprob.cpp 3539435e96dbe34cb6f4ac1166c64c89 *src/acprob.h 3ed81d1457abe794f4fd0990070961b8 *src/burt.c 3089a2ec0f8e806c906f8de689a9886e *src/diss.c 142de036d003824ac4eb1bce747f568f *src/distance.cpp 0bbf14c8b648739a6f53b5c8e0146327 *src/distance.h 53aa5c429e2a24a27a24fc8bb9f6b212 *src/distance_T.h e03ebb3a6b559318b9fd3145b03dd528 *src/distance_T.h.h 37aa2ae8966ad80a389c519d22de0fcb *src/hclust.cpp 2b7cecb079de38b10f21504f64a04fb7 *src/hclust.h f121bed16ec5c37ce6708fdfaee48e74 *src/hclust_T.h 15dcdf7a6eef6170ce05013788a02421 *src/hclust_T.h.h d1d5ebf1ebaaeff126b32a8a43c4f2a3 *src/init.c d5cb712175f94b8943fe835c9c88547e *src/kmeans.cpp 863538393c686c98fdc9e9c297bf4513 *src/kmeans.h 2143f5b1097f085d61b4f634046a1b04 *src/matrice.cpp 09b93bef66758feb9c3f8ba302cce57e *src/matrice.h b826a1a326c5d0c562f4fe6e35f0390f *src/matriceTriangle.h 39a55f456b8b382be81892274b370a90 *src/pop.f 2873c427796ee55eec78c45d0044dc82 *src/smartPtr.h acd15ee3b15bf48c2ef14ce3c261f6a8 *tests/amap-test.R 0ddaa5cd50906e8c9819b3ef8f3bcda7 *tests/amap-test.Rout.save 1f6194916110fe560cbdd55fc1b13ae2 *vignettes/amap.Rnw cf3e3391344ea95f1711a432da3f4fe4 *vignettes/amap.bib amap/po/0000755000176200001440000000000012444343274011615 5ustar liggesusersamap/po/R-fr.po0000644000176200001440000000430210336326041012751 0ustar liggesusers## Generer le .pot: library(tools); xgettext2pot("amap") ## msgfmt amap/po/R-fr.po -o 
amap/inst/po/fr/LC_MESSAGES/R-amap.mo ## [emacs] mettre en utf-8: C-x f ## ou `M-x prefer-coding-system' puis reouverture du fichier. ## ou encore `C-x c CODING-SYSTEM M-x revert-buffer ' msgid "" msgstr "" "Project-Id-Version: amap 0.6\n" "Report-Msgid-Bugs-To: antoinelucas@libertysurf.fr\n" "POT-Creation-Date: 2005-11-12 11:50\n" "PO-Revision-Date: 2005-11-12 12:50+2\n" "Last-Translator: Antoine Lucas \n" "Language-Team: francais \n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" msgid "did not converge in" msgstr "Pas de convergence en" msgid "empty cluster: try a better set of initial centers" msgstr "Cluster vide: trouvez une meilleure initialisation des centres" msgid "invalid distance method" msgstr "choix de distance invalide" msgid "ambiguous distance method" msgstr "choix de distance ambigüe" msgid "'centers' must be a number or a matrix" msgstr "'centers' doit être un nombre ou une matrice" msgid "more cluster centers than distinct data points." msgstr "trop de centres: plus que de points distinct" msgid "initial centers are not distinct" msgstr "les centres ne sont pas distincts" msgid "more cluster centers than data points" msgstr "trop de centres: plus que de points" msgid "must have same number of columns in 'x' and 'centers'" msgstr "il doit y avoir le même nombre de colonnes dans 'x' et 'centers'" msgid "Cannot allocate memory" msgstr "Problème lors de l'allocation de mémoire " msgid "Error" msgstr "Erreur" msgid "invalid clustering method" msgstr "choix du lien de classification invalide" msgid "ambiguous clustering method" msgstr "choix du lien de classification ambigüe" msgid "'iter.max' must be positive" msgstr "'iter.max' doit être positive" msgid "Invalid length of members" msgstr "members a une longueure invalide" msgid "Missing values in distance Matrix" msgstr "Valeurs manquantes dans la matrice de distance" msgid "You need to install Biobase package to use this object" msgstr "Vous devriez installer le package BioBase pour utiliser cet objet" amap/po/R-amap.pot0000644000176200001440000000252210336326041013446 0ustar liggesusersmsgid "" msgstr "" "Project-Id-Version: amap 0.6\n" "Report-Msgid-Bugs-To: antoinelucas@libertysurf.fr\n" "POT-Creation-Date: 2005-11-12 11:50\n" "PO-Revision-Date: 2005-11-12 12:50+2\n" "Last-Translator: Antoine Lucas \n" "Language-Team: francais \n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" msgid "did not converge in" msgstr "" msgid "iterations" msgstr "" msgid "empty cluster: try a better set of initial centers" msgstr "" msgid "invalid distance method" msgstr "" msgid "ambiguous distance method" msgstr "" msgid "'centers' must be a number or a matrix" msgstr "" msgid "more cluster centers than distinct data points." 
msgstr "" msgid "initial centers are not distinct" msgstr "" msgid "more cluster centers than data points" msgstr "" msgid "'iter.max' must be positive" msgstr "" msgid "must have same number of columns in 'x' and 'centers'" msgstr "" msgid "Cannot allocate memory" msgstr "" msgid "Error" msgstr "" msgid "invalid clustering method" msgstr "" msgid "ambiguous clustering method" msgstr "" msgid "Must have n >= 2 objects to cluster" msgstr "" msgid "Invalid length of members" msgstr "" msgid "Missing values in distance Matrix" msgstr "" msgid "You need to install Biobase package to use this object" msgstr "" amap/Changes0000644000176200001440000000654112444342270012473 0ustar liggesusersWeb Dec 17 2014 * fix C++ warns. Sat Mar 01 2014 * Fix C++ code. Wed Feb 26 2014 * remove DUP=FALSE in .C call. Tue Dec 03 2013 * remove [again] printf in C++ code * fix memory leaks * add non-regression tests. Wed Sep 11 2013 * remove printf in C/c++ code Sun Nov 06 2011: 0.8-6 * fix distances pearson and correlation (problem with vector 0,0,0...) * add new distances: absolute pearson, absolute correlation * add new link "centroid2" Fri Oct 23 2009:0.8-4 * minor bug fix Mon Sep 31 2007: 0.8-2 * add kendall distance. * change afc man page. Mon Sep 31 2007: 0.8-1 * add parameter labels to plot.acp * use matrix instead of data.frame in internal pca data. * remove #include when built with windows. * k-means hclust and dist use common functions for distance computation * k-means possible with spearman distance Sat Sep 29 2007: 0.8 * clustering possible with float precision; use of templates * suppression of duplicated code(used for no thread / multiple thread) * use of more safe parameter to thead function * correction on man page: only ascii characters Mon Jan 22 2007: 0.7-3 * dimnames(x)[[1]] <- NULL seems not to be allowed anymore. -> changes in acp.R acprob.R Wed Jan 21 2007: 0.7-2 * correct package dependences * minor changes in documentation Tue Jan 20 2006: 0.7-1 * new function pop (optimal partition) * new function diss (dissimilarity) Sat Nov 12 2005: 0.6 * new function burt, matlogic & afc * split functions concerning interactions with other software in package ctc * internationalization (mainly: french). Mon Oct 17 2005: 0.5-1 * minor change on doc (reference to acp function deleted) * minor change to R Kmeans function * load Biobase on startup when available. Mon Oct 03 2005: 0.5 * Delete function acp (recomand use of prcomp and princomp) * add support of class "exprSet" in hcluster, dist and kmeans Tue Sep 27 2005 * merge packages ctc and amap Tue Jul 12 2005: 0.4-4 * demo file * vignette Fri Jul 08 2005: 0.4-3 * example amap.php for a web-application. Wed Mar 16 2005: 0.4-2 * man flag \preformated too recent... changed to \code Tue Mar 15 2005: 0.4-1 * code without C++ style comments. Mon Mar 14 2005: 0.4 * Implement kmeans from stats package and add support for several distances. Wed Mar 09 2005 * New distance "spearman" based on rank. Mon Feb 28 2005: 0.3 * Parallelization for functions dist & hcluster * function dist move to Dist. Tue Dec 07 2004: 0.2-7 * Changes on doc, links and references Tue Dec 07 2004: 0.2-7 * .Fists.lib change to load package stats instead of mva. Wed Jun 23 2004: 0.2-6 * deleting functions (redondance with mva) hclust,plot.hclust,plclust Thu Feb 6 2004: 0.2-5 * function zzz.R modified * Change minor "warning" on manual fixed. Thu Feb 5 2004: 0.2-4 * Change e-mail and date in DESCRIPTION file. 
Fri Jul 11 2003: 0.2-2 * bug fixed in varrob * fixed documentation varrob.Rd Wed May 28 2003 * hcluster check if distance matrix has missing values * new function acprob for robust pca. Mon Feb 17 2003: 0.2-1 * minor bug fixed for acp & acpgen Fri Feb 14 2003: 0.2 * The package now handles memory allocation errors * Dimnames change for loadings and scores in pca * U renamed as varrob * \link{}s in the docs refer to correct packages amap/R/0000755000176200001440000000000014705226227011400 5ustar liggesusersamap/R/diss.R0000644000176200001440000000057513275577225012504 0ustar liggesusersdiss <- function (x, w=rep(1,ncol(x))) { n <- nrow(x) p <- ncol(x) if(length(w) != p) { warning("Error in dimention on either w or x") return(NULL) } res <- .C(C_diss, as.integer(x), double(n*n), n,p, as.double(w), PACKAGE="amap") matrix(res[[2]],n) } amap/R/acp.R0000644000176200001440000000754313275362715012303 0ustar liggesusers#------------------------------------------------------- # # Created : 29/10/02 # Last Modified : Time-stamp: <2018-05-11 20:49:49 (antoine)> # # Description : Principal component analysis # # Author : Antoine Lucas # lucas@toulouse.inra.fr # # Licence : GPL # #------------------------------------------------------- acp <- function(x,center=TRUE,reduce=TRUE,wI=rep(1,nrow(x)),wV=rep(1,ncol(x))) { x <- as.matrix(x) if(center) x <- t(t(x) - as.vector(( wI %*% x)/sum(wI))) ## x <- scale(x ,center = center, scale = FALSE) if (reduce) x <- apply(x,2,function(u) { u/stats::sd(u)}) ## Di.X'.Dv.X EIG <- eigen( (t(x)* wI) %*% (x * wV) ,symmetric=FALSE) V <- EIG$vector # ou bien: V=svd(x)$v EIG$values <- Re(EIG$values) V <- V %*% diag(sign(EIG$values)) val <- sqrt(abs(EIG$values)) scores <- x %*% V V <- as.matrix(Re(V)) scores <- as.matrix(Re(scores)) dimnames(V)[[2]] <- paste("Comp",1:dim(x)[2]) if(!is.null( dimnames(x)[[2]] )) dimnames(V)[[1]] <- dimnames(x)[[2]] if(!is.null(dimnames(x)[[1]])) dimnames(scores)[[1]] <- dimnames(x)[[1]] dimnames(scores)[[2]] <- paste("Comp",1:dim(x)[2]) ##cmpr <- x %*% (sqrt(wV) * as.matrix(V)) sdev <- apply(scores,2,stats::sd) res <- list(eig=val,sdev=sdev,scores=scores,loadings=V) class(res) <- "acp" res } pca <- acp print.acp <- function(x, ...) { #cat("Call:\n"); dput(x$call) cat("\nStandard deviations:\n") print(x$sdev, ...) cat("\nEigen values:\n") print(x$eig, ...) invisible(x) } # # SECTION GRAPHIQUES # plot.acp <- function(x,i=1,j=2,text=TRUE,label='Composants',col='darkblue',main='Individuals PCA',variables=TRUE,individual.label=NULL,...) { U <- x$scores XLAB <- paste(label,i) YLAB <- paste(label,j) plot.new() plot.window(range(U[,i]),range(U[,j])) axis(1,labels=TRUE,tick=TRUE) axis(2,labels=TRUE,tick=TRUE) box() title(xlab=XLAB,ylab=YLAB,main=main) if(text){ if(is.null(individual.label)) { individual.label=dimnames(x$scores)[[1]] } text(labels=individual.label,U[,i],U[,j],col=col,...) } else{ points(U[,i],U[,j],col=col,...) } if(variables) { par(new=TRUE) biplot.acp(x,circle=FALSE,label="",main="") } } biplot.acp <- function(x,i=1,j=2,label='Composants',col='darkblue',length=0.1,main='Variables PCA',circle=TRUE,...) 
{ U <- x$loadings LIM <- c(-1.3,1.3) XLAB <- paste(label,i) YLAB <- paste(label,j) # PLOT DES AXES graphics::plot.new() graphics::plot.window(LIM,LIM) graphics::axis(1,labels=TRUE,tick=TRUE) graphics::axis(2,labels=TRUE,tick=TRUE) graphics::box() graphics::title(xlab=XLAB,ylab=YLAB,main=main) # PLOT DU NOM DES FLECHES graphics::text(x=U[,i]*1.3,y=U[,j]*1.3,labels=dimnames(U)[[1]],col=col) # PLOT DES FLECHES graphics::arrows(0,0,U[,i],U[,j],length = length,col=col) # CERCLE if(circle) { t2p <- 2 * pi * seq(0,1, length = 200) xc <- cos(t2p) yc <- sin(t2p) graphics::lines(xc,yc,col='darkblue') } } # Graphique: Eboulis des valeurs propres plot2 <- function(x,pourcent=FALSE,eigen=TRUE,label='Comp.',col='lightgrey',main='Scree Graph',ylab='Eigen Values') { if(eigen){ U <- x$eig } else { U <- x$sdev } if(pourcent){U <- U/sum(U) } n <- length(U) names <- paste(label,1:n) graphics::barplot(U,main=main,ylab=ylab,col=col,names.arg=names) } plotAll <- function(x) { graphics::par(mfrow=c(2,2)) plot2(x) ## boxplot(as.list(as.data.frame(x$cmpr))) graphics::plot(x,variables=FALSE) stats::biplot(x) graphics::plot(x,main="Both",variables=TRUE) } amap/R/hcluster.R0000644000176200001440000000513014705226227013353 0ustar liggesusers## Hierarchical clustering ## ## Created : 18/11/02 ## Last Modified : Time-stamp: <2018-05-12 16:48:19 (antoine)> ## ## This function is a "mix" of function dist and function hclust. ## ## Author : Antoine Lucas ## hclusterpar <- hcluster <- function (x, method = "euclidean", diag = FALSE, upper = FALSE, link = "complete", members = NULL, nbproc = 2, doubleprecision = TRUE) { if(inherits(x, "exprSet")) x <- Biobase::exprs(x) ## take from dist if (!is.na(pmatch(method, "euclidian"))) method <- "euclidean" METHODS <- c("euclidean", "maximum", "manhattan", "canberra", "binary","pearson","correlation","spearman","kendall", "abspearson","abscorrelation") method <- pmatch(method, METHODS) if (is.na(method)) stop("invalid distance method") if (method == -1) stop("ambiguous distance method") N <- nrow(x <- as.matrix(x)) #take from hclust METHODSLINKS <- c("ward", "single", "complete", "average", "mcquitty", "median", "centroid","centroid2", "ward.D2") link <- pmatch(link, METHODSLINKS) if (is.na(link)) stop("invalid clustering method") if (link == -1) stop("ambiguous clustering method") if (N < 2) stop("Must have n >= 2 objects to cluster") if (is.null(members)) members <- rep(1, N) if (length(members) != N) stop("Invalid length of members") n <- N precision <- 1 if(doubleprecision) precision <- 2 hcl <- .C(C_hcluster, x = as.double(x), nr = as.integer(n), nc = as.integer(ncol(x)), diag = as.integer(FALSE), method = as.integer(method), iopt = as.integer(link), ia = integer(n), ib = integer(n), order = integer(n), crit = double(n), members = as.double(members), nbprocess = as.integer(nbproc), precision = as.integer(precision), res = as.integer (1), NAOK=TRUE, PACKAGE= "amap") if(hcl$res == 2) stop("Cannot allocate memory") if(hcl$res == 3) stop("Missing values in distance Matrix") if(hcl$res == 1) stop("Error") tree <- list(merge = cbind(hcl$ia[1:(N - 1)], hcl$ib[1:(N - 1)]), height = hcl$crit[1:(N - 1)], order = hcl$order, labels = dimnames(x)[[1]], method = METHODSLINKS[link], call = match.call(), dist.method = METHODS[method] ) class(tree) <- "hclust" tree } amap/R/Kmeans.R0000644000176200001440000000604113574500477012747 0ustar liggesusersKmeans <- function(x, centers, iter.max = 10, nstart = 1, method = "euclidean") { dokmeans <- function() { Z <- .C(C_kmeans_Lloyd2, as.double(x), 
as.integer(m), as.integer(ncol(x)), centers = as.double(centers), as.integer(k), c1 = integer(m), iter = as.integer(iter.max), nc = integer(k), wss = double(k), method=as.integer(method), PACKAGE="amap") if (Z$iter > iter.max) warning("did not converge in ", iter.max, " iterations", call. = FALSE) if (any(Z$nc == 0)) warning("empty cluster: try a better set of initial centers", call. = FALSE) Z } METHODS <- c("euclidean", "maximum", "manhattan", "canberra", "binary","pearson","correlation","spearman","kendall","abspearson","abscorrelation") method <- pmatch(method, METHODS) if (is.na(method)) stop("invalid distance method") if (method == -1) stop("ambiguous distance method") if(inherits(x,"exprSet")) { requireNamespace("Biobase") x <- Biobase::exprs(x) } x <- as.matrix(x) m <- nrow(x) if(missing(centers)) stop("'centers' must be a number or a matrix") if(length(centers) == 1) { k <- centers ## we need to avoid duplicates here if(nstart == 1) centers <- x[sample(1 : m, k), , drop = FALSE] if(nstart >= 2 || any(duplicated(centers))) { cn <- unique(x) mm <- nrow(cn) if(mm < k) stop("more cluster centers than distinct data points.") centers <- cn[sample(1:mm, k), , drop=FALSE] } } else { centers <- as.matrix(centers) if(any(duplicated(centers))) stop("initial centers are not distinct") cn <- NULL k <- nrow(centers) if(m < k) stop("more cluster centers than data points") } if(iter.max < 1) stop("'iter.max' must be positive") if(ncol(x) != ncol(centers)) stop("must have same number of columns in 'x' and 'centers'") Z <- .C(C_kmeans_Lloyd2, as.double(x), as.integer(m), as.integer(ncol(x)), centers = as.double(centers), as.integer(k), c1 = integer(m), iter = as.integer(iter.max), nc = integer(k), wss = double(k), method=as.integer(method), PACKAGE="amap") if(Z$iter > iter.max) warning("did not converge in ", iter.max, " iterations", call.=FALSE) if(any(Z$nc == 0)) warning("empty cluster: try a better set of initial centers", call.=FALSE) if(nstart >= 2 && !is.null(cn)) { best <- sum(Z$wss) for(i in 2:nstart) { centers <- cn[sample(1:mm, k), , drop=FALSE] ZZ <- dokmeans() if((z <- sum(ZZ$wss)) < best) { Z <- ZZ best <- z } } } centers <- matrix(Z$centers, k) dimnames(centers) <- list(1:k, dimnames(x)[[2]]) cluster <- Z$c1 if(!is.null(rn <- rownames(x))) names(cluster) <- rn out <- list(cluster = cluster, centers = centers, withinss = Z$wss, size = Z$nc) class(out) <- "kmeans" out } amap/R/dist.R0000644000176200001440000000226613574500460012471 0ustar liggesusersDist <- function(x, method="euclidean", nbproc = 2, diag=FALSE, upper=FALSE) { if(inherits(x,"exprSet")) { requireNamespace("Biobase") x <- Biobase::exprs(x) } ## account for possible spellings of euclidean if(!is.na(pmatch(method, "euclidian"))) method <- "euclidean" METHODS <- c("euclidean", "maximum", "manhattan", "canberra", "binary","pearson","correlation","spearman","kendall","abspearson","abscorrelation") method <- pmatch(method, METHODS) if(is.na(method)) stop("invalid distance method") if(method == -1) stop("ambiguous distance method") N <- nrow(x <- as.matrix(x)) d <- .C(C_R_distance, x = as.double(x), nr= N, nc= ncol(x), d = double(N*(N - 1)/2), diag = as.integer(FALSE), method= as.integer(method), nbproc = as.integer(nbproc), ierr=as.integer(0), NAOK=TRUE, PACKAGE="amap" )$d attr(d, "Size") <- N attr(d, "Labels") <- dimnames(x)[[1]] attr(d, "Diag") <- diag attr(d, "Upper") <- upper attr(d, "method") <- METHODS[method] attr(d, "call") <- match.call() class(d) <- "dist" return(d) } 
amap/R/pop.R0000644000176200001440000000300613275642420012316 0ustar liggesuserspop <- function(x,fmbvr=TRUE,triabs=TRUE,allsol=TRUE) { couts <- as.matrix(x) n <- as.integer(nrow(couts)) ysave <- as.integer(matrix(0,nrow=n,ncol=n)) renum <- y <- ysave bornth <- z0 <- z <- as.double(0) res <- .Fortran(C_pnkfmb, as.integer(fmbvr), as.integer(triabs), as.integer(allsol), n = as.integer(n), couts = as.double(couts), ysave = ysave, y = ysave, renum= renum, bornth = bornth, nbcl0 = as.integer(0), z0 = z0 , nbcl = as.integer(0), z = z, nbemp = as.integer(0), nbdep = as.integer(0), nbsol = as.integer(0), nap = as.integer(0), PACKAGE="amap") class(res) <- "pop" return(res) } print.pop <- function(x,...) { i <- 1:x$n classes <- x$y[i+(i-1)*x$n] cat("Upper bound (half cost) :",x$bornth,'\n') cat("Final partition (half cost) :",x$z,'\n') cat("Number of classes :",x$nbcl,"\n") cat("Forward move count :",x$nbemp,"\n") cat("Backward move count :",x$nbdep,"\n") cat("Constraints evaluations count :",x$nap,"\n") cat("Number of local optima :",x$nbsol,"\n\n") print(data.frame(Individual=i,class=classes)) } amap/R/acprob.R0000644000176200001440000001120113275576707013000 0ustar liggesusers#------------------------------------------------------- # # Created : 30/10/02 # Last Modified : Time-stamp: <2018-05-12 16:44:23 (antoine)> # # Description : Robust principal component analysis # # Author : Antoine Lucas # lucas@toulouse.inra.fr # # Licence : GPL # #------------------------------------------------------- K <- function(u,kernel="gaussien") { switch(kernel, gaussien = (2*pi)^(-1/2) * exp(-u^2/2), quartic = 15/16 * (1-u^2)^2 * (abs(u)<1), triweight = 35/32 * (1-u^2)^3 * (abs(u)<1), epanechikov = 3/4 * (1-u^2) * (abs(u)<1), cosinus = pi/4 * cos (u*pi/2) * (abs(u)<1), uniform = 1/2 * (abs(u)<1), ) } # Variance locale W <- function(x,h,D=NULL,kernel="gaussien") { x <- as.matrix(x) n <- dim(x)[1] p <- dim(x)[2] if (is.null(D)) { D <- diag(1,p) } x <- as.vector(x) D <- as.vector(D) kernel <- substr(kernel,1,1) VarLoc <- .C( C_W, as.double(x), as.double(h), as.double(D), as.integer(n), as.integer(p), as.character(kernel), res=double(p*p), result = as.integer(1), PACKAGE= "amap" ) if(VarLoc$result == 2) stop("Cannot allocate memory") if(VarLoc$result == 1) stop("Error") matrix(VarLoc$res,p) } varrob <- function(x,h,D=NULL,kernel="gaussien") { x <- as.matrix(x) x <- scale(x, center = TRUE, scale = FALSE) n <- dim(x)[1] p <- dim(x)[2] if (is.null(D)) { D <- diag(1,p) } x <- as.vector(x) D <- as.vector(D) kernel <- substr(kernel,1,1) Calcul <- .C( C_VarRob, as.double(x), as.double(h), as.double(D), as.integer(n), as.integer(p), as.character(kernel), res=double(p*p), result = as.integer(1), PACKAGE= "amap") if(Calcul$result == 2) stop("Cannot allocate memory") if(Calcul$result == 1) stop("Error") S <- matrix(Calcul$res,p) Sinv <- solve(S) solve ( Sinv - D / h) } acpgen <- function(x,h1,h2,center=TRUE,reduce=TRUE,kernel="gaussien") { # CENTRONS, ET REDUISONS x <- as.matrix(x) x <- scale(x ,center = center, scale = FALSE) if (reduce == TRUE) { x <- apply(x,2,function(u) { u/stats::sd(u)}) } # ESTIMATION DE W et VarRob n <- dim(x)[1] VarInv <- solve(stats::var(x)*(n-1)/n) # solve= inverser leU <- varrob(x,h1,D=VarInv,kernel=kernel) leW <- W(x,h2,D=VarInv,kernel=kernel) Winv <- solve(leW) # anal. 
spec de Var.W^-1 : EIG <- eigen(leU %*% Winv) V <- EIG$vector #EIG <- eigen( x %*% Winv %*% t(x) ) #U <- EIG$vector #n <- dim(x)[1] #p <- dim(x)[2] #S <- diag(Re(EIG$values),n) #S1 <- diag(Re(1/EIG$values),n) #S <- sqrt(S[,1:p]) #S1 <- sqrt(S1[,1:p]) #V <- t(x)%*% U%*% S1 # X=U.S.V' -> V = X' U S^-1 # AFFICHAGE DES RESULTATS scores <- x %*% Winv %*% V V <- as.matrix(V) scores <- as.matrix(scores) dimnames(V)[[2]] <- paste("Comp",1:dim(x)[2]) if(!is.null( dimnames(x)[[2]] )) dimnames(V)[[1]] <- dimnames(x)[[2]] if(!is.null( dimnames(x)[[1]] )) dimnames(scores)[[1]] <- dimnames(x)[[1]] dimnames(scores)[[2]] <- paste("Comp",1:dim(x)[2]) eig <- sqrt(EIG$values) sdev <- apply(scores,2,stats::sd) res <- list(eig=eig,sdev=sdev,scores=scores,loadings=V) class(res) <- "acp" res } acprob <- function(x,h=1,center=TRUE,reduce=TRUE,kernel="gaussien") { x <- as.matrix(x) x <- scale(x ,center = center, scale = FALSE) if (reduce == TRUE) { x <- apply(x,2,function(u) { u/stats::sd(u)}) } EIG <- eigen( varrob(x,h),symmetric=TRUE) V <- EIG$vector # ou bien: V=svd(x)$v val <- sqrt(EIG$values) scores <- x %*% V V <- as.matrix(V) scores <- as.matrix(scores) dimnames(V)[[2]] <- paste("Comp",1:dim(x)[2]) if(!is.null( dimnames(x)[[2]] )) dimnames(V)[[1]] <- dimnames(x)[[2]] if(!is.null( dimnames(x)[[1]] )) dimnames(scores)[[1]] <- dimnames(x)[[1]] dimnames(scores)[[2]] <- paste("Comp",1:dim(x)[2]) sdev <- apply(scores,2,stats::sd) res <- list(eig=val,sdev=sdev,scores=scores,loadings=V) class(res) <- "acp" res } amap/R/burt.R0000644000176200001440000000211113275576723012504 0ustar liggesusers ## matlogic <- function(x) { n=nrow(x) m=ncol(x) nblev <- apply(x,2,function(u){nlevels(as.factor(u))}) ## Keep names.... rownames <- rownames(x) colnames <- colnames(x) i <- 0 colnamesnew <- c(apply(x,2,function(u){ i<<- i+1;paste(colnames[i],levels(as.factor(u)),sep=".")}),recursive=TRUE) k <- sum(nblev) res <- as.integer(matrix(0,ncol=k,nrow=n)) x <- c(x,recursive=TRUE) result <- .C(C_matind, as.integer(nblev), as.integer(x), res=res, as.integer(n), as.integer(m), as.integer(k), PACKAGE="amap") result <- matrix(result$res,ncol=k) rownames(result) <- rownames colnames(result) <- colnamesnew result } burt <- function(x) { ind <- matlogic(x) t(ind) %*% ind } ## x: table de burt, ou table afc <- function (x) { f <- as.matrix(x/sum(x)) fi <- apply(f,1,sum) fj <- apply(f,2,sum) ## Dr = diag(fi) ## Dc = diag(fj) f <- (1/fi) * t(t(f)/fj) acp(f,wI=fi,wV=fj,center=TRUE,reduce=FALSE) } amap/exec/0000755000176200001440000000000013466255342012126 5ustar liggesusersamap/exec/amap.php0000644000176200001440000001437013466255271013563 0ustar liggesusers
AMAP Demo Web application V0-3

Upload your data (tab-delimited text file)

File Description:
A first column contains row names
A first line contains column headers
Separator

$MAX_FILE) exit("File size: $taillefichier; max allowed: $MAX_FILE"); } /* ================================== */ /* We write R code in file $ID/prog.R */ /* ================================== */ $fp=fopen("$ID/prog.R",'w'); fwrite($fp,"library(amap)\n"); fwrite($fp,"library(ctc)\n"); /* Read Data */ fwrite($fp,"data <- read.delim('$ID/data.txt',header=$HEADER,row.names=$ROWNAMES,sep='$SEP') \n" ); fwrite($fp,"data <- scale(data) \n"); fwrite($fp,"pca <- acprob(data,h=1) \n"); /* Number of columns to keep ( number of column to keep 90 % of * variance)*/ fwrite($fp,"cumVarNorm <- cumsum(pca\$sdev ^ 2) / sum(pca\$sdev ^ 2)\n"); fwrite($fp,"ncol <- max(which( cumVarNorm< 0.9)) +1 \n"); fwrite($fp,"ncol \n"); fwrite($fp,"data <- pca\$scores[,1:ncol] \n"); /* Hierarchical clustering */ fwrite($fp,"hr0 <- hclusterpar(data) \n"); fwrite($fp,"hc0 <- hclusterpar(t(data)) \n"); fwrite($fp,"hr <- as.dendrogram(hr0) \n"); fwrite($fp,"hc <- as.dendrogram(hc0) \n"); /* A pdf file */ fwrite($fp,"pdf(file='$ID/Rplots.pdf') \n"); fwrite($fp,"plot(pca) \n"); fwrite($fp,"biplot(pca) \n"); fwrite($fp,"plot2.acp(pca) \n"); fwrite($fp,"heatmap(as.matrix(data),Colv=hc,Rowv=hr) \n"); fwrite($fp,"dev.off() \n"); /* Some png images */ fwrite($fp,"bitmap(file='$ID/pcaplot.png') \n"); fwrite($fp,"plot(pca) \n"); fwrite($fp,"dev.off() \n"); fwrite($fp,"bitmap(file='$ID/pcabiplot.png') \n"); fwrite($fp,"biplot(pca) \n"); fwrite($fp,"dev.off() \n"); fwrite($fp,"bitmap(file='$ID/pcaplot2.png') \n"); fwrite($fp,"plot2.acp(pca) \n"); fwrite($fp,"dev.off() \n"); fwrite($fp,"bitmap(file='$ID/heatmap.png') \n"); fwrite($fp,"heatmap(as.matrix(data),Colv=hc,Rowv=hr) \n"); fwrite($fp,"dev.off() \n"); fwrite($fp,"r2atr(hc0,file='$ID/cluster.atr') \n"); fwrite($fp,"r2gtr(hr0,file='$ID/cluster.gtr') \n"); fwrite($fp,"r2cdt(hr0,hc0,data ,file='$ID/cluster.cdt') \n"); fclose($fp); /* ===================== */ /* Send command (R batch)*/ /* ===================== */ system("$R_BIN --no-save < $ID/prog.R > $ID/prog.R.out 2> $ID/prog.R.warnings"); /* ===================================== */ /* We create html page including results */ /* ===================================== */ echo "

Amap Demo results

"; echo "A pdf file

"; echo "

"; echo "

"; echo "

"; echo "

"; echo "3 files for Freeview"; echo " or Treeview: "; echo "cdt "; echo "atr "; echo "gtr "; echo "

"; /* Signature */ echo "

These results were produced by amap. Code used: prog.R, Output: prog.R.out, Warnings: prog.R.warnings."; } ?>
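## For reference, the R pipeline that the PHP script above writes to prog.R,
## condensed into a standalone sketch. The steps and output file names come from
## the script itself; 'data.txt' is a placeholder for an uploaded tab-delimited file.
library(amap)
library(ctc)
data <- read.delim("data.txt", header = TRUE, row.names = 1, sep = "\t")
data <- scale(data)
pca  <- acprob(data, h = 1)                        # robust PCA
cumVarNorm <- cumsum(pca$sdev^2) / sum(pca$sdev^2)
ncol <- max(which(cumVarNorm < 0.9)) + 1           # keep ~90% of the variance
data <- pca$scores[, 1:ncol]
hr0 <- hclusterpar(data)                           # cluster rows
hc0 <- hclusterpar(t(data))                        # cluster columns
heatmap(as.matrix(data), Rowv = as.dendrogram(hr0), Colv = as.dendrogram(hc0))
r2atr(hc0, file = "cluster.atr")                   # ctc exports for Freeview/Treeview
r2gtr(hr0, file = "cluster.gtr")
r2cdt(hr0, hc0, data, file = "cluster.cdt")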

amap/cleanup0000755000176200001440000000011614705252656012557 0ustar liggesusers#!/bin/sh rm -f ./config.* rm -f src/Makevars rm -f src/*.o rm -f src/*.so amap/demo/0000755000176200001440000000000012444343274012123 5ustar liggesusersamap/demo/00Index0000644000176200001440000000002610264436346013254 0ustar liggesusersamap Amap demo file amap/demo/amap.R0000755000176200001440000000355710336325616013177 0ustar liggesusers################################################### ### chunk number 1: ################################################### cat("\n------ CLUSTERING TOOLS -------\n") cat("\n------ Hierarchical clustering -------\n") data(USArrests) h = hcluster(USArrests) plot(h) readline("Next") ################################################### ### chunk number 2: ################################################### cat("\n------ Hierarchical clustering using function heatmap -------\n") heatmap(as.matrix(USArrests), hclustfun=hcluster, distfun=function(u){u}) readline("Next") ################################################### ### chunk number 3: ################################################### cat("\n------ Parralelized Hierarchical clustering -------\n") h = hclusterpar(USArrests,nbproc=4) readline("Next") ################################################### ### chunk number 4: ################################################### cat("\n------ K-means clustering -------\n") Kmeans(USArrests,centers=3,method="correlation") readline("Next") ################################################### ### chunk number 5: ################################################### cat("\n------ ROBUST TOOLS -------\n") cat("\n------ A robust variance computation -------\n") data(lubisch) lubisch <- lubisch[,-c(1,8)] varrob(scale(lubisch),h=1) readline("Next") ################################################### ### chunk number 6: ################################################### cat("\n------ A robust principal component analysis -------\n") p <- acpgen(lubisch,h1=1,h2=1/sqrt(2)) plot(p) readline("Next") ################################################### ### chunk number 6: ################################################### cat("\n------ Another robust principal component analysis -------\n") p <- acprob(lubisch,h=4) plot(p) readline("Next") amap/vignettes/0000755000176200001440000000000014705252655013213 5ustar liggesusersamap/vignettes/amap.bib0000644000176200001440000000176313275070200014577 0ustar liggesusers@ARTICLE{caussinu+ruiz, author = "H. Caussinus and S. Hakam and A. Ruiz-Gazen", title= "Projections r\'ev\'elatrices contr\^ol\'ees. Recherche d'individus atypiques", journal = "Revue de Statistique Appliqu\'ee", year = 2002, volume = 50, number=4, } @ARTICLE{caussinu+ruiz2, author = "H. Caussinus and M. Fekri and S. Hakam and A. Ruiz-Gazen", title = "A monitoring display of multivariate outliers", journal = "Computational Statistics and Data Analysis", year = 2003, volume = 44, month = "October", pages = "237-252", } @ARTICLE{mpetitjean, author = "M. Petitjean", title = "Agr\'egation des similarit\'es: une solution oubli\'ee.", journal = "RAIRO Oper. Res.", year = 2002, volume = 36, number=1, pages = "101-108", } @BOOK{R:writtingRExt, author = {R core}, title = {Writing R Extensions}, publisher = {Unknown}, year = 2007, address = {Unknown}, abstract = {covers how to create your own packages, write R help files, and the foreign language (C, C++, Fortran, ...) 
interfaces.} }amap/vignettes/amap.Rnw0000644000176200001440000000342210677417044014621 0ustar liggesusers% building this document: (in R) Sweave ("ctc.Rnw") \documentclass[a4paper]{article} \title{Amap Package} \author{Antoine Lucas} %\VignetteIndexEntry{Introduction to amap} %\VignettePackage{amap} \SweaveOpts{echo=FALSE} %\usepackage{a4wide} \begin{document} \maketitle \tableofcontents \section{Overview} {\tt Amap} package includes standard hierarchical clustering and k-means. We optimize implementation (with a parallelized hierarchical clustering) and allow the possibility of using different distances like Eulidean or Spearman (rank-based metric). We implement a principal component analysis (with robusts methods). \section{Usage} \subsection{Clustering} The standard way of building a hierarchical clustering: <>= library(amap) data(USArrests) h = hcluster(USArrests) plot(h) @ Or for the ``heatmap'': <>= heatmap(as.matrix(USArrests), hclustfun=hcluster, distfun=function(u){u}) @ On a multiprocessor computer: <>= h = hcluster(USArrests,nbproc=4) @ The K-means clustering: <>= Kmeans(USArrests,centers=3,method="correlation") @ \subsection{Robust tools} A robust variance computation: <>= data(lubisch) lubisch <- lubisch[,-c(1,8)] varrob(scale(lubisch),h=1) @ A robust principal component analysis: <>= p <- acpgen(lubisch,h1=1,h2=1/sqrt(2)) plot(p) @ Another robust pca: <>= p <- acprob(lubisch,h=4) plot(p) @ \section{See Also} Theses examples can be tested with command {\tt demo(amap)}.\\ \noindent All functions has got man pages, try {\tt help.start()}.\\ \noindent Robust tools has been published: \cite{caussinu+ruiz} and \cite{caussinu+ruiz2}. \bibliographystyle{plain} \bibliography{amap} \end{document} amap/data/0000755000176200001440000000000012444343274012110 5ustar liggesusersamap/data/lubisch.tab.gz0000644000176200001440000000212111655554615014652 0ustar liggesusersYn0 +H"MѮȝMc`hR~| |2.\ַM~ܿ˔og-dXDMJ'R5*BJC+UT^Pp~*Af.bzB5xbVJB+-V͠:d1N#eb 9 j8% H8]0TWO(P,WXpR\ije21Vd0'eHuu-dم&b eA=f -Y9Y,9 =T+BC & |=\à0\%B sE?T&ƜXX*"enAv18%xzY ?ۦUM!ۖ') ,P` n[5LuN޶栈Fv BV6[ܶ048rX·m8(et@'NYnwcXmQX(TDܕ1TܯhR"'|ecVvY1ɲ IܯԲ P;M_r0fiĊ{ ** g)ѐi8k JZPs" rNcU JwGAU+24ӋP /~ؘhtǪ;(:ŲWgEr0!+ t V=0(Iw,8)Z#Z庇ZvS3r5}PÉ]]h"Y [xVrEO wwVWR *:j'!.d[Ĭ\yY\%;(rbǘ򛜝XQ]AݳSk\]IkTWt{J%T6Vt@ =Cuh%L; dVر.n>=nv^ga'n +ݩ,薳9Yd݂(WU&4wj:(=0C\uNuYamap/src/0000755000176200001440000000000014705252655011772 5ustar liggesusersamap/src/hclust.h0000644000176200001440000000156213275645271013452 0ustar liggesusers#ifdef __cplusplus extern "C" { #endif void hclust(int *n,int *len, int *iopt ,int *ia , int *ib,int *iorder,double *crit,double *membr,double *diss, int *result); void hcluster(double *x, int *nr, int *nc, int *diag, int *method, int *iopt ,int *ia , int *ib,int *iorder,double *crit,double *membr,int *nbprocess,int * precision, int * result); #ifdef __cplusplus }; namespace hierclust { /** Hierachical clustering subroutine * \brief compute hierachical clustering from a distance matrix * This routine is called by hclust * \param n number of individuals * \param ia, ib result (merge) * \param iia, iib result (merge) * \param iorder result (order) * * \note this is an adaptation of the fortran function designed from the * R core team. */ void hcass2( int *n, int *ia, int *ib,int *iorder, int *iia, int *iib); }; #endif amap/src/matriceTriangle.h0000644000176200001440000000302713275645271015260 0ustar liggesusers#ifndef AMAP_TRIANGULAR_MATRIX #define AMAP_TRIANGULAR_MATRIX #include "matrice.h" namespace amap { /** * Matrix data. 
* * a triangular matrix with 4 row and 4 cols * +---+---+---+---+ * | | | | | * +---+---+---+---+ * | 1 | | | | * +---+---+---+---+ * | 2 | 4 | | | * +---+---+---+---+ * | 3 | 5 | 6 | | * +---+---+---+---+ * * matrix is 4x4 but dataSet is of a size 6 (when not diagonal). * dataSet is of size 10 if matrix is diagonal. * */ template class matriceTriangle : public matrice { private: T blankValue; bool isDiagonal; public: /** * Contructor. * \param values_p the data matrix * \param nrows_p the number of rows */ matriceTriangle(T * values_p, int nrow_p, bool isDiagonal_p) : matrice(values_p, nrow_p, nrow_p), isDiagonal(isDiagonal_p) { }; /** * Accessor on data. */ virtual T & operator[] (int index) { blankValue = 0; int i = index % this->getNrow(); int j = index / this->getNrow(); if (i == j && !isDiagonal) { return blankValue; } if (i <= j) { int temp = i; i = j; j = temp; } int maxRowSize = this->getNrow(); if (!isDiagonal) { i--; maxRowSize--; } // // j * *(j + 1) / 2 : nombres de points qui sont dans "le triangle du haut". // int ij = i + j * maxRowSize - j * (j + 1) / 2; return matrice::operator[](ij); }; }; }; #endif amap/src/acprob.h0000644000176200001440000000434513275645271013420 0ustar liggesusers#ifndef ACPROB_H #define ACPROB_H 1 #ifdef __cplusplus extern "C" { #endif /*! noyau: base function for kernel computation * \brief compute a kernel. called by W or VarRob * \param u: input data (scalar) * \param k: char, g, q, t, e, c, u for: * gaussien = (2*pi)^(-1/2) * exp(-u^2/2), * quartic = 15/16 * (1-u^2)^2 * (abs(u)<1), * triweight = 35/32 * (1-u^2)^3 * (abs(u)<1), * epanechikov = 3/4 * (1-u^2) * (abs(u)<1), * cosinus = pi/4 * cos (u*pi/2) * (abs(u)<1), * uniform = 1/2 * (abs(u)<1), * \param res: output data: one value of type double */ void noyau(double *u, char **k,double *res) ; /** W compute a "local" variance This function is called directly by R * \brief this functions compute a "local" variance, using a specific kernel. * This function depends on function mult, norm and noyau * \param x: matrix; input data n x p * \param h: window size (scalar) * \param d: scalar product matrix p x p * \param n: length x * \param p: number of columns of x * \param kernel: kernel utilised * \param res: matrix returned (allocated in R) * * \note * matrix x[i,j] (n x p) is substitute * by vector vecteur x[i + j x n] (i=0..n-1 ; j=0..p-1) * * \param result flag 0 => correct * 1 => Pb * 2 => Cannot allocate memory */ void W(double *x,double *h,double *d,int *n,int *p,char **kernel,double *res, int * result); /** VarRob: compute robust variance. Function called birecly by R * \brief Robust variance... gives a low ponderation to isolated * values. This ponderation is determined with kernel and window size. * This function depends on function mult, norm and noyau * \param x: data matrix n x p * \param h: kernel window size (scalar) * \param d: matrix of scalar product p x p * \param n: lenght of x * \param p: length of x * \param kernel: kernel used * \param res result (matrix) * \param result flag 0 => correct * 1 => Pb * 2 => Cannot allocate memory */ void VarRob(double *x,double *h,double *d,int *n,int *p,char **kernel,double *res, int * result); #ifdef __cplusplus }; #endif #endif amap/src/acprob.cpp0000644000176200001440000001334013275645271013746 0ustar liggesusers/*! \file : acprob.c * * * \brief Robust principal component analysis * * \date Created : 06/11/02 * \date Last Modified : Time-stamp: <2014-12-17 18:55:45 antoine> * * This Message must remain attached to this source code at all times. 
* This source code may not be redistributed, nor may any executable * based upon this source code. * * \author Antoine Lucas (antoinelucas@libertysurf.fr) * * Please notify me of any changes made to the code * * */ #include #include #include #include "R.h" #ifndef M_PIl #define M_PIl 3.1415926535897932384626433832795029L /* pi */ #endif #include "smartPtr.h" #include "acprob.h" /* Compiliation: */ /* R CMD SHLIB acprob.c */ /* Fonction noyau dans R: */ /* dyn.load(paste("prog/acp/src/acprob", .Platform$dynlib.ext, sep = "")) */ /* .C("noyau",as.double(0.6),as.character('g'),res=double(1)) */ /*! noyau: base function for kernel computation * \brief compute a kernel. called by W or VarRob * \param u: input data (scalar) * \param k: char, g, q, t, e, c, u for: * gaussien = (2*pi)^(-1/2) * exp(-u^2/2), * quartic = 15/16 * (1-u^2)^2 * (abs(u)<1), * triweight = 35/32 * (1-u^2)^3 * (abs(u)<1), * epanechikov = 3/4 * (1-u^2) * (abs(u)<1), * cosinus = pi/4 * cos (u*pi/2) * (abs(u)<1), * uniform = 1/2 * (abs(u)<1), * \param res: output data: one value of type double */ void noyau(double *u, char **k,double *res) { double pi= M_PIl; switch (**k) { case 'g' : *res = pow(2 * pi,-0.5) * exp(-pow(*u ,2)/2) ; break; case 'q' : *res = 15.0/16 * pow(1- pow(*u,2),2) * (fabs(*u)<1); break; case 't' : *res = 35.0/32 * pow(1- pow(*u,2),3) * (fabs(*u)<1); break; case 'e' : *res = 3.0/4 * (1- pow(*u,2)) * (fabs(*u)<1); break; case 'c' : *res = pi/4 *cos(*u * pi/2) * (fabs(*u)<1); break; case 'u' : *res = 1.0/2 * (fabs(*u)<1) ; break; } /* return *res; */ } /* dans R: switch(kernel, gaussien = (2*pi)^(-1/2) * exp(-u^2/2), quartic = 15/16 * (1-u^2)^2 * (abs(u)<1), triweight = 35/32 * (1-u^2)^3 * (abs(u)<1), epanechikov = 3/4 * (1-u^2) * (abs(u)<1), cosinus = pi/4 * cos (u*pi/2) * (abs(u)<1), uniform = 1/2 * (abs(u)<1), */ /** norm * \brief compute norm: sqrt(x'.d.x). called by W or VarRob * \param x: vector of size p * \param p: size of vector x and matrix d * \param d: matrix of size pxp */ double norm(double *x,int *p,double *d) /* * x: vecteur p:1 * d: matrice p x p * On calcule sqrt( x'.d.x ) */ { int i,j; double res=0; for (i=0; i < *p ; i++) for (j=0; j < *p ; j++) res += d[i+ j * *p]* x[i]*x[j]; return sqrt ( res ); } /** mult * \brief multiplication x.x' (return a matrix). called by W or VarRob * \param x: vector of size p * \param p: size of vector x and matrix d * \param res: matrix of result */ void mult(double *x,int *p,double *res) /* * x: vecteur p:1 * On calcule la matrice x.x' */ { int i,j; for (i=0; i < *p ; i++) for (j=0; j < *p ; j++) res[i+ j * *p] = x [i] * x [j] ; } /** W compute a "local" variance This function is called directly by R * \brief this functions compute a "local" variance, using a specific kernel. 
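 * In formula form (a restatement of the pairwise loop below, not an extra
 * computation): res = sum over pairs i<j of K(||x_i - x_j||_D / h) * (x_i - x_j)(x_i - x_j)',
 * finally divided by the sum of the kernel weights K(.) over those pairs.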
* This function depends on function mult, norm and noyau * \param x: matrix; input data n x p * \param h: window size (scalar) * \param d: scalar product matrix p x p * \param n: length x * \param p: number of columns of x * \param kernel: kernel utilised * \param res: matrix returned (allocated in R) * * \note * matrix x[i,j] (n x p) is substitute * by vector vecteur x[i + j x n] (i=0..n-1 ; j=0..p-1) * * \param result flag 0 => correct * 1 => Pb * 2 => Cannot allocate memory */ void W(double *x,double *h,double *d,int *n,int *p,char **kernel,double *res, int * result) { SmartPtr delta (*p); SmartPtr temp (*p * *p); double N=0,K=0,som=0; int i,j,k,l; *result = 1; for (l=0; l < (*p * *p) ; l++) res[l]=0; for (i=0; i < (*n-1) ; i++) { for (j=i+1; j < *n ; j++) { /* delta = Xi-Xj (ligne i et j de la matrice X) */ for (k=0; k < *p ; k++) delta[k]=x[i+(k * *n)]- x[j+(k * *n)]; N = norm(delta.get(),p,d)/ *h; /* tmp2 = K ( |delta/h|^2 ) */ noyau(&N,kernel,&K); som += K; /* temp = delta * delta' (matrice) */ mult(delta.get(),p,temp.get()); for (l=0; l < (*p * *p) ; l++) res[l] += temp[l] * K ; } } for (l=0; l < (*p * *p) ; l++) res[l] = res[l] / som ; *result = 0; } /** VarRob: compute robust variance. Function called birecly by R * \brief Robust variance... gives a low ponderation to isolated * values. This ponderation is determined with kernel and window size. * This function depends on function mult, norm and noyau * \param x: data matrix n x p * \param h: kernel window size (scalar) * \param d: matrix of scalar product p x p * \param n: lenght of x * \param p: length of x * \param kernel: kernel used * \param res result (matrix) * \param result flag 0 => correct * 1 => Pb * 2 => Cannot allocate memory */ void VarRob(double *x,double *h,double *d,int *n,int *p,char **kernel,double *res, int * result) { int i,j; SmartPtr temp (*p * *p); SmartPtr Xi (*p); double N=0,K=0,som=0; *result = 1; som = 0; for (i=0; i < *n ; i++) { for (j=0; j < *p ; j++) Xi[j]=x[i+(j * *n)]; N = norm(Xi.get(),p,d)/ *h; noyau(&N,kernel,&K); mult(Xi.get(),p,temp.get()); for (j=0; j < (*p * *p) ; j++) res[j] += temp[j] * K ; som += K; } for (j=0; j < (*p * *p) ; j++) res[j] = res[j] / som ; *result = 0; } amap/src/diss.c0000644000176200001440000000136613275645271013107 0ustar liggesusers #include /** \fn diss compute a dissimilarity matrix * \brief diss(i,j) = number of values in individual i that * are equal to values in individual j * - number of values that are different */ void diss (int * data, double * res, int * n, int * p,double * w) { int k,i,j=0; for(i=0; i< (*n); i++) { /* follow lines: case when i=j */ j = i; res[i + (j * (*n))] = *p; for(j=i+1; j < (*n); j++) { res[j + (i * (*n))] = 0; for(k=0; k< (*p);k++) { if(data[i + (k * (*n))] == data[j + (k * (*n))]) (res[j + (i * (*n))]) +=w[k] ; else (res[j + (i * (*n))]) -=w[k] ; } res[i + (j * (*n))] = res[j + (i * (*n))] ; } } } amap/src/distance_T.h.h0000644000176200001440000006040214704670505014445 0ustar liggesusers/*! \file distance.c * \brief all functions requiered for R dist function and C hcluster function. 
* * \date Created: probably in 1995 * \date Last modified: Time-stamp: <2022-10-25 22:01:11 (antoine)> * * \author R core members, and lately: Antoine Lucas * * * R : A Computer Language for Statistical Data Analysis * Copyright (C) 1995, 1996 Robert Gentleman and Ross Ihaka * Copyright (C) 1998, 2001 Robert Gentleman, Ross Ihaka and the * R Development Core Team * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA * */ #define _AMAP_DISTANCE_TEMPLATE_CPP 1 #ifdef HAVE_CONFIG_H #include #endif #include "distance_T.h" #include "matriceTriangle.h" #include "distance.h" #include #include #include #include #include #include #include #ifndef WIN32 #include #endif #define MAX( A , B ) ( ( A ) > ( B ) ? ( A ) : ( B ) ) #define MIN( A , B ) ( ( A ) < ( B ) ? ( A ) : ( B ) ) namespace amap { // --------------------------------------------------------- // Distance euclidean (i.e. sqrt(sum of square) ) // // Euclidean distance between 2 vectors a,b is // d = sqrt[ sum_i (a_i - b_i)^2 ] // // This function compute distance between 2 vectors x[i1,] & y[i2,] // x and y are matrix; we use here only line i1 from x and // line i2 from y. Number of column (nc) is the same in x and y, // number of column can differ (nr_x, nr_y). // // Flag will be set to 0 if NA value computed in distance // // When call by function distance or hclust, x and y are the same; it computes // distance between vector x[i1,] and x[i2,] // // \param x matrix of size nr_x * nc; line i1 is of interest // \param y matrix of size nr_y * nc; line i1 is of interest // \param nr_x number of row in matrix x // \param nr_y number of row in matrix y // \param nc number of column in matrix x or y // \param i1 row choosen in matrix x // \param i2 row choosen in matrix y // \param flag set to 0 if NA value computed in distance // \param opt: unused // // Return: distance value // // --------------------------------------------------------- template T distance_T::R_euclidean(vecteur & x, vecteur & y , int * flag, T_tri & opt) { T dev, dist; int count, i; count= 0; dist = 0; for(i = 0 ; i < x.size() && i < y.size() ; i++) { if(R_FINITE(x[i]) && R_FINITE(y[i])) { dev = (x[i] - y[i]); dist += dev * dev; count++; } } if(count == 0) // NA for all j: { *flag = 0; return NA_REAL; } if(count != x.size()) dist /= ((T)count/x.size()); return sqrt(dist); } // --------------------------------------------------------- // // Distance maximum (supremum norm) // // Maximum distance between 2 vectors a,b is // d = max |ai - bi | // // This function compute distance between 2 vectors x[i1,] & y[i2,] // x and y are matrix; we use here only line i1 from x and // line i2 from y. Number of column (nc) is the same in x and y, // number of column can differ (nr_x, nr_y). 
// // Flag will be set to 0 if NA value computed in distance // // When call by function distance or hclust, x and y are the same; it computes // distance between vector x[i1,] and x[i2,] // // \param x matrix of size nr_x * nc; line i1 is of interest // \param y matrix of size nr_y * nc; line i1 is of interest // \param nr_x number of row in matrix x // \param nr_y number of row in matrix y // \param nc number of column in matrix x or y // \param i1 row choosen in matrix x // \param i2 row choosen in matrix y // \param flag set to 0 if NA value computed in distance // \param opt: unused // // Return: distance value // // --------------------------------------------------------- template T distance_T::R_maximum(vecteur & x, vecteur & y , int * flag, T_tri & opt) { T dev, dist; int count, j; count = 0; dist = std::numeric_limits::min(); for(j = 0 ; j < x.size() && j < y.size() ; j++) { if(R_FINITE(x[j]) && R_FINITE(y[j])) { dev = fabs(x[j] - y[j]); if(dev > dist) dist = dev; count++; } } if(count == 0) { *flag = 0; return NA_REAL; } return dist; } // --------------------------------------------------------- // Distance manhattan (i.e. sum of abs difference ) // // manhattan distance between 2 vectors a,b is // d = sum_i | a_i - b_i | // // This function compute distance between 2 vectors x[i1,] & y[i2,] // x and y are matrix; we use here only line i1 from x and // line i2 from y. Number of column (nc) is the same in x and y, // number of column can differ (nr_x, nr_y). // // Flag will be set to 0 if NA value computed in distance // // When call by function distance or hclust, x and y are the same; it computes // distance between vector x[i1,] and x[i2,] // // \param x matrix of size nr_x * nc; line i1 is of interest // \param y matrix of size nr_y * nc; line i1 is of interest // \param nr_x number of row in matrix x // \param nr_y number of row in matrix y // \param nc number of column in matrix x or y // \param i1 row choosen in matrix x // \param i2 row choosen in matrix y // \param flag set to 0 if NA value computed in distance // \param opt: unused // // Return: distance value // // --------------------------------------------------------- template T distance_T::R_manhattan(vecteur & x, vecteur & y , int * flag, T_tri & opt) { T dist; int count, j; count = 0; dist = 0; for(j = 0 ; j < x.size() ; j++) { if(R_FINITE(x[j]) && R_FINITE(y[j])) { dist += fabs(x[j] - y[j]); count++; } } if(count == 0) { *flag = 0; return NA_REAL; } if(count != x.size()) dist /= ((T)count/x.size()); return dist; } // --------------------------------------------------------- // Distance Camberra // // Camberra distance between 2 vectors a,b is // d = sum_i | a_i - b_i | / | a_i + b_i | // // This function compute distance between 2 vectors x[i1,] & y[i2,] // x and y are matrix; we use here only line i1 from x and // line i2 from y. Number of column (nc) is the same in x and y, // number of column can differ (nr_x, nr_y). 
// // Flag will be set to 0 if NA value computed in distance // // When call by function distance or hclust, x and y are the same; it computes // distance between vector x[i1,] and x[i2,] // // \param x matrix of size nr_x * nc; line i1 is of interest // \param y matrix of size nr_y * nc; line i1 is of interest // \param nr_x number of row in matrix x // \param nr_y number of row in matrix y // \param nc number of column in matrix x or y // \param i1 row choosen in matrix x // \param i2 row choosen in matrix y // \param flag set to 0 if NA value computed in distance // \param opt: unused // // Return: distance value // // --------------------------------------------------------- template T distance_T::R_canberra(vecteur & x, vecteur & y , int * flag, T_tri & opt) { T dist, sum, diff; int count, j; count = 0; dist = 0; for(j = 0 ; j < x.size() && j < y.size() ; j++) { if(R_FINITE(x[j]) && R_FINITE(y[j])) { sum = fabs(x[j] + y[j]); diff = fabs(x[j] - y[j]); if (sum > DBL_MIN || diff > DBL_MIN) { dist += diff/sum; count++; } } } if(count == 0) { *flag = 0; return NA_REAL; } if(count != x.size()) dist /= ((T)count/x.size()); return dist; } /** \brief Distance binary */ template T distance_T::R_dist_binary(vecteur & x, vecteur & y , int * flag, T_tri & opt) { int total, count, dist; int j; total = 0; count = 0; dist = 0; for(j = 0 ; j < x.size() && j < y.size() ; j++) { if(R_FINITE(x[j]) && R_FINITE(y[j])) { if(x[j] || y[j]){ count++; if( ! (x[j] && y[j]) ) dist++; } total++; } } if(total == 0) { *flag = 0; return NA_REAL; } if(count == 0) return 0; return (T) dist / count; } /** \brief Pearson / Pearson centered (correlation) * \note Added by A. Lucas */ template T distance_T::R_pearson(vecteur & x, vecteur & y , int * flag, T_tri & opt) { T num,sum1,sum2, dist; int count,j; count= 0; num = 0; sum1 = 0; sum2 = 0; for(j = 0 ; j < x.size() && j < y.size() ; j++) { if(R_FINITE(x[j]) && R_FINITE(y[j])) { num += (x[j] * y[j]); sum1 += (x[j] * x[j]); sum2 += (y[j] * y[j]); count++; } } if(count == 0) { *flag = 0; return NA_REAL; } dist = 1 - ( num / sqrt(sum1 * sum2) ); return dist; } /** \brief Absoulute Pearson / Pearson uncentered (correlation) * \note Added by L. Cerulo */ template T distance_T::R_abspearson(vecteur & x, vecteur & y , int * flag, T_tri & opt) { T num,sum1,sum2, dist; int count,j; count= 0; num = 0; sum1 = 0; sum2 = 0; for(j = 0 ; j < x.size() && j < y.size() ; j++) { if(R_FINITE(x[j]) && R_FINITE(y[j])) { num += (x[j] * y[j]); sum1 += (x[j] * x[j]); sum2 += (y[j] * y[j]); count++; } } if(count == 0) { *flag = 0; return NA_REAL; } dist = ( num / sqrt(sum1 * sum2) ); if (dist<0) { dist*=-1; } return (1-dist); } /** \brief Distance correlation (Uncentered Pearson) * \note Added by A. Lucas */ template T distance_T::R_correlation(vecteur & x, vecteur & y , int * flag, T_tri & opt) { T num,denum2,sumx,sumy,sumxx,sumyy,sumxy; int count,j; count= 0; sumx=0; sumy=0; sumxx=0; sumyy=0; sumxy=0; for(j = 0 ; j < x.size() && j < y.size() ; j++) { if(R_FINITE(x[j]) && R_FINITE(y[j])) { sumxy += (x[j] * y[j]); sumx += x[j]; sumy += y[j]; sumxx += x[j] * x[j]; sumyy += y[j] * y[j]; count++; } } if(count == 0) { *flag = 0; return NA_REAL; } num = sumxy - ( sumx*sumy /count ); denum2 = (sumxx - (sumx*sumx /count ) )* (sumyy - (sumy*sumy /count ) ) ; if(denum2 <=0) { return 0; } return 1 - (num / sqrt(denum2)); } /** \brief Absolute Distance correlation (Uncentered Pearson) * \note Added by L. 
Cerulo */ template T distance_T::R_abscorrelation(vecteur & x, vecteur & y , int * flag, T_tri & opt) { T num,denum,sumx,sumy,sumxx,sumyy,sumxy,dist,term; int count,j; count= 0; sumx=0; sumy=0; sumxx=0; sumyy=0; sumxy=0; for(j = 0 ; j < x.size() && j < y.size() ; j++) { if(R_FINITE(x[j]) && R_FINITE(y[j])) { sumxy += (x[j] * y[j]); sumx += x[j]; sumy += y[j]; sumxx += x[j] * x[j]; sumyy += y[j] * y[j]; count++; } } if(count == 0) { *flag = 0; return NA_REAL; } num = sumxy - ( sumx*sumy /count ); term=(sumxx - (sumx*sumx /count ) )* (sumyy - (sumy*sumy /count ) ); if (term<=0) return 1; denum = sqrt( term ); dist=num/denum; if (dist<0) { dist*=-1; } return (1-dist); } // --------------------------------------------------------- // Distance Spearman // // Spearman distance between 2 vectors a,b is // d = sum_i (rank(a_i) - rank(b_i) )^2 // // If one NA found: return NA // // This function compute distance between 2 vectors x[i1,] & y[i2,] // x and y are matrix; we use here only line i1 from x and // line i2 from y. Number of column (nc) is the same in x and y, // number of column can differ (nr_x, nr_y). // // Flag will be set to 0 if NA value computed in distance // // When call by function distance or hclust, x and y are the same; it computes // distance between vector x[i1,] and x[i2,] // // \param x matrix of size nr_x * nc; line i1 is of interest // \param y matrix of size nr_y * nc; line i1 is of interest // \param nr_x number of row in matrix x // \param nr_y number of row in matrix y // \param nc number of column in matrix x or y // \param i1 row choosen in matrix x // \param i2 row choosen in matrix y // \param flag set to 0 if NA value computed in distance // \param opt: a set of 6 vectors of size nc, allocated but uninitialised. // aim of this parameter is to avoid several vector allocation // // Return: distance value // // --------------------------------------------------------- template T distance_T::R_spearman(vecteur & x, vecteur & y , int * flag, T_tri & opt) { int j; double * data_tri_x = opt.data_tri_x.get(); int * order_tri_x = opt.order_tri_x.get(); int * rank_tri_x = opt.rank_tri_x.get(); double * data_tri_y = opt.data_tri_y.get(); int * order_tri_y = opt.order_tri_y.get(); int * rank_tri_y = opt.rank_tri_y.get(); int n; T diffrang=0; for(j = 0 ; j < x.size() && j < y.size() ; j++) { if(!(R_FINITE(x[j]) && R_FINITE(y[j]))) { *flag = 0; return NA_REAL; } order_tri_x[j] = rank_tri_x[j] = order_tri_y[j] = rank_tri_y[j] = j; data_tri_x[j] = x[j]; data_tri_y[j] = y[j]; } n = x.size(); /* sort and compute rank */ /* First list */ rsort_rank_order(data_tri_x, order_tri_x,rank_tri_x, &n); /* Second list */ rsort_rank_order(data_tri_y, order_tri_y,rank_tri_y, &n); for(j=0;j< x.size();j++) { diffrang += pow((T) ( rank_tri_x[j] - rank_tri_y[j]),2); } return( diffrang ); /* * verification in R: * Dist(x,method='spearman') ; n =dim(x)[2] * l=c(x[3,],x[4,]); sum((rank(l[1:n])-rank(l[(n+1):(2*n)]))^2) * cor.test(x[3,],x[4,],method="spearm")$statistic */ } /** \brief Kendall distance (rank base metric) * 1 - corr_kendall(x,y) * * \note Added by A. 
Lucas * template T distance_T::R_kendall_corr(double * x, double * y , int nr_x, int nr_y, int nc, int i1, int i2, int * flag, T_tri & opt) { int j,k; double * data_tri_x = opt.data_tri_x; int * order_tri_x = opt.order_tri_x; int * rank_tri_x = opt.rank_tri_x; double * data_tri_y = opt.data_tri_y; int * order_tri_y = opt.order_tri_y; int * rank_tri_y = opt.rank_tri_y; int n; T dist,P=0; for(j = 0 ; j < nc ; j++) { if(!(R_FINITE(x[i1]) && R_FINITE(y[i2]))) { *flag = 0; return NA_REAL; } order_tri_x[j] = rank_tri_x[j] = order_tri_y[j] = rank_tri_y[j] = j; data_tri_x[j] = x[i1]; data_tri_y[j] = y[i2]; i1 += nr_x; i2 += nr_y; } n = nc; // sort and compute rank // First list rsort_rank_order(data_tri_x, order_tri_x,rank_tri_x, &n); // Second list rsort_rank_order(data_tri_y, order_tri_y,rank_tri_y, &n); for(j=0;j T distance_T::R_kendall(vecteur & x, vecteur& y , int * flag, T_tri & opt) { int j,k; double * data_tri_x = opt.data_tri_x.get(); int * order_tri_x = opt.order_tri_x.get(); int * rank_tri_x = opt.rank_tri_x.get(); double * data_tri_y = opt.data_tri_y.get(); int * order_tri_y = opt.order_tri_y.get(); int * rank_tri_y = opt.rank_tri_y.get(); int n; T dist,P=0; bool ordre_x,ordre_y; for(j = 0 ; j < x.size() && j < y.size() ; j++) { if(!(R_FINITE(x[j]) && R_FINITE(y[j]))) { *flag = 0; return NA_REAL; } order_tri_x[j] = rank_tri_x[j] = order_tri_y[j] = rank_tri_y[j] = j; data_tri_x[j] = x[j]; data_tri_y[j] = y[j]; } n = x.size(); /* sort and compute rank */ /* First list */ rsort_rank_order(data_tri_x, order_tri_x,rank_tri_x, &n); /* Second list */ rsort_rank_order(data_tri_y, order_tri_y,rank_tri_y, &n); for(j=0;j void distance_T::distance(double *x, int *nr, int *nc, T *d, int *diag, int *method,int *nbprocess, int * ierr,int i2) { int i; T_argument * arguments; bool dc = (*diag) ? 0 : 1; /* diag=1: we do the diagonal */ /* * Arguments sent to thread (adress): * number of thread * nr * nc * dc * *x * *d * *method * *ierr */ arguments = (T_argument * ) malloc ((*nbprocess) * sizeof( T_argument )); //printf("nb processs %d\n",*nbprocess); for(i=0; i< *nbprocess; ++i) { arguments[i].id =i; arguments[i].x=x; arguments[i].nr = nr; arguments[i].nc = nc; arguments[i].dc = dc; arguments[i].d = d; arguments[i].method = method; arguments[i].nbprocess= *nbprocess; arguments[i].ierr=ierr; arguments[i].i2=i2; } *ierr = 1; /* res = 1 => no missing values res = 0 => missings values */ #ifndef WIN32 pthread_t * th = (pthread_t *) malloc ( *nbprocess * sizeof(pthread_t)); for (i=0; i < *nbprocess ; i++) { pthread_create(th+i,0,distance_T::thread_dist,(void *)(arguments+i)); } /* Attends la fin */ for (i=0; i < *nbprocess ; i++) { pthread_join(*(th+i),NULL); } free( th); #else // p_thread not yet used on windows. 
arguments[0].nbprocess = 1; arguments[0].i2 = i2; thread_dist((void *)arguments); #endif free( arguments ); } //template T distance_T::distance(double * x, double * y , int nr_x, int nr_y, int nc, // // get the distance function // template void distance_T::getDistfunction(int method,distfunction& distfun) { // T (*distfun)(double*,double*,int, int, int, int, int, int *, T_tri &) = NULL; switch(method) { case EUCLIDEAN: distfun = R_euclidean; break; case MAXIMUM: distfun = R_maximum; break; case MANHATTAN: distfun = R_manhattan; break; case CANBERRA: distfun = R_canberra; break; case BINARY: distfun = R_dist_binary; break; case PEARSON: distfun = R_pearson; break; case CORRELATION: distfun = R_correlation; break; case SPEARMAN: distfun = R_spearman; break; case KENDALL: distfun = R_kendall; break; case ABSPEARSON: distfun = R_abspearson; break; case ABSCORRELATION: distfun = R_abscorrelation; break; default: { Rf_error("distance(): invalid distance"); distfun = R_euclidean; } } } /** thread_dist function that compute distance. * */ template void* distance_T::thread_dist(void* arguments_void) { int nbprocess,nr,nc,i,j,dc; T_argument * arguments = static_cast(arguments_void); T * d; double * x; int * method; int * ierr; /* for spearman dist */ T_tri opt ; distfunction distfun; short int no = arguments[0].id; nr = *arguments[0].nr; nc = *arguments[0].nc; dc = arguments[0].dc; x = arguments[0].x; d = arguments[0].d; method = arguments[0].method; nbprocess = arguments[0].nbprocess; ierr = arguments[0].ierr; int i2 = arguments[0].i2; matrice myMatrice (x, nr, nc); matriceTriangle distMatrice(d, nr, false); getDistfunction(*method,distfun); if( (*method == SPEARMAN) || (*method == KENDALL)) { opt.reset(nc); } /* debut = ((nr+1) / nbprocess + 1 ) * no ; fin = min ( ((nr+1) / nbprocess + 1) * ( no + 1 ) , (nr+1)); */ /* debut des boucles 0 fin: nr+1 */ int debut = (int) floor( ((nr+1.)*nbprocess - sqrt( (nr+1.)*(nr+1.) * nbprocess * nbprocess - (nr+1.)*(nr+1.) * nbprocess * no ) )/nbprocess); int fin = (int) floor(((nr+1.)*nbprocess - sqrt( (nr+1.)*(nr+1.) * nbprocess * nbprocess - (nr+1.)*(nr+1.) * nbprocess * (no+1.) ) )/nbprocess); if (fin > nr) { fin = nr; } //printf("Thread %d debut %d fin %d i2=%d met=%d\n",no,debut,fin,i2,*method); // here: the computation ! // for(j = 0 ; j <= nr ; j++) if (i2==-1) /* compute all distance matrix*/ { for(j = debut ; j < fin ; j++) { vecteur distRow = distMatrice.getRow(j); vecteur rowJ = myMatrice.getRow(j); //ij = (2 * (nr-dc) - j +1) * (j) /2 ; for(i = j+dc ; i < nr ; i++) { vecteur rowI = myMatrice.getRow(i); distRow[i] = distfun(rowI, rowJ ,ierr,opt); } } } else { /* updates the distance only for i2*/ // a row of distance matrix vecteur distRow = distMatrice.getRow(i2); vecteur rowI = myMatrice.getRow(i2); for(j = debut ; j < fin ; j++) { if (i2!=j) { vecteur rowJ = myMatrice.getRow(j); distRow[j] = distfun(rowI, rowJ,ierr,opt); //printf("updated dist %d %d %f\n",i2,j,d[ind1]); } } } return (void*)0; } // --------------------------------------------------------- // // R_distance_kms: compute distance between individual i1 and // centroid i2 // // compute distance and call one of function R_euclidean or R_... // This function is called by kmeans_Lloyd2 // // \param x input matrix (individuals) // \param y input matrix (centroids) // \param nr1,nr2,nc number of row (nr1:x, nr2:y) and columns // nr individuals with nc values. // \param i1, i2: indice of individuals (individual i1, centroid i2) // \param method 1, 2,... 
method used // \param ierr for NA 0 if no value can be comuted due to NA // \param opt optional parameter send to spearman dist. // // --------------------------------------------------------- template T distance_T::distance_kms(vecteur & x, vecteur & y , int *method, int * ierr, T_tri & opt) { /* * compute distance x[i1,*] - y[i2,*] * x matrix n x p * y matrix m x p * nr1 = n; nr2 = m; nc =p */ T res; distfunction distfun; getDistfunction(*method,distfun); // here: distance computation res = distfun(x,y,ierr, opt); return( res); } }; amap/src/matrice.cpp0000644000176200001440000000161613275645271014127 0ustar liggesusers#include "matrice.h" #include "matriceTriangle.h" #include extern "C" { /** * purpose of this function is to display a matrix, for unitary testing. */ void checkMatrix(double * values, int * nrow, int * ncol) { amap::matrice myMatrix(values, *nrow, *ncol); Rprintf("\n"); for (int i = 0 ; i < *nrow; i++) { amap::vecteur row = myMatrix.getRow(i); for (int j = 0 ; j < row.size(); j++) { Rprintf("%f\t", row[j]); } Rprintf("\n"); } } void checkMatrixTriangle(double * values, int * nrow, int * isDiagonal) { amap::matriceTriangle myMatrix(values, *nrow, *isDiagonal); Rprintf("\n"); for (int i = 0 ; i < *nrow; i++) { amap::vecteur row = myMatrix.getRow(i); for (int j = 0 ; j < row.size(); j++) { Rprintf("%f\t", row[j]); } Rprintf("\n"); } } }; amap/src/init.c0000644000176200001440000000506613275645271013111 0ustar liggesusers#include #include #include #include #include "distance.h" #include "acprob.h" #include "hclust.h" #include "kmeans.h" void diss (int * data, double * res, int * n, int * p,double * w); void matind(int * nblev,int * data,int * res, int * n, int * m,int * k); void checkMatrix(double * values, int * nrow, int * ncol); void checkMatrixTriangle(double * values, int * nrow, int * isDiagonal) ; /* static const R_CMethodDef cMethods[] = { {"R_distance", (DL_FUNC) &R_distance, 8,{REALSXP,INTSXP, INTSXP,REALSXP,INTSXP, INTSXP,INTSXP, INTSXP}}, {"rsort_rank_order", (DL_FUNC) &rsort_rank_order, 4,{REALSXP,INTSXP, INTSXP,INTSXP}}, {"noyau", (DL_FUNC) &noyau, 3, {REALSXP, STRSXP, REALSXP}}, {"W", (DL_FUNC) &W, 8, {REALSXP,REALSXP,REALSXP,INTSXP, INTSXP,STRSXP,REALSXP,INTSXP}}, {"VarRob", (DL_FUNC) &VarRob, 8, {REALSXP,REALSXP,REALSXP,INTSXP, INTSXP,STRSXP,REALSXP,INTSXP}}, {"hclust", (DL_FUNC)&hclust, 10, {INTSXP, INTSXP, INTSXP,INTSXP, INTSXP, INTSXP ,REALSXP ,REALSXP ,REALSXP,INTSXP}}, {"hcluster",(DL_FUNC) &hcluster, 14,{REALSXP,INTSXP, INTSXP, INTSXP,INTSXP, INTSXP, INTSXP,INTSXP, INTSXP,REALSXP ,REALSXP, INTSXP,INTSXP, INTSXP}}, {"kmeans_Lloyd2",(DL_FUNC) &kmeans_Lloyd2,10, {REALSXP, INTSXP, INTSXP,REALSXP, INTSXP, INTSXP, INTSXP, INTSXP,REALSXP, INTSXP}}, {NULL, NULL, 0 } };*/ static const R_CMethodDef cMethods[] = { {"matind", (DL_FUNC) &matind,6}, {"diss", (DL_FUNC) &diss, 5}, {"checkMatrixTriangle", (DL_FUNC)&checkMatrixTriangle, 3}, {"checkMatrix", (DL_FUNC)&checkMatrix, 3}, {"R_distance", (DL_FUNC) &R_distance, 8}, {"rsort_rank_order", (DL_FUNC) &rsort_rank_order, 4}, {"noyau", (DL_FUNC) &noyau, 3}, {"W", (DL_FUNC) &W, 8}, {"VarRob", (DL_FUNC) &VarRob, 8}, {"hclust", (DL_FUNC) &hclust, 10}, {"hcluster", (DL_FUNC) &hcluster, 14}, {"kmeans_Lloyd2", (DL_FUNC) &kmeans_Lloyd2,10}, {NULL, NULL, 0 } }; void F77_NAME(pnkfmb)(int *fmbvr, int *triabs, int *allsol, int *n, double *couts, int * yasve, int * y, int * renum, double* bornth, int * nbcl0, double* z0,int * nbcl, double* z, int * nbemp, int * nbdep, int * nbsol, int * nazp); static const R_FortranMethodDef 
fMethods[] = { {"pnkfmb", (DL_FUNC) &F77_NAME(pnkfmb), 17}, {NULL, NULL, 0} }; void attribute_visible R_init_amap(DllInfo *info) { R_registerRoutines(info, cMethods,NULL, fMethods, NULL); R_useDynamicSymbols(info, FALSE); R_forceSymbols(info, TRUE); } amap/src/burt.c0000644000176200001440000000247113275645271013117 0ustar liggesusers /** matind * \brief create indicatrices instead of factors. * * \param nblev number of levels for each variables (size 1xm) * \param data: input data n individuals with m variables * \param res: return matrix, indicatrics of size nxsum(nblev) * \param n,m size of matrix data. * * color size color.blue color.red size.large size.medium size.small blue large 1 0 1 0 0 red large 0 1 1 0 0 red small => 0 1 0 0 1 blue medium 1 0 0 1 0 red large 0 1 1 0 0 nblev = 2,3 x = 1 1 2 1 2 2 1 3 2 1 */ void matind(int * nblev,int * data,int * res, int * n, int * m,int * k) { int i,j,curr_col_in_res=0; /* for all variables ... */ for(j = 0; j< *m; j++) { /* for all individuals ... */ for(i = 0; i< *n; i++) { if((i + (*n)*(curr_col_in_res+ data[i+ (*n)*j] -1)) >= 0) { /* res[i,(curr_col_in_res+ data[i,j] -1)] ++;*/ res[i + (*n)*(curr_col_in_res+ data[i+ (*n)*j] -1)] ++; } } curr_col_in_res = curr_col_in_res + nblev[j]; } } amap/src/pop.f0000644000176200001440000006207514326566167012756 0ustar liggesusersC======================================================================C C C C CLASSIFICATION ASCENDANTE HIERARCHIQUE C C -------------------------------------- C C C C EN ENTREE : C C N : DIMENSION DE SNN ET DE Y C C SNN ( N , N ) : MATRICE DES SIMILARITES SYMETRIQUE C C ( MODIFIEE , PUIS RESTAUREE ) C C C C EN SORTIE : C C Y ( N , N ) : PARTITION DES INDIVIDUS C C K : NOMBRE DE CLASSES ( 1 A N ) C C Z : DEMI-COUT DE LA PARTITION C C BORNTH : BORNE SUPERIEURE THEORIQUE DE Z C C C C ATTENTION : C C LA DIAGONALE DE Y CONTIENDRA LES NUMEROS DES CLASSES C C LA DIAGONALE DE SNN N'INTERVIENT JAMAIS DANS LE COUT C C C C======================================================================C C SUBROUTINE PNKCAH ( N , SNN , Y , K , Z , BORNTH ) C IMPLICIT INTEGER ( A - Z ) C C REAL SNN ( N , N ) , DSUP , DIJ , Z , BORNTH DOUBLE PRECISION SNN ( * ) , DSUP , DIJ , Z , BORNTH C C INTEGER Y ( N , N ) INTEGER Y ( * ) C C C INITIALISATION DE LA PARTITION : 1 CLASSE PAR INDIVIDU C CHAQUE CLASSE EST NUMEROTEE DE 1 A N SUR LA DIAGONALE . C ------------------------------------------------------ C DO I = 1 , N DO J = 1 , I-1 Y ( I + (J-1)*N ) = 0 END DO Y ( I + (I-1)*N ) = I END DO C K = N C C C CLASSIFICATION ASCENDANTE HIERARCHIQUE ( COUT < (N*N*N-N)/6 ) C REGROUPEMENT DES 2 CLASSES AYANT LA PLUS GRANDE SIMILARITE ; C ON S'ARRETE LORSQUE TOUTES LES SIMILARITES SNN(I,J) SONT < 0. C -------------------------------------------------------------- C 20 DSUP = -1. I1 = 0 I2 = 0 C DO I = 1 , N C C ON N'EXAMINE QUE LES CLASSES "ACTIVES" : LES AUTRES C LIGNES CORRESPONDENT AUX CLASSES DEJA REGROUPEES C IF ( Y(I+(I-1)*N) .GT. 0 ) THEN C DO J = I+1 , N C C ON N'EXAMINE QUE LES CLASSES "ACTIVES" : LES AUTRES C COLONNES CORRESPONDENT AUX CLASSES DEJA REGROUPEES C IF ( Y(J+(J-1)*N) .GT. 0 ) THEN C DIJ = SNN ( I + (J-1)*N ) C IF ( DIJ.GE.0. .AND. DIJ.GT.DSUP ) THEN C --------- -- C ON REGROUPE EGALEMENT SI DIJ = 0. 
C I1 = I I2 = J DSUP = DIJ C ENDIF C ENDIF C END DO C ENDIF C END DO C C C TOUTES LES SIMILARITES SONT NEGATIVES : FIN DE L'ALGORITHME C LES NUMEROS DES CLASSES SUR LA DIAGONALE SERONT CONSECUTIFS C ( AVEC CALCUL DU DEMI-COUT HORS DIAGONALE DE LA PARTITION ) C ( MISE A JOUR DES TRIANGULAIRES SUPERIEURES DE Y ET SNN ) C ----------------------------------------------------------- C IF ( I1 .EQ. 0 ) THEN C DO I = 1 , N II = I + (I-1)*N Y(II) = IABS ( Y(II) ) END DO C K0 = 1 DO NCL = 1 , N EFF = 0 DO I = 1 , N II = I + (I-1)*N IF ( Y(II) .EQ. NCL ) THEN Y(II) = K0 EFF = EFF + 1 ENDIF END DO IF ( EFF .NE. 0 ) K0 = K0 + 1 END DO C Z = 0. BORNTH = 0. DO I = 1 , N DO J = 1 , I-1 IJ = I + (J-1) * N JI = J + (I-1) * N SNN (JI) = SNN (IJ) Y (JI) = Y (IJ) Z = Z + Y(IJ) * SNN(IJ) IF ( SNN(IJ) .GT. 0. ) BORNTH = BORNTH + SNN(IJ) END DO END DO C RETURN C ENDIF C C C I1 ET I2 ( I2 > I1 ) SONT LES PLUS PROCHES : ON LES REGROUPE C ------------------------------------------------------------ C LE NOMBRE DE CLASSES DIMINUE DE 1 . C C LE NUMERO DE LA CLASSE DE I1 EST AFFECTE EN NEGATIF A I2 , C AINSI QU'AUX INDIVIDUS DE LA CLASSE DE I2 . C C L'ELEMENT ( I1 , I2 ) DE LA MATRICE DE PARTITION VAUT 1 , C AINSI QUE LES ELEMENTS ( E , I1 ) ET ( E , I2 ) TELS QUE C E SOIT CLASSE , SOIT AVEC I1 , SOIT AVEC I2 . C C K = K - 1 C NEWCLA = - Y ( I1 + (I1-1)*N ) ANCCLA = Y ( I2 + (I2-1)*N ) C DO I = 1 , N II = I + (I-1)*N IF ( Y(II) .EQ. ANCCLA .OR. . Y(II) .EQ. -ANCCLA ) Y(II) = NEWCLA END DO C DO I = 1 , N II = I + (I-1)*N IF ( Y(II) .EQ. NEWCLA .OR. . Y(II) .EQ. -NEWCLA ) THEN DO J = 1 , I-1 JJ = J + (J-1)*N IF ( Y(JJ) .EQ. NEWCLA .OR. . Y(JJ) .EQ. -NEWCLA ) Y ( I + (J-1)*N ) = 1 END DO ENDIF END DO C C C LES SIMILARITES DES AUTRES GROUPES AVEC I1 SONT RECALCULEES C DANS CE CAS PARTICULIER , LES SIMILARITES VERIFIENT : C SNN ( E , I1 U I2 ) = SNN ( E , I1 ) + SNN ( E , I2 ) C ----------------------------------------------------------- C DO E = 1 , I1-1 SNN ( E+(I1-1)*N ) = SNN ( E+(I1-1)*N ) + SNN ( E+(I2-1)*N ) END DO C DO E = I1+1 , I2-1 SNN ( I1+(E-1)*N ) = SNN ( I1+(E-1)*N ) + SNN ( E+(I2-1)*N ) END DO C DO E = I2+1 , N SNN ( I1+(E-1)*N ) = SNN ( I1+(E-1)*N ) + SNN ( I2+(E-1)*N ) END DO C C C ON RELANCE L'ALGORITHME C ----------------------- GOTO 20 C END C======================================================================C C C C ALGORITHME DE CLASSIFICATION : FAURE ET MALGRANGE BOOLEEN C C --------------------------------------------------------- C C C C EN ENTREE : C C UECR : UNITE D'ECRITURE DES RESULTATS C C FMBVR : = .TRUE. POUR LA SOLUTION EXACTE C C = .FALSE. POUR S'ARRETER A LA CAH C C TRIABS : = .TRUE. : TRI INITIAL VAL. ABSOLUE C C = .FALSE. : TRI INITIAL ALGEBRIQUE C C ALLSOL : = .TRUE. POUR TOUTES LES SOLUTIONS C C = .FALSE. 
POUR UNE SEULE SOLUTION C C N : NOMBRE D'INDIVIDUS C C COUTS (N,N) : MATRICE DES COUTS ( SIGNES ) C C C C EN SORTIE : C C YSAVE (N,N) : SAUVEGARDE DE LA SOLUTION C C Y (N,N) : MATRICE DE PARTITION FINALE C C RENUM (N,N) : ADRESSE DES COUTS DES VARIABLES C C BORNTH : MAJORANT DU COUT DES PARTITIONS C C NBCL0 : NOMBRE DE CLASSES INITIAL C C Z0 : COUT DE LA PARTITION INITIALE C C NBCL : NOMBRE DE CLASSES FINAL C C Z : COUT DE LA PARTITION FINALE C C NBEMP : NOMBRE D'EMPILEMENTS C C NBDEP : NOMBRE DE DEPILEMENTS C C NBSOL : NOMBRE DE SOLUTIONS OPTIMALES C C SAUVEGARDEES APRES LA CAH C C C C ATTENTION : C C LA TRIANGULAIRE INFERIEURE DE RENUM CONTIENT LES C C ADRESSES DES COUTS DES M=N*(N-1)/2 VARIABLES DANS C C LA MATRICE DES COUTS C C C C LA TRIANGULAIRE SUPERIEURE DE RENUM CONTIENT LES C C ADRESSES RECIPROQUES DE CELLES DE LA TRIANGULAIRE C C INFERIEURE . C C C C LA TRIANGULAIRE INFERIEURE DE Y CONTIENT LES C C VALEURS 0 OU 1 CHOISIES POUR CHACUNE DES M C C VARIABLES , OU -1 SI LA VARIABLE N'EST PAS FIXEE . C C C C LA TRIANGULAIRE SUPERIEURE DE Y CONTIENT L' C C ADRESSE DE LA VARIABLE PRECEDEMMENT FIXEE , OU 0 C C POUR LA 1-ERE CHOISIE ; CE NUMERO EST NEGATIF SI C C LA VARIABLE EST CHOISIE PAR IMPLICATION . C C C C LA DIAGONALE DE Y CONTIENDRA LES NUMEROS DES CLASSES C C LA DIAGONALE DE YSAVE AUSSI C C C C LES ARGUMENTS COUTS ET YSAVE PEUVENT AVOIR LA MEME C C ADRESSE , A CONDITION QUE LES DECLARATIONS "REAL" C C ET "INTEGER" SUPPOSENT LE MEME NOMBRE DE MOTS : C C LES COUTS SONT DANS LA TRIANGULAIRE SUPERIEURE , C C LA SAUVEGARDE DANS LA TRIANGULAIRE INFERIEURE . C C C C======================================================================C C C FONCTION D'ADRESSAGE : C ---------------------------------------------------------------- C $ , 1 , 2 , 3 , . . . . . , N-1 C 1 , $ , N , N+1 , , 2*N-3 C 2 , 3 , $ , . C . . C . . C . $ , N*(N-1)/2 C N-1 , 2*N-3 , N*(N-1)/2 , $ C ---------------------------------------------------------------- C ADRSUP(I,J,N) = N*I - (I*(I+1))/2 + J - N C ADRINF(I,J,N) = N*J - (J*(J+1))/2 + I - N C C C ALGORITHME C ---------- C ON ATTRIBUE PROGRESSIVEMENT LA VALEUR Y("K") = 1 , OU Y("K") = 0 C POUR CHAQUE VARIABLE K , AFIN DE DEGRADER LE MOINS POSSIBLE LA C FONCTION ECONOMIQUE Z : Y("K") = 1 SI SON COEFFICIENT DANS Z C EST POSITIF ( OU NUL ) , Y("K") = 0 SINON . C A CHAQUE ATTRIBUTION D'UNE VALEUR A LA VARIABLE K , ON EXAMINE C LES CONTRAINTES : C * SI LA VALEUR ATTRIBUEE EST REFUSEE , ON ATTRIBUE L'AUTRE VALEUR C * SI L'AUTRE VALEUR EST REFUSEE , ON REMET EN QUESTION LE DERNIER C CHOIX EFFECTUE . C * SI LA FONCTION ECONOMIQUE TOMBE EN DESSOUS DU COUT DE LA C SOLUTION SAUVEGARDEE , ON REMET EN QUESTION LE DERNIER CHOIX C EFFECTUE . C * SI ON EST AMENE A REMETTRE EN QUESTION TOUS LES CHOIX JUSQU'A C LA 1-ERE VARIABLE , ET QUE CELLE-CI EST ELLE MEME REFUSEE , C L'ALGORITHME S'ARRETE : LA SOLUTION SAUVEGARDEE EST OPTIMALE . C C ON STOCKE POUR CHAQUE VARIABLE , L'ADRESSE DU DERNIER CHOIX C EFFECTUE , AVEC UN SIGNE NEGATIF LORSQUE CELUI-CI A DEJA ETE C MODIFIE . 
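C
C ENGLISH SUMMARY OF THE DESCRIPTION ABOVE (FAURE-MALGRANGE, BOOLEAN
C BRANCH AND BOUND) : EACH VARIABLE Y("K") IS SET TO 1 WHEN ITS
C COEFFICIENT IN THE OBJECTIVE Z IS >= 0 AND TO 0 OTHERWISE, SO AS TO
C DEGRADE Z AS LITTLE AS POSSIBLE. AFTER EACH ASSIGNMENT THE TRIANGLE
C CONSTRAINTS ARE CHECKED : IF THE VALUE IS REJECTED THE OTHER VALUE IS
C TRIED ; IF BOTH ARE REJECTED, OR IF Z FALLS BELOW THE COST OF THE
C SAVED SOLUTION, THE LAST FREE CHOICE IS RECONSIDERED (BACKTRACKING).
C WHEN BACKTRACKING REACHES THE FIRST VARIABLE AND IT IS ITSELF
C REJECTED, THE ALGORITHM STOPS : THE SAVED PARTITION IS OPTIMAL.
C FOR EACH VARIABLE THE ADDRESS OF THE LAST CHOICE MADE IS STORED,
C WITH A NEGATIVE SIGN ONCE THAT CHOICE HAS ALREADY BEEN FLIPPED.
C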
C C======================================================================C C SUBROUTINE pnkfmb ( FMBVR , TRIABS , ALLSOL , N , , COUTS , YSAVE , Y , RENUM , , BORNTH , NBCL0 , Z0 , NBCL , Z , , NBEMP , NBDEP , NBSOL, NAP ) C IMPLICIT INTEGER ( A - Z ) C INTEGER YSAVE (N*N) , Y (N*N) , RENUM (N*N) C DOUBLE PRECISION COUTS (N*N) , , BORNTH , Z0 , Z , DELTAZ , ZSAVE , ZNEW , , ABS C INTEGER FMBVR , TRIABS , ALLSOL C LOGICAL REFUS , CINTEG C LOGICAL FMBVR , TRIABS , ALLSOL , REFUS , CINTEG C C C----------------------------------------------------------------------C C C C DETERMINATION DU TYPE DE COUTS : REELS OU ENTIERS C CINTEG N'EST UTILISE QU'A L'EDITION DES RESULTATS C ------------------------------------------------- CINTEG = .TRUE. DO I = 1 , N*N ICOUTS = INT ( COUTS(I) ) CINTEG = CINTEG .AND. COUTS(I) .EQ. REAL(ICOUTS) END DO C C C OBTENTION D'UNE PARTITION INITIALE PAR CAH C ------------------------------------------ CALL PNKCAH ( N , COUTS , Y , NBCL0 , Z0 , BORNTH ) C C C ARRET EVENTUEL C --------------------------------------------------- C write (*,*) 'UECR 4000-5002' C IF ( FMBVR .EQ. 0) RETURN C DO I = 1 , N DO J = 1 , I-1 IJ = I + (J-1)*N YSAVE (IJ) = Y (IJ) END DO END DO ZSAVE = Z0 C C C TRI QUADRATIQUE DES COUTS : ARITHMETIQUE OU ALGEBRIQUE C EX-AEQUOS : ON TESTE LES ADRESSES DES ELEMENTS C -------------------------------------------------------------- C LA VARIABLE K (K-EME PLUS GRAND COUT) ASSOCIEE AU COUT (I1,J1) C AURA COMME ADRESSE (IK,JK) DANS LA TRIANGULAIRE INFERIEURE C -------------------------------------------------------------- C DO J1 = 1 , N DO I1 = 1 , J1-1 C I1J1 = I1 + (J1-1) * N J1I1 = J1 + (I1-1) * N RANG = 1 C DO J2 = 1 , N DO I2 = 1 , J2-1 I2J2 = I2 + (J2-1) * N IF ( TRIABS .NE. 0 ) THEN CDIF = ABS(COUTS(I2J2)) - ABS(COUTS(I1J1)) ELSE CDIF = COUTS(I2J2) - COUTS(I1J1) ENDIF C IF ( CDIF ) 50 , 30 , 40 IF (CDIF < 0) THEN GOTO 50 ELSE IF (CDIF == 0) THEN GOTO 30 ELSE GOTO 40 ENDIF 30 IF ( I1J1 .GE. I2J2 ) GOTO 50 40 RANG = RANG + 1 50 CONTINUE C END DO END DO JK = 1 60 FINJ = N*JK - (JK*(JK+1))/2 IF ( RANG .GT. FINJ ) THEN JK = JK + 1 GOTO 60 ENDIF IK = RANG + N - FINJ IKJK = IK + (JK-1)*N RENUM(IKJK) = I1J1 RENUM(I1J1) = IKJK C END DO END DO C C C INITIALISATIONS DIVERSES ; Z EST LE MAJORANT , SAUF POUR K=M C ------------------------------------------------------------ M = ( N * (N-1) ) / 2 C DO I = 1 , N DO J = 1 , I-1 Y ( I + (J-1)*N ) = -1 Y ( J + (I-1)*N ) = 0 END DO Y ( I + (I-1)*N ) = - 1 END DO C NBEMP = 0 NBDEP = 0 NAP = 0 NBSOL = 0 KIJPRE = 1 I = 1 J = 1 K = 0 Z = BORNTH REFUS = .FALSE. C C C EMPILEMENT DE L'ADRESSE SUIVANTE : (I,J)+1 C ------------------------------------------ 1000 IF ( K .GE. M ) GOTO 1500 C NBEMP = NBEMP + 1 I = I + 1 IF ( I .GT. N ) THEN J = J + 1 I = J + 1 ENDIF IJ = I + (J-1)*N JI = J + (I-1)*N K = K + 1 C C C COUT ZNEW = Z +/- DELTAZ ASSOCIE A LA VARIABLE (I,J) , I > J C ------------------------------------------------------------ DELTAZ = COUTS ( RENUM (IJ) ) IF ( DELTAZ .GE. 0. ) THEN VAL01 = 1 ELSE VAL01 = 0 ENDIF C C C CONTROLE DE VALIDITE DU CHOIX INITIAL , PUIS DU CHOIX INVERSE C ON N'INSISTE PAS SI LE MAJORANT Z EST INFERIEUR A ZSAVE C SI REFUS = .TRUE. AVANT CONTROLE , ON VIENT DE DEPILER . C ------------------------------------------------------------- CALL PNKTSY ( N , I , J , VAL01 , Y , RENUM , NAP , REFUS ) IF ( REFUS ) THEN VAL01 = 1 - VAL01 KIJPRE = - KIJPRE CALL PNKTSY ( N , I , J , VAL01 , Y , RENUM , NAP , REFUS ) IF ( REFUS ) GOTO 1200 ENDIF 1100 ZNEW = Z IF ( VAL01 .EQ. 0 ) THEN IF ( DELTAZ .GT. 0. 
) ZNEW = Z - DELTAZ ELSE IF ( DELTAZ .LT. 0. ) ZNEW = Z + DELTAZ ENDIF C write (*,'(I13)') ALLSOL IF ( ALLSOL .EQ. 1 ) THEN C write (*,*) "ALLSOL 1" IF ( ZNEW .LT. ZSAVE ) GOTO 1200 ELSE C write (*,*) "ALLSOL 0" IF ( ZNEW .LE. ZSAVE ) GOTO 1200 ENDIF C C C ACCEPTATION DE LA K-EME VARIABLE : ECRITURE DE (I,J) C ---------------------------------------------------- Y(IJ) = VAL01 Y(JI) = KIJPRE KIJPRE = IJ Z = ZNEW GOTO 1000 C C C DEPILEMENT : ANNULATION DE (I,J) C -------------------------------- 1200 NBDEP = NBDEP + 1 IF ( K .LE. 1 ) GOTO 2000 K = K - 1 I = I - 1 IF ( I .LE. J ) THEN J = J - 1 I = N ENDIF IJ = I + (J-1)*N JI = J + (I-1)*N VAL01 = Y(IJ) KIJPRE = Y(JI) Y(IJ) = - 1 Y(JI) = 0 DELTAZ = COUTS ( RENUM (IJ) ) IF ( VAL01 .EQ. 0 ) THEN IF ( DELTAZ .GT. 0. ) Z = Z + DELTAZ ELSE IF ( DELTAZ .LT. 0. ) Z = Z - DELTAZ ENDIF IF ( KIJPRE .LT. 0 ) GOTO 1200 C C C ON ESSAIE L'AUTRE VALEUR C ------------------------ VAL01 = 1 - VAL01 KIJPRE = - KIJPRE CALL PNKTSY ( N , I , J , VAL01 , Y , RENUM , NAP , REFUS ) IF ( REFUS ) GOTO 1200 GOTO 1100 C C C NOUVELLE SOLUTION C ----------------- 1500 DO II = 1 , N DO JJ = 1 , II-1 IJ = II + (JJ-1)*N ISJS = RENUM (IJ) JS = ISJS / N IS = ISJS - JS * N JS = 1 + JS JSIS = JS + (IS-1)*N YSAVE (JSIS) = Y (IJ) END DO END DO ZSAVE = Z NBSOL = NBSOL + 1 C C C CALCUL DU NOMBRE DE CLASSES A PARTIR DE YSAVE C --------------------------------------------- DO IS = 1 , N YSAVE ( IS + (IS-1)*N ) = - 1 END DO NBCL = 0 DO IS = 1 , N ISIS = IS + (IS-1)*N IF ( YSAVE(ISIS) .LT. 0 ) THEN NBCL = NBCL + 1 DO JS = IS+1 , N JSIS = JS + (IS-1)*N JSJS = JS + (JS-1)*N IF ( YSAVE(JSIS) .EQ. 1 ) YSAVE(JSJS) = NBCL END DO YSAVE(ISIS) = NBCL ENDIF END DO C C write (*,*) 'UECR 6000-5002' C C AU MIEUX , ON REEXAMINE K = M : SIMULATION DU REFUS DE K = M+1 C -------------------------------------------------------------- NBDEP = NBDEP - 1 K = M + 1 C I = N J = N GOTO 1200 C C C RECUPERATION DE LA PARTITION OPTIMALE C ------------------------------------- 2000 DO I = 1 , N DO J = 1 , I-1 IJ = I + (J-1)*N JI = J + (I-1)*N Y (IJ) = YSAVE (IJ) Y (JI) = Y (IJ) END DO END DO Z = ZSAVE C C C CALCUL DU NOMBRE DE CLASSES A PARTIR DE Y C ----------------------------------------- NBCL = 0 DO I = 1 , N II = I + (I-1)*N IF ( Y(II) .LT. 0 ) THEN NBCL = NBCL + 1 DO J = I+1 , N IJ = I + (J-1)*N JJ = J + (J-1)*N IF ( Y(IJ) .EQ. 1 ) Y(JJ) = NBCL END DO Y(II) = NBCL ENDIF END DO C C C ECRITURE DE LA PARTITION FINALE ET DES STATISTIQUES C --------------------------------------------------- C write (*,*) 'UECR 7000-5002' C RETURN C END C======================================================================C C C C CONTROLE DE VALIDITE D'UNE NOUVELLE AFFECTATION DANS Y C C ( VOIR PNKFMB ) C C ------------------------------------------------------ C C C C EN ENTREE : C C N : DIMENSION DE Y ET DE RENUM C C I : INDICE DE LIGNE DU NOUVEL Y(I,J) C C J : INDICE DE COLONNE DE Y(I,J) C C VAL01 : VALEUR PROPOSEE POUR Y(I,J) C C Y ( N , N ) : MATRICE DE PARTITION ( DANS LA C C TRIANGULAIRE INFERIEURE ) C C RENUM ( N , N ) : MATRICE DES ADRESSES DES COUTS C C NAP : NOMBRE D'APPELS C C C C EN SORTIE : C C NAP : NOMBRE D'APPELS + 1 C C REFUS : .TRUE. SI ON REFUSE C C .FALSE. SI ON ACCEPTE C C C C ATTENTION : C C LA TRIANGULAIRE INFERIEURE DE RENUM CONTIENT LE C C COUPLE (II,JJ) , ANCIENNE ADRESSE DE LA VARIABLE C C DANS LA TRIANGULAIRE SUPERIEURE , CORRESPONDANT C C AUX COUTS INITIAUX . 
C C C C LA TRIANGULAIRE SUPERIEURE DE RENUM CONTIENT LE C C COUPLE (I ,J ) , NOUVELLE ADRESSE DE LA VARIABLE C C DANS LA TRIANGULAIRE INFERIEURE , APRES CLASSEMENT C C PAR ORDRE DECROISSANT : VOIR CALREN . C C C C======================================================================C C SUBROUTINE PNKTSY ( N , I , J , VAL01 , Y , RENUM , NAP , REFUS ) C IMPLICIT INTEGER ( A - Z ) C LOGICAL REFUS C C INTEGER Y ( N , N ) , RENUM ( N , N ) INTEGER Y ( * ) , RENUM ( * ) C C NAP = NAP + 1 REFUS = .FALSE. C C C EXTRACTION DES INDICES II ET JJ > II , ASSOCIES A YIJ INITIAL C ------------------------------------------------------------- JJII = RENUM ( I + (J-1)*N ) IIM1 = (JJII-1) / N II = IIM1 + 1 JJ = JJII - N * IIM1 C Y(II,JJ) = VAL01 C C C BOUCLE SUR LES INDICES INITIAUX COHERENTS AVEC YIJ , YIK , YJK C -------------------------------------------------------------- DO 100 KK = 1 , N C C IF ( II - KK ) 10 , 100 , 20 IF ((II - KK) < 0 ) THEN GOTO 10 ELSE IF (( II - KK) == 0) THEN GOTO 100 ELSE GOTO 20 END IF 10 YIK = Y ( RENUM ( II + (KK-1)*N ) ) GOTO 30 20 YIK = Y ( RENUM ( KK + (II-1)*N ) ) C 30 IF ( JJ - KK ) 40 , 100 , 50 30 IF ( ( JJ - KK ) < 0 ) THEN GOTO 40 ELSE IF (( JJ - KK ) == 0 ) THEN GOTO 100 ELSE GOTO 50 ENDIF 40 YJK = Y ( RENUM ( JJ + (KK-1)*N ) ) GOTO 60 50 YJK = Y ( RENUM ( KK + (JJ-1)*N ) ) C C CONTRAIREMENT A LA PROGRAMMATION LINEAIRE , ON TESTE C TROIS PAR TROIS LES CONTRAINTES YIJ + YIK - YJK < 2 C ---------------------------------------------------- C REFUS : YIJ , YIK , YJK ( VALEURS POSSIBLES -1 , 0 , +1 ) C 1 1 0 C 1 0 1 C 0 1 1 C 60 REFUS = VAL01+YIK+YJK .EQ. 2 C IF ( REFUS ) RETURN C 100 CONTINUE C RETURN C END amap/src/matrice.h0000644000176200001440000000552114673473630013574 0ustar liggesusers#ifndef AMAP_matrice #define AMAP_matrice 1 #include namespace amap { template class array { public: /** accessor to a data * \param index index in array * \return data */ virtual T & operator[] (int index) = 0; }; template class vecteur { private: array & myMatrice; int indexFirstData; int stepByData; int vectorSize; public: vecteur(array & values_p, int indexFirstData_p, int stepByData_p, int size) : myMatrice(values_p), indexFirstData(indexFirstData_p), stepByData(stepByData_p) , vectorSize(size) { }; /** * get data at index. * \param index index of data. * \return data. */ virtual T & operator[] (int index) { if (index >= vectorSize) { Rf_error("vecteur::operator[]: out of bound %d - %d", index, vectorSize); } return (myMatrice)[indexFirstData + (index * stepByData)]; }; /** * get size. * \return size */ virtual int size() { return vectorSize; }; }; /** * Matrix data. * * a matrix with 3 row and 4 cols * +---+---+---+---+ * | 0 | 3 | 6 | 9 | * +---+---+---+---+ * | 1 | 4 | 7 | 10| * +---+---+---+---+ * | 2 | 5 | 8 | 11| * +---+---+---+---+ * * */ template class matrice : public array { private: /** * Array with values */ T * values; /** * number of rows. */ int nrow ; /** * number of cols. */ int ncol; public: /** * Contructor. * \param values_p the data matrix * \param nrows_p the number of rows * \param ncols_p the number of cols. */ matrice(T * values_p, int nrow_p, int ncol_p) : values(values_p), nrow(nrow_p), ncol(ncol_p) { }; /** * Accessor on data. */ virtual T & operator[] (int index) { return values[index]; }; /** * get a row. * \return row i. */ vecteur getRow(int index) { if (index >= nrow) { Rf_error("matrice::getRow(): out of bound %d - %d", index, nrow); } vecteur myRow (*this, index, nrow, ncol); return myRow; }; /** * get a column. * \return col i. 
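   *
   * A minimal usage sketch (the buffer and its values below are only an
   * illustration of the 3 x 4 column-major layout drawn in the class
   * comment; they are not taken from the package sources):
   * \code
   *   double buf[12] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
   *   amap::matrice<double> m(buf, 3, 4);        // 3 rows, 4 columns
   *   amap::vecteur<double> row1 = m.getRow(1);  // cells 1, 4, 7, 10 (step nrow = 3)
   *   amap::vecteur<double> col2 = m.getCol(2);  // cells 6, 7, 8 (step 1)
   *   double v = col2[1];                        // == buf[7]
   * \endcode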
*/ vecteur getCol(int index) { if (index >= ncol) { Rf_error("matrice::getCol(): out of bound %d - %d", index, ncol); } vecteur myCol (*this, index*nrow, 1, nrow); return myCol; }; /** * getSize. */ int size() { return nrow * ncol; }; /** * accessor. * \return number of rows. */ int getNrow() { return nrow; } /** * accessor. * \return number of rows. */ int getNcol() { return ncol; } }; }; #endif amap/src/kmeans.h0000644000176200001440000000043313275645271013422 0ustar liggesusers #ifndef KMEANS_HEADER_AMAP #define KMEANS_HEADER_AMAP 1 #ifdef __cplusplus extern "C" { #endif void kmeans_Lloyd2(double *x, int *pn, int *pp, double *cen, int *pk, int *cl, int *pmaxiter, int *nc, double *wss, int * method); #ifdef __cplusplus } #endif #endif amap/src/Makevars.in0000644000176200001440000000003214326047373014064 0ustar liggesusersPKG_LIBS = -pthread -lc_r amap/src/smartPtr.h0000644000176200001440000000210214673473634013760 0ustar liggesusers#ifndef AMAP_MEMORY #define AMAP_MEMORY 1 #include #include "R.h" /** * This class is like std:auto_ptr - but with a new xxx[size] and delete [] ... */ template class SmartPtr { private: T* data; public: /** * Constructor. Create an array T*. * * \param size size of array. */ SmartPtr(int size) : data(0) { reset(size); } ~SmartPtr() { clear(); } /** */ void reset(int size) { clear(); if (size > 0) { //data = new T[size]; data = (T*) calloc(size, sizeof(T)); if ( data == 0) { int sizeInMo = size * sizeof(T) / 1024 / 1024; Rf_error("AMAP: cannot allocate %d Mo", sizeInMo); } } else { data = 0; } } /** * Clear data (free). */ void clear() { if (data != 0 ) { //delete [] data; free(data); } data = 0; } /** Accesssor. */ T & operator[](int i) { return data[i]; } /** * Get data. */ T * get(){ return data; } private: /** * This is to avoid a copy. */ SmartPtr(const SmartPtr&){}; }; #endif amap/src/hclust.cpp0000644000176200001440000001750214705246551014003 0ustar liggesusers/** \file hclust.c * * \brief Hierarchical Clustering. * * \date Created : 14/11/02 * \date Last Modified : Time-stamp: <2011-11-04 22:29:32 antoine> * * \author F. Murtagh, ESA/ESO/STECF, Garching, February 1986. * \author Modifications for R: Ross Ihaka, Dec 1996 * Fritz Leisch, Jun 2000 * all vars declared: Martin Maechler, Apr 2001 * \author C adaptation: A. Lucas, Dec 2002 */ /* * Parameters: * * N the number of points being clustered * DISS(LEN) dissimilarities in lower half diagonal * storage; LEN = N.N-1/2, * IOPT clustering criterion to be used, * IA, IB, CRIT history of agglomerations; dimensions * N, first N-1 locations only used, * MEMBR, NN, DISNN vectors of length N, used to store * cluster cardinalities, current nearest * neighbour, and the dissimilarity assoc. * with the latter. * MEMBR must be initialized by R to the * default of rep(1, N) * FLAG boolean indicator of agglomerable obj. * clusters. * * * Fortran algorithme: * F. Murtagh, ESA/ESO/STECF, Garching, February 1986. * Modifications for R: Ross Ihaka, Dec 1996 * Fritz Leisch, Jun 2000 * all vars declared: Martin Maechler, Apr 2001 * C adaptation: A. Lucas, Dec 2002 * ------------------------------------------------------------ */ #include #include #include "hclust.h" #include "distance.h" #include "hclust_T.h" #define max( A , B ) ( ( A ) > ( B ) ? ( A ) : ( B ) ) #define min( A , B ) ( ( A ) < ( B ) ? ( A ) : ( B ) ) /** Hierachical clustering * \brief compute hierachical clustering from a distance matrix * hclust can be called directly by R, or with hcluster C function. 
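 *
 * A minimal sketch of a direct C-level call (3 points, single linkage; the
 * array sizes and the packed ordering of \c diss follow the parameter
 * description below, and the numeric values are only an illustration):
 * \code
 *   int    n = 3, len = 3, iopt = 2;             // 2 = single link
 *   double diss[3]  = {1.0, 4.0, 2.0};           // d(1,2), d(1,3), d(2,3)
 *   double membr[3] = {1.0, 1.0, 1.0};           // initialised to rep(1, n)
 *   double crit[3];
 *   int    ia[3], ib[3], iorder[3], result;
 *   hclust(&n, &len, &iopt, ia, ib, iorder, crit, membr, diss, &result);
 *   // result == 0 on success; merges in ia/ib, heights in crit
 * \endcode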
* \param n number of individuals * \param len = n (n-1) / 2 (size of distance matrix) * \param *iopt integer -> link used * \param ia, ib result (merge) * \param iorder result (order) * \param crit result (height) * \param membr number of individuals in each cluster. * \param diss distance matrix (size len) * \param result flag 0 => correct * 1 => Pb * 2 => Cannot allocate memory * 3 => Pb with distance matrix * * \note this is an adaptation of the fortran function designed from the * R core team. */ void hclust(int *n,int *len, int *iopt ,int *ia , int *ib,int *iorder,double *crit,double *membr,double *diss,int *result) { int nbprocess = 1; if (*iopt!=hclust_T::CENTROID2) { hclust_T::hclust(&nbprocess,NULL,*n,*n,NULL,n,len,iopt ,ia ,ib,iorder, crit,membr,diss, result); } } /* end function hclust */ /** Paralelized hierarchical clustering * \brief allocate distance matrix execute function R_distancepar, launch * hclust on this distance matrix, and free memory of distance matrix. * \param x: data nr x nc * \param nr,nc number of row and columns * \param membr: member, vector 1:nr * \param method integer -> distance method * \param diag integer: if we compute diagonal in distance matrix (usually no) * \param iopt integer -> link used * \param nbprocess nb of process for parallelization * \param precision 1: float; 2: double * \param ia, ib result (merge) * \param iorder result (order) * \param crit result (height) * \param result flag 0 => correct * 1 => Pb * 2 => Cannot allocate memory * 3 => Pb with distance matrix * * \note this is an adaptation of the fortran function designed from the * R core team. */ void hcluster(double *x, int *nr, int *nc, int *diag, int *method, int *iopt ,int *ia , int *ib,int *iorder,double *crit,double *membr,int *nbprocess,int * precision, int * result) { if(*precision == 1) hclust_T::hcluster(x,nr,nc,diag,method, iopt , ia ,ib,iorder,crit,membr, nbprocess, result); else hclust_T::hcluster(x,nr,nc,diag,method, iopt , ia ,ib,iorder,crit,membr, nbprocess, result); return; } /*+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++*/ /* */ /* Given a HIERARCHIC CLUSTERING, described as a sequence of */ /* agglomerations, prepare the seq. of aggloms. and "horiz." */ /* order of objects for plotting the dendrogram using S routine*/ /* 'plclust'. */ /* */ /* Parameters: */ /* */ /* IA, IB: vectors of dimension N defining the agglomer- */ /* ations. */ /* IIA, IIB: used to store IA and IB values differently */ /* (in form needed for S command `plclust` */ /* IORDER: "horiz." order of objects for dendrogram */ /* */ /* F. Murtagh, ESA/ESO/STECF, Garching, June 1991 */ /* C adaptation: A. Lucas, Nov 2002 */ /* */ /* HISTORY */ /* */ /* Adapted from routine HCASS, which additionally determines */ /* cluster assignments at all levels, at extra comput. expense*/ /* */ /*-------------------------------------------------------------*/ void hierclust::hcass2( int *n, int *ia, int *ib,int *iorder, int *iia, int *iib) { int i,j,k,k1,k2,loc; /* Following bit is to get seq. of merges into format acceptable to plclust * I coded clusters as lowest seq. no. of constituents; S's `hclust' codes * singletons as -ve numbers, and non-singletons with their seq. nos. */ for (i=0; i< *n; i++ ) { iia[i]= - ia[i]; iib[i]= - ib[i]; } for (i=0; i< (*n-2); i++ ) /* In the following, smallest (+ve or -ve) seq. no. 
wanted */ { k = min ( ia[i] , ib[i] ); for (j=i+1; j< (*n-1); j++ ) { if( ia[j] == k ) iia[j]= i+1; if( ib[j] == k ) iib[j]= i+1; } } for (i=0; i< (*n-1); i++ ) { if( (iia[i] > 0) && (iib[i] < 0) ) { k= iia[i]; iia[i] = iib[i]; iib[i] = k; } if( (iia[i] > 0) && (iib[i] > 0)) { k1= min ( iia[i], iib[i] ); k2= max ( iia[i], iib[i] ); iia[i] = k1; iib[i] = k2; } } /* New part for 'order' */ iorder[0] = - iia[*n-2]; iorder[1] = - iib[*n-2]; loc = 2; for (i=(*n-3); i>= 0; i-- ) { for (j=0; j< loc; j++ ) { if ( -iorder[j] == i +1) { /* REPLACE IORDER(J) WITH IIA(I) AND IIB(I) */ iorder[j] = - iia[i]; if (j == (loc-1)) { loc++; iorder[loc-1]= - iib[i]; break; /* for j */ } loc++; for (k=loc-1; k >= (j+1); k--) { iorder[k]=iorder[k-1]; } iorder[j+1]= - iib[i]; break; /* for j */ } } } } amap/src/distance_T.h0000644000176200001440000003174613574503147014231 0ustar liggesusers #ifndef _AMAP_DISTANCE_TEMPLATE #define _AMAP_DISTANCE_TEMPLATE 1 #include "smartPtr.h" #include "matrice.h" namespace amap { template class distance_T { public: /* == 1,2,..., defined by order in the r function dist */ enum { EUCLIDEAN=1, MAXIMUM, MANHATTAN, CANBERRA, BINARY ,PEARSON, CORRELATION, SPEARMAN, KENDALL, ABSPEARSON, ABSCORRELATION}; class T_tri { public: SmartPtr data_tri_x; SmartPtr order_tri_x; SmartPtr rank_tri_x; SmartPtr data_tri_y; SmartPtr order_tri_y; SmartPtr rank_tri_y; T_tri() : data_tri_x(0), order_tri_x(0), rank_tri_x(0), data_tri_y(0), order_tri_y(0), rank_tri_y(0) {}; void reset(int size) { data_tri_x.reset(size); order_tri_x.reset(size); rank_tri_x.reset(size); data_tri_y.reset(size); order_tri_y.reset(size); rank_tri_y.reset(size); } }; typedef T (* distfunction)(vecteur & x, vecteur & y , int *, T_tri &); private: /** \brief arguments sent to distance thread */ struct T_argument { short int id; double * x; int * nr; int * nc; bool dc; T * d; int * method; int nbprocess; int * ierr; int i2; }; // only static functions; no attributes distance_T(); ~distance_T(); public: /** return a distance function, depending on method */ static void getDistfunction(int method,distfunction & distfun); /** \brief R_distance compute parallelized distance. * * compute distance and call function thread_dist * that call one of function R_euclidean or R_... * * \param x input matrix * \param nr,nc number of row and columns * nr individuals with nc values. * \param d distance half matrix. * \param diag if we compute diagonal of dist matrix (usualy: no). * \param method 1, 2,... method used (correspond to the enum) * \param nbprocess: number of threads to create * \param ierr error return; 1 good; 0 missing values * \param i2: if -1: ignored, else, compute * distance between individual i2 and others */ static void distance(double *x, int *nr, int *nc, T *d, int *diag, int *method,int *nbprocess, int * ierr,int i2); /** \brief R_distance_kms: compute distance between individual i1 and * centroid i2 * * compute distance and call one of function R_euclidean or R_... * This function is called by kmeans_Lloyd2 * * \param x input matrix (individuals) * \param y input matrix (centroids) * \param nr1,nr2,nc number of row (nr1:x, nr2:y) and columns * nr individuals with nc values. * \param i1, i2: indice of individuals (individual i1, centroid i2) * \param method 1, 2,... 
method used (correspond to the enum) * \param ierr for NA 0 if no value can be comuted due to NA * \param opt optional parameter required for spearman */ static T distance_kms(vecteur & x, vecteur & y , int *method, int * ierr, T_tri & opt); private: static void* thread_dist(void* arguments); /** \brief Distance euclidean (i.e. sqrt(sum of square) ) * * Euclidean distance between 2 vectors a,b is * \f[ d = \sqrt{ \sum_i^n (a_i - b_i)^2} * \f] * * When NA values found for a_i or b_i, both a_i and b_i are * skipped. Number of values skipped is couned (#NA in next formula) * * \f[ d = \sqrt{\frac{n}{#NA} \sum_{i=1; a_i \in \Re; b_i \in \Re}^n (a_i - b_i)^2} * \f] * * This function compute distance between 2 vectors x[i1,] & y[i2,] * x and y are matrix; we use here only line i1 from x and * line i2 from y. Number of column (nc) is the same in x and y, * number of column can differ (nr_x, nr_y). * * Flag will be set to 0 if too many NA to compute distance * * When call by function distance or hclust, x and y are the same; it computes * distance between vector x[i1,] and x[i2,] * * \param x matrix of size nr_x * nc; line i1 is of interest * \param y matrix of size nr_y * nc; line i1 is of interest * \param nr_x number of row in matrix x * \param nr_y number of row in matrix y * \param nc number of column in matrix x or y * \param i1 row choosen in matrix x * \param i2 row choosen in matrix y * \param flag set to 0 if NA value computed in distance * \param opt unused * * \return distance value * */ static T R_euclidean(vecteur & x, vecteur & y , int * flag, T_tri & opt); /** \brief Distance maximum (supremum norm) * * Maximum distance between 2 vectors a,b is * \f[ d = \max_i |a_i - b_i| * \f] * * NA values are omitted. * * This function compute distance between 2 vectors x[i1,] & y[i2,] * x and y are matrix; we use here only line i1 from x and * line i2 from y. Number of column (nc) is the same in x and y, * number of column can differ (nr_x, nr_y). * * Flag will be set to 0 if too many NA to compute distance * * When call by function distance or hclust, x and y are the same; it computes * distance between vector x[i1,] and x[i2,] * * \param x matrix of size nr_x * nc; line i1 is of interest * \param y matrix of size nr_y * nc; line i1 is of interest * \param nr_x number of row in matrix x * \param nr_y number of row in matrix y * \param nc number of column in matrix x or y * \param i1 row choosen in matrix x * \param i2 row choosen in matrix y * \param flag set to 0 if NA value computed in distance * \param opt unused * * \return distance value * */ static T R_maximum(vecteur & x, vecteur & y , int * flag, T_tri & opt); /** \brief manhattan (i.e. sum of abs difference ) * * manhattan distance between 2 vectors a,b is * \f[ d = \sum_i^n |a_i - b_i| * \f] * * When NA values found for a_i or b_i, both a_i and b_i are * skipped. Number of values skipped is couned (#NA in next formula) * * \f[ d = \frac{n}{#NA} \sum_{i=1; a_i \in \Re; b_i \in \Re}^n |a_i - b_i|} * \f] * * This function compute distance between 2 vectors x[i1,] & y[i2,] * x and y are matrix; we use here only line i1 from x and * line i2 from y. Number of column (nc) is the same in x and y, * number of column can differ (nr_x, nr_y). 
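   *
   * Worked example (no missing values): for a = (1, 2, 2) and b = (4, 6, 2),
   * \f[ d = \sqrt{(1-4)^2 + (2-6)^2 + (2-2)^2} = \sqrt{25} = 5 \f]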
* * Flag will be set to 0 if too many NA to compute distance * * When call by function distance or hclust, x and y are the same; it computes * distance between vector x[i1,] and x[i2,] * * \param x matrix of size nr_x * nc; line i1 is of interest * \param y matrix of size nr_y * nc; line i1 is of interest * \param nr_x number of row in matrix x * \param nr_y number of row in matrix y * \param nc number of column in matrix x or y * \param i1 row choosen in matrix x * \param i2 row choosen in matrix y * \param flag set to 0 if NA value computed in distance * \param opt unused * * \return distance value * */ static T R_manhattan(vecteur & x, vecteur & y , int * flag, T_tri & opt); /** \brief Camberra distance * * Camberra distance between 2 vectors a,b is * \f[ d = \sum_i^n |a_i - b_i| / |a_i + b_i| * \f] * * When NA values found for a_i or b_i, both a_i and b_i are * skipped. Number of values skipped is couned (#NA in next formula) * * * This function compute distance between 2 vectors x[i1,] & y[i2,] * x and y are matrix; we use here only line i1 from x and * line i2 from y. Number of column (nc) is the same in x and y, * number of column can differ (nr_x, nr_y). * * Flag will be set to 0 if too many NA to compute distance * * When call by function distance or hclust, x and y are the same; it computes * distance between vector x[i1,] and x[i2,] * * \param x matrix of size nr_x * nc; line i1 is of interest * \param y matrix of size nr_y * nc; line i1 is of interest * \param nr_x number of row in matrix x * \param nr_y number of row in matrix y * \param nc number of column in matrix x or y * \param i1 row choosen in matrix x * \param i2 row choosen in matrix y * \param flag set to 0 if NA value computed in distance * \param opt unused * * \return distance value * */ static T R_canberra(vecteur & x, vecteur & y , int * flag, T_tri & opt); /** \brief Distance binary */ static T R_dist_binary(vecteur & x, vecteur & y , int * flag, T_tri & opt); /** \brief Pearson / Pearson centered (correlation) * \note Added by A. Lucas */ static T R_pearson(vecteur & x, vecteur & y , int * flag, T_tri & opt); /** \brief Distance correlation (Uncentered Pearson) * \note Added by A. Lucas */ static T R_correlation(vecteur & x, vecteur & y , int * flag, T_tri & opt); /** \brief Pearson / Pearson centered (correlation) * \note Added by L. Cerulo */ static T R_abspearson(vecteur & x, vecteur & y , int * flag, T_tri & opt); /** \brief Distance correlation (Uncentered Pearson) * \note Added by L. Cerulo */ static T R_abscorrelation(vecteur & x, vecteur & y , int * flag, T_tri & opt); /** \brief Spearman distance (rank base metric) * * Spearman distance between 2 vectors a,b is * \f[ d = \sum_i^n (rank(a_i) - rank(b_i)) ^ 2 * \f] * * If a NA found: return NA, flag is set to 0. * * * This function compute distance between 2 vectors x[i1,] & y[i2,] * x and y are matrix; we use here only line i1 from x and * line i2 from y. Number of column (nc) is the same in x and y, * number of column can differ (nr_x, nr_y). 
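   *
   * Worked example (no missing values): for a = (1, 2, 3) and b = (2, 2, 5),
   * \f[ d = |1-2| + |2-2| + |3-5| = 3 \f]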
* * * When call by function distance or hclust, x and y are the same; it computes * distance between vector x[i1,] and x[i2,] * * \param x matrix of size nr_x * nc; line i1 is of interest * \param y matrix of size nr_y * nc; line i1 is of interest * \param nr_x number of row in matrix x * \param nr_y number of row in matrix y * \param nc number of column in matrix x or y * \param i1 row choosen in matrix x * \param i2 row choosen in matrix y * \param flag set to 0 if NA value computed in distance * \param opt a set of 6 vectors of size nc, allocated but uninitialised. * aim of this parameter is to avoid several vector allocation * * \return distance value * */ static T R_spearman(vecteur & x, vecteur & y , int * flag, T_tri & opt); /** \brief Kendall distance (rank base metric) * * Kendall distance between 2 vectors a,b is * \f[ d = \sum_{i,j} K_{i,j}(x,y) * \f] * * With \f$ K_{i,j}(x,y)\f$ is 0 if * \f$ (x_i, x_j) \f$ in same order as \f$ ( y_i,y_j) \f$, * 1 if not. * * * If a NA found: return NA, flag is set to 0. * * * This function compute distance between 2 vectors x[i1,] & y[i2,] * x and y are matrix; we use here only line i1 from x and * line i2 from y. Number of column (nc) is the same in x and y, * number of column can differ (nr_x, nr_y). * * * When call by function distance or hclust, x and y are the same; it computes * distance between vector x[i1,] and x[i2,] * * \param x matrix of size nr_x * nc; line i1 is of interest * \param y matrix of size nr_y * nc; line i1 is of interest * \param nr_x number of row in matrix x * \param nr_y number of row in matrix y * \param nc number of column in matrix x or y * \param i1 row choosen in matrix x * \param i2 row choosen in matrix y * \param flag set to 0 if NA value computed in distance * \param opt a set of 6 vectors of size nc, allocated but uninitialised. * aim of this parameter is to avoid several vector allocation * * \return distance value * */ static T R_kendall(vecteur & x, vecteur & y , int * flag, T_tri & opt); }; }; #endif #ifndef _AMAP_DISTANCE_TEMPLATE_CPP #define _AMAP_DISTANCE_TEMPLATE_CPP 1 #include "distance_T.h.h" #endif amap/src/kmeans.cpp0000644000176200001440000000750313275645271013762 0ustar liggesusers/*! \file : kmeans.c * * * \brief K-means clustering * * \date Created : before 2005 * \date Last Modified : Time-stamp: <2014-03-01 12:51:00 antoine> * * \author R core team. Modified by A. Lucas for distance choice. * * R : A Computer Language for Statistical Data Analysis * Copyright (C) 2004 The R Development Core Team. * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ #include /*#include "modreg.h" */ /* for declarations for registration */ #include "kmeans.h" #include "distance_T.h" using namespace amap; /** K-means clustering using Lloyd algorithm. * \brief compute k-nearest centroid of our dataset. 
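 *
 * A minimal sketch of a direct call (4 points in 2 dimensions, 2 centres,
 * Euclidean distance; the data below are only an illustration, stored
 * column-major as described in the parameters):
 * \code
 *   double x[8]   = {0, 0, 10, 10,   0, 1, 10, 11}; // 4 x 2, column-major
 *   double cen[4] = {0, 10,   0, 10};               // 2 x 2 initial centres
 *   int    n = 4, p = 2, k = 2, maxiter = 10, method = 1; // 1 = euclidean
 *   int    cl[4], nc[2];
 *   double wss[2];
 *   kmeans_Lloyd2(x, &n, &p, cen, &k, cl, &maxiter, nc, wss, &method);
 *   // cl[i] (in 1..k) is the cluster of point i, nc[j] the size of cluster j
 * \endcode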
* \param x matrix of size nxp: input data * \param pn nb of individual (pn=n) * \param pp number of observation by individual (pp=p) * \param cen matrix of size k*p: centroids * \param pk number of centroids (k) * \param cl vector of flag of size n * \param pmaxiter integer: maximum iteration * \param nc vector of size k: number of individuals in cluster k. * \param wss vector of size k: sum of square in each cluster. * \param method: which method to use. */ void kmeans_Lloyd2(double *x, int *pn, int *pp, double *cen, int *pk, int *cl, int *pmaxiter, int *nc, double *wss, int * method) { /* n: nb of individuals * k: nb of clusters * p: number of abservations by individuals * x: matrix of size nxp * cen: matrix of size kxp */ int n = *pn, k = *pk, p = *pp, maxiter = *pmaxiter; int iter, i, j, c, it, inew = 0; double best, dd; Rboolean updated; distance_T::T_tri opt; int ierr[1]; //double * data_tri; //int * order_tri; //int * rank_tri; matrice dataMatrice (x, n,p); matrice centroidMatrice (cen, k, p); if( (*method == distance_T::SPEARMAN) || (*method == distance_T::KENDALL)) { opt.reset(p); } for(i = 0; i < n; i++) cl[i] = -1; for(iter = 0; iter < maxiter; iter++) { updated = FALSE; for(i = 0; i < n; i++) { /* find nearest centre for each point */ best = R_PosInf; for(j = 0; j < k; j++) { vecteur dataI = dataMatrice.getRow(i); vecteur centroidJ = centroidMatrice.getRow(j); dd = distance_T::distance_kms(dataI, centroidJ,method,ierr,opt); /*printf("| %f",dd); */ if(dd < best) { best = dd; inew = j+1; } } if(cl[i] != inew) { updated = TRUE; cl[i] = inew; } } if(!updated) break; /* update each centre */ for(j = 0; j < k*p; j++) cen[j] = 0.0; for(j = 0; j < k; j++) nc[j] = 0; for(i = 0; i < n; i++) { it = cl[i] - 1; nc[it]++; for(c = 0; c < p; c++) cen[it+c*k] += x[i+c*n]; } for(j = 0; j < k*p; j++) cen[j] /= nc[j % k]; } *pmaxiter = iter + 1; /* for(j = 0; j < k; j++) wss[j] = 0.0; */ for(i = 0; i < n; i++) { it = cl[i] - 1; vecteur dataI = dataMatrice.getRow(i); vecteur centroidJ = centroidMatrice.getRow(it); wss[it] = distance_T::distance_kms(dataI, centroidJ,method,ierr,opt); wss[it] = wss[it] * (wss[it]) ; /* for(c = 0; c < p; c++) { tmp = x[i+n*c] - cen[it+k*c]; wss[it] += tmp * tmp; }*/ } } amap/src/distance.cpp0000644000176200001440000000270413275645271014274 0ustar liggesusers #include "distance.h" #include "distance_T.h" using namespace amap; /** * R_distance: compute parallelized distance. Function called direclty by R * \brief compute distance and call function thread_dist * that call one of function R_euclidean or R_... * \param x input matrix * \param nr,nc number of row and columns * nr individuals with nc values. * \param d distance half matrix. * \param diag if we compute diagonal of dist matrix (usualy: no). * \param method 1, 2,... method used * \param nbprocess: number of threads to create * \param ierr error return; 1 good; 0 missing values */ void R_distance(double *x, int *nr, int *nc, double *d, int *diag, int *method,int *nbprocess, int * ierr) { distance_T::distance(x,nr,nc, d,diag,method, nbprocess, ierr,-1); } /** * Sort, and return order and rank * \brief This function is used by R_spearman. 
* order and rank must be initialised with 0:(n-1) * make sort, return x sorted * order = order(x) - 1 * rank = rank(x) -1 * */ void rsort_rank_order(double *x, int *order, int *rank, int * n) { double v; int i, j, h, iv; for (h = 1; h <= *n / 9; h = 3 * h + 1); for (; h > 0; h /= 3) for (i = h; i < *n; i++) { v = x[i]; iv = order[i]; j = i; while (j >= h && (x[j - h] > v)) { x[j] = x[j - h]; order[j] = order[j-h]; rank[order[j]] = j; j -= h; } x[j] = v; order[j] = iv; rank[order[j]] = j; } } amap/src/hclust_T.h.h0000644000176200001440000001771214705246732014165 0ustar liggesusers #define _AMAP_HCLUST_TEMPLATE_CPP 1 #include "hclust.h" #include "distance_T.h" #include "distance.h" #include "hclust_T.h" #include #include "smartPtr.h" #include "R.h" #include #include namespace hclust_T { /** hierarchical clustering * \brief allocate distance matrix execute function R_distance, launch * hclust on this distance matrix, and free memory of distance matrix. * \param x: data nr x nc * \param nr,nc number of row and columns * \param membr: member, vector 1:nr * \param method integer -> distance method * \param diag integer: if we compute diagonal in distance matrix (usually no) * \param iopt integer -> link used * \param ia, ib result (merge) * \param iorder result (order) * \param crit result (height) * \param nbprocess number of processes (thread) used * \param result flag 0 => correct * 1 => Pb * 2 => Cannot allocate memory * 3 => Pb with distance matrix * */ template void hcluster(double *x, int *nr, int *nc, int *diag, int *method, int *iopt ,int *ia , int *ib,int *iorder,double *crit,double *membr,int *nbprocess, int * result) { int len = (*nr * (*nr-1) )/2; int flag; SmartPtr d (len); *result = 1; /* * Calculate d: distance matrix */ amap::distance_T::distance(x,nr,nc,d.get(),diag,method,nbprocess,&flag,-1); if(flag == 0) { Rprintf("AMAP: Unable to compute Hierarchical Clustering: missing values in distance matrix\n"); *result = 3; return; } /* * Hierarchical clustering */ hclust_T::hclust(nbprocess,x,*nr,*nc,method,nr,&len,iopt ,ia ,ib,iorder,crit,membr,d.get(),result); *result = 0; } template void hclust(int * nbprocess,double *mx,int nr, int nc, int *method,int *n,int *len, int *iopt ,int *ia , int *ib,int *iorder,double *crit,double *membr,T *diss,int *result) { int im=0; int jm=0; int jj,ncl,ind,i2,j2,k,ind1,ind2,ind3; double inf,dmin,x,xx; SmartPtr items (0); // nn , disnn : for each cluster i, disnn{i] is shortest cluster j = nn[i] to i. SmartPtr nn (*n); SmartPtr disnn(*n); SmartPtr flag(*n); int h,idx1,idxitem1,idx2; *result = 1; if(*iopt==CENTROID2) { items.reset(*n * nc); } /* Initialisation */ for (int i=0; i<*n ; i++) { flag[i]=true; idxitem1=i; if(items.get() != NULL) { for (int j=0; j::max(); if(*iopt==WARDD2) { for(int i = 0 ; i < *len; i++){ diss[i] = diss[i] * diss[i]; } } /* * Carry out an agglomeration - first create list of NNs */ for (int i=0; i<(*n-1) ; i++) { dmin=inf; for (int j=i+1; j<*n; j++) { ind = ioffst(*n,i,j); if (diss[ind]<= dmin) { dmin = diss[ind]; jm=j; } } nn[i]=jm; disnn[i]=dmin; } /* * Repeat previous steps until N-1 agglomerations carried out. */ while (ncl > 1) { /* * Next, determine least diss. using list of NNs */ dmin = inf; for (int i=0; i<(*n-1) ; i++) { if( flag[i] && disnn[i] < dmin ) { dmin = disnn[i]; im=i; jm=nn[i]; } } /* * This allows an agglomeration to be carried out. 
* At step n-ncl, we found dmin=dist[i2,j2] */ i2=MIN (im,jm); j2=MAX (im,jm); ia[*n-ncl]=i2+1; ib[*n-ncl]=j2+1; if (*iopt == WARDD2) { dmin = sqrt(dmin); } crit[*n-ncl]=dmin; ncl--; /* * Update dissimilarities from new cluster. */ flag[j2]=false; dmin=inf; jj=0; /* * Cluster3 CENTROID METHOD - IOPT=8. */ if (*iopt==CENTROID2) { /* compute centroind coordinates*/ idx1=i2; idx2=j2; ind3=ioffst(*n,i2,j2); //printf("Aggregate %d-%d %d-%d (method=%d, dmin=%f (%f))\n",i2,j2,im,jm,*method,dmin,diss[ind3]); for(h = 0 ; h< nc ; h++) { if(R_FINITE(mx[idx1]) && R_FINITE(mx[idx2])) { mx[idx1] = (items[idx1]*mx[idx1] + items[idx2]*mx[idx2])/(items[idx1]+items[idx2]); items[idx1]+=items[idx2]; } else { if(!R_FINITE(mx[idx1]) && R_FINITE(mx[idx2])) { mx[idx1] = mx[idx2]; items[idx1]=items[idx2]; } } idx1 += nr; idx2 += nr; } int flg; int dg=0; /* recompute all distances in parallel */ amap::distance_T::distance(mx,&nr,&nc,diss,&dg,method,nbprocess,&flg, i2); /*update disnn and nn*/ for ( k=0; k< *n ; k++) { if(flag[k] && (k != i2) ) { ind1 = ioffst(*n,i2,k); if( (i2 < k) && ( diss[ind1] < dmin ) ) { dmin = diss[ind1]; jj=k; } if( (i2 > k) && (nn[k]!=j2) && ( diss[ind1] < disnn[k] ) ) { disnn[k]=diss[ind1]; nn[k]=i2; } } } } else for ( k=0; k< *n ; k++) { if(flag[k] && (k != i2) ) { x = membr[i2] + membr[j2] + membr[k]; ind1 = ioffst(*n,i2,k); ind2 = ioffst(*n,j2,k); ind3=ioffst(*n,i2,j2); xx=diss[ind3]; /* * Gi & Gj are agglomerated => Gii * We are calculating D(Gii,Gk) (for all k) * * diss[ind1] = D(Gi,Gk) (will be replaced by D(Gii,Gk)) * diss[ind2] = D(Gj,Gk) * xx = diss[ind3] = D(Gi,Gj) * * membr[i2] = #Gi * membr[j2] = #Gj * membr[k] = #Gk * * x = #Gi + #Gj + #Gk */ switch(*iopt) { /* * WARD'S MINIMUM VARIANCE METHOD - IOPT=1. */ case WARD: case WARDD2: { diss[ind1] = (membr[i2]+membr[k])* diss[ind1] + (membr[j2]+membr[k])* diss[ind2] - membr[k] * xx; diss[ind1] = diss[ind1] / x; break; } /* * SINGLE LINK METHOD - IOPT=2. */ case SINGLE: diss[ind1] = MIN (diss[ind1],diss[ind2]); break; /* * COMPLETE LINK METHOD - IOPT=3. */ case COMPLETE: diss[ind1] = MAX (diss[ind1],diss[ind2]); break; /* * AVERAGE LINK (OR GROUP AVERAGE) METHOD - IOPT=4. */ case AVERAGE: diss[ind1] = ( membr[i2] * diss[ind1] + membr[j2] * diss[ind2] ) / (membr[i2] + membr[j2]); break; /* * MCQUITTY'S METHOD - IOPT=5. */ case MCQUITTY: diss[ind1] = 0.5 * diss[ind1]+0.5*diss[ind2]; break; /* * MEDIAN (GOWER'S) METHOD - IOPT=6. */ case MEDIAN: diss[ind1] = 0.5* diss[ind1]+0.5*diss[ind2] -0.25*xx; break; /* * CENTROID METHOD - IOPT=7. */ case CENTROID: diss[ind1] = (membr[i2]*diss[ind1] + membr[j2]*diss[ind2] - membr[i2] * membr[j2]*xx / (membr[i2] + membr[j2]) ) / (membr[i2] + membr[j2]); break; } /* end switch */ if(i2 < k) { if( diss[ind1] < dmin ) { dmin = diss[ind1]; jj=k; } } else{ if( diss[ind1] < disnn[k] ) { disnn[k]=diss[ind1]; nn[k]=i2; } } } /* 800 */ } membr[i2] = membr[i2] + membr[j2]; disnn[i2] = dmin; nn[i2] = jj; /* * Update list of NNs insofar as this is required. */ for (int i=0; i< (*n-1) ; i++) { if( flag[i] && ((nn[i] == i2 ) || (nn[i] == j2) ) ) { dmin = inf; for (int j=i+1; j< *n ; j++) { ind= ioffst(*n,i,j); if(flag[j] && (diss[ind] < dmin) ) { dmin =diss[ind]; jj=j; } } nn[i] = jj; disnn[i] = dmin; } } /*printf("%d/%d\n",ncl,*n);*/ } /* end of while */ SmartPtr iia (*n ); SmartPtr iib (*n ); hierclust::hcass2(n,ia,ib,iorder,iia.get(),iib.get()); /* * Cette boucle devrait etre evitee... 
*/ for (int i=0; i< *n; i++ ) { ia[i]= iia[i]; ib[i]= iib[i]; } *result = 0; } } amap/src/distance.h0000644000176200001440000000147413275645271013744 0ustar liggesusers #ifdef __cplusplus extern "C" { #endif /** * R_distance: compute parallelized distance. Function called direclty by R * \brief compute distance and call function thread_dist * that call one of function R_euclidean or R_... * \param x input matrix * \param nr,nc number of row and columns * nr individuals with nc values. * \param d distance half matrix. * \param diag if we compute diagonal of dist matrix (usualy: no). * \param method 1, 2,... method used * \param nbprocess: number of threads to create * \param ierr error return; 1 good; 0 missing values */ void R_distance(double *x, int *nr, int *nc, double *d, int *diag, int *method,int * nbprocess, int * ierr); void rsort_rank_order(double *x, int *order, int *rank, int * n); #ifdef __cplusplus }; #endif amap/src/hclust_T.h0000644000176200001440000000212214705232702013714 0ustar liggesusers #ifndef _AMAP_HCLUST_TEMPLATE #define _AMAP_HCLUST_TEMPLATE 1 namespace hclust_T { enum {WARD=1,SINGLE,COMPLETE,AVERAGE,MCQUITTY,MEDIAN,CENTROID,CENTROID2,WARDD2}; template void hcluster(double *x, int *nr, int *nc, int *diag, int *method, int *iopt , int *ia , int *ib,int *iorder, double *crit,double *membr, int *nbprocess, int * result); template void hclust(int *nbprocess,double *mx,int nr, int nc,int *method,int *n,int *len, int *iopt ,int *ia , int *ib,int *iorder,double *crit, double *membr,T *diss,int *result); /** \brief Return indice * * The upper half diagonal distance matrix is stored as a vector... * so distance between individual i and j is stored at postion ioffst(i,j) * * \param n number of individuals (distance matrix is nxn) * \param i,j: indices in matrix */ inline int ioffst(int n,int i,int j) { return j>i ? 
j+i*n-(i+1)*(i+2)/2 : ioffst(n,j,i); } } #endif #ifndef _AMAP_HCLUST_TEMPLATE_CPP #define _AMAP_HCLUST_TEMPLATE_CPP 1 #include "hclust_T.h.h" #endif amap/NAMESPACE0000644000176200001440000000110613275576575012433 0ustar liggesusersuseDynLib(amap, .registration = TRUE, .fixes = "C_") export("Kmeans","acp","pca","K","W","varrob", "acpgen","acprob","plot2","plotAll", "plot.acp","print.acp","biplot.acp","pop","diss") S3method("print","acp") S3method("plot","acp") S3method("biplot","acp") S3method("print","pop") export("Dist","hcluster","hclusterpar") export("matlogic","burt","afc") importFrom("graphics", "arrows", "axis", "barplot", "box", "lines", "par", "plot", "plot.new", "plot.window", "points", "text", "title") importFrom("stats", "biplot", "sd", "var") amap/inst/0000755000176200001440000000000012444343274012154 5ustar liggesusersamap/inst/po/0000755000176200001440000000000013275070562012572 5ustar liggesusersamap/inst/po/po/0000755000176200001440000000000013573775227013223 5ustar liggesusersamap/inst/po/po/fr/0000755000176200001440000000000013573775227013632 5ustar liggesusersamap/inst/po/po/fr/LC_MESSAGES/0000755000176200001440000000000013573775227015417 5ustar liggesusersamap/inst/po/po/fr/LC_MESSAGES/R-amap.mo0000644000176200001440000000373113573775227017075 0ustar liggesusers<& !+6M2 "<%T/z5S-4b+ .A'Fn>!(+#F,jA   'centers' must be a number or a matrix'iter.max' must be positiveCannot allocate memoryErrorInvalid length of membersMissing values in distance MatrixYou need to install Biobase package to use this objectambiguous clustering methodambiguous distance methoddid not converge inempty cluster: try a better set of initial centersinitial centers are not distinctinvalid clustering methodinvalid distance methodmore cluster centers than data pointsmore cluster centers than distinct data points.must have same number of columns in 'x' and 'centers'Project-Id-Version: amap 0.6 Report-Msgid-Bugs-To: antoinelucas@libertysurf.fr POT-Creation-Date: 2005-11-12 11:50 PO-Revision-Date: 2005-11-12 12:50+2 Last-Translator: Antoine Lucas Language-Team: francais MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 'centers' doit être un nombre ou une matrice'iter.max' doit être positiveProblème lors de l'allocation de m'emoire Erreurmembers a une longueure invalideValeurs manquantes dans la matrice de distanceVous devriez installer le package BioBase pour utiliser cet objetchoix du lien de classification ambiguechoix de distance ambigüePas de convergence enCluster vide: trouvez une meilleure initialisation des centresles centres ne sont pas distinctschoix du lien de classification invalidechoix de distance invalidetrop de centres: plus que de pointstrop de centres: plus que de points distinctil doit y avoir le même nombre de colonnes dans 'x' et 'centers'amap/inst/po/fr/0000755000176200001440000000000012444343274013201 5ustar liggesusersamap/inst/po/fr/LC_MESSAGES/0000755000176200001440000000000012444343274014766 5ustar liggesusersamap/inst/po/fr/LC_MESSAGES/R-amap.mo0000644000176200001440000000373110336326153016437 0ustar liggesusers<& !+6M2 "<%T/z5S-4b+ .A'Fn>!(+#F,jA   'centers' must be a number or a matrix'iter.max' must be positiveCannot allocate memoryErrorInvalid length of membersMissing values in distance MatrixYou need to install Biobase package to use this objectambiguous clustering methodambiguous distance methoddid not converge inempty cluster: try a better set of initial centersinitial centers are not distinctinvalid clustering methodinvalid 
distance methodmore cluster centers than data pointsmore cluster centers than distinct data points.must have same number of columns in 'x' and 'centers'Project-Id-Version: amap 0.6 Report-Msgid-Bugs-To: antoinelucas@libertysurf.fr POT-Creation-Date: 2005-11-12 11:50 PO-Revision-Date: 2005-11-12 12:50+2 Last-Translator: Antoine Lucas Language-Team: francais MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 'centers' doit être un nombre ou une matrice'iter.max' doit être positiveProblème lors de l'allocation de m'emoire Erreurmembers a une longueure invalideValeurs manquantes dans la matrice de distanceVous devriez installer le package BioBase pour utiliser cet objetchoix du lien de classification ambiguechoix de distance ambigüePas de convergence enCluster vide: trouvez une meilleure initialisation des centresles centres ne sont pas distinctschoix du lien de classification invalidechoix de distance invalidetrop de centres: plus que de pointstrop de centres: plus que de points distinctil doit y avoir le même nombre de colonnes dans 'x' et 'centers'amap/inst/doc/0000755000176200001440000000000013275070603012715 5ustar liggesusersamap/inst/doc/vignettes/0000755000176200001440000000000013573775227014744 5ustar liggesusersamap/inst/doc/vignettes/amap.bib0000644000176200001440000000175113573775227016344 0ustar liggesusers@ARTICLE{caussinu+ruiz, author = "H. Caussinus and S. Hakam and A. Ruiz-Gazen", title= "Projections rvlatrices contrles. Recherche d'individus atypiques", journal = "Revue de Statistique Applique", year = 2002, volume = 50, number=4, } @ARTICLE{caussinu+ruiz2, author = "H. Caussinus and M. Fekri and S. Hakam and A. Ruiz-Gazen", title = "A monitoring display of multivariate outliers", journal = "Computational Statistics and Data Analysis", year = 2003, volume = 44, month = "October", pages = "237-252", } @ARTICLE{mpetitjean, author = "M. Petitjean", title = "Agr\'egation des similarit\'es: une solution oubli\'ee.", journal = "RAIRO Oper. Res.", year = 2002, volume = 36, number=1, pages = "101-108", } @BOOK{R:writtingRExt, author = {R core}, title = {Writing R Extensions}, publisher = {Unknown}, year = 2007, address = {Unknown}, abstract = {covers how to create your own packages, write R help files, and the foreign language (C, C++, Fortran, ...) interfaces.} }amap/inst/doc/vignettes/amap.Rnw0000644000176200001440000000342213573775227016353 0ustar liggesusers% building this document: (in R) Sweave ("ctc.Rnw") \documentclass[a4paper]{article} \title{Amap Package} \author{Antoine Lucas} %\VignetteIndexEntry{Introduction to amap} %\VignettePackage{amap} \SweaveOpts{echo=FALSE} %\usepackage{a4wide} \begin{document} \maketitle \tableofcontents \section{Overview} {\tt Amap} package includes standard hierarchical clustering and k-means. We optimize implementation (with a parallelized hierarchical clustering) and allow the possibility of using different distances like Eulidean or Spearman (rank-based metric). We implement a principal component analysis (with robusts methods). 
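As an example of the rank-based metrics mentioned above, the Spearman
dissimilarity computed between two profiles $a$ and $b$ of length $n$ is
\[
d(a,b) = \sum_{i=1}^{n} \bigl(\mathrm{rank}(a_i) - \mathrm{rank}(b_i)\bigr)^2 .
\]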
\section{Usage} \subsection{Clustering} The standard way of building a hierarchical clustering: <>= library(amap) data(USArrests) h = hcluster(USArrests) plot(h) @ Or for the ``heatmap'': <>= heatmap(as.matrix(USArrests), hclustfun=hcluster, distfun=function(u){u}) @ On a multiprocessor computer: <>= h = hcluster(USArrests,nbproc=4) @ The K-means clustering: <>= Kmeans(USArrests,centers=3,method="correlation") @ \subsection{Robust tools} A robust variance computation: <>= data(lubisch) lubisch <- lubisch[,-c(1,8)] varrob(scale(lubisch),h=1) @ A robust principal component analysis: <>= p <- acpgen(lubisch,h1=1,h2=1/sqrt(2)) plot(p) @ Another robust PCA: <>= p <- acprob(lubisch,h=4) plot(p) @ \section{See Also} These examples can be tested with the command {\tt demo(amap)}.\\ \noindent All functions have man pages; try {\tt help.start()}.\\ \noindent The robust tools have been published in \cite{caussinu+ruiz} and \cite{caussinu+ruiz2}. \bibliographystyle{plain} \bibliography{amap} \end{document} amap/inst/doc/amap.R0000644000176200001440000000265514705252655013776 0ustar liggesusers### R code from vignette source 'amap.Rnw' ################################################### ### code chunk number 1: amap.Rnw:32-36 ################################################### library(amap) data(USArrests) h = hcluster(USArrests) plot(h) ################################################### ### code chunk number 2: amap.Rnw:39-42 ################################################### heatmap(as.matrix(USArrests), hclustfun=hcluster, distfun=function(u){u}) ################################################### ### code chunk number 3: amap.Rnw:45-46 ################################################### h = hcluster(USArrests,nbproc=4) ################################################### ### code chunk number 4: amap.Rnw:49-50 ################################################### Kmeans(USArrests,centers=3,method="correlation") ################################################### ### code chunk number 5: amap.Rnw:56-59 ################################################### data(lubisch) lubisch <- lubisch[,-c(1,8)] varrob(scale(lubisch),h=1) ################################################### ### code chunk number 6: amap.Rnw:62-64 ################################################### p <- acpgen(lubisch,h1=1,h2=1/sqrt(2)) plot(p) ################################################### ### code chunk number 7: amap.Rnw:67-69 ################################################### p <- acprob(lubisch,h=4) plot(p) amap/inst/doc/amap.pdf0000644000176200001440000047712614705252655014347 0ustar liggesusers
O2&%(Anv;zϑ@& %q U 9WKgi+,+:<žDua`02XxY]37gg???F=fkf|U%sPts3t,+=zh!+s uBD S_27-& @ģɩXHe2jKo[/*Eύ 5q!/]ׂisu@b5BN\]1ܣC +c*QxIkdU\xsAkJƮЪ"@wWgB"xKhUg`%?m#a0) ,"LיyR".Xˀ2f:5pmFN4&~!r2U2ؤo19tM?m06M>'ٲӈԅ*yבX:p-nGU1 Ig*ζY.ЈI7Մ\Yغn)Zan|ζ0OZFoKD/m!|տХOEGS$dzov>9ͩR V^.*ž$7)*-A~_1ϴk>s`$_#%Ƚ9"L|'g>p_=J禴r-`dL1'mpz`@Q^M_u{!&þM®e5%Mct&c^;h'EBB~B킏 #H%{nz`^VzRgy|}.>N{"I T럘vҮr:iSsun:t'}\x'sCZ4Ixe`y%$@{͸,gX!-*-Gû#Ќ3 _gρjtK]#S*1] E)U 7<:=v{^'k*U1oGOEϫ*&Sඪ?WGu-g[ϻbUc=9xhWWnEv3Qpa Ee}`7ӳm[7)M* E1.tWY+fjS3 NcĪ:4"wG}E ]L6@^yx}Ltc2\x.5com%a3Nv+WD^{sIPP=4WOei&ELuDei9<5΀-p?_6CnѰ6?(zBJ4TwPGFd/`f`O~AHN{\weR:=;CXF~q6hP*Q*+E ,S`b-!XV UxJqwnX~yޖ_Digo> F(CW^(sML{!^fƻ };Hc¢U7G :ثȿ-˚{- wߢ 6?ZFQO#l.%1?3}s~:rXt-nz;oXP>~J5*D "IR82lI[V V->ĜQX02"i`ɤ,hZiy ~.bc3ņb{lz6e+[d.L073aHglՆ*g0 Xмlh%$ybi*n.?%`s{H9 Iku (XyW0UƯ^Hd_jsVKA ĽM/l٦|rDaɎ9zsuyRߍJLbz6zpU Z+ Z^/ۖ\ɠh%TԆֵbNb5/;Ao5Ym2#WSDSÐ1[͝T<[7G87" 5Ca4bB\M򕚀yؚTrr?W DFJ g"KTNYWѹh#2wHe(DZWVlh\* g&+5>,R+7ofLN=] @ _s[q[`V^ZHÐ.|NoχWܸtU)v#^Ӄ'XYua=D,QhU嬍VL}FVc5"^ɑi)l/E1̰A&…f'p!Ly:b6-]Hު ĄZ"ep U㖛~ \8w h z"sΪ:Zʙ(Ma|%A ǑJ1Z>5ާJ*4O}ح:%+Qx7UٱGD1qeKTXU1?x 8!E7PtXD+0ThdQ:ϼxu0Rp2g^{`Ѣ~qL1\%SL|~IGF&J#.?f@ qY|-;0c@ܕx4s=[p us0֐Z7BـY[Pp>wע貮"e|bwWJHllz٪zWe x\"m:普؆lpUAg 81Lr1zr|!T *7|CWZlwHI$<ϰAH>d5 p 8W>KXH6Z9hol̻XzXxK\yZ7`ryX;oQw^_l=!>1EWC#.'W5^I1H͘-1cMYsGL"\{t=HVI覠P&dLIf"k:n,t'#M_R4yډUۻ:6FZjЂLwfAhT&əeBn=)8JZ8 Ŕ{p5?` _C-k#6F`w?@ңjgr3KUpNQzdʥDʳ{v#iM5Ln]7󥻬Jw`orP:hZL{+z%m\`sgr,إD̄\!,.>F»; ^!|BqU7e@p'گ{_GEP[zBIx e\bH.t8LzzKX[O)aF"J%x"/&Y|넯L]{n80z?N״Hxcc^w_U+h:Q;B/`ʣp1տ[n94İYxJ@>VEqMm6:DЛjҐ9j:0ob_n7\?`(gxC\"th-v`VuT)llžN,M^GpN9~PiȥPk|c:3ORkVUK#L\1AZ`kCH00<~3(+-PcƘɼz~v] CmOrI C\JACl:x.27>mGL)|SLiAǯ@UDAo=Dece ܠoVǑ jkF:rW ;EE95$?2!ݛ?j=cOYj^L:0~e`ɝhSQvg*"+Kji<05#adY_69 l2|]r NQu󔗚K&@[fRM:I֔T /_/] ;WB/d4MdUZsB>3z$4Fvj䐫b>=r)i %t]YK#jRFOF/QPq8Z*eɏrSlLȗ](d?)z9 Â&kjl,]htV ;Ft8qgQ` 1{T:5">!ܻSiIy>StM2 %SZ}+uML#S|~<ԐnjxBbSkFmuԬCR&U ɈYQrV %"45 ͖f]M7'!(?+߅Gvn}b\em(n-I 5YƬ6c N갽seԐ]UZ+ ( t RǬK-Jx蝉וé [40s'a>= C۱uuxm, PaZ 4i'\^o|\'j1lި_FG/e&p%0Ÿŵr~#` ]2\&*"]S^v\+ MTy\&jWlt*m*h^G6+Z~Wwݚ0}mzoWEw#Z.5\h^X#An\Ua8y_ tMA%t of2OCMY;J7r-܇1bN΍sbEH*TRU>އmVEEsߎ'u8 (4{ܡ.zU5}3ۆT<#ِE,;Vu E\j7Oѱ S,蘾p3lJ/?# Nh*;6[=h%n7tZJKp/WE '+i/5]qI]HOoi'gb*+3Hwݶ5z_X|۫m`%O.p+ 1 9x'zW o O|.2ߩaO{=g5܍:d! \3/|$w-Vc@,'J#8P uc3E=P#~׆Np9KHiI)7`E).4\MS+/Q ]^f U&;91XGZgoQ"Pdޢxǰ<*jZ(Qj>Rvq/dhTh 7]÷ǩ68\(mɜaeK`ǖJ`5dtʯqȐV3gI^%ܥ/U: `- ioߒlk@*djW,Rn[l*T-g5Zn;,KfGx04mքP} @ke/lq5}ޟy+!BL37gҡKM7׃6UMᯒMDSAR0yY(+W#׌EVwֈpo9?znB;>+FU NeQ"WI<ΘKb HހhF0ZQzr4짢!^&Yf B\MLdGlZaM뽶TW\Gf4%Nr`w{U~OʡagB=;d_8^^Ki DP0D3k( 8˔Rd3Y-6;Ϊ˝ұmdh쾒xbI/?0g=KY;Zi쟝55^D[!φP]&ibRY\Rxmœ HVuOS/2Y:w73ϙcXќ1%/ĭ?#G ZJ͉x>!CS~[y 6BbG~>!C"LW?D3Eo`[9y HCx`ץ6Xɋ-i?DH\VH*0Hёлmך@=XOW57fEWi#7ٱ֎AnȑI kkE\]k8^z'kBj1ɨWGylGXu7;8D5wLk;WC4T{/A`F$;[ٟN 7 ًԪyL?J8͢Vp>])-݂5/ovBd4fiλ!3v`ѷS [eJ c鸀k(9 ,SAs' *bKm@{&.Q0d m}-*Q&xHזF' 5ܹhe.0&Ϭ z| 7w]./25rى 6 Q SwQLGQi6Jԡ@ܭAs=L肋'K53Aҷ)Tnn(tm4UqSɤ %Zax9ږۄMvR`n$i/ MPw/MW+,f~7=PV}ᄒQƎO{3H-{ĭ=_ߡ~7+4鄢 UӒqR^{C362bʖE_g/{R0TGE\3"D2?mhՄ%]^Vڧϡ|!cr?'vRy=>ck y+I{eeGfC*N!mR4h@ؔMՠO[_F> Fl>6DQ*7+o:> "$ RЕP6g2MFJ#&8尦/_z%Smң:F9OEK5B)//2_^hÅLFݔAƿh^J |-*T;c^D2Z*w Ⱥn%*\)&~->ڴIz|Ksd%*={J'a"Tgp"+NaMsao7!w_ [+gFe GHnq /'zBKLLYcT*eMvg!OI 3JJL\ YTj t#S#VkGvHmul>QE2$c:އ9G{ xF|[|GF|`nI_F .e,7UtѻMmGI XUOEP].gO%lf-n<6H2=촇fPm4L>b Homaa%kEqwp;$r50vq;{L4p-Q]'6$D͜vŊ4AfNMhx_hLf&D!bV  ٞ5 = KG OMCF&jkip", O$`@>=y4xۖ^ "t`ceX_$K18OkIz«9b4d:?0"A2%ꬲQKjyK+N.7IHRޱFm|=TO>&%;A>%s7o ҍqb9pX9 Gs3#xr G`ghר\ 6ִeBVOg(,fZx endstream endobj 38 0 obj << /Length1 721 /Length2 7871 /Length3 0 /Length 8467 /Filter /FlateDecode >> stream xmwePXuNH'H.:  H<{p w}U}ut>]P@a^N ]! 
rbH@`6PG)3D4 N E:yXY m{3 @juiҀ@0kTUӓW˪hd!3{@ aXB]$ #/M ptt:5eTU4Ғl3G0@IO#UbtU"V, !V6Xl&h ݜ-G 0^:8 .e(P:92s!3ヸl\elB&vPA9p K]Z VcJگRp.ƕFt-\82?bC6*q&yG7l`X9m*Y흥Hn鿝\"^$vߐhEj146/2S[hųo[࣠z%sd|47v!\%&(E-yZLW.KYr 10 $͢đ) <)iO,SLRȈLuI^ʑ\|eӡ@o}tm'7eֺM@D514pE8$l>8p[͆ѡՊJ_[ۂ柅7aaļ&SxY&k$ %fT*jQ<]7mdvԌ$MGeE `5,Qf!3=ٱVN۟dkw.ЉGeGP+|zE< iHD߆j9"*iAOD(!o s )qbtp%9Qtd\nV%'u*+oυ@xRFNeKEk33R&@J4{:eu+mo`U Pe祅8{@ucRDaE)[x>poZXz`D,>b=(pʳ>TlN8ڣm]tΥnm_#-gk*=qc̖I=?ق\):%Q"cDo_oJE5h9S)b%S;8hauL*;39ecBο=`D ydZ=?Ų;[$64g=xn4HAF?k,*}ĆFyG }]U:mUpt*Yu}pk'4Sݦh>F i[;Tm@;65cZFUnnS=R4z_ſ6%qx:._065(,ɨVIᰙp z39F9gtK"_];3k(RR@khS)qf8\6۟jD|C+)ŒdcMe-6\9:CjF%ʻfekm<=ԂwcYBҲB|f~¯Lgq_!xliF;Ň6+8\A6ʎ%B;Y*cCM<:vR(!UQ4JmMVWs@$D<2)zt{ e*o+= M9A(/|O~J|\Nd h⼫ȚGQy _Úq#+4<=[Ϡy,` 쨠i8y=`q"4Ny0ӗ{vG$UX6VE#QwNsXچ ~3l/wѦ383}ҭ]Tg`$bϓ:*f.(q=Ot!Q!-{g XOƖXR:"};u p5Q%Ldbt7ZN61 cb "Ar,3gWwt%mh斾rosՇoy_&EJ$2T3YODzq~!JsL~OZt9|rJW"?.90~ژ5z(cc"p$vm$O(.'Z+ s~Iv/} 8-/lN=ScSeX0!s] rm>[,ݮG|G`keo K195"{j*+(qU *K'a ;m80u&Ʈo9jEI:w=+c j|<94Dm*BcEWky9X\;폋uLCW+(vq[ڌlVk'|{X}9 ~s{i]ũbGY<48v#<u ɍ0<1kNL725 hG)Viq1:vSz,E%aQCo_BYe Gʖn&yb;c?!ͳ"O^,g<_W  Ma| yC(HG؄~>]> BJZNMLu1Ɨ{Tjtq4_bEr3rN5;Vw㨲K.d'|5~THY [{de'VX1nκWa^Uj'Ey#?Wlk!){ )JI(ܕH$X_ Eأ|cLr;<۵z =^D]uaU`!((&E2|'_Ei451Ċ Pu=yƪ|hrrvPFT㚈I}.w .տ9D7JҚ$C ԼWZp^fyk-Pm2uPc9‡7A=0ś=W%35qe$=kKJâ͂KI|,J{ӺMGl1/b?)֭Nε~0{trZHj>2R(yWmJbrw0R9a:53^𫾐.Ӻr7WBxhH[d]N\^/_b9lYsrBch}J#8Gb"'Wҗ[Z.={|' 4& {{$oπdCb?mF7{q;ן9;#zaohc|8zՑZb ax<1InGd|Y4'C3`r'1,YI|&5wj࢝>9g5 Ъi?fPŝ)H658Yax6ӎ !3zd ?s G.1k.G돀tOm'C47FSE5.h?aev[A~"AU4BYԳr6uܣI;M2))m#6)Qf(=͇~rF^Xӭ⎾Q~~&BuxG7Xȉa;6S0*:P|8g-9:VU95L\oVp \ӷ2071OPюT2OQrY4`#e V68V˓m(1%!z@ 1ʼޑ'ϱʗ)Zq븩Lm|N\ =%U/ 0'\*ޭ>¸X.yGwϘ r#k`n\4ˌOq b(oNj!몘#]MI4__zX;~yarR(} P=䢖7X+ ؕFbх Vs/2I)rcYݖזI,  v (ػ<ɻᰌ 8uVUqZ3J!:14r< 3_">*_&O.kv[ c&gi`Yo}nV#*;: Du\A ?v•$lo۵SU:J1}L0g`_%!.C}iƋ#l:&"p&#Ⱦd6}S& nc;{2jpsxn:VLwSz+Aн̲(W5;35w}C [@ZNe ])ĵ(96qO>˪O%ygo sW´u[Zn6ϩA _WY>ΏͲnGD lQ}5n-FSzSs<3gw' ozlxcnEKȮɃbS.^:bݹXw}U`X\G oleO<~Nxq^R$R~KP|Y. 
TA٠~R|ʐ@9j}%ahU^wEpR>l2UNA^7慗Lזd4k0xP?OEw^o06niw[|H^}Z]{y+o;>1ϹɶQ>|Ɍ:݉5/5uf*\9MLUOqAcϛ2 BZ&}B}N;:CӏA\ʣqZ!M!K+&KrANn ieh%}lcpCߖX$ɗ%g5o5 GvgQݑ^w(`"5 Kٌ4] OeSgCqesgmoz[bEfnG"OuV3H!eNuK0:/kEZ3OZ#)P(ՌEuy$ng&cJTa[;AivTT'[J&[=&v6_Y*&sxSZx룎mVh?c$wk7|?[;_nY_X'@i] qsBMf$] _&\tsZ4Ȇ#Q^̙w]'-2Ÿ\NH#]3~GԊ##8-d73915ڴU_rᇽvq@?ѺE< d,Fk>$n{ (Gf껛řؕX}sQvCKy2q endstream endobj 40 0 obj << /Length1 737 /Length2 26626 /Length3 0 /Length 27176 /Filter /FlateDecode >> stream xllo7\wnwm۶m׶m[mvwmwsy|YdZɐ:x؛021p()020001C ;:[ي:pM I =-̝ ()-$P6XX8Y۹Z𸺺 :9lbBlnB`jamB ,))'N@!.J nbkhhMbdmaL calbdBI`jH``񯜜?:Ȋ˩ ӫd$quv_86/e_ZGPtЌ cg#3 [h&ikjG1T$O~`HI01ZƄBđ@`hK/-%Q6q1u?Ά"hkt,l[8Y,?buI`b&OvBB"ҚEmf`kjCL oΎ t=C_+!!;w/ZF6Z^l>+]cGG[7Z`C]3Lk --1P^a^F2^aݫ`^H7ƙONE`Wl+Շ]aˁD9g Tb9r(Eg6(G܎_[|%G kO]8gRpm$,/hS߼<0L+RX[SaK#WSd+l۵+vh3Py"}MpP)~d"G~s۟_W 2kSG52n뉗'pwwF]]!y\w[jA:Mkf+g]zYaCksc|M x.A2-2i zTH)y wVj  => ;mkJ-g~zħ3 c3O_1qۏlvbO!`;%/ʶbFJC$q!G]~Vq8RL뚡QIVڟW1-۸ϣ^Tߓ<9y w,gq|2%ַ2wb 3V:V*G(D?Ŵ{q`2i\ kA^Dgcx7G)z!Ϣ}6zI t}&Ο(:o_)IЫ];3!1̽y&R9DnՋAø,y Cx%I3cCS((*_9R+~CpZz螀#XdAӗb^.U}◧Jrԉ-c=ɔ,("8tj] e/+<6|6^xZp̳L'\jzqll snDoy_6+jf Frww#ۘ՛A(BMWVW:0LYM*\Sa~v֘u6w@zxT]C+}x'ؐ$ <7k `nbB8<./k@w'*(MYUQݨHX=ʵfZx3*ft;or,"/4}$e|d(|ޣwdIEPD"s-N6\  ɗkt`!K^h-* ߈#[*ޜP/ru*rV,y테0SŕD}/*M6I9yeѩ)|Fxbo>BvxGpH`w6M3'D$#JP.@ 0Yu&)1P4}Ϫa .h "d='H鹛HiO9aJ KOz2詵h9 dg2\`я9LOcLM4z9}PV68Hz׻up'#Ёw/j,/Z"RSv@xxq$X5$+ܿnݭFW`VhRBH r7Oy7dVlϷp?qsԇy}BɢΫf/Dy.w5I)TFC2w|ؠk0vvh{P\bRgSqKNemS!;r%AVqֶ(I0w5wSEA_?.ogMGu#{v3: ǖkM( YDaBpUdQ¨{)iϻ*K6ZK4%8<[Gvh{ILQ 5ŚF+ ?=g?(u$Tse5l6['#QWdoKIcp gŪ2*H+ձ5<R|ѭu^aWa!'RVE(%b^ÐdU>wP8*gC5nmf~Nͮ1fꪯ} 84JtʶeP2I@.N]~ШEW!D?xa5KB"V4BvlarC*`*6Ci T"+WPU O=\Jjx4gbBY&cp|Çq?v>I]xE vߕ6Ӱ%>jBt80EEﳤ_փ{&e!xkCC,syKso^2 ^U17PX$mJ4 ?9#U%)cRhU4 0HV-1QQ{ BT-:e\Wr6TΝ{1/2yb;FfTO푣qYASӣx-f9+OюQˤT!^ced%QtSLpHʇs+u xLJp~ :>}PEөT+_%Ϭ n:0dٙgSŐ2?1@M=t4ڗ\҄z/E @=vΊ*sD0OBc`n]#VU T =N3Zc>8um+G5Êȳuщ2Tp-yyw6_TآlXC]@*ͺ&;=@m F]״=3$@0)Մ H xTYzx(>r4bͤ? wTR_S`%cngg\P`!hm*ק9Pn//rR:Z X5K=m}087+v[Iܩ6Ff-x.j'B(Ӛn5\ɊY;4zŪeaFɒz#s{_%ғK<@/Qӎ{T_Z3[O/o{;14jOuL놝wqD@ NɴȓҌ/O/eqtqvv?i;@hebw8 >6|śF@f{P*Dfs`J!VSɒA5Ư=%(wLוTOHߑ#XG֕4F?W1߼UOYG/$Ź۫2Zh`(3^r۞&q2tDĚ5C:K7fyiN^^b'Tcca.Qm/KӠ(4K82 i0kb{ucӃԙ '8ɑ+ݙ{0KޢW Fhۆfog2 ]Z) 0'B0/FiYR6dXUsѷQ/u1>yUNmtgSgA8H氐ޤT>[W8oLei%zz;NF֛_b3rx*!ǚR6 Nx0Nӽm,1Љ`V'oﹹ)rHrlc$eDBs!Lbd4j6X&K|)fjW;QiNb/eHZ/$iP#@?Qb}%r-a7-'%Tk{gFKc}?M/ix_{iLIZźk@%G a@&dde3M[RtX_dA yxz=`L0tϙȸw9% ?|"9q5jQ{:ZezQݾR8kcg0k+&o pqH5|cri%"D$V,4{]PEVNӕDI1ÀmE;?E%Y[3m'rXxfȍ@crأiCL,tSξӦh0P.^g.]"LqҳHq!%5kw$"Q9ͺ d:A=iqTr3jӏEv8)Z8&;ZaEAU} #S 0+=cNvO~:/\4f? :ydrQ&]4Gz]Ãuث汀=Pqu? _:ǔ7/,#}0b8ơ14%x%]g|jT%yf]'P5]M['?ѿ [*rI%w^c 0!!Hf =p] ڏK2V"v̳!3՘֟tu5S޳ |WVkj&yo֥_ލ!w}Q~5( f6U0m"z1Ahsy6-0(atƎ&$<ߋ%Ni9N[;~F'wWW)9/K<{+}!fqW5cL?]`ZP\ Oer7\zrc<4}6IHURݙ=Eg]ֿ e`jWrs/Ýxc))>ȳ( O+Dq hjY Kr>gsz狈:˖w\mBD}0eC#+J:Iu5wgͣ^wC^ * Gf4r{4ȵw m,yH> Ζt32.!x)^(|-_p"{5?y +4Rco|D2:%jm X X  YX gcu0Hg$iaVn:HS9r_F_)w "SmVȅȯ%e|%P8ç 5EB'% Wx:|rҫhWZ]H?H242F-ûqШ$îv'Bat{0ڼbI)/gP8q-m1ǹd)sRbX#N(y4(.~pyi Hq=]0!HƘBZ\o}piL:Gmr075`$85!(1F)+soz6KC ^}N? aWlXhDnǚ \Ĥ9~3`m{!an+] }H&tEClܪ*f I:ǫ3O49#zsQi=$6I1jx(J*#ڐ,NW٩mcs[@IZͦ_`2^@oZc}eǴj64SgW@[#H[_ -Whw̧R1jDH4x?c;܀er2AV[Y26ع7nv]V%q^ݢ -H5! 
p8>lx 㜫JO]3\*1}}hz./yIZZ9#C6Qg;* {kT8vL`e7D/GƖ/Ym@D:$fC8Ըj &Mmqfi+z`+5ńyͣUն&,T3yv3,L^ww&pK.rԮE\%Cmp;6:va0\+i8CF"Njo`&QB& dIk0fFe~/wڽ L;|eIv܋DUpzT<l۸-Gq_.A2,G ς6Ů.z-oGns0~(j]E5E 3 +U;t 4])*$SelчtP3PZυic"6<#G< !;<6aԺnJzq߭7"lFtv^՞O锌VHxX{ `.TI]lZXv% íJ̕Ը=X[dOo]X82F× 4 Kٔ+J\~y$iU[媭%7$e9rbtp3}3A}j8g*gtI$xGkdhf5ڭ Lih 1D~a1rS2e6>|8bĵ="p>%?2ma3hC t%ϓ{IAEAs.rc}4 ldA[޻׀x}]\6"ݼy*mM/i 17T ^2=Y̢*eܥgbjt$ 3F]\E*, nYd6K $uflVZB f]XX8Q"b+ijO "NRRoMšl}Ykť5:u LxwXwDK@../j] s!J^Jkՙ܄SKriV31UHS5""# V}Iv8viI2>c@,73Yn`W~%Fpnzm֡[Cӡ|U0. Ns2]TeN"I<hmoI\A4QDMqx^~&@1#˸3e33\NNr>rᴬe@(,|k ZOz@ws|.bPxpWM[/`{Xs q_U|`hY%f ba8#G ,?wraoMhCa24fuv2ARAڳ[:^ѸhKX_cSoD5'*1xE]{Ԏ`P"i r`ɇEsܓÖ 8[z ;FEsAً105 p:/JmN+D.8 7ři91Hqi!gPòCND͗v)]]m.O(Qř5$hF[ peW>5k6-ӄS0wO(<0ίt~tsݠgb^Ridvu{v`HᷧDD[?8iXuv [&XiKMC$coػesEDܻw,hI+zF/2Q#rY:e(LVӶ*Ѽw)2KP#KQؐ٧Wx5%:򧗦,|FyV pƕk[= 26ڸ&&Kvϕ=Tn}z+ic{p~!\2؟1vK. f󫸠}!GR;>նjYhgaj;,WXrҡL`7=]i"\EH`Co=R {!7FpѾ"Al he}֑1X ~n9N&M7\.%Ưf< y҉մקXf|i"%̱yYhLkQj‡4%K~i;VsO$sXg3fڑe6 Mo ^lʻ)Y˹a&24?wn}pXֽwދP5©ۮ^Ac..rC)nɜ!Y?kաdYy9WI6ĬH̘1wܛL#legԪMRd@rE)hRMҊL(6AV:n}l>*K3t.-3,zx2pVngf] " xVǪ΀XN|b h6q .u*cV7JdTqxkNG &6lRB y͗<g$e_` 1!+`Dt{yA*ҝ,`0=MMGw!<5rԐ׌>y˯uٷ޶]5Dń#h,o8h!xfӯ,'.%=r\cַ}aJ8fMn$*ȝԗaӧhUKDT}q,eF8AvEnm[dLBjWX 6q"_e}*/~tjxtYo Xu W|yRoɃ)` k2]p rҏb9YMbDڏf_Tm98ƩW%O9,@KX֏sqA,}֫4|"pJߤ1)$`PY?_V81d\}{dq~W?6l藖Re9U^kp%ʽǀjmvMVZ",5HJ \j'V =K0a={G^Dآ@C+Hܮ|.ySzvVDtEԒX'*YN+3{7y2Ʒ-EOw[lpK_g`2,I7s\&"CFL5-W٣%e]LZz3=(z?Jvnҍq>&.)) cM׽ve2V"0S 峓w|ޞ"5)3VEݲ^$ReURK[:r͟8mбsZ B!;,Cg m\C?1Mc.I!E&(l^*ءW@&U^yE'g5hEM/m+3茚h:o3VP2況xmr( .C)04Eўͥ(&%*ğ+F@? z5eyYXB[1.k!REBp v+5yn8T?WPOnjq~ ݤŘeG/Dp"S&| psЁt_];AƇdZ RlvwNP~ ;/!7g~J٘JIi`Bs+!QqA-R X xYBڕGВ\^ڞz!%a>#B)>cvNݦ Laȧy:.S?H0{eO'[y|l;{uF RѺX ` xS"Q5ygooJ>7tu j7&~nu5ec0@.PY;;k`v,@ɸhʴ;^dwO4=Q9ۯYd KeFj*I"b9H׻/#mSpNzɤ)OxdW6?G֏8yk5ƻ]o}앚TXKݗR 3i["Zug؄$R iV{>MW3>GmvMB^؈Ƙq3C/QB֊oHqh՗@8/`8[j~jZg/? M9!c-Ϋxؒt58%}Q;MbcO P \ KnՔ{~4( G~Q; H<%󕀆W9o| ȴY';p쩒RpV뺼niP, M?;LxE^FH[%XSv~F&0I;vH}+" Δ[:{\f~хD0Oh}u &򱜴z+ou7TS1Ӧ2En楆 ̕)[2zl!I5U^_=&4&8FQWP$1o,\ -6uLYf 49`N$9Ԓ=k7Cڰ0%da+=ɡsg,^踓]VAzC]W?7v H-x2 996Sr֡-a(0N@?.?>fsMrqo1'o b*uvSol兮mؽ̊p*i(U~o9~]ZTڴ8Sݡ/n,'%Lv j# |aKW]ڦ^`9$*&#hTPY&~0:%vH/z3YBlͷ `&ј pVζ>ԑˢCNCrn͙ ;Y]XoyiZ+l5f fѧ"I!QؙGQK[[ J"υXG4۟qrau1ԚPrfu!\^$3JMg{=̻e@7eR.^WӐ\lir:* 1hʲ3>iͯnLo!Z4wG{x#l`$dV^*pY>_|C!4k>Q1v#Qe1eDbcVUOA\̌-i/ &TA6쿝08MH(2 *`*ʣ߷J qJi0ܦ_}bz71M޲.!LjlOe2g^1˼)xC]H{K>1Dl]tHOi62C'hg!bzap]ǿ;O{ 3UN="zowY2T׾3',;*z+/! ^1GHXƥx\K. [/p\kN%A,, dswn WP QFm,yo5=k~kp'P4$v JK-2b{fvrV?4g~GJɐA'_Sgֲ~tī&,PXFT-tV~wF3 M/B@4@ W$^C #=f8Й j+} yvfa3uд(̡ch}o[`VJ`u`'-\! B2E2ߘ])3 9J)@P6tb06j`SI麚!)y7,yNdr3^Uٟ qd)? = 8~dS,|Eh> E>{?ʴ7?Dtqzp?ӯ_ĪLf9DR,~V&xt,C&O u|P3,SyPq!i jߡO[&7LʠޑUə')!^XR-Г@_;w5]!F,iHk0)!+S8f_}Jy$z}(wt[C@%L<ȉ=oы+HMo|ۼ:S{fŊP"[*qGH8F6O4ESqO{ hI ]gkG,=e6KMMtJOU]J3N")K[V. H,eEۖg@QPoOE`RneҌ_=st+u8M^ӓĖe jlҜOanqVA[VOGm(݃Hp7emhQsoFJk:O^s|c' -C yvXD[{?j\^DD ; nÿ\ )M_TԼ?& ☙Ǣ% N0O3˸%l-)wx<Nj#7%>:U7bobJz#CF}pO9k2Lb<6x\bY8 ,~$ҫjP$FIߒbh11l!$D@;ܑG{ڟ*̀:H%&S`ߵhC_'oH&d /t:'0cٹoEi;3/iHtSV]#$XNUh,_V*rl3QX&wB[P~~ͩcڷ 4~b^ϭȄ`h=cso|rsB7ɒ1Q[QZ=sRivOEyecesX nMqsen/#I"Fd]!*tņF5H-8YdsjgIvN1FG 󏭎)1a%Hr dlS&kc?)czGx+z==Δ*DJ3IpURczM۷ 0'ޡ*> !Y]rx9&Q:5%cy/2]gd+XeaT-Ye@@҃~hy8V/kՕ/ēl>ú?{$*Mїį{V⯝ZYGV.8Ea1z$ SNUSs% lc6&mĶ9m;mkms\z5Lʵ;a@sb/Sg A +=bjgJ%l ^mrMW+G=bwMlQTcNEX8V|si+ՄIEv%탶alO,W/e&x4YNZ1hEJvpRSL`iH.?:ޡB"( :39:u/'I2>W_DD-k#O_yD9:ӌˇ\:/:HπLF7k/L C=Kj+<@c8Q~)DNuB}9$Ghm(^|jҥNRkTN^+gԭpCn3^]MI'gvLnq<|wNJ`^ʖbxW%wHԍifp(B2^KT>|?7K,6F_ҵ콅vD.Lּ M~0'*oeͣm0?6NY|n{ d{Q=[w%FN-v|zZ@ (@Cz$`ߊ^o~qPOܗhJɓs L^0ebMָ}G @"h~_!ȱ1XrB)yhnNy ݬ $Պ'XFS[(}&(/k \t^8|iTj!>ک란 !euT!!shv~@~̘ M? 
!]7U x$2i<ٽ,ROJ ~ԁE4uȞW&>ZmBB= G'2\d N+CkkF'((XTC0"l?Phn(g ݢiyJONa| )mcyv=LԋFsorOPY&S#Z˒+&F(=yj!2`xAuExyI.%x8}r.V6"8$CRh^dͫ۬׻Miaw 6bSͣV}$^$ȗ?M}2! ڌcΫTZW"$hD,3Kz'Y睂Ax,6?3/lO($ܩg~)u?)'Bt/d'ܖfw'^7o]?|{2pǼQe|ՄF+zxhuLbMfwTmM6яb(/iH  2~2 c>dEti29K:#r=*ـ 152$aodbP^}ӶN,V1EGal_ *3ޤFC ljs'VTA$Zmo8*U<TPD/'-? !!Sm)ܝ~j;wL TX"}Az4ѢgVQuɕ_"J$g$58QYwOdLcPj8A7|YOU\_3*#ѵP ;9ƱaՁ[B)Y!F33✮qMeJ!y.Ue`Xb4-i,W;MN҈pp !FGu# Pr߶h0{V},}2mچ%gx;Uo)8o6D`'L~("^62<ѥKĖ Vo$J9TP2 ؕV$˧J^'sw; a>~T("QkOꔗ)@ZUb38S6܉03N{8kby?miRzurY5NcGHmmWVUw'3»߹)Nj?̕{yb%Y/tCDԪ{]gq*EtX< < @[\pTF-c-Z׿n^Z5'RVCCTm$l_y  PkdZm)Ҽݏ[0ei?I`ok,R! l[N6:*$GX.n+xC˔7vsEc1tZQ NqH,hwv u1$Xt6&xN/qp{S4KYVfUjA$l-3k~:b@詯OO')$x2ACC`Zac[W5G T:3]~ 4:{D>Zhc%hGlVFD'U5B͠L[쌡HV5{6KQQ[{veBX;xFX^Rfur#Ҡ [Ur ~yIG,t0S(xcm]Y\GMq1qW5m=Bp;=69I@@U 4k 4´5>-?\&.B|DahqO/Rб,j>cKxcBLs-D"Wˀ TCC,=σ"*[ t<$^.ºYkjcyDfUhﶼA(P N,`lmV<"2w:G8ed8 CTEđo!颿he@zgp_IMVc2Lrɾ?kSOŔ]ʈ*K '_/c!4K+dmhm#N~\8bt=mYɸZzkFA\P!@O|Z3 LV]|qW}X dP֔X+(Ȕ)U])UXY/$]C`""fYgXQ+LB*K%+W7S .E&dKKw_ `/HX6kHƟ?G{Y˩ C/!!9 4NQfxubu8`S'v.P}E{>f֏ iߙ6tMH1~jnSp&nQR*)) gC$\/ ۻ*P'|E-.9/L[/;пn2=fqJIu,u>3c9-YP|~xAd@Ͳ`ž p#3~M( fuEW "q#D,:qɄ5 v٦TM︖{^̃HQ}H!3;:VCPiJC X <A5w,j18w"$O4a;Oq9A;O7&ɞO3فu<Ɂ"%8D;)-CU3tmq{ƿCs&ҵL-]i'XzQm nsS. W8] ,s%|s!Xaj }: Cs|z@#V0̪&W\ϔ8)#@ &kWqBEo867Ě m&KE8u\$?EwF=OZBOG)UkTCzʳ55lܐ.,֊LK*h꒹Zd^/#,SP(4lb8я!%ˋn$4M9+-6}/=hJRƥiXC[?"ɋd~ri!.Oo`U)Fq UN>Q;1)$gH!atC~n^6#L8"$z _y2P⼑+jEYcgnѿir4⒳Tk:&tK`;=1lB=0 bKvck:V9aO.Ѡ=S+vGDmsNg;_"QU~]אHEBv(Vfm(\nee+u?>ڤ\90_оeu3%j$QP鱃FJ7`=KR򳤏[" ?o(’f%Hg NLdXp}!)H?pԎ !ww 7u;ȕuػ݊XƧ{6$q3Z߷'z(|z|c(t?6\] @Ndk[GtCRANJ|e5׍o=i~Lϕ+4m04b*5,lIJR㘪~&ۦ;+!!.RP6x_cA_K\T3y/9N&;t"O"V^1#pSjܯe'&`U]n-/M-ߕvaC^w0iclI6#{E-!{"4v--º]* ߠ{^tfl:#\e'z̙g0hK2}CQR c Ȼ3ydhy>ث[Yݢ4CfMV+WAUL"pSd,B8'U͒dB:9U~;p2C47]< XY61pƂ*Z(=h')-z Bݝe5ẋqBjw]jmWЫ~q@"ه'R^Vwe}/ -WT!P L+!\p#Q\z+Y3/^?>AZί^!:[9NX `DǴ@*-lGy6%8ՙr)hDк  ht}=H LڦFpưT#@sjžN,` w_>"݊6vat`'I73ZB^rdk"z2'ւ SdNS]:9X9> cj&Zex$LB-HO4p-aixٓO# {;S\^}J*oSXwkl1?v4w`Ml~ܨEX3wcGm5V)6/b-_-y$e5(7>A- ?Y|NńWQ4jN;,yD}~d 8$r"u(vVN<3Um>:8wБǾx̡bh"< ~.ŭi*:/FW|5rdwQ̹y˒aO@ fX_eItW!`֬ Ѐx#][$;s@OI$[o'%셮PL}kuh-s~ˁLɿ1϶)GLr!95LجV}f]~pIvZ|GjqI߼ }^ 6GV@A#i>xXmHtgʬ)x}}א띩7TKE'U@~\! +ɝfҠ)wͥC$ bՂX:p Jԇ\9xf.ԈpƓfu7FfJ'2LQs>Q% RF~-?rn~TE)m"@y ;WguLhbṂ='E+Aq񈱧 0, P@ )9t! cEEن{]ϒ;Wv>h<o@YdraI[,j/ ۫'W"[uEBx-=!MQ^6d-e?z@Gȡ.\IO^D97~?+ 9LΣfʈgu EL/tO ^8Uˆ\.+63to2(DK/sohz! 
<ۘC9+p4Bv"W-8./ d3L8V8'Y[ ǹ`- kytt2.쁦2GvJSV{2MOɰxgBۉۮ#U*|3;\ [r4s-&v!ͿHq t?0VB+㙢R,Rj9߳^+5-P^ W[/1I9iƿB[6NM˨ 8"~uz> 9Y䛫'Yہܡ/%' R:{t m!& e4kHP|LJ0ߤdޜ.y!14u@ 6BK6Fj׆n ,Zc^͹Ԋgi )VrL9]N aMs)2%46̈́Aݒ.B=M1bBJ3:[ӯXaK|HTNգ>@/ӲL8wN AlJDЭ17B,ơ($'CQאV?uwo̍~ uOR(Px>ُGr3L*6Z/h}қY.E#L ltOJ|Ժ7]ȅ9(|ie%ba/c7(v&Xŵ%զB ^.1G}뤼?r~At-횜(+ҡi_Ĥe%`h$1mL[;{҅g6Q6d@ ?`6tɌ^)Ħn:5χ.emE^"f6Em BXJl>pؕIp1c:[o5bfq?.dO|J8oCgզ2o{;i xxKJqND5?aT[--^F)@1|׀ձ s1JrI #;PK*o"P'*#^ׅg4uC1 }Hw7TNFoh] } ҬXX;%lo,!mdBэ M괞MHULW=J?F,JsnEb(TxޝΕwGH,7y5 ~To tSH"ö%ˋMi5a:K!;W|"VfZqd`T޵Mj3J|oLc5Pu2Z]ʀYAo`aŊO!n~Ɂ)CRVwI/gdwm /U@ خ\-fܚFdr\jdZ{\"Io0D5SdA2sed:mDwD 6͓F2NL*@/ubl^oyF-F"@ TQٿ(9.un\Y;O.Pmag߈2P`tVѻ@":4U_A8r#$Q9Mj(@"bѝq^5] d%޵.Vؒ7C-G> ^czBf:= endstream endobj 42 0 obj << /Length1 726 /Length2 17475 /Length3 0 /Length 18049 /Filter /FlateDecode >> stream xlcp.̶5vX+m۶mضm;+mvVt߽wSuLsdDbv.*t \e1IF3 )@l`d0100<,-\Tl M,m-j6n7777gW:'W>6)\,Lf6ayMI9q*@jdci 46s6;lL,əL\!fdo US ӫ L2lsqmjb~\k2߬,:FF _Iڙ6qu?GC*?hW9C[S@ bOe3?vHGB򟸥?#ԿyYSKWb*b}+iQ;c{K;s?:7ﴂzg__@&aKQBB޴Zf&F# #+5vur2s?f}ޘ;*5Ohfn@fxuVrDA'6ЈwjW0dr*(\w筄ܯvb{C mĽKo)܋Z)2^dqh~p';q*] Lo89#u=[2l}Š@0{MK!`W?M'17YL͙^;bKkRdg.C#Gh+ ;&Ow^Sd+!hAD\ixF 7ߞ6/]1"DQoR۷\D!"tT|fp7J R+ɪihK*ZD f35>\ZACBo[FL׋=PҲᎭ9ui=PP.ܐ.('[LV{nQf]Sbf#R[.eS֟cXQ(,XmfsҜZ3rN ƀZe80+[copBA݀aHUՌ^Iu.46Wzx>rH|_nhw]گ4׳b}wŅyS;7k!%~MMRJ"HddBCUBclQQF#?K>ȋxOEʎI=h T s +&r= >BLw7~֣KtyNM oKG tH/L NBY BrR57ͧ7 #MCBҍV"[5a3EB]Rh&&xV‚Fl3q D^g\?. &Zܗ|w=X2>Q81%Q<8_ ~Qn3,jpߠ8U~WD}9K' F,Ԍ ( b/9,xT;Nle5tm.. T0^"!`sC{t%Iʅ)/~۴o"/;JF4|sBN(NjV-%{a#70ud_1b[Pg3ۅ[K\[#hF '%@[A1I|HD8okw_6 FWУ~1D(5FVL\<'u!ɹd-S6 WӥT[-5CiTX衅VqjRIsfB,p  \.8\Ư@jc1dn=$Q蓁t"̳Xq]UT,"v{ܖ$'Hdh39㳭-j+@cƦ PObH=h8:H(5 skS/ yH3"P8~;1TS-  M W|˜r;͒Qln啟4J{Pu2K&Y+0mzP8!wC%}lxRsFmI~t6♥Ζ͡oZ6l: :CۍVL_nnYn㶻x+`1:E1IԳjq e=FWc pJOs8wRv0Hw50ZHkirm.!r: (yf&lFG`I2>enRQpSkl\&r7lzCg&avTvϡI.NB;PD xr?_t}v iۆz˦GC+M9zTv++ܓ?xЦ~9xJ@S?LJ^ث9p;f!Կ,'Q̗jJȁwLhQmu.f"(^`:#)֕`eC@-jĩH;RZH@ێיj:#^ijB\N+9v) ƌY`ah 6W\.SPXєuN95ET;BVJ;''ҹc훹Zεzo=xl E1 TIjkXhE FzE2f9=4^R[DO>Q> j|rľ8^$[Sj;Atevl.#ޙZ=46^SX)Հ뺤V>Cl9MB$B+ҕ1B ^Lb P ECN0+O' ]A"qNƜǝ1 S (nW1O)8gAKG'0l[+f4!͜i0_,j.uL8%T,d"HpAç0;uCR+T⸋0=;B ^zyg5+-;lސ1+ۣLU\Gr=.~}׿<1!y~DF<\_|Rˑm[ZHqmk%b%]R>U{RO&wd}|cdӢNҹo owI;胇;i֗,xQFu!AFu+_S0]>LUF?===]s4K.U|kNŵ=ClT^ -$JhO8i.^RM$hd.Sw*c6WŠy~u+l dD>v2_V NTuRe{-^frPw XflbHHL/70:E/l.7](x,cJ:$1XVrlX<@cӕP-.JV;nu) AO%ie<=Hz6.!=2wgorQ~]b^| RXF*1;mpVZqx:Y)_d2UYPF~fF}*Mg4V"J\jc s1TI7H$Wd:Xf[>JK$ŝ3,T!~(gs9W-mf484DέbaP/ܲg(,2 "g3e#jwuL4[W/mlH])@=Ow&y[ޮ{_ $k n$wr2"%t9x˙]G{zlhhU_^ۗ;o~J.^צ䌹xmp!So)ٜW1C+6)(D-u=\rZ:cy;5\ߘFQGѭ*#bӧ5!A)d,-AI'n+QQq4*,˜[XZyq~_ C DIK6i_j_Ee-0Wd]o`.vGd_CS]<&B5QCѭD-L-6Rr_D;4lw%Qw>n"_$l'KrR 9.'Fw pޅ(m,hEE1^<~\рÿkezpR-, B Ft$hXGcVuHӹF8-jGr4 9:x*93iW\ "kfEBy4)=Doȇu"jrigXl\e1=+N @͂?m)&bή$⧸ۇ"l3io $FFT˙DLj|]cmʏ0vhAiE T/viv=6$˨~S!^^8sKs4,X&0=fYku2!J݇{/6݂wDb3ې{g6*9ZBa:i@*][~+/Uo2Vx2_"n*{I @VzDTBF狵 ̋$ 7Z MؓQ@hFHۥ0o|"<:! NkXMnT!c Oѷ X *YEyRj,FFBYg>WcCeq'2; m#K<}D BjP8j5*m?L!$s7KͭF5@F@0ٜC ^6gsV)vAhjᲾp͆JzSJIo1ii+94GCJŻw4|?o|Z(WMlk9``9˂Ǘ BrW.8q "/O`vP{}83@S mxrID(VL^S ;&fw|+K)%`cUFM'*^0+-! 4:x`q1ײ{R\B/3x] BH#vd s32w'K,'j7dФ^HnhR)CW׎jqE:O/99#N}!4Q XV W&քWKxu`WꂚF:ac*؀i>UUdYNJĹLzw蘷F?\}KE<8Yi?=szX:;ZAM߷Xp4}ڑ#jbI43EM9_/^no(ĽgNSy r3.-TGyb`~ DOͮhA]G@.9?mۇn%il[!O׷F]ƮͣT7m1TzfG A\aH :tܰV/ɥ{ǕT]2cTb Bjf|;6F/5"l sC^-GϊkG7ytzk ,Jk%֯#\ZaL؃n%bOWk<;ܸ xbc z}rRxRLxO:Y<0"r9P0^P\mi_p8k|sf)ᖮ'M*Ht}&:{ q?ҏ(F5|Q)GK5KH^Yu{HuzFS:fcl( dꃓ ] u0\~κ/(KxF'GϦ%ӯV᧽n0(=Jd=*81.zN;IC3vn'r6[!t}a{FLx}98%J>IIbm9l&JEE'UCF$0@i4Di })KBҏ ~F$E(;PEݸЁozIR /ᐟ Di}Vq确 }O8z4e.4aqW xc)[OLqo+#JHzSǵbC@#H$%i%?ۗ$l@p3=E! 
u:9l!Fh&~05%x:ǴB5IO;ʿ)~atyd;8h 3;9+<lx7r 6ٟ:!M Pe*vR'wd蟣- ЬU{]%ÊAl {qq2&6pҽ ZYdsD^h,$Iqə T$l\&tB(/ފ}6Y/Pbp%S(T:ݳ kve. +2Kj(%QeF^FLI#0*t9QdK*i2Ǹ@jSw2ƽáħUO.Uj0aENWBCLsg#lakcs"kq;Ec%Lt2ȅ(L^)WJ6긦@UD4+W u [7̛1DƂ  ԉU] /r̍xEP`)u]gf+Gٹ&r5ϣ=@z> AA?Ø0oAŲ~(׿ &b fl>B.+' mXS/'0<0Qr/䅊oOv!0WV^6(<Њuߡ4 !/9C$K3}Q **ă ֪r yrcP]6[>3AWWY;|1ZWY2 LUGJsmɴYXkXm7I*Ж#K<Ќytdi&e|Duk0NG!yxd O@˿+@Zjr h RpC5Q@WrX:at4w6]~rle;'̕BE[5aoyϡ*X7_U _`U Nb_FZf>K_8SkxYڲ].)}CҵSz`rgݠsdcO9{^737p~U Y ;K$DOgֿG<G0q/:R}Mp8u*e \m،<_O>+y1}Q|3lؾH; q)E6; !ﯖQtZX)[Mv< =7\3# $Y{=X׾Ԟ|:̘->`z`ѯETI=Cnq۫|[~I5[ZY3(4 NqS⸪_!ଢ଼$2RʮR2~Dh= 0O;v(Av6XjȂHJ.tҿ_O$PV(Jz"KN v]D_Pz'Uu(KLObLrCBX?RYvVtWBk HymhuF5nY\OD) J9やﷻX~rO)ȿw |\*$f; IU1hd"LGV:(װ =dijC./fJ`[H󃰒ӡR̢>9aYʽ$^&} #ϦSrN8[i^5dp<6H>TY6]%l*g)u<TfxqfV;[' hVyo`It:GdD0I lBMl 'CtuXl0tk=a--Z|.bkudgFr[Y:!ܩFp֏U >= 8nn` Lc`T85nf2L=継=/W8qm"?hͧ tzɴMs ɡ$zSz:EYjql= tu+^<}OnM};ѧPx@eVTp"0Vf:&\꿝WYϷN+)NK׺pg.(ذn Ԑ+2}-:aTFlC GƿȨ\%Īf?e2X>R3]ۤ!XG_<*HEUI_Ti驀JyZJRj JT+*EG7ϙs@Boks^ #+Ymtu7.^FDN* iO,RR{.n mCv$$i+̃Jl2MYJIIM HKj).a].1XJEFuB|().糴+B"}cdީaerC&uuf/5+B/VI(x׍?I1<[:&"mn&Ï lĮl)Y2ȑ;beq5pw1%<(a/8K3 /)_F@()[.o9B8n~ >{Q5 .QW@+wZ6۝i}3[WlcnDȉ8q5Às#\Z`-e 0A]ˀND3)(xek!ÍP{B(oQ˒N EuSozGY0ŚV,{ A t!ŧbɄYMSUr]Yf(RX{I#E5wj4:8,a@lg Pp9]0i?CmsM^+sS=j4}6DF9Y|CkKBŎF;9c،,p|Ql-43I:q3RӅM5z3axَP)cx85c7d~mj|mR;Ƭsk>Ǭa *HK$+K+Ya;:;XA@~D~nU#؎񽉈ި$uj'r[w'+$$9ÝDzEr_9ȋ$X iˎs;^Yk&ud?oTIw;4>ʢHSt@Kk+3GUEol5U6=zjrHB|-諸VG}OSEx!*G-c]OO@BCuIأ2fl'7!'G2 ^~DRJ/U; (º'hK )gK,ڭcTNZfw#p:o# l]f}e'cL aѓ7~a KN#M=R7*8v#!1wA ӆI932AD,R4pCG7_xtI[b0D"-Ѳlə4kYsohaHޕ>gC#A/ TiG ~T;~>8sQMj x{;”7"q)|, 2Znx'rqBP';1xU┆{"k0y {Q!Fn$}7f )38 lWʩ選/#qB 9Ieu /CloêfKX Qf>H*cOWP!\ceup,5Hb>`kg߭)՚:7 d(^aY0֤NP#'Rvm _i0D@*K"<*|kh\^3xMHD#B"\ejw/]Q5'OeN(-FƊTn.C0\$S*na;"Z݆$+2qѠB8 QKn_A5ERPt`P_]~'Ϸ+ _=GҐGgrF&% ""=ƘFL6UK5ҋ$?DbH!>a^~dAs ./q- K]37~spz]81eTVyctN85wpK$)#jh%P~ WiRo"l/#GwX,|e9zt+kvavby?O=+*W5 c#[E=ݾɸw#,W\B} Bo*|d1^"1ֹesac_)5H3*qYĽAR甾0sД9nc-'b{y<@A·ME,륅@&sr'@rrÝQ~7ve:Є̯Ʈc6BR0̑U.TTFRPޘZhAֿ\-}Y:%ݓR+rIuJMX2#@Q2Khf= 3~i]ve$pEħJ !v3|9E70n"_uNP`"k2GO&۞a_un~GqP} q;ڝ8ՋC՟WsMwښJ:scU #:J/'%X§z?-(BϢtr<$;o.ʽ%AN:B4iB r$Nh)J,x۠oνla+YtatM}eS0MiupxWb7v5/uͰ]"4Ps͠ӉBuB@k}P:'#VvMw.gi1) `.,Qb>^3C]tP^Td+-ܹ4`ޜ_AY c甛No ~q5qAyf\ l+;9ؽL%N4z< 4RhcT;tpq f:;PJeX6+1|+66`Y&-uy_Swk`~~}_A }-sf?wAYHic|+tՄÞ}'iZ,o_g]Rk9mp Q4kvv{ B#wY:b"^y]Jc'QY G-Zq:v77p=oʭM%툘lt%<ŠЈB(~]E^*ʓ-B(xz?DgВ;bYr:ǩdyهRe.Ş *кhP2&4y~boG`WDDCV?Kac\,Sg(_\2[pE<PZz_MCS!HH[^ܯ/sw))aFz/w67yl 8o!^5ꌵv^-wV`#EH,߃3, B+%ؼ%uiܷg}ݶD\Odݮcbj?2)/nf'6jD6/'BްC^ol v|~Dr}H=A>CWi5'++U.@M\A,D8$GTqMUWY/ҋafqnxW| ;EP o)8唜_ A^[-H$v$prgyۻD}D{.Gĺ4K]=4{=O"H^/ē4, X(.R1Bk*aV:+5z+gwnN0{ 2= m3 zgL !yVyu>'M}=#ōh̭#9z'xLDO{Eh):^ؒE+vxQhCoLc 4fyMwKS r%hEƸn"j]" Y&( |&92 ,.߃_;B` .nKs;X{Rh`)? ̶\ܭ}.)iޑA'ynVP"dO:At'mϐUIH_53HYJ~E3b!yDP_V`=K}BeIkBpYʈMW6.q{!د<8B,0`)Ibѡ 8q1?nfn;- ut^$[ps({S5X]GL/R_4nJ~GCp5.XM%|k0u4Honx7 LC~,~+n4oT݈J>;F6GX8)؅ph6鏝yßB%AGZ:Y",aS̷ ِGyTi1޾:ṟe{)N_ݿ7v]X\`\{RaǽvW ^:;MS^]>>G}IٲSu:,hI:IO N9$baC{L|!s I :/&hĺY[AEp3:_ū=7Y@>NĖFdi7zk',R7$Y2]^SPhЮhBL ,4^>U!~v/l-U|$_Yw1?FvtdwI@sd%(iry R<@cHOѯ36b{^65}%]2!-8, ./K]sɭ|qzfv 6lZ0r'\JERg-y Kߥl endstream endobj 44 0 obj << /Length1 725 /Length2 19040 /Length3 0 /Length 19599 /Filter /FlateDecode >> stream xlcf]-\۶Զm۶k۶mm۶m۶q}n/XR3ȑ9Wd$oQ['ew;cZzN2=== DXFXɘflP200IBvfNrCVFU[+[sC3 33/?Nfs+c\LV flcow627H8SLlVQ6FHo6.N3qȈ( *D蔅6Fi89rmlN#Yd?x#B30 Ʀ6tM]r)F&D;[Y[ȅl휝2F6q`忡V7BI2lL?&sGQs7c#ysvrp6Y?162wq8F6VR}t "TvۘS v`@_-Bᅭuag0ؘL ӿI'n&cc7cCe[C 2r0i=e-$A3?Wعx3<;MưQt_M}tAU eAo)'3ϫ%KxJVm(tȐ5ag͂ ^րCswaoHj]Yii1tch`a on n_1큧n ,j? 
σGpmnrBd.~JUpDn=0wxθLiG9MlK΋BzXqfK-9Io{sl!S╙D'lHv8w!TLw0`09: =mmL-czH@70jKckpc<ȲLlTȜ)1fSO qu\٤›fIB,9p1 璦QEZҗ[>%LpӰϭVX ם41q w,k~|2)ڻ<{`dIo4|gxXT^9 ʁ~erjQhQiR"Κ!{f4\+3L'nY7i+# L/Ad&"CУU3z3Ld[s!8r/ޮq/>Sl1>-/sq6 D91c5֍.p٫t #EH#Kr8dz)*DwFFvh+6"[y(ęQHfS :PBt`SC] Q%2Hp :${S^zc6?A$Sq  j ѽ{wgyệrtȒPaU3Xv2N>DU󏷝%_23](!Eg}0Y먔?4 8bhf 09X4 ommB JRuUN_'i׽+A?#{VA\ZU+7C)E"Pu$co_5N|s-m͋Xo6'Kn qb6kn9$K{>iQKaz]\{~b9}ftKxKY6`}C=H mïx5o2VFD/{[,I[xYVb ?M=.ϔyz"I\a0IvϗG \h E4ͧY{@͏R}k r'C xPa?|:Pe{xsJ<=7hucyrv Q?&#9^ylJcB.$7; 2i/Ove_SEz7$| :nUEǍCuDվʮ\Km|Q˯i$C^Ro϶Ab;7.Cg cZjw8Ξ HZ'%=t $&R] XSǙ{cPfW+~-2~΂uxeRz~P$qLߗr b/xJU 0r@ Gb&b+k>]F飐?J„ؾA1i@烜@C>nTڝ&/xlU}h.Kwj[E4#L) T 6AC&uruHƙ}>҅( _̄jZƟs&+0iǀw m;u,OaD!1^rK$oy4W$b/~%6()X=Y=T{YэI;Ϯ$L;s`FR4p"M}SZ^aJpi5g@Mpu(l+1t~@7W~I_ց2fu\ű2"0̂Pk]zn?ɁIUBNDlI */Drͳ¹6R+bs'cD'bzK$+ĘCdj'RCS ҉\!h2Pz>kXYz/{4{6DS\$`-sNXwW/Qvd"yqO5E֍)>0F8ڭϢyTdA,tبf@cHfvT8 ~V/*@ e~#ø](ZE^,p8D7ԃ6DqT}#6}>ВJ돶h6w5{t"6.l|W-d)g]qf7$Rc(*\FM r%VTPKk|&Ѽ*\TGm\ Ä48wK{h W-<}r`m_ Sz%m}fƸ2\ٍsBrz馅0u3v$WSgiIHII?aap%'0GKn,a3;-T7EIǠ*ӟ$z-6=e&N$qYm@~CMQAvQdAbHqT-*'khS`YI]ZZ"=(3*/FG~DbNq<8YaD} 앺eΜ%jsP1թ62>ô-퓇3 =Bpn̓? cc* +م`vkrq l֨c=N[ $lBo덋4FT %[BFmK!S(N=uTXgτRi긽pKIEVzh2vI&vBE {"ö )/[ /ZbI) T3&ܙdrJ>~CoHɪQA("M.D eSā\`b!pF&>Sk)4 N8QE@bnE (9b1.lzI_>YL gm(0B^qb}'Zx`TIO0!HR|fdtCt{Et0L93×v K4<6+˜.C߫K:9Hg;{H'#Zmڒ[5u,7v8k| :GxeDfibe!{{98U)){rJ!!|M, Le` ݷ!kA2mkg+9͑ ;c\Ҋ/ =qWG Lj1\y#u@,;e_%~+c2oW-nyĦq\zpziPHi=~+qU-jAqL1>@;c9gGRtlYˡZ ̜ʼn5[.r?y[JQĖ! VREyԮ?bp0$6_UzSƢC c2JdBOD{U!`9SEއ˲TJγ.`7jh?=]U k=x^B*|&lR"f2y^{f@;k/wiTZ ;ԺPrPƷ2KVqIPI ԚOM])o'I/XVu E{^v``+zb9`M 4Κ6ZŅ3| ኖ@ -Q-Bf9$AtJ&w=pr4;yY9miO%|YSb]lqZ~s`!@Z!^i09i8C#jr%~&ޡ6恮ys[Vo8^}b=}?i߰ 3h9uH!(cwNv(lV׉11CkDvC:t8^5 Y\wZ(¥Iɡa˧mF8ZyRj@LGL !ems䴵<"3YiR5ԄܔZ~Zin@ $>De:w{-oGZJlLc7kEG:ρYAw^6d4N1Ax>e[1P g[(s'T9Mz)4}/%Msx>/l]foO9W`NL\]EUJ+TFheLO:3W@_IXG6şQ~MG-s|t~֐8{; Ar0m9@ťSLOoYBi9~t%. m'-`Hi絞5xri _ +57\_.lԒUQX`Gg{ }+YXwBuo#ӹvG.nKkAGؚ7]gj202]alS<&w7 rv߸l#,N"[EMN)tRXu==coƙg7--bdX2 ޑ@HkMR{ȴP&([ ك17ӌ3+Vys>Δ֮zd$6gJVqфr"G?yb,dnr#}E'FDTc*o}*⍋fDd+\bvu 44)NPȟ4e~f2.UŎb-ZZC_m}ACy&7<]qZyfHp`kRO,1QcZEN{\3; {1k gsARo =ՑC;~6tzѯ0!L} .]y&(y(L8ݧŰژی<bni'X|oM#O6+ M|;nڌܴ@ >.5M-&D<r%=xAoG`|cV, 7C@%ˏJи >ZHiDc4yaMJNɚX؃oV&yE.r&,msXbޔ"Gd9b 'Q)kvGt{@q7N+FTzQ1/( Å1$CW ~D? 
oA–J L,n0sX|JRyYVkVaěRx.`J_(^eDEnXJ?q@ؘ8}R-l%;øQXLš`уߦpuǨl"7hؠ5|Zn "Ծ;.'oBLܰ1n1U=^ުy1$WW1h",UJ>!8X_ӑCVk߭s5ޚx7JVAWP>~ oXGd~g%^m#O^*"ǽ^n@G`:NUe9~m׎fa=+WTDCCǻmj';#f.5w;6.;jμ~VtosV8ZZ ?j6Vܘ KvAn :i&+KetJH"է08\ΡN }tYtC}#*ՠ6Qv<\} ®ƾ) S`fi;Q]t0TL4&p:LC3~]IIŶ/Kش䤃 }~PW%~%(VGtR:w'cOsLəEݖRĢؑ=eϞk :A-;@lC5'lW o"`gPn>?%Op%bb%$GMpTfeLu]A2sAo  - Zjُ?0DwY|l}q![ˊ&Z&s];z2hRʉ"P  Ɗ|<ĻU6l+JM:64uB/iFr0 YwϩBs?3J f`,Vbh14Z"dh$QCt M$P_sJ|bG[ rJ sPVOֺf:A"izmK'aM.#]K_tdhDJu ]0*}oo i䡁?p{ Pc=_7m&n`%8*eTk{UtmeåMs) 3Q_CFBgxK~laxmǻ" pέ䅼OlM(%;q|ɳͽkoi9KQS*HW|HRP'CSS:%:&͒һ[Xjڌ+,)p e DiLWIE?IJquI 8m`qd_a+h&O7WfKw݉&MA?h]9Z׏YU/ @:W s(l+:xBZ5  lfU=*[|Ɗ|Gl1AvQ=o<]$8QIn%5) Es\-w@d"C/e, t0xCO3tbkZa揝`NxEm?hd18B4paOSӉ}ҢFa;؜Ճ -`3 fsĘP))u0з6v]( %Ph*O V'VZZd^Xُ۬Rf @[gFF)}jEBo&A<3Fc '1) :_$_S;_#ph^Emmŝ똓 s+"'{.Cj#2cd(9EaYE״mVr %IViHMGd{t-A9ؑ XҞSƹ-=c]V o*/`ړ ~PvCd:G iwB1v ޳byO\ۃ^>;OE?QoDhȉ>|ߋV[n }t1`}_(MkkWsZJj 줛##.T WquF/ Tp[HxXA_hhE.~7DQS%,-@l[R2#ȥ]$@;Q8æh"zrϐg7~jU{tbK1s!k o@<ʑtEx7MPPW1̒w4Vٖ,l pZS0~|(V, ]@HIp6a>sѫMٚ2ԙ=z_r$uTQ|sHEĉHQdGn_Xk|0rUXGѡ-^H47+HEMC2nOA6\̐hߥ+?I[kUrpI"/vlOexiX2]"*D23҈$[h],ӓ3ZcoŞ)c1dz9%ɝ=lh8cfP-4 ysKt,5 Mx;w1 u$P<H}fxE൴gte;{-CZr숕U`CK Ɛ/SP)I7YG!Oq5ZFa0~1>Rg w8f匢аWczR,YW *'>O޸>(S4 e7v tME*bܿ&k Nj|EׁO/em]*ڟMc΢p/(pA/7 Lug=zzcbFy yq J(JS^q*10 e:4*J9.p|3mdizP=]iJ0d#@*(4J?V8P^È<Ңa:؊Aى%gQvgM6*ơwSBE~g7R9Cg\J6r/%# ]\c+"ײ~*l!XK-3n1\sϝHsw _6!Kȟm"4TBj.{٬`zdd }efG=qo۫myrk@>TQv B0x*S>܊A9[ngG ' ɛu$&ϢP@FO4 0zqL.K*Ytқ P#jgS+1ĭW3/CHfnë,S`婃L-v}o绳ڒxE;"xDanダŔ1p&NN; l*mä:KZۃB3gM-`k ,„ suvE@Y:ĕeq_\rFdvV!r$%ЄldطA7~2\A\8#?};ey~ϗO'o_Kɹ Iih#PFoDPø^:%~_VXgm+l{$p={<}Cq;|BayJy)=i]p0]z#(+_RY"xi[bNI/ v `z)+lo U\<?wDkn܂2Ө(ꨙU3ڡ[J9{ٞ dn+HW˙QeXVϺOX-dʵi9yV'n#Ӽ/9=KѺŨ?W2ͻˮcg&-Xv.eAU@JN_$[:d^_ef--B>DK Zܔ>(oLy(%D碽cq_Joā2,{=g&M³MhTbG?_*VyvwO5  _vuf&j))}wm>3䑨 >ōvfJޯ'[/©L t$Gҥ? \̐iy~qDRt7/m]RDy IZ?'8ZM=j%U~Tp? ^ة-i08'_!GA5ֹ C:gD V2 ?SIgĔ{%g* phZљB}~6z6$ə ^cIo˲yu!,tpW.{S YYopŨLDvkh?/t4H_f%]ED`Gg=Y= om'h-Hp<P| ȎV95 ~w 3D(q'~]֓~x""D"U~>CjP^"[m|'-}IOBƖn3Ն `ΪM]ܱդ/o3Y5  }-H,5F=>u>生yMzQrw_] \>uU O;!be5:6u/iXt$ Kc ylՊ ze-xx!Q!42l%̒0,&*wtO=0>S>yEUe&Ċ}fUp,55iIĉggnW5ORcA*T g=;5?T[Dx۶`(Y_u1LYk"Mތ(,v]v"az+Tg7yKӈAo+咼gvk T˒^H [1z"mFKJ3G4oo):@mZDe'qS9DDV(p}35@1ڡ~2f:I0,J{B.ńf 1:V5\S]f"ox>dbQ$by,:#qLQ@U5ZLHjE>ֳaVu=i4< AOɒf~|/h!BN(=P5TKS}\f3A,?SrZcbRwc٭ !j<&?f:@.+5'5q*,Lե8wET7>rA*Z+zm8*$mS!`T^Rr]S{m1 8- ݣP_n΋c(U$哊$KP}hgfv> H汖-`{+ (2Fk*n$6w/,UeXqoZJ8O h+rV. XYr^>^]Xy$Cq1f'vo!Fr0h *dҜZ p'tZc噎sqI}32fz$;Q3ٻ/\_VV!揳]vMhG 괺=)5+e2cI D\`N껬Kh!aY."U ƛEݞ¥c| ؁ .-NNm-8f-t gBq`p&OI[=Uo'r; #9&@W sNmUj4?>vL6'uBpUz{V=bר\gUy& x\tWwUm>mR܏l3gr㞃Ǟgmr[Ŵ -%/?K&k#v{PJ9[WyqUS~f#.Ek\ėf=/Tֿ 5W,2g6Ha>B+bz$9s|jFy/^Ͽ -V[:z%j=!@1}vjn󾓏79M7 /Hޜқ0iEk~RBŘe[:\y9}1x$|!!:YmEtr&^WP ҇S' ~@JrrfљBjg#- TUmJUA YlKֻs[>+k]y+en`ژk#hDwYh# ZRa ]j|Vmyr)+.I͊r)y͵L]6,/, Eõ ese9xʥU H׊4"Ro1<=JRg=Eeʨ_˼`L2 (X8}fH BAoB;ι8?h0 |G]ǩ*p=cOؐ\p3h^1\Q03k j Suz_xtfǼ,Bl7)_ 0h&ohs!*QGA B# ?%BbCΣ!{ȘIUv.F J=cN|~E-\IkiĤ<_p9g.=MԚ1Mr^!oitVV q )<gi 05I2xX%a6= Cbo0pReL(Lw~ɚ!GF.(~+Jx%*$Dk7 'K_Qlc)W4wZ"8FNr<55sۏ^**u 8p *X;T}s3~REH#լ8\{ K;Y_ђ셓r`0UTV鞑6b&^~Ф]F O#xyr%]lå ىgV{zt8hNtv^-smBczAZ|z/OXC뵊}j<6zٳE0aB:f`rH#*E+ Q[9Fw˵P81tpp>Qî7@R[Y_YJJ2bZ&{Nf<7 ܑ" 5 7`5hx 8GY|Dj8͝$GNX]?VO pW>Cd4<ѯC$sƍky?&kbp Pl7DObn\ck]3V1;0nMB05j\Gz(;|Q:鏲 Qm1WoMZ]Б1Τ'47 T4O·͞ ӜN0{] ĺ JfdW!F_Mǧ*JdɍAQ.CV eBNN_/; MFƶF&A;J5K:׼ZhD)Q/XHRd5׮HIY3u^ɉYH&lDGB[;v9"ynNc^MmiLh<]*sY E<}rY+"J\/ʿdY]&* fcu Cj“Ì@X#̮<߉9 f7Sc4_!Wּ>V h#"дߌVa' 9epQ^S4H-f hZTlʟ&W{=0AAUz_1zEpCh &\ʇT,a9uO<,ERT~3㪬o_PL4w;h.U)e|Q+RْpSKmEChqwU/{h4Z_qTgVdO拾 t8z QZwh L/_P\D[F|'Hzs#_ vsX0N;흒cmq 8tϞ[&g  B~ʼݝqlӝ&7fT+!@uyOiV ` em/:.S#Tħ:HRUU*u5mµa 4|D'9f-VqL T/#j|{1 \7\p=}}( NAZM'UQP9 ߢFڤ1Ye Ye)`=A+=y>O'u@ n>r*߰&7fW"6" ? 
XV(!DB%r4f}D۝P j~}2np |pߗN0ЛMM6ZdA2jg'7%w=Km-57~mN^e.Ae >־2YKGGd ֣z,ܓnon.H;Xa KSW%l ·#%i[Z8,Z绔/ 1+|"`-$#Wx#"BʋPa9Ѽw.{[eF`֘| ÿ +5׆N>lC&PBM)w@a\]x8eǤjcH_%( +U{t nh-zRfA; B' ā쇥Q ^SmB]F} y 0@wZ~EU8in$YdEtf%m?:Nv9TPMbܔł@gzJ>{pG߰?00C8QrH/ck)T-5γ~ 6ܻUf(~:{̝b'#tƴwQf*QqnER DF|ޟ#K]eF=c-VFugn4s0 V3Wowds3/A\𪠋I΢Ksދz2J={cSp?k9NquvT}o<~.In A3h ܐW,7}Y&4"!,Qt??M]~GK0G<H00Ɗ;0`kz!ݸVmbn|< nWD}hl!^#4Jru@dx^_c"4.X7?^XqPb:r0. Oٳ?IOc뭙 tfe70lea2; gS.\:䘌{%N1)ڇLi9ck_vZf,ܙP8;dyC ր>GI5BA Om"n_)ktsfŏDyD6l>Q0v(/_x| ؼ ۦ6L˯0LceII>:\\ *L+7):PqMFR8Ƃ^㶠[.`SXBKw5ԜW )QY,(ަ]Ϣ7uw6-h{3[abuVn¡ER}|BgŰÈ>c3By$My'-@S74Q]\T.F]5OEp@ 3>1T;V 2`kA5N HZG}gDTNlD4ԣUcZ%)eE#ᄚ4(_6S"5TB!4]V kʴ2Gi2{"ʙ_s!5B(AY?~,ȜYϩ'9v~9 ps+O]͚d %)#^O_^X+sYf؄ PjePdy2 `FǕ Rt+K@S;ZJjRh㧂X.$dIҰDn~[DN/m}Tm!l:x֘1ѝw[dY +=0LG'f=!dȽ<}v&~\ ^0[,iRɾԪϨi[owDR ul8@{ -n(d %U.&;]GgR9<` m$\*i#63Uxn$u]sAM_d&1FchY,q+M/#HQ6%6V` P>o\.@i݀Ͱq2! rKcgwGlt?mO%QQ%v\Z^ǒ#_0.ztFK/iB|w,sJA$Ϲv4w㚷ix#T-XAE a|HRdAqNP5x*R΍D,z >'gĊ0T>⠳j8fit 4|VBdr$xArgV!Ɨ=G) @[R}y}sIL.$nk!jPq*X`գNwM=V ɽ ˆk: x}DqXPehi zns endstream endobj 46 0 obj << /Length 852 /Filter /FlateDecode >> stream x}UMk@WlxW1$ |hPzuj%#%m 9ؼ}3IڽضߺItՓ;q\]U}s9|\qtY.Wժ۟oo C}? F`PޗS3ߎN6[w:n7l̵^y]/׵IFsuB`nlB =@ )U 9yI(ѥ S*043``ÍSqf|kiCc, pDˆzF:x0)ljsn l9u}SrI4"nXCA8%&ٵ6AIǚc:˿#7^YHOupQF^odž1BЖEQ?1^׆ƨАԗ039+ãbLi~jЙ}s~zrCOe fYJ|֟uМ8gΈrYφ}ŊϘъ1LҊkgigϘ݊og3f3|3ߊY[3 =L3f/gd ,' f)Rx jb&'W *~8d0UPt" ~7a3t> stream x}UMk@WlxW1$ |hPzuj%#%m 9ؼ}3IڽضߺItՓ;q\]U}s9|\qtY.Wժ۟oO+>G|V|~+>C1 V|B|FB|/g)g1{!>_|&~'a9i0K!cB{XTK5;)NŽbPq<${y儢 1 螡SsWѱ?"~t(Vu endstream endobj 48 0 obj << /Length 852 /Filter /FlateDecode >> stream x}UMk@WlxW1$ |hPzuj%#%m 9ؼ}3IڽضߺItՓ;q\]U}s9|\qtY.Wժ۟oG|F/+>㽴3Z~Z83f3[:٭ ߬Lg3t33 ~!>CO!>S 33>IY ?BXIAup*Çq&#{U-'H8qe%@ 8{Y;lFz?< endstream endobj 49 0 obj << /Length 851 /Filter /FlateDecode >> stream x}Un0+CW`$ MEDbɐCUA6ّD^}{l; imXc3t5n/.XjR˺^o3򼟸kյ uA )`JbD>`2$`TY'``9&Dkx,+0*NXXQQ3c 7M/߻Q 𭑦 btX& #q,pg'~ι58|%Nb'QDa 8g"h~ ' ~XkzǚOx! !=iaM4c̓ʳGym: C[1Flx L^"K~2&NCC&^P_,KV0d 1էMw"CgcY ~Y =9O('=g)YB|֙Bs:Sb+>cF+>3qg3K+>#>cv+>CӊϘ݊͊T_|~+>Cg!>o!>_33Ϙ/>?㓁41K!=,ߊTG^1|8Gh=¼WrBщWI\_tЩUtȢa5}n endstream endobj 50 0 obj << /Length 851 /Filter /FlateDecode >> stream x}Un0+CW`$ MEDbɐCUA6ّD^}{l;y| #:##0)%T\`YQqJƚ`ci|1Mލbo4m `2WQ/cW888sέ-./qJ;&\ k(d?F#h0\?IúXs>Tg ]IncT5obY:socsOPcYB?9Os֙3\Q.4ٰX3Z9#>^Z} ?L[ V|V|oV|3[: } B|)W|L| ,Y a!SMV,鸞:?8C8G潪N$ĸBO2{Nu]ޯpDQ endstream endobj 51 0 obj << /Length 851 /Filter /FlateDecode >> stream x}Un0CƆ"Rjn"73iwWUofx \iEܰpNMk l4\? 
?5=cMu/x1g1=ia4c̓˳G6ڀ cxmcfƨog!/lmΘ8+^P_-C#[34IN؆1t?E߂ޡSrY ֟gg9433.XgB3\iafq3fts ,>G|F/'>t3:~:83fw3;:ٝ ߜLgw3t33 ~)>CO)>SK3- ,YJa)SMV襤:?85JC4G潺N$ĸBO<{Nu޿_E,vFo?; endstream endobj 52 0 obj << /Length 851 /Filter /FlateDecode >> stream x}Un0+CW`$ MEDbɐCuA6ّD^}{l?YtգиY}w 9]7puewSClݤMӍ'oܺ sR^}5s89 4Rӫ~R~K}O7Sk`."wAg LC3Ɋw۷qrMR8 o&݋L'lϧӫJnz_~NͿvr*aMߺkܰ^\zu \g$y=W/Q &)H8@hcRe]*q8cMC0c F F 1e|qi.Ke0^Ǣ9^'-pʹ)pq[[G]p_/+v5MĹPN~v-G`~uX}/S/w"': fyRy(#c^g!ch"ƨ-kC^d cRx~h K^| МQV14Nd5cY9Y?C9돡'g ?%>O:ShYggΈrYgDg>[bghX|&^V|{ig33qgng3tZ[Yog,g-g B|B|\3gg|2?f)O5[TT+?MGZN(:p y?0K:q:~Gw2 endstream endobj 53 0 obj << /Length 851 /Filter /FlateDecode >> stream x}Un0+CW`$ MEDbɐCuA6ّD^}{l;~\N=Cu.7զOu½t}5s9Ngiy| #:##0)%T\`YQqJƚ`c2U{75Ҵ!\,18"\aD E_sN[sS9)9^W$js7 GZ ׏p$cMϱXS_ 7^EHOupQF^odž1BЖEQ?[0^׆ƨАԗ0 9+ãbLi~jЙ}s~zrCOe fYJ|֟uМ8gΈrYφ}ŊϘъ1LҊkgigϘ݊og3f3|3ߊY[3 =L3f/gd ,' f)Rx jb&'W *~8d0UPt" ~7q3t> stream x}Un0+CW`$ MEDbɐCUA6ّD^}{l?Ytգ;q\]U}s9n\i|ٺQ]jOtusR^}5s89Ηfi<WOlKQn8N[-#;skQ70j(+o!$oΣ;n},j7Fzoкнzr::Q:XTߙOo'BZ;vv݋ ZԲW<'suB`ilB =@ )U 9yI(ѥ S*043``MSin|kiCXc, pDˆzA:x0)ljsn l9u}SrI4"nXCA8%&ٵ6AIǚc:7^EHOupQF^odž1BЖEQ?[0^׆ƨАԗ0 9+ãbLi~jЙ}s~zrCOe fYJ|֟uМ8gΈrYφ}ŊϘъ1LҊkgigϘ݊og3f3|3ߊY[3 =L3f/gd ,' f)Rx jb&'W *~8d0UPt" ~7a3t> endobj 12 0 obj << /Type /ObjStm /N 34 /First 259 /Length 2782 /Filter /FlateDecode >> stream xZYo9~.1 ;M21AmkGI$V[vl^2X$:H%Qh/B;aM: g0tF'*) V- d LG SjsZ ^Q :%LD"&XyTH"EoV"eP8"D60)-> Ze"F)Am=|%\7B7[j\g6[Mnf@ys:N>w >mHNo~ fp\؇IÇB[*B6At)M04]&jh۰|9 x<<:j=+ZڌV͕߮u+,z6"*!z6wI"F|o{ۆh F5R~-`gv4y  HUXh W8AZ*߃b.ϸs>.Lܿs5-I:<U3*Rŕ7`%%ka56ʭX\0͘ތkM;3$c+tkkYBɡds 6+hהqeju:HƷBx{Aº<y|}8S{F_Gs"Peʾ `n[85ҟN b)}a"yMո=0Q.Kp,Nc{(s=̡P.B9ܷ mi'Âl̆$qin`RZcmޥrnY9By\-n?āc+y<A#cUg&LKV֜0GcģOg2^Ү{(ÔW-:>*X̥@Gr4m94b*9tK>x#rDbəzk.+ntu,}Lǭf nON6Ӝ为S^a 6Ss ִ34kkڮSn^±X!dלP@[a53CrE9M⭩oqfE!)bR`_GvɺS vZni]ɜyq T@7| aC3X2RbDS҈hVe+W0u{qZG5[^)Hm_Ay[h%ؓQ/(RSi H,@/-d[VO3.RFmAs,<#-sEsSQo$/!6yʺ'vti]ݑ)p@ 9Iel%2܄4`Qze^x5ȹn.:n,i J/4S" ,\f(m*Zޘ,G4ڡQmu/t,+pˤX4oGD|_wp:O% ̆.6dhpk3:(=9ƣy4o._An7e lr0`/]!w y$O@6\XN䪑S9i_IOqTWoczm1:dp?GP~h *&_7-'ojG'Gf٬foG!Xb1|ӭ'owH #|w)wMj=/_SxIpa:|ў~ D_Vi1[1]䯲i)OM%L69|159%ϧ2Nͱ7"7ͬJJλ1jAnqmON `[_M 'o@,TbUoԖk@Ѧ1iTZdt_pTd1Rw5AYspGFN@ o~p~`/gII41VJbgJ^'b9VrL/wNQS6\Bn?Ƈ/c{"RS'7t'Eޟsފ8mCpEDhB"%qޙcÜ 1;5!-Np1h:cX=bK>t~1OmHtM 1''W#A8پҰjhuO0.5k7װV^~=~ +U uBC{ys  cC^ \ajE^,5k7װ"jzb;PݯLV4wbUP5kW4e &|ܯ-+~6n@S]i.G8LηxdVу endstream endobj 57 0 obj << /Type /XRef /Index [0 58] /Size 58 /W [1 3 1] /Root 55 0 R /Info 56 0 R /ID [ ] /Length 190 /Filter /FlateDecode >> stream x;jBas>Qs&z}o  RDnc!mX[[ظ 7JL? P@USD.ECtċh-&v vmQIi 2"+rQD&<1YlȪH\V%ufU&' yzsK*x iv̿=y9|{~6ѣq[ endstream endobj startxref 162973 %%EOF amap/inst/doc/amap.Rnw0000644000176200001440000000342210677417044014333 0ustar liggesusers% building this document: (in R) Sweave ("ctc.Rnw") \documentclass[a4paper]{article} \title{Amap Package} \author{Antoine Lucas} %\VignetteIndexEntry{Introduction to amap} %\VignettePackage{amap} \SweaveOpts{echo=FALSE} %\usepackage{a4wide} \begin{document} \maketitle \tableofcontents \section{Overview} {\tt Amap} package includes standard hierarchical clustering and k-means. We optimize implementation (with a parallelized hierarchical clustering) and allow the possibility of using different distances like Eulidean or Spearman (rank-based metric). We implement a principal component analysis (with robusts methods). 
\section{Usage}
\subsection{Clustering}
The standard way of building a hierarchical clustering:
<<>>=
library(amap)
data(USArrests)
h = hcluster(USArrests)
plot(h)
@
Or for the ``heatmap'':
<<>>=
heatmap(as.matrix(USArrests), hclustfun=hcluster, distfun=function(u){u})
@
On a multiprocessor computer:
<<>>=
h = hcluster(USArrests,nbproc=4)
@
The K-means clustering:
<<>>=
Kmeans(USArrests,centers=3,method="correlation")
@
\subsection{Robust tools}
A robust variance computation:
<<>>=
data(lubisch)
lubisch <- lubisch[,-c(1,8)]
varrob(scale(lubisch),h=1)
@
A robust principal component analysis:
<<>>=
p <- acpgen(lubisch,h1=1,h2=1/sqrt(2))
plot(p)
@
Another robust PCA:
<<>>=
p <- acprob(lubisch,h=4)
plot(p)
@
\section{See Also}
These examples can be tested with the command {\tt demo(amap)}.\\
\noindent All functions have man pages; try {\tt help.start()}.\\
\noindent The robust tools have been published: \cite{caussinu+ruiz} and \cite{caussinu+ruiz2}.
\bibliographystyle{plain}
\bibliography{amap}
\end{document}
amap/build/0000755000176200001440000000000014705252655012302 5ustar liggesusers
amap/build/vignette.rds0000644000176200001440000000031314705252655014636 0ustar liggesusers
amap/man/0000755000176200001440000000000014704670715011756 5ustar liggesusers
amap/man/burt.Rd0000644000176200001440000000256013275363670013225 0ustar liggesusers
\name{burt}
\alias{burt}
\alias{matlogic}
\title{Compute burt table from a factor dataframe.}
\usage{
burt(x)
matlogic(x)
}
\arguments{
\item{x}{A data frame that contains only factors}
}
\description{
% matlogic: complete disjunctive table
% burt
matlogic returns, for all variables, a matrix of logical (indicator) values, one column per level.
burt is defined as t(matlogic).matlogic
}
\examples{
color <- as.factor(c('blue','red','red','blue','red'))
size <- as.factor(c('large','large','small','medium','large'))
x <- data.frame(color,size)
matlogic(x)
##   color.blue color.red size.large size.medium size.small
##1           1         0          1           0          0
##2           0         1          1           0          0
##3           0         1          0           0          1
##4           1         0          0           1          0
##5           0         1          1           0          0
burt(x)
##             color.blue color.red size.large size.medium size.small
## color.blue           2         0          1           1          0
## color.red            0         3          2           0          1
## size.large           1         2          3           0          0
## size.medium          1         0          0           1          0
## size.small           0         1          0           0          1
}
\keyword{multivariate}
\author{Antoine Lucas}
amap/man/afc.Rd0000644000176200001440000000113113275363532012770 0ustar liggesusers
\name{afc}
\alias{afc}
\title{Correspondence factorial analysis.}
\usage{
afc(x)
}
\arguments{
\item{x}{A contingency table, or a result of function \code{burt} or \code{matlogic}}
}
\description{
Compute a principal component analysis on a contingency table, taking into account the weights of rows and columns
}
\examples{
\dontrun{
color <- as.factor(c('blue','red','red','blue','red'))
size <- as.factor(c('large','large','small','medium','large'))
x <- data.frame(color,size)
afc.1 <- afc(burt(x))
afc.2 <- afc(matlogic(x))
plotAll(afc.1)
plotAll(afc.2)
}
}
\keyword{multivariate}
\author{Antoine Lucas}
amap/man/acp.Rd0000644000176200001440000000304313275363515013006 0ustar liggesusers
\name{acp}
\alias{acp}
\alias{pca}
\alias{print.acp}
\title{Principal component analysis}
\description{Principal component analysis}
\usage{
acp(x,center=TRUE,reduce=TRUE,wI=rep(1,nrow(x)),wV=rep(1,ncol(x)))
pca(x,center=TRUE,reduce=TRUE,wI=rep(1,nrow(x)),wV=rep(1,ncol(x)))
\S3method{print}{acp}(x, ...)
}
\arguments{
\item{x}{Matrix / data frame}
\item{center}{a logical value indicating whether we center data}
\item{reduce}{a logical value indicating whether we "reduce" data i.e.
divide each column by standard deviation} \item{wI,wV}{weigth vector for individuals / variables} \item{\dots}{arguments to be passed to or from other methods.} } \value{ An object of class \bold{acp} The object is a list with components: \item{sdev}{the standard deviations of the principal components.} \item{loadings}{the matrix of variable loadings (i.e., a matrix whose columns contain the eigenvectors). This is of class \code{"loadings"}: see \code{\link[stats]{loadings}} for its \code{print} method.} \item{scores}{if \code{scores = TRUE}, the scores of the supplied data on the principal components.} \item{eig}{Eigen values} } \details{ This function offer a variant of \code{\link[stats]{princomp}} and \code{\link[stats]{prcomp}} functions, with a slightly different graphic representation (see \code{\link{plot.acp}}). } \examples{ data(lubisch) lubisch <- lubisch[,-c(1,8)] p <- acp(lubisch) plot(p) } \keyword{multivariate} \author{Antoine Lucas} \seealso{\link{plot.acp},\link{acpgen}, \link[stats]{princomp} } amap/man/hcluster.Rd0000644000176200001440000001363414704670715014105 0ustar liggesusers\name{hcluster} \title{Hierarchical Clustering} \alias{hcluster} \alias{hclusterpar} \description{ Hierarchical cluster analysis. } \usage{ hcluster(x, method = "euclidean", diag = FALSE, upper = FALSE, link = "complete", members = NULL, nbproc = 2, doubleprecision = TRUE) } \arguments{ \item{x}{ A numeric matrix of data, or an object that can be coerced to such a matrix (such as a numeric vector or a data frame with all numeric columns). Or an object of class "exprSet". } \item{method}{the distance measure to be used. This must be one of \code{"euclidean"}, \code{"maximum"}, \code{"manhattan"}, \code{"canberra"}, \code{"binary"}, \code{"pearson"}, \code{"abspearson"}, \code{"correlation"}, \code{"abscorrelation"}, \code{"spearman"} or \code{"kendall"}. Any unambiguous substring can be given.} \item{diag}{logical value indicating whether the diagonal of the distance matrix should be printed by \code{print.dist}.} \item{upper}{logical value indicating whether the upper triangle of the distance matrix should be printed by \code{print.dist}.} \item{link}{the agglomeration method to be used. This should be (an unambiguous abbreviation of) one of \code{"ward"}, \code{"single"}, \code{"complete"}, \code{"average"}, \code{"mcquitty"}, \code{"median"} or \code{"centroid"},\code{"centroid2"}, \code{"ward.d2"}.} \item{members}{\code{NULL} or a vector with length size of \code{d}.} \item{nbproc}{integer, number of subprocess for parallelization [Linux & Mac only]} \item{doubleprecision}{True: use of double precision for distance matrix computation; False: use simple precision} } \value{ An object of class \bold{hclust} which describes the tree produced by the clustering process. The object is a list with components: \item{merge}{an \eqn{n-1} by 2 matrix. Row \eqn{i} of \code{merge} describes the merging of clusters at step \eqn{i} of the clustering. If an element \eqn{j} in the row is negative, then observation \eqn{-j} was merged at this stage. If \eqn{j} is positive then the merge was with the cluster formed at the (earlier) stage \eqn{j} of the algorithm. Thus negative entries in \code{merge} indicate agglomerations of singletons, and positive entries indicate agglomerations of non-singletons.} \item{height}{a set of \eqn{n-1} non-decreasing real values. 
The clustering \emph{height}: that is, the value of the criterion associated with the clustering \code{method} for the particular agglomeration.} \item{order}{a vector giving the permutation of the original observations suitable for plotting, in the sense that a cluster plot using this ordering and matrix \code{merge} will not have crossings of the branches.} \item{labels}{labels for each of the objects being clustered.} \item{call}{the call which produced the result.} \item{method}{the cluster method that has been used.} \item{dist.method}{the distance that has been used to create \code{d} (only returned if the distance object has a \code{"method"} attribute).} There is a \code{\link{print}} and a \code{\link{plot}} method for \code{hclust} objects. The \code{plclust()} function is basically the same as the plot method, \code{plot.hclust}, primarily for back compatibility with S-plus. Its extra arguments are not yet implemented. } \details{ This function is a mix of function \code{hclust} and function \code{dist}. \code{hcluster(x, method = "euclidean",link = "complete") = hclust(dist(x, method = "euclidean"),method = "complete"))} It use twice less memory, as it doesn't store distance matrix. For more details, see documentation of \code{hclust} and \code{Dist}. } \note{Multi-thread (parallelisation) is disable on Windows.} \author{ The \code{hcluster} function is based on C code adapted from Cran Fortran routine by Antoine Lucas. } \seealso{ \code{\link{Dist}}, \code{\link[stats]{hclust}}, \code{\link[stats]{kmeans}}. } \references{ Antoine Lucas and Sylvain Jasson, \emph{Using amap and ctc Packages for Huge Clustering}, R News, 2006, vol 6, issue 5 pages 58-60. } \examples{ data(USArrests) hc <- hcluster(USArrests,link = "ave") plot(hc) plot(hc, hang = -1) ## Do the same with centroid clustering and squared Euclidean distance, ## cut the tree into ten clusters and reconstruct the upper part of the ## tree from the cluster centers. 
hc <- hclust(dist(USArrests)^2, "cen") memb <- cutree(hc, k = 10) cent <- NULL for(k in 1:10){ cent <- rbind(cent, colMeans(USArrests[memb == k, , drop = FALSE])) } hc1 <- hclust(dist(cent)^2, method = "cen", members = table(memb)) opar <- par(mfrow = c(1, 2)) plot(hc, labels = FALSE, hang = -1, main = "Original Tree") plot(hc1, labels = FALSE, hang = -1, main = "Re-start from 10 clusters") par(opar) ## other combinaison are possible hc <- hcluster(USArrests,method = "euc",link = "ward", nbproc= 1, doubleprecision = TRUE) hc <- hcluster(USArrests,method = "max",link = "single", nbproc= 2, doubleprecision = TRUE) hc <- hcluster(USArrests,method = "man",link = "complete", nbproc= 1, doubleprecision = TRUE) hc <- hcluster(USArrests,method = "can",link = "average", nbproc= 2, doubleprecision = TRUE) hc <- hcluster(USArrests,method = "bin",link = "mcquitty", nbproc= 1, doubleprecision = FALSE) hc <- hcluster(USArrests,method = "pea",link = "median", nbproc= 2, doubleprecision = FALSE) hc <- hcluster(USArrests,method = "abspea",link = "median", nbproc= 2, doubleprecision = FALSE) hc <- hcluster(USArrests,method = "cor",link = "centroid", nbproc= 1, doubleprecision = FALSE) hc <- hcluster(USArrests,method = "abscor",link = "centroid", nbproc= 1, doubleprecision = FALSE) hc <- hcluster(USArrests,method = "spe",link = "complete", nbproc= 2, doubleprecision = FALSE) hc <- hcluster(USArrests,method = "ken",link = "complete", nbproc= 2, doubleprecision = FALSE) } \keyword{multivariate} \keyword{cluster} amap/man/pop.Rd0000644000176200001440000000367413275363631013053 0ustar liggesusers\name{pop} \alias{pop} \title{Optimal Partition (classification).} \usage{ pop(x,fmbvr=TRUE,triabs=TRUE,allsol=TRUE) } \arguments{ \item{x}{A dissimilarity matrix} \item{fmbvr}{Logical, TRUE: look for the exact solution} \item{triabs}{Logical, TRUE: try to init with absolute values} \item{allsol}{Logical, TRUE all solutions, FALSE only one solution} } \description{ Classification: Computing an Optimal Partition from Weighted Categorical Variables or from an Array of Signed Similarities. } \references{ Theory is explained at \url{http://petitjeanmichel.free.fr/itoweb.petitjean.class.html} Marcotorchino F. \emph{Agr\'egation des similarit\'es en classification automatique.} Th\'ese de Doctorat d'Etat en Math\'ematiques, Universit\'e Paris VI, 25 June 1981. Petitjean M. \emph{Agr\'egation des similarit\'es: une solution oubli\'ee.} RAIRO Oper. Res. 2002,36[1],101-108. 
} \examples{ ## pop from a data matrix data <- matrix(c(1,1,1,1,1 ,1,2,1,2,1 ,2,3,2,3,2 ,2,4,3,3,2 ,1,2,4,2,1 ,2,3,2,3,1),ncol=5,byrow=TRUE) pop(diss(data)) ## pop from a dissimilarity matrix d <-2 * matrix(c(9, 8, 5, 7, 7, 2 , 8, 9, 2, 5, 1, 7 , 5, 2, 9, 8, 7, 1 , 7, 5, 8, 9, 3, 2 , 7, 1, 7, 3, 9, 6 , 2, 7, 1, 2, 6, 9),ncol=6,byrow=TRUE) - 9 pop(d) \dontrun{ d <- 2 * matrix(c(57, 15, 11, 32, 1, 34, 4, 6, 17, 7 , 15, 57, 27, 35, 27, 27, 20, 24, 30, 15 , 11, 27, 57, 25, 25, 20, 34, 25, 17, 15 , 32, 35, 25, 57, 22, 44, 13, 22, 30, 11 , 1, 27, 25, 22, 57, 21, 28, 43, 20, 13 , 34, 27, 20, 44, 21, 57, 18, 27, 21, 8 , 4, 20, 34, 13, 28, 18, 57, 31, 28, 13 , 6, 24, 25, 22, 43, 27, 31, 57, 30, 15 , 17, 30, 17, 30, 20, 21, 28, 30, 57, 12 , 7, 15, 15, 11, 13, 8, 13, 15, 12, 57),ncol=10,byrow=TRUE) - 57 pop(d) } } \keyword{multivariate} \author{Michel Petitjean, \url{http://petitjeanmichel.free.fr/itoweb.petitjean.class.html} R port by Antoine Lucas, } amap/man/acpgen.Rd0000644000176200001440000000762313275363445013513 0ustar liggesusers\name{acpgen} \alias{acpgen} \alias{K} \alias{W} \title{Generalised principal component analysis} \description{Generalised principal component analysis} \usage{ acpgen(x,h1,h2,center=TRUE,reduce=TRUE,kernel="gaussien") K(u,kernel="gaussien") W(x,h,D=NULL,kernel="gaussien") } \arguments{ \item{x}{Matrix or data frame} \item{h}{Scalar: bandwidth of the Kernel} \item{h1}{Scalar: bandwidth of the Kernel for W} \item{h2}{Scalar: bandwidth of the Kernel for U} \item{kernel}{The kernel used. This must be one of '"gaussien"', '"quartic"', '"triweight"', '"epanechikov"' , '"cosinus"' or '"uniform"' } \item{center}{A logical value indicating whether we center data} \item{reduce}{A logical value indicating whether we "reduce" data i.e. divide each column by standard deviation} \item{D}{A product scalar matrix / une matrice de produit scalaire} \item{u}{Vector} } \details{ \code{acpgen} compute generalised pca. i.e. spectral analysis of \eqn{U_n . W_n^{-1}}{Un / Wn}, and project \eqn{X_i}{Xi} with \eqn{W_n^{-1}}{1/Wn} on the principal vector sub-spaces. \eqn{X_i}{Xi} a column vector of \eqn{p} variables of individu \eqn{i} (input data) \code{W} compute estimation of noise in the variance. \deqn{W_n=\frac{\sum_{i=1}^{n-1}\sum_{j=i+1}^{n}K(||X_i-X_j||_{V_n^{-1}}/h)(X_i-X_j)(X_i-X_j)'}{\sum_{i=1}^{n-1}\sum_{j=i+1}^{n}K(||X_i-X_j||_{V_n^{-1}}/h)}}{W: see latex doc} with \eqn{V_n}{Vn} variance estimation; \code{U} compute robust variance. \eqn{U_n^{-1} = S_n^{-1} - 1/h V_n^{-1}}{1/Un = 1/Sn - 1 / (h Vn)} \deqn{S_n=\frac{\sum_{i=1}^{n}K(||X_i||_{V_n^{-1}}/h)(X_i-\mu_n)(X_i-\mu_n)'}{\sum_{i=1}^nK(||X_i||_{V_n^{-1}}/h)}}{S: see latex doc} with \eqn{\mu_n} estimator of the mean. %\eqn{\delta=K(|| X_i-X_j||)} with K: a kernel. \code{K} compute kernel, i.e. gaussien: \deqn{\frac{1}{\sqrt{2\pi}} e^{-u^2/2}}{ 1/sqrt(2pi) exp(-u^2/2)} quartic: \deqn{\frac{15}{16}(1-u^2)^2 I_{|u|\leq 1} }{ 15/16 (1-u^2)^2 if |u| < 1} triweight: \deqn{\frac{35}{32}(1-u^2)^3 I_{|u|\leq 1} }{ 35/32 (1-u^2)^3 if |u| < 1} epanechikov: \deqn{\frac{3}{4}(1-u^2) I_{|u|\leq 1} }{ 3/4 (1-u^2) if |u| < 1} cosinus: \deqn{\frac{\pi}{4}\cos(\frac{\pi}{2}u) I_{|u|\leq 1} }{ pi/4 cos (u * pi/2) if |u| < 1} } \value{ An object of class \bold{acp} The object is a list with components: \item{sdev}{the standard deviations of the principal components.} \item{loadings}{the matrix of variable loadings (i.e., a matrix whose columns contain the eigenvectors). 
This is of class \code{"loadings"}: see \code{\link[stats]{loadings}} for its \code{print} method.} \item{scores}{if \code{scores = TRUE}, the scores of the supplied data on the principal components.} \item{eig}{Eigen values} } \examples{ data(lubisch) lubisch <- lubisch[,-c(1,8)] p <- acpgen(lubisch,h1=1,h2=1/sqrt(2)) plot(p,main='ACP robuste des individus') # See difference with acp p <- princomp(lubisch) class(p)<- "acp" } \references{ H. Caussinus, M. Fekri, S. Hakam and A. Ruiz-Gazen, \emph{A monitoring display of multivariate outliers} Computational Statistics & Data Analysis, Volume 44, Issues 1-2, 28 October 2003, Pages 237-252 Caussinus, H and Ruiz-Gazen, A. (1993): \emph{Projection Pursuit and Generalized Principal Component Analyses, in New Directions in Statistical Data Analysis and Robustness} (eds. Morgenthaler et al.), pp. 35-46. Birk\"auser Verlag Basel. Caussinus, H. and Ruiz-Gazen, A. (1995). \emph{Metrics for Finding Typical Structures by Means of Principal Component Analysis. In Data Science and its Applications} (eds Y. Escoufier and C. Hayashi), pp. 177-192. Tokyo: Academic Press. Antoine Lucas and Sylvain Jasson, \emph{Using amap and ctc Packages for Huge Clustering}, R News, 2006, vol 6, issue 5 pages 58-60. } \keyword{multivariate} \author{Antoine Lucas} \seealso{\link{acp} \link{acprob} \link[stats]{princomp} } amap/man/lubisch.Rd0000644000176200001440000000027710227236165013675 0ustar liggesusers\name{lubisch} \alias{lubisch} \title{Dataset Lubischew} \usage{ data(lubisch) } \description{Lubischew data (1962): 74 insects, 6 morphologic size. 3 supposed classes} \keyword{datasets} amap/man/Kmeans.Rd0000644000176200001440000000462111654604066013463 0ustar liggesusers\name{Kmeans} \alias{Kmeans} \title{ K-Means Clustering } \description{ Perform k-means clustering on a data matrix. } \usage{ Kmeans(x, centers, iter.max = 10, nstart = 1, method = "euclidean") } \arguments{ \item{x}{ A numeric matrix of data, or an object that can be coerced to such a matrix (such as a numeric vector or a data frame with all numeric columns). Or an object of class "exprSet". } \item{centers}{ Either the number of clusters or a set of initial cluster centers. If the first, a random set of rows in \code{x} are chosen as the initial centers. } \item{iter.max}{ The maximum number of iterations allowed. } \item{nstart}{If \code{centers} is a number, how many random sets should be chosen?} \item{method}{the distance measure to be used. This must be one of \code{"euclidean"}, \code{"maximum"}, \code{"manhattan"}, \code{"canberra"}, \code{"binary"}, \code{"pearson"} , \code{"abspearson"} , \code{"abscorrelation"}, \code{"correlation"}, \code{"spearman"} or \code{"kendall"}. Any unambiguous substring can be given.} } \details{ The data given by \code{x} is clustered by the k-means algorithm. When this terminates, all cluster centres are at the mean of their Voronoi sets (the set of data points which are nearest to the cluster centre). The algorithm of Lloyd--Forgy is used; method="euclidean" should return same result as with function \link[stats]{kmeans}. } \note{ An objective: to allow NA values. } \value{ A list with components: \item{cluster}{ A vector of integers indicating the cluster to which each point is allocated. } \item{centers}{A matrix of cluster centres.} \item{withinss}{The within-cluster sum of square distances for each cluster.} \item{size}{The number of points in each cluster.} } \seealso{ \code{\link{hcluster}},\code{\link[stats]{kmeans}}. 
} \examples{ ## a 2-dimensional example x <- rbind(matrix(rnorm(100, sd = 0.3), ncol = 2), matrix(rnorm(100, mean = 1, sd = 0.3), ncol = 2)) colnames(x) <- c("x", "y") (cl <- Kmeans(x, 2)) plot(x, col = cl$cluster) points(cl$centers, col = 1:2, pch = 8, cex=2) ## random starts do help here with too many clusters (cl <- Kmeans(x, 5, nstart = 25)) plot(x, col = cl$cluster) points(cl$centers, col = 1:5, pch = 8) Kmeans(x, 5,nstart = 25, method="abscorrelation") } \keyword{multivariate} \keyword{cluster} amap/man/varrob.Rd0000644000176200001440000000211714326040242013523 0ustar liggesusers\name{VarRob} \alias{varrob} \title{Robust variance} \description{Compute a robust variance} \usage{ varrob(x,h,D=NULL,kernel="gaussien") } \arguments{ \item{x}{Matrix / data frame} \item{h}{Scalar: bandwidth of the Kernel} \item{kernel}{The kernel used. This must be one of '"gaussien"', '"quartic"', '"triweight"', '"epanechikov"' , '"cosinus"' or '"uniform"' } \item{D}{A product scalar matrix / une matrice de produit scalaire} } \details{ \code{U} compute robust variance. \eqn{U_n^{-1} = S_n^{-1} - 1/h V_n^{-1}}{1/Un = 1/Sn - 1 / (h Vn)} \deqn{S_n=\frac{\sum_{i=1}^{n}K(||X_i||_{V_n^{-1}}/h)(X_i-\mu_n)(X_i-\mu_n)'}{\sum_{i=1}^nK(||X_i||_{V_n^{-1}}/h)}}{S: see latex doc} with \eqn{\mu_n} estimator of the mean. %\eqn{\delta=K(|| X_i-X_j||)} with K: a kernel. \code{K} compute a kernel. } \value{ A matrix } \references{ H. Caussinus, S. Hakam, A. Ruiz-Gazen Projections revelatrices controlees: groupements et structures diverses. 2002, to appear in Rev. Statist. Appli. } \keyword{multivariate} \author{Antoine Lucas} \seealso{\link{acp} \link[stats]{princomp} } amap/man/acprob.Rd0000644000176200001440000000426513275363475013526 0ustar liggesusers\name{acprob} \alias{acprob} \title{Robust principal component analysis} \description{Robust principal component analysis} \usage{ acprob(x,h,center=TRUE,reduce=TRUE,kernel="gaussien") } \arguments{ \item{x}{Matrix / data frame} \item{h}{Scalar: bandwidth of the Kernel} \item{kernel}{The kernel used. This must be one of '"gaussien"', '"quartic"', '"triweight"', '"epanechikov"' , '"cosinus"' or '"uniform"' } \item{center}{A logical value indicating whether we center data} \item{reduce}{A logical value indicating whether we "reduce" data i.e. divide each column by standard deviation} } \details{ \code{acpgen} compute robust pca. i.e. spectral analysis of a robust variance instead of usual variance. Robust variance: see \code{\link{varrob}} } \value{ An object of class \bold{acp} The object is a list with components: \item{sdev}{the standard deviations of the principal components.} \item{loadings}{the matrix of variable loadings (i.e., a matrix whose columns contain the eigenvectors). This is of class \code{"loadings"}: see \code{\link[stats]{loadings}} for its \code{print} method.} \item{scores}{if \code{scores = TRUE}, the scores of the supplied data on the principal components.} \item{eig}{Eigen values} } \keyword{multivariate} \references{ H. Caussinus, M. Fekri, S. Hakam and A. Ruiz-Gazen, \emph{A monitoring display of multivariate outliers} Computational Statistics & Data Analysis, Volume 44, Issues 1-2, 28 October 2003, Pages 237-252 Caussinus, H and Ruiz-Gazen, A. (1993): \emph{Projection Pursuit and Generalized Principal Component Analyses, in New Directions in Statistical Data Analysis and Robustness} (eds. Morgenthaler et al.), pp. 35-46. Birk\"auser Verlag Basel. Caussinus, H. and Ruiz-Gazen, A. (1995). 
\keyword{multivariate}
\references{
  H. Caussinus, M. Fekri, S. Hakam and A. Ruiz-Gazen,
  \emph{A monitoring display of multivariate outliers},
  Computational Statistics & Data Analysis, Volume 44, Issues 1-2,
  28 October 2003, Pages 237-252.

  Caussinus, H. and Ruiz-Gazen, A. (1993): \emph{Projection Pursuit and
  Generalized Principal Component Analyses, in New Directions in
  Statistical Data Analysis and Robustness} (eds. Morgenthaler et al.),
  pp. 35-46. Birkh\"auser Verlag Basel.

  Caussinus, H. and Ruiz-Gazen, A. (1995). \emph{Metrics for Finding
  Typical Structures by Means of Principal Component Analysis. In Data
  Science and its Applications} (eds Y. Escoufier and C. Hayashi),
  pp. 177-192. Tokyo: Academic Press.

  Antoine Lucas and Sylvain Jasson, \emph{Using amap and ctc Packages
  for Huge Clustering}, R News, 2006, vol. 6, issue 5, pages 58-60.
}
\author{Antoine Lucas}
\seealso{\link[stats]{princomp} \link{acpgen}}
amap/man/dist.Rd0000644000176200001440000001324414326050134013177 0ustar liggesusers\name{Dist}
\title{Distance Matrix Computation}
\usage{
Dist(x, method = "euclidean", nbproc = 2, diag = FALSE, upper = FALSE)
}
\alias{Dist}
\arguments{
  \item{x}{numeric matrix (or data frame) or an object of class
    "exprSet".  Distances between the rows of \code{x} will be
    computed.}
  \item{method}{the distance measure to be used. This must be one of
    \code{"euclidean"}, \code{"maximum"}, \code{"manhattan"},
    \code{"canberra"}, \code{"binary"}, \code{"pearson"},
    \code{"abspearson"}, \code{"correlation"}, \code{"abscorrelation"},
    \code{"spearman"} or \code{"kendall"}. Any unambiguous substring can
    be given.}
  \item{nbproc}{integer, the number of subprocesses used for
    parallelization}
  \item{diag}{logical value indicating whether the diagonal of the
    distance matrix should be printed by \code{print.dist}.}
  \item{upper}{logical value indicating whether the upper triangle of
    the distance matrix should be printed by \code{print.dist}.}
}
\description{
  This function computes and returns the distance matrix computed by
  using the specified distance measure to compute the distances between
  the rows of a data matrix.
}
\details{
  Available distance measures are (written for two vectors \eqn{x} and
  \eqn{y}):
  \describe{
    \item{\code{euclidean}:}{Usual square distance between the two
      vectors (2 norm).}
    \item{\code{maximum}:}{Maximum distance between two components of
      \eqn{x} and \eqn{y} (supremum norm).}
    \item{\code{manhattan}:}{Absolute distance between the two vectors
      (1 norm).}
    \item{\code{canberra}:}{\eqn{\sum_i |x_i - y_i| / |x_i + y_i|}{sum(|x_i - y_i| / |x_i + y_i|)}.
      Terms with zero numerator and denominator are omitted from the
      sum and treated as if the values were missing.}
    \item{\code{binary}:}{(aka \emph{asymmetric binary}): The vectors
      are regarded as binary bits, so non-zero elements are `on' and
      zero elements are `off'.  The distance is the \emph{proportion}
      of bits in which only one is on amongst those in which at least
      one is on.}
    \item{\code{pearson}:}{Also named "not centered Pearson"
      \eqn{1 - \frac{\sum_i x_i y_i}{\sqrt{\sum_i x_i^2 \sum_i y_i^2}}}{1 - sum(x_i y_i) / sqrt [sum(x_i^2) sum(y_i^2)]}.}
    \item{\code{abspearson}:}{Absolute Pearson
      \eqn{1 - \left| \frac{\sum_i x_i y_i}{\sqrt{\sum_i x_i^2 \sum_i y_i^2}} \right|}{1 - |sum(x_i y_i) / sqrt [sum(x_i^2) sum(y_i^2)]|}.}
    \item{\code{correlation}:}{Also named "Centered Pearson"
      \eqn{1 - corr(x,y)}.}
    \item{\code{abscorrelation}:}{Absolute correlation
      \eqn{1 - | corr(x,y) |} with
      \eqn{corr(x,y) = \frac{\sum_i x_i y_i - \frac1n \sum_i x_i \sum_i y_i}{\sqrt{\left(\sum_i x_i^2 - \frac1n \left(\sum_i x_i\right)^2\right) \left(\sum_i y_i^2 - \frac1n \left(\sum_i y_i\right)^2\right)}}}.}
    \item{\code{spearman}:}{Compute a distance based on rank.
      \eqn{\sum(d_i^2)}{sum (d_i^2)} where \eqn{d_i} is the difference
      in rank between \eqn{x_i} and \eqn{y_i}.

      \code{Dist(x,method="spearman")[i,j] =}
      \code{cor.test(x[i,],x[j,],method="spearman")$statistic}}
    \item{\code{kendall}:}{Compute a distance based on rank.
      \eqn{\sum_{i,j} K_{i,j}(x,y)}, where \eqn{K_{i,j}(x,y)} is 0 if
      \eqn{x_i, x_j} are in the same order as \eqn{y_i, y_j}, and 1
      otherwise.}
  }

  Missing values are allowed, and are excluded from all computations
  involving the rows within which they occur.  If some columns are
  excluded in calculating a Euclidean, Manhattan or Canberra distance,
  the sum is scaled up proportionally to the number of columns used.
  If all pairs are excluded when calculating a particular distance, the
  value is \code{NA}.

  The functions \code{as.matrix.dist()} and \code{as.dist()} can be used
  for conversion between objects of class \code{"dist"} and conventional
  distance matrices and vice versa.
}
\value{
  An object of class \code{"dist"}.

  The lower triangle of the distance matrix stored by columns in a
  vector, say \code{do}.  If \code{n} is the number of observations,
  i.e., \code{n <- attr(do, "Size")}, then for \eqn{i < j <= n}, the
  dissimilarity between (row) i and j is
  \code{do[n*(i-1) - i*(i-1)/2 + j-i]}.  The length of the vector is
  \eqn{n*(n-1)/2}, i.e., of order \eqn{n^2}.

  The object has the following attributes (besides \code{"class"} equal
  to \code{"dist"}):
  \item{Size}{integer, the number of observations in the dataset.}
  \item{Labels}{optionally, contains the labels, if any, of the
    observations of the dataset.}
  \item{Diag, Upper}{logicals corresponding to the arguments \code{diag}
    and \code{upper} above, specifying how the object should be
    printed.}
  \item{call}{optionally, the \code{\link{call}} used to create the
    object.}
  \item{methods}{optionally, the distance method used; resulting from
    \code{\link{dist}()}, the (\code{\link{match.arg}()}ed) \code{method}
    argument.}
}
\references{
  Mardia, K. V., Kent, J. T. and Bibby, J. M. (1979)
  \emph{Multivariate Analysis.} London: Academic Press.

  Wikipedia \url{https://en.wikipedia.org/wiki/Kendall_tau_distance}
}
\note{Multi-threading (parallelisation) is disabled on Windows.}
\seealso{
  \code{\link[cluster]{daisy}} in the \pkg{cluster} package with more
  possibilities in the case of \emph{mixed} (continuous / categorical)
  variables.

  \code{\link[stats]{dist}} \code{\link{hcluster}}.
}
\examples{
x <- matrix(rnorm(100), nrow = 5)
Dist(x)
Dist(x, diag = TRUE)
Dist(x, upper = TRUE)

## compute dist with 8 threads
Dist(x, nbproc = 8)

Dist(x, method = "abscorrelation")
Dist(x, method = "kendall")
}
\keyword{multivariate}
\keyword{cluster}
amap/man/diss.Rd0000644000176200001440000000232310677417044013206 0ustar liggesusers\name{diss}
\alias{diss}
\title{Compute a dissimilarity matrix}
\description{
  Compute a dissimilarity matrix from a data set (containing only
  factors).
}
\usage{
diss(x, w = rep(1, ncol(x)))
}
\arguments{
  \item{x}{A matrix or data frame containing only factors.}
  \item{w}{A vector of weights; by default each variable has the same
    weight.}
}
\details{
  %% Dissimilarity between individuals i and j is the number of common
  %% factors minus the number of differing factors.
  Case of N individuals described by P categorical variables: each
  element (i,j) of the signed similarity matrix is computed by summing,
  over the P variables, the contribution of each variable multiplied by
  the weight of that variable.  The contribution of a given categorical
  variable is +1 if individuals i and j are in the same class, and -1 if
  they are not.
}
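%% Editor's worked illustration, not part of the original file: with equal
%% weights, two individuals that agree on 2 of 3 categorical variables get a
%% signed similarity of (+1) + (+1) + (-1) = 1; full agreement gives 3 and
%% full disagreement gives -3.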
\value{
  A dissimilarity matrix.
}
\author{Antoine Lucas}
\seealso{
  \code{\link{Dist}}, \code{\link{pop}}
}
\examples{
data <- matrix(c(1,1,1,1,1,
                 1,2,1,2,1,
                 2,3,2,3,2,
                 2,4,3,3,2,
                 1,2,4,2,1,
                 2,3,2,3,1), ncol = 5, byrow = TRUE)
diss(data)

## With weights
diss(data, w = c(1,1,2,2,3))
}
\keyword{multivariate}
\keyword{cluster}
amap/man/plot.acp.Rd0000644000176200001440000000316213275363616013770 0ustar liggesusers\name{plot}
\alias{plot.acp}
\alias{biplot.acp}
\alias{plot2}
\alias{plotAll}
\title{Graphics for Principal Component Analysis}
\description{Graphics for Principal Component Analysis}
\usage{
\S3method{plot}{acp}(x,i=1,j=2,text=TRUE,label='Composants',col='darkblue',
  main='Individuals PCA',variables=TRUE,individual.label=NULL,...)
\S3method{biplot}{acp}(x,i=1,j=2,label='Composants',col='darkblue',length=0.1,
  main='Variables PCA',circle=TRUE,...)
plot2(x,pourcent=FALSE,eigen=TRUE,label='Comp.',col='lightgrey',
  main='Scree Graph',ylab='Eigen Values')
plotAll(x)
}
\arguments{
  \item{x}{Result of \code{acp} or \code{princomp}}
  \item{i}{X axis}
  \item{j}{Y axis}
  \item{text}{a logical value indicating whether text labels or points
    are plotted}
  \item{pourcent}{a logical value indicating whether values are
    displayed as percentages}
  \item{eigen}{a logical value indicating whether eigenvalues or
    standard deviations are displayed}
  \item{label}{label for the X and Y axes}
  \item{individual.label}{labels naming individuals}
  \item{col}{Color of plot}
  \item{main}{Title of graphic}
  \item{ylab}{Y axis label}
  \item{length}{length of arrows}
  \item{variables,circle}{logical values indicating whether the
    correlation circle or the variables are displayed}
  \item{\dots}{cex, pch, and other options; see \code{\link{points}}.}
}
\value{
  Graphics:

  \code{plot.acp}   PCA plot of the rows (individuals)

  \code{biplot.acp} PCA plot of the columns (variables)

  \code{plot2}      eigenvalue diagram (scree graph)

  \code{plotAll}    plots all three graphs
}
\examples{
data(lubisch)
lubisch <- lubisch[,-c(1,8)]
p <- acp(lubisch)
plotAll(p)
}
\keyword{multivariate}
\author{Antoine Lucas}
\seealso{\link{acpgen},\link{acprob}, \link[stats]{princomp}}
amap/DESCRIPTION0000644000176200001440000000121214705445647012712 0ustar liggesusersPackage: amap
Version: 0.8-20
Date: 2024-10-19
Title: Another Multidimensional Analysis Package
Authors@R: person(given = "Antoine",
                  family = "Lucas",
                  role = c("aut", "cre"),
                  email = "antoinelucas@gmail.com")
Depends: R (>= 3.6.0)
Suggests: Biobase
Description: Tools for Clustering and Principal Component Analysis (With
        robust methods, and parallelized functions).
License: GPL
Packaged: 2024-10-20 19:10:38 UTC; antoine
Repository: CRAN
Date/Publication: 2024-10-21 12:40:07 UTC
NeedsCompilation: yes
Author: Antoine Lucas [aut, cre]
Maintainer: Antoine Lucas