## File: tm/tests/testthat/test-TermDocumentMatrix.R

context("Term-document matrices")

test_that("construction works", {
    vs <- VectorSource(c("one two two three three three",
                         "This is a short text with a few words"))
    scorpus <- Corpus(vs)
    vcorpus <- VCorpus(vs)
    ms <- TermDocumentMatrix(scorpus)
    mv <- TermDocumentMatrix(vcorpus)
    terms <- c("few", "one", "short", "text", "this", "three", "two",
               "with", "words")
    docs <- c("1", "2")
    expect_equal(sort(Terms(ms)), terms)
    expect_equal(sort(Terms(mv)), terms)
    expect_equal(Docs(ms), docs)
    expect_equal(Docs(mv), docs)
    m <- matrix(c(0, 1, 0, 0, 0, 3, 2, 0, 0,
                  1, 0, 1, 1, 1, 0, 0, 1, 1),
                ncol = 2,
                dimnames = list("Terms" = terms, "Docs" = docs))
    expect_equal(as.matrix(ms[order(Terms(ms)), ]), m)
    expect_equal(as.matrix(mv), m)
})

test_that("construction with control arguments works", {
    vs <- VectorSource("one two two three three three")
    scorpus <- Corpus(vs)
    vcorpus <- VCorpus(vs)
    docs <- "1"
    ctrl <- list(dictionary = c("three", "two", "zero"))
    ms <- TermDocumentMatrix(scorpus, ctrl)
    mv <- TermDocumentMatrix(vcorpus, ctrl)
    m <- matrix(c(3, 2, 0),
                dimnames = list("Terms" = ctrl$dictionary, "Docs" = docs))
    expect_equal(as.matrix(ms[order(Terms(ms)), ]), m)
    expect_equal(as.matrix(mv), m)
})

test_that("zero matrix works", {
    vs <- VectorSource("one two three")
    scorpus <- Corpus(vs)
    vcorpus <- VCorpus(vs)
    ctrl <- list(dictionary = "four", wordLengths = c(1, Inf))
    ms <- TermDocumentMatrix(scorpus, ctrl)
    mv <- TermDocumentMatrix(vcorpus, ctrl)
    m <- matrix(0, dimnames = list("Terms" = ctrl$dictionary, "Docs" = "1"))
    expect_equal(as.matrix(ms), m)
    expect_equal(as.matrix(mv), m)
})

test_that("empty matrix works", {
    docs <- "1"
    ds <- DataframeSource(data.frame(doc_id = docs, text = NA))
    scorpus <- Corpus(ds)
    vcorpus <- VCorpus(ds)
    ms <- TermDocumentMatrix(scorpus)
    mv <- TermDocumentMatrix(vcorpus)
    m <- matrix(numeric(),
                dimnames = list("Terms" = character(), "Docs" = docs))
    expect_equal(as.matrix(ms), m)
    expect_equal(as.matrix(mv), m)
})

## File: tm/tests/testthat/test-Transformation.R

context("Transformations")

test_that("removePunctuation works in latin1 locale", {
    if (nzchar(suppressWarnings(Sys.setlocale("LC_CTYPE",
                                              "en_US.iso88591")))) {
        id <- c(73L, 108L, 32L, 115L, 39L, 101L, 120L, 112L, 114L, 105L,
                109L, 97L, 105L, 116L, 32L, 101L, 110L, 32L, 117L, 110L,
                32L, 108L, 97L, 110L, 103L, 97L, 103L, 101L, 32L, 99L,
                104L, 226L, 116L, 105L, 233L)
        iu <- intToUtf8(id)
        il <- iconv(iu, from = "UTF-8", to = "latin1")
        td <- id[-5L]
        tu <- intToUtf8(td)
        tl <- iconv(tu, from = "UTF-8", to = "latin1")
        expect_equal(removePunctuation(iu), tu)
        expect_equal(removePunctuation(il), tl)
    } else
        skip("latin1 locale not available")
})

## File: tm/tests/testthat/test-Tokenizer.R

context("Tokenizers")

test_that("scan_tokenizer works with character vectors", {
    tokens <- c("a", "character", "vector", "consisting", "of", "multiple",
                "elements")
    expect_equal(scan_tokenizer(c(paste0(tokens[1:3], collapse = " "),
                                  paste0(tokens[4:5], collapse = " "),
                                  paste0(tokens[6:7], collapse = " "))),
                 tokens)
})

## File: tm/tests/testthat/test-Source.R
hornikuserscontext("Sources") test_that("DataframeSource works", { txt <- c("First document.", "Second document.") dm1 <- 1:2 dm2 <- letters[1:2] df <- data.frame(doc_id = c("doc_1", "doc_2"), text = txt, dmeta1 = dm1, dmeta2 = dm2, stringsAsFactors = FALSE) ds <- DataframeSource(df) scorpus <- Corpus(ds) vcorpus <- VCorpus(ds) expect_equal(as.character(scorpus[[2]]), as.character(vcorpus[[2]])) expect_equal(as.character(scorpus[[2]]), txt[2]) expect_equal(meta(scorpus), meta(vcorpus)) expect_equal(meta(scorpus), data.frame(dmeta1 = dm1, dmeta2 = dm2, stringsAsFactors = FALSE)) }) tm/tests/testthat.R0000644000175100001440000000006013065660374014043 0ustar hornikuserslibrary(testthat) library(tm) test_check("tm") tm/MD50000644000175100001440000002654514755316632011250 0ustar hornikusersf471275864f1b29f750384e5665b0562 *DESCRIPTION 9eb5de67c11a7a2ace73df31bec560cc *NAMESPACE c587e5c09daeb47a355ec8510eb75341 *R/RcppExports.R 881f00e795e17803432949ff05facc96 *R/complete.R dffd17856a1f0d1ad45fa6e9cc8deaa1 *R/corpus.R c1ac8a79992c42d3ec695df39b8c3bc9 *R/doc.R beba1a821bfdf61ece1708123ab71324 *R/filter.R b205235d27368949ee5ea0dd3a10b9d7 *R/foreign.R cb5367e831c1be819b9773304985724a *R/hpc.R 32b666ea3b78f2b188cb56c7f3e26790 *R/matrix.R c36f8ed69c326c2b027a670d2662e1d1 *R/meta.R 07d1407f6cfdbdbb6060ebfb11f97f6f *R/pdftools.R b9cd19804a89de8eca51394726256e68 *R/plot.R fd701389b291a843584167ab7385c453 *R/reader.R 5f6ff8b218e7679919b85230b11cdebb *R/score.R 658b904bc1ec319e536ca3844568dabd *R/source.R dee7e0a8b245fd670436a019c54d904c *R/stopwords.R e57141f4a63f3dc13b0ef97c6960a41b *R/tokenizer.R 22ebb540c91c1a2d1494967c6c7395a5 *R/transform.R 1c59b79f99cdeb623f387ea378d0331c *R/utils.R c1de3acc3bc1bc9f64926b93c3be8301 *R/weight.R 76161b65639451c966be75488458b3c3 *build/partial.rdb 5308c469d0f1febee530edbe9c931e3e *build/vignette.rds 687f47ce82c62c4d9dbf26007cb38f0c *data/acq.rda 4127a983c6a0778646eb825d80a01f57 *data/crude.rda 1710cf3dc724c13df75da9f29169d59d *inst/CITATION 68aabe6ff1f89e59ed966800de2ae050 *inst/NEWS.Rd ad6a6fe44b80541732690af3f36a4c32 *inst/doc/extensions.R d194109d976d7f242e64a8eab85026f8 *inst/doc/extensions.Rnw 02783e40ac830733bcf24bb5ddfbbdaf *inst/doc/extensions.pdf fa0d35568c6b1bf9e923168b8118b7d5 *inst/doc/tm.R 788171d86f35f92d96b7320bc5dcad60 *inst/doc/tm.Rnw aaa1296c199cbfaa36038783aaff0e64 *inst/doc/tm.pdf 98f3b5f3d1f670032af4131a627c18d7 *inst/ghostscript/pdf_info.ps 7ec7b5de9c642afedf1159021c89f12a *inst/stopwords/SMART.dat 4c8fb2c1404c10540c267425fcc005f0 *inst/stopwords/catalan.dat 4e8d44fa90d87908846a2d92c2618b31 *inst/stopwords/danish.dat a638b876d5cbec644685d12d452a7407 *inst/stopwords/dutch.dat e181651a30ec45694b7fafc787f357dc *inst/stopwords/english.dat 1094269bf20052a5259983e23c69a552 *inst/stopwords/finnish.dat 29772f7c7dacf306981ad50c5484c4ad *inst/stopwords/french.dat 4a562db64979f200804127c3751a6efa *inst/stopwords/german.dat 1e1f45e67297e049bb22527d7efa8025 *inst/stopwords/hungarian.dat 7dfee49b4660f65f7bb935bef0c773bd *inst/stopwords/italian.dat 4cd3ddc90492cc5a3cbb9f0292d3844d *inst/stopwords/norwegian.dat d3483742365aa7d477512fd1810452c5 *inst/stopwords/portuguese.dat f6a262767ae1863b9e8cc92f78e3bb01 *inst/stopwords/romanian.dat 4bf4046fe7701b4940b8eb2c86f19c08 *inst/stopwords/russian.dat fddb7f14207d2649597b36e22b5eab18 *inst/stopwords/spanish.dat d3930c86664d4112ae772285dca85fd6 *inst/stopwords/swedish.dat 4dc7bdaa3323e71845cf4c018e871048 *inst/texts/acq/reut-00001.xml a63b803ca46191dc3a30eda875d95136 *inst/texts/acq/reut-00002.xml 
## File: tm/MD5 -- auto-generated per-file MD5 checksum manifest for the
## tarball (checksum list not reproduced here)

## File: tm/R/doc.R

c.TextDocument <- function(..., recursive = FALSE)
{
    args <- list(...)
    x <- args[[1L]]

    if (length(args) == 1L)
        return(x)

    if (!all(unlist(lapply(args, inherits, class(x)))))
        stop("not all arguments are text documents")

    v <- list(content = args,
              meta = CorpusMeta(),
              dmeta = data.frame(row.names = seq_along(args)))
    class(v) <- c("VCorpus", "Corpus")
    v
}

.format_TextDocument <- function(x, ...)
    c(sprintf("<<%s>>", class(x)[1L]),
      sprintf("Metadata: %d", length(meta(x))))

inspect.TextDocument <- function(x)
{
    print(x)
    cat("\n")
    writeLines(as.character(x))
    invisible(x)
}

PlainTextDocument <-
function(x = character(0),
         author = character(0),
         datetimestamp = as.POSIXlt(Sys.time(), tz = "GMT"),
         description = character(0),
         heading = character(0),
         id = character(0),
         language = character(0),
         origin = character(0),
         ...,
         meta = NULL,
         class = NULL)
{
    p <- list(content = as.character(x),
              meta = TextDocumentMeta(author, datetimestamp, description,
                                      heading, id, language, origin, ...,
                                      meta = meta))
    class(p) <- unique(c(class, "PlainTextDocument", "TextDocument"))
    p
}

as.character.PlainTextDocument <- function(x, ...)
    content(x)

content.PlainTextDocument <- function(x)
    x$content

`content<-.PlainTextDocument` <- function(x, value)
{
    x$content <- as.character(value)
    x
}

format.PlainTextDocument <- function(x, ...)
    c(.format_TextDocument(x),
      sprintf("Content: chars: %d", sum(nchar(x$content))))

meta.PlainTextDocument <- function(x, tag = NULL, ...)
    if (is.null(tag)) x$meta else x$meta[[tag]]

`meta<-.PlainTextDocument` <- function(x, tag = NULL, ..., value)
{
    if (is.null(tag))
        x$meta <- value
    else
        x$meta[[tag]] <- value
    x
}

words.character <- words.PlainTextDocument <- function(x, ...)
    scan_tokenizer(x)

XMLTextDocument <-
function(x = xml_missing(),
         author = character(0),
         datetimestamp = as.POSIXlt(Sys.time(), tz = "GMT"),
         description = character(0),
         heading = character(0),
         id = character(0),
         language = character(0),
         origin = character(0),
         ...,
         meta = NULL)
{
    d <- list(content = x,
              meta = TextDocumentMeta(author, datetimestamp, description,
                                      heading, id, language, origin, ...,
                                      meta = meta))
    class(d) <- c("XMLTextDocument", "TextDocument")
    d
}

as.character.XMLTextDocument <- function(x, ...)
    xml_text(content(x))

content.XMLTextDocument <- function(x)
    x$content

`content<-.XMLTextDocument` <- function(x, value)
{
    x$content <- value
    x
}

format.XMLTextDocument <- .format_TextDocument

meta.XMLTextDocument <- meta.PlainTextDocument
`meta<-.XMLTextDocument` <- `meta<-.PlainTextDocument`
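## Example (not part of the package sources): a quick sketch of the
## constructor and accessors defined above; the text and metadata values
## are made up for illustration.
d <- PlainTextDocument("A few words of sample text.",
                       id = "doc1", language = "en", heading = "Sample")
as.character(d)       # document content
meta(d)               # full metadata block
meta(d, "heading")    # a single tag
meta(d, "heading") <- "Revised heading"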
## File: tm/R/plot.R

plot.TermDocumentMatrix <- plot.DocumentTermMatrix <-
function(x,
         terms = sample(Terms(x), 20),
         corThreshold = 0.7,
         weighting = FALSE,
         attrs = list(graph = list(rankdir = "BT"),
                      node = list(shape = "rectangle", fixedsize = FALSE)),
         ...)
{
    if (system.file(package = "Rgraphviz") == "")
        stop("Plotting requires package 'Rgraphviz'.")

    m <- if (inherits(x, "TermDocumentMatrix")) t(x) else x
    m <- as.matrix(m[, terms])

    c <- cor(m)
    c[c < corThreshold] <- 0
    c[is.na(c)] <- 0
    diag(c) <- 0

    p <- Rgraphviz::plot(methods::as(c, "graphNEL"), attrs = attrs, ...)

    if (weighting) {
        i <- 1
        lw <- round(c[lower.tri(c) & c >= corThreshold] * 10)
        for (ae in Rgraphviz::AgEdge(p)) {
            Rgraphviz::lines(ae, lwd = lw[i], len = 1)
            i <- i + 1
        }
    }

    invisible(p)
}

## Plotting functions for Zipf's and Heaps' law contributed by Kurt Hornik

## See http://en.wikipedia.org/wiki/Zipf%27s_law
Zipf_plot <- function(x, type = "l", ...)
{
    if (inherits(x, "TermDocumentMatrix"))
        x <- t(x)
    y <- log(sort(col_sums(x), decreasing = TRUE))
    x <- log(seq_along(y))
    m <- lm(y ~ x)
    dots <- list(...)
    if (is.null(dots$xlab)) dots$xlab <- "log(rank)"
    if (is.null(dots$ylab)) dots$ylab <- "log(frequency)"
    do.call(plot, c(list(x, y, type = type), dots))
    abline(m)
    ##
    ## Perhaps this should (invisibly) return the fitted linear model
    ## instead of just the coefficients?
    coef(m)
    ##
}

## http://en.wikipedia.org/wiki/Heaps%27_law
## http://en.wikipedia.org/wiki/Text_corpus

## cum_vocabulary_size <- function(m)
## {
##     ## Should work in general, but it is very slow for large simple
##     ## triplet matrices ...
##     s <- double(nrow(m))
##     v <- double(ncol(m))
##     for(i in seq_along(s)) {
##         v <- pmax(v, c(m[i, ]))
##         s[i] <- sum(v > 0)
##     }
##     s
## }

cum_vocabulary_size <- function(m)
{
    ## Only works for simple triplet matrices.
    i <- sapply(split(m$i, m$j), min)
    tab <- table(i)
    v <- double(nrow(m))
    v[as.numeric(names(tab))] <- tab
    cumsum(v)
}

Heaps_plot <- function(x, type = "l", ...)
{
    if (inherits(x, "TermDocumentMatrix"))
        x <- t(x)
    y <- log(cum_vocabulary_size(x))
    x <- log(cumsum(row_sums(x)))
    m <- lm(y ~ x)
    dots <- list(...)
    if (is.null(dots$xlab)) dots$xlab <- "log(T)"
    if (is.null(dots$ylab)) dots$ylab <- "log(V)"
    do.call(plot, c(list(x, y, type = type), dots))
    abline(m)
    ##
    ## Perhaps this should (invisibly) return the fitted linear model
    ## instead of just the coefficients?
    coef(m)
    ##
}
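## Example (not part of the package sources): a short sketch of the two
## diagnostic plots above, using the crude corpus shipped with the
## package; both functions return the coefficients of the fitted line.
data("crude")
dtm <- DocumentTermMatrix(crude)
Zipf_plot(dtm)    # log(frequency) against log(rank)
Heaps_plot(dtm)   # log(vocabulary size) against log(text size)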
## File: tm/R/source.R

## Author: Ingo Feinerer

## Sources

getSources <- function()
    c("DataframeSource", "DirSource", "URISource", "VectorSource",
      "XMLSource", "ZipSource")

SimpleSource <-
function(encoding = "",
         length = 0,
         position = 0,
         reader = readPlain,
         ...,
         class)
{
    if (!is.character(encoding))
        stop("invalid encoding")
    if (!is.numeric(length) || (length < 0))
        stop("invalid length entry denoting the number of elements")
    if (!is.numeric(position))
        stop("invalid position")
    if (!is.function(reader))
        stop("invalid default reader")

    s <- list(encoding = encoding, length = length, position = position,
              reader = reader, ...)
    class(s) <- unique(c(class, "SimpleSource", "Source"))
    s
}

# A data frame where each row is interpreted as document
DataframeSource <- function(x)
{
    stopifnot(all(!is.na(match(c("doc_id", "text"), names(x)))))

    SimpleSource(length = nrow(x), reader = readDataframe, content = x,
                 class = "DataframeSource")
}

# A directory with files interpreted as documents
DirSource <- function(directory = ".", encoding = "", pattern = NULL,
                      recursive = FALSE, ignore.case = FALSE, mode = "text")
{
    if (!identical(mode, "text") && !identical(mode, "binary") &&
        !identical(mode, ""))
        stop(sprintf("invalid mode '%s'", mode))

    d <- dir(directory, full.names = TRUE, pattern = pattern,
             recursive = recursive, ignore.case = ignore.case)

    if (!length(d))
        stop("empty directory")

    isfile <- !file.info(d)[["isdir"]]
    if (any(is.na(isfile)))
        stop("non-existent or non-readable file(s): ",
             paste(d[is.na(isfile)], collapse = " "))

    SimpleSource(encoding = encoding, length = sum(isfile), mode = mode,
                 filelist = d[isfile], class = "DirSource")
}

# Documents identified by a Uniform Resource Identifier
URISource <- function(x, encoding = "", mode = "text")
{
    if (!identical(mode, "text") && !identical(mode, "binary") &&
        !identical(mode, ""))
        stop(sprintf("invalid mode '%s'", mode))

    SimpleSource(encoding = encoding, length = length(x), mode = mode,
                 uri = x, class = "URISource")
}

# A vector where each component is interpreted as document
VectorSource <- function(x)
    SimpleSource(length = length(x), content = x, class = "VectorSource")

XMLSource <- function(x, parser = xml_contents, reader)
{
    xmldoc <- read_xml(x)
    content <- parser(xmldoc)

    SimpleSource(length = length(content), reader = reader,
                 content = content, uri = x, class = "XMLSource")
}

# A ZIP file with its compressed files interpreted as documents
ZipSource <- function(zipfile, pattern = NULL, recursive = FALSE,
                      ignore.case = FALSE, mode = "text")
{
    if (!identical(mode, "text") && !identical(mode, "binary") &&
        !identical(mode, ""))
        stop(sprintf("invalid mode '%s'", mode))

    SimpleSource(exdir = NULL, files = NULL, mode = mode, pattern = pattern,
                 recursive = recursive, ignore.case = ignore.case,
                 zipfile = zipfile, class = "ZipSource")
}

# tau:::read_all_bytes
read_all_bytes <- function(con, chunksize = 2 ^ 16)
{
    if (is.character(con)) {
        return(readBin(con, raw(), file.info(con)$size))
    }

    if (!isOpen(con)) {
        open(con, "rb")
        on.exit(close(con))
    }

    bytes <- list()
    repeat {
        chunk <- readBin(con, raw(), chunksize)
        bytes <- c(bytes, list(chunk))
        if (length(chunk) < chunksize) break
    }
    unlist(bytes)
}

readContent <- function(x, encoding, mode)
{
    if (identical(mode, "text"))
        iconv(readLines(x, warn = FALSE), encoding, "UTF-8", "byte")
    else if (identical(mode, "binary"))
        read_all_bytes(x)
    else if (identical(mode, ""))
        NULL
    else
        stop("invalid mode")
}

open.SimpleSource <- close.SimpleSource <- function(con, ...)
    con
open.ZipSource <- function(con, ...)
{
    x <- con
    exdir <- tempfile("ZipSource")
    dir.create(exdir, mode = "0700")
    destfile <- x$zipfile
    if (!file.exists(destfile)) {
        destfile <- tempfile()
        download.file(x$zipfile, destfile)
        on.exit(file.remove(destfile))
    }
    files <- unzip(destfile, list = TRUE)
    ## Directories have length 0
    files <- files[files$Length > 0, "Name"]
    ## Idea: Subdirectories contain file separators
    if (!x$recursive)
        files <- files[!grepl(.Platform$file.sep, files, fixed = TRUE)]
    ## Idea: pattern and ignore.case refer to the file name (like basename)
    ## Cf. also ?dir
    if (!is.null(x$pattern))
        files <- files[grepl(x$pattern, files, ignore.case = x$ignore.case)]
    unzip(destfile, files, exdir = exdir)
    x$exdir <- exdir
    x$files <- files
    x$length <- length(files)
    x
}

close.ZipSource <- function(con, ...)
{
    x <- con
    if (!is.null(x$exdir)) {
        unlink(x$exdir, recursive = TRUE)
        x$exdir <- NULL
        x$files <- NULL
        x$length <- 0
    }
    x
}

eoi <- function(x)
    UseMethod("eoi", x)
eoi.SimpleSource <- function(x)
    x$length <= x$position

getElem <- function(x)
    UseMethod("getElem", x)
getElem.DataframeSource <- function(x)
    list(content = x$content[x$position, ], uri = NULL)
getElem.DirSource <- function(x)
{
    filename <- x$filelist[x$position]
    list(content = readContent(filename, x$encoding, x$mode),
         uri = paste0("file://", filename))
}
getElem.URISource <- function(x)
    list(content = readContent(x$uri[x$position], x$encoding, x$mode),
         uri = x$uri[x$position])
getElem.VectorSource <- function(x)
    list(content = x$content[x$position], uri = NULL)
getElem.XMLSource <- function(x)
    list(content = x$content[[x$position]], uri = x$uri)
getElem.ZipSource <- function(x)
{
    path <- file.path(x$exdir, x$files[x$position])
    list(content = readContent(path, x$encoding, x$mode),
         uri = paste0("file://", path))
}

getMeta <- function(x)
    UseMethod("getMeta", x)
getMeta.DataframeSource <- function(x)
    list(cmeta = NULL,
         dmeta = x$content[, is.na(match(names(x$content),
                                         c("doc_id", "text"))),
                           drop = FALSE])

length.SimpleSource <- function(x)
    x$length

pGetElem <- function(x)
    UseMethod("pGetElem", x)
pGetElem.DataframeSource <- function(x)
    tm_parLapply(seq_len(x$length),
                 function(y) list(content = x$content[y, ], uri = NULL))
`[.DataframeSource` <- function(x, i, j, ...)
    x$content[i, j, ...]
`[[.DataframeSource` <- function(x, ...)
    x$content[[...]]
pGetElem.DirSource <- function(x)
    tm_parLapply(x$filelist,
                 function(f)
                     list(content = readContent(f, x$encoding, x$mode),
                          uri = paste0("file://", f)))
`[.DirSource` <- function(x, i, ...)
    x$filelist[i, ...]
`[[.DirSource` <- function(x, i, ...)
    x$filelist[[i, ...]]
pGetElem.URISource <- function(x)
    tm_parLapply(x$uri,
                 function(uri)
                     list(content = readContent(uri, x$encoding, x$mode),
                          uri = uri))
`[.URISource` <- function(x, i, ...)
    x$uri[i, ...]
`[[.URISource` <- function(x, i, ...)
    x$uri[[i, ...]]
pGetElem.VectorSource <- function(x)
    tm_parLapply(x$content, function(y) list(content = y, uri = NULL))
`[.VectorSource` <- function(x, i, ...)
    x$content[i, ...]
`[[.VectorSource` <- function(x, i, ...)
    x$content[[i, ...]]
pGetElem.ZipSource <- function(x)
    tm_parLapply(file.path(x$exdir, x$files),
                 function(f)
                     list(content = readContent(f, x$encoding, x$mode),
                          uri = paste0("file://", f)))

reader <- function(x)
    UseMethod("reader", x)
reader.SimpleSource <- function(x)
    x$reader

stepNext <- function(x)
    UseMethod("stepNext", x)
stepNext.SimpleSource <- function(x)
{
    x$position <- x$position + 1
    x
}
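## Example (not part of the package sources): the sources above are
## normally consumed through Corpus()/VCorpus(); a minimal sketch using
## an in-memory vector and the Ovid text files shipped with the package
## (mirroring the package vignette; the language tag is illustrative).
vc <- VCorpus(VectorSource(c("First document.", "Second document.")))

txt <- system.file("texts", "txt", package = "tm")
ovid <- VCorpus(DirSource(txt, encoding = "UTF-8"),
                readerControl = list(language = "lat"))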
## File: tm/R/complete.R

# Author: Ingo Feinerer

stemCompletion <-
function(x, dictionary,
         type = c("prevalent", "first", "longest", "none", "random",
                  "shortest"))
{
    if (inherits(dictionary, "Corpus"))
        dictionary <- unlist(lapply(dictionary, words))

    type <- match.arg(type)
    possibleCompletions <-
        lapply(x, function(w) grep(sprintf("^%s", w), dictionary,
                                   value = TRUE))
    switch(type,
           first = {
               setNames(sapply(possibleCompletions, "[", 1), x)
           },
           longest = {
               ordering <-
                   lapply(possibleCompletions,
                          function(x) order(nchar(x), decreasing = TRUE))
               possibleCompletions <-
                   mapply(function(x, id) x[id], possibleCompletions,
                          ordering, SIMPLIFY = FALSE)
               setNames(sapply(possibleCompletions, "[", 1), x)
           },
           none = {
               setNames(x, x)
           },
           prevalent = {
               possibleCompletions <-
                   lapply(possibleCompletions,
                          function(x) sort(table(x), decreasing = TRUE))
               n <- names(sapply(possibleCompletions, "[", 1))
               setNames(if (length(n)) n else rep_len(NA, length(x)), x)
           },
           random = {
               setNames(sapply(possibleCompletions, function(x) {
                   if (length(x)) sample(x, 1) else NA
               }), x)
           },
           shortest = {
               ordering <- lapply(possibleCompletions,
                                  function(x) order(nchar(x)))
               possibleCompletions <-
                   mapply(function(x, id) x[id], possibleCompletions,
                          ordering, SIMPLIFY = FALSE)
               setNames(sapply(possibleCompletions, "[", 1), x)
           }
           )
}
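## Example (not part of the package sources): a small sketch of
## stemCompletion() on made-up stems, with a plain character vector as
## the completion dictionary.
dict <- c("mining", "miners", "mine", "text", "texts")
stemCompletion(c("min", "text"), dictionary = dict, type = "prevalent")
stemCompletion(c("min", "text"), dictionary = dict, type = "shortest")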
## File: tm/R/RcppExports.R

# Generated by using Rcpp::compileAttributes() -> do not edit by hand
# Generator token: 10BE3573-1514-4C36-9D1C-5A225CD40393

tdm <- function(strings, remove_puncts, remove_digits, stopwords,
                dictionary, min_term_freq, max_term_freq, min_word_length,
                max_word_length) {
    .Call(`_tm_tdm`, strings, remove_puncts, remove_digits, stopwords,
          dictionary, min_term_freq, max_term_freq, min_word_length,
          max_word_length)
}

Boost_Tokenizer <- function(strings) {
    .Call(`_tm_Boost_Tokenizer`, strings)
}

## File: tm/R/transform.R

# Author: Ingo Feinerer

# Transformations

tm_map <- function(x, FUN, ...)
    UseMethod("tm_map", x)
tm_map.VCorpus <- function(x, FUN, ..., lazy = FALSE)
{
    # Lazy mapping
    if (lazy) {
        fun <- function(x) FUN(x, ...)
        if (is.null(x$lazy))
            x$lazy <- list(index = rep_len(TRUE, length(x)),
                           maps = list(fun))
        else
            x$lazy$maps <- c(x$lazy$maps, list(fun))
    } else
        x$content <- tm_parLapply(content(x), FUN, ...)
    x
}
tm_map.SimpleCorpus <- function(x, FUN, ...)
{
    if (inherits(FUN, "content_transformer"))
        FUN <- get("FUN", envir = environment(FUN))
    n <- names(content(x))
    x$content <- FUN(content(x), ...)
    if (length(content(x)) != length(n))
        warning("transformation drops documents")
    else
        names(x$content) <- n
    x
}
tm_map.PCorpus <- function(x, FUN, ...)
{
    db <- filehash::dbInit(x$dbcontrol[["dbName"]], x$dbcontrol[["dbType"]])
    for (i in seq_along(x))
        db[[x$content[[i]]]] <- FUN(x[[i]], ...)
    filehash::dbReorganize(db)
    x
}

# Materialize lazy mappings
materialize <- function(x, range = seq_along(x))
{
    if (!is.null(x$lazy)) {
        i <- (seq_along(x) %in% range) & x$lazy$index
        if (any(i)) {
            x$content[i] <-
                tm_parLapply(x$content[i],
                             function(d) tm_reduce(d, x$lazy$maps))
            x$lazy$index[i] <- FALSE
        }
        # Clean up if everything is materialized
        if (!any(x$lazy$index))
            x["lazy"] <- list(NULL)
    }
    x
}

tm_reduce <- function(x, tmFuns, ...)
    Reduce(function(f, ...) f(...), tmFuns, x, right = TRUE)

getTransformations <- function()
    c("removeNumbers", "removePunctuation", "removeWords", "stemDocument",
      "stripWhitespace")

content_transformer <- function(FUN)
{
    f <- function(x, ...) {
        content(x) <- FUN(content(x), ...)
        x
    }
    class(f) <- c("content_transformer", "function")
    f
}

removeNumbers <- function(x, ...)
    UseMethod("removeNumbers")
removeNumbers.character <- function(x, ucp = FALSE, ...)
{
    if (ucp)
        gsub("\\p{Nd}+", "", x, perl = TRUE)
    else
        .Call(`_tm_remove_chars`, x, 1L)
}
removeNumbers.PlainTextDocument <-
    content_transformer(removeNumbers.character)

removePunctuation <- function(x, ...)
    UseMethod("removePunctuation")
removePunctuation.character <-
function(x, preserve_intra_word_contractions = FALSE,
         preserve_intra_word_dashes = FALSE, ucp = FALSE, ...)
{
    # Assume there are no ASCII 0x01 (SOH) or ASCII 0x02 (STX) characters.
    if (preserve_intra_word_contractions)
        x <- gsub("(\\w)'(\\w)", "\\1\1\\2", x, perl = TRUE)
    if (preserve_intra_word_dashes)
        x <- gsub("(\\w)-(\\w)", "\\1\2\\2", x, perl = TRUE)
    if (ucp)
        x <- gsub("\\p{P}+", "", x, perl = TRUE)
    else
        x <- .Call(`_tm_remove_chars`, x, 0L)
    if (preserve_intra_word_contractions)
        x <- gsub("\1", "'", x, fixed = TRUE)
    if (preserve_intra_word_dashes)
        x <- gsub("\2", "-", x, fixed = TRUE)
    x
}
removePunctuation.PlainTextDocument <-
    content_transformer(removePunctuation.character)

removeWords <- function(x, words)
    UseMethod("removeWords", x)
# Improvements by Kurt Hornik
removeWords.character <- function(x, words)
    gsub(sprintf("(*UCP)\\b(%s)\\b",
                 paste(sort(words, decreasing = TRUE), collapse = "|")),
         "", x, perl = TRUE)
removeWords.PlainTextDocument <- content_transformer(removeWords.character)

stemDocument <- function(x, language = "english")
    UseMethod("stemDocument", x)
stemDocument.character <- function(x, language = "english")
{
    s <- unlist(lapply(x, function(line)
        paste(SnowballC::wordStem(words(line), as.character(language)),
              collapse = " ")))
    if (is.character(s)) s else ""
}
stemDocument.PlainTextDocument <- function(x, language = meta(x, "language"))
{
    language <- as.character(language)
    if (identical(language, "") || identical(language, character(0)) ||
        is.na(language))
        language <- "english"
    content_transformer(stemDocument.character)(x)
}

stripWhitespace <- function(x)
    UseMethod("stripWhitespace", x)
stripWhitespace.character <- function(x)
    gsub("[[:space:]]+", " ", x)
stripWhitespace.PlainTextDocument <-
    content_transformer(stripWhitespace.character)
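## Example (not part of the package sources): a sketch of how the
## transformations above are typically chained on a corpus; the toy
## documents are made up, and tolower is wrapped with
## content_transformer() so document content is modified in place.
vc <- VCorpus(VectorSource(c("One, two, THREE!", "  Four   5 six.  ")))
vc <- tm_map(vc, content_transformer(tolower))
vc <- tm_map(vc, removePunctuation)
vc <- tm_map(vc, removeNumbers)
vc <- tm_map(vc, stripWhitespace)
as.character(vc[[2]])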
## File: tm/R/matrix.R

## Authors: Ingo Feinerer, Kurt Hornik

TermDocumentMatrix_classes <-
    c("TermDocumentMatrix", "simple_triplet_matrix")
DocumentTermMatrix_classes <-
    c("DocumentTermMatrix", "simple_triplet_matrix")

.TermDocumentMatrix <- function(x, weighting)
{
    x <- as.simple_triplet_matrix(x)
    if (!is.null(dimnames(x)))
        names(dimnames(x)) <- c("Terms", "Docs")
    class(x) <- TermDocumentMatrix_classes

    if (is.null(weighting))
        weighting <- weightTf

    ##
    ## Note that if weighting is a weight function, it already needs to
    ## know whether we have a term-document or document-term matrix.
    ##
    ## Ideally we would require weighting to be a WeightFunction object
    ## or a character string of length 2.  But then
    ##   dtm <- DocumentTermMatrix(crude,
    ##                             control = list(weighting =
    ##                                                function(x)
    ##                                                    weightTfIdf(x, normalize = FALSE),
    ##                                            stopwords = TRUE))
    ## in example("DocumentTermMatrix") fails [because weightTfIdf() is
    ## a weight function and not a weight function generator ...]
    ## Hence, for now, instead of
    ##   if (inherits(weighting, "WeightFunction"))
    ##       x <- weighting(x)
    ## use
    if (is.function(weighting))
        x <- weighting(x)
    ## and hope for the best ...
    else if (is.character(weighting) && (length(weighting) == 2L))
        attr(x, "weighting") <- weighting

    x
}

.SimpleTripletMatrix <- function(i, j, v, terms, corpus)
{
    docs <- as.character(meta(corpus, "id", "local"))
    if (length(docs) != length(corpus)) {
        warning("invalid document identifiers")
        docs <- NULL
    }

    simple_triplet_matrix(i, j, v, nrow = length(terms),
                          ncol = length(corpus),
                          dimnames = list(Terms = terms, Docs = docs))
}

filter_global_bounds <- function(m, bounds)
{
    m <- as.simple_triplet_matrix(m)
    if (length(bounds) == 2L && is.numeric(bounds)) {
        rs <- row_sums(m > 0)
        m <- m[(rs >= bounds[1]) & (rs <= bounds[2]), ]
    }
    m
}

TermDocumentMatrix <- function(x, control = list())
    UseMethod("TermDocumentMatrix", x)

TermDocumentMatrix.SimpleCorpus <- function(x, control = list())
{
    stopifnot(is.list(control))

    if (any(unlist(lapply(control, is.function))))
        warning("custom functions are ignored")
    if (!is.null(control$tokenize) && !identical(control$tokenize, "Boost"))
        warning("custom tokenizer is ignored")

    txt <- content(x)

    ## Conversion to lower case
    if (is.null(control$tolower) || isTRUE(control$tolower))
        txt <- tolower(txt)

    ## Stopword filtering
    .stopwords <- if (isTRUE(control$stopwords))
        stopwords(meta(x, "language"))
    else if (is.character(control$stopwords))
        control$stopwords
    else
        character(0)

    .dictionary <- if (is.null(control$dictionary))
        character(0)
    else
        control$dictionary

    ## Ensure local bounds
    bl <- control$bounds$local
    min_term_freq <- if (length(bl) == 2L && is.numeric(bl) && bl[1] >= 0)
        bl[1]
    else
        0L
    max_term_freq <- if (length(bl) == 2L && is.numeric(bl) && bl[2] >= 0)
        min(bl[2], .Machine$integer.max)
    else
        .Machine$integer.max

    ## Filter out too short or too long terms
    wl <- control$wordLengths
    min_word_length <- if (is.numeric(wl[1]) && wl[1] >= 0)
        wl[1]
    else
        3L
    max_word_length <- if (is.numeric(wl[2]) && wl[2] >= 0)
        min(wl[2], .Machine$integer.max)
    else
        .Machine$integer.max

    m <- tdm(txt, isTRUE(control$removePunctuation),
             isTRUE(control$removeNumbers), .stopwords, .dictionary,
             as.integer(min_term_freq), as.integer(max_term_freq),
             as.integer(min_word_length), as.integer(max_word_length))
    Encoding(m$terms) <- "UTF-8"

    m <- .SimpleTripletMatrix(m$i, m$j, m$v, m$terms, x)

    ## Stemming
    ##
    ## Ideally tdm() could perform stemming as well but there is no easy
    ## way to access the SnowballC::wordStem() function from C++ (via
    ## Rcpp) without significant overhead (as SnowballC does not export
    ## its internal C functions).
    ##
    ## Stemming afterwards is still quite performant as we already have
    ## all terms.  However, there is some overhead involved as we need
    ## to recheck local bounds and word lengths.
    ##
    if (isTRUE(control$stemming)) {
        stems <- SnowballC::wordStem(m$dimnames$Terms, meta(x, "language"))
        ## Do as.factor(stems) "by hand" for performance reasons.
        uniqs <- sort(unique(stems))
        stems <- match(stems, uniqs)
        attributes(stems) <- list(levels = uniqs, class = "factor")
        m <- rollup(m, "Terms", stems)

        ## Recheck local bounds
        ## No need to check lower local bound as rollup aggregates
        ## frequencies
        m[m > max_term_freq] <- 0

        ## Recheck word lengths
        terms_length <- nchar(rownames(m))
        m <- m[min_word_length <= terms_length &
               terms_length <= max_word_length, ]
    }

    m <- filter_global_bounds(m, control$bounds$global)

    .TermDocumentMatrix(m, control$weighting)
}

TermDocumentMatrix.PCorpus <- TermDocumentMatrix.VCorpus <-
function(x, control = list())
{
    stopifnot(is.list(control))

    tflist <- tm_parLapply(unname(content(x)), termFreq, control)

    v <- unlist(tflist)
    i <- names(v)
    terms <- sort(unique(as.character(if (is.null(control$dictionary)) i
                                      else control$dictionary)))
    i <- match(i, terms)
    j <- rep.int(seq_along(x), lengths(tflist))

    m <- .SimpleTripletMatrix(i, j, as.numeric(v), terms, x)
    m <- filter_global_bounds(m, control$bounds$global)
    .TermDocumentMatrix(m, control$weighting)
}

TermDocumentMatrix.default <- function(x, control = list())
    TermDocumentMatrix(Corpus(VectorSource(x)), control)

DocumentTermMatrix <- function(x, control = list())
    t(TermDocumentMatrix(x, control))

as.TermDocumentMatrix <- function(x, ...)
    UseMethod("as.TermDocumentMatrix")
as.TermDocumentMatrix.TermDocumentMatrix <- function(x, ...)
    x
as.TermDocumentMatrix.DocumentTermMatrix <- function(x, ...)
    t(x)
as.TermDocumentMatrix.term_frequency <-
as.TermDocumentMatrix.textcnt <- function(x, ...)
{
    m <- simple_triplet_matrix(i = seq_along(x),
                               j = rep_len(1L, length(x)),
                               v = as.numeric(x),
                               nrow = length(x),
                               ncol = 1,
                               dimnames = list(Terms = names(x),
                                               Docs = NA_character_))
    .TermDocumentMatrix(m, weightTf)
}
as.TermDocumentMatrix.default <- function(x, weighting, ...)
    .TermDocumentMatrix(x, weighting)

as.DocumentTermMatrix <- function(x, ...)
    UseMethod("as.DocumentTermMatrix")
as.DocumentTermMatrix.DocumentTermMatrix <- function(x, ...)
    x
as.DocumentTermMatrix.TermDocumentMatrix <- function(x, ...)
    t(x)
as.DocumentTermMatrix.term_frequency <-
as.DocumentTermMatrix.textcnt <- function(x, ...)
    t(as.TermDocumentMatrix(x))
as.DocumentTermMatrix.default <- function(x, weighting, ...)
{
    x <- as.simple_triplet_matrix(x)
    t(.TermDocumentMatrix(t(x), weighting))
}

t.TermDocumentMatrix <- t.DocumentTermMatrix <- function(x)
{
    m <- NextMethod("t")
    attr(m, "weighting") <- attr(x, "weighting")
    class(m) <- if (inherits(x, "DocumentTermMatrix"))
        TermDocumentMatrix_classes
    else
        DocumentTermMatrix_classes
    m
}

termFreq <- function(doc, control = list())
{
    stopifnot(inherits(doc, "TextDocument") || is.character(doc),
              is.list(control))

    ## Tokenize the corpus
    .tokenize <- control$tokenize
    if (is.null(.tokenize) || identical(.tokenize, "words"))
        .tokenize <- words
    else if (identical(.tokenize, "Boost"))
        .tokenize <- Boost_tokenizer
    else if (identical(.tokenize, "MC"))
        .tokenize <- MC_tokenizer
    else if (identical(.tokenize, "scan"))
        .tokenize <- scan_tokenizer
    else if (is.Span_Tokenizer(.tokenize))
        .tokenize <- as.Token_Tokenizer(.tokenize)
    if (is.function(.tokenize))
        txt <- .tokenize(doc)
    else
        stop("invalid tokenizer")

    ## Conversion to lower case
    .tolower <- control$tolower
    if (is.null(.tolower) || isTRUE(.tolower))
        .tolower <- tolower
    if (is.function(.tolower))
        txt <- .tolower(txt)

    ## Punctuation removal
    .removePunctuation <- control$removePunctuation
    if (isTRUE(.removePunctuation))
        .removePunctuation <- removePunctuation
    else if (is.list(.removePunctuation))
        .removePunctuation <- function(x)
            do.call(removePunctuation,
                    c(list(x), control$removePunctuation))

    ## Number removal
    .removeNumbers <- control$removeNumbers
    if (isTRUE(.removeNumbers))
        .removeNumbers <- removeNumbers

    .language <- control$language
    if (inherits(doc, "TextDocument"))
        .language <- meta(doc, "language")
    if (is.null(.language))
        .language <- "en"

    ## Stopword filtering
    .stopwords <- control$stopwords
    if (isTRUE(.stopwords))
        .stopwords <- function(x)
            x[is.na(match(x, stopwords(.language)))]
    else if (is.character(.stopwords))
        .stopwords <- function(x)
            x[is.na(match(x, control$stopwords))]

    ## Stemming
    .stemming <- control$stemming
    if (isTRUE(.stemming))
        .stemming <- function(x) SnowballC::wordStem(x, .language)

    ## Default order for options which support reordering
    or <- c("removePunctuation", "removeNumbers", "stopwords", "stemming")

    ## Process control options in specified order
    nc <- names(control)
    n <- nc[!is.na(match(nc, or))]
    for (name in sprintf(".%s", c(n, setdiff(or, n)))) {
        g <- get(name)
        if (is.function(g))
            txt <- g(txt)
    }

    ## If dictionary is set tabulate against it
    dictionary <- control$dictionary
    tab <- .table(if (is.null(dictionary)) txt
                  else txt[!is.na(match(txt, dictionary))])

    ## Ensure local bounds
    bl <- control$bounds$local
    if (length(bl) == 2L && is.numeric(bl))
        tab <- tab[(tab >= bl[1]) & (tab <= bl[2]), drop = FALSE]

    ## Filter out too short or too long terms
    nc <- nchar(names(tab), type = "chars")
    wl <- control$wordLengths
    lb <- if (is.numeric(wl[1])) wl[1] else 3
    ub <- if (is.numeric(wl[2])) wl[2] else Inf
    tab <- tab[(nc >= lb) & (nc <= ub), drop = FALSE]

    class(tab) <- c("term_frequency", class(tab))
    tab
}
{ format <- c("term", "document") if (inherits(x, "DocumentTermMatrix")) format <- rev(format) writeLines(sprintf("<<%s (%ss: %d, %ss: %d)>>", class(x)[1], format[1L], nrow(x), format[2L], ncol(x))) writeLines(sprintf("Non-/sparse entries: %d/%.0f", length(x$v), prod(dim(x)) - length(x$v))) sparsity <- if (!prod(dim(x))) 100 else round( (1 - length(x$v) / prod(dim(x))) * 100) writeLines(sprintf("Sparsity : %s%%", sparsity)) writeLines(sprintf("Maximal term length: %s", max(nchar(Terms(x), type = "chars"), 0))) writeLines(sprintf("Weighting : %s (%s)", attr(x, "weighting")[1L], attr(x, "weighting")[2L])) invisible(x) } inspect.TermDocumentMatrix <- inspect.DocumentTermMatrix <- function(x) { print(x) cat("Sample :\n") print(as.matrix(sample.TermDocumentMatrix(x))) } `[.TermDocumentMatrix` <- `[.DocumentTermMatrix` <- function(x, i, j, ..., drop) { m <- NextMethod("[") attr(m, "weighting") <- attr(x, "weighting") class(m) <- if (inherits(x, "DocumentTermMatrix")) DocumentTermMatrix_classes else TermDocumentMatrix_classes m } `dimnames<-.DocumentTermMatrix` <- function(x, value) { x <- NextMethod("dimnames<-") dnx <- x$dimnames if (!is.null(dnx)) names(dnx) <- c("Docs", "Terms") x$dimnames <- dnx x } `dimnames<-.TermDocumentMatrix` <- function(x, value) { x <- NextMethod("dimnames<-") dnx <- x$dimnames if (!is.null(dnx)) names(dnx) <- c("Terms", "Docs") x$dimnames <- dnx x } nDocs <- function(x) UseMethod("nDocs") nTerms <- function(x) UseMethod("nTerms") nDocs.DocumentTermMatrix <- nTerms.TermDocumentMatrix <- function(x) x$nrow nDocs.TermDocumentMatrix <- nTerms.DocumentTermMatrix <- function(x) x$ncol Docs <- function(x) UseMethod("Docs") Terms <- function(x) UseMethod("Terms") Docs.DocumentTermMatrix <- Terms.TermDocumentMatrix <- function(x) { s <- x$dimnames[[1L]] if (is.null(s)) s <- rep.int(NA_character_, x$nrow) s } Docs.TermDocumentMatrix <- Terms.DocumentTermMatrix <- function(x) { s <- x$dimnames[[2L]] if (is.null(s)) s <- rep.int(NA_character_, x$ncol) s } c.term_frequency <- function(..., recursive = FALSE) { do.call("c", lapply(list(...), as.TermDocumentMatrix)) } c.TermDocumentMatrix <- function(..., recursive = FALSE) { m <- lapply(list(...), as.TermDocumentMatrix) if (length(m) == 1L) return(m[[1L]]) weighting <- attr(m[[1L]], "weighting") allTermsNonUnique <- unlist(lapply(m, function(x) Terms(x)[x$i])) allTerms <- unique(allTermsNonUnique) allDocs <- unlist(lapply(m, Docs)) cs <- cumsum(lapply(m, nDocs)) cs <- c(0, cs[-length(cs)]) j <- lapply(m, "[[", "j") m <- simple_triplet_matrix(i = match(allTermsNonUnique, allTerms), j = unlist(j) + rep.int(cs, lengths(j)), v = unlist(lapply(m, "[[", "v")), nrow = length(allTerms), ncol = length(allDocs), dimnames = list(Terms = allTerms, Docs = allDocs)) ## ## - We assume that all arguments have the same weighting ## - Even if all matrices have the same input weighting it might be ## necessary to take additional steps (e.g., normalization for tf-idf or ## check for (0,1)-range for binary tf) ## .TermDocumentMatrix(m, weighting) } c.DocumentTermMatrix <- function(..., recursive = FALSE) { t(do.call("c", lapply(list(...), as.TermDocumentMatrix))) } findFreqTerms <- function(x, lowfreq = 0, highfreq = Inf) { stopifnot(inherits(x, c("DocumentTermMatrix", "TermDocumentMatrix")), is.numeric(lowfreq), is.numeric(highfreq)) if (inherits(x, "DocumentTermMatrix")) x <- t(x) rs <- row_sums(x) names(rs[rs >= lowfreq & rs <= highfreq]) } findAssocs <- function(x, terms, corlimit) UseMethod("findAssocs", x) findAssocs.TermDocumentMatrix <- 
findAssocs.TermDocumentMatrix <- function(x, terms, corlimit)
    findAssocs(t(x), terms, corlimit)
findAssocs.DocumentTermMatrix <- function(x, terms, corlimit)
{
    stopifnot(is.character(terms), is.numeric(corlimit), corlimit >= 0,
              corlimit <= 1)

    j <- match(unique(terms), Terms(x), nomatch = 0L)
    suppressWarnings(
        findAssocs(crossapply_simple_triplet_matrix(x[, j], x[, -j], cor),
                   terms, rep_len(corlimit, length(terms))))
}
findAssocs.matrix <- function(x, terms, corlimit)
{
    stopifnot(is.numeric(x))

    i <- match(terms, rownames(x), nomatch = 0L)
    names(i) <- terms

    Map(function(i, cl) {
        xi <- x[i, ]
        t <- sort(round(xi[which(xi >= cl)], 2), TRUE)
        if (!length(t))
            names(t) <- NULL
        t
    }, i, corlimit)
}

removeSparseTerms <- function(x, sparse)
{
    stopifnot(inherits(x, c("DocumentTermMatrix", "TermDocumentMatrix")),
              is.numeric(sparse), sparse > 0, sparse < 1)

    m <- if (inherits(x, "DocumentTermMatrix")) t(x) else x
    t <- table(m$i) > m$ncol * (1 - sparse)
    termIndex <- as.numeric(names(t[t]))
    if (inherits(x, "DocumentTermMatrix"))
        x[, termIndex]
    else
        x[termIndex, ]
}

sample.TermDocumentMatrix <- function(x, size = 10)
{
    stopifnot(inherits(x, c("DocumentTermMatrix", "TermDocumentMatrix")),
              is.numeric(size), size >= 0)

    if (length(x$v) == 0L)
        return(x)

    m <- if (inherits(x, "DocumentTermMatrix")) t(x) else x
    terms <- sort(names(sort(row_sums(m), decreasing = TRUE)
                        [0:min(size, nTerms(m))]))
    docs <- sort(names(sort(col_sums(m), decreasing = TRUE)
                       [0:min(size, nDocs(m))]))
    if (inherits(x, "DocumentTermMatrix"))
        x[docs, terms]
    else
        x[terms, docs]
}

CategorizedDocumentTermMatrix <- function(x, c)
{
    if (inherits(x, "TermDocumentMatrix"))
        x <- t(x)
    else if (!inherits(x, "DocumentTermMatrix"))
        stop("wrong class")

    if (length(c) != nDocs(x))
        stop("invalid category ids")

    attr(x, "Category") <- c
    class(x) <- c("CategorizedDocumentTermMatrix",
                  DocumentTermMatrix_classes)
    x
}

findMostFreqTerms <- function(x, n = 6L, ...)
    UseMethod("findMostFreqTerms")
findMostFreqTerms.term_frequency <- function(x, n = 6L, ...)
{
    y <- x[order(x, decreasing = TRUE)[seq_len(n)]]
    y[y > 0]
}
findMostFreqTerms.DocumentTermMatrix <- function(x, n = 6L, INDEX = NULL, ...)
{
    terms <- Terms(x)

    if (!is.null(INDEX))
        x <- rollup(x, 1L, INDEX)

    f <- factor(x$i, seq_len(x$nrow))
    js <- split(x$j, f)
    vs <- split(x$v, f)

    y <- Map(function(j, v, n) {
        p <- order(v, decreasing = TRUE)[seq_len(n)]
        v <- v[p]
        names(v) <- terms[j[p]]
        v
    }, js, vs, pmin(lengths(vs), n))
    names(y) <- x$dimnames[[1L]]
    y
}
findMostFreqTerms.TermDocumentMatrix <- function(x, n = 6L, INDEX = NULL, ...)
{
    terms <- Terms(x)

    if (!is.null(INDEX))
        x <- rollup(x, 2L, INDEX)

    f <- factor(x$j, seq_len(x$ncol))
    is <- split(x$i, f)
    vs <- split(x$v, f)

    y <- Map(function(i, v, n) {
        p <- order(v, decreasing = TRUE)[seq_len(n)]
        v <- v[p]
        names(v) <- terms[i[p]]
        v
    }, is, vs, pmin(lengths(vs), n))
    names(y) <- x$dimnames[[2L]]
    y
}
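## Example (not part of the package sources): a sketch tying the
## constructors above together on the bundled crude corpus; the control
## options mirror the ones handled in TermDocumentMatrix.SimpleCorpus()
## and termFreq().
data("crude")
tdm <- TermDocumentMatrix(crude,
                          control = list(removePunctuation = TRUE,
                                         removeNumbers = TRUE,
                                         stopwords = TRUE))
inspect(tdm)
findFreqTerms(tdm, lowfreq = 10)
findAssocs(tdm, "oil", 0.75)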
## File: tm/R/corpus.R

# Author: Ingo Feinerer

Corpus <- function(x, readerControl = list(reader = reader(x),
                                           language = "en"))
{
    stopifnot(inherits(x, "Source"))

    readerControl <- prepareReader(readerControl, reader(x))

    if ( (inherits(x, "DataframeSource") ||
          inherits(x, "DirSource") ||
          inherits(x, "VectorSource") ) &&
        identical(readerControl$reader, reader(x)))
        SimpleCorpus(x, readerControl)
    else
        VCorpus(x, readerControl)
}

PCorpus <- function(x,
                    readerControl = list(reader = reader(x),
                                         language = "en"),
                    dbControl = list(dbName = "", dbType = "DB1"))
{
    stopifnot(inherits(x, "Source"))

    readerControl <- prepareReader(readerControl, reader(x))

    if (!filehash::dbCreate(dbControl$dbName, dbControl$dbType))
        stop("error in creating database")
    db <- filehash::dbInit(dbControl$dbName, dbControl$dbType)

    x <- open(x)
    tdl <- vector("list", length(x))
    counter <- 1
    while (!eoi(x)) {
        x <- stepNext(x)
        elem <- getElem(x)
        doc <- readerControl$reader(elem, readerControl$language,
                                    as.character(counter))
        filehash::dbInsert(db, meta(doc, "id"), doc)
        tdl[[counter]] <- meta(doc, "id")
        counter <- counter + 1
    }
    x <- close(x)

    cmeta <- CorpusMeta()
    dmeta <- data.frame(row.names = seq_along(tdl))

    ## Check if metadata retrieval is supported
    for (cl in class(x)) {
        if (is.function(getS3method("getMeta", cl, TRUE))) {
            m <- getMeta(x)
            if (!is.null(m$cmeta))
                cmeta <- m$cmeta
            if (!is.null(m$dmeta))
                dmeta <- m$dmeta
            break
        }
    }

    p <- list(content = tdl, meta = cmeta, dmeta = dmeta,
              dbcontrol = dbControl)
    class(p) <- c("PCorpus", "Corpus")
    p
}

SimpleCorpus <- function(x, control = list(language = "en"))
{
    stopifnot(inherits(x, "Source"))

    if (!is.null(control$reader) && !identical(control$reader, reader(x)))
        warning("custom reader is ignored")

    content <- if (inherits(x, "VectorSource")) {
        if (is.character(x$content)) x$content else as.character(x$content)
    } else if (inherits(x, "DirSource")) {
        setNames(as.character(
            lapply(x$filelist,
                   function(f) paste(readContent(f, x$encoding, "text"),
                                     collapse = "\n"))
        ), basename(x$filelist))
    } else if (inherits(x, "DataframeSource")) {
        setNames(as.character(x$content[, "text"]), x$content[, "doc_id"])
    } else
        stop("unsupported source type")

    dmeta <- if (inherits(x, "DataframeSource"))
        x$content[, is.na(match(names(x$content), c("doc_id", "text"))),
                  drop = FALSE]
    else
        data.frame(row.names = seq_along(x))

    s <- list(content = content,
              meta = CorpusMeta(language = control$language),
              dmeta = dmeta)
    class(s) <- c("SimpleCorpus", "Corpus")
    s
}

VCorpus <- function(x, readerControl = list(reader = reader(x),
                                            language = "en"))
{
    stopifnot(inherits(x, "Source"))

    readerControl <- prepareReader(readerControl, reader(x))

    x <- open(x)
    tdl <- vector("list", length(x))
    ## Check for parallel element access
    found <- FALSE
    for (cl in class(x)) {
        if (is.function(getS3method("pGetElem", cl, TRUE))) {
            tdl <- mapply(function(elem, id)
                              readerControl$reader(elem,
                                                   readerControl$language,
                                                   id),
                          pGetElem(x),
                          id = as.character(seq_along(x)),
                          SIMPLIFY = FALSE)
            found <- TRUE
            break
        }
    }
    if (!found) {
        counter <- 1
        while (!eoi(x)) {
            x <- stepNext(x)
            elem <- getElem(x)
            doc <- readerControl$reader(elem, readerControl$language,
                                        as.character(counter))
            tdl[[counter]] <- doc
            counter <- counter + 1
        }
    }
    x <- close(x)

    cmeta <- CorpusMeta()
    dmeta <- data.frame(row.names = seq_along(tdl))

    ## Check if metadata retrieval is supported
    for (cl in class(x)) {
        if (is.function(getS3method("getMeta", cl, TRUE))) {
            m <- getMeta(x)
            if (!is.null(m$cmeta))
                cmeta <- m$cmeta
            if (!is.null(m$dmeta))
                dmeta <- m$dmeta
            break
        }
    }

    v <- as.VCorpus(tdl)
    v$meta <- cmeta
    v$dmeta <- dmeta
    v
}

`[.PCorpus` <- `[.SimpleCorpus` <- function(x, i)
{
    if (!missing(i)) {
        x$content <- x$content[i]
        x$dmeta <- x$dmeta[i, , drop = FALSE]
    }
    x
}

`[.VCorpus` <- function(x, i)
{
    if (!missing(i)) {
        x$content <- x$content[i]
        x$dmeta <- x$dmeta[i, , drop = FALSE]
        if (!is.null(x$lazy))
            x$lazy$index <- x$lazy$index[i]
    }
    x
}

.map_name_index <- function(x, i)
{
    if (is.character(i))
        match(i, meta(x, "id", "local"))
    else
        i
}

`[[.PCorpus` <- function(x, i)
{
    i <- .map_name_index(x, i)
    db <- filehash::dbInit(x$dbcontrol[["dbName"]], x$dbcontrol[["dbType"]])
    filehash::dbFetch(db, x$content[[i]])
}
`[[.SimpleCorpus` <- function(x, i)
{
    i <- .map_name_index(x, i)
    n <- names(x$content)
    PlainTextDocument(x$content[[i]],
                      id = if (is.null(n)) i else n[i],
                      language = meta(x, "language"))
}
`[[.VCorpus` <- function(x, i)
{
    i <- .map_name_index(x, i)
    if (!is.null(x$lazy))
        .Call(`_tm_copyCorpus`, x, materialize(x, i))
    x$content[[i]]
}

`[[<-.SimpleCorpus` <- function(x, i, value)
{
    x$content[i] <- paste0(as.character(value), collapse = "\n")
    x
}
`[[<-.PCorpus` <- function(x, i, value)
{
    i <- .map_name_index(x, i)
    db <- filehash::dbInit(x$dbcontrol[["dbName"]], x$dbcontrol[["dbType"]])
    db[[x$content[[i]]]] <- value
    x
}
`[[<-.VCorpus` <- function(x, i, value)
{
    i <- .map_name_index(x, i)
    # Mark new objects as inactive for lazy mapping
    if (!is.null(x$lazy))
        x$lazy$index[i] <- FALSE
    x$content[[i]] <- value
    x
}

as.list.PCorpus <- as.list.VCorpus <- function(x, ...)
    setNames(content(x),
             as.character(lapply(content(x), meta, "id")))
as.list.SimpleCorpus <- function(x, ...)
    as.list(content(x))

as.VCorpus <- function(x)
    UseMethod("as.VCorpus")
as.VCorpus.VCorpus <- identity
as.VCorpus.list <- function(x)
{
    v <- list(content = x,
              meta = CorpusMeta(),
              dmeta = data.frame(row.names = seq_along(x)))
    class(v) <- c("VCorpus", "Corpus")
    v
}

outer_union <- function(x, y, ...)
{
    if (nrow(x) > 0L)
        x[, setdiff(names(y), names(x))] <- NA
    if (nrow(y) > 0L)
        y[, setdiff(names(x), names(y))] <- NA
    res <- rbind(x, y)
    if (ncol(res) == 0L)
        res <- data.frame(row.names = seq_len(nrow(x) + nrow(y)))
    res
}
c.VCorpus <- function(..., recursive = FALSE)
{
    args <- list(...)
    x <- args[[1L]]

    if (length(args) == 1L)
        return(x)

    if (!all(unlist(lapply(args, inherits, class(x)))))
        stop("not all arguments are of the same corpus type")

    v <- list(content = do.call("c", lapply(args, content)),
              meta = CorpusMeta(meta = do.call("c",
                  lapply(args, function(a) meta(a, type = "corpus")))),
              dmeta = Reduce(outer_union, lapply(args, meta)))
    class(v) <- c("VCorpus", "Corpus")
    v
}

content.VCorpus <- function(x)
{
    if (!is.null(x$lazy))
        .Call(`_tm_copyCorpus`, x, materialize(x))
    x$content
}
content.SimpleCorpus <- function(x)
    x$content
content.PCorpus <- function(x)
{
    db <- filehash::dbInit(x$dbcontrol[["dbName"]], x$dbcontrol[["dbType"]])
    filehash::dbMultiFetch(db, unlist(x$content))
}

inspect <- function(x)
    UseMethod("inspect", x)
inspect.PCorpus <- inspect.SimpleCorpus <- inspect.VCorpus <- function(x)
{
    print(x)
    cat("\n")
    print(noquote(content(x)))
    invisible(x)
}

length.PCorpus <- length.SimpleCorpus <- length.VCorpus <- function(x)
    length(x$content)

names.PCorpus <- names.SimpleCorpus <- names.VCorpus <- function(x)
    as.character(meta(x, "id", "local"))
`names<-.PCorpus` <- `names<-.VCorpus` <- function(x, value)
{
    meta(x, "id", "local") <- as.character(value)
    x
}

format.PCorpus <- format.SimpleCorpus <- format.VCorpus <- function(x, ...)
{
    c(sprintf("<<%s>>", class(x)[1L]),
      sprintf("Metadata: corpus specific: %d, document level (indexed): %d",
              length(meta(x, type = "corpus")),
              ncol(meta(x, type = "indexed"))),
      sprintf("Content: documents: %d", length(x)))
}

writeCorpus <- function(x, path = ".", filenames = NULL)
{
    filenames <- file.path(path,
                           if (is.null(filenames))
                               sprintf("%s.txt",
                                       as.character(meta(x, "id", "local")))
                           else filenames)
    stopifnot(length(x) == length(filenames))

    mapply(function(doc, f) writeLines(as.character(doc), f), x, filenames)

    invisible(x)
}
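## Example (not part of the package sources): a short sketch of the
## corpus classes defined above.  Corpus() picks SimpleCorpus for the
## supported sources, while VCorpus() always builds a volatile corpus;
## document-level metadata from a DataframeSource ends up in meta().
df <- data.frame(doc_id = c("doc_1", "doc_2"),
                 text = c("First document.", "Second document."),
                 topic = c("a", "b"),
                 stringsAsFactors = FALSE)
sc <- Corpus(DataframeSource(df))    # SimpleCorpus
vc <- VCorpus(DataframeSource(df))   # VCorpus
inspect(sc)
meta(sc)                 # indexed (document level) metadata: 'topic'
as.character(sc[[1]])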
"turkish" = c("tr", "tur") ) # Map IETF language tags to languages used by the Snowball stemmer project # http://en.wikipedia.org/wiki/IETF_language_tag map_IETF_Snowball <- local({ codes <- unlist(IETF_Snowball_map, use.names = FALSE) names <- rep.int(names(IETF_Snowball_map), lengths(IETF_Snowball_map)) function(code) { code <- as.character(code) if (identical(code, "") || identical(code, character(0)) || is.na(code)) return("porter") names[charmatch(gsub("-.*", "", code), codes)] } }) tm/R/pdftools.R0000644000175100001440000001046312776627444013115 0ustar hornikuserspdf_info_via_xpdf <- function(file, options = NULL) { outfile <- tempfile("pdfinfo") on.exit(unlink(outfile)) status <- system2("pdfinfo", c(options, shQuote(normalizePath(file))), stdout = outfile) ## Could check the status ... ## This does not work ... ## info <- as.list(read.dcf(outfile)[1L, ]) tags <- c("Title", "Subject", "Keywords", "Author", "Creator", "Producer", "CreationDate", "ModDate", "Tagged", "Form", "Pages", "Encrypted", "Page size", "File size", "Optimized", "PDF version") re <- sprintf("^(%s)", paste(sprintf("%-16s", sprintf("%s:", tags)), collapse = "|")) lines <- readLines(outfile, warn = FALSE) ind <- grepl(re, lines) tags <- sub(": *", "", substring(lines[ind], 1L, 16L)) info <- split(sub(re, "", lines), cumsum(ind)) names(info) <- tags fmt <- "%a %b %d %X %Y" if (!is.null(d <- info$CreationDate)) info$CreationDate <- strptime(d, fmt) if (!is.null(d <- info$ModDate)) info$ModDate <- strptime(d, fmt) if (!is.null(p <- info$Pages)) info$Pages <- as.integer(p) info } pdf_info_via_gs <- function(file) { file <- normalizePath(file) gs_cmd <- tools::find_gs_cmd() out <- system2(gs_cmd, c("-dNODISPLAY -q", sprintf("-sFile=%s", shQuote(file)), system.file("ghostscript", "pdf_info.ps", package = "tm")), stdout = TRUE) out <- out[cumsum(out == "") == 2L][-1L] val <- sub("^[^:]+:[[:space:]]*", "", out) names(val) <- sub(":.*", "", out) val <- as.list(val) if (!is.null(d <- val$CreationDate)) val$CreationDate <- PDF_Date_to_POSIXt(d) if (!is.null(d <- val$ModDate)) val$ModDate <- PDF_Date_to_POSIXt(d) val } PDF_Date_to_POSIXt <- function(s) { ## Strip optional 'D:' prefix. s <- sub("^D:", "", s) ## Strip apostrophes in offset spec. s <- gsub("'", "", s) if (nchar(s) <= 14L) { s <- sprintf("%s%s", s, substring(" 0101000000", nchar(s) + 1L, 14L)) strptime(s, "%Y%m%d%H%M%S") } else if (substring(s, 15L, 15L) == "Z") { strptime(substring(s, 1L, 14L), "%Y%m%d%H%M%S") } else { strptime(s, "%Y%m%d%H%M%S%z") } } pdf_text_via_gs <- function(file) { file <- normalizePath(file) gs_cmd <- tools::find_gs_cmd() tf <- tempfile("pdf") on.exit(unlink(tf)) ## The current mechanism is first converting PDF to Postscript using ## the ps2write device, and then extract text using the ps2ascii.ps ## program. This fails for some files (e.g., ## /data/rsync/PKGS/AlleleRetain/inst/doc/AlleleRetain_User_Guide.pdf ## which Ghostscript also fails to render. Note that rendering via ## gv works "fine": but this uses the pswrite device which produces ## bitmap (from which no text can be extracted, of course). ## Using the txtwrite device is simply too unstable: e.g., ## gs -dBATCH -dNOPAUSE -sDEVICE=txtwrite -dQUIET -sOutputFile=- \ ## /data/rsync/PKGS/AlleleRetain/inst/doc/AlleleRetain_User_Guide.pdf ## keeps segfaulting. ## An additional nuisance is that there seems no simple way to ## detect a ps2ascii.ps failure. 
## Finally, note that we currently use -DSIMPLE: without this, more ## information would be made available, but require post-processing. ## Step 1. Convert PDF to Postscript. res <- system2(gs_cmd, c("-q -dNOPAUSE -dBATCH -P- -dSAFER -sDEVICE=ps2write", sprintf("-sOutputFile=%s", tf), "-c save pop -f", shQuote(file))) ## Step 2. Extract text. txt <- system2(gs_cmd, c("-q -dNODISPLAY -P- -dSAFER -dDELAYBIND -dWRITESYSTEMDICT -dSIMPLE", "-c save -f ps2ascii.ps", tf, "-c quit"), stdout = TRUE) ## Argh. How can we catch errors? ## The return values are always 0 ... if (any(grepl("Error handled by opdfread.ps", txt))) { stop(paste(c("Ghostscript failed, with output:", txt), collapse = "\n")) } strsplit(paste(txt, collapse = "\n"), "\f")[[1L]] } tm/R/score.R0000644000175100001440000000143013023472115012343 0ustar hornikuserstm_term_score <- function(x, terms, FUN) UseMethod("tm_term_score", x) tm_term_score.term_frequency <- function(x, terms, FUN = function(x) sum(x, na.rm = TRUE)) FUN(x[match(terms, names(x), nomatch = 0L)]) tm_term_score.PlainTextDocument <- function(x, terms, FUN = function(x) sum(x, na.rm = TRUE)) tm_term_score(termFreq(x, control = list(tolower = FALSE, removePunctuation = TRUE, wordLengths = c(1, Inf))), terms, FUN) tm_term_score.TermDocumentMatrix <- function(x, terms, FUN = col_sums) FUN(x[match(terms, Terms(x), nomatch = 0L), ]) tm_term_score.DocumentTermMatrix <- function(x, terms, FUN = row_sums) FUN(x[, match(terms, Terms(x), nomatch = 0L)]) tm/R/meta.R0000644000175100001440000001067113110235234012161 0ustar hornikusers# Author: Ingo Feinerer TextDocumentMeta <- function(author, datetimestamp, description, heading, id, language, origin, ..., meta = NULL) { if (is.null(meta)) meta <- list(author = author, datetimestamp = datetimestamp, description = description, heading = heading, id = id, language = language, origin = origin, ...) stopifnot(is.list(meta)) if (!is.null(meta$author) && !inherits(meta$author, "person")) meta$author <- as.character(meta$author) if (!is.null(meta$datetimestamp) && !inherits(meta$datetimestamp, "POSIXt")) meta$datetimestamp <- as.character(meta$datetimestamp) if (!is.null(meta$description)) meta$description <- as.character(meta$description) if (!is.null(meta$heading)) meta$heading <- as.character(meta$heading) if (!is.null(meta$id)) meta$id <- as.character(meta$id) if (!is.null(meta$language)) meta$language <- as.character(meta$language) if (!is.null(meta$origin)) meta$origin <- as.character(meta$origin) class(meta) <- "TextDocumentMeta" meta } print.TextDocumentMeta <- function(x, ...) { cat(sprintf(" %s: %s", format(names(x), justify = "left"), sapply(x, as.character)), sep = "\n") invisible(x) } CorpusMeta <- function(..., meta = NULL) { if (is.null(meta)) meta <- list(...) stopifnot(is.list(meta)) class(meta) <- "CorpusMeta" meta } meta.SimpleCorpus <- function(x, tag = NULL, type = c("indexed", "corpus"), ...) { if (identical(tag, "id")) { n <- names(content(x)) return(if (is.null(n)) as.character(seq_along(x)) else n) } if (!is.null(tag) && missing(type)) type <- if (tag %in% names(x$meta)) "corpus" else "indexed" type <- match.arg(type) if (identical(type, "indexed")) if (is.null(tag)) x$dmeta else x$dmeta[tag] else if (identical(type, "corpus")) if (is.null(tag)) x$meta else x$meta[[tag]] else stop("invalid type") } meta.VCorpus <- meta.PCorpus <- function(x, tag = NULL, type = c("indexed", "corpus", "local"), ...) 
{ if (!is.null(tag) && missing(type)) { type <- if (tag %in% names(x$dmeta)) "indexed" else if (tag %in% names(x$meta)) "corpus" else "local" } type <- match.arg(type) if (identical(type, "indexed")) if (is.null(tag)) x$dmeta else x$dmeta[tag] else if (identical(type, "corpus")) if (is.null(tag)) x$meta else x$meta[[tag]] else if (identical(type, "local")) lapply(x, meta, tag) else stop("invalid type") } `meta<-.SimpleCorpus` <- function(x, tag, type = c("indexed", "corpus"), ..., value) { type <- match.arg(type) if (identical(type, "indexed")) x$dmeta[, tag] <- value else if (type == "corpus") x$meta[[tag]] <- value else stop("invalid type") x } `meta<-.VCorpus` <- `meta<-.PCorpus` <- function(x, tag, type = c("indexed", "corpus", "local"), ..., value) { type <- match.arg(type) if (identical(type, "indexed")) x$dmeta[, tag] <- value else if (type == "corpus") x$meta[[tag]] <- value else if (identical(type, "local")) { for (i in seq_along(x)) meta(x[[i]], tag) <- value[i] } else stop("invalid type") x } # Simple Dublin Core to tm metadata mapping # http://en.wikipedia.org/wiki/Dublin_core#Simple_Dublin_Core Dublin_Core_tm_map <- list("contributor" = "contributor", "coverage" = "coverage", "creator" = "author", "date" = "datetimestamp", "description" = "description", "format" = "format", "identifier" = "id", "language" = "language", "publisher" = "publisher", "relation" = "relation", "rights" = "rights", "source" = "source", # or better "origin"? "subject" = "subject", "title" = "heading", "type" = "type" ) DublinCore <- function(x, tag = NULL) { tmm <- unlist(Dublin_Core_tm_map, use.names = FALSE) dcm <- names(Dublin_Core_tm_map) if (is.null(tag)) { m <- lapply(tmm, function(t) meta(x, t)) names(m) <- dcm class(m) <- "TextDocumentMeta" m } else meta(x, tmm[charmatch(tolower(tag), dcm)]) } `DublinCore<-` <- function(x, tag, value) { tmm <- unlist(Dublin_Core_tm_map, use.names = FALSE) dcm <- names(Dublin_Core_tm_map) meta(x, tmm[charmatch(tolower(tag), dcm)]) <- value x } tm/R/weight.R0000644000175100001440000001211512776627444012546 0ustar hornikusers# Author: Ingo Feinerer WeightFunction <- function(x, name, acronym) { class(x) <- c("WeightFunction", "function") attr(x, "name") <- name attr(x, "acronym") <- acronym x } # Actual TermDocumentMatrix weighting functions weightTf <- WeightFunction(function(m) { attr(m, "weighting") <- c("term frequency", "tf") m }, "term frequency", "tf") weightTfIdf <- WeightFunction(function(m, normalize = TRUE) { isDTM <- inherits(m, "DocumentTermMatrix") if (isDTM) m <- t(m) if (normalize) { cs <- col_sums(m) if (any(cs == 0)) warning("empty document(s): ", paste(Docs(m)[cs == 0], collapse = " ")) names(cs) <- seq_len(nDocs(m)) m$v <- m$v / cs[m$j] } rs <- row_sums(m > 0) if (any(rs == 0)) warning("unreferenced term(s): ", paste(Terms(m)[rs == 0], collapse = " ")) lnrs <- log2(nDocs(m) / rs) lnrs[!is.finite(lnrs)] <- 0 m <- m * lnrs attr(m, "weighting") <- c(sprintf("%s%s", "term frequency - inverse document frequency", if (normalize) " (normalized)" else ""), "tf-idf") if (isDTM) t(m) else m }, "term frequency - inverse document frequency", "tf-idf") weightSMART <- WeightFunction(function(m, spec = "nnn", control = list()) { stopifnot(inherits(m, c("DocumentTermMatrix", "TermDocumentMatrix")), is.character(spec), nchar(spec) == 3L, is.list(control)) term_frequency <- match.arg(substr(spec, 1L, 1L), c("n", "l", "a", "b", "L")) document_frequency <- match.arg(substr(spec, 2L, 2L), c("n", "t", "p")) normalization <- match.arg(substr(spec, 3L, 3L), c("n", 
"c", "u", "b")) isDTM <- inherits(m, "DocumentTermMatrix") if (isDTM) m <- t(m) if (normalization == "b") { ## Need to compute the character lengths of the documents ## before starting the weighting. charlengths <- tapply(nchar(Terms(m))[m$i] * m$v, m$j, sum) } ## Term frequency m$v <- switch(term_frequency, ## natural n = m$v, ## logarithm l = 1 + log2(m$v), ## augmented a = { s <- tapply(m$v, m$j, max) 0.5 + (0.5 * m$v) / s[as.character(m$j)] }, ## boolean b = as.numeric(m$v > 0), ## log ave L = { s <- tapply(m$v, m$j, mean) ((1 + log2(m$v)) / (1 + log2(s[as.character(m$j)]))) }) ## Document frequency rs <- row_sums(m > 0) if (any(rs == 0)) warning("unreferenced term(s): ", paste(Terms(m)[rs == 0], collapse = " ")) df <- switch(document_frequency, ## natural n = 1, ## idf t = log2(nDocs(m) / rs), ## prob idf p = max(0, log2((nDocs(m) - rs) / rs))) df[!is.finite(df)] <- 0 ## Normalization cs <- col_sums(m) if (any(cs == 0)) warning("empty document(s): ", paste(Docs(m)[cs == 0], collapse = " ")) norm <- switch(normalization, ## none n = rep.int(1, nDocs(m)), ## cosine c = sqrt(col_sums(m ^ 2)), ## pivoted unique u = { if (is.null(pivot <- control$pivot)) stop("invalid control argument pivot") if (is.null(slope <- control$slope)) stop("invalid control argument slope") (slope * sqrt(col_sums(m ^ 2)) + (1 - slope) * pivot) }, ## byte size b = { if (is.null(alpha <- control$alpha)) stop("invalid control argument alpha") norm <- double(nDocs(m)) norm[match(names(charlengths), seq_along(norm))] <- charlengths ^ alpha norm }) m <- m * df m$v <- m$v / norm[m$j] attr(m, "weighting") <- c(paste("SMART", spec), "SMART") if (isDTM) t(m) else m }, "SMART", "SMART") weightBin <- WeightFunction(function(m) { m$v <- rep_len(1L, length(m$v)) attr(m, "weighting") <- c("binary", "bin") m }, "binary", "bin") tm/R/reader.R0000644000175100001440000001630513177046106012510 0ustar hornikusers## Author: Ingo Feinerer ## Readers FunctionGenerator <- function(x) { class(x) <- c("FunctionGenerator", "function") x } getReaders <- function() c("readDataframe", "readDOC", "readPDF", "readPlain", "readRCV1", "readRCV1asPlain", "readReut21578XML", "readReut21578XMLasPlain", "readTagged", "readXML") prepareReader <- function(readerControl, reader = NULL, ...) { if (is.null(readerControl$reader)) readerControl$reader <- reader if (inherits(readerControl$reader, "FunctionGenerator")) readerControl$reader <- readerControl$reader(...) 
if (is.null(readerControl$language)) readerControl$language <- "en" readerControl } processURI <- function(uri) { uri <- as.character(uri) if (identical(substr(uri, 1, 7), "file://")) uri <- substr(uri, 8, nchar(uri)) uri } readDataframe <- function(elem, language, id) { PlainTextDocument(elem$content[, "text"], id = elem$content[, "doc_id"], language = language) } # readDOC needs antiword installed to be able to extract the text readDOC <- function(engine = c("antiword", "executable"), AntiwordOptions = "") { stopifnot(is.character(engine), is.character(AntiwordOptions)) engine <- match.arg(engine) antiword <- switch(engine, antiword = antiword::antiword, executable = function(x) system2("antiword", c(AntiwordOptions, shQuote(normalizePath(x))), stdout = TRUE)) if (!is.function(antiword)) stop("invalid function for DOC extraction") function(elem, language, id) { uri <- processURI(elem$uri) content <- antiword(uri) PlainTextDocument(content, id = basename(elem$uri), language = language) } } class(readDOC) <- c("FunctionGenerator", "function") readPDF <- function(engine = c("pdftools", "xpdf", "Rpoppler", "ghostscript", "Rcampdf", "custom"), control = list(info = NULL, text = NULL)) { stopifnot(is.character(engine), is.list(control)) engine <- match.arg(engine) pdf_info <- switch(engine, pdftools = function(x) { i <- pdftools::pdf_info(x) c(i$keys, list(CreationDate = i$created)) }, xpdf = function(x) pdf_info_via_xpdf(x, control$info), Rpoppler = Rpoppler::PDF_info, ghostscript = pdf_info_via_gs, Rcampdf = Rcampdf::pdf_info, custom = control$info) pdf_text <- switch(engine, pdftools = pdftools::pdf_text, xpdf = function(x) system2("pdftotext", c(control$text, shQuote(x), "-"), stdout = TRUE), Rpoppler = Rpoppler::PDF_text, ghostscript = pdf_text_via_gs, Rcampdf = Rcampdf::pdf_text, custom = control$text) if (!is.function(pdf_info) || !is.function(pdf_text)) stop("invalid function for PDF extraction") function(elem, language, id) { uri <- processURI(elem$uri) meta <- pdf_info(uri) content <- pdf_text(uri) PlainTextDocument(content, meta$Author, meta$CreationDate, meta$Subject, meta$Title, basename(elem$uri), language, meta$Creator) } } class(readPDF) <- c("FunctionGenerator", "function") readPlain <- function(elem, language, id) { if (!is.null(elem$uri)) id <- basename(elem$uri) PlainTextDocument(elem$content, id = id, language = language) } readXML <- function(spec, doc) { stopifnot(is.list(spec), inherits(doc, "TextDocument")) function(elem, language, id) { content <- elem$content node <- if(inherits(content, "xml_node")) content else if(is.character(content)) read_xml(paste(elem$content, collapse = "\n")) else read_xml(content) content(doc) <- if ("content" %in% names(spec)) .xml_content(node, spec[["content"]]) else node for (n in setdiff(names(spec), "content")) meta(doc, n) <- .xml_content(node, spec[[n]]) if (!is.null(elem$uri)) id <- basename(elem$uri) if (!length(meta(doc, "id"))) meta(doc, "id") <- as.character(id) if (!length(meta(doc, "language"))) meta(doc, "language") <- as.character(language) doc } } class(readXML) <- c("FunctionGenerator", "function") RCV1Spec <- list(author = list("unevaluated", ""), datetimestamp = list("function", function(node) as.POSIXlt(xml_text(xml_find_all(node, "@date")), tz = "GMT")), description = list("unevaluated", ""), heading = list("node", "title"), id = list("node", "@itemid"), origin = list("unevaluated", "Reuters Corpus Volume 1"), publisher = list("node", "metadata/dc[@element='dc.publisher']/@value"), topics = list("node", 
"metadata/codes[@class='bip:topics:1.0']/code/@code"), industries = list("node", "metadata/codes[@class='bip:industries:1.0']/code/@code"), countries = list("node", "metadata/codes[@class='bip:countries:1.0']/code/@code")) readRCV1 <- readXML(spec = RCV1Spec, doc = XMLTextDocument()) readRCV1asPlain <- readXML(spec = c(RCV1Spec, list(content = list("node", "text"))), doc = PlainTextDocument()) Reut21578XMLSpec <- list(author = list("node", "TEXT/AUTHOR"), datetimestamp = list("function", function(node) strptime(xml_text(xml_find_all(node, "DATE")), format = "%d-%B-%Y %H:%M:%S", tz = "GMT")), description = list("unevaluated", ""), heading = list("node", "TEXT/TITLE"), id = list("node", "@NEWID"), topics = list("node", "@TOPICS"), lewissplit = list("node", "@LEWISSPLIT"), cgisplit = list("node", "@CGISPLIT"), oldid = list("node", "@OLDID"), origin = list("unevaluated", "Reuters-21578 XML"), topics_cat = list("node", "TOPICS/D"), places = list("node", "PLACES/D"), people = list("node", "PEOPLE/D"), orgs = list("node", "ORGS/D"), exchanges = list("node", "EXCHANGES/D")) readReut21578XML <- readXML(spec = Reut21578XMLSpec, doc = XMLTextDocument()) readReut21578XMLasPlain <- readXML(spec = c(Reut21578XMLSpec, list(content = list("node", "TEXT/BODY"))), doc = PlainTextDocument()) readTagged <- function(...) { args <- list(...) function(elem, language, id) { if (!is.null(elem$content)) { con <- textConnection(elem$content) on.exit(close(con)) } else con <- elem$uri if (!is.null(elem$uri)) id <- basename(elem$uri) a <- c(list(con = con, meta = list(id = id, language = language)), args) do.call(TaggedTextDocument, a) } } class(readTagged) <- c("FunctionGenerator", "function") tm/R/foreign.R0000644000175100001440000000313513023471774012677 0ustar hornikusers## Readers and writers (eventually?) for foreign document-term matrix ## format files. ## CLUTO: as we do not know the weighting, there is no high-level DTM ## reader. If the weighting is weightTf, one can do ## as.DocumentTermMatrix(read_stm_CLUTO(file), weightTf) ## as CLUTO always has rows as documents and cols as terms. ## MC: a simple reader for now, could certainly use more effort to name ## the weightings more properly. read_dtm_MC <- function(file, scalingtype = NULL) { m <- read_stm_MC(file, scalingtype) s <- attr(m, "scalingtype") as.DocumentTermMatrix(m, rep.int(s, 2L)) } ## ## To write a decent writer we would need to be able to turn weighting ## information into MC scaling information, which may not even be ## possible. Alternatively, we could always use 'txx', or use this in ## case we cannot map ... ## ## Data files for the Blei et al LDA and CTM codes are in a List of List ## format, with lines ## n j1: x1 j2: x2 ... jn: xn ## (see http://www.cs.princeton.edu/~blei/lda-c/). ## As they are used for topic models, they *always* contain raw term ## frequencies. read_dtm_Blei_et_al <- function(file, vocab = NULL) { x <- scan(file, character(), quiet = TRUE) ind <- grepl(":", x, fixed = TRUE) counts <- x[!ind] i <- rep.int(seq_along(counts), counts) x <- strsplit(x[ind], ":", fixed = TRUE) j <- as.integer(unlist(lapply(x, `[`, 1L))) + 1L x <- as.numeric(unlist(lapply(x, `[`, 2L))) m <- simple_triplet_matrix(i, j, x) if (!is.null(vocab)) colnames(m) <- readLines(vocab) as.DocumentTermMatrix(m, weightTf) } tm/R/hpc.R0000644000175100001440000000075414716601054012020 0ustar hornikuserstm_parLapply_engine <- local({ val <- NULL ## Could do some checking on new if given: should inherit from ## "cluster" or have formals (X, FUN, ...). 
function(new) { if (missing(new)) val else val <<- new } }) tm_parLapply <- function(X, FUN, ...) { engine <- tm_parLapply_engine() if (inherits(engine, "cluster")) parLapply(engine, X, FUN, ...) else if (is.function(engine)) engine(X, FUN, ...) else lapply(X, FUN, ...) } tm/R/stopwords.R0000644000175100001440000000103213034740255013277 0ustar hornikusersstopwords <- { function(kind = "en") { kind <- as.character(kind) resolved <- map_IETF_Snowball(kind) base <- if (is.na(resolved)) kind else if (identical(resolved, "porter")) "english" else resolved s <- system.file("stopwords", paste0(base, ".dat"), package = "tm") if (identical(s, "")) stop(paste("no stopwords available for '", base, "'", sep = "")) readLines(s, encoding = "UTF-8") } } tm/R/tokenizer.R0000644000175100001440000000202613307435131013246 0ustar hornikusersgetTokenizers <- function() c("Boost_tokenizer", "MC_tokenizer", "scan_tokenizer") ## Boost_tokenizer <- Token_Tokenizer(function(x) { y <- Boost_Tokenizer(as.character(x)) Encoding(y) <- "UTF-8" y }) ## MC_tokenizer <- Token_Tokenizer(function(x) { x <- as.character(x) if(!length(x)) return(character()) ASCII_letters <- "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" id <- sprintf("[%s]+", ASCII_letters) http <- sprintf("(https?://%s(\\.%s)*)", id, id) email <- sprintf("(%s@%s(\\.%s)*)", id, id, id) http_or_email <- sprintf("%s|%s", http, email) y <- c(unlist(regmatches(x, gregexpr(http_or_email, x)), FALSE, FALSE), unlist(strsplit(gsub(http_or_email, "", x), sprintf("[^%s]", ASCII_letters)), FALSE, FALSE)) y[nzchar(y)] }) scan_tokenizer <- Token_Tokenizer(function(x) { .Call(`_tm_scan`, as.character(x), 0L) }) tm/vignettes/0000755000175100001440000000000014755301616012730 5ustar hornikuserstm/vignettes/extensions.Rnw0000644000175100001440000002727113177024075015626 0ustar hornikusers\documentclass[a4paper]{article} \usepackage[margin=2cm]{geometry} \usepackage[round]{natbib} \usepackage{url} \newcommand{\acronym}[1]{\textsc{#1}} \newcommand{\pkg}[1]{{\normalfont\fontseries{b}\selectfont #1}} \newcommand{\proglang}[1]{\textsf{#1}} \let\code\texttt %% \VignetteIndexEntry{Extensions} \begin{document} <>= library("tm") library("xml2") @ \title{Extensions\\How to Handle Custom File Formats} \author{Ingo Feinerer} \maketitle \section*{Introduction} The possibility to handle custom file formats is a substantial feature in any modern text mining infrastructure. \pkg{tm} has been designed aware of this aspect from the beginning on, and has modular components which allow for extensions. A general explanation of \pkg{tm}'s extension mechanism is described by~\citet[Sec.~3.3]{Feinerer_etal_2008}, with an updated description as follows. \section*{Sources} A source abstracts input locations and provides uniform methods for access. Each source must provide implementations for following interface functions: \begin{description} \item[close()] closes the source and returns it, \item[eoi()] returns \code{TRUE} if the end of input of the source is reached, \item[getElem()] fetches the element at the current position, \item[length()] gives the number of elements, \item[open()] opens the source and returns it, \item[reader()] returns a default reader for processing elements, \item[pGetElem()] (optional) retrieves all elements in parallel at once, and \item[stepNext()] increases the position in the source to the next element. 
\end{description}
Retrieved elements must be encapsulated in a list with the named components
\code{content} holding the document and \code{uri} pointing to the origin of
the document (e.g., a file path or a \acronym{URL}; \code{NULL} if not
applicable or unavailable).

Custom sources are required to inherit from the virtual base class
\code{Source} and typically do so by extending the functionality provided by
the simple reference implementation \code{SimpleSource}. E.g., a simple source
which accepts an \proglang{R} vector as input could be defined as
<<>>=
VecSource <- function(x)
    SimpleSource(length = length(x), content = as.character(x),
                 class = "VecSource")
@
which overrides a few defaults (see \code{?SimpleSource} for the defaults) and
stores the vector in the \code{content} component.

The functions \code{close()}, \code{eoi()}, \code{open()}, and
\code{stepNext()} already have reasonable default methods for the
\code{SimpleSource} class: the identity function for \code{open()} and
\code{close()}, incrementing a position counter for \code{stepNext()}, and
comparing the current position with the number of available elements as
claimed by \code{length()} for \code{eoi()}, respectively. So we only need
custom methods for element access:
<<>>=
getElem.VecSource <- function(x)
    list(content = x$content[x$position], uri = NULL)
pGetElem.VecSource <- function(x)
    lapply(x$content, function(y) list(content = y, uri = NULL))
@

\section*{Readers}

Readers are functions for extracting textual content and metadata out of
elements delivered by a source and for constructing a text document. Each
reader must accept the following arguments in its signature:
\begin{description}
  \item[elem] a list with the named components \code{content} and \code{uri}
    (as delivered by a source via \code{getElem()} or \code{pGetElem()}),
  \item[language] a string giving the language, and
  \item[id] a character giving a unique identifier for the created text
    document.
\end{description}
The element \code{elem} is typically provided by a source whereas the language
and the identifier are normally provided by a corpus constructor (for the case
that \code{elem\$content} does not give information on these two essential
items).

In case a reader expects configuration arguments we can use a function
generator. A function generator is indicated by inheriting from class
\code{FunctionGenerator} and \code{function}. It allows us to process
additional arguments, store them in an environment, return a reader function
with the well-defined signature described above, and still be able to access
the additional arguments via lexical scoping. All corpus constructors in
package \pkg{tm} check the reader function for being a function generator and
if so apply it to yield the reader with the expected signature. E.g., the
reader function \code{readPlain()} is defined as
<<>>=
readPlain <- function(elem, language, id)
    PlainTextDocument(elem$content, id = id, language = language)
@
For examples on readers using the function generator please have a look at
\code{?readDOC} or \code{?readPDF}.

However, for many cases it is not necessary to define each detailed aspect of
how to extend \pkg{tm}. Typical examples are \acronym{XML} files which are
very common but can be handled rather easily via standard conforming
\acronym{XML} parsers. The aim of the remainder of this document is to give an
overview of how simpler, more user-friendly forms of extension mechanisms can
be applied in \pkg{tm}.
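Before turning to these simpler mechanisms, the pieces defined so far can be
seen working together. The following sketch (not evaluated here) builds a
corpus directly from the custom vector source; it assumes that the default
reader provided by \code{SimpleSource}, i.e., \code{readPlain()}, is used:
<<eval=FALSE>>=
vs <- VecSource(c("First text.", "Second text."))
corpus <- VCorpus(vs)
as.character(corpus[[2]])
@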
\section*{Custom Data Formats} A general situation is that you have gathered together some information into a tabular data structure (like a data frame or a list matrix) that suffices to describe documents in a corpus. However, you do not have a distinct file format because you extracted the information out of various resources, e.g., as delivered by \code{readtext()} in package \pkg{readtext}. Now you want to use your information to build a corpus which is recognized by \pkg{tm}. We assume that your information is put together in a data frame. E.g., consider the following example: <>= df <- data.frame(doc_id = c("doc 1" , "doc 2" , "doc 3" ), text = c("content 1", "content 2", "content 3"), title = c("title 1" , "title 2" , "title 3" ), authors = c("author 1" , "author 2" , "author 3" ), topics = c("topic 1" , "topic 2" , "topic 3" ), stringsAsFactors = FALSE) @ We want to map the data frame rows to the relevant entries of a text document. An entry \code{text} in the mapping will be matched to fill the actual content of the text document, \code{doc\_id} will be used as document ID, all other fields will be used as metadata tags. So we can construct a corpus out of the data frame: <<>>= (corpus <- Corpus(DataframeSource(df))) corpus[[1]] meta(corpus[[1]]) @ \section*{Custom XML Sources} Many modern file formats already come in \acronym{XML} format which allows to extract information with any \acronym{XML} conforming parser, e.g., as implemented in \proglang{R} by the \pkg{xml2} package. Now assume we have some custom \acronym{XML} format which we want to access with \pkg{tm}. Then a viable way is to create a custom \acronym{XML} source which can be configured with only a few commands. E.g., have a look at the following example: <>= custom.xml <- system.file("texts", "custom.xml", package = "tm") print(readLines(custom.xml), quote = FALSE) @ As you see there is a top-level tag stating that there is a corpus, and several document tags below. In fact, this structure is very common in \acronym{XML} files found in text mining applications (e.g., both the Reuters-21578 and the Reuters Corpus Volume 1 data sets follow this general scheme). In \pkg{tm} we expect a source to deliver self-contained blocks of information to a reader function, each block containing all information necessary such that the reader can construct a (subclass of a) \code{TextDocument} from it. The \code{XMLSource()} function can now be used to construct a custom \acronym{XML} source. It has three arguments: \begin{description} \item[x] a character giving a uniform resource identifier, \item[parser] a function accepting an \acronym{XML} document (as delivered by \code{read\_xml()} in package \pkg{xml2}) as input and returning a \acronym{XML} elements/nodes (each element/node will then be delivered to the reader as a self-contained block), \item[reader] a reader function capable of turning \acronym{XML} elements/nodes as returned by the parser into a subclass of \code{TextDocument}. \end{description} E.g., a custom source which can cope with our custom \acronym{XML} format could be: <>= mySource <- function(x) XMLSource(x, parser = xml2::xml_children, reader = myXMLReader) @ As you notice in this example we also provide a custom reader function (\code{myXMLReader}). See the next section for details. \section*{Custom XML Readers} As we saw in the previous section we often need a custom reader function to extract information out of \acronym{XML} chunks (typically as delivered by some source). 
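Such a chunk is exactly what the parser configured above,
\code{xml2::xml\_children()}, delivers: one node per document entry below the
top-level tag. A quick sanity check (a sketch, not evaluated here) could count
these chunks:
<<eval=FALSE>>=
doc <- xml2::read_xml(custom.xml)
length(xml2::xml_children(doc))
@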
Fortunately, \pkg{tm} provides an easy way to define custom \acronym{XML}
reader functions. All you need to do is to provide a so-called
\emph{specification}. Let us start with an example which defines a reader
function for the file format from the previous section:
<<>>=
myXMLReader <- readXML(
    spec = list(author = list("node", "writer"),
                content = list("node", "description"),
                datetimestamp = list("function",
                                     function(x) as.POSIXlt(Sys.time(), tz = "GMT")),
                description = list("node", "@short"),
                heading = list("node", "caption"),
                id = list("function", function(x) tempfile()),
                origin = list("unevaluated", "My private bibliography"),
                type = list("node", "type")),
    doc = PlainTextDocument())
@

Formally, \code{readXML()} is the relevant function which constructs a reader.
The customization is done via the first argument \code{spec}; the second
provides an empty instance of the document which should be returned (augmented
with the information extracted out of the \acronym{XML} chunks).

The specification must consist of a named list of lists, each containing two
entries. The constructed reader will map each list entry to the content or a
metadatum of the text document as specified by the named list entry. Valid
names include \code{content} to access the document's content, and character
strings which are mapped to metadata entries. The first entry of each inner
list describes the type of the second, and the second is the specification
entry itself. Valid combinations are:
\begin{description}
  \item[\code{type = "node", spec = "XPathExpression"}] the XPath (1.0)
    expression \code{spec} extracts information out of an \acronym{XML} node
    (as seen for \code{author}, \code{content}, \code{description},
    \code{heading}, and \code{type} in our example specification).
  \item[\code{type = "function", spec = function(doc) \ldots}] the function
    \code{spec} is called, passing over the \acronym{XML} document (as
    delivered by \code{read\_xml()} from package \pkg{xml2}) as the first
    argument (as seen for \code{datetimestamp} and \code{id}). As you notice
    in our example, nobody forces us to actually use the passed-over document;
    instead we can do anything we want (e.g., create a unique character vector
    via \code{tempfile()} to obtain a unique identification string).
  \item[\code{type = "unevaluated", spec = "String"}] the character vector
    \code{spec} is returned without modification (e.g., \code{origin} in our
    specification).
\end{description}

Now that we have all we need to cope with our custom file format, we can apply
the source and reader function at any place in \pkg{tm} where a source or
reader is expected, respectively.
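Before plugging everything into a corpus constructor, the reader can also be
exercised on a single element. The following sketch (not evaluated here)
mimics what the source would deliver by wrapping one parsed node in an
\code{elem} list:
<<eval=FALSE>>=
node <- xml2::xml_children(xml2::read_xml(custom.xml))[[1]]
elem <- list(content = node, uri = NULL)
myXMLReader(elem, language = "en", id = "doc_1")
@
Applied through a corpus constructor, the same machinery is invoked
automatically for every element of the source.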
E.g., <<>>= corpus <- VCorpus(mySource(custom.xml)) @ constructs a corpus out of the information in our \acronym{XML} file: <<>>= corpus[[1]] meta(corpus[[1]]) @ \bibliographystyle{abbrvnat} \bibliography{references} \end{document} tm/vignettes/tm.Rnw0000644000175100001440000003354414656640247014057 0ustar hornikusers\documentclass[a4paper]{article} \usepackage[margin=2cm]{geometry} \usepackage[utf8]{inputenc} \usepackage[round]{natbib} \usepackage{url} \newcommand{\acronym}[1]{\textsc{#1}} \newcommand{\class}[1]{\mbox{\textsf{#1}}} \newcommand{\code}[1]{\mbox{\texttt{#1}}} \newcommand{\pkg}[1]{{\normalfont\fontseries{b}\selectfont #1}} \newcommand{\proglang}[1]{\textsf{#1}} %% \VignetteIndexEntry{Introduction to the tm Package} %% \VignetteDepends{SnowballC} \begin{document} <>= library("tm") data("crude") @ \title{Introduction to the \pkg{tm} Package\\Text Mining in \proglang{R}} \author{Ingo Feinerer} \maketitle \section*{Introduction} This vignette gives a short introduction to text mining in \proglang{R} utilizing the text mining framework provided by the \pkg{tm} package. We present methods for data import, corpus handling, preprocessing, metadata management, and creation of term-document matrices. Our focus is on the main aspects of getting started with text mining in \proglang{R}---an in-depth description of the text mining infrastructure offered by \pkg{tm} was published in the \emph{Journal of Statistical Software}~\citep{Feinerer_etal_2008}. An introductory article on text mining in \proglang{R} was published in \emph{R News}~\citep{Rnews:Feinerer:2008}. \section*{Data Import} The main structure for managing documents in \pkg{tm} is a so-called \class{Corpus}, representing a collection of text documents. A corpus is an abstract concept, and there can exist several implementations in parallel. The default implementation is the so-called \class{VCorpus} (short for \emph{Volatile Corpus}) which realizes a semantics as known from most \proglang{R} objects: corpora are \proglang{R} objects held fully in memory. We denote this as volatile since once the \proglang{R} object is destroyed, the whole corpus is gone. Such a volatile corpus can be created via the constructor \code{VCorpus(x, readerControl)}. Another implementation is the \class{PCorpus} which implements a \emph{Permanent Corpus} semantics, i.e., the documents are physically stored outside of \proglang{R} (e.g., in a database), corresponding \proglang{R} objects are basically only pointers to external structures, and changes to the underlying corpus are reflected to all \proglang{R} objects associated with it. Compared to the volatile corpus the corpus encapsulated by a permanent corpus object is not destroyed if the corresponding \proglang{R} object is released. Within the corpus constructor, \code{x} must be a \class{Source} object which abstracts the input location. \pkg{tm} provides a set of predefined sources, e.g., \class{DirSource}, \class{VectorSource}, or \class{DataframeSource}, which handle a directory, a vector interpreting each component as document, or data frame like structures (like \acronym{CSV} files), respectively. Except \class{DirSource}, which is designed solely for directories on a file system, and \class{VectorSource}, which only accepts (character) vectors, most other implemented sources can take connections as input (a character string is interpreted as file path). \code{getSources()} lists available sources, and users can create their own sources. 
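For instance, the sources shipped with the package (and, analogously, the
available readers discussed below) can be listed directly; a small sketch:
<<eval=FALSE>>=
getSources()
getReaders()
@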
The second argument \code{readerControl} of the corpus constructor has to be a list with the named components \code{reader} and \code{language}. The first component \code{reader} constructs a text document from elements delivered by a source. The \pkg{tm} package ships with several readers (e.g., \code{readPlain()}, \code{readPDF()}, \code{readDOC()}, \ldots). See \code{getReaders()} for an up-to-date list of available readers. Each source has a default reader which can be overridden. E.g., for \code{DirSource} the default just reads in the input files and interprets their content as text. Finally, the second component \code{language} sets the texts' language (preferably using \acronym{ISO} 639-2 codes). In case of a permanent corpus, a third argument \code{dbControl} has to be a list with the named components \code{dbName} giving the filename holding the sourced out objects (i.e., the database), and \code{dbType} holding a valid database type as supported by package \pkg{filehash}. Activated database support reduces the memory demand, however, access gets slower since each operation is limited by the hard disk's read and write capabilities. So e.g., plain text files in the directory \code{txt} containing Latin (\code{lat}) texts by the Roman poet \emph{Ovid} can be read in with following code: <>= txt <- system.file("texts", "txt", package = "tm") (ovid <- VCorpus(DirSource(txt, encoding = "UTF-8"), readerControl = list(language = "lat"))) @ For simple examples \code{VectorSource} is quite useful, as it can create a corpus from character vectors, e.g.: <>= docs <- c("This is a text.", "This another one.") VCorpus(VectorSource(docs)) @ Finally we create a corpus for some Reuters documents as example for later use: <>= reut21578 <- system.file("texts", "crude", package = "tm") reuters <- VCorpus(DirSource(reut21578, mode = "binary"), readerControl = list(reader = readReut21578XMLasPlain)) @ \section*{Data Export} For the case you have created a corpus via manipulating other objects in \proglang{R}, thus do not have the texts already stored on a hard disk, and want to save the text documents to disk, you can simply use \code{writeCorpus()} <>= writeCorpus(ovid) @ which writes a character representation of the documents in a corpus to multiple files on disk. \section*{Inspecting Corpora} Custom \code{print()} methods are available which hide the raw amount of information (consider a corpus could consist of several thousand documents, like a database). \code{print()} gives a concise overview whereas more details are displayed with \code{inspect()}. <<>>= inspect(ovid[1:2]) @ Individual documents can be accessed via \code{[[}, either via the position in the corpus, or via their identifier. <>= meta(ovid[[2]], "id") identical(ovid[[2]], ovid[["ovid_2.txt"]]) @ A character representation of a document is available via \code{as.character()} which is also used when inspecting a document: <>= inspect(ovid[[2]]) lapply(ovid[1:2], as.character) @ \section*{Transformations} Once we have a corpus we typically want to modify the documents in it, e.g., stemming, stopword removal, et cetera. In \pkg{tm}, all this functionality is subsumed into the concept of a \emph{transformation}. Transformations are done via the \code{tm\_map()} function which applies (maps) a function to all elements of the corpus. Basically, all transformations work on single text documents and \code{tm\_map()} just applies them to all documents in a corpus. 
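Custom transformations fit into the same scheme. As a small sketch (not
evaluated here; it uses \code{content\_transformer()}, which is explained
below, to wrap a plain string manipulation from base \proglang{R}), digits
could be stripped from every document as follows:
<<eval=FALSE>>=
removeDigits <- content_transformer(function(x) gsub("[[:digit:]]+", "", x))
reuters <- tm_map(reuters, removeDigits)
@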
\subsection*{Eliminating Extra Whitespace} Extra whitespace is eliminated by: <<>>= reuters <- tm_map(reuters, stripWhitespace) @ \subsection*{Convert to Lower Case} Conversion to lower case by: <<>>= reuters <- tm_map(reuters, content_transformer(tolower)) @ We can use arbitrary character processing functions as transformations as long as the function returns a text document. In this case we use \code{content\_transformer()} which provides a convenience wrapper to access and set the content of a document. Consequently most text manipulation functions from base \proglang{R} can directly be used with this wrapper. This works for \code{tolower()} as used here but also e.g.\ for \code{gsub()} which comes quite handy for a broad range of text manipulation tasks. \subsection*{Remove Stopwords} Removal of stopwords by: <>= reuters <- tm_map(reuters, removeWords, stopwords("english")) @ \subsection*{Stemming} Stemming is done by: <>= tm_map(reuters, stemDocument) @ \section*{Filters} Often it is of special interest to filter out documents satisfying given properties. For this purpose the function \code{tm\_filter} is designed. It is possible to write custom filter functions which get applied to each document in the corpus. Alternatively, we can create indices based on selections and subset the corpus with them. E.g., the following statement filters out those documents having an \code{ID} equal to \code{"237"} and the string \code{"INDONESIA SEEN AT CROSSROADS OVER ECONOMIC CHANGE"} as their heading. <<>>= idx <- meta(reuters, "id") == '237' & meta(reuters, "heading") == 'INDONESIA SEEN AT CROSSROADS OVER ECONOMIC CHANGE' reuters[idx] @ \section*{Metadata Management} Metadata is used to annotate text documents or whole corpora with additional information. The easiest way to accomplish this with \pkg{tm} is to use the \code{meta()} function. A text document has a few predefined attributes like \code{author} but can be extended with an arbitrary number of additional user-defined metadata tags. These additional metadata tags are individually attached to a single text document. From a corpus perspective these metadata attachments are locally stored together with each individual text document. Alternatively to \code{meta()} the function \code{DublinCore()} provides a full mapping between Simple Dublin Core metadata and \pkg{tm} metadata structures and can be similarly used to get and set metadata information for text documents, e.g.: <>= DublinCore(crude[[1]], "Creator") <- "Ano Nymous" meta(crude[[1]]) @ For corpora the story is a bit more sophisticated. Corpora in \pkg{tm} have two types of metadata: one is the metadata on the corpus level (\code{corpus}), the other is the metadata related to the individual documents (\code{indexed}) in form of a data frame. The latter is often done for performance reasons (hence the named \code{indexed} for indexing) or because the metadata has an own entity but still relates directly to individual text documents, e.g., a classification result; the classifications directly relate to the documents but the set of classification levels forms an own entity. 
Both cases can be handled with \code{meta()}: <<>>= meta(crude, tag = "test", type = "corpus") <- "test meta" meta(crude, type = "corpus") meta(crude, "foo") <- letters[1:20] meta(crude) @ \section*{Standard Operators and Functions} Many standard operators and functions (\code{[}, \code{[<-}, \code{[[}, \code{[[<-}, \code{c()}, \code{lapply()}) are available for corpora with semantics similar to standard \proglang{R} routines. E.g., \code{c()} concatenates two (or more) corpora. Applied to several text documents it returns a corpus. The metadata is automatically updated, if corpora are concatenated (i.e., merged). \section*{Creating Term-Document Matrices} A common approach in text mining is to create a term-document matrix from a corpus. In the \pkg{tm} package the classes \class{TermDocumentMatrix} and \class{DocumentTermMatrix} (depending on whether you want terms as rows and documents as columns, or vice versa) employ sparse matrices for corpora. Inspecting a term-document matrix displays a sample, whereas \code{as.matrix()} yields the full matrix in dense format (which can be very memory consuming for large matrices). <<>>= dtm <- DocumentTermMatrix(reuters) inspect(dtm) @ \section*{Operations on Term-Document Matrices} Besides the fact that on this matrix a huge amount of \proglang{R} functions (like clustering, classifications, etc.) can be applied, this package brings some shortcuts. Imagine we want to find those terms that occur at least five times, then we can use the \code{findFreqTerms()} function: <<>>= findFreqTerms(dtm, 5) @ Or we want to find associations (i.e., terms which correlate) with at least $0.8$ correlation for the term \code{opec}, then we use \code{findAssocs()}: <<>>= findAssocs(dtm, "opec", 0.8) @ Term-document matrices tend to get very big already for normal sized data sets. Therefore we provide a method to remove \emph{sparse} terms, i.e., terms occurring only in very few documents. Normally, this reduces the matrix dramatically without losing significant relations inherent to the matrix: <<>>= inspect(removeSparseTerms(dtm, 0.4)) @ This function call removes those terms which have at least a 40 percentage of sparse (i.e., terms occurring 0 times in a document) elements. \section*{Dictionary} A dictionary is a (multi-)set of strings. It is often used to denote relevant terms in text mining. We represent a dictionary with a character vector which may be passed to the \code{DocumentTermMatrix()} constructor as a control argument. Then the created matrix is tabulated against the dictionary, i.e., only terms from the dictionary appear in the matrix. This allows to restrict the dimension of the matrix a priori and to focus on specific terms for distinct text mining contexts, e.g., <<>>= inspect(DocumentTermMatrix(reuters, list(dictionary = c("prices", "crude", "oil")))) @ \section*{Performance} Often you do not need all the generality, modularity and full range of features offered by \pkg{tm} as this sometimes comes at the price of performance. \class{SimpleCorpus} provides a corpus which is optimized for the most common usage scenario: importing plain texts from files in a directory or directly from a vector in \proglang{R}, preprocessing and transforming the texts, and finally exporting them to a term-document matrix. The aim is to boost performance and minimize memory pressure. It loads all documents into memory, and is designed for medium-sized to large data sets. 
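A sketch of its use, mirroring the Ovid example from above (assuming the
constructor accepts a source and a language setting via its \code{control}
argument; not evaluated here):
<<eval=FALSE>>=
SimpleCorpus(DirSource(txt, encoding = "UTF-8"),
             control = list(language = "lat"))
@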
However, it operates only under the following contraints: \begin{itemize} \item only \code{DirSource} and \code{VectorSource} are supported, \item no custom readers, i.e., each document is read in and stored as plain text (as a string, i.e., a character vector of length one), \item transformations applied via \code{tm\_map} must be able to process strings and return strings, \item no lazy transformations in \code{tm\_map}, \item no meta data for individual documents (i.e., no \code{"local"} in \code{meta()}). \end{itemize} \bibliographystyle{abbrvnat} \bibliography{references} \end{document} tm/vignettes/references.bib0000644000175100001440000000131311704521032015510 0ustar hornikusers@Article{Feinerer_etal_2008, author = {Ingo Feinerer and Kurt Hornik and David Meyer}, title = {Text Mining Infrastructure in {R}}, journal = {Journal of Statistical Software}, volume = 25, number = 5, pages = {1--54}, month = {March}, year = 2008, issn = {1548-7660}, coden = {JSSOBK}, url = {http://www.jstatsoft.org/v25/i05} } @Article{Rnews:Feinerer:2008, author = {Ingo Feinerer}, title = {An Introduction to Text Mining in {R}}, journal = {R News}, year = 2008, volume = 8, number = 2, pages = {19--22}, month = oct, url = {http://CRAN.R-project.org/doc/Rnews/}, pdf = {http://CRAN.R-project.org/doc/Rnews/Rnews_2008-2.pdf} } tm/data/0000755000175100001440000000000012315572766011637 5ustar hornikuserstm/data/crude.rda0000644000175100001440000002701214755301617013426 0ustar hornikusers‹í}Ýr#G–¦%ÍŒjçggwìXá Fì¶!‚lþô¬B6›”H‚C€jõe¢*AÔ°PÕÑ諹ö…¯á+?ïü~‚µÏwNfUD·8c{–=KFH ‚@VæÉ“ç÷;'/¿Ûö¾óÆ“ÆÇ?i<ùˆ^Ò¿Æ7>¥?ñÓ"0ÆGK¿Ð_Ÿàß_ÛßñÁ¿®>üä—‡¡ž&q =MÿFu“t¦2*ŸèÜ3ã±ñóðÖ¨< ôB…¹šè@ùEN/3å'qžj?W³4ôM¦ÆIªøù* #5ZxíÖî– ¢4SZtšš¨å)úNŒJMPÐØI¬Fi_g<â,ÉrÈx<Ü+“åjhÞèÌ;‰s“NMêóQí½ÖÖÊèMš¶¡iÍt¼àeÈã6ð<³zª¬h®35Õ4ã0æïFáõ$WÉXuÑ´<¬d–&ø–[¦&Ši57ú¦¶Ú©NoLÞÜ ¿øÉð²Yrc²y2Õqm2Žæa&¤ÕÐéñš<ûª5hñ 2Rh2ÙŽ‰¦X%~S™„nM7ÃäÖ¤vt;Ÿ'ŠÖšy~˜Ó’dæÕœ3š×¥)ˆ¸–M~iÙ¤a_ýlóŸNîççÌF˯U{ý›ÚëÕ^¿ª½þ¸öú`yÌežŽõÔdvRŸÚ7?ÊŒï^NÃØ¾üx’©{=¥E—IÊ,Œ.?2¯>òñ¢zýI˜Y~çh5Ý4žØ7rÑœ|åö×ó¯w¾™¿Mb³|ø½<Ò?ÿ´òvcå÷ÏO:gýóC58îœ]ö»ß(¼ó¹ê^ ª{yuØS—'ÝÞ`uøööþÊ[OL¼òίdß³Ííöîþúîìtu”×wþdxÙ99_yógüæÉùËÍAo¸ò·w÷ö·VÇ-2]qÙê¿ Þ“ÿmÿ¥#­.òIâvîç›<$¾Èõtfßü«Àd~Îp¾ÝþLŒˆé ÂÀ¾úi¤ãëB_7|’†×%ý8Of¡ŸÙß¼ÈÌÃ,›E¡ÛçŸú×aý÷O’((‡þñ,Òt ËßL2‹Üc>NÒk÷—OÍB³ocɹ%;*ý5 ¾ü0ñ‹©‰ó3“ká—%ò”Œy@+™Ÿ?¾4¬ûè¯."Æõ±Ë]¬¿ÇÏZ§7¼¯û½.I…±”öId“@ž“Ó;ô†QÚËü ÉÚˆþôuAb-3Y&b—d°‚”~‘/Â#ÑåéëÔÌA…ccD4ƒo5j®cú3ÑQ.¿ áOŸÎ¢00‰RÚ…M~ÆA‘åé‚D·Ž}}E9LIbò1,©ŸÕ¬$bÃ25Á’éW£³…Gÿ2%ˆ/ R-èµÓ%6´nû4hzmbÁÄð¥²$%Sä^Îj)Efš‘æ`½{y©×ô½0nª LI“„'µÐÕSR•ÁµQ½˜þ¼ð.MF²ÌŸ¨N–%>ô"­·Û»ìÈÊ:n±PXKdÈHJbaVÓ—³àåxcp/ôñ'v _ÍŠÙ,Z@MÍ“4 VÔȈ¯>ÍÊ‘èëq’“R½ëÞòžÛsÎUax†SšSW˜ƒÈ:YçDÿ…#Ï…."u%yrÃ[ëÙUó0Ÿ¨Ž’ôêEšÐªÈ08‰}™Þ±©+ó~ŠML£[McfS¥G´-²§˜±§§G!óöíÆ˜Y]‹8 /³N"ÅÊùšl%p(^{30=œ‡>6ÔÞm¨iDFÝ,PßI®éäçb‡ßLG&­1 QÑVØ)±%4*Ø>Ù)µH åëü*{¤ƒž8~¾q6£ð:ž„d»B*„:*ÍÐ#:Ò”5Ø7&ñŸ²™™ˆá%Üñû$]±¾æ:µ°æNIýÝÚ*iÓŠ8 oˆ4x|@'9^äìà”¨Ib›Èšf‰)€•Ærr‚Žû ²._:­eÉ\=¬“úekdX>É„€ÄXdÓÐ>•i3=+gkriBÆý{mø¼×ÎÌ¡~ÜXÌØÏËÂ}à (ŸŸ…oðGÃŽÐܘ›ÌËh—;“<)òeDä¥ÀFVO/ãR—ÑGuxšÓ5yx´ÁÂÒSK¢õiÎ’þÜÌÕë$½©¶çEø{"ôYRd™¾Õ鿥žÐ¾†æX§ôN ®b‚iF„ ’X­ˆ•÷âÛ0MbÞ‘‹$ I°u ³Õ2Å-§€Lhˆ¤‘!6JÃŒOí$åSƒÏÔø}¡ÓœE+† 3Vù,™èLgð`EuBÆ+VŽÝŽÃêÉÂÚŸaŸ‹ž&ï‘øO$k…x}¡(Z=¢øQ’‰iÒÞ/E/ k휻,!Ej<Â3XŸp M¾…µB%c™…h ›Òh¡Ezî@Zr8£«Z™È¯Üeý æ$ó"3›@Äòɾ Íü].§³|óâµöÕáÓÎÑÑéÉà¤ßtcÞqJÿ¡á~œ¹Ñøÿæ”–v÷ŸÝ½cÆ¿ÏõüxÕ•¸§·ùoûYçµ:î|ÛSþ:ëõ†ø÷èäòÌzšjSuÎ;§¯ûNç³g‡ât¬ú¿kœÎòã )ÏÆ£Úø Ð}ˆ ŸGë@‹L'c9J`㊙·›TlJ“âš‘¤^Sî=“aBÒ5°Ÿ´çb‰+ÑÏ*tŸ|ØÌ¸È¥9è÷tM Ó‘q=ŠTשÈëPòIüÅ€üÙXÓäH²ÒÖÍ›èÛÛ­í=¯œž‹uŽ\u‰ lcË~!,׈)ñLÉÔ<2#¯ýüÞ1¿FõãDáß¼ãõ£xý!ñúù°÷]§ÛW]’Ÿ‡õåð»îWê´ÿªwYÆòúƒ!I´»‚õy»ñ@ëþΪŒÿ±ÏØX–­µåéÖ§ÿ™Ü(M›×ìónRÊUNš˜À9÷^¹ªé5K8¶ö+ 
DÖŸGâ«–©¹·tus['ZGI>á‘Ç9"µ”#‚µZåÔ€d†ÍÕ²R$KñTþJC&ýÄÍ"„e)ÕJ´Ä°#ìòH^]*WÚà]2¹.¿&ÁÞÞ¾·<þÿ«á~œìüWGyÜøÓäñYç²3<‹Þð²Ú»:S—½Ã+¹?$Œ¡»¿sC·öï£0~ÐÂøÉgÇI‘‘e¨ú$7‡)"6UÐ"Œ33ô¿8÷f¥¬61×Àá†à‰L.Ù2, ©,/‚…÷’Ü™"OÀ²<Çà$VÇE^¤ÿ@Ôßš¸@.?M¦$ãr‰íÝÒ€©ÉÈyGh)÷$!BT"å*3?)bÐôÙ8W·:*ÊÀjýûdt»ïÛxFÀ?‘ø¶›¥¤óó: ö‰§˜ãdfáé[Fz‰Jðu6Á\ò4àU0q˜#î9›<5Sr ßÎ"J¦sR=°¢­`‘Î"µQê(ú¦$ ž=S3ûã y­9A¤çö#ï$•Çz. §3ðtÙ<Qh??Øý#Òõ°'mºþoª¢qUqÜ¿ ISôONÕ—Çý¯HQ z—ß’¦ ¯_«nÿìâ´7쮎¿Ý~8¦ûîªÖzTWUxÿó›b®Ã|#c]qÆ$öÆ ‘Ÿ@´!ófÅ(B13»Y¤œ$1JdŠE,.¥¬9“TË[“0 ÍÜ¢ÐGfLO`Q̹!›&öVs“ƒ‰ o&ª…ä)l~3ÑQ8Öx9Ð#=aûY=šíh!¦Äב€_àc¿£eUgµßªóZêÎÆ¬‘cÌ8³¨³r[°°¬@šÈ´mæÎFÒÓ„ŽüüY1aÁy‡b“4&=ãù³ƒæÖÖ– Õ<™ßg£Yð9'%3œ™h¼NÉØg9_fK£pŠaí¤Û;›±ä€%!œé*K0„ûaUl™ÇEFnŽ|‹ôs-_*+Ïe²ä¹XP1Ã&‚\.I KÔÊ7!°n˜‘Ë4xüwdµ89óÒÄæV¯Kãr¦3ŸŽˆ’$0ÈÍ`#;©©Þ4Lž £ÙeRol‚L±/Ù”ÜFN]a®ô¢ç:Hˆ‘•CúHX[ ÏÛãmiÙ§6%±æn.mc»µ]&ß\n’È’S¸ƒôŠÊ Û‚Ò·¿³%b3«j> 1GÙ³+œH³žƒC¾ÇºxÙ„4¸Å‚6¹3|od|]Ð/ëGi–¾tˆHsC²jöL{Uæ²v†&Ú¦mž—m¼rø`e’ž¥¯å„g­­ÊœŒés’—S®S;$q&‘JN8rLg_—D¶CتIY0 ÏT ]@6>ô‹(ÇŸ„W%¯dBZÆËŸ¢9›:Ž›´PddÈr®‹óÔœhª(rìË{FG¥«½kÉT Y\¤Él&cIÂÑò „[’eç1ax‰°‰¦ £ÄHã")!‡/ïnÊ‘d$ŠzíçåÇ–¡¾K?KHàFHÙÒD64"#tðIñò ãŲ`ó*†u_ÈìŽØ<¶Ì‘!×(ÓZ¡ª•ð/ìNXÙŽŒç5'8YþÖ¿Ñdé·×ÚÛ·^M™°g)A|xfÓ¦c?†"½Á(C>H)g™Òa*jç‘ìœTÓiN`C9ñl€˜–Ù:ÎbÝ'îÕ’¼'>'©@«kg;4•–Ìc¯îüÀ©›ƒô(MtÐ\A˜ƒ15A³*-ƒ Æ$­ò”žƒþldÞ¼›D*Ö¥Êä9B\³ƒD#ëËh ߯c5”}Xj¾/ÀˆÐ¦›dUã( dÛX.3¶ªÞ&G¸ ¹f8ë_ JuJÅ$CDé$#λ%=$ò›OÃ+‘ð7X>0£|sT¤dÐ\E~-cH¿ ºX3ËÂëØÓ’aZáœr ÙhòVÌÖ”áàñÉ"ñO®ôMfñ|vh•H;n0ކ[IGYëe)Ðh,8ŠÜtËè$„ j–Ì?ÏI@Šø @Yêb0?:¿cièxšlªWÆ4ý '÷lÖ‘B»Jžm (8²¹­ †pÀ}v#1„yaQœŒ/†‡ˆ7åÛÞÕ°wùƒŽíˆûqNè¿©½þ¨±Þ™}ÒXïØÖÇùÇÆ¿4Çvó›«W“¡t^Ôy_]œvÎꨩzg½Ë—½óîka¯açô›;qÐí½ÆÃpnv¶ŸíÈõ㽎xFDGcÙÍ}„|®ï;!Or<Ô°ö gÇôšm@Nªàæü4É2è?[Cv&Ä…,s]DÉG’(gyåZM2]H’KrzSr2x 6ÑÃæÖO°ÞP"*°R) bYí€2Ö5æ Ø4R—¤”ŠXy¨,ÛΤÊ–¥<3è®BHý(QB–•¤È³0.c¶9Îö\.I;š¯Ʒıx¼u:BÊì’˜¬æ’`V¡”€_©{Š„™É™ G“T%tVÀÃpî[³²2˜@ÀQNÈÙ|¤ ™ Ð2 ÜAöÅ.·…a.»$±d@²yæ/0àNÛÅTf–×üRhŸ;'[%ûw‹qt5(È,ÌšRmg–™*oé³ìCé˜o+@vg-ÏX곓üówqg'9Ø |w óÜ+9˜:IqjàmaïÃ<ⱪ]´ÆßTpÀžØTåBYgËfq¨DËmß`ø*›EJV·°ÀlÙd—¸ƒ¹°Dß¹†áEgK–úÍŽ¶lªÉ֨љÅð³“8…)_£²³¤lˆ#¸ éd R$òg™8`K·HÜJ¼¤ßXÂlH Ÿ% ’ÝC²{fM1WÀ„ø¶yC„ׇkFÀxl× ìù ¸&kffÇ¡Çú–´:fg Lß‘—‹qíDÄ]¢/Ÿ,"—#“ÍgR/À‡dn"ài˘×+½ÐñÍãDosHˆÜÓÜÌ2W¸˜2t¨äʦg±Þ6,²€uÊ6«FâÂYØv hgXýÀ{ó^x4¸‰”¤HØË‹•l#÷6Lsä—H%µ.È7d¯13ù²$« (? Ⱥdd1K…3½p9»*lËda`#¾A6xï›U*a2~4±±¤|0¿(CC.%‘ãQ/?­DÁÚ}"N˜çˆ(Ò Ä†ä·Ÿìy\rØ[Éý¼5iÒ,ã^ަHSq}#ƒn‚åü(Ì[–á\¨8ã© P؇ y«š†ÁfûùÞVfãBvúäwšUfkÚÊÏô#:‡Íˆ ù.bjrf…¸ÀäK‘ê ÙKÀ*O{d,…ÉÉKIÜÚy1Ĥ! œÓ¹ŸqÅÛ˜ÍÃqî ~ì¢gù·¤»­“!ú%Lò}%PèD¯åá³$¬½ö2.Ùw&ä]Ob‰Ç´dÇœcs¬‹I{’º …à„–ÊË94â·qàJ…qñ#ĨfÊ©,ѸŠVÆ!ÉR(é2%ë¢JNº¼,Ù‰‘öI¼YSÑ!œýy7JÆJ÷‚¶%ÑÄB¯i/g‘K Âë…·N­g´Uˆ#Ò™‰<„JÈX`ù@k£©Øª%fSÁ ŠBw§ˆAðÎ+ίô‚Yæ;ޤ•©[‘H,d„Š%7¹ìf¹»)•S°½ç’«>¨©"úµÔ¬H¿È꘨±³Sè°!Ær“§CÔÔüü­­*Q]‹«ÑÚF:‚ªB°e¦¢H9\›J¬HJ¹È‹¶Qĉ‰f+óô sW•5Mð^ÁR²L9‰ðö É> ^xwÆ×}ÿS6Åaa4ýàÆáô_ºB2ÿ:a‰ýCpêP’0æt°@Àua$¥ãö4sB’öû"$¹+±éå¤K/\:#"}¢EVs´=¤°¨aÒ8ž&£%Ë«’SÒ²Ö’J5v*9šHƸo30Ðá^Ö뺭.[WìÚgYs¹RqB‰:œB:Ò%Â8KðedtQf} Q—Á^N‡Ûœ |£Z¯–S×ïK©—Ùô*÷ë|ì;¹ßkéÐ*¾Ac)Q!ÿ yµÊBé)Y:öí1œÃÉë"Pf~ª"Ù0—‚=gÙtÍ,É7¹ c³ÉÉ#NÞÊÎ۪ƽÍöÏšjwsç F>y‚àì}FŸ9Ï22ùÜÅÈžDma¹/vc†y‘Ç ö}Ħ––XØÁ*4-Pd›»_4Õþ·Oó¡ö6÷¿8àö7ù}z—#dˆÌoÕ°Æ-מÁûÃíC>$Å2wZûÏžï~ñüÀRÏ“,—ZZ- ÖþîÖÖ[;Kë»g8ûîÇ©Þ[{ýIãQ7î­Îÿ~й:¸ŒaÕä:“KÅž3 ±ßÚYÀx1ZÍNo,1’ó{.Ì!A! 
y$ÉpNW_’jí ÑPÈC ዃÒHÄ”òÁÃ…1ü@^wUýâ°t¿àuFAmvÞ†oiú±ª!׆Ô¤—háš[e>/huÛíý¸˜ÀŒ¶8 ’ÃmA¿ d~í-=Üja¾pX­½ÛÚóÖíŽÃ`TkàéÖ¦j6+‹&,ly{¤ývW¦ÛTA2«”à×Îë×din^ÅAnhàçZåÚ+¶©Íb¬ÃHìË´Õˆ¹}q°ï+W·)3²‰»ƒÕdË{×.p’B’ë-ÇhÕ½§3ÅQDøÙÃI´&i}[¸¥‡Ø,45ØLdôDcÉðKçs&EVgH´pˆ€ð€)^ú{{×CxÊA3І.Û5J‘m¤ÓZôŸéåÓQ*cÓÙ;b0娤 3S_E‹è9( B9ô-‘•ƒcåEyšK^aéd‚÷Ò[ìá’Ë\í¹M.YEÌö~Hor8Bc›Ì\C±V€‘êÕ2O³2Tì Jz2ðÅÂèyMŒYÛníïí­žI7Ïn’‘V ÒÆ27N®Ìxd-q³ˆÚfŒŠ0ÂßèÄpw&DûÒ[ÉpÑžFùý\ ߦ3Ȥ3Ž>!üŽÜG»xgSâ®@‰ä 0±@ß²±UàÕ¢u²äó„8î4xÜ-=Õ.¤¾ :¦Œ‚Èÿй ‡ÇÓ~—} ¸5²rjl»äØã&O´K/!R$È0Bî‰Rƒ›Zð\‰¨¢­t Ö³ÒoÕF¿H­pµ§1<ýR'ó$›†§|F“üÜ"ur:»:‡0ÀÍáKmë«§èr ÂÐBr ž€Ï@|àEÐCÆTZؕ͊pLrn÷•s8}pY€x¼a UFu*UI‰SU´ßÐïV2”2S„êH£O àB´pÁ"¹öG/ßÓËøªá~œGð¯k¯Úxô2÷ö2žþ®3ì\ª«óo{'§õâêðeoÈ ™£“A—<+aVGß~öpà2;«e)Ÿ0,¶±ìQÔþ}ô"¶ñ7 ì°c/•„¨c²¦ôTë·&õÊÞUb²ÞÐFÉô©ÃqºÌÚÿ©í,¦}?a˜‚ðɪ³!AœœQ癩u†âdH~–¹^p+±ŽÔ› .:5Pwµ.%®åbqJÊ‹Qâacdœå9^IÂë ¤NµÌfÈ€&Q—'y­VKud‚©µm̸ý#ÃâZW— Œo“.pž–ý›”‚öà‡)m!‘ É òôšµ’"­Ð± ,QŸÉò"íè¶²–9äd*bwòò³8aèDLêhJ‡*ýs×c-c•Tf®¯#é¬VmšçíØPŒ æ3dågŸæ¸E ›4=Y¾¤õD$5%`/ÆÒ†ÜËe¦+ÖÁháí·¶wÙ¹@­}ÙkÙT°U7XBjÈœaÍP¢ÉB‰›¿ÛŒºêß6N"L ‰Qv °°"Jƒz©+dzîö:‹Šé m ÌXr†²úöJ—jX¼’Ï''ŽùÒ“âϹÞv\쩎ª]·ÏaxŒuDè9¿Gy,ÖÛ{µŠZaý»U8eeÕÒÎ’õçÁ¡…ùàÉQàjˆ ü>ט̲IEå>›{šÛ鰅劲” c›O #ìY¼5Ì­<ôo–Ÿ^:þ_kâ:_7eŒ ÀuF4@¡N‹›©Ž›Ü®1£Õ;¯é|¯ˆÃfÍ_gÀ¼—põ·žƒf—b±]¯$Ô¯çø¿åþ˜ŒN¶Aº–Um{ eaØWIf&­L׺ŒDba.C#Ã.ß# €IÀE1oh-ªÈÙª´LRö§ÌVëd$cñtµZðýEyB+¯*)¬ivCª»…âB¸Ž+Ô«Ê ½÷—ª»e†õ,ÏŸ¥.¯·/µRèÔ¸ÕvÈ,Ö+½p8qþ7Ëoß?õpÔp?΀ÿ÷µ×õn"NÁ6Ï“ÔCç²óâ¤CpBÀ‰‡nÿììdxÖ;çNz"¼:ÝU“œœƒUáŸÑ9ØY%Ø'!Ê¡'llnÆÐ@«C?&&>$—â¿‹}Pëã$FZ J5&›Øãò=¥‘g—F¾;­Ý÷XklôìÔzš†Ü©Ðé¢Y˃ÔÒù5A̪ÛaÞKäÝ% Á! ‘jiüuñT/¦š$°„sª8;?æžÂK³n·ž—¡ZŸÇOضUÄÕ<ë&·X²€l1p´àÀ¢'Å‚e嚃H¢Sî©ë./-3’^ÆL=·Òõ•Î'€DKi'MáÈŒRžº:ÚayöíjöÜPe¤£o @m-ëÎ%ˆè°sЀз\{/Ã%Nr/Ó‘ë3Ži'‹úV¢°öø±xÒˆ!ƒ0ãRìRÈW¨¸`®ž;ôqÈøHQQí¢ûuÇÄFùceÍþ³V{g§ÜWëyµÅ1l™… ¦€}ixc æª@ŸmEoÉ-fG€ @_Jç¶î3wq%úAÙ*lÅû¸ÓŽ?Ÿ'›c@h‡¢œfU9¯'eb²ŽŒõ‰³"bÚÂã´È&Ö¨s–Lâ¡n3ŒäÊÚq[Y²IÞ£ÌíÔ=Í‚…ì4b¨>ô%†6ð’ƒÄ%€†÷Ï^Yv>Ä¥dQSfÉìôðM0HlýúÚÔOÞÇÁü­ÍMÕ{ó†6¯6úò»þùWMu–Œh‡ä³þ zǶÒ<‰}ïËáw_±éNÌmZ~³{üíWÍF_Q _oBºT¾W“‹Ø—b¡ê”“™û­Ýíun¨­¾³»µD¿ÜVÉ®úlUŽOȳîê ‹/ÿ±ž"·ò®ªRqH«ê˜q…i¶§&ójÈ$8ºz&{2n1~È=ƒàc®ð–Ž$ˆh0<‰3Aµö$FÐPâ(Ì&Âbw¹uÁ×…µ*äÍ}\K ‰N°ÃÉ-¶]óCIsÕâ?ëëÍßq˜9²a!z(Û6ÑR2Ëb ʳLê‡ñ’_ÒMíR†­ˆtoBò xÞ¢n^ëxTX•8"÷Ì×ítjU¤[9S*{'u\¯*Wí­ª BÅòÍ*õO³GQ„ªëu©ª-^çÞ 1*9åa©ƒ™.)˜%Y º4z:w¡íjFœ½¿tükÊÿQ€«wy@õÏÙø—æµÄ:꽸¼ê\¾¶=ûWË«¡Â![†g§çêÅÅÝNYû;á}ÒÞ~¶ë25Ž`Ë.}X¡Ý½^«´xôt>$OçÉï¥D]½X3¨U6Å9Ì#¥/’à‹MHÆY1fïäF ™0Ð7 ²Ðl>zK qË&8¶êÖ©E‚A™¹¼Ç«Î™'•7fAïTÖAUEYb2ì¸V!9hèfÁ"[ÅOëw—Ð “‰†áËNR7)•ôºˆý0ò>{Ùµ·TTTrHÂÈ­CÕm a­ÙºÌýv™{ÒhŽ¢›Kо¹_óJÊ|vÕé}Þ¬‡âv@À³¨…š2í3°Z—IÄ«ÇÚ ê–]‹ì%¬ÿá¡G"ÂckSÛ6>ÚFÈ—°á•%pÏöŒû ÷³®Šç'GÍÓ¸ÏË«Ó#½ô{5|Í}ÏHÀ†è®în²8!ÅÓ9^£>Úyö¼ñP”Ï#RW4÷ˆÄY’.5¾X›×TS¤šš>´ÿÛ4ǯÿ\9þ{åÎK/”ïãâñ¼µIswû¯M™‡kÒæ isàH×%ÅÕÚ¤øÝÎm÷Ta{ ÷ãÔÍh<ª°ÆŸ¦ÂþˆôQ§Ûí_ÞñŸvvH «°Ýš­ST¥.{Ì ý…*¬~ :X ÜItͶPÊfˆ zåíp%àáìQ,Y“šê¸ÓøNýèÿã†Ä2óø=M‰=nJ¬Þ×”Ø6Ê\›è¿Ûž˜Àù¯êQ¬Þ×£X–Þ­®‡²1yh>nt-jCÑ{èN[Î|"Ã.‚•¼‰új4£–É× vÏwÞZ<ØRãdé–|•tø+[UÒÿµJÚ²½U6 \×PÝÕ¤‘NDo÷N«Bç&=zFЦà¤u"ä‹I‘€Šfktó%©\¬UwA"—C’qûš¦Mo’D<9i¢~F7ȬU<²ÕEÇ Õ°9N¸OHûÛT'þ6³8´ÝÑÜ߆4ä>ÀôØBõe>o|t6E¯ízëzw~Ü:ŸbÆ7[¿(›„3i\KýSšø7$êg\#Ù1:.uSð¸`Цžt ’ÑÞ×tîÌâîéÒ§,ðbâ¹F¾ãdB¾é¡µS ¸=NøWS´Õ+œ9Ή²ÖçËíÎÜÚÕìqƒë2D“Ã=ïÙ(A_­jžÍjìZ‡onÌÌj#72"”ÊJhÑWMé¸@×p¬OúqQmZ‰#)‰é2I蕹‡&bÄçýÕQ¯á~Öµª­WÝ<ª£RGŸ]Ÿœv{§ôoG]ô/‡ª{Úô® îœCª{Ù¯¿³÷@uPC»[«?ÞÁòáꢎ½£É¶Žt"Ô¦.œÜð¤Š*¹7Jäâsס_ƒX¸‹£*„rY ”o µ & BÊ[¢\Ÿ5ÆlÈm^d?k?÷žV7ó­X.›Ë%µ:"¢¹gêL®MÇõá!§xÈO·=ÕÄkÓÝßÝòjˆ¿¦ˆTi©+“Û•flÍ2_ä*'!b]×D©‡ˆÙ@y ùz4âu‚͵mÃ'Åœ)M§‚]tPa¯Nl´`mŽ àîy‘Ú¢±Ùøö-á¼_MÛ&(qʤ¥/=:•´ %6†Ùð”d&QTãOâðû²âœÛ­š ôó*Ü&ªÔvÓjƒœíÝͲ$Ån³ø…¶aÑ‚ÇZ¬9ßG_ÖeLC`Ð+(sÛݲÙåe/±#׫j.äî7CÁ¸ÑÖˬZ¢ŠÏǬZUÕK15ccº³BU§II@Y¸Ô¡'Þ5›>úMlsì(¹váµ¶& ­ï†VwJêm;ï©nÝp?N5þºöú¯ê¶qÿÎðr±ÙËËþÕ…º"o Nλ—½4.órlî´Õg<;xÞx(:wo50ú¨s®Îýø¿<êÜGû¡é\‡2×úÌ¥ù<áξÛö™F[×Bb§¿µ÷v¹=“±Ë¶™Yõ'ƒm4:膭K«®}ßÌd$äF«Æ¼#“û--»}’Y³ ¯ã ä°•=b\t†¿›˜Ì¤™‰DýZ¿ŸM…TÏñ5éŠOsý ·J~Ûp?΂øwG«dU%ÿ9¬’Ý­‡“"Ý;Ø]øÑ*ùp­’ý«˜olC½ÈÓ 
tm/data/acq.rda0000644000175100001440000004451614755301617013074 0ustar hornikusers[binary bzip2-compressed RData payload omitted]
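The data/ entries in this archive (acq above, and crude elsewhere in the tarball) are serialized R objects rather than source code: each holds a ready-made example corpus of Reuters-21578 news documents. As orientation only, and not part of the tarball itself, here is a minimal R sketch of how the acq dataset is typically loaded and examined; the object name and the figure of 50 acquisition-topic articles are taken from the package's standard data documentation.

# Illustrative sketch, not package source; assumes the tm package is installed.
library(tm)
data("acq")        # VCorpus of 50 Reuters-21578 articles on corporate acquisitions
length(acq)        # 50
inspect(acq[[1]])  # first document plus its metadata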
tm/src/0000755000175100001440000000000014755301616011507 5ustar hornikuserstm/src/RcppExports.cpp0000644000175100001440000000433413404766411014506 0ustar hornikusers// Generated by using Rcpp::compileAttributes() -> do not edit by hand
// Generator token: 10BE3573-1514-4C36-9D1C-5A225CD40393

#include <Rcpp.h>

using namespace Rcpp;

// tdm
List tdm(const StringVector strings, const bool remove_puncts, const bool remove_digits, const std::vector<std::string> stopwords, const std::vector<std::string> dictionary, const unsigned int min_term_freq, const unsigned int max_term_freq, const unsigned int min_word_length, const unsigned int max_word_length);
RcppExport SEXP _tm_tdm(SEXP stringsSEXP, SEXP remove_punctsSEXP, SEXP remove_digitsSEXP, SEXP stopwordsSEXP, SEXP dictionarySEXP, SEXP min_term_freqSEXP, SEXP max_term_freqSEXP, SEXP min_word_lengthSEXP, SEXP max_word_lengthSEXP) {
BEGIN_RCPP
    Rcpp::RObject rcpp_result_gen;
    Rcpp::RNGScope rcpp_rngScope_gen;
    Rcpp::traits::input_parameter< const StringVector >::type strings(stringsSEXP);
    Rcpp::traits::input_parameter< const bool >::type remove_puncts(remove_punctsSEXP);
    Rcpp::traits::input_parameter< const bool >::type remove_digits(remove_digitsSEXP);
    Rcpp::traits::input_parameter< const std::vector<std::string> >::type stopwords(stopwordsSEXP);
    Rcpp::traits::input_parameter< const std::vector<std::string> >::type dictionary(dictionarySEXP);
    Rcpp::traits::input_parameter< const unsigned int >::type min_term_freq(min_term_freqSEXP);
    Rcpp::traits::input_parameter< const unsigned int >::type max_term_freq(max_term_freqSEXP);
    Rcpp::traits::input_parameter< const unsigned int >::type min_word_length(min_word_lengthSEXP);
    Rcpp::traits::input_parameter< const unsigned int >::type max_word_length(max_word_lengthSEXP);
    rcpp_result_gen = Rcpp::wrap(tdm(strings, remove_puncts, remove_digits, stopwords, dictionary, min_term_freq, max_term_freq, min_word_length, max_word_length));
    return rcpp_result_gen;
END_RCPP
}
// Boost_Tokenizer
StringVector Boost_Tokenizer(const StringVector strings);
RcppExport SEXP _tm_Boost_Tokenizer(SEXP stringsSEXP) {
BEGIN_RCPP
    Rcpp::RObject rcpp_result_gen;
    Rcpp::RNGScope rcpp_rngScope_gen;
    Rcpp::traits::input_parameter< const StringVector >::type strings(stringsSEXP);
    rcpp_result_gen = Rcpp::wrap(Boost_Tokenizer(strings));
    return rcpp_result_gen;
END_RCPP
}
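RcppExports.cpp is the generated C++ half of the Rcpp attribute glue: it registers _tm_tdm and _tm_Boost_Tokenizer so they can be reached from R. The generated R half lives in R/RcppExports.R (listed in the MD5 manifest) and simply forwards to .Call(). The wrapper below is a sketch of the conventional form of that generated code, reconstructed from the C++ signature above rather than copied from the package.

# Assumed (conventional) form of the Rcpp-generated R wrapper; not verbatim package code.
tdm <- function(strings, remove_puncts, remove_digits, stopwords, dictionary,
                min_term_freq, max_term_freq, min_word_length, max_word_length) {
    .Call(`_tm_tdm`, strings, remove_puncts, remove_digits, stopwords, dictionary,
          min_term_freq, max_term_freq, min_word_length, max_word_length)
}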
tm/src/tdm.cpp0000644000175100001440000000602013410131457012770 0ustar hornikusers// [[Rcpp::depends(BH)]]
// [[Rcpp::plugins(cpp11)]]

#include <boost/tokenizer.hpp>
#include <Rcpp.h>

using namespace Rcpp;

static int is_ascii_digit(int c) {
    static const char *s = "0123456789";
    return strchr(s, c) == NULL ? 0 : 1;
}

static int is_ascii_punct(int c) {
    static const char *s = "!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~";
    return strchr(s, c) == NULL ? 0 : 1;
}

// [[Rcpp::export]]
List tdm(const StringVector strings,
         const bool remove_puncts,
         const bool remove_digits,
         const std::vector<std::string> stopwords,
         const std::vector<std::string> dictionary,
         const unsigned int min_term_freq,
         const unsigned int max_term_freq,
         const unsigned int min_word_length,
         const unsigned int max_word_length) {
    unsigned int column = 1;
    std::map<std::string, unsigned int> line, terms_pos;
    std::set<std::string> dict(dictionary.begin(), dictionary.end()), sw(stopwords.begin(), stopwords.end());
    std::vector<unsigned int> i, j, v;
    std::vector<std::string> terms;

    for (unsigned int index = 0; index < strings.size(); index++) {
        std::string s = std::string(strings(index));
        typedef boost::tokenizer<boost::char_separator<char> > tokenizer;
        boost::char_separator<char> sep(" \f\n\r\t\v");
        tokenizer tok(s, sep);
        line.clear();
        for (tokenizer::iterator it = tok.begin(); it != tok.end(); ++it) {
            std::string token = *it;
            if(remove_puncts)
                token.erase(std::remove_if(token.begin(), token.end(), &is_ascii_punct), token.end());
            if(remove_digits)
                token.erase(std::remove_if(token.begin(), token.end(), &is_ascii_digit), token.end());
            if ((dict.empty() || dict.count(token)) &&
                min_word_length <= token.length() &&
                token.length() <= max_word_length &&
                !sw.count(token))
                line[token]++;
        }
        for (std::map<std::string, unsigned int>::iterator it = line.begin(); it != line.end(); ++it) {
            std::string term = it->first;
            unsigned int freq = it->second;
            if (min_term_freq <= freq && freq <= max_term_freq) {
                unsigned int tpt;
                if (!terms_pos.count(term)) {
                    tpt = column++;
                    terms_pos[term] = tpt;
                    terms.push_back(term);
                } else {
                    tpt = terms_pos[term];
                }
                i.push_back(tpt);
                j.push_back(index + 1);
                v.push_back(freq);
            }
        }
    }

    for (const std::string &term : dictionary)
        if (std::find(terms.begin(), terms.end(), term) == terms.end())
            terms.push_back(term);

    return List::create(Named("i") = i,
                        Named("j") = j,
                        Named("v") = v,
                        Named("terms") = terms);
}
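tdm() returns the counts in triplet form (i is the term index, j the document index, v the count) together with the vector of term labels; on the R side these pieces become the sparse simple_triplet_matrix behind a TermDocumentMatrix. The sketch below shows the user-facing call whose control options feed the arguments above: dictionary maps to dictionary, wordLengths to the word-length bounds, bounds$local to the per-document frequency bounds, and stopwords to the stopword list. The two-document corpus is invented for illustration, and the exact set of supported controls should be checked against the TermDocumentMatrix/termFreq documentation.

# Illustrative usage; the toy corpus is made up.
library(tm)
corp <- Corpus(VectorSource(c("oil prices rose as opec met",
                              "opec discussed crude oil output quotas")))
m <- TermDocumentMatrix(corp,
                        control = list(stopwords = stopwords("en"),
                                       wordLengths = c(3, Inf),
                                       bounds = list(local = c(1, Inf))))
inspect(m)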
tm/src/scan.c0000644000175100001440000000503314656576172012603 0ustar hornikusers#include <R.h>
#include <Rdefines.h>

/*
#include <ctype.h>

static int is_ascii_space(int c) {
    return (isspace(c) && isascii(c));
}

static int is_space_or_ascii_punct(int c) {
    return(isspace(c) || (ispunct(c) && isascii(c)));
}
*/

static int is_ascii_space(int c) {
    static const char *s = " \f\n\r\t\v";
    return strchr(s, c) == NULL ? 0 : 1;
}

static int is_ascii_space_or_punct(int c) {
    static const char *s = " \f\n\r\t\v!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~";
    return strchr(s, c) == NULL ? 0 : 1;
}

static SEXP tm_scan_one(SEXP this, int (*test) (int)) {
    SEXP y;
    Rboolean skip;
    int size = 256, i, j, nb = 0, ne = 0, u, v, w;
    int *beg, *end;
    const char *s;
    char c, *t, *p;
    cetype_t e;

    if(this == NA_STRING) {
        return ScalarString(NA_STRING);
    }

    beg = R_Calloc(size, int);
    end = R_Calloc(size, int);

    e = getCharCE(this);
    s = CHAR(this);
    i = 0;
    skip = TRUE;
    while((c = *s++) != '\0') {
        if(skip && !test(c)) {
            skip = FALSE;
            if(nb >= size) {
                if(size > INT_MAX / 2) error("too many items");
                size *= 2;
                beg = R_Realloc(beg, size, int);
                end = R_Realloc(end, size, int);
            }
            beg[nb] = i;
            nb++;
        } else if(!skip && test(c)) {
            skip = TRUE;
            end[ne] = i - 1;
            ne++;
        }
        i++;
    }
    if(ne < nb)
        end[ne] = i - 1;

    PROTECT(y = NEW_CHARACTER(nb));
    s = CHAR(this);
    v = -1;
    for(i = 0; i < nb; i++) {
        u = beg[i];
        s += (u - v - 1);
        v = end[i];
        w = v - u + 1;
        p = t = (char *) R_alloc(w + 1, sizeof(char));
        for(j = 0; j < w; j++) {
            *t++ = *s++;
        }
        *t = '\0';
        SET_STRING_ELT(y, i, mkCharCE(p, e));
    }

    R_Free(beg);
    R_Free(end);

    UNPROTECT(1);
    return y;
}

SEXP _tm_scan(SEXP x, SEXP which) {
    SEXP y, z, this;
    R_xlen_t i, j, k, nx, ny;
    int w;
    int (*test) (int) = is_ascii_space;

    if(LENGTH(which) > 0) {
        PROTECT(this = AS_INTEGER(which));
        w = INTEGER(this)[0];
        if(w == 1)
            test = is_ascii_space_or_punct;
        UNPROTECT(1);
    }

    nx = LENGTH(x);
    if(nx < 1)
        return NEW_CHARACTER(0);
    if(nx == 1)
        return tm_scan_one(STRING_ELT(x, 0), test);

    PROTECT(z = NEW_LIST(nx));
    ny = 0;
    for(i = 0; i < nx; i++) {
        this = tm_scan_one(STRING_ELT(x, i), test);
        SET_VECTOR_ELT(z, i, this);
        ny += LENGTH(this);
    }
    // Now unlist.
    k = 0;
    PROTECT(y = NEW_STRING(ny));
    for(i = 0; i < nx; i++) {
        this = VECTOR_ELT(z, i);
        for(j = 0; j < LENGTH(this); j++, k++)
            SET_STRING_ELT(y, k, STRING_ELT(this, j));
    }
    UNPROTECT(2);
    return y;
}
tm/src/init.c0000644000175100001440000000161413572675110012617 0ustar hornikusers#include <R.h>
#include <Rinternals.h>
#include <R_ext/Rdynload.h>

SEXP _tm_copyCorpus(SEXP x, SEXP y);
SEXP _tm_remove_chars(SEXP x, SEXP which);
SEXP _tm_scan(SEXP x, SEXP which);
SEXP _tm_tdm(SEXP stringsSEXP, SEXP remove_punctsSEXP, SEXP remove_digitsSEXP, SEXP stopwordsSEXP, SEXP dictionarySEXP, SEXP min_term_freqSEXP, SEXP max_term_freqSEXP, SEXP min_word_lengthSEXP, SEXP max_word_lengthSEXP);
SEXP _tm_Boost_Tokenizer(SEXP stringsSEXP);

static const R_CallMethodDef CallEntries[] = {
    {"_tm_copyCorpus",      (DL_FUNC) &_tm_copyCorpus,      2},
    {"_tm_remove_chars",    (DL_FUNC) &_tm_remove_chars,    2},
    {"_tm_scan",            (DL_FUNC) &_tm_scan,            2},
    {"_tm_tdm",             (DL_FUNC) &_tm_tdm,             9},
    {"_tm_Boost_Tokenizer", (DL_FUNC) &_tm_Boost_Tokenizer, 1},
    {NULL, NULL, 0}
};

void R_init_tm(DllInfo *dll) {
    R_registerRoutines(dll, NULL, CallEntries, NULL, NULL);
    R_useDynamicSymbols(dll, FALSE);
}
tm/src/tokenizer.cpp0000644000175100001440000000164113324403054014215 0ustar hornikusers// [[Rcpp::depends(BH)]]

#include <boost/tokenizer.hpp>
#include <Rcpp.h>

using namespace Rcpp;

// [[Rcpp::export]]
StringVector Boost_Tokenizer(const StringVector strings) {
    std::vector<std::string> tokens;
    std::vector<unsigned int> places;

    for (unsigned int index = 0; index < strings.size(); index++) {
        if(StringVector::is_na(strings[index])) {
            places.push_back(tokens.size());
            tokens.push_back("");
            continue;
        }
        std::string str = std::string(strings(index));
        typedef boost::tokenizer<boost::char_separator<char> > tokenizer;
        boost::char_separator<char> sep(" \f\n\r\t\v");
        tokenizer tok(str, sep);
        for (tokenizer::iterator it = tok.begin(); it != tok.end(); ++it) {
            tokens.push_back(*it);
        }
    }

    StringVector y = wrap(tokens);
    for(unsigned int i = 0; i < places.size(); i++) {
        y[places[i]] = NA_STRING;
    }
    return y;
}
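Boost_Tokenizer() above and the _tm_scan() routine in scan.c split on the same ASCII whitespace set " \f\n\r\t\v" (with _tm_scan optionally splitting on ASCII punctuation as well); they sit behind the exported R tokenizers Boost_tokenizer and scan_tokenizer. A small illustrative call of the R-level entry points, with invented input:

# Invented inputs; the NA propagation mirrors the places[] bookkeeping above.
library(tm)
Boost_tokenizer(c("Crude oil prices\trose.", NA))  # NA elements come back as NA
scan_tokenizer("one two  three")                   # "one" "two" "three"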
tm/src/copy.c0000644000175100001440000000015613572675071012634 0ustar hornikusers#include <Rinternals.h>

SEXP _tm_copyCorpus(SEXP x, SEXP y) {
    copyVector(x, y);
    return R_NilValue;
}
tm/src/remove.c0000644000175100001440000000250114323476227013150 0ustar hornikusers#include <R.h>
#include <Rdefines.h>

/*
#include <ctype.h>

static int is_ascii_digit(int c) {
    return(isdigit(c) && isascii(c));
}

static int is_ascii_punct(int c) {
    return(ispunct(c) && isascii(c));
}
*/

static int is_ascii_digit(int c) {
    static const char *s = "0123456789";
    return strchr(s, c) == NULL ? 0 : 1;
}

static int is_ascii_punct(int c) {
    static const char *s = "!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~";
    return strchr(s, c) == NULL ? 0 : 1;
}

SEXP _tm_remove_chars(SEXP x, SEXP which) {
    SEXP y, this;
    int n, i, w;
    const char *s;
    char c, *t, *p;
    cetype_t e;
    int (*test) (int) = is_ascii_punct;

    if(LENGTH(which) > 0) {
        PROTECT(this = AS_INTEGER(which));
        w = INTEGER(this)[0];
        if(w == 1)
            test = is_ascii_digit;
        UNPROTECT(1);
    }

    PROTECT(x = AS_CHARACTER(x));
    n = LENGTH(x);
    PROTECT(y = NEW_CHARACTER(n));
    for(i = 0; i < n; i++) {
        this = STRING_ELT(x, i);
        if(this == NA_STRING) {
            SET_STRING_ELT(y, i, NA_STRING);
            continue;
        }
        e = getCharCE(this);
        s = CHAR(this);
        t = p = (char *) R_alloc(strlen(s) + 1, sizeof(char));
        while((c = *s++) != '\0') {
            if(!test(c))
                *t++ = c;
        }
        *t = '\0';
        SET_STRING_ELT(y, i, mkCharCE(p, e));
    }
    setAttrib(y, R_NamesSymbol, getAttrib(x, R_NamesSymbol));
    UNPROTECT(2);
    return y;
}
tm/NAMESPACE0000644000175100001440000002023614573613672012140 0ustar hornikusersuseDynLib("tm", .registration = TRUE) importFrom("NLP", "content", "content<-", "meta", "meta<-", "words", "as.Token_Tokenizer", "is.Span_Tokenizer", "Token_Tokenizer", "TaggedTextDocument") importFrom("Rcpp", "evalCpp") importFrom("graphics", "abline", "plot") importFrom("parallel", "parLapply") importFrom("stats", "coef", "cor", "lm", "setNames") importFrom("utils", "download.file", "getS3method", "unzip") importFrom("slam", "as.simple_triplet_matrix", "col_sums", "crossapply_simple_triplet_matrix", "read_stm_MC", "rollup", "row_sums", "simple_triplet_matrix") importFrom("xml2", "read_xml", "xml_contents", "xml_find_all", "xml_missing", "xml_text") export("as.DocumentTermMatrix", "as.TermDocumentMatrix", "as.VCorpus", "Boost_tokenizer", "content_transformer", "Corpus", "DataframeSource", "DirSource", "Docs", "DocumentTermMatrix", "DublinCore", "DublinCore<-", "eoi", "FunctionGenerator", "getElem", "getMeta", "Heaps_plot", "findAssocs", "findFreqTerms", "findMostFreqTerms", "getReaders", "getSources", "getTokenizers", "getTransformations", "inspect", "MC_tokenizer", "nDocs", "nTerms", "PCorpus", "pGetElem", "PlainTextDocument", "read_dtm_Blei_et_al", "read_dtm_MC", "readDataframe", "readDOC", "reader", "readPlain", "readReut21578XML", "readReut21578XMLasPlain", "readRCV1", "readRCV1asPlain", "readPDF", "readTagged", "readXML", "removeNumbers", "removePunctuation", "removeSparseTerms", "removeWords", "scan_tokenizer", "SimpleCorpus", "SimpleSource", "stemCompletion", "stemDocument", "stepNext", "stopwords", "stripWhitespace", "TermDocumentMatrix", "termFreq", "Terms", "tm_filter", "tm_index", "tm_map", "tm_parLapply", "tm_parLapply_engine", "tm_reduce", "tm_term_score", "URISource", "VCorpus", "VectorSource", "WeightFunction", "weightTf", "weightTfIdf", "weightBin", "weightSMART", "writeCorpus", "XMLSource", "XMLTextDocument", "Zipf_plot", "ZipSource") S3method("removeNumbers", "character") S3method("removePunctuation", "character") S3method("removeWords", "character") S3method("stemDocument", "character") S3method("stripWhitespace", "character") S3method("words",
"character") S3method("[", "DataframeSource") S3method("[[", "DataframeSource") S3method("getElem", "DataframeSource") S3method("getMeta", "DataframeSource") S3method("pGetElem", "DataframeSource") S3method("[", "DirSource") S3method("[[", "DirSource") S3method("getElem", "DirSource") S3method("pGetElem", "DirSource") S3method("[", "DocumentTermMatrix") S3method("c", "DocumentTermMatrix") S3method("dimnames<-", "DocumentTermMatrix") S3method("findAssocs", "DocumentTermMatrix") S3method("findMostFreqTerms", "DocumentTermMatrix") S3method("inspect", "DocumentTermMatrix") S3method("plot", "DocumentTermMatrix") S3method("print", "DocumentTermMatrix") S3method("t", "DocumentTermMatrix") S3method("tm_term_score", "DocumentTermMatrix") S3method("as.VCorpus", "list") S3method("tm_term_score", "term_frequency") S3method("[", "PCorpus") S3method("[[", "PCorpus") S3method("[[<-", "PCorpus") S3method("as.list", "PCorpus") S3method("content", "PCorpus") S3method("format", "PCorpus") S3method("inspect", "PCorpus") S3method("length", "PCorpus") S3method("meta", "PCorpus") S3method("meta<-", "PCorpus") S3method("names", "PCorpus") S3method("names<-", "PCorpus") S3method("print", "PCorpus", .print_via_format) S3method("TermDocumentMatrix", "PCorpus") S3method("tm_filter", "PCorpus") S3method("tm_index", "PCorpus") S3method("tm_map", "PCorpus") S3method("as.character", "PlainTextDocument") S3method("content", "PlainTextDocument") S3method("content<-", "PlainTextDocument") S3method("format", "PlainTextDocument") S3method("meta", "PlainTextDocument") S3method("meta<-", "PlainTextDocument") S3method("print", "PlainTextDocument", .print_via_format) S3method("removeNumbers", "PlainTextDocument") S3method("removePunctuation", "PlainTextDocument") S3method("removeWords", "PlainTextDocument") S3method("stemDocument", "PlainTextDocument") S3method("stripWhitespace", "PlainTextDocument") S3method("tm_term_score", "PlainTextDocument") S3method("words", "PlainTextDocument") S3method("[", "SimpleCorpus") S3method("[[", "SimpleCorpus") S3method("[[<-", "SimpleCorpus") S3method("as.list", "SimpleCorpus") S3method("content", "SimpleCorpus") S3method("format", "SimpleCorpus") S3method("inspect", "SimpleCorpus") S3method("length", "SimpleCorpus") S3method("meta", "SimpleCorpus") S3method("meta<-", "SimpleCorpus") S3method("names", "SimpleCorpus") S3method("print", "SimpleCorpus", .print_via_format) S3method("TermDocumentMatrix", "SimpleCorpus") S3method("tm_filter", "SimpleCorpus") S3method("tm_index", "SimpleCorpus") S3method("tm_map", "SimpleCorpus") S3method("close", "SimpleSource") S3method("eoi", "SimpleSource") S3method("length", "SimpleSource") S3method("open", "SimpleSource") S3method("reader", "SimpleSource") S3method("stepNext", "SimpleSource") S3method("c", "TermDocumentMatrix") S3method("[", "TermDocumentMatrix") S3method("dimnames<-", "TermDocumentMatrix") S3method("findAssocs", "TermDocumentMatrix") S3method("findMostFreqTerms", "TermDocumentMatrix") S3method("inspect", "TermDocumentMatrix") S3method("plot", "TermDocumentMatrix") S3method("print", "TermDocumentMatrix") S3method("t", "TermDocumentMatrix") S3method("tm_term_score", "TermDocumentMatrix") S3method("c", "term_frequency") S3method("findMostFreqTerms", "term_frequency") S3method("c", "TextDocument") S3method("inspect", "TextDocument") S3method("print", "TextDocumentMeta") S3method("[", "URISource") S3method("[[", "URISource") S3method("getElem", "URISource") S3method("pGetElem", "URISource") S3method("[", "VCorpus") S3method("[[", "VCorpus") 
S3method("[[<-", "VCorpus") S3method("as.list", "VCorpus") S3method("as.VCorpus", "VCorpus") S3method("c", "VCorpus") S3method("content", "VCorpus") S3method("format", "VCorpus") S3method("inspect", "VCorpus") S3method("length", "VCorpus") S3method("meta", "VCorpus") S3method("meta<-", "VCorpus") S3method("names", "VCorpus") S3method("names<-", "VCorpus") S3method("print", "VCorpus", .print_via_format) S3method("TermDocumentMatrix", "VCorpus") S3method("tm_filter", "VCorpus") S3method("tm_index", "VCorpus") S3method("tm_map", "VCorpus") S3method("[", "VectorSource") S3method("[[", "VectorSource") S3method("getElem", "VectorSource") S3method("pGetElem", "VectorSource") S3method("getElem", "XMLSource") S3method("as.character", "XMLTextDocument") S3method("content", "XMLTextDocument") S3method("content<-", "XMLTextDocument") S3method("format", "XMLTextDocument") S3method("meta", "XMLTextDocument") S3method("meta<-", "XMLTextDocument") S3method("print", "XMLTextDocument", .print_via_format) S3method("close", "ZipSource") S3method("getElem", "ZipSource") S3method("open", "ZipSource") S3method("pGetElem", "ZipSource") S3method("TermDocumentMatrix", "default") S3method("as.DocumentTermMatrix", "DocumentTermMatrix") S3method("as.DocumentTermMatrix", "TermDocumentMatrix") S3method("as.DocumentTermMatrix", "default") S3method("as.DocumentTermMatrix", "term_frequency") S3method("as.DocumentTermMatrix", "textcnt") S3method("as.TermDocumentMatrix", "TermDocumentMatrix") S3method("as.TermDocumentMatrix", "DocumentTermMatrix") S3method("as.TermDocumentMatrix", "default") S3method("as.TermDocumentMatrix", "term_frequency") S3method("as.TermDocumentMatrix", "textcnt") S3method("Docs", "DocumentTermMatrix") S3method("Docs", "TermDocumentMatrix") S3method("Terms", "DocumentTermMatrix") S3method("Terms", "TermDocumentMatrix") S3method("nDocs", "DocumentTermMatrix") S3method("nDocs", "TermDocumentMatrix") S3method("nTerms", "DocumentTermMatrix") S3method("nTerms", "TermDocumentMatrix") S3method("findAssocs", "matrix") tm/inst/0000755000175100001440000000000014755301616011675 5ustar hornikuserstm/inst/texts/0000755000175100001440000000000012213264557013043 5ustar hornikuserstm/inst/texts/reuters-21578.xml0000644000175100001440000004050412074065307015742 0ustar hornikusers 26-FEB-1987 15:01:01.79 cocoa el-salvadorusauruguay C T f0704reute u f BC-BAHIA-COCOA-REVIEW 02-26 0105 BAHIA COCOA REVIEW SALVADOR, Feb 26 - Showers continued throughout the week in the Bahia cocoa zone, alleviating the drought since early January and improving prospects for the coming temporao, although normal humidity levels have not been restored, Comissaria Smith said in its weekly review. The dry period means the temporao will be late this year. Arrivals for the week ended February 22 were 155,221 bags of 60 kilos making a cumulative total for the season of 5.93 mln against 5.81 at the same stage last year. Again it seems that cocoa delivered earlier on consignment was included in the arrivals figures. Comissaria Smith said there is still some doubt as to how much old crop cocoa is still available as harvesting has practically come to an end. With total Bahia crop estimates around 6.4 mln bags and sales standing at almost 6.2 mln there are a few hundred thousand bags still in the hands of farmers, middlemen, exporters and processors. There are doubts as to how much of this cocoa would be fit for export as shippers are now experiencing dificulties in obtaining +Bahia superior+ certificates. 
In view of the lower quality over recent weeks farmers have sold a good part of their cocoa held on consignment. Comissaria Smith said spot bean prices rose to 340 to 350 cruzados per arroba of 15 kilos. Bean shippers were reluctant to offer nearby shipment and only limited sales were booked for March shipment at 1,750 to 1,780 dlrs per tonne to ports to be named. New crop sales were also light and all to open ports with June/July going at 1,850 and 1,880 dlrs and at 35 and 45 dlrs under New York july, Aug/Sept at 1,870, 1,875 and 1,880 dlrs per tonne FOB. Routine sales of butter were made. March/April sold at 4,340, 4,345 and 4,350 dlrs. April/May butter went at 2.27 times New York May, June/July at 4,400 and 4,415 dlrs, Aug/Sept at 4,351 to 4,450 dlrs and at 2.27 and 2.28 times New York Sept and Oct/Dec at 4,480 dlrs and 2.27 times New York Dec, Comissaria Smith said. Destinations were the U.S., Covertible currency areas, Uruguay and open ports. Cake sales were registered at 785 to 995 dlrs for March/April, 785 dlrs for May, 753 dlrs for Aug and 0.39 times New York Dec for Oct/Dec. Buyers were the U.S., Argentina, Uruguay and convertible currency areas. Liquor sales were limited with March/April selling at 2,325 and 2,380 dlrs, June/July at 2,375 dlrs and at 1.25 times New York July, Aug/Sept at 2,400 dlrs and at 1.25 times New York Sept and Oct/Dec at 1.25 times New York Dec, Comissaria Smith said. Total Bahia sales are currently estimated at 6.13 mln bags against the 1986/87 crop and 1.06 mln bags against the 1987/88 crop. Final figures for the period to February 28 are expected to be published by the Brazilian Cocoa Trade Commission after carnival which ends midday on February 27. Reuter 26-FEB-1987 15:02:20.00 usa F Y f0708reute d f BC-STANDARD-OIL-<SRD>-TO 02-26 0082 STANDARD OIL <SRD> TO FORM FINANCIAL UNIT CLEVELAND, Feb 26 - Standard Oil Co and BP North America Inc said they plan to form a venture to manage the money market borrowing and investment activities of both companies. BP North America is a subsidiary of British Petroleum Co Plc <BP>, which also owns a 55 pct interest in Standard Oil. The venture will be called BP/Standard Financial Trading and will be operated by Standard Oil under the oversight of a joint management committee. Reuter 26-FEB-1987 15:03:27.51 usa F A f0714reute d f BC-TEXAS-COMMERCE-BANCSH 02-26 0064 TEXAS COMMERCE BANCSHARES <TCB> FILES PLAN HOUSTON, Feb 26 - Texas Commerce Bancshares Inc's Texas Commerce Bank-Houston said it filed an application with the Comptroller of the Currency in an effort to create the largest banking network in Harris County. The bank said the network would link 31 banks having 13.5 billion dlrs in assets and 7.5 billion dlrs in deposits. Reuter 26-FEB-1987 15:07:13.72 usabrazil F f0725 reute u f BC-TALKING-POINT/BANKAME 02-26 0105 TALKING POINT/BANKAMERICA <BAC> EQUITY OFFER by Janie Gabbett, Reuters LOS ANGELES, Feb 26 - BankAmerica Corp is not under pressure to act quickly on its proposed equity offering and would do well to delay it because of the stock's recent poor performance, banking analysts said. Some analysts said they have recommended BankAmerica delay its up to one-billion-dlr equity offering, which has yet to be approved by the Securities and Exchange Commission. BankAmerica stock fell this week, along with other banking issues, on the news that Brazil has suspended interest payments on a large portion of its foreign debt. 
The stock traded around 12, down 1/8, this afternoon, after falling to 11-1/2 earlier this week on the news. Banking analysts said that with the immediate threat of the First Interstate Bancorp <I> takeover bid gone, BankAmerica is under no pressure to sell the securities into a market that will be nervous on bank stocks in the near term. BankAmerica filed the offer on January 26. It was seen as one of the major factors leading the First Interstate withdrawing its takeover bid on February 9. A BankAmerica spokesman said SEC approval is taking longer than expected and market conditions must now be re-evaluated. "The circumstances at the time will determine what we do," said Arthur Miller, BankAmerica's Vice President for Financial Communications, when asked if BankAmerica would proceed with the offer immediately after it receives SEC approval. "I'd put it off as long as they conceivably could," said Lawrence Cohn, analyst with Merrill Lynch, Pierce, Fenner and Smith. Cohn said the longer BankAmerica waits, the longer they have to show the market an improved financial outlook. Although BankAmerica has yet to specify the types of equities it would offer, most analysts believed a convertible preferred stock would encompass at least part of it. Such an offering at a depressed stock price would mean a lower conversion price and more dilution to BankAmerica stock holders, noted Daniel Williams, analyst with Sutro Group. Several analysts said that while they believe the Brazilian debt problem will continue to hang over the banking industry through the quarter, the initial shock reaction is likely to ease over the coming weeks. Nevertheless, BankAmerica, which holds about 2.70 billion dlrs in Brazilian loans, stands to lose 15-20 mln dlrs if the interest rate is reduced on the debt, and as much as 200 mln dlrs if Brazil pays no interest for a year, said Joseph Arsenio, analyst with Birr, Wilson and Co. He noted, however, that any potential losses would not show up in the current quarter. With other major banks standing to lose even more than BankAmerica if Brazil fails to service its debt, the analysts said they expect the debt will be restructured, similar to way Mexico's debt was, minimizing losses to the creditor banks. Reuter 26-FEB-1987 15:10:44.60 grainwheatcornbarleyoatsorghum usa C G f0738 reute u f BC-average-prices 02-26 0095 NATIONAL AVERAGE PRICES FOR FARMER-OWNED RESERVE WASHINGTON, Feb 26 - The U.S. Agriculture Department reported the farmer-owned reserve national five-day average price through February 25 as follows (Dlrs/Bu-Sorghum Cwt) - Natl Loan Release Call Avge Rate-X Level Price Price Wheat 2.55 2.40 IV 4.65 -- V 4.65 -- VI 4.45 -- Corn 1.35 1.92 IV 3.15 3.15 V 3.25 -- X - 1986 Rates. Natl Loan Release Call Avge Rate-X Level Price Price Oats 1.24 0.99 V 1.65 -- Barley n.a. 1.56 IV 2.55 2.55 V 2.65 -- Sorghum 2.34 3.25-Y IV 5.36 5.36 V 5.54 -- Reserves I, II and III have matured. Level IV reflects grain entered after Oct 6, 1981 for feedgrain and after July 23, 1981 for wheat. Level V wheat/barley after 5/14/82, corn/sorghum after 7/1/82. Level VI covers wheat entered after January 19, 1984. X-1986 rates. Y-dlrs per CWT (100 lbs). n.a.-not available. 
Reuter 26-FEB-1987 15:14:36.41 veg-oillinseedlin-oilsoy-oilsun-oilsoybeanoilseedcornsunseedgrainsorghumwheat argentina G f0754 reute r f BC-ARGENTINE-1986/87-GRA 02-26 0066 ARGENTINE 1986/87 GRAIN/OILSEED REGISTRATIONS BUENOS AIRES, Feb 26 - Argentine grain board figures show crop registrations of grains, oilseeds and their products to February 11, in thousands of tonnes, showing those for futurE shipments month, 1986/87 total and 1985/86 total to February 12, 1986, in brackets: Bread wheat prev 1,655.8, Feb 872.0, March 164.6, total 2,692.4 (4,161.0). Maize Mar 48.0, total 48.0 (nil). Sorghum nil (nil) Oilseed export registrations were: Sunflowerseed total 15.0 (7.9) Soybean May 20.0, total 20.0 (nil) The board also detailed export registrations for subproducts, as follows, SUBPRODUCTS Wheat prev 39.9, Feb 48.7, March 13.2, Apr 10.0, total 111.8 (82.7) . Linseed prev 34.8, Feb 32.9, Mar 6.8, Apr 6.3, total 80.8 (87.4). Soybean prev 100.9, Feb 45.1, MAr nil, Apr nil, May 20.0, total 166.1 (218.5). Sunflowerseed prev 48.6, Feb 61.5, Mar 25.1, Apr 14.5, total 149.8 (145.3). Vegetable oil registrations were : Sunoil prev 37.4, Feb 107.3, Mar 24.5, Apr 3.2, May nil, Jun 10.0, total 182.4 (117.6). Linoil prev 15.9, Feb 23.6, Mar 20.4, Apr 2.0, total 61.8, (76.1). Soybean oil prev 3.7, Feb 21.1, Mar nil, Apr 2.0, May 9.0, Jun 13.0, Jul 7.0, total 55.8 (33.7). REUTER 26-FEB-1987 15:14:42.83 usa F f0755 reute d f BC-RED-LION-INNS-FILES-P 02-26 0082 RED LION INNS FILES PLANS OFFERING PORTLAND, Ore., Feb 26 - Red Lion Inns Limited Partnership said it filed a registration statement with the Securities and Exchange Commission covering a proposed offering of 4,790,000 units of limited partnership interests. The company said it expects the offering to be priced at 20 dlrs per unit. It said proceeds from the offering, along with a 102.5 mln dlr mortgage loan, will be used to finance its planned acquisition of 10 Red Lion hotels. Reuter 26-FEB-1987 15:15:40.12 usa F A RM f0758 reute u f BC-USX-<X>-DEBT-DOWGRADE 02-26 0103 USX <X> DEBT DOWGRADED BY MOODY'S NEW YORK, Feb 26 - Moody's Investors Service Inc said it lowered the debt and preferred stock ratings of USX Corp and its units. About seven billion dlrs of securities is affected. Moody's said Marathon Oil Co's recent establishment of up to one billion dlrs in production payment facilities on its prolific Yates Field has significant negative implications for USX's unsecured creditors. The company appears to have positioned its steel segment for a return to profit by late 1987, Moody's added. Ratings lowered include those on USX's senior debt to BA-1 from BAA-3. Reuter 26-FEB-1987 15:17:11.20 earn usa F f0762 reute r f BC-CHAMPION-PRODUCTS-<CH 02-26 0067 CHAMPION PRODUCTS <CH> APPROVES STOCK SPLIT ROCHESTER, N.Y., Feb 26 - Champion Products Inc said its board of directors approved a two-for-one stock split of its common shares for shareholders of record as of April 1, 1987. The company also said its board voted to recommend to shareholders at the annual meeting April 23 an increase in the authorized capital stock from five mln to 25 mln shares. Reuter 26-FEB-1987 15:18:06.67 acq usa F f0767 reute d f BC-COMPUTER-TERMINAL-SYS 02-26 0107 COMPUTER TERMINAL SYSTEMS <CPML> COMPLETES SALE COMMACK, N.Y., Feb 26 - Computer Terminal Systems Inc said it has completed the sale of 200,000 shares of its common stock, and warrants to acquire an additional one mln shares, to <Sedio N.V.> of Lugano, Switzerland for 50,000 dlrs. 
The company said the warrants are exercisable for five years at a purchase price of .125 dlrs per share. Computer Terminal said Sedio also has the right to buy additional shares and increase its total holdings up to 40 pct of the Computer Terminal's outstanding common stock under certain circumstances involving change of control at the company. The company said if the conditions occur the warrants would be exercisable at a price equal to 75 pct of its common stock's market price at the time, not to exceed 1.50 dlrs per share. Computer Terminal also said it sold the technolgy rights to its Dot Matrix impact technology, including any future improvements, to <Woodco Inc> of Houston, Tex. for 200,000 dlrs. But, it said it would continue to be the exclusive worldwide licensee of the technology for Woodco. The company said the moves were part of its reorganization plan and would help pay current operation costs and ensure product delivery. Computer Terminal makes computer generated labels, forms, tags and ticket printers and terminals. Reuter tm/inst/texts/loremipsum.txt0000644000175100001440000000622212074065307015777 0ustar hornikusersLorem ipsum dolor sit amet, consectetur adipiscing elit. Sed at ante. Mauris eleifend, quam a vulputate dictum, massa quam dapibus leo, eget vulputate orci purus ut lorem. In fringilla mi in ligula. Pellentesque aliquam quam vel dolor. Nunc adipiscing. Sed quam odio, tempus ac, aliquam molestie, varius ac, tellus. Vestibulum ut nulla aliquam risus rutrum interdum. Pellentesque lorem. Curabitur sit amet erat quis risus feugiat viverra. Pellentesque augue justo, sagittis et, lacinia at, venenatis non, arcu. Nunc nec libero. In cursus dictum risus. Etiam tristique nisl a nulla. Ut a orci. Curabitur dolor nunc, egestas at, accumsan at, malesuada nec, magna. Nulla facilisi. Nunc volutpat. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Ut sit amet orci vel mauris blandit vehicula. Nullam quis enim. Integer dignissim viverra velit. Curabitur in odio. In hac habitasse platea dictumst. Ut consequat, tellus eu volutpat varius, justo orci elementum dolor, sed imperdiet nulla tellus ut diam. Vestibulum ipsum ante, malesuada quis, tempus ac, placerat sit amet, elit. Sed eget turpis a pede tempor malesuada. Vivamus quis mi at leo pulvinar hendrerit. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Pellentesque aliquet lacus vitae pede. Nullam mollis dolor ac nisi. Phasellus sit amet urna. Praesent pellentesque sapien sed lacus. Donec lacinia odio in odio. In sit amet elit. Maecenas gravida interdum urna. Integer pretium, arcu vitae imperdiet facilisis, elit tellus tempor nisi, vel feugiat ante velit sit amet mauris. Vivamus arcu. Integer pharetra magna ac lacus. Aliquam vitae sapien in nibh vehicula auctor. Suspendisse leo mauris, pulvinar sed, tempor et, consequat ac, lacus. Proin velit. Nulla semper lobortis mauris. Duis urna erat, ornare et, imperdiet eu, suscipit sit amet, massa. Nulla nulla nisi, pellentesque at, egestas quis, fringilla eu, diam. Donec semper, sem nec tristique tempus, justo neque commodo nisl, ut gravida sem tellus suscipit nunc. Aliquam erat volutpat. Ut tincidunt pretium elit. Aliquam pulvinar. Nulla cursus. Suspendisse potenti. Etiam condimentum hendrerit felis. Duis iaculis aliquam enim. Donec dignissim augue vitae orci. Curabitur luctus felis a metus. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. In varius neque at enim. 
Suspendisse massa nulla, viverra in, bibendum vitae, tempor quis, lorem. Donec dapibus orci sit amet elit. Maecenas rutrum ultrices lectus. Aliquam suscipit, lacus a iaculis adipiscing, eros orci pellentesque nisl, non pharetra dolor urna nec dolor. Integer cursus dolor vel magna. Integer ultrices feugiat sem. Proin nec nibh. Duis eu dui quis nunc sagittis lobortis. Fusce pharetra, enim ut sodales luctus, lectus arcu rhoncus purus, in fringilla augue elit vel lacus. In hac habitasse platea dictumst. Aliquam erat volutpat. Fusce iaculis elit id tellus. Ut accumsan malesuada turpis. Suspendisse potenti. Vestibulum lacus augue, lobortis mattis, laoreet in, varius at, nisi. Nunc gravida. Phasellus faucibus. In hac habitasse platea dictumst. Integer tempor lacus eget lectus. Praesent fringilla augue fringilla dui. tm/inst/texts/txt/0000755000175100001440000000000012213264556013661 5ustar hornikuserstm/inst/texts/txt/ovid_2.txt0000644000175100001440000000131612074065306015602 0ustar hornikusers quas Hector sensurus erat, poscente magistro verberibus iussas praebuit ille manus. Aeacidae Chiron, ego sum praeceptor Amoris: saevus uterque puer, natus uterque dea. sed tamen et tauri cervix oneratur aratro, frenaque magnanimi dente teruntur equi; et mihi cedet Amor, quamvis mea vulneret arcu pectora, iactatas excutiatque faces. quo me fixit Amor, quo me violentius ussit, hoc melior facti vulneris ultor ero: non ego, Phoebe, datas a te mihi mentiar artes, nec nos aëriae voce monemur avis, nec mihi sunt visae Clio Cliusque sorores servanti pecudes vallibus, Ascra, tuis: usus opus movet hoc: vati parete perito; tm/inst/texts/txt/ovid_1.txt0000644000175100001440000000126412074065306015603 0ustar hornikusers Si quis in hoc artem populo non novit amandi, hoc legat et lecto carmine doctus amet. arte citae veloque rates remoque moventur, arte leves currus: arte regendus amor. curribus Automedon lentisque erat aptus habenis, Tiphys in Haemonia puppe magister erat: me Venus artificem tenero praefecit Amori; Tiphys et Automedon dicar Amoris ego. ille quidem ferus est et qui mihi saepe repugnet: sed puer est, aetas mollis et apta regi. Phillyrides puerum cithara perfecit Achillem, atque animos placida contudit arte feros. qui totiens socios, totiens exterruit hostes, creditur annosum pertimuisse senem. tm/inst/texts/txt/ovid_3.txt0000644000175100001440000000134412074065306015604 0ustar hornikusers vera canam: coeptis, mater Amoris, ades! este procul, vittae tenues, insigne pudoris, quaeque tegis medios, instita longa, pedes. nos venerem tutam concessaque furta canemus, inque meo nullum carmine crimen erit. principio, quod amare velis, reperire labora, qui nova nunc primum miles in arma venis. proximus huic labor est placitam exorare puellam: tertius, ut longo tempore duret amor. hic modus, haec nostro signabitur area curru: haec erit admissa meta terenda rota. dum licet, et loris passim potes ire solutis, elige cui dicas 'tu mihi sola places.' haec tibi non tenues veniet delapsa per auras: quaerenda est oculis apta puella tuis. tm/inst/texts/txt/ovid_5.txt0000644000175100001440000000131612074065306015605 0ustar hornikusers mater in Aeneae constitit urbe sui. seu caperis primis et adhuc crescentibus annis, ante oculos veniet vera puella tuos: sive cupis iuvenem, iuvenes tibi mille placebunt. cogeris voti nescius esse tui: seu te forte iuvat sera et sapientior aetas, hoc quoque, crede mihi, plenius agmen erit. 
tu modo Pompeia lentus spatiare sub umbra, cum sol Herculei terga leonis adit: aut ubi muneribus nati sua munera mater addidit, externo marmore dives opus. nec tibi vitetur quae, priscis sparsa tabellis, porticus auctoris Livia nomen habet: quaque parare necem miseris patruelibus ausae Belides et stricto stat ferus ense pater. tm/inst/texts/txt/ovid_4.txt0000644000175100001440000000137412074065306015610 0ustar hornikusers scit bene venator, cervis ubi retia tendat, scit bene, qua frendens valle moretur aper; aucupibus noti frutices; qui sustinet hamos, novit quae multo pisce natentur aquae: tu quoque, materiam longo qui quaeris amori, ante frequens quo sit disce puella loco. non ego quaerentem vento dare vela iubebo, nec tibi, ut invenias, longa terenda via est. Andromedan Perseus nigris portarit ab Indis, raptaque sit Phrygio Graia puella viro, tot tibi tamque dabit formosas Roma puellas, 'Haec habet' ut dicas 'quicquid in orbe fuit.' Gargara quot segetes, quot habet Methymna racemos, aequore quot pisces, fronde teguntur aves, quot caelum stellas, tot habet tua Roma puellas: tm/inst/texts/crude/0000755000175100001440000000000012213264556014144 5ustar hornikuserstm/inst/texts/crude/reut-00012.xml0000644000175100001440000000543412074065306016310 0ustar hornikusers 2-MAR-1987 01:05:49.72 crude saudi-arabia uae opec RM f0600 reute b f BC-SAUDI-FEBRUARY-CRUDE 03-02 0095 SAUDI FEBRUARY CRUDE OUTPUT PUT AT 3.5 MLN BPD ABU DHABI, March 2 - Saudi crude oil output last month fell to an average of 3.5 mln barrels per day (bpd) from 3.8 mln bpd in January, Gulf oil sources said. They said exports from the Ras Tanurah and Ju'aymah terminals in the Gulf fell to an average 1.9 mln bpd last month from 2.2 mln in January because of lower liftings by some customers. But the drop was much smaller than expected after Gulf exports rallied in the fourth week of February to 2.5 mln bpd from 1.2 mln in the third week, the sources said. The production figures include neutral zone output but not sales from floating storage, which are generally considered part of a country's output for Opec purposes. Saudi Arabia has an Opec quota of 4.133 mln bpd under a production restraint scheme approved by the 13-nation group last December to back new official oil prices averaging 18 dlrs a barrel. The sources said the two-fold jump in exports last week appeared to be the result of buyers rushing to lift February entitlements before the month-end. Last week's high export levels appeared to show continued support for official Opec prices from Saudi Arabia's main crude customers, the four ex-partners of Aramco, the sources said. The four -- Exxon Corp <XON>, Mobil Corp <MOB>, Texaco Inc <TX> and Chevron Corp <CHV> -- signed a long-term agreement last month to buy Saudi crude for 17.52 dlrs a barrel. However the sources said the real test of Saudi Arabia's ability to sell crude at official prices in a weak market will come this month, when demand for petroleum products traditionally tapers off. Spot prices have fallen in recent weeks to more than one dlr below Opec levels. Saudi Arabian oil minister Hisham Nazer yesterday reiterated the kingdom's commitment to the December OPEC accord and said it would never sell below official prices. The sources said total Saudi refinery throughput fell slightly in February to an average 1.1 mln bpd from 1.2 mln in January because of cuts at the Yanbu and Jubail export refineries. 
They put crude oil exports through Yanbu at 100,000 bpd last month, compared to zero in January, while throughput at Bahrain's refinery and neutral zone production remained steady at around 200,000 bpd each. REUTER tm/inst/texts/crude/reut-00014.xml0000644000175100001440000000227212074065306016307 0ustar hornikusers 2-MAR-1987 07:43:22.81 crude saudi-arabia bahrain hisham-nazer opec F f0161 reute r f AM-OIL-SAUDI 03-02 0114 SAUDI ARABIA REITERATES COMMITMENT TO OPEC ACCORD BAHRAIN, March 2 - Saudi Arabian Oil Minister Hisham Nazer reiterated the kingdom's commitment to last December's OPEC accord to boost world oil prices and stabilize the market, the official Saudi Press Agency SPA said. Asked by the agency about the recent fall in free market oil prices, Nazer said Saudi Arabia "is fully adhering by the ... accord and it will never sell its oil at prices below the pronounced prices under any circumstance." Saudi Arabia was a main architect of December pact under which OPEC agreed to cut its total oil output ceiling by 7.25 pct and return to fixed prices of around 18 dollars a barrel. Reuter tm/inst/texts/crude/reut-00022.xml0000644000175100001440000000453612074065306016313 0ustar hornikusers 2-MAR-1987 14:38:34.72 crude usa nymex Y f0753 reute r f BC-NYMEX-WILL-EXPAND-OFF 03-02 0103 NYMEX WILL EXPAND OFF-HOUR TRADING APRIL ONE By BERNICE NAPACH, Reuters NEW YORK, March 2 - The New York Mercantile Exchange set April one for the debut of a new procedure in the energy complex that will increase the use of energy futures worldwide. On April one, NYMEX will allow oil traders that do not hold a futures position to initiate, after the exchange closes, a transaction that can subsequently be hedged in the futures market, according to an exchange spokeswoman. "This will change the way oil is transacted in the real world," said said Thomas McKiernan, McKiernan and Co chairman. Foreign traders will be able to hedge trades against NYMEX prices before the exchange opens and negotiate prices at a differential to NYMEX prices, McKiernan explained. The expanded program "will serve the industry because the oil market does not close when NYMEX does," said Frank Capozza, secretary of Century Resources Inc. The rule change, which has already taken effect for platinum futures on NYMEX, is expected to increase the open interest and liquidity in U.S. energy futures, according to traders and analysts. Currently, at least one trader in this transaction, called an exchange for physical or EFP, must hold a futures position before entering into the transaction. Under the new arrangement, neither party has to hold a futures position before entering into an EFP and one or both parties can offset their cash transaction with a futures contract the next day, according to exchange officials. When NYMEX announced its proposed rule change in December, NYMEX President Rosemary McFadden, said, "Expansion of the EFP provision will add to globalization of the energy markets by providing for, in effect, 24-hour trading." The Commodity Futures Trading Commission approved the rule change in February, according to a CFTC spokeswoman. Reuter tm/inst/texts/crude/reut-00004.xml0000644000175100001440000000151512074065306016305 0ustar hornikusers 26-FEB-1987 18:18:00.84 crude canada Y E f0308 reute u f BC-TEXACO-CANADA-<TXC>-L 02-26 0064 TEXACO CANADA <TXC> LOWERS CRUDE POSTINGS NEW YORK, Feb 26 - Texaco Canada said it lowered the contract price it will pay for crude oil 64 Canadian cts a barrel, effective today. 
The decrease brings the company's posted price for the benchmark grade, Edmonton/Swann Hills Light Sweet, to 22.26 Canadian dlrs a bbl. Texaco Canada last changed its crude oil postings on Feb 19. Reuter tm/inst/texts/crude/reut-00023.xml0000644000175100001440000000156512074065306016313 0ustar hornikusers 2-MAR-1987 14:49:06.33 crude nat-gas argentina Y f0783 reute u f BC-ARGENTINE-OIL-PRODUCT 03-02 0071 ARGENTINE OIL PRODUCTION DOWN IN JANUARY 1987 BUENOS AIRES, March 2 - Argentine crude oil production was down 10.8 pct in January 1987 to 12.32 mln barrels, from 13.81 mln barrels in January 1986, Yacimientos Petroliferos Fiscales said. January 1987 natural gas output totalled 1.15 billion cubic metrers, 3.6 pct higher than 1.11 billion cubic metres produced in January 1986, Yacimientos Petroliferos Fiscales added. Reuter tm/inst/texts/crude/reut-00019.xml0000644000175100001440000000320212074065306016306 0ustar hornikusers 2-MAR-1987 11:28:26.03 crude usa Y f0976 reute d f BC-STUDY-GROUP-URGES-INC 03-02 0099 STUDY GROUP URGES INCREASED U.S. OIL RESERVES WASHINGTON, March 2 - A study group said the United States should increase its strategic petroleum reserve to one mln barrels as one way to deal with the present and future impact of low oil prices on the domestic oil industry. U.S. policy now is to raise the strategic reserve to 750 mln barrels, from its present 500 mln, to help protect the economy from an overseas embargo or a sharp price rise. The Aspen Institute for Humanistic Studies, a private group, also called for new research for oil exploration and development techniques. It predicted prices would remain at about 15-18 dlrs a barrel for several years and then rise to the mid 20s, with imports at about 30 pct of U.S. consumption. The study cited two basic policy paths for the nation: to protect the U.S. industry through an import fee or other such device or to accept the full economic benefits of cheap oil. But the group did not strongly back either option, saying there were benefits and drawbacks to both. It said instead that such moves as increasing oil reserves and more exploration and development research would help to guard against or mitigate the risks of increased imports. Reuter tm/inst/texts/crude/reut-00010.xml0000644000175100001440000000511712074065306016304 0ustar hornikusers 1-MAR-1987 08:22:30.94 crude qatar RM f0413 reute u f BC-QATAR-UNVEILS-BUDGET 03-01 0111 QATAR UNVEILS BUDGET FOR FISCAL 1987/88 DOHA, March 1 - The Gulf oil state of Qatar, recovering slightly from last year's decline in world oil prices, announced its first budget since early 1985 and projected a deficit of 5.472 billion riyals. The deficit compared with a shortfall of 7.3 billion riyals in the last published budget for 1985/86. In a statement outlining the budget for the fiscal year 1987/88 beginning today, Finance and Petroleum Minister Sheikh Abdul-Aziz bin Khalifa al-Thani said the government expected to spend 12.217 billion riyals in the period. Projected expenditure in the 1985/86 budget had been 15.6 billion riyals. Sheikh Abdul-Aziz said government revenue would be about 6.745 billion riyals, down by about 30 pct on the 1985/86 projected revenue of 9.7 billion. The government failed to publish a 1986/87 budget due to uncertainty surrounding oil revenues. Sheikh Abdul-Aziz said that during that year the government decided to limit recurrent expenditure each month to one-twelfth of the previous fiscal year's allocations minus 15 pct. 
He urged heads of government departments and public institutions to help the government rationalise expenditure. He did not say how the 1987/88 budget shortfall would be covered. Sheikh Abdul-Aziz said plans to limit expenditure in 1986/87 had been taken in order to relieve the burden placed on the country's foreign reserves. He added in 1987/88 some 2.766 billion riyals had been allocated for major projects including housing and public buildings, social services, health, education, transport and communications, electricity and water, industry and agriculture. No figure was revealed for expenditure on defence and security. There was also no projection for oil revenue. Qatar, an OPEC member, has an output ceiling of 285,000 barrels per day. Sheikh Abdul-Aziz said: "Our expectations of positive signs regarding (oil) price trends, foremost among them OPEC's determination to shoulder its responsibilites and protect its wealth, have helped us make reasonable estimates for the coming year's revenue on the basis of our assigned quota." REUTER tm/inst/texts/crude/reut-00016.xml0000644000175100001440000000216612074065306016313 0ustar hornikusers 2-MAR-1987 08:25:42.14 crude ship usa Y F f0300 reute u f BC-PHILADELPHIA-PORT-CLO 03-02 0115 PHILADELPHIA PORT CLOSED BY TANKER CRASH PHILADELPHIA, March 2 - The port of Philadelphia was closed when a Cypriot oil tanker, Seapride II, ran aground after hitting a 200-foot tower supporting power lines across the river, a Coast Guard spokesman said. He said there was no oil spill but the ship is lodged on rocks opposite the Hope Creek nuclear power plant in New Jersey. He said the port would be closed until today when they hoped to refloat the ship on the high tide. After delivering oil to a refinery in Paulsboro, New Jersey, the ship apparently lost its steering and hit the power transmission line carrying power from the nuclear plant to the state of Delaware. Reuter tm/inst/texts/crude/reut-00008.xml0000644000175100001440000000654412074065306016320 0ustar hornikusers 1-MAR-1987 03:39:14.63 crude indonesia usa worldbank RM f0379 reute u f BC-INDONESIA-SEEN-AT-CRO 03-01 0107 INDONESIA SEEN AT CROSSROADS OVER ECONOMIC CHANGE By Jeremy Clift, Reuters JAKARTA, March 1 - Indonesia appears to be nearing a political crossroads over measures to deregulate its protected economy, the U.S. Embassy says in a new report. To counter falling oil revenues, the government has launched a series of measures over the past nine months to boost exports outside the oil sector and attract new investment. Indonesia, the only Asian member of OPEC and a leading primary commodity producer, has been severely hit by last year"s fall in world oil prices, which forced it to devalue its currency by 31 pct in September. But the U.S. Embassy report says President Suharto"s government appears to be divided over what direction to lead the economy. "(It) appears to be nearing a crossroads with regard to deregulation, both as it pertains to investments and imports," the report says. It primarily assesses Indonesia"s agricultural sector, but also reviews the country"s general economic performance. It says that while many government officials and advisers are recommending further relaxation, "there are equally strong pressures being exerted to halt all such moves." "This group strongly favours an import substitution economy," the report says. 
Indonesia"s economic changes have been welcomed by the World Bank and international bankers as steps in the right direction, though they say crucial areas of the economy like plastics and steel remain highly protected, and virtual monopolies. Three sets of measures have been announced since last May, which broadened areas for foreign investment, reduced trade restrictions and liberalised imports. The report says Indonesia"s economic growth in calendar 1986 was probably about zero, and the economy may even have contracted a bit. "This is the lowest rate of growth since the mid-1960s," the report notes. Indonesia, the largest country in South-East Asia with a population of 168 million, is facing general elections in April. But the report hold out little hope for swift improvement in the economic outlook. "For 1987 early indications point to a slightly positive growth rate not exceeding one pct. Economic activity continues to suffer due to the sharp fall in export earnings from the petroleum industry." "Growth in the non-oil sector is low because of weak domestic demand coupled with excessive plant capacity, real declines in construction and trade, and a reduced level of growth in agriculture," the report states. Bankers say continuation of present economic reforms is crucial for the government to get the international lending its needs. A new World Bank loan of 300 mln dlrs last month in balance of payments support was given partly to help the government maintain the momentum of reform, the Bank said. REUTER tm/inst/texts/crude/reut-00009.xml0000644000175100001440000000270012074065306016307 0ustar hornikusers 1-MAR-1987 05:27:27.17 crude bahrain saudi-arabia opec RM f0401 reute u f BC-SAUDI-RIYAL-DEPOSIT-R 03-01 0108 SAUDI RIYAL DEPOSIT RATES REMAIN FIRM BAHRAIN, March 1 - Saudi riyal interbank deposits were steady at yesterday's higher levels in a quiet market. Traders said they were reluctant to take out new positions amidst uncertainty over whether OPEC will succeed in halting the current decline in oil prices. Oil industry sources said yesterday several Gulf Arab producers had had difficulty selling oil at official OPEC prices but Kuwait has said there are no plans for an emergency meeting of the 13-member organisation. A traditional Sunday lull in trading due to the European weekend also contributed to the lack of market activity. Spot-next and one-week rates were put at 6-1/4, 5-3/4 pct after quotes ranging between seven, six yesterday. One, three, and six-month deposits were quoted unchanged at 6-5/8, 3/8, 7-1/8, 6-7/8 and 7-3/8, 1/8 pct respectively. The spot riyal was quietly firmer at 3.7495/98 to the dollar after quotes of 3.7500/03 yesterday. REUTER tm/inst/texts/crude/reut-00007.xml0000644000175100001440000000647212074065306016317 0ustar hornikusers 1-MAR-1987 03:25:46.85 crude kuwait ecuador opec RM f0374 reute b f BC-KUWAIT-SAYS-NO-PLANS 03-01 0091 KUWAIT SAYS NO PLANS FOR EMERGENCY OPEC TALKS KUWAIT, March 1 - Kuwait"s Oil Minister, in remarks published today, said there were no plans for an emergency OPEC meeting to review oil policies after recent weakness in world oil prices. Sheikh Ali al-Khalifa al-Sabah was quoted by the local daily al-Qabas as saying: "None of the OPEC members has asked for such a meeting." He denied Kuwait was pumping above its quota of 948,000 barrels of crude daily (bpd) set under self-imposed production limits of the 13-nation organisation. 
Traders and analysts in international oil markets estimate OPEC is producing up to one mln bpd above a ceiling of 15.8 mln bpd agreed in Geneva last December. They named Kuwait and the United Arab Emirates, along with the much smaller producer Ecuador, among those producing above quota. Kuwait, they said, was pumping 1.2 mln bpd. "This rumour is baseless. It is based on reports which said Kuwait has the ability to exceed its share. They suppose that because Kuwait has the ability, it will do so," the minister said. Sheikh Ali has said before that Kuwait had the ability to produce up to 4.0 mln bpd. "If we can sell more than our quota at official prices, while some countries are suffering difficulties marketing their share, it means we in Kuwait are unusually clever," he said. He was referring apparently to the Gulf state of qatar, which industry sources said was selling less than 180,000 bpd of its 285,000 bpd quota, because buyers were resisting official prices restored by OPEC last month pegged to a marker of 18 dlrs per barrel. Prices in New York last week dropped to their lowest levels this year and almost three dollars below a three-month high of 19 dollars a barrel. Sheikh Ali also delivered "a challenge to any international oil company that declared Kuwait sold below official prices." Because it was charging its official price, of 16.67 dlrs a barrel, it had lost custom, he said but did not elaborate. However, Kuwait had guaranteed markets for its oil because of its local and international refining facilities and its own distribution network abroad, he added. He reaffirmed that the planned meeting March 7 of OPEC"s differentials committee has been postponed until the start of April at the request of certain of the body"s members. Ecuador"s deputy energy minister Fernando Santos Alvite said last Wednesday his debt-burdened country wanted OPEC to assign a lower official price for its crude, and was to seek this at talks this month of opec"s pricing committee. Referring to pressure by oil companies on OPEC members, in apparent reference to difficulties faced by Qatar, he said: "We expected such pressure. It will continue through March and April." But he expected the situation would later improve. REUTER tm/inst/texts/crude/reut-00001.xml0000644000175100001440000000200512074065306016275 0ustar hornikusers 26-FEB-1987 17:00:56.04 crude usa Y f0119 reute u f BC-DIAMOND-SHAMROCK-(DIA 02-26 0097 DIAMOND SHAMROCK (DIA) CUTS CRUDE PRICES NEW YORK, FEB 26 - Diamond Shamrock Corp said that effective today it had cut its contract prices for crude oil by 1.50 dlrs a barrel. The reduction brings its posted price for West Texas Intermediate to 16.00 dlrs a barrel, the copany said. "The price reduction today was made in the light of falling oil product prices and a weak crude oil market," a company spokeswoman said. Diamond is the latest in a line of U.S. oil companies that have cut its contract, or posted, prices over the last two days citing weak oil markets. Reuter tm/inst/texts/crude/reut-00018.xml0000644000175100001440000000254012074065306016311 0ustar hornikusers 2-MAR-1987 11:20:05.52 crude usa C f0937 reute d f BC-STUDY-GROUP-URGES-INC 03-02 0156 STUDY GROUP URGES INCREASED U.S. OIL RESERVES WASHINGTON, March 2 - A study group said the United States should increase its strategic petroleum reserve to one mln barrels as one way to deal with the present and future impact of low oil prices on the domestic oil industry. U.S. 
policy now is to raise the strategic reserve to 750 mln barrels, from its present 500 mln, to help protect the economy from an overseas embargo or a sharp price rise. The Aspen Institute for Humanistic Studies, a private group, also called for new research for oil exploration and development techniques. It predicted prices would remain at about 15-18 dlrs a barrel for several years and then rise to the mid 20s, with imports at about 30 pct of U.S. consumption. It said instead that such moves as increasing oil reserves and more exploration and development research would help to guard against or mitigate the risks of increased imports. Reuter tm/inst/texts/crude/reut-00002.xml0000644000175100001440000000634512074065306016311 0ustar hornikusers 26-FEB-1987 17:34:11.89 crude usa opec Y f0189 reute r f BC-/OPEC-MAY-HAVE-TO-MEE 02-26 0105 OPEC MAY HAVE TO MEET TO FIRM PRICES - ANALYSTS BY TED D'AFFLISIO, Reuters NEW YORK, Feb 26 - OPEC may be forced to meet before a scheduled June session to readdress its production cutting agreement if the organization wants to halt the current slide in oil prices, oil industry analysts said. "The movement to higher oil prices was never to be as easy as OPEC thought. They may need an emergency meeting to sort out the problems," said Daniel Yergin, director of Cambridge Energy Research Associates, CERA. Analysts and oil industry sources said the problem OPEC faces is excess oil supply in world oil markets. "OPEC's problem is not a price problem but a production issue and must be addressed in that way," said Paul Mlotok, oil analyst with Salomon Brothers Inc. He said the market's earlier optimism about OPEC and its ability to keep production under control have given way to a pessimistic outlook that the organization must address soon if it wishes to regain the initiative in oil prices. But some other analysts were uncertain that even an emergency meeting would address the problem of OPEC production above the 15.8 mln bpd quota set last December. "OPEC has to learn that in a buyers market you cannot have deemed quotas, fixed prices and set differentials," said the regional manager for one of the major oil companies who spoke on condition that he not be named. "The market is now trying to teach them that lesson again," he added. David T. Mizrahi, editor of Mideast reports, expects OPEC to meet before June, although not immediately. However, he is not optimistic that OPEC can address its principal problems. "They will not meet now as they try to take advantage of the winter demand to sell their oil, but in late March and April when demand slackens," Mizrahi said. But Mizrahi said that OPEC is unlikely to do anything more than reiterate its agreement to keep output at 15.8 mln bpd." Analysts said that the next two months will be critical for OPEC's ability to hold together prices and output. "OPEC must hold to its pact for the next six to eight weeks since buyers will come back into the market then," said Dillard Spriggs of Petroleum Analysis Ltd in New York. But Bijan Moussavar-Rahmani of Harvard University's Energy and Environment Policy Center said that the demand for OPEC oil has been rising through the first quarter and this may have prompted excesses in its production. "Demand for their (OPEC) oil is clearly above 15.8 mln bpd and is probably closer to 17 mln bpd or higher now so what we are seeing characterized as cheating is OPEC meeting this demand through current production," he told Reuters in a telephone interview. 
Reuter tm/inst/texts/crude/reut-00005.xml0000644000175100001440000000157412074065306016313 0ustar hornikusers 26-FEB-1987 18:21:01.50 crude usa Y f0313 reute u f BC-MARATHON-PETROLEUM-RE 02-26 0075 MARATHON PETROLEUM REDUCES CRUDE POSTINGS NEW YORK, Feb 26 - Marathon Petroleum Co said it reduced the contract price it will pay for all grades of crude oil one dlr a barrel, effective today. The decrease brings Marathon's posted price for both West Texas Intermediate and West Texas Sour to 16.50 dlrs a bbl. The South Louisiana Sweet grade of crude was reduced to 16.85 dlrs a bbl. The company last changed its crude postings on Jan 12. Reuter tm/inst/texts/crude/reut-00015.xml0000644000175100001440000000213712074065306016310 0ustar hornikusers 2-MAR-1987 07:43:41.57 crude kuwait opec V f0163 reute r f BC-OIL-KUWAIT 03-02 0109 KUWAIT MINISTER SAYS NO EMERGENCY OPEC TALKS SET KUWAIT, March 2 - Kuwait's oil minister said in a newspaper interview that there were no plans for an emergency OPEC meeting after the recent weakness in world oil prices. Sheikh Ali al-Khalifa al-Sabah was quoted by the local daily al-Qabas as saying that "none of the OPEC members has asked for such a meeting." He also denied that Kuwait was pumping above its OPEC quota of 948,000 barrels of crude daily (bpd). Crude oil prices fell sharply last week as international oil traders and analysts estimated the 13-nation OPEC was pumping up to one million bpd over its self-imposed limits. Reuter tm/inst/texts/crude/reut-00006.xml0000644000175100001440000000204712074065306016310 0ustar hornikusers 26-FEB-1987 19:00:57.33 crude usa F Y f0379 reute d f BC-HOUSTON-OIL-<HO>-RESE 02-26 0101 HOUSTON OIL <HO> RESERVES STUDY COMPLETED HOUSTON, Feb 26 - Houston Oil Trust said that independent petroleum engineers completed an annual study that estimates the trust's future net revenues from total proved reserves at 88 mln dlrs and its discounted present value of the reserves at 64 mln dlrs. Based on the estimate, the trust said there may be no money available for cash distributions to unitholders for the remainder of the year. It said the estimates reflect a decrease of about 44 pct in net reserve revenues and 39 pct in discounted present value compared with the study made in 1985. Reuter tm/inst/texts/crude/reut-00013.xml0000644000175100001440000000225012074065306016302 0ustar hornikusers 2-MAR-1987 07:39:23.30 crude uae bahrain saudi-arabia kuwait qatar opec V f0149 reute r f BC-GULF-ARAB-DEPUTY-OIL 03-02 0110 GULF ARAB DEPUTY OIL MINISTERS TO MEET IN BAHRAIN ABU DHABI, March 2 - Deputy oil ministers from six Gulf Arab states will meet in Bahrain today to discuss coordination of crude oil marketing, the official Emirates news agency WAM reported. WAM said the officials would be discussing implementation of last Sunday's agreement in Doha by Gulf Cooperation Council (GCC) oil ministers to help each other market their crude oil. Four of the GCC states - Saudi Arabia, the United Arab Emirates (UAE), Kuwait and Qatar - are members of the Organiaation of Petroleum Exporting Countries (OPEC) and some face stiff buyer resistance to official OPEC prices. 
Reuter tm/inst/texts/crude/reut-00021.xml0000644000175100001440000000173512074065306016310 0ustar hornikusers 2-MAR-1987 12:13:46.82 crude usa Y F f0206 reute r f BC-UNOCAL-<UCL>-UNIT-CUT 03-02 0088 UNOCAL <UCL> UNIT CUTS CRUDE OIL POSTED PRICES LOS ANGELES, March 2 - Unocal Corp's Union Oil Co said it lowered its posted prices for crude oil one to 1.50 dlrs a barrel in the eastern region of the U.S., effective Feb 26. Union said a 1.50 dlrs cut brings its posted price for the U.S. benchmark grade, West Texas Intermediate, to 16 dlrs. Louisiana Sweet also was lowered 1.50 dlrs to 16.35 dlrs, the company said. No changes were made in Union's posted prices for West Coast grades of crude oil, the company said. Reuter tm/inst/texts/crude/reut-00011.xml0000644000175100001440000000521412074065306016303 0ustar hornikusers 1-MAR-1987 18:31:44.74 crude bahrain saudi-arabia hisham-nazer opec RM f0427 reute b f BC-SAUDI-ARABIA-REITERAT 03-01 0084 SAUDI ARABIA REITERATES COMMITMENT TO OPEC PACT BAHRAIN, March 1 - Saudi Arabian Oil Minister Hisham Nazer reiterated the kingdom's commitment to last December's OPEC accord to boost world oil prices and stabilise the market, the official Saudi Press Agency SPA said. Asked by the agency about the recent fall in free market oil prices, Nazer said Saudi Arabia "is fully adhering by the ... Accord and it will never sell its oil at prices below the pronounced prices under any circumstance." Nazer, quoted by SPA, said recent pressure on free market prices "may be because of the end of the (northern hemisphere) winter season and the glut in the market." Saudi Arabia was a main architect of the December accord, under which OPEC agreed to lower its total output ceiling by 7.25 pct to 15.8 mln barrels per day (bpd) and return to fixed prices of around 18 dlrs a barrel. The agreement followed a year of turmoil on oil markets, which saw prices slump briefly to under 10 dlrs a barrel in mid-1986 from about 30 dlrs in late 1985. Free market prices are currently just over 16 dlrs. Nazer was quoted by the SPA as saying Saudi Arabia's adherence to the accord was shown clearly in the oil market. He said contacts among members of OPEC showed they all wanted to stick to the accord. In Jamaica, OPEC President Rilwanu Lukman, who is also Nigerian Oil Minister, said the group planned to stick with the pricing agreement. "We are aware of the negative forces trying to manipulate the operations of the market, but we are satisfied that the fundamentals exist for stable market conditions," he said. Kuwait's Oil Minister, Sheikh Ali al-Khalifa al-Sabah, said in remarks published in the emirate's daily Al-Qabas there were no plans for an emergency OPEC meeting to review prices. Traders and analysts in international oil markets estimate OPEC is producing up to one mln bpd above the 15.8 mln ceiling. They named Kuwait and the United Arab Emirates, along with the much smaller producer Ecuador, among those producing above quota. Sheikh Ali denied that Kuwait was over-producing. REUTER tm/inst/texts/custom.xml0000644000175100001440000000070512074065307015076 0ustar hornikusers Ano Nymous The Invisible Man A story about an invisible man. Science fiction Sokrates Scio Nescio I know that I know nothing. 
Classics tm/inst/texts/acq/0000755000175100001440000000000012213264556013606 5ustar hornikuserstm/inst/texts/acq/reut-00053.xml0000644000175100001440000000667512074065306015767 0ustar hornikusers 2-MAR-1987 11:23:31.27 acq usa F f0955 reute u f BC-VIACOM 03-02 0104 REDSTONE DETAILS SWEETENED VIACOM <VIA> OFFER WASHINGTON, March 2 - Investor Sumner Redstone, who leads one of the two groups vying for control of Viacom International Inc, offered to sweeten his bid for the company by 1.50 dlrs a share cash and 1.50 dlrs in securities. In a filing with the Securities and Exchange Commission, Redstone, who controls Dedham, Mass.,-based National Amusements Inc, a theater chain operator, offered to raise the cash portion of its Viacom offer to 42 dlrs a share from 40.50 dlrs. Redstone also raised the face value of the preferred stock he is offering to 7.50 dlrs from six dlrs. The Redstone offer, which is being made through Arsenal Holdings Inc, a National Amusements subsidiary set up for that purpose, which also give Viacom shareholders one-fifth of a share of Arsenal common stock after the takeover. Viacom said earlier today it received revised takeover bids from Redstone and MCV Holdings Inc, a group led by Viacom management which is competing with Redstone for control of the company and already has a formal merger agreement with Viacom. The company did not disclose the details of the revised offers, but said a special committee of its board would review them later today. The Redstone group, which has a 19.5 pct stake in Viacom, and the management group, which has a 5.4 pct stake, have both agreed not to buy more shares of the company until a merger is completed, unless the purchases are part of a tender offer for at least half of the outstanding stock. The two rivals also signed confidentiality agreements, which give them access to Viacom's financial records provided they keep the information secret. In his SEC filing, Redstone, who estimated his cost of completing the takeover at 2.95 billion dlrs, said Bank of America is confident it can raise 2.275 billion dlrs. Besides the financing it would raise through a bank syndicate, Bank of America has also agreed to provide a separate 25 mln dlr for the limited purpose of partial financing and has committed to provide another 592 mln dlrs, Redstone said. Merrill Lynch, Pierce Fenner and Smith Inc has increased its underwriting commitment to 175 mln dlrs of subordinated financing debt for the Viacom takeover, from the 150 mln dlrs it agreed to underwrite earlier, Redstone said. Redstone said his group would contribute more than 475 mln dlrs in equity toward the takeover. The Redstone equity contribution to the takeover would consist of all of his group's 6,881,800 Viacom common shares and at least 118 mln dlrs cash, he said. The new offer, the second sweetened deal Redstone has proposed in his month-long bidding war with management, also contains newly drawn up proposed merger documents, he said. Last week, the management group submitted what it called its last offer for the company, valued at 3.1 mln dlrs and consisting of 38.50 dlrs a share cash, preferred stock valued at eight dlrs a share and equity in the new company. Redstone's previous offer had been valued at 3.2 billion dlrs. 
Reuter tm/inst/texts/acq/reut-00056.xml0000644000175100001440000000313212074065306015753 0ustar hornikusers 2-MAR-1987 11:29:26.84 acq usa F f0981 reute r f BC-CARBIDE-<UK>-LOOKS-TO 03-02 0095 CARBIDE <UK> LOOKS TO ACQUISITIONS FOR GROWTH NEW YORK, March 2 - Union Carbide Corp is looking to acquisitions and joint ventures to aid its chemicals and plastics growth, according to H.W. Lichtenberger, president of Chemicals and Plastics. Describing this as a major departure in the company's approach to commercial development, he told the annual new business forum of the Commercial Development Association "We are looking to acquisitions and joint ventures when they look like the fastest and most promising routes to the growth markets we've identified." Not very long ago Union Carbide had the attitude "that if we couldn't do it ourselves, it wasn't worth doing. Or, if it was worth doing, we had to go it alone," Lichtenberger explained. He said "there are times when exploiting a profitable market is done best with a partner. Nor do we see any need to plow resources into a technology we may not have if we can link up profitably with someone who is already there." He said Carbide has extended its catalyst business that way and is now extending its specialty chemicals business in the same way. Reuter tm/inst/texts/acq/reut-00027.xml0000644000175100001440000000255312074065306015757 0ustar hornikusers 2-MAR-1987 08:22:40.30 acq usa F f0290 reute r f BC-ROPAK-<ROPK>-HAS-34-P 03-02 0109 ROPAK <ROPK> HAS 34 PCT OF BUCKHORN <BKN> FULLERTON, Calif., March 2 - Ropak Corp said it received and accepted about 456,968 common shares and 527,035 Series A convertible preferred shares of Buckhorn Inc at four dlrs and 5.75 dlrs each respectively in response to its tender offer that expired Friday, and it now owns 34.4 pct of Buckhorn voting power. The company had owned 63,000 common and 25,100 preferred shares before starting the hostile tender. Ropak said it is borrowing the funds needed to buy the Buckhorn shares from its bank lender and will not need to use any funds that another bank had committed to provide under a margin loan. Ropak said it waived minimum acceptance requirements to buy the shares and intends to evaluate a number of possible ways of completing an acquisition of Buckhorn. It said it hopes that Buckhorn's board will reevaluate its position and enter into meaningful negotiations. Reuter tm/inst/texts/acq/reut-00042.xml0000644000175100001440000000217712074065306015756 0ustar hornikusers 2-MAR-1987 09:49:48.14 acq usa F f0554 reute u f BC-DIAGNOSTIC-<DRS>-MAKE 03-02 0115 DIAGNOSTIC <DRS> MAKES A BID FOR ROSPATCH <RPCH> OAKLAND, N.J., March 2 - Diagnostic Retrieval Systems Inc said it has made an offer to acquire, through a wholly owned unit, all outstanding shares of Rospatch Corp's common stock for 22 dlrs a share cash, or about 53 mln dlrs. DRS, a warfare systems producer, said it would make the transaction through a cash tender offer for all, but not less than 51 pct, of Rospatch's outstanding common stock followed by a merger with Rospatch, a labels, high technology and wood producer, at the same purchase price per share. DRS said the deal is subject to approval by the Rospatch board, and the tender offer expires on March 6, 1986.
Reuter tm/inst/texts/acq/reut-00012.xml0000644000175100001440000000211212074065306015740 0ustar hornikusers 26-FEB-1987 17:36:22.14 acq usa F f0204 reute r f BC-EPSILON-DATA 02-26 0110 DREXEL OFFICIAL HAS STAKE IN EPSILON DATA <EPSI> WASHINGTON, Feb 26 - A senior official of Drexel Burnham Lambert Inc and his father told the Securities and Exchange Commission they have acquired 258,591 shares of Epsilon Data Management Inc, or 9.4 pct of the total outstanding. Kenneth Thomas, senior vice president-investments at Drexel's Los Angeles office, and his father, retired university professor C.A. Thomas, said they bought the stake for 2.1 mln dlrs primarily for investment purposes. They said they may buy more stock or sell some or all of their stake, depending on market conditions, but have no plans to seek control of the company. Reuter tm/inst/texts/acq/reut-00055.xml0000644000175100001440000000152312074065306015754 0ustar hornikusers 2-MAR-1987 11:24:06.09 acq usa F f0958 reute r f BC-UTILICORP-<UCU>-COMPL 03-02 0066 UTILICORP <UCU> COMPLETES ACQUISITION KANSAS CITY, March 2 - UtiliCorp United Inc said it completed the acquisition of West Virginia Power from Dominion Resources for about 21 mln dlrs. The sale was approved by the West Virginia Public Service Commission in January and became effective March one. West Virginia's management will continue to be responsible for operating the utility, it said. Reuter tm/inst/texts/acq/reut-00014.xml0000644000175100001440000000136312074065306015751 0ustar hornikusers 26-FEB-1987 17:43:59.12 acq usa F f0235 reute h f BC-SUFFIELD-FINANCIAL-<S 02-26 0050 SUFFIELD FINANCIAL <SSBK> GETS FED APPROVAL SUFFIELD, Conn., Feb 26 - Suffield Financial Corp said the Federal Reserve Board approved its application to acquire Coastal Bancorp <CSBK>, Portland, Me. Suffield said it still needs the approval of the superintendent of Maine's banking department. Reuter tm/inst/texts/acq/reut-00022.xml0000644000175100001440000000166512074065306015755 0ustar hornikusers 2-MAR-1987 05:48:46.98 acq usa uk F f0923 reute u f BC-SALE-TILNEY-BUYS-STAK 03-02 0083 SALE TILNEY BUYS STAKE IN U.S. INSURANCE BROKER LONDON, March 2 - <Sale Tilney Plc> said it has purchased 80 pct of the ordinary share capital of <B and R International Inc.>, a U.S. Insurance broker, for 5.6 mln dlrs. Sale is paying 3.6 mln dlrs in cash on completion, with the balance plus interest to be paid in equal instalments over the next six years. B and R posted pretax profit of 855,000 dlrs in the year to Dec 31, 1986 when it had net tangible assets of 563,000 dlrs. REUTER tm/inst/texts/acq/reut-00051.xml0000644000175100001440000000203412074065306015746 0ustar hornikusers 2-MAR-1987 10:59:28.36 acq usa sweden F f0833 reute r f BC-ESSELTE-BUSINESS-<ESB 03-02 0097 ESSELTE BUSINESS <ESB> UNIT BUYS ANTONSON UNIT GARDEN CITY, N.Y., March 2 - Esselte Business Systems Inc's Esselte Meto division said it has acquired the Antonson America Co, a subsidiary of <Antonson Machines AB>, of Sweden. Esselte said the Antonson unit, based in LaPorte, Indiana, manufactures scales and label printers. The company said the purchase is part of a plan to increase the range of retail electronic scales being offered by Esselte in the U.S. It said the acquisition will enble Esselte to increase its distribution base in its effort to grow in the U.S. 
Reuter tm/inst/texts/acq/reut-00004.xml0000644000175100001440000000562412074065306015754 0ustar hornikusers 26-FEB-1987 15:51:17.84 acq usa F f0881 reute u f BC-CHEMLAWN-<CHEM>-RISES 02-26 0106 CHEMLAWN <CHEM> RISES ON HOPES FOR HIGHER BIDS By Cal Mankowski, Reuters NEW YORK, Feb 26 - ChemLawn Corp <CHEM> could attract a higher bid than the 27 dlrs per share offered by Waste Management Inc <WNX>, Wall Street arbitrageurs said. Shares of ChemLawn shot up 11-5/8 to 29-3/8 in over-the-counter trading with 3.8 mln of the company's 10.1 mln shares changing hands by late afternoon. "This company could go for 10 times cash flow or 30 dlrs, maybe 32 dollars depending on whether there is a competing bidder," an arbitrageur said. Waste Management's tender offer, announced before the opening today, expires March 25. "This is totally by surprise," said Debra Strohmaier, a ChemLawn spokeswoman. The company's board held a regularly scheduled meeting today and was discussing the Waste Management announcement. She said a statement was expected but it was not certain when it would be ready. She was unable to say if there had been any prior contact between Waste Management and ChemLawn officials. "I think they will resist it," said Elliott Schlang, analyst at Prescott, Ball and Turben Inc. "Any company that doesn't like a surprise attack would." Arbitrageurs pointed out it is difficult to resist tender offers for any and all shares for cash. Schlang said ChemLawn could try to find a white knight if it does not want to be acquired by Waste Management. Analyst Rosemarie Morbelli of Ingalls and Snyder said ServiceMaster Companies L.P. <SVM> or Rollins Inc <ROL> were examples of companies that could be interested. ChemLawn, with about two mln customers, is the largest U.S. company involved in application of fertilizers, pesticides and herbicides on lawns. Waste Management is involved in removal of wastes. Schlang said ChemLawn's customer base could be valuable to another company that wants to capitalize on a strong residential and commercial distribution system. Both Schlang and Morbelli noted that high growth rates had catapulted ChemLawn's share price into the mid-30's in 1983 but the stock languished as the rate of growth slowed. Schlang said the company's profits are concentrated in the fourth quarter. In 1986 ChemLawn earned 1.19 dlrs per share for the full year, and 2.58 dlrs in the fourth quarter. Morbelli noted ChemLawn competes with thousands of individual entrepreneurs who offer lawn and garden care service. Reuter tm/inst/texts/acq/reut-00023.xml0000644000175100001440000000457012074065306015754 0ustar hornikusers 2-MAR-1987 06:54:19.43 acq uk usa RM F f0026 reute u f BC-EXCO-BUYS-U.S.-GOVERN 03-02 0114 EXCO BUYS U.S. GOVERNMENT SECURITIES BROKER LONDON, Mar 2 - <Exco International Plc>, a subsidiary of British and Commonwealth Shipping Co Plc <BCOM.L>, said it had agreed in principle to buy an 80 pct stake in <RMJ Holdings Corp> for about 79 mln dlrs. Exco Chairman Richard Lacy told Reuters the acquisition was being made from Bank of New York Co Inc <BK.N>, which currently holds a 50.1 pct stake, and from RMJ partners who hold the remainder. Bank of New York and the partners will retain about 10 pct each and these stakes will be bought over the next six years. RMJ is the holding company of RMJ Securities, one of the largest U.S. Government securities brokers. It is also involved in broking notes, obligations and other instruments sponsored by U.S. Federal agencies.
Lacy said Exco had been considering buying a U.S. Government securities broker for the past four years and had made an offer for RMJ when it was sold by Security Pacific Corp <SPC.N> in 1985. RMJ was then valued at about 50 mln dlrs. B and C managing director Peter Goldie said RMJ would be bought at about the same multiple as Exco, suggesting net income of around 16 mln dlrs. The company's earnings had not been hit by the halving of brokerage fees some 14 months ago as volumes had since doubled. Lacy said that RMJ employed some 300 people, with 200 in the brokerage business and about 70 in its <SMS> unit, which provided computer software for the financial services community. RMJ Securities had offices in New York, where total market turnover of U.S. Government securities was 110 billion dlrs a day, and in London where it has 15 billion. It was also given permission last week to open an office in Tokyo where total market turnover had lifted rapidly to about five billion dlrs a day. The acquisition would contribute between five and 10 pct of B and C's share earnings in 1987 on a proforma basis. REUTER tm/inst/texts/acq/reut-00048.xml0000644000175100001440000000417412074065306015763 0ustar hornikusers 2-MAR-1987 10:36:13.53 gold acq platinum canada brazil E F f0710 reute r f BC-cons-tvx-to-buy 03-02 0090 CONSOLIDATED TVX TO BUY BRAZIL GOLD MINE STAKES TORONTO, March 2 - <Consolidated TVX Mining Corp> said it agreed to issue 7.8 mln treasury shares to acquire interests in three gold mining companies in Brazil and an option to increase the company's interest in a platinum property. The company said the transactions will bring immediate production and earnings to Consolidated TVX, enhance its precious metal potential and is expected to improve cash flow and earnings on a per share basis. The company did not give specific figures. Consolidated TVX said it will acquire 29 pct of CMP, a public gold mining company in which TVX already holds a 15 pct interest, making TVX the largest single shareholder. The company also agreed to acquire a 19 pct stake in Novo Astro, a private company, and a 16 pct interest in Teles Pires Mining, increasing the TVX's ownership to 51 pct. In addition, Consolidated TVX said it will acquire the right to add a 10 pct interest to a platinum property in which it already owns a 29.4 pct stake. CMP earned 11 mln Canadian dlrs in 1986 and expects to produce 42,000 ounces of gold in 1987 at a cost of 160 U.S. dlrs an ounce, Consolidated TVX said. Novo Astro operates Brazil's richest gold mine located in Amapa State, with an average grade of 0.8 ounces of gold a ton in a hardrock quartz vein, Consolidated TVX said. Mining of eluvial surface material produced 25,000 ounces in 1986 and is expected to produce 60,000 ounces in 1987. It also said Teles Pires Mining controls rights to a 350 kilometer section of the Teles Pires River, where one dredge is expected to produce 10,000 ounces of gold in 1987. Reuter tm/inst/texts/acq/reut-00036.xml0000644000175100001440000000373212074065306015757 0ustar hornikusers 2-MAR-1987 09:16:08.70 acq usa F f0448 reute b f BC-/VIACOM-<VIA>-RECEIVE 03-02 0045 VIACOM <VIA> RECEIVES TWO REVISED OFFERS NEW YORK, March 2 - Viacom International Inc said it received revised merger offers from <National Amusements Inc> and <MCV Holdings Inc>. The company said the special committee plans to meet later today to review both offers. 
Viacom said National Amusements' Arsenal Holdings Inc raised the value of its offer for the Viacom shares not held by National Amusements in three areas. National Amusements holds 19.6 pct of Viacom's stock. The cash value of the offer was raised to 42.00 dlrs from the 40.50 dlrs a Viacom share offered February 23 while the value of the fraction of a share of exchangeable preferred being offered was increased to 7.50 dlrs a share from six dlrs. The interest rate to be used to increase the cash value of the merger, if delayed beyond April 30, was raised to nine pct from eight pct and 12 pct after May 31. A Viacom spokesman said the Arsenal Holdings's offer continues to include a 20 pct interest in Arsenal for present Viacom shareholders. Viacom said MCV Holdings, a group which includes the company's senior management and the Equitable Life Assurance Society of the United States, raised the value of its offer by increasing the value of the preferred being offered to 8.50 dlrs from 8.00 dlrs a share and raising the ownership in the new company to be held by present Viacom shareholders to 45 pct from 25 pct. MCV called its previous offer, made February 26, the "final" proposed revision of its agreement with Viacom. Reuter tm/inst/texts/acq/reut-00017.xml0000644000175100001440000000200712074065306015750 0ustar hornikusers 26-FEB-1987 18:27:56.14 acq usa F f0324 reute d f BC-CIRCUIT-SYSTEMS-<CSYI 02-26 0098 CIRCUIT SYSTEMS <CSYI> BUYS BOARD MAKER ADDISON, Ill., Feb 26 - Circuit Systems Inc said it has bought all of the stock of (Ionic Industries Inc) in exchange for 3,677,272 shares of its common. Following the exchange there will be 4,969,643 shares of Circuit Systems stock outstanding. Ionic holders will own about 74 pct of the outstanding stock of Circuit Systems, it said. Ionic, a maker of circuit boards, had revenues of 8.4 mln dlrs and pretax profits of 232,000 dlrs in 1986, up from revenues of 5.9 mln and pretax profits of 204,000 dlrs in 1985, Circuit Systems said. Reuter tm/inst/texts/acq/reut-00024.xml0000644000175100001440000000212512074065306015747 0ustar hornikusers 2-MAR-1987 06:58:00.68 acq usa uk F f0032 reute u f BC-COLOROLL-AGREES-TO-BU 03-02 0109 COLOROLL AGREES TO BUY U.S. WALLCOVERINGS COMPANY LONDON, March 2 - <Coloroll Group Plc> said it has entered into a conditional agreement to acquire the business and assets of <Wallco Inc> and related companies for 14.5 mln dlrs. Miami-based Wallco manufactures and distributes wallcoverings and showed a pretax profit of 1.5 mln dlrs on turnover of 37 mln in the year ending June 1986. The total U.S. Market was estimated to be worth 840 mln dlrs in 1986, having grown by 47 pct in the previous five years, Coloroll said. The combined sales and profit of the enlarged Coloroll U.S. Business would be 67 mln and four mln dlrs respectively. REUTER tm/inst/texts/acq/reut-00010.xml0000644000175100001440000000133512074065306015744 0ustar hornikusers 26-FEB-1987 17:08:27.52 acq usa F f0143 reute d f BC-GULF-APPLIED-TECHNOLO 02-26 0049 GULF APPLIED TECHNOLOGIES <GATS> SELLS UNITS HOUSTON, Feb 26 - Gulf Applied Technologies Inc said it sold its subsidiaries engaged in pipeline and terminal operations for 12.2 mln dlrs. The company said the sale is subject to certain post closing adjustments, which it did not explain. 
Reuter tm/inst/texts/acq/reut-00045.xml0000644000175100001440000000245612074065306015761 0ustar hornikusers 2-MAR-1987 10:20:41.80 acq usa F A RM f0657 reute u f BC-BANK-OF-NEW-YORK-<BK> 03-02 0054 BANK OF NEW YORK <BK> TO HAVE GAIN ON UNIT SALE NEW YORK, March 2 - Bank of New York Co said it and the management of RMJ Securities Corp have agreed to sell 80 pct of their interests in RMJ Holding Corp to <British and Commonwealth Holdings PLC> and Bank of New York expects to realize a substantial gain on the transaction. RMJ Holding is the holding company for RMJ Securities, a large broker of U.S. government securities and agency obligations Bank of New York owns a majority interest in RMJ Holding and management of RMJ Securities the remainder. Bank of New York said the sale is expected to be completed during the second quarter. It said it and RMJ Securities management will continue to own 20 pct of RMJ Holding for now, but the agreement provides for the sale of that remaining interest to British and Commonwealth over the next six years. Reuter tm/inst/texts/acq/reut-00032.xml0000644000175100001440000000150612074065306015750 0ustar hornikusers 2-MAR-1987 08:43:25.91 acq usa F f0362 reute d f BC-MARRIOTT-<MHS>-TO-SEL 03-02 0063 MARRIOTT <MHS> TO SELL HOTEL TORONTO, March 2 - <Four Seasons Hotels> said it and <VMS Realty Partners> of Chicago have agreed to purchase the Santa Barbara Biltmore Hotel from Marriott Corp for an undisclosed amount. It said the venture will rename the hotel the Four Seasons Biltmore at Santa Barbara and invest over 13 mln dlrs in improvements on the 228-room property. Reuter tm/inst/texts/acq/reut-00034.xml0000644000175100001440000000166612074065306015761 0ustar hornikusers 2-MAR-1987 09:02:51.89 acq usa F f0411 reute u f BC-LAROCHE-STARTS-BID-FO 03-02 0058 LAROCHE STARTS BID FOR NECO <NPT> SHARES NEW YORK, March 2 - Investor David F. La Roche of North Kingstown, R.I., said he is offering to purchase 170,000 common shares of NECO Enterprises Inc at 26 dlrs each. He said the successful completion of the offer, plus shares he already owns, would give him 50.5 pct of NECO's 962,016 common shares. La Roche said he may buy more, and possible all NECO shares. He said the offer and withdrawal rights will expire at 1630 EST/2130 gmt, March 30, 1987. Reuter tm/inst/texts/acq/reut-00049.xml0000644000175100001440000000146112074065306015760 0ustar hornikusers 2-MAR-1987 10:50:34.12 acq usa F f0802 reute w f BC-AMERICAN-NURSERY-<ANS 03-02 0060 AMERICAN NURSERY <ANSY> BUYS FLORIDA NURSERY TAHLEQUAH, OKLA., March 2 - American Nursery Products Inc said it purchased Miami-based Heinl's Nursery Inc, for undisclosed terms. Heinl's Nursery has sales of about 4.5 mln dlrs and owns 100 acres, of which 75 are in shade houses and about 58,300 square feet cover greenhouses, shipping and office facilities. Reuter tm/inst/texts/acq/reut-00016.xml0000644000175100001440000000122012074065306015743 0ustar hornikusers 26-FEB-1987 18:12:51.94 acq canada E f0301 reute r f BC-VIDEOTRON-BUYS-INTO-E 02-26 0036 VIDEOTRON BUYS INTO EXHIBIT COMPANY MONTREAL, Feb 26 - (Groupe Videotron Ltd) said it agreed to buy 50 pct of (Groupe Promexpo Inc), a company which specializes in product exhibits, for three mln dlrs. 
Reuter tm/inst/texts/acq/reut-00040.xml0000644000175100001440000000303612074065306015747 0ustar hornikusers 2-MAR-1987 09:33:32.93 acq usa F f0501 reute u f BC-PITTSTON-<PCO>-AGREES 03-02 0111 PITTSTON <PCO> AGREES TO ACQUIRE WTC <WAF> STAMFORD, Conn., March 2 - Pittston Co said it has tentatively agreed to acquire WTC International N.V. in a tax-free exchange of stock. Pittston said it agreed to exchange 0.523 common share for each of the about 8,612,000 WTC common shares outstanding. Pittston said WTC's three principal shareholders, who own 62 pct of its stock, are parties to this agreement. They have granted Pittston the right of first refusal to their shares. WTC has granted Pittston an option to buy WTC shares equal to 18.5 pct of its outstanding stock. The agreement is subject to approval of both boards and WTC shareholders. Pittston described WTC as a fast growing air freight forwarding company with operations throughout the world. Its revenues totaled nearly 200 mln dlrs in the year ended November 30 and for the quarter ended on that date it earned 1.3 mln dlrs on revenues of 55.8 mln dlrs. Pittston said its Burlington Air Express subsidiary generates about two-thirds of its 450 mln dlrs in annual revenues with its domestic air freight services. Reuter tm/inst/texts/acq/reut-00047.xml0000644000175100001440000000155012074065306015755 0ustar hornikusers 2-MAR-1987 10:36:04.57 acq usa F f0709 reute r f BC-BALLY-<BLY>-COMPLETES 03-02 0071 BALLY <BLY> COMPLETES PURCHASE OF GOLDEN NUGGET CHICAGO, March 2 - Bally Manufacturing Corp said it completed the acquisition of the Golden Nugget Casino Hotel in Atlantic City, New Jersey from Golden Nugget Inc. Bally also acquired from Golden Nugget various parcels of real estate in Atlantic City, it noted. The transaction included 140 mln dlrs in cash and stock and the assumption of a 299 mln dlrs mortgage. Reuter tm/inst/texts/acq/reut-00039.xml0000644000175100001440000000241612074065306015760 0ustar hornikusers 2-MAR-1987 09:28:21.66 acq usa F f0482 reute u f BC-MILLER-TABAK-HAS-91.8 03-02 0057 MILLER TABAK HAS 91.8 PCT OF PENN TRAFFIC <PNF> NEW YORK, March 2 - <Miller Tabak Hirsch and Co> said it has received and accepted 3,424,729 common shares of Penn Traffic Co in response to its 31.60 dlr per share tender offer that expired Friday, and together with the 380,728 shares it already owned, it now has about 91.8 pct of Penn Traffic. The company said Penn Traffic is expected to hold a special shareholders' meeting later this month to approve a merger into Miller Tabak at the tender price. It said two Miller Tabak representatives will be named to the Penn Traffic board on March Four to serve as the only directors with Penn Traffic president and chief executive officer Guido Malacarne. The company said it received financing for the transaction from First National Bank of Minneapolis and Salomon Inc <SB>. Reuter tm/inst/texts/acq/reut-00008.xml0000644000175100001440000000207412074065306015754 0ustar hornikusers 26-FEB-1987 16:59:25.38 acq usa F f0116 reute d f BC-WRATHER 02-26 0109 HONG KONG FIRM UPS WRATHER<WCO> STAKE TO 11 PCT WASHINGTON, Feb 26 - Industrial Equity (Pacific) Ltd, a Hong Kong investment firm, said it raised its stake in Wrather Corp to 816,000 shares, or 11.3 pct of the total outstanding common stock, from 453,300 shares, or 6.3 pct.
In a filing with the Securities and Exchange Commission, Industrial Equity, which is principally owned by Brierley Investments Ltd, a publicly held New Zealand company, said it bought 362,700 Wrather common shares between Feb 13 and 24 for 6.6 mln dlrs. When it first disclosed its stake in Wrather earlier this month, it said it bought the stock for investment purposes. Reuter tm/inst/texts/acq/reut-00009.xml0000644000175100001440000000136512074065306015757 0ustar hornikusers 26-FEB-1987 17:01:28.10 acq usa F f0121 reute u f BC-LIEBERT-CORP-<LIEB>-A 02-26 0051 LIEBERT CORP <LIEB> APPROVES MERGER COLUMBUS, Ohio, Feb 26 - Liebert Corp said its shareholders approved the merger of a wholly-owned subsidiary of Emerson Electric Co <EMR>. Under the terms of the merger, each Liebert shareholder will receive .3322 shares of Emerson stock for each Liebert share. Reuter tm/inst/texts/acq/reut-00026.xml0000644000175100001440000000777612074065306015772 0ustar hornikusers 2-MAR-1987 08:17:56.66 acq usa F f0274 reute u f PM-SHEARSON 03-02 0105 AMERICAN EXPRESS <AXP> VIEWING SHEARSON OPTIONS By Patti Domm, Reuters NEW YORK, March 2 - American Express Co, rumored to be considering a spinoff of part of Shearson Lehman Brothers Inc, said it is studying a range of options for its brokerage unit that could improve Shearon's access to capital and help it meet broadening international competition. In a joint statement, American Express and Shearson said the actions under consideration are an integral part of American Express' worldwide financial services strategy and that the two companies have been having both internal and external discussions on the matters. American Express said no decision has been reached on the strategic options and that it and Shearson could ultimately decide to follow growth plans already in place. Last week, rumors circulated on Wall Street that the financial services giant was considering a spinoff of part of Shearson and there was speculation it may be considering selling a stake to a Japanese firm. Analysts said the speculation also focused on American Express selling 20 pct of the profitable brokerage firm to the public. There was some speculation that American Express had also considered a total spinoff of Shearson, but the plan was considered highly unlikely, analysts said. American Express said in the statement on Sunday that it will not comment on rumors and speculation and a spokesman would not go beyond the statement. The company also remained silent last Thursday and Friday, as rumors drove American Express stock up a total of 5-1/2 dlrs in two days to bring it to a Friday close at 74. It said it issued the statement on Sunday because a similar statement was being circulated to employees. Analysts have been divided on whether it makes sense for American Express to give up a stake in the wholly-owned brokerage, which improved its after-tax earnings by about 50 pct in the last year. Some analysts said American Express may consider spinning off part of Shearson because it is concerned that its stock price does not fully reflect the value of the brokerage firm. Shearson contributed 316 mln dlrs of American Express' 1.25 billion dlr net in 1986. American Express' ambitious plans for international growth may be also enhanced by the added cash that spinning out part of Shearson would bring. Analysts speculated that all of Shearson would have a market value of about 3.5 billion dlrs. To some however, the need for added capital is puzzling. 
"(American) Express is in a position where they can raise capital if they need to," said Larry Eckenfelder of Prudential-Bache Securities. Analysts said rumors were fed by the reorganization of Shearson management Wednesday. Chief operating officer Jeffrey Lane got the added, previously vacant, post of president. The reorganization also created four new positions for chairmen of Shearson's operating divisions, a move analysts speculated would allow Shearson to be a stand alone company. Analysts, contacted on Sunday said the statement does little to clarify last week's market speculation. It does confirm, however, that the financial services firm, which unsuccessfully attempted to expand Shearson with a major acquisition last year, is looking beyond its own walls for growth and positioning in the global market competition. Late last year, Shearson's takeover offer to the E.F. Hutton Group Inc was rejected by Hutton, and analysts said there had been speculation that Shearson also was rebuffed when it approached another major Wall Street brokerage. Reuter tm/inst/texts/acq/reut-00007.xml0000644000175100001440000001026112074065306015750 0ustar hornikusers 26-FEB-1987 16:43:13.65 acq usa F f0061 reute u f BC-AMERICAN-EXPRESS-<AXP 02-26 0108 AMERICAN EXPRESS <AXP> SEEN IN POSSIBLE SPINNOFF By Patti Domm, Reuter New York, Feb 26 - American Express Co remained silent on market rumors it would spinoff all or part of its Shearson Lehman Brothers Inc, but some analysts said the company may be considering such a move because it is unhappy with the market value of its stock. American Express stock got a lift from the rumor, as the market calculated a partially public Shearson may command a good market value, thereby boosting the total value of American Express. The rumor also was accompanied by talk the financial services firm would split its stock and boost its dividend. American Express closed on the New York Stock Exchange at 72-5/8, up 4-1/8 on heavy volume. American Express would not comment on the rumors or its stock activity. Analysts said comments by the company at an analysts' meeting Tuesday helped fuel the rumors as did an announcement yesterday of management changes. At the meeting, company officials said American Express stock is undervalued and does not fully reflect the performance of Shearson, according to analysts. Yesterday, Shearson said it was elevating its chief operating officer, Jeffery Lane, to the added position of president, which had been vacant. It also created four new positions for chairmen of its operating divisions. Analysts speculated a partial spinoff would make most sense, contrary to one variation on market rumors of a total spinoff. Some analysts, however, disagreed that any spinoff of Shearson would be good since it is a strong profit center for American Express, contributing about 20 pct of earnings last year. "I think it is highly unlikely that American Express is going to sell shearson," said Perrin Long of Lipper Analytical. He questioned what would be a better investment than "a very profitable securities firm." Several analysts said American Express is not in need of cash, which might be the only reason to sell a part of a strong asset. But others believe the company could very well of considered the option of spinning out part of Shearson, and one rumor suggests selling about 20 pct of it in the market. Larry Eckenfelder of Prudential-Bache Securities said he believes American Express could have considered a partial spinoff in the past. 
"Shearson being as profitable as it is would have fetched a big premium in the market place. Shearson's book value is in the 1.4 mln dlr range. Shearson in the market place would probably be worth three to 3.5 bilion dlrs in terms of market capitalization," said Eckenfelder. Some analysts said American Express could use capital since it plans to expand globally. "They have enormous internal growth plans that takes capital. You want your stock to reflect realistic valuations to enhance your ability to make all kinds of endeavors down the road," said E.F. Hutton Group analyst Michael Lewis. "They've outlined the fact that they're investing heavily in the future, which goes heavily into the international arena," said Lewis. "...That does not preclude acquisitions and divestitures along the way," he said. Lewis said if American Express reduced its exposure to the brokerage business by selling part of shearson, its stock might better reflect other assets, such as the travel related services business. "It could find its true water mark with a lesser exposure to brokerage. The value of the other components could command a higher multiple because they constitute a higher percentage of the total operating earnings of the company," he said. Lewis said Shearson contributed 316 mln in after-tax operating earnings, up from about 200 mln dlrs in 1985. Reuter tm/inst/texts/acq/reut-00001.xml0000644000175100001440000000343012074065306015742 0ustar hornikusers 26-FEB-1987 15:18:06.67 acq usa F f0767 reute d f BC-COMPUTER-TERMINAL-SYS 02-26 0107 COMPUTER TERMINAL SYSTEMS <CPML> COMPLETES SALE COMMACK, N.Y., Feb 26 - Computer Terminal Systems Inc said it has completed the sale of 200,000 shares of its common stock, and warrants to acquire an additional one mln shares, to <Sedio N.V.> of Lugano, Switzerland for 50,000 dlrs. The company said the warrants are exercisable for five years at a purchase price of .125 dlrs per share. Computer Terminal said Sedio also has the right to buy additional shares and increase its total holdings up to 40 pct of the Computer Terminal's outstanding common stock under certain circumstances involving change of control at the company. The company said if the conditions occur the warrants would be exercisable at a price equal to 75 pct of its common stock's market price at the time, not to exceed 1.50 dlrs per share. Computer Terminal also said it sold the technolgy rights to its Dot Matrix impact technology, including any future improvements, to <Woodco Inc> of Houston, Tex. for 200,000 dlrs. But, it said it would continue to be the exclusive worldwide licensee of the technology for Woodco. The company said the moves were part of its reorganization plan and would help pay current operation costs and ensure product delivery. Computer Terminal makes computer generated labels, forms, tags and ticket printers and terminals. Reuter tm/inst/texts/acq/reut-00018.xml0000644000175100001440000000256612074065306015763 0ustar hornikusers 1-MAR-1987 22:20:43.45 acq japan M C f0515 reute u f BC-NIPPON-KOKAN-STEEL-AF 03-01 0113 NIPPON KOKAN STEEL AFFILIATES CONSIDERING MERGER TOKYO, March 2 - Toshin Steel Co Ltd <TOSS.T> and <Azuma Steel Co Ltd>, affiliates of Nippon Kokan KK <NKKT.T>, are considering a merger, company spokesmen said. Toshin Steel, owned 41.9 pct by Nippon Kokan, and Azuma Steel, owned 41.3 pct by Nippon Kokan, are expected to decide by the end of March, they said. 
Both firms have been struggling with losses caused by the recession in the steel industry and the yen's appreciation. Azuma Steel's current losses are estimated at 3.1 billion yen in the year ending March 31 against a 6.99 billion loss a year earlier, a spokesman said. The firm employs 1,100 workers Toshin Steel, with 1,700 workers, has given no forecast for the year ending March 31. But industry sources said they expected the company to show current losses of about five billion yen or more in 1986/87 compared with a 2.98 billion loss in 1985/86. REUTER tm/inst/texts/acq/reut-00030.xml0000644000175100001440000000714112074065306015747 0ustar hornikusers 2-MAR-1987 08:29:05.15 acq usa F f0315 reute u f PM-PUROLATOR 03-02 0102 PUROLATOR <PCC> IN BUYOUT WITH HUTTON <EFH> By Patti Domm NEW YORK, March 2 - New Jersey-based overnight messenger Purolator Courier Corp said it has agreed to be acquired for about 265 mln dlrs by a company formed by E.F. Hutton LBO Inc and certain managers of Purolator's U.S. courier business. Analysts have said that Purolator has been for sale for some time. Purolator announced earlier it was mulling a takeover bid, but analysts wrongly predicted the offer was from another courier company. Hutton LBO, a wholly owned subsidiary of E.F. Hutton Group Inc, will be majority owner of the company. Hutton said the acquiring company, PC Acquisition Inc, is paying 35 dlrs cash per share for 83 pct of Purolator's stock in a tender offer to begin Thursday. The rest of the shares will be purchased for securities and warrants to buy stock in a subsidiary of PC Acquisition, containing Purolator's U.S. courier operations. If all the shares of Purolator are tendered, shareholders would receive for each share 29 dlrs cash, six dlrs in debentures, and a warrant to buy shares in a subsidiary of PC Acquisition containing the U.S. courier operations. Hutton said in the merger shareholders would get 46 mln dlrs aggregate amount of guaranteed debentures due 2002 of PC Acquisition and warrants to buy 15 pct of the common stock of the PC courier subsidiary. Hutton said the company has valued the warrants at two to three dlrs per share. Purolator's stock price closed at 35.125 dlrs on Friday. While some analysts estimated the company was worth in the mid 30s, at least one said it would be worth 38 to 42 dlrs. This follows sales of two other Purolator units. It agreed recently to sell its Canadian Courier unit to Onex Capital for 170 mln dlrs, and previously sold its auto filters business. Purolator retains its Stant division, which makes closure caps for radiators and gas tanks. A Hutton spokesman said the firm is reviewing its options on Stant. Purolator's courier business has been lagging that of its U.S. rivals because of the high price it paid in the past several years to add air delivery to its ground fleet. E.F. Hutton will provide 279 mln dlrs of its funds to complete the transaction. This so-called "bridge" financing will be replaced later with long-term debt most likely in the form of bank loans, Hutton said. Hutton LBO is committed to keeping the courier business, its president Warren Idsal said. "Purolator lost 120 mln dlrs over the last two years largely due to U.S. courier operations, which we believe the management is turning around. We belive it will be a very serious competitor in the future," said Idsal. William Taggart, chief executive officer of U.S. Courier division, will be chief executive officer of the new company. 
The tender offer will be conditioned on a minimum of two thirds of the common stock being tendered and not withdrawn to the expiration of the offer as well as certain other conditions. The offer will begin Thursday, subject to clearances from the staff of the Interstate Commerce Commission and will expire 20 business days after commencement unless extended. Reuter tm/inst/texts/acq/reut-00028.xml0000644000175100001440000000221712074065306015755 0ustar hornikusers 2-MAR-1987 08:25:56.49 acq usa F f0301 reute r f BC-PENRIL-<PNL>-SEEKS-TO 03-02 0101 PENRIL <PNL> SEEKS TO SELL TWO UNITS ROCKVILLE, Md., March 2 - Penril Corp said it is seeking to sell its Triplett Electrical Instrument Corp subsidiary in Bluffton, Ohio, and Triplett's Alltest division in Hoffman Estates, Ill., as part of a plan to concentrate on its three profitable division and reduce its debt load. The company also said it is evaluating a plan to satisfy its obligations under its 10-7/8 pct subordinated notes but gave no details. Interest on the notes is due today. Penril further said director Clifford L. Alexander Jr. has resigned from the board. It gave no reason. Penril said shareholders at the annual meeting approved the limitation of directors' liability. Reuter tm/inst/texts/acq/reut-00043.xml0000644000175100001440000000170612074065306015754 0ustar hornikusers 2-MAR-1987 10:06:32.63 acq usa F f0625 reute u f BC-THE-JAPAN-FUND-<JPN> 03-02 0085 THE JAPAN FUND <JPN> GETS BUYOUT OFFER NEW YORK, March 2 - The Japan Fund Inc said it has received an unsolicited offer from <Sterling Grace Capital Management LP>, acting together with certain other persons and entities, to purchase all the assets of the fund at five pct below its aggregate net asset value. The Japan Find said tne deal is subject to obtaining satisfactory financing and a due diligence review. It added that the proposal has been referred to its Board of Directors for consideration. Reuter tm/inst/texts/acq/reut-00003.xml0000644000175100001440000000203612074065306015745 0ustar hornikusers 26-FEB-1987 15:49:56.01 acq ship usa F f0874 reute r f BC-MCLEAN'S-<MII>-U.S.-L 02-26 0094 MCLEAN'S <MII> U.S. LINES SETS ASSET TRANSFER CRANFORD, N.J., Feb 26 - McLean Industries Inc's United States Lines Inc subsidiary said it has agreed in principle to transfer its South American service by arranging for the transfer of certain charters and assets to <Crowley Mariotime Corp>'s American Transport Lines Inc subsidiary. U.S. Lines said negotiations on the contract are expected to be completed within the next week. Terms and conditions of the contract would be subject to approval of various regulatory bodies, including the U.S. Bankruptcy Court. Reuter tm/inst/texts/acq/reut-00002.xml0000644000175100001440000000243712074065306015751 0ustar hornikusers 26-FEB-1987 15:19:15.45 earn acq usa F f0773 reute u f BC-OHIO-MATTRESS-<OMT>-M 02-26 0095 OHIO MATTRESS <OMT> MAY HAVE LOWER 1ST QTR NET CLEVELAND, Feb 26 - Ohio Mattress Co said its first quarter, ending February 28, profits may be below the 2.4 mln dlrs, or 15 cts a share, earned in the first quarter of fiscal 1986. The company said any decline would be due to expenses related to the acquisitions in the middle of the current quarter of seven licensees of Sealy Inc, as well as 82 pct of the outstanding capital stock of Sealy. Because of these acquisitions, it said, first quarter sales will be substantially higher than last year's 67.1 mln dlrs. 
Noting that it typically reports first quarter results in late march, said the report is likely to be issued in early April this year. It said the delay is due to administrative considerations, including conducting appraisals, in connection with the acquisitions. Reuter tm/inst/texts/acq/reut-00005.xml0000644000175100001440000000152112074065306015745 0ustar hornikusers 26-FEB-1987 16:08:33.15 acq usa F f0949 reute r f BC-<COFAB-INC>-BUYS-GULF 02-26 0066 <COFAB INC> BUYS GULFEX FOR UNDISCLOSED AMOUNT HOUSTON, Feb 26 - CoFAB Inc said it acquired <Gulfex Inc>, a Houston-based fabricator of custom high-pressure process vessels for the energy and petrochemical industries. CoFAB said its group of companies manufacture specialized cooling and lubricating systems for the oil and gas, petrochemical, utility, pulp and paper and marine industries. Reuter tm/inst/texts/acq/reut-00015.xml0000644000175100001440000000134412074065306015751 0ustar hornikusers 26-FEB-1987 18:12:35.70 acq canada E F f0300 reute r f BC-VERSATILE-TO-SELL-UNI 02-26 0049 VERSATILE TO SELL UNIT TO VICON VANCOUVER, British Columbia, Feb 26 - <Versatile Corp> said it agreed in principle to sell its Alberta-based Versatile Noble Cultivators Co division to Vicon Inc, of Ontario, for undisclosed terms. The division manufactures tillage and spraying equipment. Reuter tm/inst/texts/acq/reut-00050.xml0000644000175100001440000000275612074065306015760 0ustar hornikusers 2-MAR-1987 10:59:16.80 earn acq E F f0832 reute r f BC-multi-step-to-sell 03-02 0108 MULTI-STEP TO SELL LADDER UNIT, CANCEL SHARES TORONTO, March 2 - <Multi-Step Products Inc>, earlier reporting an initial six month loss, said it agreed to sell wholly owned Multi-Step Manufacturing Inc for 100,000 dlrs cash, subject to shareholder and regulatory approval. Multi-Step also said it will pay 900,000 dlrs to cancel 711,192 of its own shares, which will be acquired from Michael Penhale and his benficiaries. Penhale will control and manage Multi-Step Manufacturing, following the transactions. Multi-Step had a 739,146 dlr loss for the six months ended December 31. The company received its initial public listing in December. The company said its ladder-making unit has been losing 300,000 dlrs quarterly. The sale, expected to close in April, also calls for retirement of the unit's 400,000 dlr bank debt, Multi-Step said. The unit also has agreed to pay a debt of 400,000 dlrs to Tarxien Company Ltd, which is 40 pct owned by Multi-Step. Multi-Step previously said it agreed to acquire the remaining 60 pct of Tarxien it does not already own. Reuter tm/inst/texts/acq/reut-00006.xml0000644000175100001440000000154012074065306015747 0ustar hornikusers 26-FEB-1987 16:32:37.30 acq usa F f0024 reute u f BC-CYCLOPS 02-26 0073 INVESTMENT FIRMS CUT CYCLOPS <CYL> STAKE WASHINGTON, Feb 26 - A group of affiliated New York investment firms said they lowered their stake in Cyclops Corp to 260,500 shares, or 6.4 pct of the total outstanding common stock, from 370,500 shares, or 9.2 pct. In a filing with the Securities and Exchange Commission, the group, led by Mutual Shares Corp, said it sold 110,000 Cyclops common shares on Feb 17 and 19 for 10.0 mln dlrs. 
Reuter tm/inst/texts/acq/reut-00013.xml0000644000175100001440000000257212074065306015753 0ustar hornikusers 26-FEB-1987 17:38:47.04 acq canada F E f0214 reute d f BC-<NOVA>-WINS-GOVERNMEN 02-26 0106 <NOVA> WINS GOVERNMENT OKAY FOR HUSKY <HYO> DEAL CALGARY, Alberta, Feb 26 - Nova, the Canadian company that owns 56 pct of Husky Oil Ltd, said it received government approval for a transaction under which <Union Faith Canada Holding Ltd> would buy a 43 pct stake in Husky. Nova said the Minister of Regional and Industrial Expansion, Michel Cote, ruled that Union Faith's purchase of the Husky stake would not result in Husky ceding control to a non-Canadian company. It said this ruling was a key condition in completing the deal. Union Faith is equally owned by <Hutchison Whampoa Ltd> and <Hong Kong Electric Holdings Ltd>. Under the agreement with Union Faith, Husky will become a private company with Union Faith and Nova each holding 43 pct of its stock. Nine pct of Husky would be owned by relatives of Li Ka-Shing, chairman of Hutchison, and five pct by the Canadian Imperial Bank of Commerice. Reuter tm/inst/texts/acq/reut-00046.xml0000644000175100001440000000166712074065306015765 0ustar hornikusers 2-MAR-1987 10:29:07.31 acq usa F f0682 reute b f BC-CORNING-<GLW>,-HAZLET 03-02 0083 CORNING <GLW>, HAZLETON <HLC> SET EXCAHNGE RATIO CORNING, N.Y., March 2 - Corning Glass Works said the exchange ratio for its previously announced acquisition of Hazleton Laboratories Corp has been established at 0.5165 Corning common share for each Hazleton common share. Corning said the prospectus regarding the merger is expected to be mailed tomorrow to all Hazleton holders of record February 18. Hazleton shareholders will vote on the proposed merger at a special meeting on March 31. Reuter tm/inst/texts/acq/reut-00020.xml0000644000175100001440000000573112074065306015751 0ustar hornikusers 2-MAR-1987 04:45:57.78 acq sweden F f0812 reute b f BC-WALLENBERGS-FIGHT-BID 03-02 0115 WALLENBERGS FIGHT BID FOR SWEDISH MATCH STAKE STOCKHOLM, March 2 - Sweden's Wallenberg group fought back a bid by the London-based Swedish financier Erik Penser to secure a large stake in Swedish Match <SMBS ST>, one of the companies at the core of their business empire. A statement issued by the Wallenberg holding companies AB Investor and Forvaltnings AB Providentia said they had taken over an option held by Nobel Industrier Sweden AB to acquire 33 pct of the voting rights in Swedish Match. Thre Wallenbergs paid Nobel Industrier <NOBL ST>, in which Penser group has a 72 pct stake, about 20 pct over the market price for the Swedish Match option, the statement said. Swedish Match's B shares open to foreign buyers closed at 424 crowns on Friday. The A shares -- with increased voting rights -- closed at 450 crowns for the restricted and 455 for the free shares. The statement said the deal increased Investor's stake to 49.4 pct of the voting rights and 14.8 pct of the share capital while Providentia is left holding 34.1 pct of the voting rights and 14.5 pct of the share capital in Swedish Match. The Wallenbergs' stake in Swedish Match had previously amounted to 52 pct of the voting rights in the company. The Swedish Match deal will cost the Wallenbergs about 400 mln crowns, share analysts said, making it one of the most expensise moves the group has undertaken in the last four years to defend its far-flung interests from outside predators. 
The Wallenbergs originally sold Nobel Industrier, an arms and chemicals group, to Penser in 1984 to pay for buying Volvo <VOLV ST> out of two other key group companies, Atlas Copco <ASTS ST> and Stora Koppabergs <SKPS ST>. Since then, the Wallenbergs were ousted as the largest shareholders in SKF (SKFR ST> by Skanska AB <SKBS ST> and Frederik Lundberg wrested control of Incentive AB from them. Lundberg, a Zurich-based Swedish property tycoon, also managed to acquire a 25 pct stake in another Wallenberg company, the diary equipment firm Alfa -Laval AB <ALFS ST>. During 1986, the Wallenbergs have been concentrating on building up their stake in Investor and Providentia to prevent any raid on the heart of their business empire. But analysts say the Wallenbergs' position in the electrical engineering firm ASEA AB <ASEA ST> is also too small at 12.6 pct of the voting rights and there has been growing speculation that the group will be forced to sell off fringe interests to protect its core activities. REUTER tm/inst/texts/acq/reut-00025.xml0000644000175100001440000000212312074065306015746 0ustar hornikusers 2-MAR-1987 08:16:59.80 acq usa F f0267 reute r f BC-SCIENTIFIC-MICRO-SYST 03-02 0111 SCIENTIFIC MICRO SYSTEMS <SMSI> ACUIRES SUPERMAC NEW YORK, March 2 - Scientific Micro Systems Inc said it has acquired Supermac Technology, a rapidly growing supplier of enhancement products and disc drive subsystems for the Apple personal computer market. Scientific Micro said it acquired all the common stock of Supermac in exchange for 1.05 mln shares of its own common stock. The stock closed at 5.50 dlrs bid on Friday. Supermac, a privately held firm based in Mountain View, California, as is Scientific Micro, reported a net profit of 300,000 dlrs on revenue of 9.5 mln dlrs in fiscal 1986. It expects its revenue to approximately double in 1987. Reuter tm/inst/texts/acq/reut-00035.xml0000644000175100001440000000222212074065306015747 0ustar hornikusers 2-MAR-1987 09:03:18.94 acq uk usa F f0414 reute d f BC-SENIOR-ENGINEERING-MA 03-02 0117 SENIOR ENGINEERING MAKES 12.5 MLN DLR US PURCHASE LONDON, March 2 - <Senior Engineering Group Plc> said it reached agreement with <Cronus Industries Inc> to acquire the whole share capital of <South Western Engineering Co> for 12.5 mln dlrs cash. This sum is being financed by a term loan. South Western is one of the U.S.'s leading manufacturers of heat transfer equipment, with a turnover of 54.86 mln dlrs and pre-tax profits of 1.72 mln in 1986. Completion of the deal is conditional on approval under U.S. Hart-Scott-Rodino regulations which is expected within 30 days. Some 350,000 dlrs is payable immediately, 12 mln dlrs payable on completion with the balance due by June 30, 1987. Reuter tm/inst/texts/acq/reut-00052.xml0000644000175100001440000000205512074065306015752 0ustar hornikusers 2-MAR-1987 11:09:06.82 acq canada E F f0882 reute r f BC-FOUR-SEASONS-BUYING-M 03-02 0100 FOUR SEASONS BUYING MARRIOTT <MHS> HOTEL TORONTO, March 2 - <Four Seasons Hotels Inc> and VMS Realty Partners said they agreed to acquire the Santa Barbara Biltmore Hotel in California from Marriott Corp, for undisclosed terms. Closing was expected by March 31, they added. The companies said they would jointly own the hotel and rename it the Four Seasons Biltmore at Santa Barbara. They said they would spend more than 13 mln U.S. dlrs "to enhance the Biltmore's position as one of the finest resort hotels in North America." Chicago-based VMS Realty is a real estate and development firm. 
Reuter tm/inst/texts/acq/reut-00021.xml0000644000175100001440000000275412074065306015754 0ustar hornikusers 2-MAR-1987 04:52:58.27 acq uk F f0825 reute b f BC-SHV-SAYS-IT-MAKING-TE 03-02 0061 SHV SAYS IT MAKING TENDER OFFER FOR IC GAS LONDON, March 2 - <SHV (United Kingdom) Holding Co Ltd> said it was making a tender offer for up to 33 mln ordinary shares in Imperial Continental Gas Association.<ICGS.L>. It said in a statement the offer was on the basis of 700p for each IC Gas ordinary and 252p for every one stg nominal of IC Gas loan stock. SHV already holds 6.8 mln IC Gas ordinary stock units representing around 4.9 pct of the current issued share capital. Successful completion of the offer would increase SHV's stake in IC Gas to 39.8 mln shares, representing around 27.9 pct of issued share capital, it said. The offer capitalises IC Gas at around one billion stg. It said it was tendering for both ordinary stock and loan stock, which when fully converted, gave a total of 33 mln IC Gas ordinary. It is making the tender offer through N.M. Rothschilds. IC Gas said in a statement it noted the SHV tender offer and the terms were being considered. It said a further statement would be made as soon as possible. REUTER... tm/inst/texts/acq/reut-00031.xml0000644000175100001440000000133712074065306015751 0ustar hornikusers 2-MAR-1987 08:41:41.32 acq usa F f0358 reute r f BC-FINANCIAL-SANTA-BARBA 03-02 0048 FINANCIAL SANTA BARBARA <FSB> TO MAKE PURCHASE SANTA BARBARA, Calif., March 2 - Financial Corp of Santa Barbara said it has signed a definitive agreement to purchase Stanwell Financial, the lending operations unit of mortgage banking company <Stanwell Mortgage>, for undisclosed terms. Reuter tm/inst/texts/acq/reut-00054.xml0000644000175100001440000000201012074065306015743 0ustar hornikusers 2-MAR-1987 11:23:45.24 acq italy spain F f0956 reute d f BC-MONTEDISON-CONCLUDES 03-02 0093 MONTEDISON CONCLUDES TALKS WITH ANTIBIOTICOS MILAN, March 2 - Montedison Spa <MONI.MI> said it has concluded its negotiations with Spanish pharmaceuticals company <Antibioticos SA>. A company spokesman told Reuters "We have concluded the talks and we are now awaiting authorization from Spanish authorities." He declined to comment further. Earlier today the Italian company postponed a scheduled press conference on its talks with Antibioticos. An Italian press report today said Montedison has agreed to acquire Antibioticos for 500 billion lire. REUTER tm/inst/texts/acq/reut-00011.xml0000644000175100001440000000213112074065306015740 0ustar hornikusers 26-FEB-1987 17:09:47.78 acq usa F f0146 reute r f BC-ROBESON 02-26 0113 INVESTMENT GROUP RAISES ROBESON <RBSN> STAKE WASHINGTON, Feb 26 - A group of affiliated Miami-based investment firms led by Fundamental Management Corp said it raised its stake in Robeson Industries Corp to 238,000 shares, or 14.6 pct of the total, from 205,000 or 12.8 pct. In a filing with the Securities and Exchange Commission, the group said it bought 32,800 Robeson common shares between Jan 26 and Feb 9 for 175,691 dlrs. The group said it may buy more shares and plans to study Robeson's operations. Afterwards it may recommend that management make changes in its operations. Fundamental Management Chairman Carl Singer was recently elected to the Robeson board. 
Reuter tm/inst/texts/acq/reut-00029.xml0000644000175100001440000000227012074065306015755 0ustar hornikusers 2-MAR-1987 08:26:35.85 acq usa F f0305 reute d f BC-<DALE-BURDETT-INC>-FA 03-02 0126 <DALE BURDETT INC> FACES DAMAGE CLAIM WESTMINSTER, Calif., March 2 - Dale Burdett Inc said it faces damages claims totalling about 420,000 dlrs from the former owners of Burdett Publications Inc. The company said on February 20, 1986, its predecessor Nolex Development Inc acquired Burdett Publications Inc in an exchange of 17 mln common shares for all Burdett Publications shares, but the transaction was not qualified with the California Department of Corporations. As a result, it said, the former Burdett Publications owners have a claim for damages against Dale Burdett as successor to Nolex for one yuear starting January 21, 1987, with the damages measured by the difference in values of shares exchanged plus interest from February 20, 1986. Reuter tm/inst/texts/rcv1_2330.xml0000644000175100001440000000265512074065307015114 0ustar hornikusers USA: Tylan stock jumps; weighs sale of company. Tylan stock jumps; weighs sale of company. SAN DIEGO

The stock of Tylan General Inc. jumped Tuesday after the maker of process-management equipment said it is exploring the sale of the company and added that it has already received some inquiries from potential buyers.

Tylan was up $2.50 to $12.75 in early trading on the Nasdaq market.

The company said it has set up a committee of directors to oversee the sale and that Goldman, Sachs & Co. has been retained as its financial adviser.

(c) Reuters Limited 1996
tm/inst/CITATION0000644000175100001440000000150614367741745013046 0ustar hornikuserscitation(auto = meta) bibentry(bibtype = "Article", title = "Text Mining Infrastructure in R", author = c(person("Ingo", "Feinerer", email = "feinerer@logic.at", comment = c(ORCID = "0000-0001-7656-8338")), person("Kurt", "Hornik", email = "Kurt.Hornik@R-project.org", comment = c(ORCID = "0000-0003-4198-9911")), person("David", "Meyer", email = "David.Meyer@wu.ac.at", comment = c(ORCID = "0000-0002-5196-3048"))), year = 2008, journal = "Journal of Statistical Software", volume = 25, number = 5, pages = "1--54", month = "March", doi = "10.18637/jss.v025.i05" ) tm/inst/ghostscript/0000755000175100001440000000000012213264557014245 5ustar hornikuserstm/inst/ghostscript/pdf_info.ps0000644000175100001440000001604212200717467016377 0ustar hornikusers%!PS % Copyright (C) 2007 Artifex Software, Inc. All rights reserved. % % This software is provided AS-IS with no warranty, either express or % implied. % % This software is distributed under license and may not be copied, % modified or distributed except as expressly authorized under the terms % of the license contained in the file LICENSE in this distribution. % % For more information about licensing, please refer to % http://www.ghostscript.com/licensing/. For information on % commercial licensing, go to http://www.artifex.com/licensing/ or % contact Artifex Software, Inc., 101 Lucas Valley Road #110, % San Rafael, CA 94903, U.S.A., +1(415)492-9861. % % $Id: pdf_info.ps 6300 2005-12-28 19:56:24Z alexcher $ % Dump some info from a PDF file % usage: gs -dNODISPLAY -q -sFile=____.pdf [-dDumpMediaSizes] [-dDumpFontsUsed [-dShowEmbeddedFonts] ] toolbin/pdf_info.ps /showoptions { ( where "options" are:) = ( -dDumpMediaSizes=false (default true) MediaBox and CropBox for each page) = ( -dDumpFontsNeeded=false (default true)Fonts used, but not embedded) = ( -dDumpFontsUsed List all fonts used) = ( -dShowEmbeddedFonts only meaningful with -dDumpFontsUsed) = (\n If no options are given, the default is -dDumpMediaSizes -dDumpFontsNeeded) = () = flush } bind def /DumpMediaSizes where { pop } { /DumpMediaSizes true def } ifelse /DumpFontsNeeded where { pop } { /DumpFontsNeeded true def } ifelse [ shellarguments { counttomark 1 eq { dup 0 get (-) 0 get ne { % File specified on the command line using: -- toolbin/pdf_info.ps infile.pdf /File exch def false % don't show usage } { true % show usage and quit } ifelse } { true } ifelse { (\n*** Usage: gs [options] -- toolbin/pdf_info.ps infile.pdf ***\n\n) print showoptions quit } if } if /File where not { (\n *** Missing input file name \(use -sFile=____.pdf\)\n) = ( usage: gs -dNODISPLAY -q -sFile=____.pdf [ options ] toolbin/pdf_info.ps\n) = showoptions quit } if pop % discard the dict from where /QUIET true def % in case they forgot () = File dup (r) file runpdfbegin /PDFPageCount pdfpagecount def ( ) print print ( has ) print PDFPageCount =print ( pages.\n) = flush % Print out the "Info" dictionary if present Trailer /Info knownoget { dup /Title knownoget { (Title: ) print = flush } if dup /Author knownoget { (Author: ) print = flush } if dup /Subject knownoget { (Subject: ) print = flush } if dup /Keywords knownoget { (Keywords: ) print = flush } if dup /Creator knownoget { (Creator: ) print = flush } if dup /Producer knownoget { (Producer: ) print = flush } if dup /CreationDate knownoget { (CreationDate: ) print = flush } if dup /ModDate knownoget { (ModDate: ) print = flush } if dup /Trapped knownoget { (Trapped: ) print = flush 
} if } if % if Info known DumpMediaSizes { () = % Print out the Page Size info for each page. 1 1 PDFPageCount { dup (Page ) print =print pdfgetpage dup /MediaBox pget { ( MediaBox: ) print oforce_array ==only } if dup /CropBox pget { ( CropBox: ) print oforce_array ==only } if dup /Rotate pget { ( Rotate = ) print =print } if pageusestransparency { ( Page uses transparency features) print } if () = flush } for } if % List of standard font names for use when we are showing the FontsNeeded /StdFontNames [ /Times-Roman /Helvetica /Courier /Symbol /Times-Bold /Helvetica-Bold /Courier-Bold /ZapfDingbats /Times-Italic /Helvetica-Oblique /Courier-Oblique /Times-BoldItalic /Helvetica-BoldOblique /Courier-BoldOblique ] def /res-type-dict 10 dict begin /Font { { exch pop oforce dup //null ne { dup /DescendantFonts knownoget { exch pop 0 get oforce } if dup /FontDescriptor knownoget { dup /FontFile known 1 index /FontFile2 known or exch /FontFile3 known or /ShowEmbeddedFonts where { pop pop //false } if { pop % skip embedded fonts } { /BaseFont knownoget { % not embedded FontsUsed exch //null put } if } ifelse } { /BaseFont knownoget { % no FontDescriptor, not embedded FontsUsed exch //null put } if } ifelse } { pop } ifelse } forall % traverse the dictionary } bind def /XObject { { exch pop oforce dup //null ne { dup /Subtype knownoget { /Form eq { /Resources knownoget { get-fonts-from-res } if } { pop } ifelse } { pop } ifelse } { pop } ifelse } forall } bind def /Pattern { { exch pop oforce dup //null ne { /Resources knownoget { get-fonts-from-res } if } { pop } ifelse } forall } bind def currentdict end readonly def % <> get-fonts-from-res - /get-fonts-from-res { oforce dup //null ne { { oforce dup //null ne { //res-type-dict 3 -1 roll .knownget { exec } { pop } ifelse } { pop pop } ifelse } forall } { pop } ifelse } bind def currentdict /res-type-dict undef /getPDFfonts { % (filename) getPDFfonts array_of_font_names /FontsUsed 1000 dict def % this will increase if needed mark 1 1 PDFPageCount { pdfgetpage % get pagedict dup /Resources pget { get-fonts-from-res } if /Annots knownoget { { oforce dup //null ne { /AP knownoget { { exch pop oforce dup //null ne { dup /Resources knownoget { get-fonts-from-res } if { exch pop oforce dup type /dicttype eq { /Resources knownoget { get-fonts-from-res } if } { pop } ifelse } forall } { pop } ifelse } forall } if } { pop } ifelse } forall } if } for % If DumpFontsUsed is not true, then remove the 'standard' fonts from the list systemdict /DumpFontsUsed known not { StdFontNames { FontsUsed 1 index known { FontsUsed 1 index undef } if pop } forall } if % Now dump the FontsUsed dict into an array so we can sort it. [ FontsUsed { pop } forall ] { 100 string cvs exch 100 string cvs exch lt } .sort } bind def systemdict /DumpFontsUsed known { (\nFont or CIDFont resources used:) = getPDFfonts { = } forall } { DumpFontsNeeded { getPDFfonts dup length 0 gt { (\nFonts Needed that are not embedded \(system fonts required\):) = { ( ) print = } forall } { pop (\nNo system fonts are needed.) 
= } ifelse } if } ifelse quit tm/inst/stopwords/0000755000175100001440000000000012327630227013735 5ustar hornikuserstm/inst/stopwords/english.dat0000644000175100001440000000167212156574722016075 0ustar hornikusersi me my myself we our ours ourselves you your yours yourself yourselves he him his himself she her hers herself it its itself they them their theirs themselves what which who whom this that these those am is are was were be been being have has had having do does did doing would should could ought i'm you're he's she's it's we're they're i've you've we've they've i'd you'd he'd she'd we'd they'd i'll you'll he'll she'll we'll they'll isn't aren't wasn't weren't hasn't haven't hadn't doesn't don't didn't won't wouldn't shan't shouldn't can't cannot couldn't mustn't let's that's who's what's here's there's when's where's why's how's a an the and but if or because as until while of at by for with about against between into through during before after above below to from up down in out on off over under again further then once here there when where why how all any both each few more most other some such no nor not only own same so than too very tm/inst/stopwords/danish.dat0000644000175100001440000000065012156574721015704 0ustar hornikusersog i jeg det at en den til er som pÃ¥ de med han af for ikke der var mig sig men et har om vi min havde ham hun nu over da fra du ud sin dem os op man hans hvor eller hvad skal selv her alle vil blev kunne ind nÃ¥r være dog noget ville jo deres efter ned skulle denne end dette mit ogsÃ¥ under have dig anden hende mine alt meget sit sine vor mod disse hvis din nogle hos blive mange ad bliver hendes været thi jer sÃ¥dan tm/inst/stopwords/spanish.dat0000644000175100001440000000412212156574730016101 0ustar hornikusersde la que el en y a los del se las por un para con no una su al lo como más pero sus le ya o este sí porque esta entre cuando muy sin sobre también me hasta hay donde quien desde todo nos durante todos uno les ni contra otros ese eso ante ellos e esto mí antes algunos qué unos yo otro otras otra él tanto esa estos mucho quienes nada muchos cual poco ella estar estas algunas algo nosotros mi mis tú te ti tu tus ellas nosotras vosotros vosotras os mío mía míos mías tuyo tuya tuyos tuyas suyo suya suyos suyas nuestro nuestra nuestros nuestras vuestro vuestra vuestros vuestras esos esas estoy estás está estamos estáis están esté estés estemos estéis estén estaré estarás estará estaremos estaréis estarán estaría estarías estaríamos estaríais estarían estaba estabas estábamos estabais estaban estuve estuviste estuvo estuvimos estuvisteis estuvieron estuviera estuvieras estuviéramos estuvierais estuvieran estuviese estuvieses estuviésemos estuvieseis estuviesen estando estado estada estados estadas estad he has ha hemos habéis han haya hayas hayamos hayáis hayan habré habrás habrá habremos habréis habrán habría habrías habríamos habríais habrían había habías habíamos habíais habían hube hubiste hubo hubimos hubisteis hubieron hubiera hubieras hubiéramos hubierais hubieran hubiese hubieses hubiésemos hubieseis hubiesen habiendo habido habida habidos habidas soy eres es somos sois son sea seas seamos seáis sean seré serás será seremos seréis serán sería serías seríamos seríais serían era eras éramos erais eran fui fuiste fue fuimos fuisteis fueron fuera fueras fuéramos fuerais fueran fuese fueses fuésemos fueseis fuesen siendo sido tengo tienes tiene tenemos tenéis tienen tenga tengas tengamos tengáis tengan tendré tendrás tendrá tendremos tendréis 
tendrán tendría tendrías tendríamos tendríais tendrían tenía tenías teníamos teníais tenían tuve tuviste tuvo tuvimos tuvisteis tuvieron tuviera tuvieras tuviéramos tuvierais tuvieran tuviese tuvieses tuviésemos tuvieseis tuviesen teniendo tenido tenida tenidos tenidas tened tm/inst/stopwords/german.dat0000644000175100001440000000250512156574724015713 0ustar hornikusersaber alle allem allen aller alles als also am an ander andere anderem anderen anderer anderes anderm andern anderr anders auch auf aus bei bin bis bist da damit dann der den des dem die das daß derselbe derselben denselben desselben demselben dieselbe dieselben dasselbe dazu dein deine deinem deinen deiner deines denn derer dessen dich dir du dies diese diesem diesen dieser dieses doch dort durch ein eine einem einen einer eines einig einige einigem einigen einiger einiges einmal er ihn ihm es etwas euer eure eurem euren eurer eures für gegen gewesen hab habe haben hat hatte hatten hier hin hinter ich mich mir ihr ihre ihrem ihren ihrer ihres euch im in indem ins ist jede jedem jeden jeder jedes jene jenem jenen jener jenes jetzt kann kein keine keinem keinen keiner keines können könnte machen man manche manchem manchen mancher manches mein meine meinem meinen meiner meines mit muss musste nach nicht nichts noch nun nur ob oder ohne sehr sein seine seinem seinen seiner seines selbst sich sie ihnen sind so solche solchem solchen solcher solches soll sollte sondern sonst über um und uns unse unsem unsen unser unses unter viel vom von vor während war waren warst was weg weil weiter welche welchem welchen welcher welches wenn werde werden wie wieder will wir wird wirst wo wollen wollte würde würden zu zum zur zwar zwischen tm/inst/stopwords/catalan.dat0000644000175100001440000001066412074065306016040 0ustar hornikusersa abans abans-d'ahir abintestat ací adesiara adés adéu adàgio ah ahir ai aitambé aitampoc aitan aitant aitantost aixà això així aleshores algun alguna algunes alguns algú alhora allà allèn allò allí almenys alto altra altre altres altresí altri alça al·legro amargament amb ambdues ambdós amunt amén anc andante andantino anit ans antany apa aprés aqueix aqueixa aqueixes aqueixos aqueixs aquell aquella aquelles aquells aquest aquesta aquestes aquests aquèn aquí ara arran arrera arrere arreu arri arruix atxim au avall avant aviat avui açò bah baix baldament ballmanetes banzim-banzam bastant bastants ben bis bitllo-bitllo bo bé ca cada cal cap car caram catorze cent centes cents cerca cert certa certes certs cinc cinquanta cinquena cinquenes cinquens cinquè com comsevulla contra cordons corrents cric-crac d daixonses daixò dallonses dallò dalt daltabaix damunt darrera darrere davall davant de debades dedins defora dejorn dejús dellà dementre dempeus demés demà des desena desenes desens després dessobre dessota dessús desè deu devers devora deçà diferents dinou dins dintre disset divers diversa diverses diversos divuit doncs dos dotze dues durant ecs eh el ela elis ell ella elles ells els em emperò en enans enant encara encontinent endalt endarrera endarrere endavant endebades endemig endemés endemà endins endintre enfora engir enguany enguanyasses enjús enlaire enlloc enllà enrera enrere ens ensems ensota ensús entorn entre entremig entretant entrò envers envides environs enviró ençà ep ep era eren eres ergo es escar essent esser est esta estada estades estan estant estar estaran estarem estareu estaria estarien estaries estaré estarà estaràs estaríem estaríeu estat estats estava estaven estaves estem estes esteu estic 
estiguem estigueren estigueres estigues estiguessis estigueu estigui estiguin estiguis estigué estiguérem estiguéreu estigués estiguí estos està estàs estàvem estàveu et etc etcètera ets excepte fins fora foren fores força fos fossin fossis fou fra fui fóra fórem fóreu fóreu fóssim fóssiu gaire gairebé gaires gens girientorn gratis ha hagi hagin hagis haguda hagudes hagueren hagueres haguessin haguessis hagut haguts hagué haguérem haguéreu hagués haguéssim haguéssiu haguí hala han has hauran haurem haureu hauria haurien hauries hauré haurà hauràs hauríem hauríeu havem havent haver haveu havia havien havies havíem havíeu he hem heu hi ho hom hui hàgim hàgiu i igual iguals inclusive ja jamai jo l la leri-leri les li lla llavors llevat lluny llur llurs lo los ls m ma mai mal malament malgrat manco mant manta mantes mantinent mants massa mateix mateixa mateixes mateixos me mentre mentrestant menys mes meu meua meues meus meva meves mi mig mil mitges mitja mitjançant mitjos moixoni molt molta moltes molts mon mos més n na ne ni ningú no nogensmenys només noranta nos nosaltres nostra nostre nostres nou novena novenes novens novè ns nòs nós o oh oi oidà on onsevulga onsevulla onze pas pengim-penjam per perquè pertot però piano pla poc poca pocs poques potser prest primer primera primeres primers pro prompte prop prou puix pus pàssim qual quals qualsevol qualsevulla qualssevol qualssevulla quan quant quanta quantes quants quaranta quart quarta quartes quarts quasi quatre que quelcom qui quin quina quines quins quinze quisvulla què ran re rebé renoi rera rere res retruc s sa salvament salvant salvat se segon segona segones segons seguida seixanta sempre sengles sens sense ser seran serem sereu seria serien series seré serà seràs seríem seríeu ses set setanta setena setenes setens setze setè seu seua seues seus seva seves si sia siau sic siguem sigues sigueu sigui siguin siguis sinó sis sisena sisenes sisens sisè sobre sobretot sol sola solament soles sols som son sos sota sots sou sovint suara sí sóc són t ta tal tals també tampoc tan tanmateix tant tanta tantes tantost tants te tercer tercera terceres tercers tes teu teua teues teus teva teves ton tos tost tostemps tot tota total totes tothom tothora tots trenta tres tret tretze tu tururut u uf ui uix ultra un una unes uns up upa us va vagi vagin vagis vaig vair vam van vares vas vau vem verbigràcia vers vet veu vint vora vos vosaltres vostra vostre vostres vostè vostès vuit vuitanta vuitena vuitenes vuitens vuitè vés vàreig vàrem vàreu vós xano-xano xau-xau xec érem éreu és ésser àdhuc àlies ça ço òlim ídem últim última últimes últims únic única únics úniques tm/inst/stopwords/french.dat0000644000175100001440000000150512156574723015705 0ustar hornikusersau aux avec ce ces dans de des du elle en et eux il je la le leur lui ma mais me même mes moi mon ne nos notre nous on ou par pas pour qu que qui sa se ses son sur ta te tes toi ton tu un une vos votre vous c d j l à m n s t y été étée étées étés étant suis es est sommes êtes sont serai seras sera serons serez seront serais serait serions seriez seraient étais était étions étiez étaient fus fut fûmes fûtes furent sois soit soyons soyez soient fusse fusses fût fussions fussiez fussent ayant eu eue eues eus ai as avons avez ont aurai auras aura aurons aurez auront aurais aurait aurions auriez auraient avais avait avions aviez avaient eut eûmes eûtes eurent aie aies ait ayons ayez aient eusse eusses eût eussions eussiez eussent ceci cela celà cet cette ici ils les leurs quel quels quelle quelles sans 
soi tm/inst/stopwords/portuguese.dat0000644000175100001440000000236312156574727016651 0ustar hornikusersde a o que e do da em um para com não uma os no se na por mais as dos como mas ao ele das à seu sua ou quando muito nos já eu também só pelo pela até isso ela entre depois sem mesmo aos seus quem nas me esse eles você essa num nem suas meu às minha numa pelos elas qual nós lhe deles essas esses pelas este dele tu te vocês vos lhes meus minhas teu tua teus tuas nosso nossa nossos nossas dela delas esta estes estas aquele aquela aqueles aquelas isto aquilo estou está estamos estão estive esteve estivemos estiveram estava estávamos estavam estivera estivéramos esteja estejamos estejam estivesse estivéssemos estivessem estiver estivermos estiverem hei há havemos hão houve houvemos houveram houvera houvéramos haja hajamos hajam houvesse houvéssemos houvessem houver houvermos houverem houverei houverá houveremos houverão houveria houveríamos houveriam sou somos são era éramos eram fui foi fomos foram fora fôramos seja sejamos sejam fosse fôssemos fossem for formos forem serei será seremos serão seria seríamos seriam tenho tem temos tém tinha tínhamos tinham tive teve tivemos tiveram tivera tivéramos tenha tenhamos tenham tivesse tivéssemos tivessem tiver tivermos tiverem terei terá teremos terão teria teríamos teriam tm/inst/stopwords/finnish.dat0000644000175100001440000000056012156574723016076 0ustar hornikusersolla olen olet on olemme olette ovat ole oli olisi olisit olisin olisimme olisitte olisivat olit olin olimme olitte olivat ollut olleet en et ei emme ette eivät minä sinä hän me te he tämä tuo se nämä nuo ne kuka ketkä mikä mitkä joka jotka että ja jos koska kuin mutta niin sekä sillä tai vaan vai vaikka kanssa mukaan noin poikki yli kun niin nyt itse tm/inst/stopwords/hungarian.dat0000644000175100001440000000231312156574725016414 0ustar hornikusersa ahogy ahol aki akik akkor alatt által általában amely amelyek amelyekben amelyeket amelyet amelynek ami amit amolyan amíg amikor át abban ahhoz annak arra arról az azok azon azt azzal azért aztán azután azonban bár be belül benne cikk cikkek cikkeket csak de e eddig egész egy egyes egyetlen egyéb egyik egyre ekkor el elég ellen elÅ‘ elÅ‘ször elÅ‘tt elsÅ‘ én éppen ebben ehhez emilyen ennek erre ez ezt ezek ezen ezzel ezért és fel felé hanem hiszen hogy hogyan igen így illetve ill. 
ill ilyen ilyenkor ison ismét itt jó jól jobban kell kellett keresztül keressünk ki kívül között közül legalább lehet lehetett legyen lenne lenni lesz lett maga magát majd majd már más másik meg még mellett mert mely melyek mi mit míg miért milyen mikor minden mindent mindenki mindig mint mintha mivel most nagy nagyobb nagyon ne néha nekem neki nem néhány nélkül nincs olyan ott össze Å‘ Å‘k Å‘ket pedig persze rá s saját sem semmi sok sokat sokkal számára szemben szerint szinte talán tehát teljes tovább továbbá több úgy ugyanis új újabb újra után utána utolsó vagy vagyis valaki valami valamint való vagyok van vannak volt voltam voltak voltunk vissza vele viszont volna tm/inst/stopwords/norwegian.dat0000644000175100001440000000152312156574726016434 0ustar hornikusersog i jeg det at en et den til er som pÃ¥ de med han av ikke ikkje der sÃ¥ var meg seg men ett har om vi min mitt ha hadde hun nÃ¥ over da ved fra du ut sin dem oss opp man kan hans hvor eller hva skal selv sjøl her alle vil bli ble blei blitt kunne inn nÃ¥r være kom noen noe ville dere som deres kun ja etter ned skulle denne for deg si sine sitt mot Ã¥ meget hvorfor dette disse uten hvordan ingen din ditt blir samme hvilken hvilke sÃ¥nn inni mellom vÃ¥r hver hvem vors hvis bÃ¥de bare enn fordi før mange ogsÃ¥ slik vært være bÃ¥e begge siden dykk dykkar dei deira deires deim di dÃ¥ eg ein eit eitt elles honom hjÃ¥ ho hoe henne hennar hennes hoss hossen ikkje ingi inkje korleis korso kva kvar kvarhelst kven kvi kvifor me medan mi mine mykje no nokon noka nokor noko nokre si sia sidan so somt somme um upp vere vore verte vort varte vart tm/inst/stopwords/swedish.dat0000644000175100001440000000105712156574731016107 0ustar hornikusersoch det att i en jag hon som han pÃ¥ den med var sig för sÃ¥ till är men ett om hade de av icke mig du henne dÃ¥ sin nu har inte hans honom skulle hennes där min man ej vid kunde nÃ¥got frÃ¥n ut när efter upp vi dem vara vad över än dig kan sina här ha mot alla under nÃ¥gon eller allt mycket sedan ju denna själv detta Ã¥t utan varit hur ingen mitt ni bli blev oss din dessa nÃ¥gra deras blir mina samma vilken er sÃ¥dan vÃ¥r blivit dess inom mellan sÃ¥dant varför varje vilka ditt vem vilket sitta sÃ¥dana vart dina vars vÃ¥rt vÃ¥ra ert era vilkas tm/inst/stopwords/SMART.dat0000644000175100001440000000700512074065306015316 0ustar hornikusersa a's able about above according accordingly across actually after afterwards again against ain't all allow allows almost alone along already also although always am among amongst an and another any anybody anyhow anyone anything anyway anyways anywhere apart appear appreciate appropriate are aren't around as aside ask asking associated at available away awfully b be became because become becomes becoming been before beforehand behind being believe below beside besides best better between beyond both brief but by c c'mon c's came can can't cannot cant cause causes certain certainly changes clearly co com come comes concerning consequently consider considering contain containing contains corresponding could couldn't course currently d definitely described despite did didn't different do does doesn't doing don't done down downwards during e each edu eg eight either else elsewhere enough entirely especially et etc even ever every everybody everyone everything everywhere ex exactly example except f far few fifth first five followed following follows for former formerly forth four from further furthermore g get gets getting given gives go goes going gone got gotten greetings h 
had hadn't happens hardly has hasn't have haven't having he he's hello help hence her here here's hereafter hereby herein hereupon hers herself hi him himself his hither hopefully how howbeit however i i'd i'll i'm i've ie if ignored immediate in inasmuch inc indeed indicate indicated indicates inner insofar instead into inward is isn't it it'd it'll it's its itself j just k keep keeps kept know knows known l last lately later latter latterly least less lest let let's like liked likely little look looking looks ltd m mainly many may maybe me mean meanwhile merely might more moreover most mostly much must my myself n name namely nd near nearly necessary need needs neither never nevertheless new next nine no nobody non none noone nor normally not nothing novel now nowhere o obviously of off often oh ok okay old on once one ones only onto or other others otherwise ought our ours ourselves out outside over overall own p particular particularly per perhaps placed please plus possible presumably probably provides q que quite qv r rather rd re really reasonably regarding regardless regards relatively respectively right s said same saw say saying says second secondly see seeing seem seemed seeming seems seen self selves sensible sent serious seriously seven several shall she should shouldn't since six so some somebody somehow someone something sometime sometimes somewhat somewhere soon sorry specified specify specifying still sub such sup sure t t's take taken tell tends th than thank thanks thanx that that's thats the their theirs them themselves then thence there there's thereafter thereby therefore therein theres thereupon these they they'd they'll they're they've think third this thorough thoroughly those though three through throughout thru thus to together too took toward towards tried tries truly try trying twice two u un under unfortunately unless unlikely until unto up upon us use used useful uses using usually uucp v value various very via viz vs w want wants was wasn't way we we'd we'll we're we've welcome well went were weren't what what's whatever when whence whenever where where's whereafter whereas whereby wherein whereupon wherever whether which while whither who who's whoever whole whom whose why will willing wish with within without won't wonder would would wouldn't x y yes yet you you'd you'll you're you've your yours yourself yourselves z zero tm/inst/stopwords/italian.dat0000644000175100001440000000316612156574725016070 0ustar hornikusersad al allo ai agli all agl alla alle con col coi da dal dallo dai dagli dall dagl dalla dalle di del dello dei degli dell degl della delle in nel nello nei negli nell negl nella nelle su sul sullo sui sugli sull sugl sulla sulle per tra contro io tu lui lei noi voi loro mio mia miei mie tuo tua tuoi tue suo sua suoi sue nostro nostra nostri nostre vostro vostra vostri vostre mi ti ci vi lo la li le gli ne il un uno una ma ed se perché anche come dov dove che chi cui non più quale quanto quanti quanta quante quello quelli quella quelle questo questi questa queste si tutto tutti a c e i l o ho hai ha abbiamo avete hanno abbia abbiate abbiano avrò avrai avrà avremo avrete avranno avrei avresti avrebbe avremmo avreste avrebbero avevo avevi aveva avevamo avevate avevano ebbi avesti ebbe avemmo aveste ebbero avessi avesse avessimo avessero avendo avuto avuta avuti avute sono sei è siamo siete sia siate siano sarò sarai sarà saremo sarete saranno sarei saresti sarebbe saremmo sareste sarebbero ero eri era eravamo eravate erano fui fosti fu fummo 
foste furono fossi fosse fossimo fossero essendo faccio fai facciamo fanno faccia facciate facciano farò farai farà faremo farete faranno farei faresti farebbe faremmo fareste farebbero facevo facevi faceva facevamo facevate facevano feci facesti fece facemmo faceste fecero facessi facesse facessimo facessero facendo sto stai sta stiamo stanno stia stiate stiano starò starai starà staremo starete staranno starei staresti starebbe staremmo stareste starebbero stavo stavi stava stavamo stavate stavano stetti stesti stette stemmo steste stettero stessi stesse stessimo stessero stando tm/inst/stopwords/russian.dat0000644000175100001440000000250512156574727016131 0ustar hornikusersи в во не что он на Ñ Ñ Ñо как а то вÑе она так его но да ты к у же вы за бы по только ее мне было вот от Ð¼ÐµÐ½Ñ ÐµÑ‰Ðµ нет о из ему теперь когда даже ну вдруг ли еÑли уже или ни быть был него до Ð²Ð°Ñ Ð½Ð¸Ð±ÑƒÐ´ÑŒ опÑть уж вам Ñказал ведь там потом ÑÐµÐ±Ñ Ð½Ð¸Ñ‡ÐµÐ³Ð¾ ей может они тут где еÑть надо ней Ð´Ð»Ñ Ð¼Ñ‹ Ñ‚ÐµÐ±Ñ Ð¸Ñ… чем была Ñам чтоб без будто человек чего раз тоже Ñебе под жизнь будет ж тогда кто Ñтот говорил того потому Ñтого какой ÑовÑем ним здеÑÑŒ Ñтом один почти мой тем чтобы нее кажетÑÑ ÑÐµÐ¹Ñ‡Ð°Ñ Ð±Ñ‹Ð»Ð¸ куда зачем Ñказать вÑех никогда ÑÐµÐ³Ð¾Ð´Ð½Ñ Ð¼Ð¾Ð¶Ð½Ð¾ при наконец два об другой хоть поÑле над больше тот через Ñти Ð½Ð°Ñ Ð¿Ñ€Ð¾ вÑего них ÐºÐ°ÐºÐ°Ñ Ð¼Ð½Ð¾Ð³Ð¾ разве Ñказала три Ñту Ð¼Ð¾Ñ Ð²Ð¿Ñ€Ð¾Ñ‡ÐµÐ¼ хорошо Ñвою Ñтой перед иногда лучше чуть том Ð½ÐµÐ»ÑŒÐ·Ñ Ñ‚Ð°ÐºÐ¾Ð¹ им более вÑегда конечно вÑÑŽ между tm/inst/stopwords/dutch.dat0000644000175100001440000000070512156574722015547 0ustar hornikusersde en van ik te dat die in een hij het niet zijn is was op aan met als voor had er maar om hem dan zou of wat mijn men dit zo door over ze zich bij ook tot je mij uit der daar haar naar heb hoe heeft hebben deze u want nog zal me zij nu ge geen omdat iets worden toch al waren veel meer doen toen moet ben zonder kan hun dus alles onder ja eens hier wie werd altijd doch wordt wezen kunnen ons zelf tegen na reeds wil kon niets uw iemand geweest andere tm/inst/stopwords/romanian.dat0000644000175100001440000000341712327630227016240 0ustar hornikusersa abia acea aceasta această această aceea aceia acel acela acelaÅŸi acelaÅŸi acele acelea aceluiaÅŸi acest acesta aceste acestea acestei aceÅŸti aceÅŸtia acestor acestora acestui acolo acum adică ai aia aici al ăla alături ale alt alta altă altceva alte altele altfel alÅ£i alÅ£ii altul am anume apoi ar are aÅŸ aÅŸa asemenea asta astăzi astfel asupra atare atât atâta atâtea atâţi atâţia aÅ£i atît atîti atîţia atunci au avea avem avut azi ba bine ca că cam când care căreia cărora căruia cât câtă câte câţi către ce cea ceea cei ceilalÅ£i cel cele celelalte celor ceva chiar ci cînd cine cineva cît cîte cîteva cîţi cîţiva cu cui cum cumva da daca dacă dar de deasupra decât deci decît deja deÅŸi despre din dintr dintre doar după ea ei el ele era este eu fără fecăreia fel fi fie fiecare fiecărui fiecăruia fiind foarte fost i-au iar ieri îi îl îmi împotriva în în înainte înapoi înca încît însă însă însuÅŸi într între între îşi îţi l-am la le li lor lui mă mai mare mereu mod mult multă multe mulÅ£i ne nici niciodata nimeni nimic niÅŸte noi noÅŸtri noÅŸtri nostru nouă nu numai o oarecare oarece oarecine oarecui or orice oricum până pe pentru peste pînă plus poată prea prin printr-o puÅ£ini s-ar sa să să-i să-mi să-ÅŸi să-Å£i săi sale sau său se ÅŸi sînt sîntem sînteÅ£i spre sub sunt suntem sunteÅ£i te Å£i toată toate tocmai tot toÅ£i totul totuÅŸi tu tuturor un una unde 
unei unele uneori unii unor unui unul va vă voi vom vor vreo vreun tm/inst/doc/0000755000175100001440000000000014755301616012442 5ustar hornikuserstm/inst/doc/extensions.Rnw0000644000175100001440000002727113177024075015340 0ustar hornikusers\documentclass[a4paper]{article} \usepackage[margin=2cm]{geometry} \usepackage[round]{natbib} \usepackage{url} \newcommand{\acronym}[1]{\textsc{#1}} \newcommand{\pkg}[1]{{\normalfont\fontseries{b}\selectfont #1}} \newcommand{\proglang}[1]{\textsf{#1}} \let\code\texttt %% \VignetteIndexEntry{Extensions} \begin{document} <>= library("tm") library("xml2") @ \title{Extensions\\How to Handle Custom File Formats} \author{Ingo Feinerer} \maketitle \section*{Introduction} The possibility to handle custom file formats is a substantial feature in any modern text mining infrastructure. \pkg{tm} has been designed aware of this aspect from the beginning on, and has modular components which allow for extensions. A general explanation of \pkg{tm}'s extension mechanism is described by~\citet[Sec.~3.3]{Feinerer_etal_2008}, with an updated description as follows. \section*{Sources} A source abstracts input locations and provides uniform methods for access. Each source must provide implementations for following interface functions: \begin{description} \item[close()] closes the source and returns it, \item[eoi()] returns \code{TRUE} if the end of input of the source is reached, \item[getElem()] fetches the element at the current position, \item[length()] gives the number of elements, \item[open()] opens the source and returns it, \item[reader()] returns a default reader for processing elements, \item[pGetElem()] (optional) retrieves all elements in parallel at once, and \item[stepNext()] increases the position in the source to the next element. \end{description} Retrieved elements must be encapsulated in a list with the named components \code{content} holding the document and \code{uri} pointing to the origin of the document (e.g., a file path or a \acronym{URL}; \code{NULL} if not applicable or unavailable). Custom sources are required to inherit from the virtual base class \code{Source} and typically do so by extending the functionality provided by the simple reference implementation \code{SimpleSource}. E.g., a simple source which accepts an \proglang{R} vector as input could be defined as <>= VecSource <- function(x) SimpleSource(length = length(x), content = as.character(x), class = "VecSource") @ which overrides a few defaults (see \code{?SimpleSource} for defaults) and stores the vector in the \code{content} component. The functions \code{close()}, \code{eoi()}, \code{open()}, and \code{stepNext()} have reasonable default methods already for the \code{SimpleSource} class: the identity function for \code{open()} and \code{close()}, incrementing a position counter for \code{stepNext()}, and comparing the current position with the number of available elements as claimed by \code{length()} for \code{eoi()}, respectively. So we only need custom methods for element access: <>= getElem.VecSource <- function(x) list(content = x$content[x$position], uri = NULL) pGetElem.VecSource <- function(x) lapply(x$content, function(y) list(content = y, uri = NULL)) @ \section*{Readers} Readers are functions for extracting textual content and metadata out of elements delivered by a source and for constructing a text document. 
Each reader must accept the following arguments in its signature: \begin{description} \item[elem] a list with the named components \code{content} and \code{uri} (as delivered by a source via \code{getElem()} or \code{pGetElem()}), \item[language] a string giving the language, and \item[id] a character giving a unique identifier for the created text document. \end{description} The element \code{elem} is typically provided by a source, whereas the language and the identifier are normally provided by a corpus constructor (in case \code{elem\$content} does not give information on these two essential items). If a reader expects configuration arguments, we can use a function generator. A function generator is indicated by inheriting from the classes \code{FunctionGenerator} and \code{function}. It allows us to process additional arguments, store them in an environment, return a reader function with the well-defined signature described above, and still be able to access the additional arguments via lexical scoping. All corpus constructors in package \pkg{tm} check whether the reader function is a function generator and, if so, apply it to yield the reader with the expected signature. E.g., the reader function \code{readPlain()} is defined as <<>>= readPlain <- function(elem, language, id) PlainTextDocument(elem$content, id = id, language = language) @ For an example of a reader using the function generator mechanism, have a look at \code{?readPDF}. However, in many cases it is not necessary to define every detailed aspect of how to extend \pkg{tm}. Typical examples are \acronym{XML} files, which are very common but can be handled rather easily via standard-conforming \acronym{XML} parsers. The aim of the remainder of this document is to give an overview of how simpler, more user-friendly forms of extension mechanisms can be applied in \pkg{tm}. \section*{Custom Data Formats} A common situation is that you have gathered some information into a tabular data structure (like a data frame or a list matrix) that suffices to describe the documents in a corpus. However, you do not have a distinct file format because you extracted the information from various resources, e.g., as delivered by \code{readtext()} in package \pkg{readtext}. Now you want to use your information to build a corpus which is recognized by \pkg{tm}. We assume that your information is put together in a data frame. Consider the following example: <<>>= df <- data.frame(doc_id = c("doc 1" , "doc 2" , "doc 3" ), text = c("content 1", "content 2", "content 3"), title = c("title 1" , "title 2" , "title 3" ), authors = c("author 1" , "author 2" , "author 3" ), topics = c("topic 1" , "topic 2" , "topic 3" ), stringsAsFactors = FALSE) @ We want to map the data frame rows to the relevant entries of a text document: the \code{text} entry fills the actual content of the text document, \code{doc\_id} is used as the document ID, and all other fields are used as metadata tags. So we can construct a corpus out of the data frame: <<>>= (corpus <- Corpus(DataframeSource(df))) corpus[[1]] meta(corpus[[1]]) @ \section*{Custom XML Sources} Many modern file formats already come in \acronym{XML} format, which allows information to be extracted with any conforming \acronym{XML} parser, e.g., as implemented in \proglang{R} by the \pkg{xml2} package. Now assume we have some custom \acronym{XML} format which we want to access with \pkg{tm}. 
Then a viable way is to create a custom \acronym{XML} source which can be configured with only a few commands. E.g., have a look at the following example: <<>>= custom.xml <- system.file("texts", "custom.xml", package = "tm") print(readLines(custom.xml), quote = FALSE) @ As you see there is a top-level tag stating that there is a corpus, and several document tags below. In fact, this structure is very common in \acronym{XML} files found in text mining applications (e.g., both the Reuters-21578 and the Reuters Corpus Volume 1 data sets follow this general scheme). In \pkg{tm} we expect a source to deliver self-contained blocks of information to a reader function, each block containing all information necessary such that the reader can construct a (subclass of a) \code{TextDocument} from it. The \code{XMLSource()} function can now be used to construct a custom \acronym{XML} source. It has three arguments: \begin{description} \item[x] a character giving a uniform resource identifier, \item[parser] a function accepting an \acronym{XML} document (as delivered by \code{read\_xml()} in package \pkg{xml2}) as input and returning \acronym{XML} elements/nodes (each element/node will then be delivered to the reader as a self-contained block), \item[reader] a reader function capable of turning \acronym{XML} elements/nodes as returned by the parser into a subclass of \code{TextDocument}. \end{description} E.g., a custom source which can cope with our custom \acronym{XML} format could be: <<>>= mySource <- function(x) XMLSource(x, parser = xml2::xml_children, reader = myXMLReader) @ As you notice in this example we also provide a custom reader function (\code{myXMLReader}). See the next section for details. \section*{Custom XML Readers} As we saw in the previous section we often need a custom reader function to extract information out of \acronym{XML} chunks (typically as delivered by some source). Fortunately, \pkg{tm} provides an easy way to define custom \acronym{XML} reader functions. All you need to do is to provide a so-called \emph{specification}. Let us start with an example which defines a reader function for the file format from the previous section: <<>>= myXMLReader <- readXML( spec = list(author = list("node", "writer"), content = list("node", "description"), datetimestamp = list("function", function(x) as.POSIXlt(Sys.time(), tz = "GMT")), description = list("node", "@short"), heading = list("node", "caption"), id = list("function", function(x) tempfile()), origin = list("unevaluated", "My private bibliography"), type = list("node", "type")), doc = PlainTextDocument()) @ Formally, \code{readXML()} is the relevant function which constructs a reader. The customization is done via the first argument \code{spec}; the second argument provides an empty instance of the document which should be returned (augmented with the extracted information out of the \acronym{XML} chunks). The specification must consist of a named list of lists each containing two character vectors. The constructed reader will map each list entry to the content or a metadatum of the text document as specified by the named list entry. Valid names include \code{content} to access the document's content, and character strings which are mapped to metadata entries. Each list entry must consist of two character vectors: the first describes the type of the second argument, and the second is the specification entry.
Valid combinations are: \begin{description} \item[\code{type = "node", spec = "XPathExpression"}] the XPath (1.0) expression \code{spec} extracts information out of an \acronym{XML} node (as seen for \code{author}, \code{content}, \code{description}, \code{heading}, and \code{type} in our example specification). \item[\code{type = "function", spec = function(doc) \ldots}] The function \code{spec} is called, passing over the \acronym{XML} document (as delivered by \code{read\_xml()} from package \pkg{xml2}) as first argument (as seen for \code{datetimestamp} and \code{id}). As you notice in our example nobody forces us to actually use the passed over document, instead we can do anything we want (e.g., create a unique character vector via \code{tempfile()} to have a unique identification string). \item[\code{type = "unevaluated", spec = "String"}] the character vector \code{spec} is returned without modification (e.g., \code{origin} in our specification). \end{description} Now that we have all we need to cope with our custom file format, we can apply the source and reader function at any place in \pkg{tm} where a source or reader is expected, respectively. E.g., <<>>= corpus <- VCorpus(mySource(custom.xml)) @ constructs a corpus out of the information in our \acronym{XML} file: <<>>= corpus[[1]] meta(corpus[[1]]) @ \bibliographystyle{abbrvnat} \bibliography{references} \end{document} tm/inst/doc/tm.Rnw0000644000175100001440000003354414656640247013571 0ustar hornikusers\documentclass[a4paper]{article} \usepackage[margin=2cm]{geometry} \usepackage[utf8]{inputenc} \usepackage[round]{natbib} \usepackage{url} \newcommand{\acronym}[1]{\textsc{#1}} \newcommand{\class}[1]{\mbox{\textsf{#1}}} \newcommand{\code}[1]{\mbox{\texttt{#1}}} \newcommand{\pkg}[1]{{\normalfont\fontseries{b}\selectfont #1}} \newcommand{\proglang}[1]{\textsf{#1}} %% \VignetteIndexEntry{Introduction to the tm Package} %% \VignetteDepends{SnowballC} \begin{document} <>= library("tm") data("crude") @ \title{Introduction to the \pkg{tm} Package\\Text Mining in \proglang{R}} \author{Ingo Feinerer} \maketitle \section*{Introduction} This vignette gives a short introduction to text mining in \proglang{R} utilizing the text mining framework provided by the \pkg{tm} package. We present methods for data import, corpus handling, preprocessing, metadata management, and creation of term-document matrices. Our focus is on the main aspects of getting started with text mining in \proglang{R}---an in-depth description of the text mining infrastructure offered by \pkg{tm} was published in the \emph{Journal of Statistical Software}~\citep{Feinerer_etal_2008}. An introductory article on text mining in \proglang{R} was published in \emph{R News}~\citep{Rnews:Feinerer:2008}. \section*{Data Import} The main structure for managing documents in \pkg{tm} is a so-called \class{Corpus}, representing a collection of text documents. A corpus is an abstract concept, and there can exist several implementations in parallel. The default implementation is the so-called \class{VCorpus} (short for \emph{Volatile Corpus}) which realizes a semantics as known from most \proglang{R} objects: corpora are \proglang{R} objects held fully in memory. We denote this as volatile since once the \proglang{R} object is destroyed, the whole corpus is gone. Such a volatile corpus can be created via the constructor \code{VCorpus(x, readerControl)}. 
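For instance, a small volatile corpus can be built directly from a character vector (a minimal sketch, not evaluated here; the \class{Source} object and the \code{readerControl} argument used in this call are explained in the following paragraphs):
<<VCorpusSketch, eval = FALSE>>=
VCorpus(VectorSource(c("First document.", "Second document.")),
        readerControl = list(reader = readPlain, language = "en"))
@
The \code{readerControl} values shown here correspond to the defaults and could equally be omitted.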
Another implementation is the \class{PCorpus} which implements a \emph{Permanent Corpus} semantics, i.e., the documents are physically stored outside of \proglang{R} (e.g., in a database), corresponding \proglang{R} objects are basically only pointers to external structures, and changes to the underlying corpus are reflected to all \proglang{R} objects associated with it. Compared to the volatile corpus the corpus encapsulated by a permanent corpus object is not destroyed if the corresponding \proglang{R} object is released. Within the corpus constructor, \code{x} must be a \class{Source} object which abstracts the input location. \pkg{tm} provides a set of predefined sources, e.g., \class{DirSource}, \class{VectorSource}, or \class{DataframeSource}, which handle a directory, a vector interpreting each component as document, or data frame like structures (like \acronym{CSV} files), respectively. Except \class{DirSource}, which is designed solely for directories on a file system, and \class{VectorSource}, which only accepts (character) vectors, most other implemented sources can take connections as input (a character string is interpreted as file path). \code{getSources()} lists available sources, and users can create their own sources. The second argument \code{readerControl} of the corpus constructor has to be a list with the named components \code{reader} and \code{language}. The first component \code{reader} constructs a text document from elements delivered by a source. The \pkg{tm} package ships with several readers (e.g., \code{readPlain()}, \code{readPDF()}, \code{readDOC()}, \ldots). See \code{getReaders()} for an up-to-date list of available readers. Each source has a default reader which can be overridden. E.g., for \code{DirSource} the default just reads in the input files and interprets their content as text. Finally, the second component \code{language} sets the texts' language (preferably using \acronym{ISO} 639-2 codes). In case of a permanent corpus, a third argument \code{dbControl} has to be a list with the named components \code{dbName} giving the filename holding the sourced out objects (i.e., the database), and \code{dbType} holding a valid database type as supported by package \pkg{filehash}. Activated database support reduces the memory demand, however, access gets slower since each operation is limited by the hard disk's read and write capabilities. So e.g., plain text files in the directory \code{txt} containing Latin (\code{lat}) texts by the Roman poet \emph{Ovid} can be read in with following code: <>= txt <- system.file("texts", "txt", package = "tm") (ovid <- VCorpus(DirSource(txt, encoding = "UTF-8"), readerControl = list(language = "lat"))) @ For simple examples \code{VectorSource} is quite useful, as it can create a corpus from character vectors, e.g.: <>= docs <- c("This is a text.", "This another one.") VCorpus(VectorSource(docs)) @ Finally we create a corpus for some Reuters documents as example for later use: <>= reut21578 <- system.file("texts", "crude", package = "tm") reuters <- VCorpus(DirSource(reut21578, mode = "binary"), readerControl = list(reader = readReut21578XMLasPlain)) @ \section*{Data Export} For the case you have created a corpus via manipulating other objects in \proglang{R}, thus do not have the texts already stored on a hard disk, and want to save the text documents to disk, you can simply use \code{writeCorpus()} <>= writeCorpus(ovid) @ which writes a character representation of the documents in a corpus to multiple files on disk. 
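By default the files are placed in the current working directory and named after the document identifiers. As a short sketch (not evaluated), the \code{path} and \code{filenames} arguments of \code{writeCorpus()} allow writing into a temporary directory with custom file names:
<<WriteCorpusSketch, eval = FALSE>>=
writeCorpus(ovid, path = tempdir(),
            filenames = paste0(seq_along(ovid), ".txt"))
@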
\section*{Inspecting Corpora} Custom \code{print()} methods are available which hide the raw amount of information (consider a corpus could consist of several thousand documents, like a database). \code{print()} gives a concise overview whereas more details are displayed with \code{inspect()}. <<>>= inspect(ovid[1:2]) @ Individual documents can be accessed via \code{[[}, either via the position in the corpus, or via their identifier. <>= meta(ovid[[2]], "id") identical(ovid[[2]], ovid[["ovid_2.txt"]]) @ A character representation of a document is available via \code{as.character()} which is also used when inspecting a document: <>= inspect(ovid[[2]]) lapply(ovid[1:2], as.character) @ \section*{Transformations} Once we have a corpus we typically want to modify the documents in it, e.g., stemming, stopword removal, et cetera. In \pkg{tm}, all this functionality is subsumed into the concept of a \emph{transformation}. Transformations are done via the \code{tm\_map()} function which applies (maps) a function to all elements of the corpus. Basically, all transformations work on single text documents and \code{tm\_map()} just applies them to all documents in a corpus. \subsection*{Eliminating Extra Whitespace} Extra whitespace is eliminated by: <<>>= reuters <- tm_map(reuters, stripWhitespace) @ \subsection*{Convert to Lower Case} Conversion to lower case by: <<>>= reuters <- tm_map(reuters, content_transformer(tolower)) @ We can use arbitrary character processing functions as transformations as long as the function returns a text document. In this case we use \code{content\_transformer()} which provides a convenience wrapper to access and set the content of a document. Consequently most text manipulation functions from base \proglang{R} can directly be used with this wrapper. This works for \code{tolower()} as used here but also e.g.\ for \code{gsub()} which comes quite handy for a broad range of text manipulation tasks. \subsection*{Remove Stopwords} Removal of stopwords by: <>= reuters <- tm_map(reuters, removeWords, stopwords("english")) @ \subsection*{Stemming} Stemming is done by: <>= tm_map(reuters, stemDocument) @ \section*{Filters} Often it is of special interest to filter out documents satisfying given properties. For this purpose the function \code{tm\_filter} is designed. It is possible to write custom filter functions which get applied to each document in the corpus. Alternatively, we can create indices based on selections and subset the corpus with them. E.g., the following statement filters out those documents having an \code{ID} equal to \code{"237"} and the string \code{"INDONESIA SEEN AT CROSSROADS OVER ECONOMIC CHANGE"} as their heading. <<>>= idx <- meta(reuters, "id") == '237' & meta(reuters, "heading") == 'INDONESIA SEEN AT CROSSROADS OVER ECONOMIC CHANGE' reuters[idx] @ \section*{Metadata Management} Metadata is used to annotate text documents or whole corpora with additional information. The easiest way to accomplish this with \pkg{tm} is to use the \code{meta()} function. A text document has a few predefined attributes like \code{author} but can be extended with an arbitrary number of additional user-defined metadata tags. These additional metadata tags are individually attached to a single text document. From a corpus perspective these metadata attachments are locally stored together with each individual text document. 
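For example, both a predefined tag such as \code{author} and an arbitrary user-defined tag can be set and queried on a single document via \code{meta()} (a small sketch; the tag name \code{note} and both values are made up for illustration):
<<DocumentMetaSketch, eval = FALSE>>=
meta(crude[[1]], "author") <- "Jane Doe"
meta(crude[[1]], "note") <- "manually checked"
meta(crude[[1]])
@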
Alternatively to \code{meta()} the function \code{DublinCore()} provides a full mapping between Simple Dublin Core metadata and \pkg{tm} metadata structures and can be similarly used to get and set metadata information for text documents, e.g.: <>= DublinCore(crude[[1]], "Creator") <- "Ano Nymous" meta(crude[[1]]) @ For corpora the story is a bit more sophisticated. Corpora in \pkg{tm} have two types of metadata: one is the metadata on the corpus level (\code{corpus}), the other is the metadata related to the individual documents (\code{indexed}) in form of a data frame. The latter is often done for performance reasons (hence the named \code{indexed} for indexing) or because the metadata has an own entity but still relates directly to individual text documents, e.g., a classification result; the classifications directly relate to the documents but the set of classification levels forms an own entity. Both cases can be handled with \code{meta()}: <<>>= meta(crude, tag = "test", type = "corpus") <- "test meta" meta(crude, type = "corpus") meta(crude, "foo") <- letters[1:20] meta(crude) @ \section*{Standard Operators and Functions} Many standard operators and functions (\code{[}, \code{[<-}, \code{[[}, \code{[[<-}, \code{c()}, \code{lapply()}) are available for corpora with semantics similar to standard \proglang{R} routines. E.g., \code{c()} concatenates two (or more) corpora. Applied to several text documents it returns a corpus. The metadata is automatically updated, if corpora are concatenated (i.e., merged). \section*{Creating Term-Document Matrices} A common approach in text mining is to create a term-document matrix from a corpus. In the \pkg{tm} package the classes \class{TermDocumentMatrix} and \class{DocumentTermMatrix} (depending on whether you want terms as rows and documents as columns, or vice versa) employ sparse matrices for corpora. Inspecting a term-document matrix displays a sample, whereas \code{as.matrix()} yields the full matrix in dense format (which can be very memory consuming for large matrices). <<>>= dtm <- DocumentTermMatrix(reuters) inspect(dtm) @ \section*{Operations on Term-Document Matrices} Besides the fact that on this matrix a huge amount of \proglang{R} functions (like clustering, classifications, etc.) can be applied, this package brings some shortcuts. Imagine we want to find those terms that occur at least five times, then we can use the \code{findFreqTerms()} function: <<>>= findFreqTerms(dtm, 5) @ Or we want to find associations (i.e., terms which correlate) with at least $0.8$ correlation for the term \code{opec}, then we use \code{findAssocs()}: <<>>= findAssocs(dtm, "opec", 0.8) @ Term-document matrices tend to get very big already for normal sized data sets. Therefore we provide a method to remove \emph{sparse} terms, i.e., terms occurring only in very few documents. Normally, this reduces the matrix dramatically without losing significant relations inherent to the matrix: <<>>= inspect(removeSparseTerms(dtm, 0.4)) @ This function call removes those terms which have at least a 40 percentage of sparse (i.e., terms occurring 0 times in a document) elements. \section*{Dictionary} A dictionary is a (multi-)set of strings. It is often used to denote relevant terms in text mining. We represent a dictionary with a character vector which may be passed to the \code{DocumentTermMatrix()} constructor as a control argument. Then the created matrix is tabulated against the dictionary, i.e., only terms from the dictionary appear in the matrix. 
This allows one to restrict the dimension of the matrix a priori and to focus on specific terms for distinct text mining contexts, e.g., <<>>= inspect(DocumentTermMatrix(reuters, list(dictionary = c("prices", "crude", "oil")))) @ \section*{Performance} Often you do not need all the generality, modularity, and full range of features offered by \pkg{tm}, as this sometimes comes at the price of performance. \class{SimpleCorpus} provides a corpus which is optimized for the most common usage scenario: importing plain texts from files in a directory or directly from a vector in \proglang{R}, preprocessing and transforming the texts, and finally exporting them to a term-document matrix. The aim is to boost performance and minimize memory pressure. It loads all documents into memory, and is designed for medium-sized to large data sets. However, it operates only under the following constraints: \begin{itemize} \item only \code{DirSource} and \code{VectorSource} are supported, \item no custom readers, i.e., each document is read in and stored as plain text (as a string, i.e., a character vector of length one), \item transformations applied via \code{tm\_map} must be able to process strings and return strings, \item no lazy transformations in \code{tm\_map}, \item no metadata for individual documents (i.e., no \code{"local"} in \code{meta()}). \end{itemize} \bibliographystyle{abbrvnat} \bibliography{references} \end{document} tm/inst/doc/tm.R0000644000175100001440000001036014755301616013205 0ustar hornikusers### R code from vignette source 'tm.Rnw' ################################################### ### code chunk number 1: Init ################################################### library("tm") data("crude") ################################################### ### code chunk number 2: Ovid ################################################### txt <- system.file("texts", "txt", package = "tm") (ovid <- VCorpus(DirSource(txt, encoding = "UTF-8"), readerControl = list(language = "lat"))) ################################################### ### code chunk number 3: VectorSource ################################################### docs <- c("This is a text.", "This another one.") VCorpus(VectorSource(docs)) ################################################### ### code chunk number 4: Reuters ################################################### reut21578 <- system.file("texts", "crude", package = "tm") reuters <- VCorpus(DirSource(reut21578, mode = "binary"), readerControl = list(reader = readReut21578XMLasPlain)) ################################################### ### code chunk number 5: tm.Rnw:118-119 (eval = FALSE) ################################################### ## writeCorpus(ovid) ################################################### ### code chunk number 6: tm.Rnw:129-130 ################################################### inspect(ovid[1:2]) ################################################### ### code chunk number 7: tm.Rnw:134-136 ################################################### meta(ovid[[2]], "id") identical(ovid[[2]], ovid[["ovid_2.txt"]]) ################################################### ### code chunk number 8: tm.Rnw:140-142 ################################################### inspect(ovid[[2]]) lapply(ovid[1:2], as.character) ################################################### ### code chunk number 9: tm.Rnw:156-157 ################################################### reuters <- tm_map(reuters, stripWhitespace) ################################################### ### code chunk number 10:
tm.Rnw:162-163 ################################################### reuters <- tm_map(reuters, content_transformer(tolower)) ################################################### ### code chunk number 11: Stopwords ################################################### reuters <- tm_map(reuters, removeWords, stopwords("english")) ################################################### ### code chunk number 12: Stemming ################################################### tm_map(reuters, stemDocument) ################################################### ### code chunk number 13: tm.Rnw:194-197 ################################################### idx <- meta(reuters, "id") == '237' & meta(reuters, "heading") == 'INDONESIA SEEN AT CROSSROADS OVER ECONOMIC CHANGE' reuters[idx] ################################################### ### code chunk number 14: DublinCore ################################################### DublinCore(crude[[1]], "Creator") <- "Ano Nymous" meta(crude[[1]]) ################################################### ### code chunk number 15: tm.Rnw:228-232 ################################################### meta(crude, tag = "test", type = "corpus") <- "test meta" meta(crude, type = "corpus") meta(crude, "foo") <- letters[1:20] meta(crude) ################################################### ### code chunk number 16: tm.Rnw:251-253 ################################################### dtm <- DocumentTermMatrix(reuters) inspect(dtm) ################################################### ### code chunk number 17: tm.Rnw:262-263 ################################################### findFreqTerms(dtm, 5) ################################################### ### code chunk number 18: tm.Rnw:268-269 ################################################### findAssocs(dtm, "opec", 0.8) ################################################### ### code chunk number 19: tm.Rnw:277-278 ################################################### inspect(removeSparseTerms(dtm, 0.4)) ################################################### ### code chunk number 20: tm.Rnw:292-294 ################################################### inspect(DocumentTermMatrix(reuters, list(dictionary = c("prices", "crude", "oil"))))
[tm/inst/doc/tm.pdf: binary content (prebuilt PDF of the tm vignette) omitted]
YmdW¨ p½ßKÇ7·ðêð&Š2ÿ™îÓ=¡çtH¯èˆ¦ô†ÞÒ1ý“NéŒÎé‚>ÐïB¶õ P “ž o2$?ýÍ>Ó1î{ü#dîŒ'©$F™aÊ»á]Ú¢½}0§ñÕÆôf’BY:g¨Î  Z;[¤wA9KÑWÔH/ó * ‹-—[ÇGØßæ#ÐÂYõ“4átÔÙëT Ò´¨¸±ª>ÿZªïŽ.ªêÑ…r¸äÀ¦Miè‘ù&êêQMõDâê«p>ëÔ4©g08y÷ú;;{„zœˆºá¼¡,Ó¡Ý¡ÃZ´«“ªjã´¢ V¨Â€‡w M€`íº"LS‘`z*¼r€ ƒ‹£W6©uC,©1¾Æ©2oï x_rŒ'>!³‚à\S„ŒlÃÕD¤G]Ed’T¹E·é[Ë=†á”žU‚„k ~ÜߦSÀèœF ÷Ð,D ôG¬ amàÇSƒ§eMxFxé„§)áÉõú >w÷Þns`5jÔ9w-Q‰€k¢2Ž,ÛœQuJ¸¨„õ ^1b`‘<”ø«|‡ˆïÌ7Öç~EèªàÁYæ<¸-D £Ϩ˜#}`j¡†¾|Þ‚õ˜Õ¡®Æ \Ëà}÷PPw„w‘î'ƒï" ÏÉ\y>¦Ë›Ê  HÃŽñ޶€®@x- `ƒn–F°fpHÀÞ1Sø@?ÊҞ̦9u£qøæhz7f·4úÛ5½†ÿã@ì×z ‹!j—Vôˆ¹ƒ%UÿžÞã*j’^/ÂÓÜ·}ŸÎdzýJ¿>ÌiHCÛËÒ»qM–~ƒV³ñw°ÇÅíÛ*°Ë:¬¢•x¤æNXA€XÀJöر’1¬>Ÿœï^b瀕Œ×墹8°ªí‚U׺<Àÿ€j{~ÎÅj­u·¹:wýXØŠ¿ÿ럦úÿõçï€Ô÷1S·‹`{·nŸ”÷„Ü—ô“»¿Ì‡W¦ çÏ!nÆã£Ùd2œcX>§ó4gy€~ã!¿Áï2é<ŽÈ'i–õËq¦ð|ôe²28Ï&¸8¨‡èÓQ:Ï®fó´+Z÷aH[´®C¦–΋¸NjцOD2]¤¦ÑAæ>«8-¢±½½ǸѿµõLïØ\M»Î}wÛµÅcËPËíü¹r¯ØAqüñ.k Ϊ¦3Þµ#ŒÙy"¿¨)-i§'²v}x;Ÿíw›?×ùÄ ¬Î9×õumIM7ôÇÐFú}ÝN{~¿Zw9ÏsLëˆX¨."^EÃC:¼¢Ë†ÓÑcè¸Üùú0œ¬çà÷Ù¹Ÿáæ$Šº°E]@(ê‹¢.L_u-)RÒš§\¾¶yþ©|\:5ÐOÕ‰–“‹&¢^j"êI© ¾ç€õ X±' xéRwv.¶ŽÂæ}ÿïŒ-€|ä g:\Šó6be}cÚØj|àdâw¸ÃxÑx‹?ÿá°p+‹jÿ1WÃø&y¨÷†ïKäyxr+ñ[|ÊOLã÷$(‡÷8è`õ ƒÕƒŽ·ÀÖÈÒ§9š«dÏxûBÃq¿‘±Ó_AJ¦‹”Üš,yÎöøBÓr¬ãà|ÿøðó£ã”øT‡lÆ)²sëŹ5aJätñÓOÕ銪꼻BÁLÒþ'“¾ºJ’¦J"õ ñÍšÿåœJÅ‘àNwÑŽ2D¶SZÂ^ŠÒ^Ô™Ô&Û‹›þRyš3y¬ûMJ¼Ð¤ôóÎÅÑÞÁþί[ƒSÞûhA¼¹¯šÞFw.;“®ã^šE^ƒÅ;è?\o|+,²Îù>sI!˜Ž#×*s$¡‹|'¶¸&ÑVz]„Mâ~¯ ±N#˜’ÓȪ£!ñ΋½­Ágèl°_S˜.¶bK5é55ùËt¨©kë\ØøügµESx-Õ+¨*4¨›'Â7Q´åÜH'q/%Ž[CO"î>mð;@ñ,Ð`Z)À¾<…×–ÚÙÙÿ~³õ‡n‚ŠKILG_dd·õw†-¶Žv*° ¯%¬¾z±ˆÀà¡*°P£¾=¾¾NIßƒŽ‚è–gÙËÂOi¹$˜ø 5ˆ{`ádÉŠàö&½¾†k ×ćŸËÓ/Õ­óí·6ö_Ztméendstream endobj 59 0 obj << /Subtype /XML /Type /Metadata /Length 1168 >> stream 2025-02-19T08:09:02+01:00 2025-02-19T08:09:02+01:00 TeX Untitled endstream endobj 60 0 obj << /Filter /FlateDecode /Length 5453 >> stream xœ[K“ã6’¾÷ØËz#*úbj¶‹K¼HÀáu„·mGxÂ3ö¸k=×X«D·DjHªûë7ª¨¶g£-‰ ™ÈüòËê7E.n üçÿß_7O/þñBЯ7þ¿íñæ¿î^üÇÏÕ¨r)Œ¾¹{|ÁoˆaË\ê¦R*W®¼¹;¾ø5û¾ÛH“;WÊlúÍ­,óBÙ*Û·S»)à‹,Ee2x_´4Z—Y·¹UBåE/õþ³RÙ´o6ÿs÷gXß-Ö×"7…îv°âD³ºª°*;âg ÂÈŠ_\‚ ßûi#ˬNÅØ²è°òÛ‘ Ni`@‘;SÐç)Ë’Ý Qæª7·RäNXžùnSU0¬²æÃ„êÈÜY—ý…õÂ9u‹/OAé*KÔyõ…Xè/Dne=²Ÿ7w¿áu?;c$¹µ:× >ßJ›[[9ì{ÖÀ0L>~B«ãV*ûncH`lÖ´<¦t¥Ëš¡¼Î2WÆ8˜ö½Ò’ç†×Àd¶ºyÀeœ®Œ¶Ùpž§¨‡~%3á^ù/¢Ìd!MÐÕÜ+]²"ÂéÜ–úæVƒ #¼"ÝFá¾T¼Œ÷ÜÊ¢Wáï\aGº*m-ìâ9ýŽØwÉ~ÝêÆåà8%y XXXØÎ^Üý 6tߎ0seAX“½£w³ö©k¦© «ì©}·‘°ûJ‚ýè%øb:}t*÷ý0…¯eFOá`©`¡9XPFšÕ }›Éú“S”Ù±íZr¤Êâœ^+—jel^–êÂqR½à³!BÎS{hÿ×OŠ«[ I¯“ýc¢<õ±yO6±â~xëÇi“@iÔY›*' (Û]³ «”ÙÃLù1yNN(Kp콞ü‚¤#¥¤H•ª ìƒVÙt\ÓÛäÕ"(~ª PØ€ ά²ú©É7·ÚØ#ýÝöCdhU‚¨ôiƦ#]Qdˆèc3íqsý„»1¼Æéƒ&ØÕSퟀ“´ÇûCQU`´éÃ=ØöÃé<òÜûºÛÀÖá18>qò΄Kl›q H”§‹UòǺíŽ(:ìê+s%óJé…Í`9| aihjï§ ±Ãeýcнc8Þò¶R• 8§g!P·‘àþ ºhîÉO!s¬§¡ñÉêŠBïÇs°„ÚcˆÐië “˜Œ‚•&D肼Áy½ðG@C¤fut€5ê‘-O[Øl§1¨g¢zè÷ìäôÐfãT9®7ÀûvÚ‡õÔ,ìãS˜I…hA£G[ÿðcøóËFä·ë×#¸Ü—… žü/5jfà9sÛ-÷£9‘€ô¾Û¡=ùý¤ß,+ ¨IŠ­‡VQ¨,ÈbèÓw kŒÓˆv û…„²›ß{਩ž‡úE4«ßé߉檒ÑïãÔõÐÙéüphÇ}*C4÷ÚúÕU2µSz• Sÿ¹?]}€× g¢WÁ(x4ì­UUˆŸßL/ãÔn7¦@ÀPw5Å¥ÉÞôÓûzØþ(Æ…n´ðkviC£¨0[SކLI“gÍ„ŸmC}È_…'Òmaï7WUñh²¯»ðPS>Â\†pÄœ”zÌà4'ø¸{»=4þÁ±Æ ªçáïêZªr`¿ÀÕ®¥ªRšdy{I ·d{ÃÊq¥ÅŽB*’¥Z$Å[]i˜üûþš(Ô¼×vàÍŠ9¬lá°ò¤‡wa•÷„™†–žd}ð]"¤TÙ÷GO´`wNÄ| ÔÚ3"å øñª˜Dofvƒ{h1Ês1ð æñ$nù¹âŒEϹ¯7¸ñ¶&@ƒ—0Ýüão:Îb}*À9¸>c"¡À ´Ž²Ô¼[iôKÜKùÉà‡€[æ· ê9K®³±¿Mvs[à)+~Xª\U˜ìunFÙwͪ67¶ˆ+ÃNÉi32’AŠÅ ²JbÞ.Ù¥†™zÉÓ@þ­"8¦¡ðüY>á¤N.´Ä`î€íÂàÑÎìk˜\ëH/à3Er?Ã~vüÓ[ýîQo'ÿRâ¶ï¶Sh*I›Ht!<H%O £y2Eš-Ð 2,)³±aB]ÓBw«°œ 3+ËÊeˆ¡Ú°×O‡&ð ¡]6CÃJ†@ž>[ ynülÀ´ëÎÃ~’ˆ1DvÍc}>€TÒIže±±É3!阩 ÕÂgÊœô&À«wƒ ñ2FÇ"/“7ΣVýRTÈÔc-û ËQirQϱ¹¡ÈDtˆÖZw^ N¶”nF._¥°ØÌTWÐS«4ý²A3j¤ù°j¡(ëZØ”U^ÁîÆ•7Þp€™ï÷íLÄ÷óïÀ;¡0i¢‰1nXPþlÖWŸÆ°g”ÂŽ¼MC§ëy #²÷#áòª0>ŒtÅ¢== ž.x#ý8­mv€â!\K]Ú±Ìê¡^6È!µÈ~‹.áÝ–È–§ñ c9"0@}<#E4 E€]ÍšH u‘éºHB> 2ÈoÌImÀ}sØÍFx<H æäàò²›#Ò,“$`Y‰µ<Ú !¼¢ááHwM×OMX—jnš°©Ž”™Çãf/£5!¥&ÛÍÂôÉ7‘»…™nÀ½ƒÏ‹¢òй¬.ž›KWd.DI ÇàIþ{âz”(æ’ÀÚ³t,„üTûþЄÉT‚Ì’òY2ùSß5c’ìðæ¼eˆ‡ØßÏ/ §hAUâšµ"¿\Ƶ  ~ð„Pº,¾â+AâZ$ª¾¨çë g¢d?LL1fp1‹ýðf^ò°_Kî³”b Ø5›À;†×}Æ>˜¬ Ž€$YÉX_*F©¯Á÷È¡(žã¼4 œ§8¿†Õ m墳üôº{s  
£€k0ˆ˜ü<Ù‘0šËØ5¸a„ˆ•ÑOP¢×]ÓaýF¼ùS8}«œƒÊË]p ·–ÀÙ­æFn›7yør Ò‚¾D>‚ý‰ÀG¨ÓAÕ¶.˜ ñÈÕûË6P6¶˜)?ú1éjÏ5¥&Íúó4¶»ÆÏ8Wg—µFIì“m1Ð ’$96ùÓ¬œà‘% Çutê±¹ßÐXIZAtLZ }· åÿ¥ã˜*/? Û ˜*V2I„m.F¡¢ÏÊ6¯U„ê f´&‚3¦ðŽ¿)úvšã¾o“vM3Œô ÷(i¶%™ƒ¾BÖs=AŽâ¸Ã-%|­‰Ì¾îžšÑ‹¨”ÌϾäß8w»f8|Lé#õ,Çgl–QÊѾ"ÁÁêó¯Cóo`®& bÒåÀ,«y¾23«OìÎm²4üe¶ ÔVP[¥ǤS¸m=¨‚/s3I\]·¥Sn±¼îÀdy`ÁÍ‹ž‡¢}pé‹^Oš`ù™\ÏÄŒ—ôÉÆ0Ü-§×·nAÔ»m}χ  /öà ÚGÿSI.ˆ|šûz P<1Y†š ª Kãþ Qž‰CµÍMAÎp€òóƒzYdÇÓ@º ä Ĥü˜æêð¢ËZêªÐVäBæŸ o䥿Óá]@§kὦ›‡@g—ûÓ¨«ò‚Éþ¼‰@«ðÍIR¡(#¿¤Á‡„X1K¿ZKÓPþäR†¢üÊ6—Uéb³æ«ß3Vz´®¸`,̜Ɩ¡g ‡_ö¦?ÛfeY< *õUt¤Ež%WÖºŒåm4„V„9§`=›’.Ù–øA¾Ö»Ð@›ßé\Jeb¢¡óûìp$Ф¼y€¿aâhFì*í%¨•«-1è EóYǹÒrÙ””¥gª­"}’2„aÛ0óç¸æÖJç"—ì›v¸º?•­‹ŽóÊX´_r bØçe.lR~…Òb4¶aÓžŒ¢£>KXG_ÒBÈ5Í€Ý/%¹ˆg‡ÒP‘É |¥š{O4ß׬ØjÃt4—…¥?yÜÍGr£‘§Â¥BÉÝŠ©æ2‚ÎÏ ãVô&âˆç)B6¤е$ƒöPš‚ö¤ Ÿ˜9„1¾gý©-Oî¯D0JùÆ<²»:Òœ*xƒÈ©žöóQÊ‚äÈØØÄS[¤ñ;+ç§@ªùüôÐŽSXT òJønhäJtÕöP?°0|ÏÇ›ø•§?5g®v¹f¢¯Êo€â¢û(óXØØv˜¿öÑßßÇ—â–¾'Š\Ywá\wÜ‚æŽþØÀŽîBÒÎð‹pD³iÍŽ #UHëÔn ]–3Vá‘z=ΫM³(3× -´#„ƒÀoÂS.ñEœåÁƒ¯D´÷ÓUÜæ‡6ê)ÎÁu” çÙ<ÚdäÝÜÕI²Äû2ëë­* Éÿ‚g{+­1S`‚Á:ä|k%Q¹ˆçXÙªásý´Šÿ°°Š–ƺÐr‘x7ŸÃQ±†’Aòÿl ãzŠ×5]%ûvüË…üŽ‚¿Î»:†…©ÜÉÏŠ!ÒVY„1Y=tãér¤Ü&ŒBÖÊ ñ‘ü®9°!4ð~;K—*ãûlsí–ãÕ“BÙ‰*i{Øy•Ü͸›ï ¤Ü¿¬z…û—PDà¹v‰–ÎÆ}{½¾9ÀØ´81è‚)¿[ôž%Ëß/XürŸ­ÌËR&ûüÓ¡n»+¨iªÜÈpN˜üb>(M<óàù¾ùîÊlºÂ3LñOÍöͯÿÈl”Ôµæ+ °m¢ä³Ïˆš |G‚î(ŠìMÓ¬F¾FpvËlŽyæg¶öµVw’³L>gçÓâhpê—7x(YhÇ]ÈŸ›Ò‹|uG;Ám¥˜7Þ±+Áó˜¶hÐ5ï ±ów8¾­U ¯£¼Ï‡•J†h^Š.Ôòï*Û5Iëè‘Ïk ü~žÃñKâ÷KÊÔäã¯ÙÃ\yû¹Á°Ë†‡Ïœ6LØÊ†v·k:DLÊÜ"û–Ûºˆ 8öqýXo¤á¬ì“ÔœnòÌ|’ûØö´f>Ræ>¨Í~ãfæYÁ»1Î_©YãÒ6„Ÿ&/úJËeg5œï0±Š#Uä~Àkj³Ü-ø¾š—¥ŽrUÏn!¢M‚›„|×vÔbÆ#A(Š¿Fç‹=4*ð­­ÃxÅšÿþÍkE+¶¢È¥r·rînÓ\XÍúRž­Ù‹ú¾ãž8Xt[Ô çK‘Ô=âÈî­!OHüI·¹ þUò>ÝaÙ·ÃΤŸÈC9‹¯í¤tPê-²{¸ÎAñŽ]° aKzû(ÂeÓÑ땨Ƞʣu¸Ò‰ÏŠp#’¾¸À i¤ýH /Zµäí Þ_±!²Ê¯Lro“3ÊÒzgKîmj´PˆŽZ,xOõÛ÷‡î^“áÂCýÎ?ÅžøyšŸRC•踋8m…¯?ÿÓþèm1-õ|bKÿ!¹/æËè-k’6ÀÂXJëÜ8}àîãéi¡*Z¦zé´œ;´;ßTÅ{¤ÅåÎp¨7ËO—WW ’+M8v×|ßà ÃçÓ‰oD#·ë‡ä½ZÞ‡¥•bRH\N") ?­RNa‹\Ç‹°ñû5+ÂØ9IÑ-‚‚6ûë(µ·ˆ9à½O/¨§×ŸØK…LÓëê›t³”‚,·;S7‚ísq=Àß áïñ 0f篧K¨ .lï»@àÙ﹞ŋ°žôÖû ˜þ:Ä/ÇÏÇBOí}•|èã©ÿ|…¸ñå8ÓÈwLøu:¨áÒ‚( › … .Ì+â/éÁÍ¡=¶S<`‚>@b™E¦DêEø©øbÌ®ß&øyTƯ-ú*{?´s˶º>Õí›eH×Ú© Í /Þø¿NºÎÅ-ÿéÆéÀ7ë+.–Vþ^¤ä’töšCø£:a /‹ wÅéeîŽÇùÖ’~e Nˆ·E?LkU—ƒªK»¤R϶ëøg,tXùì]ø›œû\kÐT9üs¥¹l_]ÓäÓ‹oª ½º%«ú« gð@3ûçþXwQÚÓœ÷ûÄcNM¯… ÅžôèÇwín½7.âw­Hè5®ñÈ”Ž’:aÅ›«bN¥_5pîÃÁÇp‰BÁaÁükÉe¾š,ÏTsûá¯m~åïá}tüK<#5ñÛ/où3]û8NÍ1lÍ}öì?/_…¡ ~ù0½ ÷ŒÀ€';ªi¶o‰äñ$•ýç¼ÚËéøÒß=ŠÉã™H÷Y–Žï§RÅ;N±¼¸G¢ DݶçtÆïˆÅúÿ}÷Ý­}‰¹tå6ë¿C‰§5š8öñ6"cµ¦ r û„Õ’mœ é—àé°þ['müב?¼È¾üÒëöÕW›Ï/%û‹ÿÓ¤/ :âʶT7Vo:R«08žšmûØn¿˜7«‡`ØSãhšíshÞ5‰‚@Qº]ó¡ÙÝo¾¿š¬ˆÍf$–ð>‰b lHÍO3ae³vœ •͵17·ÀfèX £K`h}{÷âoðïÿó QGendstream endobj 61 0 obj << /Filter /FlateDecode /Length 269 >> stream xœ]‘Anƒ0E÷œÂ7À$`S Í&ÝdѪj{°‡ˆEŒåEoß?CÒEo¤cü™©Oç×sZ6S”5|ñfæ%Å·õ^›‰/Kªšƒ‰Kئ5\Ç\Õ§·1ÿd6hày÷÷ñÊõgßë“f?ÖÈ·<.cºp5XKÃ> stream xœU{PSW¿!$ç¢Ô¶¤Q¬î½ÔζÛÖv¦S»]«}Y_ˆÖbyyÞ@B‰IHÂý. Ix%‘KQA©/Q©ÏR}¨»Úng·;Ý©ÝvµÓö2ãÞÔv·»³ì?wæžsîýÎ÷{|? GH$’„Œeé©Ï¾ðÜ¢ØË‚輸è|)ÙSïN­’A¢ã½óN$á…ã™þí!"N"™ý«×ÊJÊ5•ùê”Ô²¼|uiJzYÉÖÒÿZ$"©´L]©Y±µj[j^þÚÂíªÌ"‚H#ÖÄzâ-â âU"‹xxšxƒXF>EøµÑ·t’‹¸ç})f¢iJ!ÑøæcB.¸"$àyxæï± Ç}ÿ*NfSÂ<”àc}àƒ[þò‡½OЀ[pd‰o)WÍéA™ŽÕÙ%…Uðä\·^&ëÛ ­¾땬&î7vwL:øŸ]m7ÕÅºÊæKQ‰Ú=Dã“îkãǃþ]Ma±ž—õ9kmf+TCu«¡³žìn t‹Ò6¥¯ßI³X*»_Aìh!Su’;ãRLE§•k«…ù¿¤ Ÿ z5¬jÈÅûRo^o¿u:,.9#Èœ/®Í¨yœÂ½8…Rp*ihÓFŸî÷è’~ bŽâúT ÌYÖFZxA!·­»Œ/=d>§aâ ÿ÷T2žá“ñFT¤ó ÓŠ#øÍ2Åu&ˆö¶›óhaz¥ðùç™Ro—ªíd»¡‹}ŽÄÒÏŠ MÍä%xþ¤ôÄ”XGýG¾•µžn@`ãl`% K%ÃàZž‘—¹‡|œ¼tTÜ÷²^ð‘ûûÜýÔn^~Ÿg[$ú[]ÒÑó8gü-ü›9Šooão”cÇF€¼²ït!Z‚D·ñ%³¾®´¤yV^cNÃ8œ´¿Grr.·7÷^¹âË¡Æî½îAò#}*¶o) ßÔvÓx‚^6bì.ɈdÂX_®É°›ì"P¤:`ع ‚:‡e.寬ÌM]f'ëäÙ9ûŽßÁèПiñÆßü1¦”Y¼¤5úš’iAíÐÁlí–V}«1\. 
—lIvÛ}6¨cë¬6§]µ5ùµù+Éà'ydqY<Î&hš ¯Nô¯ßµ£§hwQˆIöX½6°@­ÑZËŠ‡_áͨôœÑ«o2uš‚Ûn?¬:êh´ˆh‚Çëq»û$:õî¹@ò“ ؽV¨…­«sÔÙ^Ñöh#Év¥Yi[ ©#ù»?zkH4VßU:8;çR[S­Ó¶C¡ö@÷'¿Y.$½&<"$ÜZ€š<3z¡ƒnpqà&]Èíàœµ`´8)͆-y m£¾uåþS¸BÞàñ#8îøhiv5ëžtóœŸTC‡¢ÎóÒ3÷…ÓËÿK8´&ß´UôØr½]Ú5NsÑãÁõ»3Cëç ñï ñÂ,°-"HÃÎ ôßé=…Ÿ8‰ŸœÀOx;š:ÝÜôÉy%õy”™Á ¯–«Ýû½œ8ºÊ¹8—ÈA£Óå¨u”›ê(MN¶* J¡¨EJ±_‡Ë$T6Ü8ðÕgð1ô¾ìIk0±µ &±dœ)Mä„ñ%žÓ%á_Lâ³×æ(nOÄ:ÞúÿM‰ˆ[gYPâ,¦t Öòr ìd-@–U4î§±?ƒ¾ÄóFðlZq'3/ rJq»ZÏVÏ{\ø«Ø êó7vQßÉGádíÀ¶Ý¥ž<( ¬Ú ê¾ñ…Z\r8š¬KâÚÏnÞ…796iò,uïÁp³ß×A÷ézLûüìâå¦LÍ¡H¨§s4B#ë©·Ùmv0“¦sÅÉœ§ZɦU§RÕhÕ”Óét8Àú`§Ê§së€T|õÒ›«_, l‰hhÎú”>4q|ßõ…|üOÆÅ/?V’ºê©žÌ´â«SøJ ]ºtµ}iF†fùе•ããuÔ¹9ËÍ1œ/æf ï&4ÐíÜHOïCù:ç¦ÿ+7õòJÐ48ºIlE·˜‹§fæ®ÓS,Ʊ€‰ÿ8泺° cs—ÅÁötX¶ÒÂþŸbl™(×ÊJ1|£ Í¸T©q˜ª š¬ jvw·‡Â”b°ÃÐUA).;lý> stream xœ]A E÷œbnÀ´1“†MݸÐõ††EPºðöm]¸ø?æò†÷×ËÕÙü½zQcŽ4û%*‚FëXÓ‚¶*mSu5ÉÀx“áý 9@fïr"þ> stream xœcd`ab`ddä v r247²qäH3ýaînû¹à§k7s7ˤlBß™¿ð``bd”ÖrÎÏ-(-I-RðÍOI-ÊSÊÏMÌSpÊÏIA“a```*Ée`eðc`fddû÷ŸÑr ߯Üë m«®ÈÛð=¸\èö‡ŒuoÖ‰ _û®öS@Ô³ø·¤’\»ð­S¿kغk»êºk9T§Þ½9÷Î ¹­‘ì1ñí'7È}_÷ƒ]x׆ß߃Ùùþ3óbèÙøCx…Мïº/u?|7!qágs~ø‹¦Íg¾¶tqGŽüï‡ìÂÏ\S¿·n`>çú½ž­ÿÄŠ³'çMìÜ3©›c{·{šüïU>©ßë6%ÙñHÎeï^ݹºvEÙ¶èéñÝ>Ýщ%A]l-g£Ö¸¹~áuÂϺ¾3ÿflcå“ãbžfÏÃÉÀ}`†endstream endobj 65 0 obj << /Filter /FlateDecode /Length 161 >> stream xœ]O1ƒ0 Üó ÿ ‰¡b¡ C«ªí‚ã  8QC_ C‡;ɾ;ù,ûá:°K Ñã‹XÇ&Òâ׈#MŽEUƒq˜Ž©0Î:ÙßtxÁf »Ïw=“|^š²©ö zCKÐHQóD¢Uªk­í±ù“ŽÀhgSwJmœý§’£¹Ãyp‘8•¢¥H.à˜~¿r 6ˆ/ô¾Sendstream endobj 66 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 227 >> stream xœcd`ab`ddä v 647²qä~H3ýaîîþqý§k7s7ˤïB_?ñ¿``bd”ÖrÎÏ-(-I-RðÍOI-ÊSNÌ+VN-ÊLC“a``` b`0icÏýÏh™ÌÀ÷Ÿ)ΓaaÙ÷‹{¿/ØÍøîÄ÷{§˜ßýtýkâûÅï:ì3;¾;ÿÞ6»”õ$ÛïÈï×L\0qþTÉã«öî¾ÈÑËÞýå·ðq퉿í~_ý¾à» ûÕ¶×éÝzÝAq±!•iu)Ý)Ý|r\ÌÓìy8ÞIYÄendstream endobj 67 0 obj << /Filter /FlateDecode /Length 269 >> stream xœ]‘1nÃ0 EwB7°ì:T Z’%C‹¢í™ qJ¯] çPÍø‚I‰s¨À6h€­`4ÀNp4Àà4ÀØSÐ8‡ D§×n/]‚•ÔLb&XIÍ$f‚•ÔLb&4@/ ©ŠTÅAdÁ4@¬`ô°z5{1{X½š½˜=4^U¨²¯çbdurƒçÊm¼ÕÊyÓCé!äKæ¿[–µÈ”EÌ/_—‰Zendstream endobj 68 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 2319 >> stream xœ]U{PwߨýiQ[r)"º±7µ=ÛªT{Wõô¬@¹¶¶§…ÚZ¤€…ð&‰ðHH²d¿ Â#˜‰`|€UZ•ÞØ¢ôª¶´‡ÖiïzõZíãêüâ,3Þ¦´3›ïþ³ùþ>ï÷³""<Œ‰D R’’_zr}\\èæ·Áذàr1ð™Á›wwD@¤"ÃíË܈Âqàû–ܾŸ‰\• ,V••ÊKV¾¤Ì–—(V&+‹÷)þï!AQ eIRÙóûÞ¬”çæ?÷äúßo"ˆÄ."…x”x•xŒˆ'ˆT"‰xŽxXD<@DâAb! 
¡!‚¢<ѵ°ä°vñÓâ±ðgÃE¼M.%sÉÛÔVjÕ,XvO”´“X|/Lö Áùƒ±n&¦pÖ”x0/U¸(?󆌤r*ÌÉ´ûK)…Ú:,Ã;¨KC·n½ß±kÍ×RyІbºê§÷¤j?7"à Ùmé±½™ û*óÓ µÆZ¦šýaéâ{¢›å„ÁŒó²š(ÿ4>4-9‰«‚ÉÒ•ú5k— žüŒã¥8òæ¼€–ÌàðxCKN2f–‰åï盥l „ê’ýË™#H2ÓÒí²ˆáªÞâ¡7­)ð¤°;äi%E9•rØ™¯¿jiÛ†º‘áuÑ/ˆûCdÔ¡C i¿"[ã§ê –­T¨´ ”—S0ӎîùZí\ ØÀÊÚ±”ÎdЕº´î¾N·{8Ï“ÿj¶<£‚f f=«g±X V¿ºDxù,Þí¿û'©b€ 0¹&Ö&Ù\7•UÄÐE |ÚOÖ€ŽÕÚ«²ÊpÓgÔÙw™'d•^ì,«¤žQ©£“p'ÙßÞrƈ|>dGödYSšS‘—ÜÕó®ê½Á˜@³&jÞ×hÉÌOÎvP'<ÌNŸFAWèTõeLÔœ‚sp~¨k‚[¿ËÚ"ü”Bi“ ÆÜ£ø‡ê"$3Šv*pȲWƧRæg³·Bä¶êÏZ\à>œ¤e÷=JpýwÉŸhŠÜ}FªPCŒ\67LÉ‹L…55Ô úüTí¼¾ùŶ£²à05°èA?9?zÎ\åŽýöFKÆ †K}#cî1@}ëåWd»©GøD%¿B¦§Zÿ|}ÿy8Sƒ]ˆ#%º´¦@þ±Øpøˆõ8ú¸,•R˜3 £½ÎË(ppíà@7Ÿ:»iÇfùV…Këóu»zz+œjgñé”d<1}G¢|Ï¡w¾øÇÉëítgc'ÛÙ„æ•5¸ƒk<Øš Ü=#î1ýyˆæ¾£<íݾ·™G ÏC ^øÍx F0Á£×_Tg–ÊX|5;çX)³×œ©ÈÙ_]žo‚ܧ2¶×;À‰ðf :Øè@ï]~ë<óñ‹7¡ž5ÒÂùáýŸá^ó‹B›:Í9=ÉÃë†×]g­o®#b(¦¤ªP¯©HWìJ†r¨k­lEœ€ÌÙ ^äÑu—•UéJ_?¥:~{ÓûhLàîÞÉ®éY.ÆÏÇ)¨Ö[Í•€¦®ÒXQ¯Ú^ŸõyÑWÙ_Xìf;´ ¾ç¡ý!Ëý&´ÍOºEþO°þ‚ø®–â£d›ÍÚöÃ+_ó ùÈ?ð"m8µþó\Y¾×Ø ¨‰ü®Œy®rMV°"›Éf®5«jŒtIzZánC¾GÝŸuš¹ï!Î%>qëú¾‡éùÙà3=x÷Ä /æ<MÔ{—_éÇ“ãÑ’;øjpµ”%U½YŽ<@Ïl|mmA{·DÖÔ`3ƒP&“…A’¯k5Fué2(ì”ûµÂ¿Q›ž ©(GçÞ˜Å+ü·ñ~ñMôŸÈŒ‚žQ™•âºí-]~m_Ía j!?…¿wÜ@ µ¾JCSC“@£ìÞwÞP0€5˜¨CºŽºNg[»“¶QL¦[ ¹®U!Þ2W-møvtÚœ\ Éà_fÇ6d¤¶=¤ØùÜÚƒéiÉsxNÚA¶_ÿô¢ïÅ—2ê·lÝU:>a )±ä›Pˆ­Æ1_k<Š‹.DK¾ž³AŸ9ýWa–實%Yð>j7VKULu”£r§r`Àåó ëÑöÑ’¯؆ØùÖü¶r<ªáÇrzª!¥lWVØL`BªÑlµ4A'ø†®ž£]§áÑ_ÜS\YR¢ôVô9;ìmm?{¦qß]ææ4QǦqë´DX{w­”O®dy=<Žøý|*N¤ÛIœ| ëàV(P–+p/ 33¬9–'çV‡R¿UHýI¸h›l:fë x‡¼Ã€ðrŸµF¶‚íÌÕk•¹9Åû™H>²­?eøÍòé”â’øËàm©‚: #ö1šóRÙÙ¦l­ŽƒA¯Bá¤eu€^Wù¼B5[ZõÕµÆ:S±Ç°~iw%8)D]ðºøZÐ-uOŽŸ¹蟧žÞ²*~Ó‹ ²„Íå¥åêruLé~S}=51`æÌÀ  Aöá-ed}ZÓÞLP‚¢Äo&Ïœyûsã—/üg~D8ê±xbõ†?>¢l+?b¢w÷ôöT»”6™ð¿£ågšƒÁn¼òê€FôíA¬Ïàû¥Í¬mÔçrõõj\êÒ2m +3QOoøGþûGùíó˜Xi§õdü¥ª4‡ÅÙKý†Cf?LÁdïøYks³ 쨑bmL#:(«Öš•õÅŒÒAîÈì47š›Ì²fhìAüª¾±¾­£ÙÕÒBû»F5“7˜.>¶C’ñ©myUõU Uð<¼pV{ñìý8Ï|¤Wtü6_ <-åHØ`yøågUyyéÂÊÈíj?êékðT{Ueš•1TÂÅ‚«³S禎ң—³0‹ð5Îç“_ÎJΦUÙ† HC«.ñ ñ¢óïŸx¿ƒ¶àERÐs0fNT¬^èw:1# PºpØ÷ßà£S¶z1½PìØ¹€ þw¥endstream endobj 69 0 obj << /Filter /FlateDecode /Length 345 >> stream xœ]’Anƒ0E÷œ‚`(žI$äMºÉ¢UÕöĘˆE "dÑÛ÷ÏOÒE¤— Öû²«Ãñõ˜§­¬>Ö9~¥­§<¬é:ßÖ˜ÊS:O¹¨›r˜âö Îxé—¢:¼õË÷Ï’J,¤ñÎïý%UŸµ«ùS}ÿ(ÎCº.}LkŸÏ©èœ Ý8†"åáß_/îþÅi|¬6C`œÃ,ºö%0ÎaÛÀ[C 7”ÀÅpàÞ§¶<¹åÉc`€0ë<-Îa›ÀC(xjxÓðPðÔð¦!u`œÃbQ¸,¶,X. 
—á+tsø ÅœETCè +ˆU`4Da#±F’L†(',(VPQNYP­ ¢œ² ZA…¯ÒYÍYá«tVsVø*Õœuà΂JIL»ãçeÚuÛÃy¾“2ÞÖ5寋¯ÇÍ”Óß\æÅ¾*‘âÕ°Ëendstream endobj 70 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 3509 >> stream xœmW XSw¶¿!{ÝhKÌ€è»ñ¡v±¨8U;SÛqaq©¸Šˆ †E(ÈNÂ!ä„’°( «¢(âŠZ—±JÕq­KêXŸú¦Ê£=aþÌç»QÇ™ïÍËÍ—ïËÍýrÎÿw~çw~‡G9;Q<oTPÀâÙsæø8¾L±Ot²Oâ û›|x ŒåÃXgã¤w6»áæ÷pò;H½K9ñxì ßÄ„¤ô´¨”ɉ‘Q);&/NŒœì/K‹Úù~¤(ŠÝ‘è›ä矚–¾,B–) ŒŒZ½:fmlPp| ŠZE­¦¦Rk¨iÔûTõL­£ÖSÓ©ÅÔF*”ò£fRþTåC-¡fSK©O¨åÔj.H­¤\)1wЦ©gK—Ìâd„ž†ce’‹¿VžÉ†ÕÌ¢ßoc–Çw$ýP\®„`äê¢\1q¥¡X«5S /,¯¬ª2šXc™ÉPfäˆÍkëp4ýXk¼ ƒ¤n?Euá»ûÝ…×qþð»¢/$Äu2O ¿¿MP’P“ܲõ”â0·ö_¸%¶Eѫ˅F¢X¶n£¢¿ÅVœB ÙÈ ¢_7ŽÚjÿBÊCÊ!\ü®×ý²š4%é QÑÜ¿ÆW'µ„\):gᜥîŒn®.èK¤e”XÅöq„¡W+]$ ôž†âb’L«Vl[¹˜È¸±KW Ub<Í=ÃÅüÔ!”ƒoèÚêè t$NüoqâØîd£ß#›zÑ›cZç/èŒs†Nu¹ o£·}¦¨Åh)kæD²—8–&cÈÚd2~òÒ/œ¼j9W+Ö 6tÇî;n=Îþ1j-U˜’‘®²ìb1’~ì?@(ôùúêªj3Xkz} ¯ÊÍ€dfí©Ä¾}·/[Ù³èy'œÁ‰•Ц½ºf`*ld®D° öj ¦˜¹4® cñðP¤)ÒòÓ‰;í`íÄÞ>¤ùHì>¢ì8éòÕÖ+«7–•ƒ‰1+K ÙB Jr óÒ7mIÝ̇›ï Ëír…ï575r5rŽþ»¦õØ£€o{-º÷èL‰&–— Á%ˆ‹-áb¿V*«Ì0Ë&Ü%Ko’eߓ寽´‘Cº´ÎX}ï`àm\iª-¯5ÔêF=Âmó²ÙH‚ T…ªÜüT¥Ä'€ÌZBff+³UYŒF Ú­¨UÖ.ÙèÍ}æ7y¨vªŒ9À¼m5ÖŠÅRÞ~n„›Ÿóñ/Ž›é¦z‡nÆÐQma5¡œR¿Kœç“7Tmh w¯ÿ.qú¡¿æÀ~fúöÑJPh9õHÏ0pý÷’R¤xÄ,:F_ß…|óõ¾ÌØV¶´¨T­S1¤vвH•´-41ü!⛂+Úr0kËê24Š«†XpA¯•KIþˆS1]„çU¥ÆJ¤ÖïuÁ9qáâàˆ¤`8ÝàýÙë÷¬w¬¬®´D¥Œ¹À/S'ç*Ùäm!é›`„4¥–ui{¡‡±8C¯á°e_½¥£ºÓÙÆ:æäï¨üÎaQç'<ìËÑ“oŸ8¼JôEA`ΪpŸ¥‘ÄˆÌØë=0°äiâÁ üºïâ½ãß6݇{ n$n8ÈY32NÄõŒ‰»~2#ïÚw]‡kŽÀ)ø&Í~f¹écø ¼ò‰˜ð2c ba¬l‹:ÎÄ÷Èû•Ç5 ÐXÜàb5óºÁq þ§9ÂKèÄuÓbÉÕêÕ$›c§ÐÙF˜‚ƒ†Ÿ~4™´Z3›•WÍQZxÿ«æ4kksSÛ‰u–M>©‹“XE*¤pÝÄýg/Ç<«ïÄS19C€‡8ÿô/vàÍ‹›¸ÁôÝŽºÒ„ºHh“cŠps„Ì OEx ]écpRs Ò’áœ9Û›Cã¢Ó¶hÃ_Q‹SLÖ‚Ÿ>¸Õ‚y­ ûÜN½˜×ñ/L·Ü…Oó‘ÂPQZJ^ÄATuÂÞİÂÈhˆ`…ŸgÝþ½ëúõwÉÇl0§ yÚ<`¶loì—Òº*CEY…-«CÑ ÌýAÛ/rZ’­â¦ò¦Š=º¢B”LzMn}}uM5±)aûöi«Ñk4í L…zFøKþ®¸hcôÄ~»Þ;ªvsGºXGÚE ×–€öî¶ì5Uê« ˜Ç‚“?CÀB/ÙÚe3«¯*ÄGˆÚèãæs·¾« [Ì*éõ9ëüR’͵9lv­¶¡¸ÑáKg/£ò­2v[ÑÆY®ÉÂ]8Á]˜ÓùOŒà0îF}ñRì\±-<âßô9ú:a¹ld ty±9'¦~6°óÀ„3Š÷Îì mÉ1g™³ôU†ª’*÷ŽÀ-¸Կ²Ìâ[âÌlr[„¶ô ¸VÜV—T_ÁVexD’$5T³ÅQÞìl®öÃS¤n½8Ÿx ÇûÏÝ…÷{–æ(«3q>ï\C}Q¢˜ü Á\Û ûL§…÷»P#^Š+LNŽõÜn‰ëá^ù3=‰Žo¹•u‘óÜÌÿ\AñÝ7wŠvÆâL+vz¬«Xºn17Ï￈¼K¿–-É/–s®Ú((7è¹Ù'¡MSArÚjÈeµ¥>¿BY±7´"˜©Äi ™4¯?àz¼¸¦pwQmÑ‹4œ¾è²¥LôÙtÂïÀ1©lq»‹YmP°‚> stream xœ•yX“×þÿ HÞ×U-i Ž›×ÚÛaë µ¶ÕÚ:+*âžu‹¨@„@ÈÞ'{‘0„!nܳZ÷néT[[íínmO¸Çÿ½ÿ ½½ã¹½¿çåyxHÂûžïù~¾ŸqGô‰'âââú.™µ8ã…ÔÔÔØ#£Ãâ£Ã2‘ù¯š.U"ôq ÿ³) ž|Nôí`">.îñ§fäçвx#2ò7gñ¸#ççmäþË‹A̘ƞ?£`f᛼Yü¢Ù‚9BÑÆôâMóÄ™%›çg-زuÛâì%9Ks—qòVŒ>úOcXcS_xüÅñ/Mxù•W'Nzí™g‡ÄHbñ$±ø3±ˆxŠXLGn§ž¥DÔç}O÷ËéÿÚ€„Ú÷zIz䨠§µ ~fpå£Ï=jNªb&1ßyläcï²2X;_òø©Çÿ–H é7¤fÈ÷CÁК¡íCï +vrØÍáˆÿõO¹ìqìCô ôýe#.?1󉊑Œ‘Wÿ—1•xäïñß\%L¸*]+Šknfq 'Bæ‚M†6à¶¹,.O‹»ÅÛlqr  ˆQLr«Ò?/Å]X„”ŽTò7¹€Òp`8Âe¾YV./×Ui*VÂQ+ásËá(ñQþ é1Wc/" jDKÐëèµÅh²pAŠF¬—•ñ·irñ#ÖÂbì߇‰èýD¼Àçsñ£)$>ý*Üt5™y¿9:ƒÅ© [šôkhÔHnÊ7®gãç #… fg>ßÚNÃtòzû½oÏÛgm`#™ÍÕsØâØ'xŒ‚Bk+ óIÐbl-m*Ú·²|-˜æm- ˜÷?ÄžyjajRá7~¸+b%Aúøö×Ó¿OfîÝÞ5•Å!½Àk*ÔöFm&ý0Bfåh²¥2³9ð2Éøkþa¿À”ÃpÄW(ñÚ$ÚL2ÛÞu~qîöP©‚$b|PÌ="‘±xØè ¦1¾>6íÕÓÓž*-5Rú †°Ä^OÓ} ™M>[ÀLA&ãm°O^H1÷:Ä6¡h¨qüë ^r 7ÉõÈßã~ºM(BÑquFQRø ßJfv@Yt) %I'=E¡Ä»(¾ÿ&±™°ÿ BCØÌÖ¨†!kÉì|/)˜øÑþ´).¶Y„@æª'¥oIš"0¬<%ßà>ƒ?6þZBé‰;ñcBôNÁ’¹å^™{cëòše€BKÑ‹h"Z–ÂÑh | ƒ}~„l/m²+°Q½CYlÛ,Ò±…O¿0Å™`NËšc‹/”Ü·ÁeǵÈ;»?¼sæ ƒP?/",rØ(ŸÙƒt]rÉšÉj=».hYŽDG…â\…¾Û 1´ÔMzÜ,>¹µPÇeŰÀgäñ­ÛiœV·é¤h/û3Œ}€ú.]Íß´•¾@Â*¤ci§o™ª¤4Œ `s¿]åѸ‚“Ià5b,PÇ.>ý}Jž©£6¶´ C¨+¡)þéNô±ïZÿ7ß‘`ÿ³§àÓ»{Á>SGÁÉ(-Pj*¥x2)–ˆTŽÈF èÑœ<%p5Q$¶E&°ÓQ \F7pQ;+ìa§Óì싌«àŠäðüãsË_¯Rø* 6ªbmÔð»­j¹+®%De™PFâža9»{‹ÈüBù`ß½‹«Ï©¢3ªžàÅìxçRžC— &(M*ÛêtÛŒ˜Ø?b²ëªR—md¨@©WfÅ·‹E–âaÌNÖ¨†W®>c=V@¢¤Ÿ®òBpX0™ù.}ÈšÅE>ÅÌî ä˜s*¹Ì%ûõqå½st;“”Rëo,\¬¼ÒÁ†A8’dˆ ‘0ƒì•È&x.Ûšº{tô.îÑLóÁH6¹±À¼‡†Õð(ª&3³u[Ù9±×1^¤1¼lãØvÒѹ³ÙÞêõ‡A9­äžÃF)ÐQ¨ wµZ·³["Œ^“£ÉÛíXâ{d°³[}d4ìyX@N¡Õ$È1qª¹ NÉ€°»6Ôîp;Ü–rÓøø”'1"!sò,4Ö†¿‘èYy"³ó7`Ï!§eM|u ×÷3–xW—ø+‘ ¿›ÐØ[·Ï„Ý@[D»…~ØJfåjr$áºOFH)ž)®/×¾ƒŽ¶’í­¶V¯ÏÊÙ²‡t<ÂèØH\M+VQdÊGhmJeVíÔ ¨5×+ÚoÁÌ”ÐÙ@óI@EЕð¥š2­”æÎÌ“û¦^©U”‘¡®“×)ë&Âu)z§Î œ”•´ÕFLµøÿÈeêyZuÏøfAhfhVý¬à¬¦WR¬rK™/Õð]ò¶T«9V{Ìæ³û,> 
[K™½ÌVÖ8)¥zEíÊêV¹UæPu­|û)8>¥îœï£÷bë›È!ëá2K>È/ÉU–nàK%T´¼BmPÚ¤]àŸ\Ô;ô½k ´€ØÑrN5¹¸$Þl³Ú¦*£R«Ð«£ñ)šm‰ºTV¯jT4Hö–î•ìI1Øuà êÛMîíìVü€ æøàxA i÷û7BÉÌ|Ø'ú(+ÙçkÔÍ–Ù3éB‘èõ­(™V‘Î7¾GÀ…Vï\©}CÃŒ°½ÅÖAÝà­$ ´›8¹Ú løÙ r/õuê±Wç½ÈyÍÜÇõׇ«ÁÆ¢ª6ÌF#XO“`Öê¥su”ޱfcûAÚF:¾‡‰mw,˜ßúzbŠï&¸ÿer0‚$1åpí;éØ*j*ÊÝ••••åŸÇíÅK®àUó+ù{Ó;æí™ãP¸d&)e$‹òÕÅl9º# à‘[ì’VË*7žKYøÙ¢Oçj´è-3 œ‡÷ýW.¦9t¤ã: Ì´Ã¨nPŠÅ¥R9-—ku:­V7$u|ê„q/QðÙÑf:ÃvvÏ"Æ0Ö…>¸ %05¡*­Z -Ÿ°Žû%”bÕ9U1 Réu f­O}P˜!‹Õ¾Ú _y]^Q^é­pº].»'TàÏ pkx)V­C ÔT©@Y†·acDL–€b“Ä)vÉjd9û¶íËÙ§,O1˜ £Øv‹5²#¥:Pô×FÂ;+÷Äð4‹CÖj ¬9ÜÜÂ’"µR"Ä–”Õ)=‚ð¶í  Tœ^SÏt`NÁ”?:í",q°úVBC¬;Ud$F¿%ƒfoUpO¢™\}š[Á_Aö‡äx[±–·y ¯b‹ô°t«µó¶ðr³„™»¤PQÌ%¹@Õ ¡n—tæÆþ“¨GI“»=HÌ&tþŽ]þÑj2Ü­úªIRD!RÀUqŽÓ"³Ä¨è.r×v[‹Çm.v㯽BÑz §³e@_•µÅïC P¼Eå%0 À¨*DJ‘è-nZ‘]V^c©õ€0’j™â”Šy«p:hã:Øq3tºézJÛíø žJ©:Pß~ª—®êêJyRT+£¹i…³òÒºéJÙCW~e ðç"“žåØ —°â-ÃZ[à^(©å€+0ÍCMf„¥ Ÿ*¨)nhª „ÙvRqãåà"ìyy~Œ?C ȺxaûÕÚl·¹0K8´6µD™%U³ù›7ñ×G€œ_´På$³|XÀx7åd´ƒòF^lN]…Ö]ñ¸#ÂÑÀÇ íÿÞ 9wf†§4B®[h¿â2a·I›¢.›Ð-rІ\E×Ѽë(Ý*· *@lNÍÖ:[ùu¸à]¸è\è¨rVÙªMo¦dæjrˤ=ª±+BþUœNë¥(ïáÄžQT[{vÊŠ2qÇp_åhBbˆan3Í,»0VcÐÀÐcãÐ ­Fƒ¹•R8ôn6¼Ç¨‚ 0ˆÔiÚ‰Ï+Š P¨—7¾»ùÝÍo{‚*Ê¥²¨Øÿ2!Ø!W]N€§b»RI¶5èÑF´…¿k±-6ăF=Žøõƒ)G„vµÐsIø,Û’“g9@G»Htû¡‰UA‚›®w¯ì¯-¯sÔêÐÕÌyRÌuRM"T«T+ 7oà®Ákí%ùƒT|Ô¯1½»7ÃÃщáÞQñJS% šôúa=ùæ"íºžÞl˜o¿듎*ÉJ±WàÁA( @sb¿ G)p·Åᢣï—ÎØöâ}–rȽg´sè‡ÓHô·h½Øœø óüpZ .|;'s îfxlÏÏsï¡>hÐx‡ú;2ñ³LºNÑ  ÊÂèç÷6tš,8àY(‡ãP“#×°yëWsW€- ×_й_¼M™ü¬ÎÝ_}̶1¾QŸt/è a}׫õ8Æžø1ºñGc£d×rÖ‹Ò©Ü…oΟ™;Œ£ÐT\ÉB¸ ®„ÃaœK³éhœ‹Öbï¢ÕµÃPöÃGXU$€d&Cò"Åì¬G\ÍÃnƒë v¾Ö°Ô9 ,(…ƒã騕e:‰¡  §À,8Jb¯Ze¨Ž¥êJ{èÚq0áRLÀˆ{ó>ºÅÈæêóbç šH.Vœ"“Ì£r/(§à»A}a2ybÇ¥ãnŸAãdkåbPLÖ–ëkjW…6¥Îù»GÆÑÆ:¸øô'A¨zp|?&xN¯åX2ó9ì_Ç2’k”ë×5ÔøçÜÎ[Ýôã…µ ªØ[=[LY€JeäZxÍ ™²‘&¯£Þ ¼zŸ¼B[­©Õ×`«ûá;·[ËvhÙP[_½Xb—Q©Ñ¨”*õ–UV•û*ümÙe‚B¶^¯Ó錘±4@åÔZ(æOr_až3oØä——¦æxój i«Î¡øÒh :©HÍÂËñmj’PÌŸåY#XNòØþÈqöûŒs_)ߘ>‚“1g\Óeot>dyÉÞ›7φ¥oRMz½W(aKj A}úÝ™Olì`ü%˜u5¡9ø ²½^·š~h&Ú¤ÆU Ëgó{N~þç>-’&þ¾UÞµ¸ûs7/1ÂïSðSo}K  Àcݹ¶ÿøÇ¹½ã7§ï‚ ò"8“åxƒz•Lk| ?°'rÃɶ‘N½[)¶¬à•²e¥bY1x ¨.ªwÊÞO‘_-:>º¥ÆQk­qž è°_ œ1¼¢áoq”˜Jy#Ð',ˆy£Þí ™©Œþ"D!ÅËÇÓ:¬=±½z>ö ÝÞûaö¥dæÝösÝX¯[K£&r›X³/57"" ù–f:ÚŸ\ E,f'O/ 冸á†ú@¤‘Ìg3ïë Fý°ž.#]©á¸çá•ó :Ä:÷Ö!a'æÖþïþòÃ'k:_k¡ç‡¦–7g†Šö‚}Ô—Ž\ûöÊäg*Ù‰¾‡=Éå¢aé08Ty¾TÅÖ`ŠÓ)6í^Ó8P£žJCÌ'ßKûFHûµµ¿ö¤ôdYp†Mb)jêʯ<óÜ1HŠØF—ÑmtQÒ­µ©•š HµYí¬ð„½N¶Ýåp[ݽ³“û¿õnÇO?woÇÛ=ÛÑÚ¬ÇÛq‹b“ˆ©%šqi’B‹GÆ®³”ûŸjTòÙ:FQ±€¿ä,ÿ@çù³÷騌œÿÇ[…s†©)úÿ0j®ÜM8Ñ5‰UŒX¬G-×"­­m§,ûNÚ a L™™š­R,} œ£ƒ"iŒÍYÖݱ¬ÔXeÛÁ1öƒSúÖm‘lËLk, wôß\'¬uHôXµæÆÿ8‘"FϺƒ¾Oÿ®p°ÚUâ•xJƒ9ÁÜ@Ž]é”%é$+#1£ JEÞÒ`™Ÿ×ÆÛ^ضõH fm°RøÖަ€Ï³×ÑŽó13—CVxåeykò t^!·0GEù½A&¹ç$£+Ò#RmäÞný0žDìûèQ8 &ÞùágŒZർE^鑇ÜwZ;“F÷Èg–b„˜±èø%[ÞnÜvPðK¼£ ¦]m¿†âìw=/ê¦å–ÿÊ|Ì\¾u'ÝEà¥&L{âŸN•3c?É̯ÿÛÁòG¿Ì]_|uÞ1g-)É­y†¶äŸ è?¿/däXÚh($A»¡…ßZزÁ• æ‚åY‹Š¯­ Íé`~¦0ƒb~Ý}Ý'AKa×Ë8»Æ ó“瀕ëÉ:‡A‹Î¢Å¶2 3»™Qm±4[‡á˜ƒpì8Úâ²8Í.ì¼l|O‘½¨±÷!z?b» R,j«ÚiÀò\ ,A›ïÌ8œ…å»ý;®ÆZ»…C¶CRËf 1¨ %/¥dqñ’âÅ PFŠ2_Q Ê/kUì·eÂ[àÀMp€Þ£Ž©]ÓIƬâ4À¥¨›…ì()ö4Šb]YÎ8ъiŽ2kŠ¡\ëÑ•#ÞI‘ì‘ï–ì6Zc'>”…tÔ½VX}ž@Å¿JpwŧÎv)Y:ƒAô@VWâ‘ý‚7£×z  9( @Ôj©w\kÓÚÙ&F¹¹Ö_^“`ÒaøˆýÌ[€ ÂÓp°Ö¢µi€_ã?Ô¦O„ÓAì¬n¹”ðà÷ß-!üŸ‘_hÁr N Ãí2…Q4Rü“ÆòyÖít”$Ñó²DÄú‚¦€;8rºù$hE]U(ðkŠêü•!3m'¯Ìˆ,œúBîôml…Ј]Õƒl4EŠ’îÀ¾B2ó L«a™µ& Nsi¹‹Ö¯6è : §t6Í×°#àew{€T‡ôô 6óvž (/»¾¸¡¢ÒæuÑ¿«†EÿS[0Qþãõ Ü(ÓÉú*“Ò¦2—ØJÌbUÊ…È•ˆ¤aŠc¡ìÀmô¸qäíOÝ”‡~m@/‘)@d¶2·Ê¦·LêX±S 8ÿtœX aÍéÞs èÂv+ÒGÕÈž »Iy &ÿÌÂ|\x¯–iû#l¦¿^Îe3éõÝœ»… ®+%`%íü:nÅ’”¼k -+Ñ!9Æ:QܹiÍY'p´½.žˆt6\±¼n€/1ÿ ÅìØ™½,¸r³S«‹¹Éć£Y1.ê ‚¶ƒ–Ö=þ–@K[ÝN¬Š}èégé5$(0qA…+Al†záòóš^›‡Óî`Œè‚w>~‡Å^pжÓO=¬!7gi6—”˜@,ý¿ãäc) 2·ÅŽÑn.½C^VR¬’³×s—©VƾîI¸wü·Múki wÛLD5Ç~בp7a´¹¨/÷Lx}\Æ”Åsèųç§/š£Tê4ØùÉ0TÃpF,S¯µ­Â·Ídpø›A¶•W.¦„C7/~yÀ &>ÿ1J|~ÌôO€Cà-–‘îZ8¥±i»/h-ÒÚ½   ‹E……µ¢ú†šêÖi‘èµÑhzý©m…óà#Bìó`ì‹ú/ZÌ™+¦M¨ËM^BÆ·0΄†Ìg‹ÉÙÔ/|õŽ ÜÀct÷.£·ßmÓ4ã~Âg]Y9(.çñ½€ªgA9ˆGrcùŒ%Ó7N~kì6”ž 
òjl3£ÞTåTuÌk›àÈ‹¼µ¾_Í—0%ÅéqØ@UN¾2ô#ÁB°Ð®oümºa`tÀÐ^Áaì,á|ø9Ë@N/œ9L¤ä¤ýÙŸæýP_AÆ%د3ëÄ´}´Ì)wIí8{žÙqf×Cï¼òΘÇfŒ˜zbåÙm@+YÌ»V£Õ:\=|âD›Õl³Úp¥MEÕÒüïVƒ7©W¦®zmÆû?“㈈a¹ÈKÛO\|'kJ9èLz ‹Ö­ßÀÅ‚ Ó%(ô—¹V´¤7ªßþmÙp[ úT}Òž»é÷àæÏ±Ñ‰ÆÃ«,L0ŽZòf~NvÑz 2Eaª!äk •Ôå KùzZIN¹Æ¹ §½×ÂTøˬñ“çNâ²™”…úü‚¡ˆx1`ÿ·Oî<]Á6B&k+©›Éyþ%@=©‚)×.Y?º¸ƒ®pVY«õÀF.¡%óè´Ø0Üù9&nÉÑéÿÎèí=Œ~¥›ÑQgŸù@ì]üˆ}ïÛ+˜]ñ=·T`Ålü¿)æ•VÌÕÑq° ž‡F'ôªôÀ¢ºX!éÑ_X@aR…~oé† :Z Ԙⴽ¥¢¾½?vgÕA,»¤gWcXÜΈr8A¤Úçð•Óÿ0>¥±R¾;»éUl~Ê´b‘L†¡A«Ð f}æœFËà3¨ŸMo×b±[ìVO_î¥ÿ åÂW|À‰-p ÛíîØa#³Ý®µjpFƒÑ .i©½®à%H IÝÆ ™ùÑÿn ´ÿá䃌 ~!ÁYï§×]¾ØÙ¯¦D%•!¶¶…ÕÅõM¾@Ýáåµ›²ó¤%…l^«‰#”UÈíßÎ7QrÆh4`LÖ½,½È‚Â4ý€d~tïòÉÂu*yˆ-T—ʈÝâj õ»_‚gÊ€¾ñÿuÓendstream endobj 72 0 obj << /Filter /FlateDecode /Length 304 >> stream xœ]’±nƒ@D{¾‚?àÀÜ­-Yר‹DQ’ÀÇbQø@ùûÌŽí)æ¤Ìjæ–êp:žò¸–ÕÇ2¥/]ËaÌý¢·é¾$-ÏzsQ7e?¦õI<Óµ›‹êðÖÍß?³–ø@‡¿wW­>k×òQý0¥©×ÛÜ%]º|Ñbï\ÜC,4÷ÿ^5›‡ã<> stream xœmUyTSgÿä=ÔJ%ç N4qmíxÜjÝFpWFDÙQâ†[B ˜ ò…% ²¯-¶* h jE´.⊊€ʘªS±ãv“óáé¼h{æœ9sÞ÷ÏûÞ;÷Þßïþ~÷RÈÙ Qå´4(hê”)S/£mb'Ûp“{‘].Àƒh<ÈÙ<ÜÅÕ Îõõ§È‰¢Ä^´sû®ø¸ùÈ;£cä;FEíØ32(F¾Eö?_Bî;v.ÚµX¾'.>*qãŠ™à– àØÐíù!4 q(£Š¢0´MBK‘7šŠ¦!ôšŽV ™h%òàËE Z…¾§FPû)«ÓP'­Ó%z ­§:û8ŸL¤º…K„±Ì`fãÍœf'üF­˜ÿæäl@• 6I«[ÏíE]°¦Ë]ô &Ú>å:õ‡2¢û](¼œ÷Özµv㲃 «$$» Ò\j,Ã¥ì]¥• ˆž-ÇëbcÖ±ƒ£^?Biíð²•:|ê»hPÙ6pdHò‚0qa ÝC>©¤@®…÷ž™Á™-`Àe:¸±„Œ$˜3˜° ›Ùçþl=;{Ú\Ùçcg†uü¼_Âךr%´ÛÞµS׬´­û —·Wž*KŠJ߈Y2y6Ì)ˆ»Á ¦oL–Ÿ‘šuf%½°×Ãÿë•ù¾ü?aÄø’•d Ÿ0Ö^9w ºEZ”kÊÎÌĦa¸BWa¨`?²Bæ'ÀòVøÇUª· ´Ý4Lµäšæ¿wë S¿ùÇø³˜—ë zÛ·Æ“¾ñÑ‘RH'~œfÞªÙÓ1»É¯ºA œgÈÇùìÉKuMÏN§Å:ƒÖ§c Â7'ØÝ.n¿ôÙFÿÝ]ñ‹}9÷ÞBQMY¹¡L þàÓÁ¼ÉXH¢¥¢¥Ò ~ïÆ‰jlÝ0й/*Ï„üz^ö\VÁ‘^d,ñæ3P!Í WP½4L„~îÉâþÐNˆ`ð½ƒ×ûjKÍåY嘽Áøàö;Ú§òBiWàU Us|;èï_qz…fVâ]o”yÅ¥¥˜½k=NíkšJ¤aE_¤ãlñòjêSj$ÅÚ´bmÞ5ðʵVavÞ´Øñ^7£®¥HOªïiðr6"\°¾´:ÈQàC-Z@¤ ^ݦßÚnrD4Íÿ)a‚óçvœm*üæ”doËv…ö)õ‚m¬‚êqh—î±p¤‘Á)†¼oÍÚ\!/—W««qn«8}œ ï´ù{‘#ŒWÔúéQ;KÎH B=Ú…Ò.ÃÛêÄmØpŸî…7\‘¥à@‘¥¶ô˜ù8f[«¼W;Bª¤Ÿ‘Åqù6®¸uñzþÑFé¦N¨fðƒÈ§„2³©Â0¼.mmœ&]¥Ç©X‘“hT° &S9&À/~uÔªŠS’̦®âúœC9‡³av°-ª2ïŸÂMTaâ‹/H,I*Nz2Æ£HY³§á´­Û’•±²ØM±²$…21YÁŠ"{ðøÁÄOð@H"@Ë!V0[HÄd‘ÀÌÝF©)ÄÁì‹vª®‹¶M|Ç)Ó”*eÚ¶äífÌîN³D÷á{loÃÓ6œÐÈë$½YkI7–å–ãr¶L¶ÉðYä2H¢a2‰ÐJœÀÿ„› N}gʱdâ¸LWn(ç5ComãUf÷QP/»l…ÝôË îbŒv笸Üxsü°ŸÈ_®’ü1%›’s’Ù;BXkû®²CðÞ¹Ûî3†IÁF^­ÚîáC<ÿJ¼ˆ§.IdP²BC©ºDWâs—ÁðTöÐ[ŒæÌþ±¼ÎÃ6ªömÿŽ[»AˆYÏ7_JáŠßH½·¦9öHdq$Æa»7EG†'úb_VljËBƒnÆgо5ÿSKNƒ'WØÚØÞoáË2G¶©{^âZ ±•úµ“þ•ïÖÿ»ý\§§"ø·ðƒÏóØ«NvJfTª«4ìE¦<ؘ Ñ =ñJåóûq”U&@Ã%8qÁè0?Üíq½øh~+4€/ι¯z1Ø\FÔnj2ä:ˆE/’ö’Ä£ÈKNÀdæ¾uõÄÑAqÒMûbpŒƒ'm#ö­V(¸@=ïƒîúù‡¸}p Æ1°€œ(Ž<’pèzTÖw ?fa4€sùóÜ}Æ}¼wX2†Üä „Ìü\_ÿe¶*“Ÿþ802*$Q–£©Ð¿î8Hylu»kÝé.ºé0`¿?ï2Fôh6¤r_Z­›N؆nÚ^^£l³ºéK0”K—§íæ©PvìªWÕ§×ÇÏ00M•0ÂhÆlÆÏVvÏ<4ï["Tï|m0ø+ÖÂXn\;v9›-žÃÍ{·˜u&­YŸUuà ®Â¦ô,]¦2×#â›Õy¡˜@Ä¡dܤ»Kî‘Þ‡¡zLD¬†Ñ.õ óÖ±*áì]ôƒ&GcVg;8;´Ñ¡aú*§­t} çùïi«ÝòÛ™?€Úeü é¤{웸÷²N»Ì­(U$|¿›(²çÙ”˜«4'þÈ’˜$Ú¨ÏÔæØv¡¨"+Ç-®ÀÕ¯ÊϵÁÌëðUÌ0—ZJM¥¬·pÎTëÕdœGDÄëˆT›¤IÊP.…(OX?ÖêyR¬Óšÿag_ÈQ¸ñs/ÜqÜyL²¹r·ö‡ò;›iËýøêQ™÷ï;;çÿßgp]F]r +j:•VºF€wn ÙݼãÈ"Þ0˶Ë#ØÁ’tÞ¼A.ýª&fvendstream endobj 74 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 1073 >> stream xœm“mL[UÇÏmáÞƒb½Ö&ï-¸™E³¸dY² £Ž×B&¬Š,ãEGAÞÒñ¾.mG{Úòb¾leÐ"6›x_¶,Ö1uˆ‰¸Ìg÷á´¹&xYÕã‡çÃ9Ožóÿýžä A‚HR«J³2322¶©Q…(š"F¦X8V’ˆ’Å(9a8%Iñ3OáÄ'#;€ˆ /f7ißoo«iQ4¯iiT–6i«•YM ÇÿÓÀÖ¶êšúm e dÈ hA’(ÀÏ‹¢t‘W׋nd‹°TS ª˜µê¤8–Ó«8;Z);d8Ô€2ÑžõÚ;=¡Ó7ô(–¿}²êÍcþp;ctÛ½-šÈn“¡‹åŸ Þª3Ãr¹¦òȆ¤Ã§T8‚Ÿ''mÝÂïºw¤èˆ¦ó9½ ôi÷¨Í¥@øü¤©×Ã4˜:µ¨ Õ|Ôœ÷ûæ…¾)],‚½äÄïÖõðÉPÂ<&=œÀŸboÅendstream endobj 75 0 obj << /Filter /FlateDecode /Length 313 >> stream xœ]’±nÃ0 Dw…þÀŠ‹ pI— -ж?àÈtá!²á8Cÿ¾ÇKÒ¡Ãx6EݪÇ—c×P¿/Sþ´5 cé»L×%[8Ù÷XªMú1¯wbÍçn®êÃk7ýÌÐ`Ãߺ³ÕÏ;~ÙÜÎä©·ËÜe[ºòmÕ>F݃VVú¿šövâ4Ü[·OJň ì”vަÐ¥€˜¾oq+FT`£°qÄÔ–“[ŸÜ&¥€ ˜6Jň Dcbsòæ´U ¸ul•¶Ž“8*qÔN)àÎ1+ÌŽ½RÀÞi%O”&1QòD‚4ÂDâ‰i„‰Ä MŠ›š7)0(4)nR`PhRܤˆR@ábðù²» ùº,VV¾nÜ7=û{4ó4û©U¿0¡1endstream endobj 76 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 4295 >> stream xœ]X tTåÙž$8¹*¥ÊtŽcç*âJ[A\ŠÖÚ*Æ D@b ! 
ìÉl™-™åÎrï{g¹÷Î>“™Ì$“eCˆ,‘„EùäW¡åW«H[{úŸÚ/éõÔÿÐnçž3çLΜ™ï{ÞçyŸçIdN¡¤  àÆue•/,Y¼xqþÍ‚™Ò™;ŠºÄšY4»á˜[sçðwÌÿÃ|Tv+’}ÿO·H n»÷ÙöÖº¡ëÎUíõ ]mw¾ ®mÙQ÷•H$%míÏvt©Ôí ]m]}yÃêmkw¬kYß*‘¬–THJÖHÖI*%/IÖK6HI6Jž‘,—<'yHR&Y,y^²D²T²Bòˆd•äqI¹äv|`I±äxÁÃï~P´µèƒ9Ì å7|)uo&î"‚7n Ý|ÇÍ£s·|¯dÞæú>}Ë·\½õ›o öHæ}Sø°\bMÏü(]0S}ª  oäÖQË‘uðíYϸh8 ;ç Œõ¢»áe8½ÇLØ‹¡ÉÒdo®W*¨ÎÞ&P=žÄ(7æ”ÇPgß…SŸ_fK|ÒW…me-O«_ m–.è ª^mÛûþØÇÓq%ëóøÁOxŠyÊg×;w]ÊÞÆÎµ›èvG£!oÒNx3qº„Ïøa8»y¬²rEÛËÊyßüýëoÏŽ Ï£ðù"ô3årñ‡¦…âBeÄÏ^{ä/ÊÝþ ÝHNü¦úã{”Ź»j´6ê~Q·h‘‘X¥;6I†Š=£ü??9=y ¿o(>„W:*¯s1np“ªwj%ÚÜìsèÁÏŠfëf äê“eCp/Ü·¶¬ì‘GÊÅïƒx+!Þò¾¸=‰¨èˆ8¢ ïbýü% ïðR h¶¶=øÀƒâ ¬üü+tï»h5R\!'²“ýG€øjÏž0‚6’Œ“¦ÀAXZPrÒ€‡ z„iÔ¬ð†}O˜ðûhÞn¨¯èìü±rÔªE+ó ÌŸ)?{›ì ÚtXÎJƒÛÇÕ¯ªþ÷ló§K·ëT¤ÍNYm”s¢a¤~ ú{lõŒÜ…î’l2âU'ç÷ÑÇk u¦­¶r‰ø7·Émp˜J(ƒž60„]j )8¼Ž!BÒ4¤=iÈÂdi".•uûüŒ¯4Èó¡¨ô$-5€˜ðÁÃçЭ _ÜŸðÆ äB}r÷û[“¹B¿¦¶Ú¨WuwQ­y;G²A_ÐCw+„ÝÁLn<U$⃉A öG6m£€b(<£‚¬_ÒÎ<ƒé¼zæ¹Æiê aÓ£©¡ì/†›×7un®WÚ÷µŒÔÂ3 {ÀV‹9îÌsœ§¹‹ù”á‰Ø…½à|j7çPpvÖVÐ:ñS&Ž(h»Ë60…­1fü!{ñ3¯_<#‚´Ò®¨…Àg‰¼ý=|¢ž•gû‡R¦H÷CKë³:xa×G£$÷÷A!;÷©v×Å7÷•Yà¬ÿÈþñ£° vMZo5a ÷&†ãCÙSk²/?ñóÕ‹ ´ò<Ü©|æ¹ú5Û^Žî³ä9ðâÂüo*rh£®m˜Y(w¶ÙÚ¨ö­â2…CëÐÒZÂY¼y¸úÄ¥×?™Ê(½>M çð8 zÅKP;“I’/f“¾>_ßUxƒˆñž4ŠˆÏ…C”¯‡¢ È¥Âxh<0Ø¥`y/<‘²æŽæN‡²Eümg­f(q‚ƒu±RÚcu™IW±Ç.0<`œ ôw8=tªhfÞ¤<Ò8¤Ú '¡ÿTôõü—úX?ø0Û¯Ÿ ¨Ëe´5êW4¡q Åv†rBŽ<€–óƒá0Pë ™:(†R>/öT5T®Å¿.­¬9ƒ³Cg3§ƒá!LOžhÞÖ¯0¨‚]°j³¬§´œ,%¸xH3Ê'Â{²o"Á™Mͦ&K©YݾV¿<¸åÌ‘±±á<îï|öÝã;¼„÷–Vé@ Ý1›ÐêæÔøÜ§Æ¡U?^/Þö¯˜ËϼC iK^Õíy…#(é/ îG=ñœìG8Ådãœ221|êq¾¡ÙÚF:ŒƒÃ¨-S8ÔÎNè"Ö ¯úR¢›ƒ“¨|Íó¥}iÈYêµ&|ºÂ¹K´hÕŒyȧ›/›Bóß•û/f8±shxgÔ6ÐZݽuû¯NÔžþŸßœx7)$ý) ͬQ{3(‚—†.BúŠeý<ž§£T6åÖjM©¬Í:ßÚÒpÑîÂQ,›²±F(Ý-‚4kŠÆÏâ¯% í£&ñ†Å5V—•¶BÔeùáG~•î]¿n>Zü6úÉ)¼³fn–{¤Ñš7´ûIáU¤ü<:•8 lj¬}¼Y­Ò¨%¢–@YF¶ÃÞNujŸjç>x!máÍc°‡ð³cÁÝ«“h³"f‰;(­šRönÒ/ïxÚ¹¼ÄZ ª¦²ÍNbm÷¾}rß%¹$ƒ~BvqLu ê©ÆG+MJ¼ù(¸ù;õ)#{3Óy¢/`Õ·˜[®óáy('žžª¾´w<;žøkÊ ÉÙ"ôç·äñšþŽpÒŸ…&YŸ7/;Áî¡L´ÕF+-[ ?Û „žJÅÈ@1¤¹~ÿ[Èuç ±!"i Õ=z[©Éi‡ka,çšš55¦ð2¬ê_q®3è28Çv¥÷ÇQBÎX\·u»x§ÂÝC½”Álì5›L ƒ¡C×DƒþÀÒ³'°'0{3~jßë‰d&™e±çÈ”·ÿ ô3º¼»Í€õï)¦yЧÀ¦‹¥fëºÖÕ€™]É›§àä„aÿhòáKç$‚)>#0b·ÿâèˆÓ]CnfÙŒ\îäܼ‹ÓvFÚ`=Tmom­Xݰ‡Õÿyóë ÿ†2ïðÙë£âݰB©§°±²\–D·Ï\we®„-(¯Ýá Á¥lu·Ñ­t/ÓÍB7aó9Ãʨ4‘eŒ¤»ìnm¯Q8µ”Ö­%Œ«ÿñ] {}¯VÕŽÑ5ÛµÝfÕ®7IcKýú5t믽׀­hÎçˆßÝ+IkÓÚY2Çb9Ö wgKå½›-øYüu¸ʱåI›‰f¼6^¬c8 ¥Ï d aìœø­¬Ÿ ‡˜piPðG‚6Ÿ…ì‘j½Ú€;#¡Q~øSô'žíxpO²*VÕW5%.<*Þó†xßâÑDi,èÐI_ÉfüŠüš÷'¢¯íŒì÷ç¥Îñ!2¿m^K8¤ Ðàn¨=*–èµ¹Ìí—vB FÊà6/úZ¦ -.+ÆÂOc µe[ÑÜÍèæt“ugÉkV!«ˆ¸‚±TVãp2ÎRqžÔfu›{ƒîDZà'ÐGÇ]Ä·¤Ë ö4z"Ãà =‡œ{n“]FÎ'»;Ì÷‹ó@¼‹xzì~TøÁ•‰3J¯—õ—à¨ Íæ°P.åæ—×Ö=ÄÝÉ#“ÁÓ}>Ž/ù{tOÝHʾœÞ”Û¨”]v9W©x·X!—}ùAâÄÕË¥­-w‹‹˜çóú»A¢šýi6„>Ÿ©ý4¿”&f×ËEJ|R´‚Xwï t[S•t÷ÁçpåЙ·þtõ¿Ðñþ@7­D ÅeJÙAgþÆh‰HËÍâ}•âã"ñÐööZU5!6¢>BʳhU¾IÊ.rɤ7Uú—½bá“V°2Ö¼KÄšô̯sˆÃöÒ>ó¬ü )E;í`',<†ïIiìì.­V,Rt߯¿Oõ€­Þòì0Y°3þþ8 )cX߬oë¬Ý«š¸2†—’ÊD$ÐMƒˆðÄè‘~ ÞþÎÝ#| L2Akâ1iðˆp$tX÷Ÿ;1ˆÙƒVµS«Çžc†ú‡†&†jÅBxÿ½ÝJÆæ¶ƒ-¿×‡Ä$Gç²èt®`V~¥½‹ÎËÑSR4'„J/ú\}rrGˆH-6WEpÈAi*C› 6•³›oùÚé2ººõT‰É©#àŒÆ:è¡ì,G–âÜ\þ<ž޵‹¥&’×E^ #µÁ( oИBM OÀ+°—•§.Ä/ô]ˆžPøãl"ë#>—N~ä||u×SuÛÈúÆÖ—®Å‘KË%Ú4äÐTÍ͟ѱòâî«·ÉþŠÎ¢Oä¨@z*WW…Í×m ­ 7ÈþæP9ºT·Wï\sõøk“Gc¸Õ`Bú€£ý”‹GÔ‚h¡ÇÓ¤ì¯Ó‘©éÛÅÌ‘†ƒÕ±/r¨xÐøâþÃK}„8þO$†¥©m4Ø´.C »½Å«ù%”X¤½6go¯@ÈÃÞCp†aÄ=Œ¯ñÆB6ÔK5lœó¬Ÿ<ŒÖq€o°„KËcçSçç£Ó šâ4sTzü²eÙKš•Û·“uu=×®ž§ÚõõÛ”÷cÙ'¨Ëî[ÓÏM¯ç"lƒ.!çÆMð²2°v«­í65©ß¤¯Òoêm24Áv¢®¿î˜’—žåv}›¼Vñ|×+µÇ[ø§JdƒÙÀÙ7™<Ò7Ê%vÂnâ5õD¥òj’«ÝÝ:\'õQSrp =¨”KšcÝJÙ'4͸K¯WVíèì’átßaœ2>6à¥îø3Z€n>¡™^“%{8“×ì[™T¼m臓8 |ú›O>¹¸¯â LOÆ®t·2.3ªä“hâ‚7t•+<×=×SÌ9|ΞÖW´ÝJÝÑ•ñjÀÅnx‹X¼1µqª“ŒÙãî¸óm½â¤!§Æið'Ëî_ºeS|¬]©ËR;qrÏú!Ãð”‡ÂJdl´mƒØ À]ÃÃåwù’{N„$ßôàÊ2û’®à÷o¡‰Ù•ò5Ž_øpQ¶{AeDšf zk—SG>.^éÙ )‡ª¶v¸æx>•ô÷sý _˜K@(Ÿ‘y —ÌîkÙÚ¨gäžÞhfâPh‰nžéôp8Í %˜ ¸Ø×í´³•×&Þ?³ìr:!&Ø…x…cÊ9íœr¾¡`´väu/YLŠé…Ë9²{ìÊfº‰i¢Q*›Âí B͉uãÐTjuØ-mÇ`ÉZYk Þ«‹7…ÅCù×¹ ÖB«¢îk¥‰ ÑÓQ\E‹® ÿ-œÜ}4fûhHáaȦcæd ‹´ Úè6 dýß:R^4Ç3°™­¦Í¬Y">°T|¿âË- |÷ï8úJÎHé©Ú±õ@¸¤•°¢¥³Ê‘¤ûÌ1SBÔÀs }Ô²‘q¹©kmÐøüœG=þìu,h/ã£qäÿÙuebîÛ6H£Ö؈p€WC]ÿ}ô·¿…’=BcGmO½u+Y%®¿þ¹Ö±á哎ú=#üÀAT¦ð§¼i!™9qìà 
€¡Øå¶C;a-¶®è^­]em3·B a/öÕçZ'à LœO¼åñûð ÁéµkYµgÚºÎò* hiëÄ©ó’þ$×—:–šŽç9Üʼn35ãk–oÛ RÎSÞTzzîÉÿá¾£endstream endobj 77 0 obj << /Filter /FlateDecode /Length 616 >> stream xœ]”=nÛ@D{‚7¥ý~,ÀØÆn\$’\€"—† S-¹}fFvŠC`$xCìþéåùe¹Üºýí:þj·n¾,ÓÖÞ¯ÛØºs{½,»Ã±›.ãí³é:¾ ënÿômXÿY[‡Ú|ï߇·¶ÿy:è—Ãý™ñ:µ÷uÛ6,¯m÷Ø÷õqžë®-ӥߟ8ÏŸ·Ué{\Q­*¨ÆêUAuÖ‡ª >°žª‚zb«‚:²NUAX[UPë\T€>3}+ê¡*¨VAB–RÔ Þ"æBæÞ"æBæUA Ö¬ j²Â¦È¨Ð¨À¦È¨Ð¨ UAXÏUA=³ÂµÈ·Ð·@®H°PЀoR0*xMÌFf¯‰ÙÈlà51™ ¼&f#³×Äld6ðš˜Ì^³‘Ùh‚4BMFHà ¦QŒ£xMÌ&f bÅ8Šc×(ÎQ ¸Vp®à°q96.#§‘ÃÆeä4rظŒœF—‘ÓÈaã2r9l\FN#‡ËÈiäÀw)8¼.f'sà•‡^{ðµxCÌAæoˆ9Èà 1™¼!æ s€7Ädð†˜ƒÌÞs9Àb2¾šÐ—ürø!… B`Ð(ÁQ6!£ Q`Ð(ÁQr!Á `bÔ(ÉQ®)ߤoÂ5å›ôM¸¦|“¾ ×”oÒ7ášòMú&\S¾Iß„kÊ7é›pMù&}®)ߤoÂ5å›ômpmòÅ•ÇÐ×yÉGÛ×IÖÛÖ–›Î?o<×.KûwD®×•OuÈî/ZDçendstream endobj 78 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 8837 >> stream xœµzxÕÚî„™AP0ã6è "ˆbA) R¤C@𔀴@ÚNvÍî-;Y»÷^’½CBGÊ¡JÝA@"E¥(èñX°ËÚüÃyî¿fCþsÏýÏóß{ö ™5³Ö÷½ß÷¾ï7ÉÂ:w²²²ºŽŸ=û…Áƒsÿé“.è”~,ÛÀŠÓ›nNÉݲA·ÎŽÇžû=^{¾Õý×X§¬¬‚gÇ–•” ªVð{O-{w¿´÷lIù  úÎù †a“ß(W6lù›ãøã+'TM¼%M/›"Y>Uúî´ÓWÎXµzÖšÂâÙkç¬{»dî3 Ÿ_4`à’Aƒo¼ðâK/yåÕ¡}† ïûZ¿£žÆ°>Øtl6ŽõÅfbý°YØSX!Ö›=ÍÁžÁÞÆžÅæbó°1ØóØ|l,6[€½‰ Äbã°AØxl06{›ˆ½…½„MÂ^Æ&cC°)Ø+ØTìUl6{=ˆÂºc¯c=°‡±v,£°G0ö(–‡åc=±l¬V€å`a8ö8Æ`]°)(”íËÝip'wö’ì;WtþûPÿ‡šºçu?ÖcðÃ=¬Ì},·G}÷ȺG>åýÇ£Š¼Iyóä_ï9¾×½6öú½ ò˜úñyï¢)z>½“þ•y‚™Álë=¸÷O¼ýĹ>â>·ž¬ëûP_e¿Gûí|jÊSîþŠþ_?}þ™7ŸiyöÁÿÕ©ÛçØC™¯¦¶"!žJ?!Ê¥¶|ÑÆÓ­I HL){• æN•ÛuŽ‚z¯/ÂP["r¯@§­zºá×ø2Þð¤èL:ÐS WŠª\ìUÔÓÔ‡Ãd/¸'¼ÞHDæhÑ SÖGj‚ïòß ¿|«Þm¸ü®,ÁWH_±’°–´‚P#Ñ‹k%$Ì‚ÃxïÁ™õ07nj׳¹ï±3sîl:S©Ù©\4®ÎnÏ£ÎÃáiÏCD5 úe\"S‰„A Úð©ýlj.Ní)Ra@gàP¼>èŒGå13¾†ºç앃¾w—ÐlõBB¨’UW…uQ,pÇB Ÿ]U†(ù’í¼p¸x5ݱ…eÂôÞ” …-‡·ñþgÿÀÎÎQàµB*ò«Ð-p¼Ñã$ÖjF†Ë€Ñ_ãZznå'ï^0Ú v`'ͰÛìfÛ·pz¾ÅiñøÃc()DKžÃ)›^oÒˆ«UB†Š‰üêíÃDZ}GÁ¾9.¼Ý›¡lQ™GLS1½¾N_ð,.ÔT–FkÂL€Ð??º"º2º"Pžo6šk@ YKÔê :£ŽÍbKòk4F™åª’ò¸1Àe{©Ý^g/ˆ=QôаÒ'¸s`C ~œÊ:z6´gCSz íªÛô&Õ„•ͺöìH/»ºÏÿ~'=D Ìò û Ù|VÂÖ:°’Þ¿òù'{Ǿé¤5Äëï<÷]ƒ¿n_xV{™ü¯Q…"”Õ&(jã•â|´=†Z¢Š Õäp"h]Ç©ÕXÆ,›¢2·€.…¯ã ѰÉ׸>ÕœjJy’ÎX><Bf[Œtµ·œ~§|†LdJÐZ‰GG‘}ž]®-ͩŵMÏÃå9><îñ…Êw;ÀM(Ààr5B¹Ž° Cµ™;»Ä)Ý:bóˆm#m*»(îœDJ;SYÛÛ³ÓƒÒñV7/Š,¤ gŸS`G¶`¿ÂŽ‚ÏéI'þýÂÎq¥§r…½¤ÒÀ¨ gR‡³:ÀAà;ò°?$Κ2ÍA[×ùô1@6[Ãa>CÄ4A!_±Vj¸“&¶"¥à™?ËÃúQõ*ž<'Ñ*4Iè¸Hª­Ô(p;p«•Ç¥QEøÆµ[%{ ‰¿þ‰?Þ€8ÛcñbIÉZÙ©<ÝóÞÈ ‹/õš¼ÀËOú‹<ùÁþã×v¿0Ájê ÷WLî•vB¹Ý}%½—wK^Aˆ%ZC‰Å>EŒ¦v;ì(»p N‰=èVpp;{Aj:œÌaØ•¸P¦‰|ª†Ú}ã–œ—–EÀܽl¿¾ƒ—³ ÛYÅ܇ómu¢Üíga=zâ•t÷÷ylq!!–«DÕy„oáI'Qz…Laú(Þ,sˆD2m-!¨ï7®Y[XÀv}¾û›u…͆Ýßß]¿y3ÃÆás ­ÐWÏ|rõôÞÉSô bÌ„©ÓFÎýðŠúžÃí)¸$•{£>úÑÎö…hndÎ]Lª5¨®¢ {?j˜Ž‚4‡ÍÎ0mà /q•·J"6”éM` §±(€yÐ×iŽ, ëÃá}=}u· Il—J$úêázšÍƒKpê4ÎÀKB†H½Ç› /ñšÐZúVœ/×2½îžÝ^û}BjN*·ŽC[uA>¼ÉÛL$Ô¾ š}Ë4B¡OBïøœM Á©w«”J‰Ô+‹1TÂf7Ù à|ôÍ@°.Xб52”)¬òòi–€þÕ>Ø >DS Ýâmñh(¹wV`ÅęŠßÝ,æ>¨úi‡½P¼ò¨ÉDk5Q…¢%ñ)PÎFà «(=Bf,†ø¿tµév(â^oœþž > ×ÈÕÔˆÒtìßqê3½uDf¨3ü(jëï¶¿ip<ìŸÚ–{.é<*™VÞ–²ÝjT×þhþæ±3ÁÄSæ(òƒf°û¼³Åì.“´€÷uŽå$ÕZᨫn-h²F"Ì%"ªò!$j”z¿Ä±ÃØÁ³ì+ðÒEЛ1“ûÖPƒœ Æ„‡Ij]R %%5 ó2D¼¤À«ˆ4Ø£aÝÑvà‹)¸;•µ¥ÖŸËNw§'DNY2±%i8ÎGÄ|ÞH½Ä!bØ,\ôˆºÉ*¥\"ñ(cÌ~Ü0«bÀhÉâ ೿|ýÛñÃL<Úh'Á{ã3¬ëµÔ|,Ô|žÎ4Ÿ*U¥PG¯mX[ H#Î>ù ;„Ìœe߀ý´Œß¶G7l ï²È™ì.ʂϷÓ<ŸËŒW*•bQ5¤¯Yîs´èS@^¸²ç*ã%%V‰@©¬¢uĢłU…¢{h¨€ÃˆpÜÒˆ*WÀ°£¿Ä›ªíbqµ®ôÞÒÛ‘‚‹90]o‡«”v^Ï@©”¸M'ÂÛt²³ƒN„÷ÐÉÎ;t"„«Yaÿü Òò&<Šº'CmŠÈ}Uèb„+}ª”MVºL4ÒNÔzãt»˜] 7q:#Ê-!ªE t ´he†[àG :8›?\GátÏîiœÇÖ Ô$•6.;¨Ùv€]žI’‹¼ªú3´#WíOÌ̃×ð¸>PLÂ¥‚Ú2Ž÷˜ƒlNí1«dÆÊwm)Ö1C¸”~q„F€$Tønq¦å©,ÄÊÙP”‰#›Cð‘Ô‘¹äÁZ¶áq·?Vz+QNËažpûB¿Ṵ́mHÉ¥¯²áÎÍÂÂt—Œt:”&¸ýcì9D¼,Ïå4ãÌõQ…‡ÏôÇ©˜Ž‹B•Œ«ÆCÂ&B'p?0É-ªðœüÆaC›_sV¸«šiÇà dòœ.³×êÙðó–ŸZ²¸M~? 
[binary data omitted: compressed page, font, and cross-reference streams from the tail of a prebuilt vignette PDF, ending with "startxref 81135" and "%%EOF"]
tm/inst/doc/extensions.R0000644000175100001440000000653114755301614014767 0ustar hornikusers
### R code from vignette source 'extensions.Rnw'

###################################################
### code chunk number 1: Init
###################################################
library("tm")
library("xml2")


###################################################
### code chunk number 2: extensions.Rnw:55-58
###################################################
VecSource <- function(x)
    SimpleSource(length = length(x), content = as.character(x),
                 class = "VecSource")


###################################################
### code chunk number 3: extensions.Rnw:68-72
###################################################
getElem.VecSource <- function(x)
    list(content = x$content[x$position], uri = NULL)
pGetElem.VecSource <- function(x)
    lapply(x$content, function(y) list(content = y, uri = NULL))


###################################################
### code chunk number 4: extensions.Rnw:100-102
###################################################
readPlain <- function(elem, language, id)
    PlainTextDocument(elem$content, id = id, language = language)


###################################################
### code chunk number 5: extensions.Rnw:124-130
###################################################
df <- data.frame(doc_id  = c("doc 1"    , "doc 2"    , "doc 3"    ),
                 text    = c("content 1", "content 2", "content 3"),
                 title   = c("title 1"  , "title 2"  , "title 3"  ),
                 authors = c("author 1" , "author 2" , "author 3" ),
                 topics  = c("topic 1"  , "topic 2"  , "topic 3"  ),
                 stringsAsFactors = FALSE)
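
###################################################
### Illustrative smoke test (editor's addition, not part of extensions.Rnw):
### exercising the custom VecSource defined in code chunks 2-3 above.  A
### minimal sketch; the object names `vec` and `corpus_vec` are made up here,
### and it assumes SimpleSource() keeps the texts in the `content` field,
### exactly as the getElem()/pGetElem() methods above already presume.
###################################################
vec <- VecSource(c("First text.", "Second text."))
vec$length                  # 2: number of documents exposed by the source
pGetElem.VecSource(vec)     # list of list(content = <text>, uri = NULL)
corpus_vec <- VCorpus(vec)  # the custom source should plug into VCorpus()
length(corpus_vec)          # expected: 2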

###################################################
### code chunk number 6: extensions.Rnw:138-141
###################################################
(corpus <- Corpus(DataframeSource(df)))
corpus[[1]]
meta(corpus[[1]])


###################################################
### code chunk number 7: CustomXMLFile
###################################################
custom.xml <- system.file("texts", "custom.xml", package = "tm")
print(readLines(custom.xml), quote = FALSE)


###################################################
### code chunk number 8: mySource
###################################################
mySource <- function(x)
    XMLSource(x, parser = xml2::xml_children, reader = myXMLReader)


###################################################
### code chunk number 9: myXMLReader
###################################################
myXMLReader <- readXML(
    spec = list(author = list("node", "writer"),
                content = list("node", "description"),
                datetimestamp = list("function",
                                     function(x) as.POSIXlt(Sys.time(), tz = "GMT")),
                description = list("node", "@short"),
                heading = list("node", "caption"),
                id = list("function", function(x) tempfile()),
                origin = list("unevaluated", "My private bibliography"),
                type = list("node", "type")),
    doc = PlainTextDocument())


###################################################
### code chunk number 10: extensions.Rnw:244-245
###################################################
corpus <- VCorpus(mySource(custom.xml))


###################################################
### code chunk number 11: extensions.Rnw:249-251
###################################################
corpus[[1]]
meta(corpus[[1]])
tm/inst/doc/extensions.pdf0000644000175100001440000017032014755301616015337 0ustar hornikusers
[binary data omitted: the prebuilt extensions.pdf vignette (PDF 1.5 object, font, and content streams plus XMP metadata dated 2025-02-19, produced by TeX), ending with "startxref 61262" and "%%EOF"]
tm/inst/NEWS.Rd0000644000175100001440000005417714755301540012746 0ustar hornikusers
\name{NEWS}
\title{News for Package 'tm'}
\encoding{UTF-8}

\section{Changes in tm version 0.7-16}{
  \subsection{BUG FIXES}{
    \itemize{
      \item Improvements for Rd cross-references.
    }
  }
}

\section{Changes in tm version 0.7-15}{
  \subsection{BUG FIXES}{
    \itemize{
      \item Improvements for Rd cross-references.
    }
  }
}

\section{Changes in tm version 0.7-14}{
  \subsection{BUG FIXES}{
    \itemize{
      \item Use R_Calloc/R_Free instead of the long-deprecated Calloc/Free.
    }
  }
}

\section{Changes in tm version 0.7-13}{
  \subsection{BUG FIXES}{
    \itemize{
      \item Improvements for Rd cross-references.
    }
  }
}

\section{Changes in tm version 0.7-12}{
  \subsection{BUG FIXES}{
    \itemize{
      \item Add missing S3 method registration.
    }
  }
}

\section{Changes in tm version 0.7-11}{
  \subsection{BUG FIXES}{
    \itemize{
      \item Use the default C++ standard instead of C++11.
    }
  }
}

\section{Changes in tm version 0.7-10}{
  \subsection{NEW FEATURES}{
    \itemize{
      \item All built-in \code{pGetElem()} methods now use
      \code{tm_parLapply()}.
    }
  }
}

\section{Changes in tm version 0.7-9}{
  \subsection{BUG FIXES}{
    \itemize{
      \item Compilation fixes.
    }
  }
}

\section{Changes in tm version 0.7-8}{
  \subsection{BUG FIXES}{
    \itemize{
      \item Fix invalid counting in \code{prevalent} \code{stemCompletion()}.
      Reported by Bernard Chang.
      \item \code{tm_index()} now interprets all non-\code{TRUE} logical
      values returned by the filter function as \code{FALSE}. This fixes
      corner cases where filter functions return \code{logical(0)} or
      \code{NA}. Reported by Tom Nicholls.
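      % Illustrative sketch (editor's addition, not part of the NEWS entries):
      % with the change above, a filter that returns NA or logical(0) now
      % simply deselects the document instead of corrupting the index.  For
      % instance, with the bundled 'crude' corpus one would expect
      %   tm_index(crude, FUN = function(doc) NA)
      % to return a logical vector that is FALSE for every document.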
} } } \section{Changes in tm version 0.7-6}{ \subsection{NEW FEATURES}{ \itemize{ \item \code{TermDocumentMatrix.SimpleCorpus()} now also honors a logical \code{removePunctuation} control option (default: false). } } \subsection{BUG FIXES}{ \itemize{ \item Sync encoding fixes in \code{TermDocumentMatrix.SimpleCorpus()} with \code{Boost_tokenizer()}. } } } \section{Changes in tm version 0.7-5}{ \subsection{BUG FIXES}{ \itemize{ \item Handle \code{NA}s consistently in tokenizers. } } } \section{Changes in tm version 0.7-4}{ \subsection{BUG FIXES}{ \itemize{ \item Keep document names in \code{tm_map.SimpleCorpus()}. \item Fix encoding problems in \code{scan_tokenizer()} and \code{Boost_tokenizer()}. } } } \section{Changes in tm version 0.7-3}{ \subsection{BUG FIXES}{ \itemize{ \item \code{scan_tokenizer()} now works with character vectors and character strings. \item \code{removePunctuation()} now works again in \code{latin1} locales. \item Handle empty term-document matrices gracefully. } } } \section{Changes in tm version 0.7-2}{ \subsection{SIGNIFICANT USER-VISIBLE CHANGES}{ \itemize{ \item \code{DataframeSource} now only processes data frames with the two mandatory columns \code{"doc_id"} and \code{"text"}. Additional columns are used as document level metadata. This implements compatibility with \emph{Text Interchange Formats} corpora (\url{https://github.com/ropenscilabs/tif}). \item \code{readTabular()} has been removed. Use \code{DataframeSource} instead. \item \code{removeNumbers()} and \code{removePunctuation()} now have an argument \code{ucp} to check for Unicode general categories \code{Nd} (decimal digits) and \code{P} (punctuation), respectively. Contributed by Kurt Hornik. \item The package \pkg{xml2} is now imported for \acronym{XML} functionality instead of the (\acronym{CRAN} maintainer orphaned) package \pkg{XML}. } } \subsection{NEW FEATURES}{ \itemize{ \item \code{Boost_tokenizer} provides a tokenizer based on the Boost (\url{https://www.boost.org}) Tokenizer. } } \subsection{BUG FIXES}{ \itemize{ \item Correctly handle the \code{dictionary} argument when constructing a term-document matrix from a \code{SimpleCorpus} (reported by Joe Corrigan) or from a \code{VCorpus} (reported by Mark Rosenstein). } } } \section{Changes in tm version 0.7-1}{ \subsection{BUG FIXES}{ \itemize{ \item Compilation fixes for Clang's libc++. } } } \section{Changes in tm version 0.7}{ \subsection{SIGNIFICANT USER-VISIBLE CHANGES}{ \itemize{ \item \code{inspect.TermDocumentMatrix()} now displays a sample instead of the full matrix. The full dense representation is available via \code{as.matrix()}. } } \subsection{NEW FEATURES}{ \itemize{ \item \code{SimpleCorpus} provides a corpus which is optimized for the most common usage scenario: importing plain texts from files in a directory or directly from a vector in \R, preprocessing and transforming the texts, and finally exporting them to a term-document matrix. The aim is to boost performance and minimize memory pressure. It loads all documents into memory, and is designed for medium-sized to large data sets. \item \code{inspect()} on text documents as a shorthand for \code{writeLines(as.character())}. \item \code{findMostFreqTerms()} finds most frequent terms in a document-term or term-document matrix, or a vector of term frequencies. \item \code{tm_parLapply()} is now internally used for the parallelization of transformations, filters, and term-document matrix construction. 
The preferred parallelization engine can be registered via \code{tm_parLapply_engine()}. The default is to use no parallelization (instead of \code{\link[parallel]{mclapply}} (package \pkg{parallel}) in previous versions). } } } \section{Changes in tm version 0.6-2}{ \subsection{BUG FIXES}{ \itemize{ \item \code{format.PlainTextDocument()} now reports only one character count for a whole document. } } } \section{Changes in tm version 0.6-1}{ \subsection{SIGNIFICANT USER-VISIBLE CHANGES}{ \itemize{ \item \code{format.PlainTextDocument()} now displays a compact representation instead of the content. Use \code{as.character()} to obtain the character content (which in turn can be applied to a corpus via \code{lapply()}). } } \subsection{NEW FEATURES}{ \itemize{ \item \code{ZipSource()} for processing ZIP files. \item Sources now provide \code{open()} and \code{close()}. \item \code{termFreq()} now accepts \code{Span_Tokenizer} and \code{Token_Tokenizer} (both from package \pkg{NLP}) objects as tokenizers. \item \code{readTagged()}, a reader for text documents containing POS-tagged words. } } \subsection{BUG FIXES}{ \itemize{ \item The function \code{removeWords()} now correctly processes words being truncations of others. Reported by ÐлекÑандр Труфанов. } } } \section{Changes in tm version 0.6}{ \subsection{SIGNIFICANT USER-VISIBLE CHANGES}{ \itemize{ \item \code{DirSource()} and \code{URISource()} now use the argument \code{encoding} for conversion via \code{iconv()} to \code{"UTF-8"}. \item \code{termFreq()} now uses \code{words()} as the default tokenizer. \item Text documents now provide the functions \code{content()} and \code{as.character()} to access the (possibly raw) document content and the natural language text in a suitable (not necessarily structured) form. \item The internal representation of corpora, sources, and text documents changed. Saved objects created with older \pkg{tm} versions are incompatible and need to be rebuilt. } } \subsection{NEW FEATURES}{ \itemize{ \item \code{DirSource()} and \code{URISource()} now have a \code{mode} argument specifying how elements should be read (no read, binary, text). \item Improved high-level documentation on corpora (\code{?Corpus}), text documents (\code{?TextDocument}), sources (\code{?Source}), and readers (\code{?Reader}). \item Integration with package \pkg{NLP}. \item Romanian stopwords. Suggested by Cristian Chirita. \item \code{words.PlainTextDocument()} delivers word tokens in the document. } } \subsection{BUG FIXES}{ \itemize{ \item The function \code{stemCompletion()} now avoids spurious duplicate results. Reported by Seong-Hyeon Kim. } } \subsection{DEPRECATED & DEFUNCT}{ \itemize{ \item Following functions have been removed: \itemize{ \item \code{Author()}, \code{DateTimeStamp()}, \code{CMetaData()}, \code{content_meta()}, \code{DMetaData()}, \code{Description()}, \code{Heading()}, \code{ID()}, \code{Language()}, \code{LocalMetaData()}, \code{Origin()}, \code{prescindMeta()}, \code{sFilter()} (use \code{meta()} instead). \item \code{dissimilarity()} (use \code{proxy::dist()} instead). \item \code{makeChunks()} (use \code{[} and \code{[[} manually). \item \code{summary.Corpus()} and \code{summary.TextRepository()} (\code{print()} now gives a more informative but succinct overview). \item \code{TextRepository()} and \code{RepoMetaData()} (use e.g. a list to store multiple corpora instead). 
} } } } \section{Changes in tm version 0.5-10}{ \subsection{SIGNIFICANT USER-VISIBLE CHANGES}{ \itemize{ \item License changed to GPL-3 (from GPL-2 | GPL-3). \item Following functions have been renamed: \itemize{ \item \code{tm_tag_score()} to \code{tm_term_score()}. } } } \subsection{DEPRECATED & DEFUNCT}{ \itemize{ \item Following functions have been removed: \itemize{ \item \code{Dictionary()} (use a character vector instead; use \code{Terms()} to extract terms from a document-term or term-document matrix), \item \code{GmaneSource()} (but still available via an example in \code{XMLSource()}), \item \code{preprocessReut21578XML()} (moved to package \pkg{tm.corpus.Reuters21578}), \item \code{readGmane()} (but still available via an example in \code{readXML()}), \item \code{searchFullText()} and \code{tm_intersect()} (use \code{grep()} instead). } \item Following S3 classes are no longer registered as S4 classes: \itemize{ \item \code{VCorpus} and \code{PlainTextDocument}. } } } } \section{Changes in tm version 0.5-9}{ \subsection{SIGNIFICANT USER-VISIBLE CHANGES}{ \itemize{ \item Stemming functionality is now provided by the package \pkg{SnowballC} replacing packages \pkg{Snowball} and \pkg{RWeka}. \item All stopword lists (besides Catalan and SMART) available via \code{stopwords()} now come from the Snowball stemmer project. \item Transformations, filters, and term-document matrix construction now use \code{\link[parallel]{mclapply}} (package \pkg{parallel}). Packages \pkg{snow} and \pkg{Rmpi} are no longer used. } } \subsection{DEPRECATED & DEFUNCT}{ \itemize{ \item Following functions have been removed: \itemize{ \item \code{tm_startCluster()} and \code{tm_stopCluster()}. } } } } \section{Changes in tm version 0.5-8}{ \subsection{SIGNIFICANT USER-VISIBLE CHANGES}{ \itemize{ \item The function \code{termFreq()} now processes the \code{tolower} and \code{tokenize} options first. } } \subsection{NEW FEATURES}{ \itemize{ \item Catalan stopwords. Requested by Xavier Fernández i Marín. } } \subsection{BUG FIXES}{ \itemize{ \item The function \code{termFreq()} now correctly accepts user-provided stopwords. Reported by Bettina Grün. \item The function \code{termFreq()} now correctly handles the lower bound of the option \code{wordLength}. Reported by Steven C. Bagley. } } } \section{Changes in tm version 0.5-7}{ \subsection{SIGNIFICANT USER-VISIBLE CHANGES}{ \itemize{ \item The function \code{termFreq()} provides two new arguments for generalized bounds checking of term frequencies and word lengths. This replaces the arguments minDocFreq and minWordLength. \item The function \code{termFreq()} is now sensitive to the order of control options. } } \subsection{NEW FEATURES}{ \itemize{ \item Weighting schemata for term-document matrices in SMART notation. \item Local and global options for term-document matrix construction. \item SMART stopword list was added. } } } \section{Changes in tm version 0.5-5}{ \subsection{NEW FEATURES}{ \itemize{ \item Access documents in a corpus by names (fallback to IDs if names are not set), i.e., allow a string for the corpus operator `[[`. } } \subsection{BUG FIXES}{ \itemize{ \item The function \code{findFreqTerms()} now checks bounds on a global level (to comply with the manual page) instead per document. Reported and fixed by Thomas Zapf-Schramm. } } } \section{Changes in tm version 0.5-4}{ \subsection{SIGNIFICANT USER-VISIBLE CHANGES}{ \itemize{ \item Use IETF language tags for language codes (instead of ISO 639-2). 
} } \subsection{NEW FEATURES}{ \itemize{ \item The function \code{tm_tag_score()} provides functionality to score documents based on the number of tags found. This is useful for sentiment analysis. \item The weighting function for term frequency-inverse document frequency \code{weightTfIdf()} has now an option for term normalization. \item Plotting functions to test for Zipf's and Heaps' law on a term-document matrix were added: \code{Zipf_plot()} and \code{Heaps_plot()}. Contributed by Kurt Hornik. } } } \section{Changes in tm version 0.5-3}{ \subsection{NEW FEATURES}{ \itemize{ \item The reader function \code{readRCV1asPlain()} was added and combines the functionality of \code{readRCV1()} and \code{as.PlainTextDocument()}. \item The function \code{stemCompletion()} has a set of new heuristics. } } } \section{Changes in tm version 0.5-2}{ \subsection{SIGNIFICANT USER-VISIBLE CHANGES}{ \itemize{ \item The function \code{termFreq()} which is used for building a term-document matrix now uses a whitespace oriented tokenizer as default. } } \subsection{NEW FEATURES}{ \itemize{ \item A combine method for merging multiple term-document matrices was added (\code{c.TermDocumentMatrix()}). \item The function \code{termFreq()} has now an option to remove punctuation characters. } } \subsection{DEPRECATED & DEFUNCT}{ \itemize{ \item Following functions have been removed: \itemize{ \item \code{CSVSource()} (use \code{DataframeSource(read.csv(..., stringsAsFactors = FALSE))} instead), and \item \code{TermDocMatrix()} (use \code{DocumentTermMatrix()} instead). } } } \subsection{BUG FIXES}{ \itemize{ \item \code{removeWords()} no longer skips words at the beginning or the end of a line. Reported by Mark Kimpel. } } } \section{Changes in tm version 0.5-1}{ \subsection{BUG FIXES}{ \itemize{ \item \code{preprocessReut21578XML()} no longer generates invalid file names. } } } \section{Changes in tm version 0.5}{ \subsection{SIGNIFICANT USER-VISIBLE CHANGES}{ \itemize{ \item All classes, functions, and generics are reimplemented using the S3 class system. \item Following functions have been renamed: \itemize{ \item \code{activateCluster()} to \code{tm_startCluster()}, \item \code{asPlain()} to \code{as.PlainTextDocument()}, \item \code{deactivateCluster()} to \code{tm_stopCluster()}, \item \code{tmFilter()} to \code{tm_filter()}, \item \code{tmIndex()} to \code{tm_index()}, \item \code{tmIntersect()} to \code{tm_intersect()}, and \item \code{tmMap()} to \code{tm_map()}. } \item Mail handling functionality is factored out to the \pkg{tm.plugin.mail} package. } } \subsection{DEPRECATED & DEFUNCT}{ \itemize{ \item Following functions have been removed: \itemize{ \item \code{tmTolower()} (use \code{tolower()} instead), and \item \code{replacePatterns()} (use \code{gsub()} instead). } } } } \section{Changes in tm version 0.4}{ \subsection{SIGNIFICANT USER-VISIBLE CHANGES}{ \itemize{ \item The Corpus class is now virtual providing an abstract interface. \item VCorpus, the default implementation of the abstract corpus interface (by subclassing), provides a corpus with volatile (= standard \R object) semantics. It loads all documents into memory, and is designed for small to medium-sized data sets. \item PCorpus, an implementation of the abstract corpus interface (by subclassing), provides a corpus with permanent storage semantics. The actual data is stored in an external database (file) object (as supported by the \pkg{filehash} package), with automatic (un-)loading into memory. 
It is designed for systems with small memory. \item Language codes are now in ISO 639-2 (instead of ISO 639-1). \item Reader functions no longer have a load argument for lazy loading. } } \subsection{NEW FEATURES}{ \itemize{ \item The reader function \code{readReut21578XMLasPlain()} was added and combines the functionality of \code{readReut21578XML()} and \code{asPlain()}. } } \subsection{BUG FIXES}{ \itemize{ \item \code{weightTfIdf()} no longer applies a binary weighting to an input matrix in term frequency format (which happened only in 0.3-4). } } } \section{Changes in tm version 0.3-4}{ \subsection{SIGNIFICANT USER-VISIBLE CHANGES}{ \itemize{ \item \code{.onLoad()} no longer tries to start a MPI cluster (which often failed in misconfigured environments). Use \code{activateCluster()} and \code{deactivateCluster()} instead. \item DocumentTermMatrix (the improved reimplementation of defunct TermDocMatrix) does not use the \pkg{Matrix} package anymore. } } \subsection{NEW FEATURES}{ \itemize{ \item The \code{DirSource()} constructor now accepts the two new (optional) arguments pattern and ignore.case. With pattern one can define a regular expression for selecting only matching files, and ignore.case specifies whether pattern-matching is case-sensitive. \item The \code{readNewsgroup()} reader function can now be configured for custom date formats (via the DateFormat argument). \item The \code{readPDF()} reader function can now be configured (via the PdfinfoOptions and PdftotextOptions arguments). \item The \code{readDOC()} reader function can now be configured (via the AntiwordOptions argument). \item Sources now can be vectorized. This allows faster corpus construction. \item New XMLSource class for arbitrary XML files. \item The new \code{readTabular()} reader function allows to create a custom tailor-made reader configured via mappings from a tabular data structure. \item The new \code{readXML()} reader function allows to read in arbitrary XML files which are described with a specification. \item The new \code{tmReduce()} transformation allows to combine multiple maps into one transformation. } } \subsection{DEPRECATED & DEFUNCT}{ \itemize{ \item CSVSource is defunct (use DataframeSource instead). \item weightLogical is defunct. \item TermDocMatrix is defunct (use DocumentTermMatrix or TermDocumentMatrix instead). } } } \section{Changes in tm version 0.3-3}{ \subsection{NEW FEATURES}{ \itemize{ \item The abstract Source class gets a default implementation for the \code{stepNext()} method. It increments the position counter by one, a reasonable value for most sources. For special purposes custom methods can be created via overloading \code{stepNext()} of the subclass. \item New URISource class for a single document identified by a Uniform Resource Identifier. \item New DataframeSource for documents stored in a data frame. Each row is interpreted as a single document. } } \subsection{BUG FIXES}{ \itemize{ \item Fix off-by-one error in \code{convertMboxEml()} function. Reported by Angela Bohn. \item Sort row indices in sparse term-document matrices. Kudos to Martin Mächler for his suggestions. \item Sources and readers no longer evaluate calls in a non-standard way. } } } \section{Changes in tm version 0.3-2}{ \subsection{NEW FEATURES}{ \itemize{ \item Weighting functions now have an Acronym slot containing abbreviations of the weighting functions' names. This is highly useful when generating tables with indications which weighting scheme was actually used for your experiments. 
\item The functions \code{tmFilter()}, \code{tmIndex()}, \code{tmMap()} and \code{TermDocMatrix()} now can use a MPI cluster (via the \pkg{snow} and \pkg{Rmpi} packages) if available. Use \code{(de)activateCluster()} to manually override cluster usage settings. Special thanks to Stefan Theussl for his constructive comments. \item The Source class receives a new Length slot. It contains the number of elements provided by the source (although there might be rare cases where the number cannot be determined in advance---then it should be set to zero). } } } tm/build/0000755000175100001440000000000014755301616012017 5ustar hornikuserstm/build/vignette.rds0000644000175100001440000000035714755301616014363 0ustar hornikusers‹}PË‚0¬€(D‰wÃð>ãŨ¯%Bk` zóËÅE[&zèîÎt¶3éÎeŒ̲ f˜4š#*6á›gõ\DK‘ûkQ(ÖÆô…šZwöÑ*f¼˜Éð ‘Jàaê­xpâøãu£šW…šÚ^=—â¬*õÑK÷Ö3Õ[Jãl„,ö¡Ç™»Ú0Ütm/build/partial.rdb0000644000175100001440000000007514755301573014150 0ustar hornikusers‹‹àb```b`aab`b1…ÀÈg``d`aàÒ¬y‰¹©Å@†D’áÝ?M7tm/man/0000755000175100001440000000000014656644634011505 5ustar hornikuserstm/man/DataframeSource.Rd0000644000175100001440000000246213337466212015032 0ustar hornikusers\name{DataframeSource} \alias{DataframeSource} \title{Data Frame Source} \description{ Create a data frame source. } \usage{ DataframeSource(x) } \arguments{ \item{x}{A data frame giving the texts and metadata.} } \details{ A \emph{data frame source} interprets each row of the data frame \code{x} as a document. The first column must be named \code{"doc_id"} and contain a unique string identifier for each document. The second column must be named \code{"text"} and contain a UTF-8 encoded string representing the document's content. Optional additional columns are used as document level metadata. } \value{ An object inheriting from \code{DataframeSource}, \code{\link{SimpleSource}}, and \code{\link{Source}}. } \seealso{ \code{\link{Source}} for basic information on the source infrastructure employed by package \pkg{tm}, and \code{\link{meta}} for types of metadata. \code{\link[readtext]{readtext}} for reading in a text in multiple formats suitable to be processed by \code{DataframeSource}. } \examples{ docs <- data.frame(doc_id = c("doc_1", "doc_2"), text = c("This is a text.", "This another one."), dmeta1 = 1:2, dmeta2 = letters[1:2], stringsAsFactors = FALSE) (ds <- DataframeSource(docs)) x <- Corpus(ds) inspect(x) meta(x) } tm/man/PlainTextDocument.Rd0000644000175100001440000000455013754752644015406 0ustar hornikusers\name{PlainTextDocument} \alias{PlainTextDocument} \title{Plain Text Documents} \description{ Create plain text documents. } \usage{ PlainTextDocument(x = character(0), author = character(0), datetimestamp = as.POSIXlt(Sys.time(), tz = "GMT"), description = character(0), heading = character(0), id = character(0), language = character(0), origin = character(0), \dots, meta = NULL, class = NULL) } \arguments{ \item{x}{A character string giving the plain text content.} \item{author}{a character string or an object of class \code{\link{person}} giving the author names.} \item{datetimestamp}{an object of class \code{\link{POSIXt}} or a character string giving the creation date/time information. If a character string, exactly one of the \acronym{ISO} 8601 formats defined by \url{https://www.w3.org/TR/NOTE-datetime} should be used. See \code{\link[NLP]{parse_ISO_8601_datetime}} in package \pkg{NLP} for processing such date/time information. 
} \item{description}{a character string giving a description.} \item{heading}{a character string giving the title or a short heading.} \item{id}{a character string giving a unique identifier.} \item{language}{a character string giving the language (preferably as \acronym{IETF} language tags, see \link[NLP]{language} in package \pkg{NLP}).} \item{origin}{a character string giving information on the source and origin.} \item{\dots}{user-defined document metadata tag-value pairs.} \item{meta}{a named list or \code{NULL} (default) giving all metadata. If set all other metadata arguments are ignored.} \item{class}{a character vector or \code{NULL} (default) giving additional classes to be used for the created plain text document.} } \value{ An object inheriting from \code{class}, \code{PlainTextDocument} and \code{\link{TextDocument}}. } \seealso{ \code{\link{TextDocument}} for basic information on the text document infrastructure employed by package \pkg{tm}. } \examples{ (ptd <- PlainTextDocument("A simple plain text document", heading = "Plain text document", id = basename(tempfile()), language = "en")) meta(ptd) } tm/man/writeCorpus.Rd0000644000175100001440000000147312327470624014314 0ustar hornikusers\name{writeCorpus} \alias{writeCorpus} \title{Write a Corpus to Disk} \description{ Write a plain text representation of a corpus to multiple files on disk corresponding to the individual documents in the corpus. } \usage{ writeCorpus(x, path = ".", filenames = NULL) } \arguments{ \item{x}{A corpus.} \item{path}{A character listing the directory to be written into.} \item{filenames}{Either \code{NULL} or a character vector. In case no filenames are provided, filenames are automatically generated by using the documents' identifiers in \code{x}.} } \details{ The plain text representation of the corpus is obtained by calling \code{as.character} on each document. } \examples{ data("crude") \dontrun{writeCorpus(crude, path = ".", filenames = paste(seq_along(crude), ".txt", sep = ""))} } tm/man/TextDocument.Rd0000644000175100001440000000204514610730625014402 0ustar hornikusers\name{TextDocument} \alias{TextDocument} \title{Text Documents} \description{ Representing and computing on text documents. } \details{ \emph{Text documents} are documents containing (natural language) text. The \pkg{tm} package employs the infrastructure provided by package \pkg{NLP} and represents text documents via the virtual S3 class \code{TextDocument}. Actual S3 text document classes then extend the virtual base class (such as \code{\link{PlainTextDocument}}). All extension classes must provide an \code{\link{as.character}} method which extracts the natural language text in documents of the respective classes in a \dQuote{suitable} (not necessarily structured) form, as well as \code{\link[NLP]{content}} and \code{\link{meta}} methods for accessing the (possibly raw) document content and metadata. } \seealso{ \code{\link{PlainTextDocument}}, and \code{\link{XMLTextDocument}} for the text document classes provided by package \pkg{tm}. \code{\link[NLP]{TextDocument}} for text documents in package \pkg{NLP}. } tm/man/VCorpus.Rd0000644000175100001440000000255313177025175013370 0ustar hornikusers\name{VCorpus} \alias{VCorpus} \alias{as.VCorpus} \title{Volatile Corpora} \description{ Create volatile corpora. 
} \usage{ VCorpus(x, readerControl = list(reader = reader(x), language = "en")) as.VCorpus(x) } \arguments{ \item{x}{For \code{VCorpus} a \code{\link{Source}} object, and for \code{as.VCorpus} an \R object.} \item{readerControl}{a named list of control parameters for reading in content from \code{x}. \describe{ \item{\code{reader}}{a function capable of reading in and processing the format delivered by \code{x}.} \item{\code{language}}{a character giving the language (preferably as \acronym{IETF} language tags, see \link[NLP]{language} in package \pkg{NLP}). The default language is assumed to be English (\code{"en"}).} } } } \value{ An object inheriting from \code{VCorpus} and \code{Corpus}. } \details{ A \emph{volatile corpus} is fully kept in memory and thus all changes only affect the corresponding \R object. } \seealso{ \code{\link{Corpus}} for basic information on the corpus infrastructure employed by package \pkg{tm}. \code{\link{PCorpus}} provides an implementation with permanent storage semantics. } \examples{ reut21578 <- system.file("texts", "crude", package = "tm") VCorpus(DirSource(reut21578, mode = "binary"), list(reader = readReut21578XMLasPlain)) } tm/man/ZipSource.Rd0000644000175100001440000000322113177024653013703 0ustar hornikusers\name{ZipSource} \alias{ZipSource} \title{ZIP File Source} \description{ Create a ZIP file source. } \usage{ ZipSource(zipfile, pattern = NULL, recursive = FALSE, ignore.case = FALSE, mode = "text") } \arguments{ \item{zipfile}{A character string with the full path name of a ZIP file.} \item{pattern}{an optional regular expression. Only file names in the ZIP file which match the regular expression will be returned.} \item{recursive}{logical. Should the listing recurse into directories?} \item{ignore.case}{logical. Should pattern-matching be case-insensitive?} \item{mode}{a character string specifying if and how files should be read in. Available modes are: \describe{ \item{\code{""}}{No read. In this case \code{\link{getElem}} and \code{\link{pGetElem}} only deliver \acronym{URI}s.} \item{\code{"binary"}}{Files are read in binary raw mode (via \code{\link{readBin}}).} \item{\code{"text"}}{Files are read as text (via \code{\link{readLines}}).} } } } \details{ A \emph{ZIP file source} extracts a compressed ZIP file via \code{\link{unzip}} and interprets each file as a document. } \value{ An object inheriting from \code{ZipSource}, \code{\link{SimpleSource}}, and \code{\link{Source}}. } \seealso{ \code{\link{Source}} for basic information on the source infrastructure employed by package \pkg{tm}. } \examples{ zipfile <- tempfile() files <- Sys.glob(file.path(system.file("texts", "txt", package = "tm"), "*")) zip(zipfile, files) zipfile <- paste0(zipfile, ".zip") Corpus(ZipSource(zipfile, recursive = TRUE))[[1]] file.remove(zipfile) } tm/man/tokenizer.Rd0000644000175100001440000000345714656603573014014 0ustar hornikusers\name{tokenizer} \alias{Boost_tokenizer} \alias{MC_tokenizer} \alias{scan_tokenizer} \title{Tokenizers} \description{Tokenize a document or character vector.} \usage{ Boost_tokenizer(x) MC_tokenizer(x) scan_tokenizer(x) } \arguments{ \item{x}{A character vector, or an object that can be coerced to character by \code{as.character}.} } \value{ A character vector consisting of tokens obtained by tokenization of \code{x}. } \details{ The quality and correctness of a tokenization algorithm highly depends on the context and application scenario. 
Relevant factors are the language of the underlying text and the notions of whitespace (which can vary with the used encoding and the language) and punctuation marks. Consequently, for superior results you probably need a custom tokenization function. \describe{ \item{Boost_tokenizer}{Uses the Boost (\url{https://www.boost.org}) Tokenizer (via \pkg{Rcpp}).} \item{MC_tokenizer}{Implements the functionality of the tokenizer in the MC toolkit (\url{https://www.cs.utexas.edu/~dml/software/mc/}).} \item{scan_tokenizer}{Simulates \code{scan(\dots, what = "character")}.} } } \seealso{ \code{\link{getTokenizers}} to list tokenizers provided by package \pkg{tm}. \code{\link[NLP]{Regexp_Tokenizer}} for tokenizers using regular expressions provided by package \pkg{NLP}. \code{\link[tau]{tokenize}} for a simple regular expression based tokenizer provided by package \pkg{tau}. \code{\link[tokenizers]{tokenizers}} for a collection of tokenizers provided by package \pkg{tokenizers}. } \examples{ data("crude") Boost_tokenizer(crude[[1]]) MC_tokenizer(crude[[1]]) scan_tokenizer(crude[[1]]) strsplit_space_tokenizer <- function(x) unlist(strsplit(as.character(x), "[[:space:]]+")) strsplit_space_tokenizer(crude[[1]]) } tm/man/stopwords.Rd0000644000175100001440000000354514656603547014045 0ustar hornikusers\name{stopwords} \alias{stopwords} \title{Stopwords} \description{ Return various kinds of stopwords with support for different languages. } \usage{ stopwords(kind = "en") } \arguments{ \item{kind}{A character string identifying the desired stopword list.} } \details{ Available stopword lists are: \describe{ \item{\code{catalan}}{Catalan stopwords (obtained from \url{http://latel.upf.edu/morgana/altres/pub/ca_stop.htm}),} \item{\code{romanian}}{Romanian stopwords (extracted from \url{http://snowball.tartarus.org/otherapps/romanian/romanian1.tgz}),} \item{\code{SMART}}{English stopwords from the SMART information retrieval system (as documented in Appendix 11 of \url{https://jmlr.csail.mit.edu/papers/volume5/lewis04a/}) (which coincides with the stopword list used by the MC toolkit (\url{https://www.cs.utexas.edu/~dml/software/mc/})),} } and a set of stopword lists from the Snowball stemmer project in different languages (obtained from \samp{http://svn.tartarus.org/snowball/trunk/website/algorithms/*/stop.txt}). Supported languages are \code{danish}, \code{dutch}, \code{english}, \code{finnish}, \code{french}, \code{german}, \code{hungarian}, \code{italian}, \code{norwegian}, \code{portuguese}, \code{russian}, \code{spanish}, and \code{swedish}. Language names are case sensitive. Alternatively, their \acronym{IETF} language tags may be used. % % Earlier \pkg{tm} versions (before 2013-06-14) used merged stopword lists from % \url{http://www.ranks.nl/resources/stopwords.html} and the Snowball stemmer % project. } \value{A character vector containing the requested stopwords. An error is raised if no stopwords are available for the requested \code{kind}.} \examples{ stopwords("en") stopwords("SMART") stopwords("german") } \keyword{file} tm/man/readRCV1.Rd0000644000175100001440000000236013754753016013335 0ustar hornikusers\name{readRCV1} \alias{readRCV1} \alias{readRCV1asPlain} \title{Read In a Reuters Corpus Volume 1 Document} \description{ Read in a Reuters Corpus Volume 1 \acronym{XML} document. 
} \usage{ readRCV1(elem, language, id) readRCV1asPlain(elem, language, id) } \arguments{ \item{elem}{a named list with the component \code{content} which must hold the document to be read in.} \item{language}{a string giving the language.} \item{id}{Not used.} } \value{ An \code{\link{XMLTextDocument}} for \code{readRCV1}, or a \code{\link{PlainTextDocument}} for \code{readRCV1asPlain}, representing the text and metadata extracted from \code{elem$content}. } \seealso{ \code{\link{Reader}} for basic information on the reader infrastructure employed by package \pkg{tm}. } \references{ Lewis, D. D.; Yang, Y.; Rose, T.; and Li, F (2004). RCV1: A New Benchmark Collection for Text Categorization Research. \emph{Journal of Machine Learning Research}, \bold{5}, 361--397. \url{https://www.jmlr.org/papers/volume5/lewis04a/lewis04a.pdf} } \examples{ f <- system.file("texts", "rcv1_2330.xml", package = "tm") f_bin <- readBin(f, raw(), file.size(f)) rcv1 <- readRCV1(elem = list(content = f_bin), language = "en", id = "id1") content(rcv1) meta(rcv1) } tm/man/readXML.Rd0000644000175100001440000000517413155253051013255 0ustar hornikusers\name{readXML} \alias{readXML} \title{Read In an XML Document} \description{ Return a function which reads in an \acronym{XML} document. The structure of the \acronym{XML} document is described with a specification. } \usage{ readXML(spec, doc) } \arguments{ \item{spec}{A named list of lists each containing two components. The constructed reader will map each list entry to the content or metadatum of the text document as specified by the named list entry. Valid names include \code{content} to access the document's content, and character strings which are mapped to metadata entries. Each list entry must consist of two components: the first must be a string describing the type of the second argument, and the second is the specification entry. Valid combinations are: \describe{ \item{\code{type = "node", spec = "XPathExpression"}}{The XPath (1.0) expression \code{spec} extracts information from an \acronym{XML} node.} \item{\code{type = "function", spec = function(doc) \dots}}{The function \code{spec} is called, passing over the \acronym{XML} document (as delivered by \code{\link[xml2]{read_xml}} from package \pkg{xml2}) as first argument.} \item{\code{type = "unevaluated", spec = "String"}}{The character vector \code{spec} is returned without modification.} } } \item{doc}{An (empty) document of some subclass of \code{TextDocument}.} } \details{ Formally this function is a function generator, i.e., it returns a function (which reads in a text document) with a well-defined signature, but can access passed over arguments (e.g., the specification) via lexical scoping. } \value{ A function with the following formals: \describe{ \item{\code{elem}}{a named list with the component \code{content} which must hold the document to be read in.} \item{\code{language}}{a string giving the language.} \item{\code{id}}{a character giving a unique identifier for the created text document.} } The function returns \code{doc} augmented by the parsed information as described by \code{spec} out of the \acronym{XML} file in \code{elem$content}. The arguments \code{language} and \code{id} are used as fallback: \code{language} if no corresponding metadata entry is found in \code{elem$content}, and \code{id} if no corresponding metadata entry is found in \code{elem$content} and if \code{elem$uri} is null. 
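For illustration only, a reader for a hypothetical custom \acronym{XML} format (the element names, the attribute \code{id}, and the name \code{custom_reader} below are invented for this sketch) could be generated along these lines: \preformatted{
custom_reader <- readXML(
    spec = list(
        content = list("node", "/article/body"),
        heading = list("node", "/article/title"),
        id      = list("function", function(doc)
                       xml2::xml_attr(xml2::xml_root(doc), "id")),
        origin  = list("unevaluated", "Custom XML feed")
    ),
    doc = PlainTextDocument())
}
The generated \code{custom_reader} then has the formals described above and can be supplied as the \code{reader} component of \code{readerControl} when constructing a corpus from an \code{\link{XMLSource}}.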
} \seealso{ \code{\link{Reader}} for basic information on the reader infrastructure employed by package \pkg{tm}. Vignette 'Extensions: How to Handle Custom File Formats', and \code{\link{XMLSource}}. } tm/man/tm_term_score.Rd0000644000175100001440000000322113023472150014607 0ustar hornikusers\name{tm_term_score} \alias{tm_term_score} \alias{tm_term_score.DocumentTermMatrix} \alias{tm_term_score.term_frequency} \alias{tm_term_score.PlainTextDocument} \alias{tm_term_score.TermDocumentMatrix} \title{Compute Score for Matching Terms} \description{ Compute a score based on the number of matching terms. } \usage{ \method{tm_term_score}{DocumentTermMatrix}(x, terms, FUN = row_sums) \method{tm_term_score}{PlainTextDocument}(x, terms, FUN = function(x) sum(x, na.rm = TRUE)) \method{tm_term_score}{term_frequency}(x, terms, FUN = function(x) sum(x, na.rm = TRUE)) \method{tm_term_score}{TermDocumentMatrix}(x, terms, FUN = col_sums) } \arguments{ \item{x}{Either a \code{\link{PlainTextDocument}}, a term frequency as returned by \code{\link{termFreq}}, or a \code{\link{TermDocumentMatrix}}.} \item{terms}{A character vector of terms to be matched.} \item{FUN}{A function computing a score from the number of terms matching in \code{x}.} } \value{ A score as computed by \code{FUN} from the number of matching \code{terms} in \code{x}. } \examples{ data("acq") tm_term_score(acq[[1]], c("company", "change")) \dontrun{## Test for positive and negative sentiments ## install.packages("tm.lexicon.GeneralInquirer", repos="http://datacube.wu.ac.at", type="source") require("tm.lexicon.GeneralInquirer") sapply(acq[1:10], tm_term_score, terms_in_General_Inquirer_categories("Positiv")) sapply(acq[1:10], tm_term_score, terms_in_General_Inquirer_categories("Negativ")) tm_term_score(TermDocumentMatrix(acq[1:10], control = list(removePunctuation = TRUE)), terms_in_General_Inquirer_categories("Positiv"))} } tm/man/PCorpus.Rd0000644000175100001440000000357613337466717013401 0ustar hornikusers\name{PCorpus} \alias{PCorpus} \title{Permanent Corpora} \description{ Create permanent corpora. } \usage{ PCorpus(x, readerControl = list(reader = reader(x), language = "en"), dbControl = list(dbName = "", dbType = "DB1")) } \arguments{ \item{x}{A \code{\link{Source}} object.} \item{readerControl}{a named list of control parameters for reading in content from \code{x}. \describe{ \item{\code{reader}}{a function capable of reading in and processing the format delivered by \code{x}.} \item{\code{language}}{a character giving the language (preferably as \acronym{IETF} language tags, see \link[NLP]{language} in package \pkg{NLP}). The default language is assumed to be English (\code{"en"}).} } } \item{dbControl}{a named list of control parameters for the underlying database storage provided by package \pkg{filehash}. \describe{ \item{\code{dbName}}{a character giving the filename for the database.} \item{\code{dbType}}{a character giving the database format (see \code{\link[filehash]{filehashOption}} for possible database formats).} } } } \value{ An object inheriting from \code{PCorpus} and \code{Corpus}. } \details{ A \emph{permanent corpus} stores documents outside of \R in a database. Since multiple \code{PCorpus} \R objects with the same underlying database can exist simultaneously in memory, changes in one get propagated to all corresponding objects (in contrast to the default \R semantics). } \seealso{ \code{\link[tm]{Corpus}} for basic information on the corpus infrastructure employed by package \pkg{tm}. 
\code{\link{VCorpus}} provides an implementation with volatile storage semantics. } \examples{ txt <- system.file("texts", "txt", package = "tm") \dontrun{ PCorpus(DirSource(txt), dbControl = list(dbName = "pcorpus.db", dbType = "DB1"))} } tm/man/combine.Rd0000644000175100001440000000254112323504623013370 0ustar hornikusers\name{tm_combine} \alias{c.VCorpus} \alias{c.TextDocument} \alias{c.TermDocumentMatrix} \alias{c.term_frequency} \title{Combine Corpora, Documents, Term-Document Matrices, and Term Frequency Vectors} \description{ Combine several corpora into a single one, combine multiple documents into a corpus, combine multiple term-document matrices into a single one, or combine multiple term frequency vectors into a single term-document matrix. } \usage{ \method{c}{VCorpus}(\dots, recursive = FALSE) \method{c}{TextDocument}(\dots, recursive = FALSE) \method{c}{TermDocumentMatrix}(\dots, recursive = FALSE) \method{c}{term_frequency}(\dots, recursive = FALSE) } \arguments{ \item{\dots}{Corpora, text documents, term-document matrices, or term frequency vectors.} \item{recursive}{Not used.} } \seealso{ \code{\link{VCorpus}}, \code{\link{TextDocument}}, \code{\link{TermDocumentMatrix}}, and \code{\link{termFreq}}. } \examples{ data("acq") data("crude") meta(acq, "comment", type = "corpus") <- "Acquisitions" meta(crude, "comment", type = "corpus") <- "Crude oil" meta(acq, "acqLabels") <- 1:50 meta(acq, "jointLabels") <- 1:50 meta(crude, "crudeLabels") <- letters[1:20] meta(crude, "jointLabels") <- 1:20 c(acq, crude) meta(c(acq, crude), type = "corpus") meta(c(acq, crude)) c(acq[[30]], crude[[10]]) c(TermDocumentMatrix(acq), TermDocumentMatrix(crude)) } tm/man/findMostFreqTerms.Rd0000644000175100001440000000322713023461533015372 0ustar hornikusers\name{findMostFreqTerms} \alias{findMostFreqTerms} \alias{findMostFreqTerms.term_frequency} \alias{findMostFreqTerms.DocumentTermMatrix} \alias{findMostFreqTerms.TermDocumentMatrix} \title{Find Most Frequent Terms} \description{ Find most frequent terms in a document-term or term-document matrix, or a vector of term frequencies. } \usage{ findMostFreqTerms(x, n = 6L, ...) \S3method{findMostFreqTerms}{DocumentTermMatrix}(x, n = 6L, INDEX = NULL, ...) \S3method{findMostFreqTerms}{TermDocumentMatrix}(x, n = 6L, INDEX = NULL, ...) } \arguments{ \item{x}{A \code{\link{DocumentTermMatrix}} or \code{\link{TermDocumentMatrix}}, or a vector of term frequencies as obtained by \code{\link{termFreq}()}.} \item{n}{A single integer giving the maximal number of terms.} \item{INDEX}{an object specifying a grouping of documents for rollup, or \code{NULL} (default) in which case each document is considered individually.} \item{...}{arguments to be passed to or from methods.} } \value{ For the document-term or term-document matrix methods, a list with the named frequencies of the up to \code{n} most frequent terms occurring in each document (group). Otherwise, a single such vector of most frequent terms. } \details{ Only terms with positive frequencies are included in the results. 
} \examples{ data("crude") ## Term frequencies: tf <- termFreq(crude[[14L]]) findMostFreqTerms(tf) ## Document-term matrices: dtm <- DocumentTermMatrix(crude) ## Most frequent terms for each document: findMostFreqTerms(dtm) ## Most frequent terms for the first 10 the second 10 documents, ## respectively: findMostFreqTerms(dtm, INDEX = rep(1 : 2, each = 10L)) } tm/man/tm_map.Rd0000644000175100001440000000412214656644634013250 0ustar hornikusers\name{tm_map} \alias{tm_map} \alias{tm_map.VCorpus} \alias{tm_map.SimpleCorpus} \alias{tm_map.PCorpus} \title{Transformations on Corpora} \description{ Interface to apply transformation functions (also denoted as mappings) to corpora. } \usage{ \method{tm_map}{PCorpus}(x, FUN, \dots) \method{tm_map}{SimpleCorpus}(x, FUN, \dots) \method{tm_map}{VCorpus}(x, FUN, \dots, lazy = FALSE) } \arguments{ \item{x}{A corpus.} \item{FUN}{a transformation function taking a text document (a character vector when \code{x} is a \code{SimpleCorpus}) as input and returning a text document (a character vector of the same length as the input vector for \code{SimpleCorpus}). The function \code{\link{content_transformer}} can be used to create a wrapper to get and set the content of text documents.} \item{\dots}{arguments to \code{FUN}.} \item{lazy}{a logical. Lazy mappings are mappings which are delayed until the content is accessed. It is useful for large corpora if only few documents will be accessed. In such a case it avoids the computationally expensive application of the mapping to all elements in the corpus.} } \value{ A corpus with \code{FUN} applied to each document in \code{x}. In case of lazy mappings only internal flags are set. Access of individual documents triggers the execution of the corresponding transformation function. } \seealso{ \code{\link{getTransformations}} for available transformations. } \note{ Lazy transformations change \R's standard evaluation semantics. } \examples{ data("crude") ## Document access triggers the stemming function ## (i.e., all other documents are not stemmed yet) if(requireNamespace("SnowballC")) { tm_map(crude, stemDocument, lazy = TRUE)[[1]] } ## Use wrapper to apply character processing function tm_map(crude, content_transformer(tolower)) ## Generate a custom transformation function which takes the heading as new content headings <- function(x) PlainTextDocument(meta(x, "heading"), id = meta(x, "id"), language = meta(x, "language")) inspect(tm_map(crude, headings)) } tm/man/weightTfIdf.Rd0000644000175100001440000000267613025174645014200 0ustar hornikusers\name{weightTfIdf} \alias{weightTfIdf} \title{Weight by Term Frequency - Inverse Document Frequency} \description{ Weight a term-document matrix by term frequency - inverse document frequency. } \usage{ weightTfIdf(m, normalize = TRUE) } \arguments{ \item{m}{A \code{\link{TermDocumentMatrix}} in term frequency format.} \item{normalize}{A Boolean value indicating whether the term frequencies should be normalized.} } \details{ Formally this function is of class \code{WeightingFunction} with the additional attributes \code{name} and \code{acronym}. \emph{Term frequency} \eqn{\mathit{tf}_{i,j}} counts the number of occurrences \eqn{n_{i,j}} of a term \eqn{t_i} in a document \eqn{d_j}. In the case of normalization, the term frequency \eqn{\mathit{tf}_{i,j}} is divided by \eqn{\sum_k n_{k,j}}. 
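As a usage sketch (anticipating the complete tf-idf definition below), this weighting is typically selected via the \code{weighting} control option when constructing a term-document matrix, e.g.: \preformatted{
data("crude")
tdm <- TermDocumentMatrix(crude,
                          control = list(weighting = weightTfIdf))
inspect(tdm[1:5, 1:5])
## without term normalization:
## control = list(weighting = function(x) weightTfIdf(x, normalize = FALSE))
}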
\emph{Inverse document frequency} for a term \eqn{t_i} is defined as \deqn{\mathit{idf}_i = \log_2 \frac{|D|}{|\{d \mid t_i \in d\}|}} where \eqn{|D|} denotes the total number of documents and where \eqn{|\{d \mid t_i \in d\}|} is the number of documents where the term \eqn{t_i} appears. \emph{Term frequency - inverse document frequency} is now defined as \eqn{\mathit{tf}_{i,j} \cdot \mathit{idf}_i}. } \value{ The weighted matrix. } \references{ Gerard Salton and Christopher Buckley (1988). Term-weighting approaches in automatic text retrieval. \emph{Information Processing and Management}, \bold{24}/5, 513--523. } tm/man/inspect.Rd0000644000175100001440000000120212623274522013417 0ustar hornikusers\name{inspect} \alias{inspect} \alias{inspect.PCorpus} \alias{inspect.TermDocumentMatrix} \alias{inspect.TextDocument} \alias{inspect.VCorpus} \title{Inspect Objects} \description{ Inspect, i.e., display detailed information on a corpus, a term-document matrix, or a text document. } \usage{ \method{inspect}{PCorpus}(x) \method{inspect}{VCorpus}(x) \method{inspect}{TermDocumentMatrix}(x) \method{inspect}{TextDocument}(x) } \arguments{ \item{x}{Either a corpus, a term-document matrix, or a text document.} } \examples{ data("crude") inspect(crude[1:3]) inspect(crude[[1]]) tdm <- TermDocumentMatrix(crude)[1:10, 1:10] inspect(tdm) } tm/man/DirSource.Rd0000644000175100001440000000336312326753523013666 0ustar hornikusers\name{DirSource} \alias{DirSource} \title{Directory Source} \description{ Create a directory source. } \usage{ DirSource(directory = ".", encoding = "", pattern = NULL, recursive = FALSE, ignore.case = FALSE, mode = "text") } \arguments{ \item{directory}{A character vector of full path names; the default corresponds to the working directory \code{getwd()}.} \item{encoding}{a character string describing the current encoding. It is passed to \code{\link{iconv}} to convert the input to UTF-8.} \item{pattern}{an optional regular expression. Only file names which match the regular expression will be returned.} \item{recursive}{logical. Should the listing recurse into directories?} \item{ignore.case}{logical. Should pattern-matching be case-insensitive?} \item{mode}{a character string specifying if and how files should be read in. Available modes are: \describe{ \item{\code{""}}{No read. In this case \code{\link{getElem}} and \code{\link{pGetElem}} only deliver \acronym{URI}s.} \item{\code{"binary"}}{Files are read in binary raw mode (via \code{\link{readBin}}).} \item{\code{"text"}}{Files are read as text (via \code{\link{readLines}}).} } } } \details{ A \emph{directory source} acquires a list of files via \code{\link{dir}} and interprets each file as a document. } \value{ An object inheriting from \code{DirSource}, \code{\link{SimpleSource}}, and \code{\link{Source}}. } \seealso{ \code{\link{Source}} for basic information on the source infrastructure employed by package \pkg{tm}. \code{\link{Encoding}} and \code{\link{iconv}} on encodings. } \examples{ DirSource(system.file("texts", "txt", package = "tm")) } tm/man/readDataframe.Rd0000644000175100001440000000206413110235235014467 0ustar hornikusers\name{readDataframe} \alias{readDataframe} \title{Read In a Text Document from a Data Frame} \description{ Read in a text document from a row in a data frame. } \usage{ readDataframe(elem, language, id) } \arguments{ \item{elem}{a named list with the component \code{content} which must hold a data frame with rows as the documents to be read in. 
The names of the columns holding the text content and the document identifier must be \code{"text"} and \code{"doc_id"}, respectively.} \item{language}{a string giving the language.} \item{id}{Not used.} } \value{ A \code{\link{PlainTextDocument}} representing \code{elem$content}. } \seealso{ \code{\link{Reader}} for basic information on the reader infrastructure employed by package \pkg{tm}. } \examples{ docs <- data.frame(doc_id = c("doc_1", "doc_2"), text = c("This is a text.", "This another one."), stringsAsFactors = FALSE) ds <- DataframeSource(docs) elem <- getElem(stepNext(ds)) result <- readDataframe(elem, "en", NULL) inspect(result) meta(result) } tm/man/stemCompletion.Rd0000644000175100001440000000270613177025300014757 0ustar hornikusers\name{stemCompletion} \alias{stemCompletion} \title{Complete Stems} \description{ Heuristically complete stemmed words. } \usage{ stemCompletion(x, dictionary, type = c("prevalent", "first", "longest", "none", "random", "shortest")) } \arguments{ \item{x}{A character vector of stems to be completed.} \item{dictionary}{A \code{\link{Corpus}} or character vector to be searched for possible completions.} \item{type}{A \code{character} naming the heuristics to be used: \describe{ \item{\code{prevalent}}{Default. Takes the most frequent match as completion.} \item{\code{first}}{Takes the first found completion.} \item{\code{longest}}{Takes the longest completion in terms of characters.} \item{\code{none}}{Is the identity.} \item{\code{random}}{Takes some completion.} \item{\code{shortest}}{Takes the shortest completion in terms of characters.} } } } \value{ A character vector with completed words. } \examples{ data("crude") stemCompletion(c("compan", "entit", "suppl"), crude) } \references{ Ingo Feinerer (2010). Analysis and Algorithms for Stemming Inversion. \emph{Information Retrieval Technology --- 6th Asia Information Retrieval Societies Conference, AIRS 2010, Taipei, Taiwan, December 1--3, 2010. Proceedings}, volume 6458 of \emph{Lecture Notes in Computer Science}, pages 290--299. Springer-Verlag, December 2010. } tm/man/findFreqTerms.Rd0000644000175100001440000000150712262761010014523 0ustar hornikusers\name{findFreqTerms} \alias{findFreqTerms} \title{Find Frequent Terms} \description{ Find frequent terms in a document-term or term-document matrix. } \usage{ findFreqTerms(x, lowfreq = 0, highfreq = Inf) } \arguments{ \item{x}{A \code{\link{DocumentTermMatrix}} or \code{\link{TermDocumentMatrix}}.} \item{lowfreq}{A numeric for the lower frequency bound.} \item{highfreq}{A numeric for the upper frequency bound.} } \value{ A character vector of terms in \code{x} which occur more or equal often than \code{lowfreq} times and less or equal often than \code{highfreq} times. } \details{This method works for all numeric weightings but is probably most meaningful for the standard term frequency (\code{tf}) weighting of \code{x}.} \examples{ data("crude") tdm <- TermDocumentMatrix(crude) findFreqTerms(tdm, 2, 3) } tm/man/stripWhitespace.Rd0000644000175100001440000000122712324523350015131 0ustar hornikusers\name{stripWhitespace} \alias{stripWhitespace} \alias{stripWhitespace.PlainTextDocument} \title{Strip Whitespace from a Text Document} \description{ Strip extra whitespace from a text document. Multiple whitespace characters are collapsed to a single blank. 
} \usage{ \method{stripWhitespace}{PlainTextDocument}(x, \dots) } \arguments{ \item{x}{A text document.} \item{\dots}{Not used.} } \value{ The text document with multiple whitespace characters collapsed to a single blank. } \seealso{ \code{\link{getTransformations}} to list available transformation (mapping) functions. } \examples{ data("crude") crude[[1]] stripWhitespace(crude[[1]]) } tm/man/termFreq.Rd0000644000175100001440000001101414656644362013554 0ustar hornikusers\name{termFreq} \alias{termFreq} \title{Term Frequency Vector} \description{ Generate a term frequency vector from a text document. } \usage{ termFreq(doc, control = list()) } \arguments{ \item{doc}{An object inheriting from \code{\link{TextDocument}} or a character vector.} \item{control}{A list of control options which override default settings. First, following two options are processed. \describe{ \item{\code{tokenize}}{A function tokenizing a \code{\link{TextDocument}} into single tokens, a \code{\link[NLP]{Span_Tokenizer}}, \code{\link[NLP]{Token_Tokenizer}}, or a string matching one of the predefined tokenization functions: \describe{ \item{\code{"Boost"}}{for \code{\link{Boost_tokenizer}}, or} \item{\code{"MC"}}{for \code{\link{MC_tokenizer}}, or} \item{\code{"scan"}}{for \code{\link{scan_tokenizer}}, or} \item{\code{"words"}}{for \code{\link[NLP]{words}}.} } Defaults to \code{\link[NLP]{words}}.} \item{\code{tolower}}{Either a logical value indicating whether characters should be translated to lower case or a custom function converting characters to lower case. Defaults to \code{\link{tolower}}.} } Next, a set of options which are sensitive to the order of occurrence in the \code{control} list. Options are processed in the same order as specified. User-specified options have precedence over the default ordering so that first all user-specified options and then all remaining options (with the default settings and in the order as listed below) are processed. \describe{ \item{\code{language}}{A character giving the language (preferably as \acronym{IETF} language tags, see \link[NLP]{language} in package \pkg{NLP}) to be used for \code{stopwords} and \code{stemming} if not provided by \code{doc}.} \item{\code{removePunctuation}}{A logical value indicating whether punctuation characters should be removed from \code{doc}, a custom function which performs punctuation removal, or a list of arguments for \code{\link{removePunctuation}}. Defaults to \code{FALSE}.} \item{\code{removeNumbers}}{A logical value indicating whether numbers should be removed from \code{doc} or a custom function for number removal. Defaults to \code{FALSE}.} \item{\code{stopwords}}{Either a Boolean value indicating stopword removal using default language specific stopword lists shipped with this package, a character vector holding custom stopwords, or a custom function for stopword removal. Defaults to \code{FALSE}.} \item{\code{stemming}}{Either a Boolean value indicating whether tokens should be stemmed or a custom stemming function. Defaults to \code{FALSE}.} } Finally, following options are processed in the given order. \describe{ \item{\code{dictionary}}{A character vector to be tabulated against. No other terms will be listed in the result. Defaults to \code{NULL} which means that all terms in \code{doc} are listed.} \item{\code{bounds}}{A list with a tag \code{local} whose value must be an integer vector of length 2. 
Terms that appear less often in \code{doc} than the lower bound \code{bounds$local[1]} or more often than the upper bound \code{bounds$local[2]} are discarded. Defaults to \code{list(local = c(1, Inf))} (i.e., every token will be used).} \item{\code{wordLengths}}{An integer vector of length 2. Words shorter than the minimum word length \code{wordLengths[1]} or longer than the maximum word length \code{wordLengths[2]} are discarded. Defaults to \code{c(3, Inf)}, i.e., a minimum word length of 3 characters.} } } } \value{ A table of class \code{c("term_frequency", "integer")} with term frequencies as values and tokens as names. } \seealso{ \code{\link{getTokenizers}} } \examples{ data("crude") termFreq(crude[[14]]) if(requireNamespace("SnowballC")) { strsplit_space_tokenizer <- function(x) unlist(strsplit(as.character(x), "[[:space:]]+")) ctrl <- list(tokenize = strsplit_space_tokenizer, removePunctuation = list(preserve_intra_word_dashes = TRUE), stopwords = c("reuter", "that"), stemming = TRUE, wordLengths = c(4, Inf)) termFreq(crude[[14]], control = ctrl) } } \keyword{math} tm/man/crude.Rd0000644000175100001440000000236314573616004013066 0ustar hornikusers\name{crude} \docType{data} \alias{crude} \title{20 Exemplary News Articles from the Reuters-21578 Data Set of Topic crude} \description{ This data set holds 20 news articles with additional meta information from the Reuters-21578 data set. All documents belong to the topic \code{crude} dealing with crude oil. } \usage{data("crude")} \format{A \code{\link{VCorpus}} of 20 text documents.} \source{Reuters-21578 Text Categorization Collection Distribution 1.0 (\acronym{XML} format).} \references{ %% Emms, Martin and Luz, Saturnino (2007). %% Machine Learning for Natural Language Processing. %% \emph{European Summer School of Logic, Language and Information, %% course reader}. %% \url{http://ronaldo.cs.tcd.ie/esslli07/mlfornlp.pdf} %% \url{http://www.homepages.ed.ac.uk/sluzfil/esslli07/mlfornlp.pdf} Lewis, David (1997). \emph{Reuters-21578 Text Categorization Collection Distribution}. UCI Machine Learning Repository. \doi{10.24432/C52G6M}. %% Luz, Saturnino \emph{\acronym{XML}-encoded version of Reuters-21578}. %% \url{http://ronaldo.cs.tcd.ie/esslli07/data/reuters21578-xml/} %% \url{http://www.homepages.ed.ac.uk/sluzfil/esslli07/data/reuters21578-xml.tar.bz2} } \examples{ data("crude") crude } \keyword{datasets} tm/man/tm_filter.Rd0000644000175100001440000000220013667334116013743 0ustar hornikusers\name{tm_filter} \alias{tm_filter} \alias{tm_filter.PCorpus} \alias{tm_filter.SimpleCorpus} \alias{tm_filter.VCorpus} \alias{tm_index} \alias{tm_index.PCorpus} \alias{tm_index.SimpleCorpus} \alias{tm_index.VCorpus} \title{Filter and Index Functions on Corpora} \description{ Interface to apply filter and index functions to corpora. } \usage{ \method{tm_filter}{PCorpus}(x, FUN, \dots) \method{tm_filter}{SimpleCorpus}(x, FUN, \dots) \method{tm_filter}{VCorpus}(x, FUN, \dots) \method{tm_index}{PCorpus}(x, FUN, \dots) \method{tm_index}{SimpleCorpus}(x, FUN, \dots) \method{tm_index}{VCorpus}(x, FUN, \dots) } \arguments{ \item{x}{A corpus.} \item{FUN}{a filter function taking a text document or a string (if \code{x} is a \code{SimpleCorpus}) as input and returning the logical value \code{TRUE} or \code{FALSE}.} \item{\dots}{arguments to \code{FUN}.} } \value{ \code{tm_filter} returns a corpus containing documents where \code{FUN} matches, whereas \code{tm_index} only returns the corresponding indices. 
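For instance, the index-based counterpart of the full-text search shown in the examples can be used to subset the corpus manually (a small sketch using the bundled \code{crude} data):
\preformatted{
  data("crude")
  idx <- tm_index(crude, FUN = function(x) any(grep("co[m]?pany", content(x))))
  crude[idx]
}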
} \examples{ data("crude") # Full-text search tm_filter(crude, FUN = function(x) any(grep("co[m]?pany", content(x)))) } tm/man/weightTf.Rd0000644000175100001440000000101313025174645013535 0ustar hornikusers\name{weightTf} \alias{weightTf} \title{Weight by Term Frequency} \description{ Weight a term-document matrix by term frequency. } \usage{ weightTf(m) } \arguments{ \item{m}{A \code{\link{TermDocumentMatrix}} in term frequency format.} } \details{ Formally this function is of class \code{WeightingFunction} with the additional attributes \code{name} and \code{acronym}. This function acts as the identity function since the input matrix is already in term frequency format. } \value{ The weighted matrix. } tm/man/readPlain.Rd0000644000175100001440000000164612342614656013671 0ustar hornikusers\name{readPlain} \alias{readPlain} \title{Read In a Text Document} \description{ Read in a text document without knowledge about its internal structure and possible available metadata. } \usage{ readPlain(elem, language, id) } \arguments{ \item{elem}{a named list with the component \code{content} which must hold the document to be read in.} \item{language}{a string giving the language.} \item{id}{a character giving a unique identifier for the created text document.} } \value{ A \code{\link{PlainTextDocument}} representing \code{elem$content}. The argument \code{id} is used as fallback if \code{elem$uri} is null. } \seealso{ \code{\link{Reader}} for basic information on the reader infrastructure employed by package \pkg{tm}. } \examples{ docs <- c("This is a text.", "This another one.") vs <- VectorSource(docs) elem <- getElem(stepNext(vs)) (result <- readPlain(elem, "en", "id1")) meta(result) } tm/man/meta.Rd0000644000175100001440000000633313754752752012726 0ustar hornikusers\name{meta} \alias{DublinCore} \alias{DublinCore<-} \alias{meta} \alias{meta.PCorpus} \alias{meta.SimpleCorpus} \alias{meta.VCorpus} \alias{meta<-.PCorpus} \alias{meta<-.SimpleCorpus} \alias{meta<-.VCorpus} \alias{meta.PlainTextDocument} \alias{meta<-.PlainTextDocument} \alias{meta.XMLTextDocument} \alias{meta<-.XMLTextDocument} \title{Metadata Management} \description{ Accessing and modifying metadata of text documents and corpora. } \usage{ \method{meta}{PCorpus}(x, tag = NULL, type = c("indexed", "corpus", "local"), \dots) \method{meta}{PCorpus}(x, tag, type = c("indexed", "corpus", "local"), \dots) <- value \method{meta}{SimpleCorpus}(x, tag = NULL, type = c("indexed", "corpus"), \dots) \method{meta}{SimpleCorpus}(x, tag, type = c("indexed", "corpus"), \dots) <- value \method{meta}{VCorpus}(x, tag = NULL, type = c("indexed", "corpus", "local"), \dots) \method{meta}{VCorpus}(x, tag, type = c("indexed", "corpus", "local"), \dots) <- value \method{meta}{PlainTextDocument}(x, tag = NULL, \dots) \method{meta}{PlainTextDocument}(x, tag = NULL, \dots) <- value \method{meta}{XMLTextDocument}(x, tag = NULL, \dots) \method{meta}{XMLTextDocument}(x, tag = NULL, \dots) <- value DublinCore(x, tag = NULL) DublinCore(x, tag) <- value } \arguments{ \item{x}{For \code{DublinCore} a \code{\link{TextDocument}}, and for \code{meta} a \code{\link{TextDocument}} or a \code{\link{Corpus}}.} \item{tag}{a character giving the name of a metadatum. No tag corresponds to all available metadata.} \item{type}{a character specifying the kind of corpus metadata (see \bold{Details}).} \item{\dots}{Not used.} \item{value}{replacement value.} } \details{ A corpus has two types of metadata. 
\emph{Corpus metadata} (\code{"corpus"}) contains corpus specific metadata in form of tag-value pairs. \emph{Document level metadata} (\code{"indexed"}) contains document specific metadata but is stored in the corpus as a data frame. Document level metadata is typically used for semantic reasons (e.g., classifications of documents form an own entity due to some high-level information like the range of possible values) or for performance reasons (single access instead of extracting metadata of each document). The latter can be seen as a from of indexing, hence the name \code{"indexed"}. \emph{Document metadata} (\code{"local"}) are tag-value pairs directly stored locally at the individual documents. \code{DublinCore} is a convenience wrapper to access and modify the metadata of a text document using the Simple Dublin Core schema (supporting the 15 metadata elements from the Dublin Core Metadata Element Set \url{https://dublincore.org/documents/dces/}). } \seealso{ \code{\link[NLP]{meta}} for metadata in package \pkg{NLP}. } \references{ Dublin Core Metadata Initiative. \url{https://dublincore.org/} } \examples{ data("crude") meta(crude[[1]]) DublinCore(crude[[1]]) meta(crude[[1]], tag = "topics") meta(crude[[1]], tag = "comment") <- "A short comment." meta(crude[[1]], tag = "topics") <- NULL DublinCore(crude[[1]], tag = "creator") <- "Ano Nymous" DublinCore(crude[[1]], tag = "format") <- "XML" DublinCore(crude[[1]]) meta(crude[[1]]) meta(crude) meta(crude, type = "corpus") meta(crude, "labels") <- 21:40 meta(crude) } tm/man/weightSMART.Rd0000644000175100001440000000654213025174645014066 0ustar hornikusers\name{weightSMART} \alias{weightSMART} \title{SMART Weightings} \encoding{UTF-8} \description{ Weight a term-document matrix according to a combination of weights specified in SMART notation. } \usage{ weightSMART(m, spec = "nnn", control = list()) } \arguments{ \item{m}{A \code{\link{TermDocumentMatrix}} in term frequency format.} \item{spec}{a character string consisting of three characters. The first letter specifies a term frequency schema, the second a document frequency schema, and the third a normalization schema. See \bold{Details} for available built-in schemata.} \item{control}{a list of control parameters. See \bold{Details}.} } \details{ Formally this function is of class \code{WeightingFunction} with the additional attributes \code{name} and \code{acronym}. The first letter of \code{spec} specifies a weighting schema for term frequencies of \code{m}: \describe{ \item{"n"}{(natural) \eqn{\mathit{tf}_{i,j}} counts the number of occurrences \eqn{n_{i,j}} of a term \eqn{t_i} in a document \eqn{d_j}. 
The input term-document matrix \code{m} is assumed to be in this standard term frequency format already.} \item{"l"}{(logarithm) is defined as \eqn{1 + \log_2(\mathit{tf}_{i,j})}.} \item{"a"}{(augmented) is defined as \eqn{0.5 + \frac{0.5 * \mathit{tf}_{i,j}}{\max_i(\mathit{tf}_{i,j})}}.} \item{"b"}{(boolean) is defined as 1 if \eqn{\mathit{tf}_{i,j} > 0} and 0 otherwise.} \item{"L"}{(log average) is defined as \eqn{\frac{1 + \log_2(\mathit{tf}_{i,j})}{1+\log_2(\mathrm{ave}_{i\in j}(\mathit{tf}_{i,j}))}}.} } The second letter of \code{spec} specifies a weighting schema of document frequencies for \code{m}: \describe{ \item{"n"}{(no) is defined as 1.} \item{"t"}{(idf) is defined as \eqn{\log_2 \frac{N}{\mathit{df}_t}} where \eqn{\mathit{df}_t} denotes how often term \eqn{t} occurs in all documents.} \item{"p"}{(prob idf) is defined as \eqn{\max(0, \log_2(\frac{N - \mathit{df}_t}{\mathit{df}_t}))}.} } The third letter of \code{spec} specifies a schema for normalization of \code{m}: \describe{ \item{"n"}{(none) is defined as 1.} \item{"c"}{(cosine) is defined as \eqn{\sqrt{\mathrm{col\_sums}(m ^ 2)}}.} \item{"u"}{(pivoted unique) is defined as \eqn{\mathit{slope} * \sqrt{\mathrm{col\_sums}(m ^ 2)} + (1 - \mathit{slope}) * \mathit{pivot}} where both \code{slope} and \code{pivot} must be set via named tags in the \code{control} list.} \item{"b"}{(byte size) is defined as \eqn{\frac{1}{\mathit{CharLength}^\alpha}}. The parameter \eqn{\alpha} must be set via the named tag \code{alpha} in the \code{control} list.} } The final result is defined by multiplication of the chosen term frequency component with the chosen document frequency component with the chosen normalization component. } \value{ The weighted matrix. } \references{ Christopher D. Manning and Prabhakar Raghavan and Hinrich Schütze (2008). \emph{Introduction to Information Retrieval}. Cambridge University Press, ISBN 0521865719. } \examples{ data("crude") TermDocumentMatrix(crude, control = list(removePunctuation = TRUE, stopwords = TRUE, weighting = function(x) weightSMART(x, spec = "ntc"))) } tm/man/Reader.Rd0000644000175100001440000000366313176776017013203 0ustar hornikusers\name{Reader} \alias{FunctionGenerator} \alias{Reader} \alias{getReaders} \title{Readers} \description{ Creating readers. } \usage{ getReaders() } \details{ \emph{Readers} are functions for extracting textual content and metadata out of elements delivered by a \code{\link{Source}}, and for constructing a \code{\link{TextDocument}}. A reader must accept following arguments in its signature: \describe{ \item{\code{elem}}{a named list with the components \code{content} and \code{uri} (as delivered by a \code{\link{Source}} via \code{\link{getElem}} or \code{\link{pGetElem}}).} \item{\code{language}}{a character string giving the language.} \item{\code{id}}{a character giving a unique identifier for the created text document.} } The element \code{elem} is typically provided by a source whereas the language and the identifier are normally provided by a corpus constructor (for the case that \code{elem$content} does not give information on these two essential items). In case a reader expects configuration arguments we can use a function generator. A function generator is indicated by inheriting from class \code{FunctionGenerator} and \code{function}. It allows us to process additional arguments, store them in an environment, return a reader function with the well-defined signature described above, and still be able to access the additional arguments via lexical scoping. 
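A minimal sketch of such a function generator (\code{myParam} is a made-up configuration argument, and the generated reader simply delegates to \code{\link{readPlain}}):
\preformatted{
  myReader <- function(myParam = TRUE) {
      function(elem, language, id) {
          # myParam remains accessible here via lexical scoping
          readPlain(elem, language, id)
      }
  }
  class(myReader) <- c("FunctionGenerator", "function")
}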
All corpus constructors in package \pkg{tm} check the reader function for being a function generator and if so apply it to yield the reader with the expected signature. } \value{ For \code{getReaders()}, a character vector with readers provided by package \pkg{tm}. } \seealso{ \code{\link{readDOC}}, \code{\link{readPDF}}, \code{\link{readPlain}}, \code{\link{readRCV1}}, \code{\link{readRCV1asPlain}}, \code{\link{readReut21578XML}}, \code{\link{readReut21578XMLasPlain}}, and \code{\link{readXML}}. } tm/man/acq.Rd0000644000175100001440000000236114573616043012531 0ustar hornikusers\name{acq} \docType{data} \alias{acq} \title{50 Exemplary News Articles from the Reuters-21578 Data Set of Topic acq} \description{ This dataset holds 50 news articles with additional meta information from the Reuters-21578 data set. All documents belong to the topic \code{acq} dealing with corporate acquisitions. } \usage{data("acq")} \format{A \code{\link{VCorpus}} of 50 text documents.} \source{Reuters-21578 Text Categorization Collection Distribution 1.0 (\acronym{XML} format).} \references{ %% Emms, Martin and Luz, Saturnino (2007). %% Machine Learning for Natural Language Processing. %% \emph{European Summer School of Logic, Language and Information, %% course reader}. %% \url{http://ronaldo.cs.tcd.ie/esslli07/mlfornlp.pdf} %% \url{http://www.homepages.ed.ac.uk/sluzfil/esslli07/mlfornlp.pdf} Lewis, David (1997). \emph{Reuters-21578 Text Categorization Collection Distribution}. UCI Machine Learning Repository. \doi{10.24432/C52G6M}. %% Luz, Saturnino \emph{\acronym{XML}-encoded version of Reuters-21578}. %% \url{http://ronaldo.cs.tcd.ie/esslli07/data/reuters21578-xml/} %% \url{http://www.homepages.ed.ac.uk/sluzfil/esslli07/data/reuters21578-xml.tar.bz2} } \examples{ data("acq") acq } \keyword{datasets} tm/man/WeightFunction.Rd0000644000175100001440000000142612324523350014711 0ustar hornikusers\name{WeightFunction} \alias{WeightFunction} \title{Weighting Function} \description{ Construct a weighting function for term-document matrices. } \usage{ WeightFunction(x, name, acronym) } \arguments{ \item{x}{A function which takes a \code{\link{TermDocumentMatrix}} with term frequencies as input, weights the elements, and returns the weighted matrix.} \item{name}{A character naming the weighting function.} \item{acronym}{A character giving an acronym for the name of the weighting function.} } \value{ An object of class \code{WeightFunction} which extends the class \code{function} representing a weighting function. } \examples{ weightCutBin <- WeightFunction(function(m, cutoff) m > cutoff, "binary with cutoff", "bincut") } tm/man/readDOC.Rd0000644000175100001440000000362214367741526013235 0ustar hornikusers\name{readDOC} \alias{readDOC} \title{Read In a MS Word Document} \description{ Return a function which reads in a Microsoft Word document extracting its text. } \usage{ readDOC(engine = c("antiword", "executable"), AntiwordOptions = "") } \arguments{ \item{engine}{a character string for the preferred \acronym{DOC} extraction engine (see \bold{Details}).} \item{AntiwordOptions}{Options passed over to \command{antiword} executable.} } \details{ Formally this function is a function generator, i.e., it returns a function (which reads in a text document) with a well-defined signature, but can access passed over arguments (e.g., options to \command{antiword}) via lexical scoping. Available \acronym{DOC} extraction engines are as follows. 
\describe{ \item{\code{"antiword"}}{(default) Antiword utility as provided by the function \code{\link[antiword]{antiword}} in package \pkg{antiword}.} \item{\code{"executable"}}{command line \command{antiword} executable which must be installed and accessible on your system. This can convert documents from Microsoft Word version 2, 6, 7, 97, 2000, 2002 and 2003 to plain text. %% As of 2023-02-05, http://www.winfield.demon.nl/ does not work. %% , and is available from \url{http://www.winfield.demon.nl/}. The character vector \code{AntiwordOptions} is passed over to the executable.} } } \value{ A \code{function} with the following formals: \describe{ \item{\code{elem}}{a list with the named component \code{uri} which must hold a valid file name.} \item{\code{language}}{a string giving the language.} \item{\code{id}}{Not used.} } The function returns a \code{\link{PlainTextDocument}} representing the text and metadata extracted from \code{elem$uri}. } \seealso{ \code{\link{Reader}} for basic information on the reader infrastructure employed by package \pkg{tm}. } tm/man/readReut21578XML.Rd0000644000175100001440000000300714573616025014526 0ustar hornikusers\name{readReut21578XML} \alias{readReut21578XML} \alias{readReut21578XMLasPlain} \title{Read In a Reuters-21578 XML Document} \description{ Read in a Reuters-21578 \acronym{XML} document. } \usage{ readReut21578XML(elem, language, id) readReut21578XMLasPlain(elem, language, id) } \arguments{ \item{elem}{a named list with the component \code{content} which must hold the document to be read in.} \item{language}{a string giving the language.} \item{id}{Not used.} } \value{ An \code{\link{XMLTextDocument}} for \code{readReut21578XML}, or a \code{\link{PlainTextDocument}} for \code{readReut21578XMLasPlain}, representing the text and metadata extracted from \code{elem$content}. } \references{ %% Emms, Martin and Luz, Saturnino (2007). %% Machine Learning for Natural Language Processing. %% \emph{European Summer School of Logic, Language and Information, %% course reader}. %% \url{http://ronaldo.cs.tcd.ie/esslli07/mlfornlp.pdf} %% \url{http://www.homepages.ed.ac.uk/sluzfil/esslli07/mlfornlp.pdf} Lewis, David (1997). \emph{Reuters-21578 Text Categorization Collection Distribution}. UCI Machine Learning Repository. \doi{10.24432/C52G6M}. %% Luz, Saturnino \emph{\acronym{XML}-encoded version of Reuters-21578}. %% \url{http://ronaldo.cs.tcd.ie/esslli07/data/reuters21578-xml/} %% \url{http://www.homepages.ed.ac.uk/sluzfil/esslli07/data/reuters21578-xml.tar.bz2} } \seealso{ \code{\link{Reader}} for basic information on the reader infrastructure employed by package \pkg{tm}. } tm/man/XMLTextDocument.Rd0000644000175100001440000000422613754752650015000 0ustar hornikusers\name{XMLTextDocument} \alias{XMLTextDocument} \title{XML Text Documents} \description{ Create \acronym{XML} text documents. } \usage{ XMLTextDocument(x = xml_missing(), author = character(0), datetimestamp = as.POSIXlt(Sys.time(), tz = "GMT"), description = character(0), heading = character(0), id = character(0), language = character(0), origin = character(0), \dots, meta = NULL) } \arguments{ \item{x}{An \code{\link[xml2:read_xml]{XMLDocument}}.} \item{author}{a character or an object of class \code{\link{person}} giving the author names.} \item{datetimestamp}{an object of class \code{\link{POSIXt}} or a character string giving the creation date/time information. 
If a character string, exactly one of the \acronym{ISO} 8601 formats defined by \url{https://www.w3.org/TR/NOTE-datetime} should be used. See \code{\link[NLP]{parse_ISO_8601_datetime}} in package \pkg{NLP} for processing such date/time information. } \item{description}{a character giving a description.} \item{heading}{a character giving the title or a short heading.} \item{id}{a character giving a unique identifier.} \item{language}{a character giving the language (preferably as \acronym{IETF} language tags, see \link[NLP]{language} in package \pkg{NLP}).} \item{origin}{a character giving information on the source and origin.} \item{\dots}{user-defined document metadata tag-value pairs.} \item{meta}{a named list or \code{NULL} (default) giving all metadata. If set all other metadata arguments are ignored.} } \value{ An object inheriting from \code{XMLTextDocument} and \code{\link{TextDocument}}. } \seealso{ \code{\link{TextDocument}} for basic information on the text document infrastructure employed by package \pkg{tm}. } \examples{ xml <- system.file("extdata", "order-doc.xml", package = "xml2") (xtd <- XMLTextDocument(xml2::read_xml(xml), heading = "XML text document", id = xml, language = "en")) content(xtd) meta(xtd) } tm/man/Source.Rd0000644000175100001440000001053414725623647013235 0ustar hornikusers\name{Source} \alias{Source} \alias{SimpleSource} \alias{close.SimpleSource} \alias{eoi} \alias{eoi.SimpleSource} \alias{getMeta} \alias{getMeta.DataframeSource} \alias{getElem} \alias{getElem.DataframeSource} \alias{getElem.DirSource} \alias{getElem.URISource} \alias{getElem.VectorSource} \alias{getElem.XMLSource} \alias{getSources} \alias{length.SimpleSource} \alias{open.SimpleSource} \alias{pGetElem} \alias{pGetElem.DataframeSource} \alias{pGetElem.DirSource} \alias{pGetElem.URISource} \alias{pGetElem.VectorSource} \alias{reader} \alias{reader.SimpleSource} \alias{stepNext} \alias{stepNext.SimpleSource} \title{Sources} \description{ Creating and accessing sources. } \usage{ SimpleSource(encoding = "", length = 0, position = 0, reader = readPlain, \dots, class) getSources() \method{close}{SimpleSource}(con, \dots) \method{eoi}{SimpleSource}(x) \method{getMeta}{DataframeSource}(x) \method{getElem}{DataframeSource}(x) \method{getElem}{DirSource}(x) \method{getElem}{URISource}(x) \method{getElem}{VectorSource}(x) \method{getElem}{XMLSource}(x) \method{length}{SimpleSource}(x) \method{open}{SimpleSource}(con, \dots) \method{pGetElem}{DataframeSource}(x) \method{pGetElem}{DirSource}(x) \method{pGetElem}{URISource}(x) \method{pGetElem}{VectorSource}(x) \method{reader}{SimpleSource}(x) \method{stepNext}{SimpleSource}(x) } \arguments{ \item{x}{A \code{Source}.} \item{con}{A \code{Source}.} \item{encoding}{a character giving the encoding of the elements delivered by the source.} \item{length}{a non-negative integer denoting the number of elements delivered by the source. If the length is unknown in advance set it to \code{0}.} \item{position}{a numeric indicating the current position in the source.} \item{reader}{a reader function (generator).} \item{\dots}{For \code{SimpleSource} tag-value pairs for storing additional information; not used otherwise.} \item{class}{a character vector giving additional classes to be used for the created source.} } \details{ \emph{Sources} abstract input locations, like a directory, a connection, or simply an \R vector, in order to acquire content in a uniform way. 
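For instance, two of the bundled source types can be created as follows (a short illustration):
\preformatted{
  VectorSource(c("a first document", "a second document"))
  DirSource(system.file("texts", "txt", package = "tm"))
}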
In packages which employ the infrastructure provided by package \pkg{tm}, such sources are represented via the virtual S3 class \code{Source}: such packages then provide S3 source classes extending the virtual base class (such as \code{\link{DirSource}} provided by package \pkg{tm} itself). All extension classes must provide implementations for the functions \code{close}, \code{eoi}, \code{getElem}, \code{length}, \code{open}, \code{reader}, and \code{stepNext}. For parallel element access the (optional) function \code{pGetElem} must be provided as well. If document level metadata is available, the (optional) function \code{getMeta} must be implemented. The functions \code{open} and \code{close} open and close the source, respectively. \code{eoi} indicates end of input. \code{getElem} fetches the element at the current position, whereas \code{pGetElem} retrieves all elements in parallel at once. The function \code{length} gives the number of elements. \code{reader} returns a default reader for processing elements. \code{stepNext} increases the position in the source to acquire the next element. The function \code{SimpleSource} provides a simple reference implementation and can be used when creating custom sources. } \value{ For \code{SimpleSource}, an object inheriting from \code{class}, \code{SimpleSource}, and \code{Source}. For \code{getSources}, a character vector with sources provided by package \pkg{tm}. \code{open} and \code{close} return the opened and closed source, respectively. For \code{eoi}, a logical indicating if the end of input of the source is reached. For \code{getElem} a named list with the components \code{content} holding the document and \code{uri} giving a uniform resource identifier (e.g., a file path or \acronym{URL}; \code{NULL} if not applicable or unavailable). For \code{pGetElem} a list of such named lists. For \code{length}, an integer for the number of elements. For \code{reader}, a function for the default reader. } \seealso{ \code{\link{DataframeSource}}, \code{\link{DirSource}}, \code{\link{URISource}}, \code{\link{VectorSource}}, and \code{\link{XMLSource}}. } tm/man/weightBin.Rd0000644000175100001440000000061313025174645013701 0ustar hornikusers\name{weightBin} \alias{weightBin} \title{Weight Binary} \description{ Binary weight a term-document matrix. } \usage{ weightBin(m) } \arguments{ \item{m}{A \code{\link{TermDocumentMatrix}} in term frequency format.} } \details{ Formally this function is of class \code{WeightingFunction} with the additional attributes \code{name} and \code{acronym}. } \value{ The weighted matrix. } tm/man/removeWords.Rd0000644000175100001440000000144712327511431014273 0ustar hornikusers\name{removeWords} \alias{removeWords} \alias{removeWords.character} \alias{removeWords.PlainTextDocument} \title{Remove Words from a Text Document} \description{ Remove words from a text document. } \usage{ \method{removeWords}{character}(x, words) \method{removeWords}{PlainTextDocument}(x, \dots) } \arguments{ \item{x}{A character or text document.} \item{words}{A character vector giving the words to be removed.} \item{\dots}{passed over argument \code{words}.} } \value{ The character or text document without the specified words. } \seealso{ \code{\link{getTransformations}} to list available transformation (mapping) functions. \code{\link[tau]{remove_stopwords}} provided by package \pkg{tau}. 
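Within a corpus, word removal is typically applied via \code{\link{tm_map}}; a brief sketch combining the English stopword list with two further terms chosen for illustration:
\preformatted{
  data("crude")
  tm_map(crude, removeWords, c(stopwords("english"), "reuter", "said"))
}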
} \examples{ data("crude") crude[[1]] removeWords(crude[[1]], stopwords("english")) } tm/man/tm_reduce.Rd0000644000175100001440000000163712355322342013731 0ustar hornikusers\name{tm_reduce} \alias{tm_reduce} \title{Combine Transformations} \description{ Fold multiple transformations (mappings) into a single one. } \usage{ tm_reduce(x, tmFuns, \dots) } \arguments{ \item{x}{A corpus.} \item{tmFuns}{A list of \pkg{tm} transformations.} \item{\dots}{Arguments to the individual transformations.} } \value{ A single \pkg{tm} transformation function obtained by folding \code{tmFuns} from right to left (via \code{Reduce(\dots, right = TRUE)}). } \seealso{ \code{Reduce} for \R's internal folding/accumulation mechanism, and \code{\link{getTransformations}} to list available transformation (mapping) functions. } \examples{ data(crude) crude[[1]] skipWords <- function(x) removeWords(x, c("it", "the")) funs <- list(stripWhitespace, skipWords, removePunctuation, content_transformer(tolower)) tm_map(crude, FUN = tm_reduce, tmFuns = funs)[[1]] } tm/man/stemDocument.Rd0000644000175100001440000000134314656644001014430 0ustar hornikusers\name{stemDocument} \alias{stemDocument} \alias{stemDocument.character} \alias{stemDocument.PlainTextDocument} \title{Stem Words} \description{ Stem words in a text document using Porter's stemming algorithm. } \usage{ \method{stemDocument}{character}(x, language = "english") \method{stemDocument}{PlainTextDocument}(x, language = meta(x, "language")) } \arguments{ \item{x}{A character vector or text document.} \item{language}{A string giving the language for stemming.} } \details{ The argument \code{language} is passed over to \code{\link[SnowballC]{wordStem}} as the name of the Snowball stemmer. } \examples{ data("crude") inspect(crude[[1]]) if(requireNamespace("SnowballC")) { inspect(stemDocument(crude[[1]])) } } tm/man/readPDF.Rd0000644000175100001440000001014413754752607013237 0ustar hornikusers\name{readPDF} \alias{readPDF} \title{Read In a PDF Document} \description{ Return a function which reads in a portable document format (\acronym{PDF}) document extracting both its text and its metadata. } \usage{ readPDF(engine = c("pdftools", "xpdf", "Rpoppler", "ghostscript", "Rcampdf", "custom"), control = list(info = NULL, text = NULL)) } \arguments{ \item{engine}{a character string for the preferred \acronym{PDF} extraction engine (see \bold{Details}).} \item{control}{a list of control options for the engine with the named components \code{info} and \code{text} (see \bold{Details}).} } \details{ Formally this function is a function generator, i.e., it returns a function (which reads in a text document) with a well-defined signature, but can access passed over arguments (e.g., the preferred \acronym{PDF} extraction \code{engine} and \code{control} options) via lexical scoping. Available \acronym{PDF} extraction engines are as follows. \describe{ \item{\code{"pdftools"}}{(default) Poppler \acronym{PDF} rendering library as provided by the functions \code{\link[pdftools]{pdf_info}} and \code{\link[pdftools]{pdf_text}} in package \pkg{pdftools}.} \item{\code{"xpdf"}}{command line \command{pdfinfo} and \command{pdftotext} executables which must be installed and accessible on your system. 
Suitable utilities are provided by the Xpdf (\url{http://www.xpdfreader.com/}) \acronym{PDF} viewer or by the Poppler (\url{https://poppler.freedesktop.org/}) \acronym{PDF} rendering library.} \item{\code{"Rpoppler"}}{Poppler \acronym{PDF} rendering library as provided by the functions \code{\link[Rpoppler]{PDF_info}} and \code{\link[Rpoppler]{PDF_text}} in package \pkg{Rpoppler}.} \item{\code{"ghostscript"}}{Ghostscript using \file{pdf_info.ps} and \file{ps2ascii.ps}.} \item{\code{"Rcampdf"}}{Perl CAM::PDF \acronym{PDF} manipulation library as provided by the functions \code{pdf_info} and \code{pdf_text} in package \pkg{Rcampdf}, available from the repository at \url{http://datacube.wu.ac.at}.} \item{\code{"custom"}}{custom user-provided extraction engine.} } Control parameters for engine \code{"xpdf"} are as follows. \describe{ \item{\code{info}}{a character vector specifying options passed over to the \command{pdfinfo} executable.} \item{\code{text}}{a character vector specifying options passed over to the \command{pdftotext} executable.} } Control parameters for engine \code{"custom"} are as follows. \describe{ \item{\code{info}}{a function extracting metadata from a \acronym{PDF}. The function must accept a file path as first argument and must return a named list with the components \code{Author} (as character string), \code{CreationDate} (of class \code{POSIXlt}), \code{Subject} (as character string), \code{Title} (as character string), and \code{Creator} (as character string).} \item{\code{text}}{a function extracting content from a \acronym{PDF}. The function must accept a file path as first argument and must return a character vector.} } } \value{ A \code{function} with the following formals: \describe{ \item{\code{elem}}{a named list with the component \code{uri} which must hold a valid file name.} \item{\code{language}}{a string giving the language.} \item{\code{id}}{Not used.} } The function returns a \code{\link{PlainTextDocument}} representing the text and metadata extracted from \code{elem$uri}. } \seealso{ \code{\link{Reader}} for basic information on the reader infrastructure employed by package \pkg{tm}. } \examples{ uri <- paste0("file://", system.file(file.path("doc", "tm.pdf"), package = "tm")) engine <- if(nzchar(system.file(package = "pdftools"))) { "pdftools" } else { "ghostscript" } reader <- readPDF(engine) pdf <- reader(elem = list(uri = uri), language = "en", id = "id1") cat(content(pdf)[1]) VCorpus(URISource(uri, mode = ""), readerControl = list(reader = readPDF(engine = "ghostscript"))) } \keyword{file} tm/man/content_transformer.Rd0000644000175100001440000000141714610730651016054 0ustar hornikusers\name{content_transformer} \alias{content_transformer} \title{Content Transformers} \description{ Create content transformers, i.e., functions which modify the content of an \R object. } \usage{ content_transformer(FUN) } \arguments{ \item{FUN}{a function.} } \value{ A function with two arguments: \describe{ \item{\code{x}}{an \R object with implemented content getter (\code{\link[NLP]{content}}) and setter (\code{\link[NLP]{content<-}}) functions.} \item{\code{\dots}}{arguments passed over to \code{FUN}.} } } \seealso{ \code{\link{tm_map}} for an interface to apply transformations to corpora. 
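A common use is wrapping base functions such as \code{tolower} (a short sketch):
\preformatted{
  data("crude")
  tm_map(crude, content_transformer(tolower))[[1]]
}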
} \examples{ data("crude") crude[[1]] (f <- content_transformer(function(x, pattern) gsub(pattern, "", x))) tm_map(crude, f, "[[:digit:]]+")[[1]] } tm/man/XMLSource.Rd0000644000175100001440000000161013177022735013600 0ustar hornikusers\name{XMLSource} \alias{XMLSource} \title{XML Source} \description{ Create an \acronym{XML} source. } \usage{ XMLSource(x, parser = xml_contents, reader) } \arguments{ \item{x}{a character giving a uniform resource identifier.} \item{parser}{a function accepting an \acronym{XML} document (as delivered by \code{\link[xml2]{read_xml}} in package \pkg{xml2}) as input and returning \acronym{XML} elements/nodes.} \item{reader}{a function capable of turning \acronym{XML} elements/nodes as returned by \code{parser} into a subclass of \code{\link{TextDocument}}.} } \value{ An object inheriting from \code{XMLSource}, \code{\link{SimpleSource}}, and \code{\link{Source}}. } \seealso{ \code{\link{Source}} for basic information on the source infrastructure employed by package \pkg{tm}. Vignette 'Extensions: How to Handle Custom File Formats', and \code{\link{readXML}}. } tm/man/readTagged.Rd0000644000175100001440000000373413177025262014015 0ustar hornikusers\name{readTagged} \alias{readTagged} \title{Read In a POS-Tagged Word Text Document} \description{ Return a function which reads in a text document containing POS-tagged words. } \usage{ readTagged(\dots) } \arguments{ \item{\dots}{Arguments passed to \code{\link[NLP]{TaggedTextDocument}}.} } \details{ Formally this function is a function generator, i.e., it returns a function (which reads in a text document) with a well-defined signature, but can access passed over arguments (\code{\dots}) via lexical scoping. } \value{ A \code{function} with the following formals: \describe{ \item{\code{elem}}{a named list with the component \code{content} which must hold the document to be read in or the component \code{uri} holding a connection object or a character string.} \item{\code{language}}{a string giving the language.} \item{\code{id}}{a character giving a unique identifier for the created text document.} } The function returns a \code{\link[NLP]{TaggedTextDocument}} representing the text and metadata extracted from \code{elem$content} or \code{elem$uri}. The argument \code{id} is used as fallback if \code{elem$uri} is null. } \seealso{ \code{\link{Reader}} for basic information on the reader infrastructure employed by package \pkg{tm}. } \examples{ # See http://www.nltk.org/book/ch05.html or file ca01 in the Brown corpus x <- paste("The/at grand/jj jury/nn commented/vbd on/in a/at number/nn of/in", "other/ap topics/nns ,/, among/in them/ppo the/at Atlanta/np and/cc", "Fulton/np-tl County/nn-tl purchasing/vbg departments/nns which/wdt", "it/pps said/vbd ``/`` are/ber well/ql operated/vbn and/cc follow/vb", "generally/rb accepted/vbn practices/nns which/wdt inure/vb to/in the/at", "best/jjt interest/nn of/in both/abx governments/nns ''/'' ./.") vs <- VectorSource(x) elem <- getElem(stepNext(vs)) (doc <- readTagged()(elem, language = "en", id = "id1")) tagged_words(doc) } tm/man/getTransformations.Rd0000644000175100001440000000107212335713251015645 0ustar hornikusers\name{getTransformations} \alias{getTransformations} \title{Transformations} \description{ Predefined transformations (mappings) which can be used with \code{\link{tm_map}}. } \usage{ getTransformations() } \value{ A character vector with transformations provided by package \pkg{tm}. 
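Custom transformations can be built with \code{\link{content_transformer}}, e.g. (a minimal sketch replacing a pattern by a blank):
\preformatted{
  toSpace <- content_transformer(function(x, pattern) gsub(pattern, " ", x))
  data("crude")
  tm_map(crude, toSpace, "/")[[1]]
}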
} \seealso{ \code{\link{removeNumbers}}, \code{\link{removePunctuation}}, \code{\link{removeWords}}, \code{\link{stemDocument}}, and \code{\link{stripWhitespace}}. \code{\link{content_transformer}} to create custom transformations. } \examples{ getTransformations() } tm/man/plot.Rd0000644000175100001440000000254014714614765012750 0ustar hornikusers\name{plot} \alias{plot.TermDocumentMatrix} \title{Visualize a Term-Document Matrix} \description{ Visualize correlations between terms of a term-document matrix. } \usage{ \method{plot}{TermDocumentMatrix}(x, terms = sample(Terms(x), 20), corThreshold = 0.7, weighting = FALSE, attrs = list(graph = list(rankdir = "BT"), node = list(shape = "rectangle", fixedsize = FALSE)), \dots) } \arguments{ \item{x}{A term-document matrix.} \item{terms}{Terms to be plotted. Defaults to 20 randomly chosen terms of the term-document matrix.} \item{corThreshold}{Do not plot correlations below this threshold. Defaults to \code{0.7}.} \item{weighting}{Define whether the line width corresponds to the correlation.} \item{attrs}{Argument passed to the plot method for class \code{\link[graph]{graphNEL}}.} \item{\dots}{Other arguments passed to the \code{\link[graph]{graphNEL}} plot method.} } \details{ Visualization requires that package \pkg{Rgraphviz} is available. } \examples{ \dontrun{data(crude) tdm <- TermDocumentMatrix(crude, control = list(removePunctuation = TRUE, removeNumbers = TRUE, stopwords = TRUE)) plot(tdm, corThreshold = 0.2, weighting = TRUE)} } tm/man/Corpus.Rd0000644000175100001440000000335412747047144013244 0ustar hornikusers\name{Corpus} \alias{Corpus} \title{Corpora} \description{ Representing and computing on corpora. } \details{ \emph{Corpora} are collections of documents containing (natural language) text. In packages which employ the infrastructure provided by package \pkg{tm}, such corpora are represented via the virtual S3 class \code{Corpus}: such packages then provide S3 corpus classes extending the virtual base class (such as \code{\link{VCorpus}} provided by package \pkg{tm} itself). All extension classes must provide accessors to extract subsets (\code{\link{[}}), individual documents (\code{\link{[[}}), and metadata (\code{\link{meta}}). The function \code{\link{length}} must return the number of documents, and \code{\link{as.list}} must construct a list holding the documents. A corpus can have two types of metadata (accessible via \code{\link{meta}}). \emph{Corpus metadata} contains corpus specific metadata in form of tag-value pairs. \emph{Document level metadata} contains document specific metadata but is stored in the corpus as a data frame. Document level metadata is typically used for semantic reasons (e.g., classifications of documents form an own entity due to some high-level information like the range of possible values) or for performance reasons (single access instead of extracting metadata of each document). The function \code{Corpus} is a convenience alias to \code{SimpleCorpus} or \code{VCorpus}, depending on the arguments provided. } \seealso{ \code{\link{SimpleCorpus}}, \code{\link{VCorpus}}, and \code{\link{PCorpus}} for the corpora classes provided by package \pkg{tm}. \code{\link[tm.plugin.dc]{DCorpus}} for a distributed corpus class provided by package \pkg{tm.plugin.dc}. 
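For instance (a small sketch of the convenience constructor with default settings and an explicit class choice):
\preformatted{
  txt <- c("First document.", "Second document.")
  Corpus(VectorSource(txt))   # a SimpleCorpus for this source and default control
  VCorpus(VectorSource(txt))  # explicitly requests a VCorpus
}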
} tm/man/foreign.Rd0000644000175100001440000000353214656603525013422 0ustar hornikusers\name{foreign} \alias{read_dtm_Blei_et_al} \alias{read_dtm_MC} \title{Read Document-Term Matrices} \description{ Read document-term matrices stored in special file formats. } \usage{ read_dtm_Blei_et_al(file, vocab = NULL) read_dtm_MC(file, scalingtype = NULL) } \arguments{ \item{file}{a character string with the name of the file to read.} \item{vocab}{a character string with the name of a vocabulary file (giving the terms, one per line), or \code{NULL}.} \item{scalingtype}{a character string specifying the type of scaling to be used, or \code{NULL} (default), in which case the scaling will be inferred from the names of the files with non-zero entries found (see \bold{Details}).} } \details{ \code{read_dtm_Blei_et_al} reads the (List of Lists type sparse matrix) format employed by the Latent Dirichlet Allocation and Correlated Topic Model C codes by Blei et al (\url{http://www.cs.columbia.edu/~blei/}). MC is a toolkit for creating vector models from text documents (see \url{https://www.cs.utexas.edu/~dml/software/mc/}). It employs a variant of Compressed Column Storage (CCS) sparse matrix format, writing data into several files with suitable names: e.g., a file with \file{_dim} appended to the base file name stores the matrix dimensions. The non-zero entries are stored in a file the name of which indicates the scaling type used: e.g., \file{_tfx_nz} indicates scaling by term frequency (\samp{t}), inverse document frequency (\samp{f}) and no normalization (\samp{x}). See \file{README} in the MC sources for more information. \code{read_dtm_MC} reads such sparse matrix information with argument \code{file} giving the path with the base file name. } \value{ A \link[=DocumentTermMatrix]{document-term matrix}. } \seealso{ \code{\link[slam]{read_stm_MC}} in package \pkg{slam}. } \keyword{IO} tm/man/removePunctuation.Rd0000644000175100001440000000376013754753065015526 0ustar hornikusers\name{removePunctuation} \alias{removePunctuation} \alias{removePunctuation.character} \alias{removePunctuation.PlainTextDocument} \title{Remove Punctuation Marks from a Text Document} \description{ Remove punctuation marks from a text document. } \usage{ \method{removePunctuation}{character}(x, preserve_intra_word_contractions = FALSE, preserve_intra_word_dashes = FALSE, ucp = FALSE, \dots) \method{removePunctuation}{PlainTextDocument}(x, \dots) } \arguments{ \item{x}{a character vector or text document.} \item{preserve_intra_word_contractions}{a logical specifying whether intra-word contractions should be kept.} \item{preserve_intra_word_dashes}{a logical specifying whether intra-word dashes should be kept.} \item{ucp}{a logical specifying whether to use Unicode character properties for determining punctuation characters. If \code{FALSE} (default), characters in the ASCII \code{[:punct:]} class are taken; if \code{TRUE}, the characters with Unicode general category \code{P} (Punctuation).} \item{\dots}{arguments to be passed to or from methods; in particular, from the \code{PlainTextDocument} method to the \code{character} method.} } \value{ The character or text document \code{x} without punctuation marks (besides intra-word contractions (\samp{'}) and intra-word dashes (\samp{-}) if \code{preserve_intra_word_contractions} and \code{preserve_intra_word_dashes} are set, respectively). } \seealso{ \code{\link{getTransformations}} to list available transformation (mapping) functions. 
\code{\link{regex}} shows the class \code{[:punct:]} of punctuation characters. \url{https://unicode.org/reports/tr44/#General_Category_Values}. } \examples{ data("crude") inspect(crude[[14]]) inspect(removePunctuation(crude[[14]])) inspect(removePunctuation(crude[[14]], preserve_intra_word_contractions = TRUE, preserve_intra_word_dashes = TRUE)) } tm/man/hpc.Rd0000644000175100001440000000653314714614516012544 0ustar hornikusers\name{hpc} \alias{tm_parLapply} \alias{tm_parLapply_engine} \title{Parallelized \sQuote{lapply}} \description{ Parallelize applying a function over a list or vector according to the registered parallelization engine. } \usage{ tm_parLapply(X, FUN, ...) tm_parLapply_engine(new) } \arguments{ \item{X}{A vector (atomic or list), or other objects suitable for the engine in use.} \item{FUN}{the function to be applied to each element of \code{X}.} \item{...}{optional arguments to \code{FUN}.} \item{new}{an object inheriting from class \code{cluster} as created by \code{\link[parallel]{makeCluster}()} from package \pkg{parallel}, or a function with formals \code{X}, \code{FUN} and \code{...}, or \code{NULL} corresponding to the default of using no parallelization engine.} } \details{ Parallelization can be employed to speed up some of the embarrassingly parallel computations performed in package \pkg{tm}, specifically \code{\link{tm_index}()}, \code{\link{tm_map}()} on a non-lazy-mapped \code{\link{VCorpus}}, and \code{\link{TermDocumentMatrix}()} on a \code{\link{VCorpus}} or \code{\link{PCorpus}}. Functions \code{tm_parLapply()} and \code{tm_parLapply_engine()} can be used to customize parallelization according to the available resources. \code{tm_parLapply_engine()} is used for getting (with no arguments) or setting (with argument \code{new}) the parallelization engine employed (see below for examples). If an engine is set to an object inheriting from class \code{cluster}, \code{tm_parLapply()} calls \code{\link[parallel]{parLapply}()} with this cluster and the given arguments. If set to a function, \code{tm_parLapply()} calls the function with the given arguments. Otherwise, it simply calls \code{\link{lapply}()}. Hence, parallelization via \code{\link[parallel]{parLapply}()} and a default cluster registered via \code{\link[parallel]{setDefaultCluster}()} can be achieved via \preformatted{ tm_parLapply_engine(function(X, FUN, ...) parallel::parLapply(NULL, X, FUN, ...))} or re-registering the cluster, say \code{cl}, using \preformatted{ tm_parLapply_engine(cl)} (note that since \R version 3.5.0, one can use \code{\link[parallel]{getDefaultCluster}()} to get the registered default cluster). Using \preformatted{ tm_parLapply_engine(function(X, FUN, ...) parallel::parLapplyLB(NULL, X, FUN, ...))} or \preformatted{ tm_parLapply_engine(function(X, FUN, ...) parallel::parLapplyLB(cl, X, FUN, ...))} gives load-balancing parallelization with the registered default or given cluster, respectively. To achieve parallelization via forking (on Unix-alike platforms), one can use the above with clusters created by \code{\link[parallel]{makeForkCluster}()}, or use \preformatted{ tm_parLapply_engine(parallel::mclapply)} or \preformatted{ tm_parLapply_engine(function(X, FUN, ...) parallel::mclapply(X, FUN, ..., mc.cores = n))} to use \code{\link[parallel]{mclapply}()} with the default or given number \code{n} of cores. } \value{ A list the length of \code{X}, with the result of applying \code{FUN} together with the \code{...} arguments to each element of \code{X}. 
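A complete round trip with an explicitly created cluster might look as follows (a brief sketch; two workers are chosen arbitrarily):
\preformatted{
  cl <- parallel::makeCluster(2)
  tm_parLapply_engine(cl)
  # subsequent operations such as TermDocumentMatrix() on a VCorpus now use cl
  tm_parLapply_engine(NULL)
  parallel::stopCluster(cl)
}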
} \seealso{ \code{\link[parallel]{makeCluster}()}, \code{\link[parallel]{parLapply}()}, \code{\link[parallel]{parLapplyLB}()}, and \code{\link[parallel]{mclapply}()}. } tm/man/removeSparseTerms.Rd0000644000175100001440000000147312262761010015442 0ustar hornikusers\name{removeSparseTerms} \alias{removeSparseTerms} \title{Remove Sparse Terms from a Term-Document Matrix} \description{ Remove sparse terms from a document-term or term-document matrix. } \usage{ removeSparseTerms(x, sparse) } \arguments{ \item{x}{A \code{\link{DocumentTermMatrix}} or a \code{\link{TermDocumentMatrix}}.} \item{sparse}{A numeric for the maximal allowed sparsity in the range from bigger zero to smaller one.} } \value{ A term-document matrix where those terms from \code{x} are removed which have at least a \code{sparse} percentage of empty (i.e., terms occurring 0 times in a document) elements. I.e., the resulting matrix contains only terms with a sparse factor of less than \code{sparse}. } \examples{ data("crude") tdm <- TermDocumentMatrix(crude) removeSparseTerms(tdm, 0.2) } tm/man/Zipf_n_Heaps.Rd0000644000175100001440000000360013754752273014335 0ustar hornikusers\name{Zipf_n_Heaps} \alias{Zipf_plot} \alias{Heaps_plot} \title{Explore Corpus Term Frequency Characteristics} \description{ Explore Zipf's law and Heaps' law, two empirical laws in linguistics describing commonly observed characteristics of term frequency distributions in corpora. } \usage{ Zipf_plot(x, type = "l", \dots) Heaps_plot(x, type = "l", \dots) } \arguments{ \item{x}{a document-term matrix or term-document matrix with unweighted term frequencies.} \item{type}{a character string indicating the type of plot to be drawn, see \code{\link{plot}}.} \item{\dots}{further graphical parameters to be used for plotting.} } \details{ Zipf's law (e.g., \url{https://en.wikipedia.org/wiki/Zipf\%27s_law}) states that given some corpus of natural language utterances, the frequency of any word is inversely proportional to its rank in the frequency table, or, more generally, that the pmf of the term frequencies is of the form \eqn{c k^{-\beta}}, where \eqn{k} is the rank of the term (taken from the most to the least frequent one). We can conveniently explore the degree to which the law holds by plotting the logarithm of the frequency against the logarithm of the rank, and inspecting the goodness of fit of a linear model. Heaps' law (e.g., \url{https://en.wikipedia.org/wiki/Heaps\%27_law}) states that the vocabulary size \eqn{V} (i.e., the number of different terms employed) grows polynomially with the text size \eqn{T} (the total number of terms in the texts), so that \eqn{V = c T^\beta}. We can conveniently explore the degree to which the law holds by plotting \eqn{\log(V)} against \eqn{\log(T)}, and inspecting the goodness of fit of a linear model. } \value{ The coefficients of the fitted linear model. As a side effect, the corresponding plot is produced. } \examples{ data("acq") m <- DocumentTermMatrix(acq) Zipf_plot(m) Heaps_plot(m) } tm/man/URISource.Rd0000644000175100001440000000300212326753543013577 0ustar hornikusers\name{URISource} \alias{URISource} \title{Uniform Resource Identifier Source} \description{ Create a uniform resource identifier source. } \usage{ URISource(x, encoding = "", mode = "text") } \arguments{ \item{x}{A character vector of uniform resource identifiers (\acronym{URI}s.} \item{encoding}{A character string describing the current encoding. 
It is passed to \code{\link{iconv}} to convert the input to UTF-8.} \item{mode}{a character string specifying if and how \acronym{URI}s should be read in. Available modes are: \describe{ \item{\code{""}}{No read. In this case \code{\link{getElem}} and \code{\link{pGetElem}} only deliver \acronym{URI}s.} \item{\code{"binary"}}{\acronym{URI}s are read in binary raw mode (via \code{\link{readBin}}).} \item{\code{"text"}}{\acronym{URI}s are read as text (via \code{\link{readLines}}).} } } } \details{ A \emph{uniform resource identifier source} interprets each \acronym{URI} as a document. } \value{ An object inheriting from \code{URISource}, \code{\link{SimpleSource}}, and \code{\link{Source}}. } \seealso{ \code{\link{Source}} for basic information on the source infrastructure employed by package \pkg{tm}. \code{\link{Encoding}} and \code{\link{iconv}} on encodings. } \examples{ loremipsum <- system.file("texts", "loremipsum.txt", package = "tm") ovid <- system.file("texts", "txt", "ovid_1.txt", package = "tm") us <- URISource(sprintf("file://\%s", c(loremipsum, ovid))) inspect(VCorpus(us)) } tm/man/getTokenizers.Rd0000644000175100001440000000052713150552514014614 0ustar hornikusers\name{getTokenizers} \alias{getTokenizers} \title{Tokenizers} \description{ Predefined tokenizers. } \usage{ getTokenizers() } \value{ A character vector with tokenizers provided by package \pkg{tm}. } \seealso{ \code{\link{Boost_tokenizer}}, \code{\link{MC_tokenizer}} and \code{\link{scan_tokenizer}}. } \examples{ getTokenizers() } tm/man/removeNumbers.Rd0000644000175100001440000000223113754753061014614 0ustar hornikusers\name{removeNumbers} \alias{removeNumbers} \alias{removeNumbers.character} \alias{removeNumbers.PlainTextDocument} \title{Remove Numbers from a Text Document} \description{ Remove numbers from a text document. } \usage{ \method{removeNumbers}{character}(x, ucp = FALSE, \dots) \method{removeNumbers}{PlainTextDocument}(x, \dots) } \arguments{ \item{x}{a character vector or text document.} \item{ucp}{a logical specifying whether to use Unicode character properties for determining digit characters. If \code{FALSE} (default), characters in the ASCII \code{[:digit:]} class (i.e., the decimal digits from 0 to 9) are taken; if \code{TRUE}, the characters with Unicode general category \code{Nd} (Decimal_Number).} \item{\dots}{arguments to be passed to or from methods; in particular, from the \code{PlainTextDocument} method to the \code{character} method.} } \value{ The text document without numbers. } \seealso{ \code{\link{getTransformations}} to list available transformation (mapping) functions. \url{https://unicode.org/reports/tr44/#General_Category_Values}. } \examples{ data("crude") crude[[1]] removeNumbers(crude[[1]]) } tm/man/findAssocs.Rd0000644000175100001440000000211712262463076014057 0ustar hornikusers\name{findAssocs} \alias{findAssocs} \alias{findAssocs.DocumentTermMatrix} \alias{findAssocs.TermDocumentMatrix} \title{Find Associations in a Term-Document Matrix} \description{ Find associations in a document-term or term-document matrix. 
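A single correlation limit is recycled over all terms, e.g. (a small sketch using the bundled \code{crude} data):
\preformatted{
  data("crude")
  tdm <- TermDocumentMatrix(crude)
  findAssocs(tdm, c("oil", "opec"), 0.8)
}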
tm/man/findAssocs.Rd
\name{findAssocs}
\alias{findAssocs}
\alias{findAssocs.DocumentTermMatrix}
\alias{findAssocs.TermDocumentMatrix}
\title{Find Associations in a Term-Document Matrix}
\description{ Find associations in a document-term or term-document matrix. }
\usage{
\method{findAssocs}{DocumentTermMatrix}(x, terms, corlimit)
\method{findAssocs}{TermDocumentMatrix}(x, terms, corlimit)
}
\arguments{
\item{x}{A \code{\link{DocumentTermMatrix}} or a \code{\link{TermDocumentMatrix}}.}
\item{terms}{a character vector holding terms.}
\item{corlimit}{a numeric vector (of the same length as \code{terms}; recycled otherwise) for the (inclusive) lower correlation limits of each term in the range from zero to one.}
}
\value{A named list. Each list component is named after a term in \code{terms} and contains a named numeric vector. Each vector holds matching terms from \code{x} and their rounded correlations satisfying the inclusive lower correlation limit of \code{corlimit}.}
\examples{
data("crude")
tdm <- TermDocumentMatrix(crude)
findAssocs(tdm, c("oil", "opec", "xyz"), c(0.7, 0.75, 0.1))
}
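The associations reported by findAssocs() above are correlations of term frequency profiles across documents. The following minimal sketch computes the same kind of quantity directly with base cor() on a dense copy of the matrix; it is an illustration only, not the findAssocs() implementation.

data("crude")
dtm <- DocumentTermMatrix(crude)
m <- as.matrix(dtm)             # dense copy, fine for this small example corpus
cor(m[, "oil"], m[, "opec"])    # the kind of value findAssocs() reports (it rounds)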
tm/man/Docs.Rd
\name{Docs}
\alias{Docs}
\alias{nDocs}
\alias{nTerms}
\alias{Terms}
\title{Access Document IDs and Terms}
\description{ Access the document IDs and terms of a term-document matrix or document-term matrix, as well as their numbers. }
\usage{
Docs(x)
nDocs(x)
nTerms(x)
Terms(x)
}
\arguments{
\item{x}{Either a \code{\link{TermDocumentMatrix}} or \code{\link{DocumentTermMatrix}}.}
}
\value{ For \code{Docs} and \code{Terms}, a character vector with document IDs and terms, respectively. For \code{nDocs} and \code{nTerms}, an integer with the number of document IDs and terms, respectively. }
\examples{
data("crude")
tdm <- TermDocumentMatrix(crude)[1:10, 1:20]
Docs(tdm)
nDocs(tdm)
nTerms(tdm)
Terms(tdm)
}
tm/man/VectorSource.Rd
\name{VectorSource}
\alias{VectorSource}
\title{Vector Source}
\description{ Create a vector source. }
\usage{ VectorSource(x) }
\arguments{
\item{x}{A vector giving the texts.}
}
\details{ A \emph{vector source} interprets each element of the vector \code{x} as a document. }
\value{ An object inheriting from \code{VectorSource}, \code{\link{SimpleSource}}, and \code{\link{Source}}. }
\seealso{ \code{\link{Source}} for basic information on the source infrastructure employed by package \pkg{tm}. }
\examples{
docs <- c("This is a text.", "This is another one.")
(vs <- VectorSource(docs))
inspect(VCorpus(vs))
}
tm/man/matrix.Rd
\name{TermDocumentMatrix}
\alias{TermDocumentMatrix}
\alias{DocumentTermMatrix}
\alias{as.TermDocumentMatrix}
\alias{as.DocumentTermMatrix}
\title{Term-Document Matrix}
\description{ Constructs or coerces to a term-document matrix or a document-term matrix. }
\usage{
TermDocumentMatrix(x, control = list())
DocumentTermMatrix(x, control = list())
as.TermDocumentMatrix(x, \dots)
as.DocumentTermMatrix(x, \dots)
}
\arguments{
\item{x}{for the constructors, a corpus or an \R object from which a corpus can be generated via \code{Corpus(VectorSource(x))}; for the coercing functions, either a term-document matrix or a document-term matrix or a \link[slam]{simple triplet matrix} (package \pkg{slam}) or a \link[=termFreq]{term frequency vector}.}
\item{control}{a named list of control options. There are local options which are evaluated for each document and global options which are evaluated once for the constructed matrix. Available local options are documented in \code{\link{termFreq}} and are internally delegated to a \code{\link{termFreq}} call. This is different for a \code{\link{SimpleCorpus}}. In this case all options are processed in a fixed order in one pass to improve performance. It always uses the Boost (\url{https://www.boost.org}) Tokenizer (via \pkg{Rcpp}) and takes no custom functions as option arguments. Available global options are:
\describe{
\item{\code{bounds}}{A list with a tag \code{global} whose value must be an integer vector of length 2. Terms that appear in fewer documents than the lower bound \code{bounds$global[1]} or in more documents than the upper bound \code{bounds$global[2]} are discarded. Defaults to \code{list(global = c(1, Inf))} (i.e., every term will be used).}
\item{\code{weighting}}{A weighting function capable of handling a \code{TermDocumentMatrix}. It defaults to \code{weightTf} for term frequency weighting. Available weighting functions shipped with the \pkg{tm} package are \code{\link{weightTf}}, \code{\link{weightTfIdf}}, \code{\link{weightBin}}, and \code{\link{weightSMART}}.}
}}
\item{\dots}{the additional argument \code{weighting} (typically a \code{\link{WeightFunction}}) is allowed when coercing a simple triplet matrix to a term-document or document-term matrix.}
}
\value{ An object of class \code{TermDocumentMatrix} or class \code{DocumentTermMatrix} (both inheriting from a \link[slam]{simple triplet matrix} in package \pkg{slam}) containing a sparse term-document matrix or document-term matrix. The attribute \code{weighting} contains the weighting applied to the matrix. }
\seealso{ \code{\link{termFreq}} for available local control options. }
\examples{
data("crude")
tdm <- TermDocumentMatrix(crude,
                          control = list(removePunctuation = TRUE,
                                         stopwords = TRUE))
dtm <- DocumentTermMatrix(crude,
                          control = list(weighting = function(x) weightTfIdf(x, normalize = FALSE),
                                         stopwords = TRUE))
inspect(tdm[202:205, 1:5])
inspect(tdm[c("price", "prices", "texas"), c("127", "144", "191", "194")])
inspect(dtm[1:5, 273:276])
if(requireNamespace("SnowballC")) {
  s <- SimpleCorpus(VectorSource(unlist(lapply(crude, as.character))))
  m <- TermDocumentMatrix(s,
                          control = list(removeNumbers = TRUE,
                                         stopwords = TRUE,
                                         stemming = TRUE))
  inspect(m[c("price", "texa"), c("127", "144", "191", "194")])
}
}
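To make the global bounds option documented above concrete, here is a small sketch; the thresholds 3 and 15 are arbitrary choices for illustration, not recommended defaults.

data("crude")
tdm_all <- TermDocumentMatrix(crude)
tdm_mid <- TermDocumentMatrix(crude,
                              control = list(bounds = list(global = c(3, 15))))
nTerms(tdm_all)   # full vocabulary
nTerms(tdm_mid)   # only terms appearing in at least 3 and at most 15 documents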
tm/man/SimpleCorpus.Rd
\name{SimpleCorpus}
\alias{SimpleCorpus}
\title{Simple Corpora}
\description{ Create simple corpora. }
\usage{ SimpleCorpus(x, control = list(language = "en")) }
\arguments{
\item{x}{a \code{\link{DataframeSource}}, \code{\link{DirSource}} or \code{\link{VectorSource}}.}
\item{control}{a named list of control parameters.
\describe{
\item{\code{language}}{a character giving the language (preferably as \acronym{IETF} language tags, see \link[NLP]{language} in package \pkg{NLP}). The default language is assumed to be English (\code{"en"}).}
}
}
}
\value{ An object inheriting from \code{SimpleCorpus} and \code{Corpus}. }
\details{ A \emph{simple corpus} is fully kept in memory. Compared to a \code{VCorpus}, it is optimized for the most common usage scenario: importing plain texts from files in a directory or directly from a vector in \R, preprocessing and transforming the texts, and finally exporting them to a term-document matrix. It adheres to the \code{\link{Corpus}} \acronym{API}. However, it internally takes various shortcuts to boost performance and minimize memory pressure; consequently, it operates only under the following constraints:
\itemize{
\item{only \code{DataframeSource}, \code{DirSource} and \code{VectorSource} are supported,}
\item{no custom readers, i.e., each document is read in and stored as plain text (as a string, i.e., a character vector of length one),}
\item{transformations applied via \code{\link{tm_map}} must be able to process character vectors and return character vectors of the same length (a small sketch follows after the DESCRIPTION file below),}
\item{no lazy transformations in \code{\link{tm_map}},}
\item{no meta data for individual documents (i.e., no \code{"local"} in \code{\link{meta}}).}
}
}
\seealso{ \code{\link{Corpus}} for basic information on the corpus infrastructure employed by package \pkg{tm}. \code{\link{VCorpus}} provides an implementation with volatile storage semantics, and \code{\link{PCorpus}} provides an implementation with permanent storage semantics. }
\examples{
txt <- system.file("texts", "txt", package = "tm")
(ovid <- SimpleCorpus(DirSource(txt, encoding = "UTF-8"),
                      control = list(language = "lat")))
}
tm/DESCRIPTION
Package: tm
Title: Text Mining Package
Version: 0.7-16
Date: 2025-02-19
Authors@R: c(person("Ingo", "Feinerer", role = "aut",
                    email = "feinerer@logic.at",
                    comment = c(ORCID = "0000-0001-7656-8338")),
             person("Kurt", "Hornik", role = c("aut", "cre"),
                    email = "Kurt.Hornik@R-project.org",
                    comment = c(ORCID = "0000-0003-4198-9911")),
             person("Artifex Software, Inc.", role = c("ctb", "cph"),
                    comment = "pdf_info.ps taken from GPL Ghostscript"))
Depends: R (>= 3.4.0), NLP (>= 0.2-0)
Imports: Rcpp, parallel, slam (>= 0.1-37), stats, tools, utils, graphics, xml2
LinkingTo: BH, Rcpp
Suggests: antiword, filehash, methods, pdftools, Rcampdf, Rgraphviz, Rpoppler, SnowballC, testthat, tm.lexicon.GeneralInquirer
Description: A framework for text mining applications within R.
License: GPL-3
URL: https://tm.r-forge.r-project.org/
Additional_repositories: https://datacube.wu.ac.at
NeedsCompilation: yes
Packaged: 2025-02-19 07:09:03 UTC; hornik
Author: Ingo Feinerer [aut] (<https://orcid.org/0000-0001-7656-8338>),
  Kurt Hornik [aut, cre] (<https://orcid.org/0000-0003-4198-9911>),
  Artifex Software, Inc. [ctb, cph] (pdf_info.ps taken from GPL Ghostscript)
Maintainer: Kurt Hornik <Kurt.Hornik@R-project.org>
Repository: CRAN
Date/Publication: 2025-02-19 09:00:09 UTC
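Relating to the SimpleCorpus constraints documented above: the following minimal sketch shows a transformation pipeline that satisfies them, since both content_transformer(tolower) and removePunctuation map character vectors to character vectors of the same length. The two input strings are made up for this illustration.

sc <- SimpleCorpus(VectorSource(c("One DOCUMENT.", "A second Document.")))
sc <- tm_map(sc, content_transformer(tolower))   # character in, character out
sc <- tm_map(sc, removePunctuation)              # shipped transformations qualify as well
as.character(sc[[1]])                            # yields "one document"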